From 2d481e7bcb38bf5b3fc2a768a82e7f8aba4095ba Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Thu, 16 Jan 2025 22:40:39 -0800 Subject: [PATCH] fix more node:stream (#16385) Co-authored-by: Jarred Sumner --- scripts/check-node-all.sh | 9 +- scripts/check-node.sh | 2 +- src/bun.js/bindings/BufferEncodingType.h | 5 +- src/bun.js/bindings/BunCommonStrings.h | 11 +- src/bun.js/bindings/ErrorCode.cpp | 339 +- src/bun.js/bindings/ErrorCode.h | 15 +- src/bun.js/bindings/ErrorCode.ts | 25 +- src/bun.js/bindings/JSBuffer.cpp | 18 + src/bun.js/bindings/ZigGlobalObject.cpp | 21 +- src/bun.js/bindings/bindings.cpp | 32 +- src/bun.js/bindings/exports.zig | 6 +- src/bun.js/bindings/headers-handwritten.h | 4 +- src/bun.js/bindings/js_classes.ts | 8 + src/bun.js/bindings/webcore/JSEventTarget.cpp | 5 +- src/bun.js/javascript.zig | 6 +- src/bun.js/module_loader.zig | 44 +- src/bun.js/node/node_util_binding.zig | 17 +- src/bun.js/node/types.zig | 6 + src/codegen/bundle-modules.ts | 22 +- src/codegen/generate-classes.ts | 35 + src/codegen/generate-node-errors.ts | 31 +- src/codegen/replacements.ts | 23 +- src/js/builtins.d.ts | 18 +- src/js/builtins/BunBuiltinNames.h | 3 +- .../builtins/ReadableByteStreamController.ts | 6 +- src/js/builtins/ReadableStream.ts | 30 +- src/js/builtins/ReadableStreamBYOBReader.ts | 8 +- src/js/builtins/ReadableStreamBYOBRequest.ts | 4 +- .../ReadableStreamDefaultController.ts | 6 +- .../builtins/ReadableStreamDefaultReader.ts | 8 +- src/js/builtins/TextDecoderStream.ts | 13 +- src/js/builtins/TextEncoderStream.ts | 7 +- src/js/builtins/TransformStream.ts | 4 +- .../TransformStreamDefaultController.ts | 11 +- .../WritableStreamDefaultController.ts | 2 +- .../builtins/WritableStreamDefaultWriter.ts | 16 +- src/js/bun/ffi.ts | 6 +- src/js/internal/abort_listener.ts | 33 + src/js/internal/errors.ts | 25 +- src/js/internal/primordials.js | 25 +- src/js/internal/shared.ts | 26 +- src/js/internal/stream.promises.ts | 45 + src/js/internal/stream.ts | 113 + 
src/js/internal/streams/add-abort-signal.ts | 51 + src/js/internal/streams/compose.ts | 221 + src/js/internal/streams/destroy.ts | 340 + src/js/internal/streams/duplex.ts | 153 + src/js/internal/streams/duplexify.ts | 369 ++ src/js/internal/streams/duplexpair.ts | 59 + src/js/internal/streams/end-of-stream.ts | 297 + src/js/internal/streams/from.ts | 197 + src/js/internal/streams/lazy_transform.ts | 53 + src/js/internal/streams/legacy.ts | 116 + src/js/internal/streams/nativereadable.ts | 246 + src/js/internal/streams/nativewritable.ts | 135 + src/js/internal/streams/operators.ts | 410 ++ src/js/internal/streams/passthrough.ts | 20 + src/js/internal/streams/pipeline.ts | 448 ++ src/js/internal/streams/readable.ts | 1650 +++++ src/js/internal/streams/state.ts | 47 + src/js/internal/streams/transform.ts | 172 + src/js/internal/streams/utils.ts | 321 + src/js/internal/streams/writable.ts | 1123 ++++ src/js/internal/webstreams_adapters.ts | 785 +++ src/js/node/_stream_duplex.ts | 3 + src/js/node/_stream_passthrough.ts | 3 + src/js/node/_stream_readable.ts | 3 + src/js/node/_stream_transform.ts | 3 + src/js/node/_stream_wrap.ts | 5 + src/js/node/_stream_writable.ts | 3 + src/js/node/child_process.ts | 41 +- src/js/node/dgram.ts | 4 +- src/js/node/diagnostics_channel.ts | 4 +- src/js/node/dns.ts | 4 +- src/js/node/domain.ts | 4 +- src/js/node/events.ts | 42 +- src/js/node/fs.ts | 10 +- src/js/node/http.ts | 10 +- src/js/node/http2.ts | 5 + src/js/node/readline.ts | 16 +- src/js/node/stream.consumers.ts | 53 +- src/js/node/stream.ts | 5665 +---------------- src/js/node/timers.promises.ts | 19 +- src/js/node/tls.ts | 7 +- src/js/node/trace_events.ts | 8 +- src/js/node/util.ts | 3 +- src/js/node/zlib.ts | 9 +- src/output.zig | 28 +- test/bundler/native-plugin.test.ts | 4 +- test/js/node/http2/node-http2.test.js | 2 +- test/js/node/readline/readline.node.test.ts | 6 +- test/js/node/stream/bufferlist.test.ts | 247 - .../stream/node-stream-uint8array.test.ts | 2 +- 
.../test-http2-compat-serverresponse-drain.js | 43 - ...st-readable-from-web-enqueue-then-close.js | 26 + .../parallel/test-stream-aliases-legacy.js | 14 + .../parallel/test-stream-compose-operator.js | 127 + .../node/test/parallel/test-stream-compose.js | 539 ++ .../test/parallel/test-stream-consumers.js | 262 + .../test/parallel/test-stream-drop-take.js | 124 + .../parallel/test-stream-duplex-destroy.js | 286 + .../test/parallel/test-stream-duplex-from.js | 403 ++ .../node/test/parallel/test-stream-duplex.js | 133 + .../test/parallel/test-stream-duplexpair.js | 74 + .../test/parallel/test-stream-event-names.js | 42 + .../node/test/parallel/test-stream-filter.js | 174 + .../node/test/parallel/test-stream-flatMap.js | 129 + .../node/test/parallel/test-stream-forEach.js | 139 + .../test/parallel/test-stream-ispaused.js | 44 + test/js/node/test/parallel/test-stream-map.js | 360 ++ .../test-stream-objectmode-undefined.js | 44 + .../parallel/test-stream-pipe-deadlock.js | 27 + .../test-stream-pipe-without-listenerCount.js | 17 + .../parallel/test-stream-pipeline-duplex.js | 21 + .../parallel/test-stream-pipeline-process.js | 9 +- .../test-stream-readable-default-encoding.js | 37 + .../parallel/test-stream-readable-dispose.js | 23 + ...st-stream-readable-from-web-termination.js | 15 + .../test-stream-readable-pause-and-resume.js | 74 + ...st-stream-readable-readable-then-resume.js | 31 + ...test-stream-readable-to-web-termination.js | 12 + .../test-stream-readable-unpipe-resume.js | 20 + .../parallel/test-stream-readable-unshift.js | 6 +- .../node/test/parallel/test-stream-reduce.js | 132 + .../parallel/test-stream-some-find-every.mjs | 172 + .../node/test/parallel/test-stream-toArray.js | 91 + ...est-stream-toWeb-allows-server-response.js | 29 + .../parallel/test-stream-transform-destroy.js | 154 + .../parallel/test-stream-transform-hwm0.js | 28 + ...st-stream-transform-split-highwatermark.js | 73 + .../test-stream-transform-split-objectmode.js | 83 + 
.../test/parallel/test-stream-typedarray.js | 105 + .../test/parallel/test-stream-uint8array.js | 101 + ...stream-writable-change-default-encoding.js | 2 +- .../test-stream-writable-decoded-encoding.js | 105 + .../parallel/test-stream-writable-destroy.js | 501 ++ .../test-stream-writable-end-cb-error.js | 78 + .../parallel/test-stream2-large-read-stall.js | 4 +- .../test/parallel/test-stream2-transform.js | 492 ++ .../test/parallel/test-stream2-unpipe-leak.js | 73 + .../test/parallel/test-stream2-writable.js | 464 ++ .../parallel/test-streams-highwatermark.js | 111 + .../node/test/sequential/test-stream2-fs.js | 70 + 143 files changed, 14248 insertions(+), 6394 deletions(-) create mode 100644 src/bun.js/bindings/js_classes.ts create mode 100644 src/js/internal/abort_listener.ts create mode 100644 src/js/internal/stream.promises.ts create mode 100644 src/js/internal/stream.ts create mode 100644 src/js/internal/streams/add-abort-signal.ts create mode 100644 src/js/internal/streams/compose.ts create mode 100644 src/js/internal/streams/destroy.ts create mode 100644 src/js/internal/streams/duplex.ts create mode 100644 src/js/internal/streams/duplexify.ts create mode 100644 src/js/internal/streams/duplexpair.ts create mode 100644 src/js/internal/streams/end-of-stream.ts create mode 100644 src/js/internal/streams/from.ts create mode 100644 src/js/internal/streams/lazy_transform.ts create mode 100644 src/js/internal/streams/legacy.ts create mode 100644 src/js/internal/streams/nativereadable.ts create mode 100644 src/js/internal/streams/nativewritable.ts create mode 100644 src/js/internal/streams/operators.ts create mode 100644 src/js/internal/streams/passthrough.ts create mode 100644 src/js/internal/streams/pipeline.ts create mode 100644 src/js/internal/streams/readable.ts create mode 100644 src/js/internal/streams/state.ts create mode 100644 src/js/internal/streams/transform.ts create mode 100644 src/js/internal/streams/utils.ts create mode 100644 
src/js/internal/streams/writable.ts create mode 100644 src/js/internal/webstreams_adapters.ts create mode 100644 src/js/node/_stream_duplex.ts create mode 100644 src/js/node/_stream_passthrough.ts create mode 100644 src/js/node/_stream_readable.ts create mode 100644 src/js/node/_stream_transform.ts create mode 100644 src/js/node/_stream_wrap.ts create mode 100644 src/js/node/_stream_writable.ts delete mode 100644 test/js/node/stream/bufferlist.test.ts delete mode 100644 test/js/node/test/parallel/test-http2-compat-serverresponse-drain.js create mode 100644 test/js/node/test/parallel/test-readable-from-web-enqueue-then-close.js create mode 100644 test/js/node/test/parallel/test-stream-aliases-legacy.js create mode 100644 test/js/node/test/parallel/test-stream-compose-operator.js create mode 100644 test/js/node/test/parallel/test-stream-compose.js create mode 100644 test/js/node/test/parallel/test-stream-consumers.js create mode 100644 test/js/node/test/parallel/test-stream-drop-take.js create mode 100644 test/js/node/test/parallel/test-stream-duplex-destroy.js create mode 100644 test/js/node/test/parallel/test-stream-duplex-from.js create mode 100644 test/js/node/test/parallel/test-stream-duplex.js create mode 100644 test/js/node/test/parallel/test-stream-duplexpair.js create mode 100644 test/js/node/test/parallel/test-stream-event-names.js create mode 100644 test/js/node/test/parallel/test-stream-filter.js create mode 100644 test/js/node/test/parallel/test-stream-flatMap.js create mode 100644 test/js/node/test/parallel/test-stream-forEach.js create mode 100644 test/js/node/test/parallel/test-stream-ispaused.js create mode 100644 test/js/node/test/parallel/test-stream-map.js create mode 100644 test/js/node/test/parallel/test-stream-objectmode-undefined.js create mode 100644 test/js/node/test/parallel/test-stream-pipe-deadlock.js create mode 100644 test/js/node/test/parallel/test-stream-pipe-without-listenerCount.js create mode 100644 
test/js/node/test/parallel/test-stream-pipeline-duplex.js create mode 100644 test/js/node/test/parallel/test-stream-readable-default-encoding.js create mode 100644 test/js/node/test/parallel/test-stream-readable-dispose.js create mode 100644 test/js/node/test/parallel/test-stream-readable-from-web-termination.js create mode 100644 test/js/node/test/parallel/test-stream-readable-pause-and-resume.js create mode 100644 test/js/node/test/parallel/test-stream-readable-readable-then-resume.js create mode 100644 test/js/node/test/parallel/test-stream-readable-to-web-termination.js create mode 100644 test/js/node/test/parallel/test-stream-readable-unpipe-resume.js create mode 100644 test/js/node/test/parallel/test-stream-reduce.js create mode 100644 test/js/node/test/parallel/test-stream-some-find-every.mjs create mode 100644 test/js/node/test/parallel/test-stream-toArray.js create mode 100644 test/js/node/test/parallel/test-stream-toWeb-allows-server-response.js create mode 100644 test/js/node/test/parallel/test-stream-transform-destroy.js create mode 100644 test/js/node/test/parallel/test-stream-transform-hwm0.js create mode 100644 test/js/node/test/parallel/test-stream-transform-split-highwatermark.js create mode 100644 test/js/node/test/parallel/test-stream-transform-split-objectmode.js create mode 100644 test/js/node/test/parallel/test-stream-typedarray.js create mode 100644 test/js/node/test/parallel/test-stream-uint8array.js create mode 100644 test/js/node/test/parallel/test-stream-writable-decoded-encoding.js create mode 100644 test/js/node/test/parallel/test-stream-writable-destroy.js create mode 100644 test/js/node/test/parallel/test-stream-writable-end-cb-error.js create mode 100644 test/js/node/test/parallel/test-stream2-transform.js create mode 100644 test/js/node/test/parallel/test-stream2-unpipe-leak.js create mode 100644 test/js/node/test/parallel/test-stream2-writable.js create mode 100644 test/js/node/test/parallel/test-streams-highwatermark.js create 
mode 100644 test/js/node/test/sequential/test-stream2-fs.js diff --git a/scripts/check-node-all.sh b/scripts/check-node-all.sh index 4c907de593..3928ce9a34 100755 --- a/scripts/check-node-all.sh +++ b/scripts/check-node-all.sh @@ -1,11 +1,5 @@ #!/bin/sh -# How to use this script: -# 1. Pick a module from node's standard library (e.g. 'assert', 'fs') -# 2. Copy over relevant tests from node's parallel test suite into test/js/node/test/parallel -# 3. Run this script, e.g. `./scripts/check-node.sh fs` -# 4. Tests that passed get staged for commit - i=0 j=0 @@ -32,9 +26,8 @@ do if timeout 2 $PWD/build/debug/bun-debug ./$x then j=$((j+1)) - git add ./$x + git add $x fi - echo done echo $i tests tested diff --git a/scripts/check-node.sh b/scripts/check-node.sh index a3c3159525..9d5337d62f 100755 --- a/scripts/check-node.sh +++ b/scripts/check-node.sh @@ -32,7 +32,7 @@ do if timeout 2 $PWD/build/debug/bun-debug ./$x then j=$((j+1)) - git add ./$x + git add $x fi echo done diff --git a/src/bun.js/bindings/BufferEncodingType.h b/src/bun.js/bindings/BufferEncodingType.h index 6d3e93274c..afe5589f0d 100644 --- a/src/bun.js/bindings/BufferEncodingType.h +++ b/src/bun.js/bindings/BufferEncodingType.h @@ -1,8 +1,11 @@ #pragma once +#include "stdint.h" + namespace WebCore { -enum class BufferEncodingType { +// must match src/bun.js/node/types.zig#Encoding +enum class BufferEncodingType : uint8_t { utf8 = 0, ucs2 = 1, utf16le = 2, diff --git a/src/bun.js/bindings/BunCommonStrings.h b/src/bun.js/bindings/BunCommonStrings.h index b74b2e7be8..4e772840e6 100644 --- a/src/bun.js/bindings/BunCommonStrings.h +++ b/src/bun.js/bindings/BunCommonStrings.h @@ -12,7 +12,16 @@ // If we don't use it as an identifier name, but we want to avoid allocating the string frequently, put it in this list. 
#define BUN_COMMON_STRINGS_EACH_NAME_NOT_BUILTIN_NAMES(macro) \ macro(SystemError) \ - macro(S3Error) + macro(S3Error) \ + macro(utf8) \ + macro(ucs2) \ + macro(utf16le) \ + macro(latin1) \ + macro(ascii) \ + macro(base64) \ + macro(base64url) \ + macro(hex) \ + macro(buffer) // clang-format on #define BUN_COMMON_STRINGS_ACCESSOR_DEFINITION(name) \ diff --git a/src/bun.js/bindings/ErrorCode.cpp b/src/bun.js/bindings/ErrorCode.cpp index 8852628317..ad17a1ae31 100644 --- a/src/bun.js/bindings/ErrorCode.cpp +++ b/src/bun.js/bindings/ErrorCode.cpp @@ -27,7 +27,30 @@ #include "ErrorCode.h" -static JSC::JSObject* createErrorPrototype(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::ErrorType type, WTF::ASCIILiteral name, WTF::ASCIILiteral code, bool isDOMExceptionPrototype = false) +JSC_DEFINE_HOST_FUNCTION(NodeError_proto_toString, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +{ + JSC::VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto thisVal = callFrame->thisValue(); + + auto name = thisVal.get(globalObject, vm.propertyNames->name); + RETURN_IF_EXCEPTION(scope, {}); + auto code = thisVal.get(globalObject, WebCore::builtinNames(vm).codePublicName()); + RETURN_IF_EXCEPTION(scope, {}); + auto message = thisVal.get(globalObject, vm.propertyNames->message); + RETURN_IF_EXCEPTION(scope, {}); + + auto name_s = name.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto code_s = code.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message_s = message.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + return JSC::JSValue::encode(JSC::jsString(vm, WTF::makeString(name_s, " ["_s, code_s, "]: "_s, message_s))); +} + +static JSC::JSObject* createErrorPrototype(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::ErrorType type, WTF::ASCIILiteral name, WTF::ASCIILiteral code, bool isDOMExceptionPrototype) { JSC::JSObject* prototype; @@ -62,6 +85,7 @@ static JSC::JSObject* 
createErrorPrototype(JSC::VM& vm, JSC::JSGlobalObject* glo prototype->putDirect(vm, vm.propertyNames->name, jsString(vm, String(name)), 0); prototype->putDirect(vm, WebCore::builtinNames(vm).codePublicName(), jsString(vm, String(code)), 0); + prototype->putDirect(vm, vm.propertyNames->toString, JSC::JSFunction::create(vm, globalObject, 0, "toString"_s, NodeError_proto_toString, JSC::ImplementationVisibility::Private), 0); return prototype; } @@ -128,18 +152,18 @@ static ErrorCodeCache* errorCache(Zig::GlobalObject* globalObject) } // clang-format on -static Structure* createErrorStructure(JSC::VM& vm, JSGlobalObject* globalObject, JSC::ErrorType type, WTF::ASCIILiteral name, WTF::ASCIILiteral code, bool isDOMExceptionPrototype = false) +static Structure* createErrorStructure(JSC::VM& vm, JSGlobalObject* globalObject, JSC::ErrorType type, WTF::ASCIILiteral name, WTF::ASCIILiteral code, bool isDOMExceptionPrototype) { auto* prototype = createErrorPrototype(vm, globalObject, type, name, code, isDOMExceptionPrototype); return ErrorInstance::createStructure(vm, globalObject, prototype); } -JSObject* ErrorCodeCache::createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, JSValue message, JSValue options) +JSObject* ErrorCodeCache::createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, JSValue message, JSValue options, bool isDOMExceptionPrototype) { auto* cache = errorCache(globalObject); const auto& data = errors[static_cast(code)]; if (!cache->internalField(static_cast(code))) { - auto* structure = createErrorStructure(vm, globalObject, data.type, data.name, data.code, code == ErrorCode::ABORT_ERR); + auto* structure = createErrorStructure(vm, globalObject, data.type, data.name, data.code, isDOMExceptionPrototype); cache->internalField(static_cast(code)).set(vm, cache, structure); } @@ -147,35 +171,35 @@ JSObject* ErrorCodeCache::createError(VM& vm, Zig::GlobalObject* globalObject, E return JSC::ErrorInstance::create(globalObject, structure, 
message, options, nullptr, JSC::RuntimeType::TypeNothing, data.type, true); } -JSObject* createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, const String& message) +JSObject* createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, const String& message, bool isDOMExceptionPrototype) { - return errorCache(globalObject)->createError(vm, globalObject, code, jsString(vm, message), jsUndefined()); + return errorCache(globalObject)->createError(vm, globalObject, code, jsString(vm, message), jsUndefined(), isDOMExceptionPrototype); } -JSObject* createError(VM& vm, JSC::JSGlobalObject* globalObject, ErrorCode code, JSValue message) +JSObject* createError(VM& vm, JSC::JSGlobalObject* globalObject, ErrorCode code, JSValue message, bool isDOMExceptionPrototype) { if (auto* zigGlobalObject = jsDynamicCast(globalObject)) - return createError(vm, zigGlobalObject, code, message, jsUndefined()); + return createError(vm, zigGlobalObject, code, message, jsUndefined(), isDOMExceptionPrototype); - auto* structure = createErrorStructure(vm, globalObject, errors[static_cast(code)].type, errors[static_cast(code)].name, errors[static_cast(code)].code); + auto* structure = createErrorStructure(vm, globalObject, errors[static_cast(code)].type, errors[static_cast(code)].name, errors[static_cast(code)].code, isDOMExceptionPrototype); return JSC::ErrorInstance::create(globalObject, structure, message, jsUndefined(), nullptr, JSC::RuntimeType::TypeNothing, errors[static_cast(code)].type, true); } -JSC::JSObject* createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, JSValue message, JSValue options) +JSC::JSObject* createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, JSValue message, JSValue options, bool isDOMExceptionPrototype) { - return errorCache(globalObject)->createError(vm, globalObject, code, message, options); + return errorCache(globalObject)->createError(vm, globalObject, code, message, options, isDOMExceptionPrototype); } 
-JSObject* createError(JSC::JSGlobalObject* globalObject, ErrorCode code, const String& message) +JSObject* createError(JSC::JSGlobalObject* globalObject, ErrorCode code, const String& message, bool isDOMExceptionPrototype) { auto& vm = globalObject->vm(); - return createError(vm, globalObject, code, jsString(vm, message)); + return createError(vm, globalObject, code, jsString(vm, message), isDOMExceptionPrototype); } -JSObject* createError(Zig::JSGlobalObject* globalObject, ErrorCode code, JSC::JSValue message) +JSObject* createError(Zig::JSGlobalObject* globalObject, ErrorCode code, JSC::JSValue message, bool isDOMExceptionPrototype) { auto& vm = globalObject->vm(); - return createError(vm, globalObject, code, message); + return createError(vm, globalObject, code, message, isDOMExceptionPrototype); } // export fn Bun__inspect(globalThis: *JSGlobalObject, value: JSValue) ZigString @@ -518,7 +542,7 @@ JSC::EncodedJSValue INVALID_ARG_VALUE_RangeError(JSC::ThrowScope& throwScope, JS auto& vm = globalObject->vm(); auto message = makeString("The "_s, type, " '"_s, name, "' "_s, reason, ". 
Received "_s, value_string); - auto* structure = createErrorStructure(vm, globalObject, ErrorType::RangeError, "RangeError"_s, "ERR_INVALID_ARG_VALUE"_s); + auto* structure = createErrorStructure(vm, globalObject, ErrorType::RangeError, "RangeError"_s, "ERR_INVALID_ARG_VALUE"_s, false); auto error = JSC::ErrorInstance::create(vm, structure, message, jsUndefined(), nullptr, JSC::RuntimeType::TypeNothing, ErrorType::RangeError, true); throwScope.throwException(globalObject, error); return {}; @@ -651,107 +675,39 @@ static JSValue ERR_INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobalO return createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, message); } -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_OUT_OF_RANGE, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) -{ - JSC::VM& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); - - EXPECT_ARG_COUNT(3); - - auto message = Message::ERR_OUT_OF_RANGE(scope, globalObject, callFrame->argument(0), callFrame->argument(1), callFrame->argument(2)); - RETURN_IF_EXCEPTION(scope, {}); - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_OUT_OF_RANGE, message)); -} - extern "C" JSC::EncodedJSValue Bun__createErrorWithCode(JSC::JSGlobalObject* globalObject, ErrorCode code, BunString* message) { return JSValue::encode(createError(globalObject, code, message->toWTFString(BunString::ZeroCopy))); } -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_INVALID_PROTOCOL, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) -{ - JSC::VM& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); - - EXPECT_ARG_COUNT(2); - - auto actual = callFrame->argument(0).toWTFString(globalObject); - RETURN_IF_EXCEPTION(scope, {}); - - auto expected = callFrame->argument(1).toWTFString(globalObject); - RETURN_IF_EXCEPTION(scope, {}); - - auto message = makeString("Protocol \""_s, actual, "\" not supported. 
Expected \""_s, expected, "\""_s); - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_INVALID_PROTOCOL, message)); -} - -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_BROTLI_INVALID_PARAM, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) -{ - JSC::VM& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); - - EXPECT_ARG_COUNT(1); - - auto param = callFrame->argument(0).toWTFString(globalObject); - RETURN_IF_EXCEPTION(scope, {}); - - auto message = makeString(param, " is not a valid Brotli parameter"_s); - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_BROTLI_INVALID_PARAM, message)); -} - -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_BUFFER_TOO_LARGE, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) -{ - JSC::VM& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); - - EXPECT_ARG_COUNT(1); - - auto param = callFrame->argument(0).toWTFString(globalObject); - RETURN_IF_EXCEPTION(scope, {}); - - auto message = makeString("Cannot create a Buffer larger than "_s, param, " bytes"_s); - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_BUFFER_TOO_LARGE, message)); -} - -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_UNHANDLED_ERROR, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) -{ - JSC::VM& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); - - auto err = callFrame->argument(0); - - if (err.isUndefined()) { - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_UNHANDLED_ERROR, "Unhandled error."_s)); - } - if (err.isString()) { - auto err_str = err.getString(globalObject); - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_UNHANDLED_ERROR, makeString("Unhandled error. 
("_s, err_str, ")"_s))); - } - if (err.isCell()) { - auto cell = err.asCell(); - if (cell->inherits()) { - return JSC::JSValue::encode(jsCast(cell)->value()); - } - } - auto err_str = err.toWTFString(globalObject); - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_UNHANDLED_ERROR, makeString("Unhandled error. ("_s, err_str, ")"_s))); -} - } // namespace Bun +JSC_DEFINE_HOST_FUNCTION(jsFunctionMakeAbortError, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) +{ + auto* globalObject = reinterpret_cast(lexicalGlobalObject); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto message = callFrame->argument(0); + if (message.isUndefined()) message = JSC::jsString(vm, String("The operation was aborted"_s)); + auto options = callFrame->argument(1); + if (!options.isUndefined() && options.isCell() && !options.asCell()->isObject()) return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, "options"_s, "object"_s, options); + auto error = Bun::createError(vm, globalObject, Bun::ErrorCode::ABORT_ERR, message, options, false); + return JSC::JSValue::encode(error); +} + JSC::JSValue WebCore::toJS(JSC::JSGlobalObject* globalObject, CommonAbortReason abortReason) { switch (abortReason) { case CommonAbortReason::Timeout: { - return createError(globalObject, Bun::ErrorCode::ABORT_ERR, "The operation timed out"_s); + return createError(globalObject, Bun::ErrorCode::ABORT_ERR, "The operation timed out"_s, true); } case CommonAbortReason::UserAbort: { // This message is a standardized error message. We cannot change it. // https://webidl.spec.whatwg.org/#idl-DOMException:~:text=The%20operation%20was%20aborted. 
- return createError(globalObject, Bun::ErrorCode::ABORT_ERR, "The operation was aborted."_s); + return createError(globalObject, Bun::ErrorCode::ABORT_ERR, "The operation was aborted."_s, true); } case CommonAbortReason::ConnectionClosed: { - return createError(globalObject, Bun::ErrorCode::ABORT_ERR, "The connection was closed"_s); + return createError(globalObject, Bun::ErrorCode::ABORT_ERR, "The connection was closed"_s, true); } default: { break; @@ -839,6 +795,171 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject return JSValue::encode(ERR_INVALID_ARG_VALUE(scope, globalObject, arg0, arg1, arg2)); } + case Bun::ErrorCode::ERR_UNKNOWN_ENCODING: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString("Unknown encoding: "_s, str0); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + + case Bun::ErrorCode::ERR_STREAM_DESTROYED: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString("Cannot call "_s, str0, " after a stream was destroyed"_s); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + + case Bun::ErrorCode::ERR_METHOD_NOT_IMPLEMENTED: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + return JSC::JSValue::encode(createError(globalObject, error, makeString("The "_s, str0, " method is not implemented"_s))); + } + + case Bun::ErrorCode::ERR_STREAM_ALREADY_FINISHED: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString("Cannot call "_s, str0, " after a stream was finished"_s); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + + case Bun::ErrorCode::ERR_MISSING_ARGS: { + switch 
(callFrame->argumentCount()) { + case 0: { + UNREACHABLE(); + } + case 1: { + ASSERT("At least one arg needs to be specified"); + } + case 2: { + JSValue arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + return JSC::JSValue::encode(createError(globalObject, error, makeString("The \""_s, str0, "\" argument must be specified"_s))); + } + case 3: { + JSValue arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + JSValue arg1 = callFrame->argument(2); + auto str1 = arg1.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + return JSC::JSValue::encode(createError(globalObject, error, makeString("The \""_s, str0, "\" and \""_s, str1, "\" arguments must be specified"_s))); + } + default: { + WTF::StringBuilder result; + result.append("The "_s); + auto argumentCount = callFrame->argumentCount(); + for (int i = 1; i < argumentCount; i += 1) { + if (i == argumentCount - 1) result.append("and "_s); + result.append("\""_s); + JSValue arg = callFrame->argument(i); + auto str = arg.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + result.append(str); + result.append("\""_s); + if (i != argumentCount - 1) result.append(","_s); + result.append(" "_s); + } + result.append("arguments must be specified"_s); + return JSC::JSValue::encode(createError(globalObject, error, result.toString())); + } + } + } + + case Bun::ErrorCode::ERR_INVALID_RETURN_VALUE: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto arg1 = callFrame->argument(2); + auto str1 = arg1.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto arg2 = callFrame->argument(3); + auto str2 = determineSpecificType(globalObject, arg2); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString("Expected "_s, str0, " to be returned from the \""_s, str1, "\" function but got "_s, str2, "."_s); + 
return JSC::JSValue::encode(createError(globalObject, error, message)); + } + + case Bun::ErrorCode::ERR_OUT_OF_RANGE: { + auto arg0 = callFrame->argument(1); + auto arg1 = callFrame->argument(2); + auto arg2 = callFrame->argument(3); + return JSC::JSValue::encode(createError(globalObject, error, Message::ERR_OUT_OF_RANGE(scope, globalObject, arg0, arg1, arg2))); + } + + case Bun::ErrorCode::ERR_INVALID_STATE: + case Bun::ErrorCode::ERR_INVALID_STATE_TypeError: + case Bun::ErrorCode::ERR_INVALID_STATE_RangeError: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString("Invalid state: "_s, str0); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + + case Bun::ErrorCode::ERR_INVALID_PROTOCOL: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto arg1 = callFrame->argument(2); + auto str1 = arg1.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString("Protocol \""_s, str0, "\" not supported. 
Expected \""_s, str1, "\""_s); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + + case Bun::ErrorCode::ERR_BROTLI_INVALID_PARAM: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString(str0, " is not a valid Brotli parameter"_s); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + + case Bun::ErrorCode::ERR_BUFFER_TOO_LARGE: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString("Cannot create a Buffer larger than "_s, str0, " bytes"_s); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + + case Bun::ErrorCode::ERR_UNHANDLED_ERROR: { + auto arg0 = callFrame->argument(1); + + if (arg0.isUndefined()) { + auto message = "Unhandled error."_s; + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + if (arg0.isString()) { + auto str0 = arg0.getString(globalObject); + auto message = makeString("Unhandled error. ("_s, str0, ")"_s); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + if (arg0.isCell()) { + auto cell = arg0.asCell(); + if (cell->inherits()) { + return JSC::JSValue::encode(jsCast(cell)->value()); + } + } + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString("Unhandled error. 
("_s, str0, ")"_s); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + + case Bun::ErrorCode::ERR_INVALID_THIS: { + auto arg0 = callFrame->argument(1); + auto str0 = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto message = makeString("Value of \"this\" must be of type "_s, str0); + return JSC::JSValue::encode(createError(globalObject, error, message)); + } + case ErrorCode::ERR_IPC_DISCONNECTED: return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_IPC_DISCONNECTED, "IPC channel is already disconnected"_s)); case ErrorCode::ERR_SERVER_NOT_RUNNING: @@ -865,6 +986,24 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SOCKET_DGRAM_NOT_RUNNING, "Not running"_s)); case ErrorCode::ERR_INVALID_CURSOR_POS: return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_INVALID_CURSOR_POS, "Cannot set cursor row without setting its column"_s)); + case ErrorCode::ERR_MULTIPLE_CALLBACK: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_MULTIPLE_CALLBACK, "Callback called multiple times"_s)); + case ErrorCode::ERR_STREAM_PREMATURE_CLOSE: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_STREAM_PREMATURE_CLOSE, "Premature close"_s)); + case ErrorCode::ERR_STREAM_NULL_VALUES: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_STREAM_NULL_VALUES, "May not write null values to stream"_s)); + case ErrorCode::ERR_STREAM_CANNOT_PIPE: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_STREAM_CANNOT_PIPE, "Cannot pipe, not readable"_s)); + case ErrorCode::ERR_STREAM_WRITE_AFTER_END: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_STREAM_WRITE_AFTER_END, "write after end"_s)); + case ErrorCode::ERR_STREAM_UNSHIFT_AFTER_END_EVENT: + return JSC::JSValue::encode(createError(globalObject, 
ErrorCode::ERR_STREAM_UNSHIFT_AFTER_END_EVENT, "stream.unshift() after end event"_s)); + case ErrorCode::ERR_STREAM_PUSH_AFTER_EOF: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_STREAM_PUSH_AFTER_EOF, "stream.push() after EOF"_s)); + case ErrorCode::ERR_STREAM_UNABLE_TO_PIPE: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_STREAM_UNABLE_TO_PIPE, "Cannot pipe to a closed or destroyed stream"_s)); + case ErrorCode::ERR_ILLEGAL_CONSTRUCTOR: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_ILLEGAL_CONSTRUCTOR, "Illegal constructor"_s)); default: { break; diff --git a/src/bun.js/bindings/ErrorCode.h b/src/bun.js/bindings/ErrorCode.h index d06bb8e4a2..45ae3c3798 100644 --- a/src/bun.js/bindings/ErrorCode.h +++ b/src/bun.js/bindings/ErrorCode.h @@ -38,7 +38,7 @@ public: static ErrorCodeCache* create(VM& vm, Structure* structure); static Structure* createStructure(VM& vm, JSGlobalObject* globalObject); - JSObject* createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, JSValue message, JSValue options); + JSObject* createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, JSValue message, JSValue options, bool isDOMExceptionPrototype); private: JS_EXPORT_PRIVATE ErrorCodeCache(VM&, Structure*); @@ -47,20 +47,15 @@ private: }; JSC::EncodedJSValue throwError(JSC::JSGlobalObject* globalObject, JSC::ThrowScope& scope, ErrorCode code, const WTF::String& message); -JSC::JSObject* createError(Zig::GlobalObject* globalObject, ErrorCode code, const WTF::String& message); -JSC::JSObject* createError(JSC::JSGlobalObject* globalObject, ErrorCode code, const WTF::String& message); -JSC::JSObject* createError(Zig::GlobalObject* globalObject, ErrorCode code, JSC::JSValue message); -JSC::JSObject* createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, JSValue message, JSValue options = jsUndefined()); +JSC::JSObject* createError(Zig::GlobalObject* globalObject, ErrorCode code, const 
WTF::String& message, bool isDOMExceptionPrototype = false); +JSC::JSObject* createError(JSC::JSGlobalObject* globalObject, ErrorCode code, const WTF::String& message, bool isDOMExceptionPrototype = false); +JSC::JSObject* createError(Zig::GlobalObject* globalObject, ErrorCode code, JSC::JSValue message, bool isDOMExceptionPrototype = false); +JSC::JSObject* createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, JSValue message, JSValue options, bool isDOMExceptionPrototype = false); JSC::JSValue toJS(JSC::JSGlobalObject*, ErrorCode); JSObject* createInvalidThisError(JSGlobalObject* globalObject, JSValue thisValue, const ASCIILiteral typeName); JSObject* createInvalidThisError(JSGlobalObject* globalObject, const String& message); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_OUT_OF_RANGE); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_INVALID_PROTOCOL); JSC_DECLARE_HOST_FUNCTION(jsFunctionMakeErrorWithCode); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_BROTLI_INVALID_PARAM); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_BUFFER_TOO_LARGE); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_UNHANDLED_ERROR); enum Bound { LOWER, diff --git a/src/bun.js/bindings/ErrorCode.ts b/src/bun.js/bindings/ErrorCode.ts index bfe08a4f78..8a6b4eab76 100644 --- a/src/bun.js/bindings/ErrorCode.ts +++ b/src/bun.js/bindings/ErrorCode.ts @@ -6,11 +6,13 @@ type ErrorCodeMapping = Array< /** Constructor **/ typeof TypeError | typeof RangeError | typeof Error | typeof SyntaxError, /** error.name. 
Defaults to `Constructor.name` (that is, mapping[1].name */ - string, + string?, + (typeof TypeError | typeof RangeError | typeof Error | typeof SyntaxError)?, + (typeof TypeError | typeof RangeError | typeof Error | typeof SyntaxError)?, ] >; -export default [ +const errors: ErrorCodeMapping = [ ["ABORT_ERR", Error, "AbortError"], ["ERR_CRYPTO_INVALID_DIGEST", TypeError], ["ERR_ENCODING_INVALID_ENCODED_DATA", TypeError], @@ -29,11 +31,11 @@ export default [ ["ERR_PARSE_ARGS_UNKNOWN_OPTION", TypeError], ["ERR_SERVER_NOT_RUNNING", Error], ["ERR_SOCKET_BAD_TYPE", TypeError], - ["ERR_STREAM_ALREADY_FINISHED", TypeError], - ["ERR_STREAM_CANNOT_PIPE", TypeError], - ["ERR_STREAM_DESTROYED", TypeError], + ["ERR_STREAM_ALREADY_FINISHED", Error], + ["ERR_STREAM_CANNOT_PIPE", Error], + ["ERR_STREAM_DESTROYED", Error], ["ERR_STREAM_NULL_VALUES", TypeError], - ["ERR_STREAM_WRITE_AFTER_END", TypeError], + ["ERR_STREAM_WRITE_AFTER_END", Error], ["ERR_ZLIB_INITIALIZATION_FAILED", Error], ["ERR_STRING_TOO_LONG", Error], ["ERR_CRYPTO_SCRYPT_INVALID_PARAMETER", Error], @@ -44,7 +46,7 @@ export default [ ["ERR_BUFFER_TOO_LARGE", RangeError], ["ERR_BROTLI_INVALID_PARAM", RangeError], ["ERR_UNKNOWN_ENCODING", TypeError], - ["ERR_INVALID_STATE", Error], + ["ERR_INVALID_STATE", Error, undefined, TypeError, RangeError], ["ERR_BUFFER_OUT_OF_BOUNDS", RangeError], ["ERR_UNKNOWN_SIGNAL", TypeError], ["ERR_SOCKET_BAD_PORT", RangeError], @@ -67,6 +69,12 @@ export default [ ["ERR_SOCKET_DGRAM_NOT_CONNECTED", Error], ["ERR_SOCKET_DGRAM_NOT_RUNNING", Error], ["ERR_INVALID_CURSOR_POS", TypeError], + ["ERR_MULTIPLE_CALLBACK", Error], + ["ERR_STREAM_PREMATURE_CLOSE", Error], + ["ERR_METHOD_NOT_IMPLEMENTED", Error], + ["ERR_STREAM_UNSHIFT_AFTER_END_EVENT", Error], + ["ERR_STREAM_PUSH_AFTER_EOF", Error], + ["ERR_STREAM_UNABLE_TO_PIPE", Error], // Bun-specific ["ERR_FORMDATA_PARSE_ERROR", TypeError], @@ -155,4 +163,5 @@ export default [ ["ERR_S3_INVALID_ENDPOINT", Error], ["ERR_S3_INVALID_SIGNATURE", 
Error], ["ERR_S3_INVALID_SESSION_TOKEN", Error], -] as ErrorCodeMapping; +] as const; +export default errors; diff --git a/src/bun.js/bindings/JSBuffer.cpp b/src/bun.js/bindings/JSBuffer.cpp index f3cdd036b8..d40f59d2f6 100644 --- a/src/bun.js/bindings/JSBuffer.cpp +++ b/src/bun.js/bindings/JSBuffer.cpp @@ -109,6 +109,24 @@ static JSC_DECLARE_HOST_FUNCTION(jsBufferPrototypeFunction_toString); static JSC_DECLARE_HOST_FUNCTION(jsBufferPrototypeFunction_write); #pragma clang diagnostic pop +extern "C" EncodedJSValue WebCore_BufferEncodingType_toJS(JSC::JSGlobalObject* lexicalGlobalObject, WebCore::BufferEncodingType encoding) +{ + // clang-format off + auto* globalObject = reinterpret_cast(lexicalGlobalObject); + switch (encoding) { + case WebCore::BufferEncodingType::utf8: return JSC::JSValue::encode(globalObject->commonStrings().utf8String(globalObject)); + case WebCore::BufferEncodingType::ucs2: return JSC::JSValue::encode(globalObject->commonStrings().ucs2String(globalObject)); + case WebCore::BufferEncodingType::utf16le: return JSC::JSValue::encode(globalObject->commonStrings().utf16leString(globalObject)); + case WebCore::BufferEncodingType::latin1: return JSC::JSValue::encode(globalObject->commonStrings().latin1String(globalObject)); + case WebCore::BufferEncodingType::ascii: return JSC::JSValue::encode(globalObject->commonStrings().asciiString(globalObject)); + case WebCore::BufferEncodingType::base64: return JSC::JSValue::encode(globalObject->commonStrings().base64String(globalObject)); + case WebCore::BufferEncodingType::base64url: return JSC::JSValue::encode(globalObject->commonStrings().base64urlString(globalObject)); + case WebCore::BufferEncodingType::hex: return JSC::JSValue::encode(globalObject->commonStrings().hexString(globalObject)); + case WebCore::BufferEncodingType::buffer: return JSC::JSValue::encode(globalObject->commonStrings().bufferString(globalObject)); + } + // clang-format on +} + namespace Bun { // Use a JSString* here to avoid 
unnecessarily joining the rope string. diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index 819cbb59d6..0eb54881dd 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -184,6 +184,8 @@ BUN_DECLARE_HOST_FUNCTION(BUN__HTTP2__getUnpackedSettings); BUN_DECLARE_HOST_FUNCTION(BUN__HTTP2_getPackedSettings); BUN_DECLARE_HOST_FUNCTION(BUN__HTTP2_assertSettings); +JSC_DEFINE_HOST_FUNCTION(jsFunctionMakeAbortError, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)); + using JSGlobalObject = JSC::JSGlobalObject; using Exception = JSC::Exception; using JSValue = JSC::JSValue; @@ -1975,28 +1977,12 @@ JSC_DEFINE_CUSTOM_SETTER(setterSubtleCrypto, return true; } -JSC_DECLARE_HOST_FUNCTION(makeThisTypeErrorForBuiltins); JSC_DECLARE_HOST_FUNCTION(makeGetterTypeErrorForBuiltins); JSC_DECLARE_HOST_FUNCTION(makeDOMExceptionForBuiltins); JSC_DECLARE_HOST_FUNCTION(createWritableStreamFromInternal); JSC_DECLARE_HOST_FUNCTION(getInternalWritableStream); JSC_DECLARE_HOST_FUNCTION(isAbortSignal); -JSC_DEFINE_HOST_FUNCTION(makeThisTypeErrorForBuiltins, (JSGlobalObject * globalObject, CallFrame* callFrame)) -{ - ASSERT(callFrame); - ASSERT(callFrame->argumentCount() == 2); - VM& vm = globalObject->vm(); - DeferTermination deferScope(vm); - auto scope = DECLARE_CATCH_SCOPE(vm); - - auto interfaceName = callFrame->uncheckedArgument(0).getString(globalObject); - scope.assertNoException(); - auto functionName = callFrame->uncheckedArgument(1).getString(globalObject); - scope.assertNoException(); - return JSValue::encode(createTypeError(globalObject, makeThisTypeErrorMessage(interfaceName.utf8().data(), functionName.utf8().data()))); -} - JSC_DEFINE_HOST_FUNCTION(makeGetterTypeErrorForBuiltins, (JSGlobalObject * globalObject, CallFrame* callFrame)) { ASSERT(callFrame); @@ -3593,7 +3579,6 @@ void GlobalObject::addBuiltinGlobals(JSC::VM& vm) JSC::JSFunction::create(vm, this, 0, "@lazy"_s, 
JS2Native::jsDollarLazy, ImplementationVisibility::Public), PropertyAttribute::ReadOnly | PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0 }, - GlobalPropertyInfo(builtinNames.makeThisTypeErrorPrivateName(), JSFunction::create(vm, this, 2, String(), makeThisTypeErrorForBuiltins, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly), GlobalPropertyInfo(builtinNames.makeGetterTypeErrorPrivateName(), JSFunction::create(vm, this, 2, String(), makeGetterTypeErrorForBuiltins, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly), GlobalPropertyInfo(builtinNames.makeDOMExceptionPrivateName(), JSFunction::create(vm, this, 2, String(), makeDOMExceptionForBuiltins, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly), GlobalPropertyInfo(builtinNames.addAbortAlgorithmToSignalPrivateName(), JSFunction::create(vm, this, 2, String(), addAbortAlgorithmToSignal, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly), @@ -3612,6 +3597,8 @@ void GlobalObject::addBuiltinGlobals(JSC::VM& vm) GlobalPropertyInfo(builtinNames.TextEncoderStreamEncoderPrivateName(), JSTextEncoderStreamEncoderConstructor(), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly | 0), GlobalPropertyInfo(builtinNames.makeErrorWithCodePrivateName(), JSFunction::create(vm, this, 2, String(), jsFunctionMakeErrorWithCode, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly), GlobalPropertyInfo(builtinNames.toClassPrivateName(), JSFunction::create(vm, this, 1, String(), jsFunctionToClass, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly), + GlobalPropertyInfo(builtinNames.inheritsPrivateName(), JSFunction::create(vm, this, 1, String(), jsFunctionInherits, ImplementationVisibility::Public), PropertyAttribute::DontDelete | 
PropertyAttribute::ReadOnly), + GlobalPropertyInfo(builtinNames.makeAbortErrorPrivateName(), JSFunction::create(vm, this, 1, String(), jsFunctionMakeAbortError, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly), }; addStaticGlobals(staticGlobals, std::size(staticGlobals)); diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index 78cc6298bd..4ec0afd6ae 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -1,5 +1,3 @@ - - #include "root.h" #include "JavaScriptCore/ErrorType.h" @@ -4562,14 +4560,14 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global, } else { getFromSourceURL = true; } - except->code = (unsigned char)err->errorType(); + except->type = (unsigned char)err->errorType(); if (err->isStackOverflowError()) { - except->code = 253; + except->type = 253; } if (err->isOutOfMemoryError()) { - except->code = 8; + except->type = 8; } - if (except->code == SYNTAX_ERROR_CODE) { + if (except->type == SYNTAX_ERROR_CODE) { except->message = Bun::toStringRef(err->sanitizedMessageString(global)); } else if (JSC::JSValue message = obj->getIfPropertyExists(global, vm.propertyNames->message)) { except->message = Bun::toStringRef(global, message); @@ -4586,7 +4584,7 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global, except->runtime_type = err->runtimeTypeForCause(); const auto& names = builtinNames(vm); - if (except->code != SYNTAX_ERROR_CODE) { + if (except->type != SYNTAX_ERROR_CODE) { if (JSC::JSValue syscall = getNonObservable(vm, global, obj, names.syscallPublicName())) { if (syscall.isString()) { @@ -4600,7 +4598,7 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global, if (JSC::JSValue code = getNonObservable(vm, global, obj, names.codePublicName())) { if (code.isString() || code.isNumber()) { - except->code_ = Bun::toStringRef(global, code); + except->system_code = 
Bun::toStringRef(global, code); } } @@ -4760,21 +4758,21 @@ void exceptionFromString(ZigException* except, JSC::JSValue value, JSC::JSGlobal auto name_str = name_value.toWTFString(global); except->name = Bun::toStringRef(name_str); if (name_str == "Error"_s) { - except->code = JSErrorCodeError; + except->type = JSErrorCodeError; } else if (name_str == "EvalError"_s) { - except->code = JSErrorCodeEvalError; + except->type = JSErrorCodeEvalError; } else if (name_str == "RangeError"_s) { - except->code = JSErrorCodeRangeError; + except->type = JSErrorCodeRangeError; } else if (name_str == "ReferenceError"_s) { - except->code = JSErrorCodeReferenceError; + except->type = JSErrorCodeReferenceError; } else if (name_str == "SyntaxError"_s) { - except->code = JSErrorCodeSyntaxError; + except->type = JSErrorCodeSyntaxError; } else if (name_str == "TypeError"_s) { - except->code = JSErrorCodeTypeError; + except->type = JSErrorCodeTypeError; } else if (name_str == "URIError"_s) { - except->code = JSErrorCodeURIError; + except->type = JSErrorCodeURIError; } else if (name_str == "AggregateError"_s) { - except->code = JSErrorCodeAggregateError; + except->type = JSErrorCodeAggregateError; } } } @@ -5000,7 +4998,7 @@ void JSC__JSValue__toZigException(JSC__JSValue jsException, JSC__JSGlobalObject* { JSC::JSValue value = JSC::JSValue::decode(jsException); if (value == JSC::JSValue {}) { - exception->code = JSErrorCodeError; + exception->type = JSErrorCodeError; exception->name = Bun::toStringRef("Error"_s); exception->message = Bun::toStringRef("Unknown error"_s); return; diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig index 9aaae2a718..6690660950 100644 --- a/src/bun.js/bindings/exports.zig +++ b/src/bun.js/bindings/exports.zig @@ -789,7 +789,7 @@ pub const ZigStackFramePosition = extern struct { }; pub const ZigException = extern struct { - code: JSErrorCode, + type: JSErrorCode, runtime_type: JSRuntimeType, /// SystemError only @@ -889,7 +889,7 @@ 
pub const ZigException = extern struct { pub fn zigException(this: *Holder) *ZigException { if (!this.loaded) { this.zig_exception = ZigException{ - .code = @as(JSErrorCode, @enumFromInt(255)), + .type = @as(JSErrorCode, @enumFromInt(255)), .runtime_type = JSRuntimeType.Nothing, .name = String.empty, .message = String.empty, @@ -931,7 +931,7 @@ pub const ZigException = extern struct { var is_empty = true; var api_exception = Api.JsException{ .runtime_type = @intFromEnum(this.runtime_type), - .code = @intFromEnum(this.code), + .code = @intFromEnum(this.type), }; if (_name.len > 0) { diff --git a/src/bun.js/bindings/headers-handwritten.h b/src/bun.js/bindings/headers-handwritten.h index 01c5c1a978..35248377b0 100644 --- a/src/bun.js/bindings/headers-handwritten.h +++ b/src/bun.js/bindings/headers-handwritten.h @@ -183,11 +183,11 @@ typedef struct ZigStackTrace { } ZigStackTrace; typedef struct ZigException { - unsigned char code; + unsigned char type; uint16_t runtime_type; int errno_; BunString syscall; - BunString code_; + BunString system_code; BunString path; BunString name; BunString message; diff --git a/src/bun.js/bindings/js_classes.ts b/src/bun.js/bindings/js_classes.ts new file mode 100644 index 0000000000..31386e3580 --- /dev/null +++ b/src/bun.js/bindings/js_classes.ts @@ -0,0 +1,8 @@ +export default [ + // class list for $inherits*() builtins, eg. 
$inheritsBlob() + // tests if a value is an instanceof a native class in a robust cross-realm manner + ["Blob"], + ["ReadableStream", "JSReadableStream.h"], + ["WritableStream", "JSWritableStream.h"], + ["TransformStream", "JSTransformStream.h"], +]; diff --git a/src/bun.js/bindings/webcore/JSEventTarget.cpp b/src/bun.js/bindings/webcore/JSEventTarget.cpp index a4d9590ca1..4db90950e5 100644 --- a/src/bun.js/bindings/webcore/JSEventTarget.cpp +++ b/src/bun.js/bindings/webcore/JSEventTarget.cpp @@ -351,10 +351,7 @@ JSC_DEFINE_HOST_FUNCTION(jsEventTargetGetEventListenersCount, (JSC::JSGlobalObje JSC::VM& vm = JSC::getVM(lexicalGlobalObject); auto throwScope = DECLARE_THROW_SCOPE(vm); auto* thisValue = jsDynamicCast(callFrame->argument(0)); - if (!thisValue) { - return JSC::JSValue::encode(JSC::jsNumber(0)); - } - + if (!thisValue) return JSC::JSValue::encode(JSC::jsUndefined()); JSC::JSString* eventName = callFrame->argument(1).toString(lexicalGlobalObject); RETURN_IF_EXCEPTION(throwScope, {}); String str = eventName->value(lexicalGlobalObject); diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 3e11e7e3fb..135152f232 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -3439,7 +3439,9 @@ pub const VirtualMachine = struct { allow_ansi_color, allow_side_effects, ) catch |err| { - if (comptime Environment.isDebug) { + if (err == error.JSError) { + this.global.clearException(); + } else if (comptime Environment.isDebug) { // yo dawg Output.printErrorln("Error while printing Error-like object: {s}", .{@errorName(err)}); Output.flush(); @@ -3800,7 +3802,7 @@ pub const VirtualMachine = struct { } } - fn printErrorInstance(this: *VirtualMachine, error_instance: JSValue, exception_list: ?*ExceptionList, formatter: *ConsoleObject.Formatter, comptime Writer: type, writer: Writer, comptime allow_ansi_color: bool, comptime allow_side_effects: bool) anyerror!void { + fn printErrorInstance(this: *VirtualMachine, error_instance: JSValue, 
exception_list: ?*ExceptionList, formatter: *ConsoleObject.Formatter, comptime Writer: type, writer: Writer, comptime allow_ansi_color: bool, comptime allow_side_effects: bool) !void { var exception_holder = ZigException.Holder.init(); var exception = exception_holder.zigException(); defer exception_holder.deinit(this); diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index ff63039f88..d9726bba6b 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -2576,6 +2576,12 @@ pub const ModuleLoader = struct { .@"abort-controller" => return jsSyntheticModule(.@"abort-controller", specifier), .undici => return jsSyntheticModule(.undici, specifier), .ws => return jsSyntheticModule(.ws, specifier), + .@"node:_stream_duplex" => return jsSyntheticModule(.@"node:_stream_duplex", specifier), + .@"node:_stream_passthrough" => return jsSyntheticModule(.@"node:_stream_passthrough", specifier), + .@"node:_stream_readable" => return jsSyntheticModule(.@"node:_stream_readable", specifier), + .@"node:_stream_transform" => return jsSyntheticModule(.@"node:_stream_transform", specifier), + .@"node:_stream_wrap" => return jsSyntheticModule(.@"node:_stream_wrap", specifier), + .@"node:_stream_writable" => return jsSyntheticModule(.@"node:_stream_writable", specifier), } } else if (specifier.hasPrefixComptime(js_ast.Macro.namespaceWithColon)) { const spec = specifier.toUTF8(bun.default_allocator); @@ -2788,6 +2794,13 @@ pub const HardcodedModule = enum { @"node:cluster", // these are gated behind '--expose-internals' @"bun:internal-for-testing", + // + @"node:_stream_duplex", + @"node:_stream_passthrough", + @"node:_stream_readable", + @"node:_stream_transform", + @"node:_stream_wrap", + @"node:_stream_writable", /// Already resolved modules go in here. /// This does not remap the module name, it is just a hash table. 
@@ -2864,6 +2877,13 @@ pub const HardcodedModule = enum { .{ "worker_threads", HardcodedModule.@"node:worker_threads" }, .{ "zlib", HardcodedModule.@"node:zlib" }, + .{ "_stream_duplex", .@"node:_stream_duplex" }, + .{ "_stream_passthrough", .@"node:_stream_passthrough" }, + .{ "_stream_readable", .@"node:_stream_readable" }, + .{ "_stream_transform", .@"node:_stream_transform" }, + .{ "_stream_wrap", .@"node:_stream_wrap" }, + .{ "_stream_writable", .@"node:_stream_writable" }, + .{ "undici", HardcodedModule.undici }, .{ "ws", HardcodedModule.ws }, .{ "@vercel/fetch", HardcodedModule.@"@vercel/fetch" }, @@ -2941,12 +2961,12 @@ pub const HardcodedModule = enum { .{ "node:_http_incoming", .{ .path = "http" } }, .{ "node:_http_outgoing", .{ .path = "http" } }, .{ "node:_http_server", .{ .path = "http" } }, - .{ "node:_stream_duplex", .{ .path = "stream" } }, - .{ "node:_stream_passthrough", .{ .path = "stream" } }, - .{ "node:_stream_readable", .{ .path = "stream" } }, - .{ "node:_stream_transform", .{ .path = "stream" } }, - .{ "node:_stream_writable", .{ .path = "stream" } }, - .{ "node:_stream_wrap", .{ .path = "stream" } }, + .{ "node:_stream_duplex", .{ .path = "_stream_duplex" } }, + .{ "node:_stream_passthrough", .{ .path = "_stream_passthrough" } }, + .{ "node:_stream_readable", .{ .path = "_stream_readable" } }, + .{ "node:_stream_transform", .{ .path = "_stream_transform" } }, + .{ "node:_stream_wrap", .{ .path = "_stream_wrap" } }, + .{ "node:_stream_writable", .{ .path = "_stream_writable" } }, .{ "node:_tls_wrap", .{ .path = "tls" } }, .{ "node:_tls_common", .{ .path = "tls" } }, @@ -3016,12 +3036,12 @@ pub const HardcodedModule = enum { .{ "_http_incoming", .{ .path = "http" } }, .{ "_http_outgoing", .{ .path = "http" } }, .{ "_http_server", .{ .path = "http" } }, - .{ "_stream_duplex", .{ .path = "stream" } }, - .{ "_stream_passthrough", .{ .path = "stream" } }, - .{ "_stream_readable", .{ .path = "stream" } }, - .{ "_stream_transform", .{ .path = 
"stream" } }, - .{ "_stream_writable", .{ .path = "stream" } }, - .{ "_stream_wrap", .{ .path = "stream" } }, + .{ "_stream_duplex", .{ .path = "_stream_duplex" } }, + .{ "_stream_passthrough", .{ .path = "_stream_passthrough" } }, + .{ "_stream_readable", .{ .path = "_stream_readable" } }, + .{ "_stream_transform", .{ .path = "_stream_transform" } }, + .{ "_stream_wrap", .{ .path = "_stream_wrap" } }, + .{ "_stream_writable", .{ .path = "_stream_writable" } }, .{ "_tls_wrap", .{ .path = "tls" } }, .{ "_tls_common", .{ .path = "tls" } }, diff --git a/src/bun.js/node/node_util_binding.zig b/src/bun.js/node/node_util_binding.zig index cbcf25f09b..6acfccc725 100644 --- a/src/bun.js/node/node_util_binding.zig +++ b/src/bun.js/node/node_util_binding.zig @@ -132,12 +132,7 @@ pub fn extractedSplitNewLinesFastPathStringsOnly(globalThis: *JSC.JSGlobalObject }; } -fn split( - comptime encoding: bun.strings.EncodingNonAscii, - globalThis: *JSC.JSGlobalObject, - allocator: Allocator, - str: *const bun.String, -) bun.JSError!JSC.JSValue { +fn split(comptime encoding: bun.strings.EncodingNonAscii, globalThis: *JSC.JSGlobalObject, allocator: Allocator, str: *const bun.String) bun.JSError!JSC.JSValue { var fallback = std.heap.stackFallback(1024, allocator); const alloc = fallback.get(); const Char = switch (encoding) { @@ -194,3 +189,13 @@ pub fn SplitNewlineIterator(comptime T: type) type { } }; } + +pub fn normalizeEncoding(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const input = callframe.argument(0); + const str = bun.String.fromJS(input, globalThis); + bun.assert(str.tag != .Dead); + defer str.deref(); + if (str.length() == 0) return JSC.Node.Encoding.utf8.toJS(globalThis); + if (str.inMapCaseInsensitive(JSC.Node.Encoding.map)) |enc| return enc.toJS(globalThis); + return JSC.JSValue.jsUndefined(); +} diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 66d1bad3a7..c31674b085 100644 --- 
a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -654,6 +654,7 @@ pub const ErrorCode = @import("./nodejs_error_code.zig").Code; // and various issues with std.posix that make it too unstable for arbitrary user input (e.g. how .BADF is marked as unreachable) /// https://github.com/nodejs/node/blob/master/lib/buffer.js#L587 +/// must match src/bun.js/bindings/BufferEncodingType.h pub const Encoding = enum(u8) { utf8, ucs2, @@ -791,6 +792,11 @@ pub const Encoding = enum(u8) { }, } } + + extern fn WebCore_BufferEncodingType_toJS(globalObject: *JSC.JSGlobalObject, encoding: Encoding) JSC.JSValue; + pub fn toJS(encoding: Encoding, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + return WebCore_BufferEncodingType_toJS(globalObject, encoding); + } }; const PathOrBuffer = union(Tag) { diff --git a/src/codegen/bundle-modules.ts b/src/codegen/bundle-modules.ts index 4e8dc7edaa..9a6bb44dc9 100644 --- a/src/codegen/bundle-modules.ts +++ b/src/codegen/bundle-modules.ts @@ -19,6 +19,7 @@ import { getJS2NativeCPP, getJS2NativeZig } from "./generate-js2native"; import { cap, declareASCIILiteral, writeIfNotChanged } from "./helpers"; import { createInternalModuleRegistry } from "./internal-module-registry-scanner"; import { define } from "./replacements"; +import jsclasses from "./../bun.js/bindings/js_classes"; const BASE = path.join(import.meta.dir, "../js"); const debug = process.argv[2] === "--debug=ON"; @@ -457,16 +458,33 @@ writeIfNotChanged( `; for (let i = 0; i < ErrorCode.length; i++) { - const [code, _, name] = ErrorCode[i]; + const [code, constructor, name, ...other_constructors] = ErrorCode[i]; dts += ` /** - * Generate a ${name} error with the \`code\` property set to ${code}. + * Generate a ${name ?? constructor.name} error with the \`code\` property set to ${code}. 
* * @param msg The error message * @param args Additional arguments */ declare function $${code}(msg: string, ...args: any[]): ${name}; `; + + for (const con of other_constructors) { + if (con == null) continue; + dts += ` +/** + * Generate a ${con.name} error with the \`code\` property set to ${code}. + * + * @param msg The error message + * @param args Additional arguments + */ +declare function $${code}_${con.name}(msg: string, ...args: any[]): ${name}; +`; + } + } + + for (const [name] of jsclasses) { + dts += `\ndeclare function $inherits${name}(value: any): boolean;`; } return dts; diff --git a/src/codegen/generate-classes.ts b/src/codegen/generate-classes.ts index 1875972f61..d60f1ae548 100644 --- a/src/codegen/generate-classes.ts +++ b/src/codegen/generate-classes.ts @@ -2,6 +2,7 @@ import path from "path"; import type { ClassDefinition, Field } from "./class-definitions"; import { camelCase, pascalCase, writeIfNotChanged } from "./helpers"; +import jsclasses from "./../bun.js/bindings/js_classes"; if (process.env.BUN_SILENT === "1") { console.log = () => {}; @@ -910,6 +911,7 @@ function renderStaticDecls(symbolName, typeName, fields, supportsObjectCreate = return rows.join("\n"); } + function writeBarrier(symbolName, typeName, name, cacheName) { return ` @@ -928,6 +930,7 @@ extern JSC_CALLCONV JSC::EncodedJSValue ${symbolName(typeName, name)}GetCachedVa `.trim(); } + function renderFieldsImpl( symbolName: (typeName: string, name: string) => string, typeName: string, @@ -1176,6 +1179,7 @@ JSC_DEFINE_HOST_FUNCTION(${symbolName(typeName, name)}Callback, (JSGlobalObject return rows.map(a => a.trim()).join("\n"); } + function allCachedValues(obj: ClassDefinition) { let values = (obj.values ?? 
[]).slice().map(name => [name, `m_${name}`]); for (const name in obj.proto) { @@ -1193,6 +1197,7 @@ function allCachedValues(obj: ClassDefinition) { return values; } + var extraIncludes = []; function generateClassHeader(typeName, obj: ClassDefinition) { var { klass, proto, JSType = "ObjectType", values = [], callbacks = {}, zigOnly = false } = obj; @@ -2101,6 +2106,9 @@ const GENERATED_CLASSES_HEADER = [ #include "root.h" namespace Zig { + +JSC_DECLARE_HOST_FUNCTION(jsFunctionInherits); + } #include "JSDOMWrapper.h" @@ -2168,6 +2176,30 @@ const GENERATED_CLASSES_IMPL_FOOTER = ` `; +function jsInheritsCppImpl() { + return ` +${jsclasses + .map(v => v[1]) + .filter(v => v?.length > 0) + .map((v, i) => `#include "${v}"`) + .join("\n")} + +JSC_DEFINE_HOST_FUNCTION(Zig::jsFunctionInherits, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +{ + auto id = callFrame->argument(0).toInt32(globalObject); + auto value = callFrame->argument(1); + if (!value.isCell()) return JSValue::encode(jsBoolean(false)); + auto cell = value.asCell(); + switch (id) { +${jsclasses + .map(v => v[0]) + .map((v, i) => ` case ${i}: return JSValue::encode(jsBoolean(cell->inherits()));`) + .join("\n")} + } + return JSValue::encode(jsBoolean(false)); +}`; +} + function initLazyClasses(initLaterFunctions) { return ` @@ -2342,6 +2374,7 @@ comptime { `, ]); + if (!process.env.ONLY_ZIG) { const allHeaders = classes.map(a => generateHeader(a.name, a)); await writeIfNotChanged(`${outBase}/ZigGeneratedClasses.h`, [ @@ -2360,7 +2393,9 @@ if (!process.env.ONLY_ZIG) { allImpls.join("\n"), writeCppSerializers(classes), GENERATED_CLASSES_IMPL_FOOTER, + jsInheritsCppImpl(), ]); + await writeIfNotChanged( `${outBase}/ZigGeneratedClasses+lazyStructureHeader.h`, classes.map(a => generateLazyClassStructureHeader(a.name, a)).join("\n"), diff --git a/src/codegen/generate-node-errors.ts b/src/codegen/generate-node-errors.ts index e4c807be70..f7ba157fea 100644 --- a/src/codegen/generate-node-errors.ts 
+++ b/src/codegen/generate-node-errors.ts @@ -6,6 +6,18 @@ if (!outputDir) { throw new Error("Missing output directory"); } +const extra_count = NodeErrors.map(x => x.slice(3)) + .filter(x => x.length > 0) + .reduce((ac, cv) => ac + cv.length, 0); +const count = NodeErrors.length + extra_count; + +if (count > 256) { + // increase size of enum's to have more tags + // src/bun.js/node/types.zig#Encoding + // src/bun.js/bindings/BufferEncodingType.h + throw new Error("NodeError count exceeds u8"); +} + let enumHeader = ``; let listHeader = ``; let zig = ``; @@ -18,7 +30,7 @@ enumHeader = ` #include namespace Bun { - static constexpr size_t NODE_ERROR_COUNT = ${NodeErrors.length}; + static constexpr size_t NODE_ERROR_COUNT = ${count}; enum class ErrorCode : uint8_t { `; @@ -34,7 +46,7 @@ struct ErrorCodeData { WTF::ASCIILiteral name; WTF::ASCIILiteral code; }; -static constexpr ErrorCodeData errors[${NodeErrors.length}] = { +static constexpr ErrorCodeData errors[${count}] = { `; zig = ` @@ -71,7 +83,7 @@ pub const Error = enum(u8) { let i = 0; let listForUsingNamespace = ""; -for (let [code, constructor, name] of NodeErrors) { +for (let [code, constructor, name, ...other_constructors] of NodeErrors) { if (name == null) name = constructor.name; enumHeader += ` ${code} = ${i},\n`; listHeader += ` { JSC::ErrorType::${constructor.name}, "${name}"_s, "${code}"_s },\n`; @@ -81,6 +93,19 @@ for (let [code, constructor, name] of NodeErrors) { listForUsingNamespace += ` return .{ .globalThis = globalThis, .args = args };\n`; listForUsingNamespace += ` }\n`; i++; + + for (const con of other_constructors) { + if (con == null) continue; + if (name == null) name = con.name; + enumHeader += ` ${code}_${con.name} = ${i},\n`; + listHeader += ` { JSC::ErrorType::${con.name}, "${con.name}"_s, "${code}"_s },\n`; + zig += ` ${code}_${con.name} = ${i},\n`; + listForUsingNamespace += ` /// ${name}: ${code} (instanceof ${con.name})\n`; + listForUsingNamespace += ` pub inline fn 
${code}_${con.name}(globalThis: *JSC.JSGlobalObject, comptime fmt: [:0]const u8, args: anytype) ErrorBuilder(Error.${code}_${con.name}, fmt, @TypeOf(args)) {\n`; + listForUsingNamespace += ` return .{ .globalThis = globalThis, .args = args };\n`; + listForUsingNamespace += ` }\n`; + i++; + } } enumHeader += ` diff --git a/src/codegen/replacements.ts b/src/codegen/replacements.ts index 86da43ffae..103ba1b8b0 100644 --- a/src/codegen/replacements.ts +++ b/src/codegen/replacements.ts @@ -2,6 +2,7 @@ import { LoaderKeys } from "../api/schema"; import NodeErrors from "../bun.js/bindings/ErrorCode.ts"; import { sliceSourceCode } from "./builtin-parser"; import { registerNativeCall } from "./generate-js2native"; +import jsclasses from "./../bun.js/bindings/js_classes"; // This is a list of extra syntax replacements to do. Kind of like macros // These are only run on code itself, not string contents or comments. @@ -13,11 +14,29 @@ export const replacements: ReplacementRule[] = [ { from: /\bexport\s*default/g, to: "$exports =" }, ]; +let error_i = 0; for (let i = 0; i < NodeErrors.length; i++) { - const [code] = NodeErrors[i]; + const [code, _constructor, _name, ...other_constructors] = NodeErrors[i]; replacements.push({ from: new RegExp(`\\b\\__intrinsic__${code}\\(`, "g"), - to: `$makeErrorWithCode(${i}, `, + to: `$makeErrorWithCode(${error_i}, `, + }); + error_i += 1; + for (const con of other_constructors) { + if (con == null) continue; + replacements.push({ + from: new RegExp(`\\b\\__intrinsic__${code}_${con.name}\\(`, "g"), + to: `$makeErrorWithCode(${error_i}, `, + }); + error_i += 1; + } +} + +for (let id = 0; id < jsclasses.length; id++) { + const name = jsclasses[id][0]; + replacements.push({ + from: new RegExp(`\\b\\__intrinsic__inherits${name}\\(`, "g"), + to: `$inherits(${id}, `, }); } diff --git a/src/js/builtins.d.ts b/src/js/builtins.d.ts index aa38f45245..46b9d87da7 100644 --- a/src/js/builtins.d.ts +++ b/src/js/builtins.d.ts @@ -355,7 +355,6 @@ declare 
function $localStreams(): TODO; declare function $main(): TODO; declare function $makeDOMException(): TODO; declare function $makeGetterTypeError(className: string, prop: string): Error; -declare function $makeThisTypeError(className: string, method: string): Error; declare function $map(): TODO; declare function $method(): TODO; declare function $nextTick(): TODO; @@ -568,12 +567,20 @@ declare interface Error { code?: string; } +declare function $makeAbortError(message?: string, options?: { cause: Error }): Error; + /** * -- Error Codes with manual messages */ declare function $ERR_INVALID_ARG_TYPE(argName: string, expectedType: string, actualValue: string): TypeError; declare function $ERR_INVALID_ARG_TYPE(argName: string, expectedTypes: any[], actualValue: string): TypeError; declare function $ERR_INVALID_ARG_VALUE(name: string, value: any, reason?: string): TypeError; +declare function $ERR_UNKNOWN_ENCODING(enc: string): TypeError; +declare function $ERR_STREAM_DESTROYED(method: string): Error; +declare function $ERR_METHOD_NOT_IMPLEMENTED(method: string): Error; +declare function $ERR_STREAM_ALREADY_FINISHED(method: string): Error; +declare function $ERR_MISSING_ARGS(a1: string, a2?: string): TypeError; +declare function $ERR_INVALID_RETURN_VALUE(expected_type: string, name: string, actual_value: any): TypeError; declare function $ERR_IPC_DISCONNECTED(): Error; declare function $ERR_SERVER_NOT_RUNNING(): Error; @@ -588,6 +595,15 @@ declare function $ERR_SOCKET_DGRAM_IS_CONNECTED(): Error; declare function $ERR_SOCKET_DGRAM_NOT_CONNECTED(): Error; declare function $ERR_SOCKET_DGRAM_NOT_RUNNING(): Error; declare function $ERR_INVALID_CURSOR_POS(): Error; +declare function $ERR_MULTIPLE_CALLBACK(): Error; +declare function $ERR_STREAM_PREMATURE_CLOSE(): Error; +declare function $ERR_STREAM_NULL_VALUES(): TypeError; +declare function $ERR_STREAM_CANNOT_PIPE(): Error; +declare function $ERR_STREAM_WRITE_AFTER_END(): Error; +declare function 
$ERR_STREAM_UNSHIFT_AFTER_END_EVENT(): Error; +declare function $ERR_STREAM_PUSH_AFTER_EOF(): Error; +declare function $ERR_STREAM_UNABLE_TO_PIPE(): Error; +declare function $ERR_ILLEGAL_CONSTRUCTOR(): TypeError; /** * Convert a function to a class-like object. diff --git a/src/js/builtins/BunBuiltinNames.h b/src/js/builtins/BunBuiltinNames.h index b7017f0154..7fd2016a10 100644 --- a/src/js/builtins/BunBuiltinNames.h +++ b/src/js/builtins/BunBuiltinNames.h @@ -115,6 +115,7 @@ using namespace JSC; macro(inFlightCloseRequest) \ macro(inFlightWriteRequest) \ macro(initializeWith) \ + macro(inherits) \ macro(internalModuleRegistry) \ macro(internalRequire) \ macro(internalStream) \ @@ -133,10 +134,10 @@ using namespace JSC; macro(Loader) \ macro(localStreams) \ macro(main) \ + macro(makeAbortError) \ macro(makeDOMException) \ macro(makeErrorWithCode) \ macro(makeGetterTypeError) \ - macro(makeThisTypeError) \ macro(method) \ macro(mockedFunction) \ macro(nextTick) \ diff --git a/src/js/builtins/ReadableByteStreamController.ts b/src/js/builtins/ReadableByteStreamController.ts index 888f241bca..10cd75028e 100644 --- a/src/js/builtins/ReadableByteStreamController.ts +++ b/src/js/builtins/ReadableByteStreamController.ts @@ -31,7 +31,7 @@ export function initializeReadableByteStreamController(this, stream, underlyingB } export function enqueue(this, chunk) { - if (!$isReadableByteStreamController(this)) throw $makeThisTypeError("ReadableByteStreamController", "enqueue"); + if (!$isReadableByteStreamController(this)) throw $ERR_INVALID_THIS("ReadableByteStreamController"); if ($getByIdDirectPrivate(this, "closeRequested")) throw new TypeError("ReadableByteStreamController is requested to close"); @@ -45,7 +45,7 @@ export function enqueue(this, chunk) { } export function error(this, error) { - if (!$isReadableByteStreamController(this)) throw $makeThisTypeError("ReadableByteStreamController", "error"); + if (!$isReadableByteStreamController(this)) throw 
$ERR_INVALID_THIS("ReadableByteStreamController"); if ($getByIdDirectPrivate($getByIdDirectPrivate(this, "controlledReadableStream"), "state") !== $streamReadable) throw new TypeError("ReadableStream is not readable"); @@ -54,7 +54,7 @@ export function error(this, error) { } export function close(this) { - if (!$isReadableByteStreamController(this)) throw $makeThisTypeError("ReadableByteStreamController", "close"); + if (!$isReadableByteStreamController(this)) throw $ERR_INVALID_THIS("ReadableByteStreamController"); if ($getByIdDirectPrivate(this, "closeRequested")) throw new TypeError("Close has already been requested"); diff --git a/src/js/builtins/ReadableStream.ts b/src/js/builtins/ReadableStream.ts index 6e7e2d5951..d8cf7ff8d5 100644 --- a/src/js/builtins/ReadableStream.ts +++ b/src/js/builtins/ReadableStream.ts @@ -114,7 +114,7 @@ export function readableStreamToArray(stream: ReadableStream): Promise { if (underlyingSource !== undefined) { return $readableStreamToTextDirect(stream, underlyingSource); } - if ($isReadableStreamLocked(stream)) return Promise.$reject($makeTypeError("ReadableStream is locked")); + if ($isReadableStreamLocked(stream)) return Promise.$reject($ERR_INVALID_STATE_TypeError("ReadableStream is locked")); const result = $tryUseReadableStreamBufferedFastPath(stream, "text"); @@ -145,7 +145,7 @@ export function readableStreamToArrayBuffer(stream: ReadableStream) if (underlyingSource !== undefined) { return $readableStreamToArrayBufferDirect(stream, underlyingSource, false); } - if ($isReadableStreamLocked(stream)) return Promise.$reject($makeTypeError("ReadableStream is locked")); + if ($isReadableStreamLocked(stream)) return Promise.$reject($ERR_INVALID_STATE_TypeError("ReadableStream is locked")); let result = $tryUseReadableStreamBufferedFastPath(stream, "arrayBuffer"); @@ -226,7 +226,7 @@ export function readableStreamToBytes(stream: ReadableStream): Prom if (underlyingSource !== undefined) { return 
$readableStreamToArrayBufferDirect(stream, underlyingSource, true); } - if ($isReadableStreamLocked(stream)) return Promise.$reject($makeTypeError("ReadableStream is locked")); + if ($isReadableStreamLocked(stream)) return Promise.$reject($ERR_INVALID_STATE_TypeError("ReadableStream is locked")); let result = $tryUseReadableStreamBufferedFastPath(stream, "bytes"); @@ -302,7 +302,7 @@ export function readableStreamToFormData( contentType: string | ArrayBuffer | ArrayBufferView, ): Promise { if (!$isReadableStream(stream)) throw $ERR_INVALID_ARG_TYPE("stream", "ReadableStream", typeof stream); - if ($isReadableStreamLocked(stream)) return Promise.$reject($makeTypeError("ReadableStream is locked")); + if ($isReadableStreamLocked(stream)) return Promise.$reject($ERR_INVALID_STATE_TypeError("ReadableStream is locked")); return Bun.readableStreamToBlob(stream).then(blob => { return FormData.from(blob, contentType); }); @@ -311,7 +311,7 @@ export function readableStreamToFormData( $linkTimeConstant; export function readableStreamToJSON(stream: ReadableStream): unknown { if (!$isReadableStream(stream)) throw $ERR_INVALID_ARG_TYPE("stream", "ReadableStream", typeof stream); - if ($isReadableStreamLocked(stream)) return Promise.$reject($makeTypeError("ReadableStream is locked")); + if ($isReadableStreamLocked(stream)) return Promise.$reject($ERR_INVALID_STATE_TypeError("ReadableStream is locked")); let result = $tryUseReadableStreamBufferedFastPath(stream, "json"); if (result) { return result; @@ -333,7 +333,7 @@ export function readableStreamToJSON(stream: ReadableStream): unknown { $linkTimeConstant; export function readableStreamToBlob(stream: ReadableStream): Promise { if (!$isReadableStream(stream)) throw $ERR_INVALID_ARG_TYPE("stream", "ReadableStream", typeof stream); - if ($isReadableStreamLocked(stream)) return Promise.$reject($makeTypeError("ReadableStream is locked")); + if ($isReadableStreamLocked(stream)) return 
Promise.$reject($ERR_INVALID_STATE_TypeError("ReadableStream is locked")); return ( $tryUseReadableStreamBufferedFastPath(stream, "blob") || @@ -370,15 +370,15 @@ export function createNativeReadableStream(nativePtr, autoAllocateChunkSize) { } export function cancel(this, reason) { - if (!$isReadableStream(this)) return Promise.$reject($makeThisTypeError("ReadableStream", "cancel")); + if (!$isReadableStream(this)) return Promise.$reject($ERR_INVALID_THIS("ReadableStream")); - if ($isReadableStreamLocked(this)) return Promise.$reject($makeTypeError("ReadableStream is locked")); + if ($isReadableStreamLocked(this)) return Promise.$reject($ERR_INVALID_STATE_TypeError("ReadableStream is locked")); return $readableStreamCancel(this, reason); } export function getReader(this, options) { - if (!$isReadableStream(this)) throw $makeThisTypeError("ReadableStream", "getReader"); + if (!$isReadableStream(this)) throw $ERR_INVALID_THIS("ReadableStream"); const mode = $toDictionary(options, {}, "ReadableStream.getReader takes an object as first argument").mode; if (mode === undefined) { @@ -423,9 +423,9 @@ export function pipeThrough(this, streams, options) { if (signal !== undefined && !$isAbortSignal(signal)) throw $makeTypeError("options.signal must be AbortSignal"); } - if (!$isReadableStream(this)) throw $makeThisTypeError("ReadableStream", "pipeThrough"); + if (!$isReadableStream(this)) throw $ERR_INVALID_THIS("ReadableStream"); - if ($isReadableStreamLocked(this)) throw $makeTypeError("ReadableStream is locked"); + if ($isReadableStreamLocked(this)) throw $ERR_INVALID_STATE_TypeError("ReadableStream is locked"); if ($isWritableStreamLocked(internalWritable)) throw $makeTypeError("WritableStream is locked"); @@ -443,9 +443,9 @@ export function pipeThrough(this, streams, options) { } export function pipeTo(this, destination) { - if (!$isReadableStream(this)) return Promise.$reject($makeThisTypeError("ReadableStream", "pipeTo")); + if (!$isReadableStream(this)) return 
Promise.$reject($ERR_INVALID_THIS("ReadableStream")); - if ($isReadableStreamLocked(this)) return Promise.$reject($makeTypeError("ReadableStream is locked")); + if ($isReadableStreamLocked(this)) return Promise.$reject($ERR_INVALID_STATE_TypeError("ReadableStream is locked")); // FIXME: https://bugs.webkit.org/show_bug.cgi?id=159869. // Built-in generator should be able to parse function signature to compute the function length correctly. @@ -489,7 +489,7 @@ export function pipeTo(this, destination) { } export function tee(this) { - if (!$isReadableStream(this)) throw $makeThisTypeError("ReadableStream", "tee"); + if (!$isReadableStream(this)) throw $ERR_INVALID_THIS("ReadableStream"); return $readableStreamTee(this, false); } diff --git a/src/js/builtins/ReadableStreamBYOBReader.ts b/src/js/builtins/ReadableStreamBYOBReader.ts index 62a04d8a3d..10481564e2 100644 --- a/src/js/builtins/ReadableStreamBYOBReader.ts +++ b/src/js/builtins/ReadableStreamBYOBReader.ts @@ -35,8 +35,7 @@ export function initializeReadableStreamBYOBReader(this, stream) { } export function cancel(this, reason) { - if (!$isReadableStreamBYOBReader(this)) - return Promise.$reject($makeThisTypeError("ReadableStreamBYOBReader", "cancel")); + if (!$isReadableStreamBYOBReader(this)) return Promise.$reject($ERR_INVALID_THIS("ReadableStreamBYOBReader")); if (!$getByIdDirectPrivate(this, "ownerReadableStream")) return Promise.$reject($makeTypeError("cancel() called on a reader owned by no readable stream")); @@ -45,8 +44,7 @@ export function cancel(this, reason) { } export function read(this, view: DataView) { - if (!$isReadableStreamBYOBReader(this)) - return Promise.$reject($makeThisTypeError("ReadableStreamBYOBReader", "read")); + if (!$isReadableStreamBYOBReader(this)) return Promise.$reject($ERR_INVALID_THIS("ReadableStreamBYOBReader")); if (!$getByIdDirectPrivate(this, "ownerReadableStream")) return Promise.$reject($makeTypeError("read() called on a reader owned by no readable stream")); @@ 
-61,7 +59,7 @@ export function read(this, view: DataView) { } export function releaseLock(this) { - if (!$isReadableStreamBYOBReader(this)) throw $makeThisTypeError("ReadableStreamBYOBReader", "releaseLock"); + if (!$isReadableStreamBYOBReader(this)) throw $ERR_INVALID_THIS("ReadableStreamBYOBReader"); if (!$getByIdDirectPrivate(this, "ownerReadableStream")) return; diff --git a/src/js/builtins/ReadableStreamBYOBRequest.ts b/src/js/builtins/ReadableStreamBYOBRequest.ts index 1354f93499..aacf265771 100644 --- a/src/js/builtins/ReadableStreamBYOBRequest.ts +++ b/src/js/builtins/ReadableStreamBYOBRequest.ts @@ -31,7 +31,7 @@ export function initializeReadableStreamBYOBRequest(this, controller, view) { } export function respond(this, bytesWritten) { - if (!$isReadableStreamBYOBRequest(this)) throw $makeThisTypeError("ReadableStreamBYOBRequest", "respond"); + if (!$isReadableStreamBYOBRequest(this)) throw $ERR_INVALID_THIS("ReadableStreamBYOBRequest"); if ($getByIdDirectPrivate(this, "associatedReadableByteStreamController") === undefined) throw new TypeError("ReadableStreamBYOBRequest.associatedReadableByteStreamController is undefined"); @@ -43,7 +43,7 @@ export function respond(this, bytesWritten) { } export function respondWithNewView(this, view) { - if (!$isReadableStreamBYOBRequest(this)) throw $makeThisTypeError("ReadableStreamBYOBRequest", "respond"); + if (!$isReadableStreamBYOBRequest(this)) throw $ERR_INVALID_THIS("ReadableStreamBYOBRequest"); if ($getByIdDirectPrivate(this, "associatedReadableByteStreamController") === undefined) throw new TypeError("ReadableStreamBYOBRequest.associatedReadableByteStreamController is undefined"); diff --git a/src/js/builtins/ReadableStreamDefaultController.ts b/src/js/builtins/ReadableStreamDefaultController.ts index 6a04addc33..53d3fdd8e9 100644 --- a/src/js/builtins/ReadableStreamDefaultController.ts +++ b/src/js/builtins/ReadableStreamDefaultController.ts @@ -31,7 +31,7 @@ export function 
initializeReadableStreamDefaultController(this, stream, underlyi } export function enqueue(this, chunk) { - if (!$isReadableStreamDefaultController(this)) throw $makeThisTypeError("ReadableStreamDefaultController", "enqueue"); + if (!$isReadableStreamDefaultController(this)) throw $ERR_INVALID_THIS("ReadableStreamDefaultController"); if (!$readableStreamDefaultControllerCanCloseOrEnqueue(this)) { throw $ERR_INVALID_STATE("ReadableStreamDefaultController is not in a state where chunk can be enqueued"); @@ -41,12 +41,12 @@ export function enqueue(this, chunk) { } export function error(this, err) { - if (!$isReadableStreamDefaultController(this)) throw $makeThisTypeError("ReadableStreamDefaultController", "error"); + if (!$isReadableStreamDefaultController(this)) throw $ERR_INVALID_THIS("ReadableStreamDefaultController"); $readableStreamDefaultControllerError(this, err); } export function close(this) { - if (!$isReadableStreamDefaultController(this)) throw $makeThisTypeError("ReadableStreamDefaultController", "close"); + if (!$isReadableStreamDefaultController(this)) throw $ERR_INVALID_THIS("ReadableStreamDefaultController"); if (!$readableStreamDefaultControllerCanCloseOrEnqueue(this)) throw new TypeError("ReadableStreamDefaultController is not in a state where it can be closed"); diff --git a/src/js/builtins/ReadableStreamDefaultReader.ts b/src/js/builtins/ReadableStreamDefaultReader.ts index 9ddb3e3f38..c1004488a3 100644 --- a/src/js/builtins/ReadableStreamDefaultReader.ts +++ b/src/js/builtins/ReadableStreamDefaultReader.ts @@ -34,8 +34,7 @@ export function initializeReadableStreamDefaultReader(this, stream) { } export function cancel(this, reason) { - if (!$isReadableStreamDefaultReader(this)) - return Promise.$reject($makeThisTypeError("ReadableStreamDefaultReader", "cancel")); + if (!$isReadableStreamDefaultReader(this)) return Promise.$reject($ERR_INVALID_THIS("ReadableStreamDefaultReader")); if (!$getByIdDirectPrivate(this, "ownerReadableStream")) return 
Promise.$reject(new TypeError("cancel() called on a reader owned by no readable stream")); @@ -159,8 +158,7 @@ export function readMany(this: ReadableStreamDefaultReader): ReadableStreamDefau } export function read(this) { - if (!$isReadableStreamDefaultReader(this)) - return Promise.$reject($makeThisTypeError("ReadableStreamDefaultReader", "read")); + if (!$isReadableStreamDefaultReader(this)) return Promise.$reject($ERR_INVALID_THIS("ReadableStreamDefaultReader")); if (!$getByIdDirectPrivate(this, "ownerReadableStream")) return Promise.$reject(new TypeError("read() called on a reader owned by no readable stream")); @@ -168,7 +166,7 @@ export function read(this) { } export function releaseLock(this) { - if (!$isReadableStreamDefaultReader(this)) throw $makeThisTypeError("ReadableStreamDefaultReader", "releaseLock"); + if (!$isReadableStreamDefaultReader(this)) throw $ERR_INVALID_THIS("ReadableStreamDefaultReader"); if (!$getByIdDirectPrivate(this, "ownerReadableStream")) return; diff --git a/src/js/builtins/TextDecoderStream.ts b/src/js/builtins/TextDecoderStream.ts index 2a5f1e528d..e64ad22f9f 100644 --- a/src/js/builtins/TextDecoderStream.ts +++ b/src/js/builtins/TextDecoderStream.ts @@ -78,24 +78,21 @@ export function initializeTextDecoderStream() { $getter; export function encoding() { - if (!$getByIdDirectPrivate(this, "textDecoderStreamTransform")) - throw $makeThisTypeError("TextDecoderStream", "encoding"); + if (!$getByIdDirectPrivate(this, "textDecoderStreamTransform")) throw $ERR_INVALID_THIS("TextDecoderStream"); return $getByIdDirectPrivate(this, "encoding"); } $getter; export function fatal() { - if (!$getByIdDirectPrivate(this, "textDecoderStreamTransform")) - throw $makeThisTypeError("TextDecoderStream", "fatal"); + if (!$getByIdDirectPrivate(this, "textDecoderStreamTransform")) throw $ERR_INVALID_THIS("TextDecoderStream"); return $getByIdDirectPrivate(this, "fatal"); } $getter; export function ignoreBOM() { - if (!$getByIdDirectPrivate(this, 
"textDecoderStreamTransform")) - throw $makeThisTypeError("TextDecoderStream", "ignoreBOM"); + if (!$getByIdDirectPrivate(this, "textDecoderStreamTransform")) throw $ERR_INVALID_THIS("TextDecoderStream"); return $getByIdDirectPrivate(this, "ignoreBOM"); } @@ -103,7 +100,7 @@ export function ignoreBOM() { $getter; export function readable() { const transform = $getByIdDirectPrivate(this, "textDecoderStreamTransform"); - if (!transform) throw $makeThisTypeError("TextDecoderStream", "readable"); + if (!transform) throw $ERR_INVALID_THIS("TextDecoderStream"); return $getByIdDirectPrivate(transform, "readable"); } @@ -111,7 +108,7 @@ export function readable() { $getter; export function writable() { const transform = $getByIdDirectPrivate(this, "textDecoderStreamTransform"); - if (!transform) throw $makeThisTypeError("TextDecoderStream", "writable"); + if (!transform) throw $ERR_INVALID_THIS("TextDecoderStream"); return $getByIdDirectPrivate(transform, "writable"); } diff --git a/src/js/builtins/TextEncoderStream.ts b/src/js/builtins/TextEncoderStream.ts index 4aa0c895dd..c9dda44bea 100644 --- a/src/js/builtins/TextEncoderStream.ts +++ b/src/js/builtins/TextEncoderStream.ts @@ -61,8 +61,7 @@ export function initializeTextEncoderStream() { $getter; export function encoding() { - if (!$getByIdDirectPrivate(this, "textEncoderStreamTransform")) - throw $makeThisTypeError("TextEncoderStream", "encoding"); + if (!$getByIdDirectPrivate(this, "textEncoderStreamTransform")) throw $ERR_INVALID_THIS("TextEncoderStream"); return "utf-8"; } @@ -70,7 +69,7 @@ export function encoding() { $getter; export function readable() { const transform = $getByIdDirectPrivate(this, "textEncoderStreamTransform"); - if (!transform) throw $makeThisTypeError("TextEncoderStream", "readable"); + if (!transform) throw $ERR_INVALID_THIS("TextEncoderStream"); return $getByIdDirectPrivate(transform, "readable"); } @@ -78,7 +77,7 @@ export function readable() { $getter; export function writable() { const 
transform = $getByIdDirectPrivate(this, "textEncoderStreamTransform"); - if (!transform) throw $makeThisTypeError("TextEncoderStream", "writable"); + if (!transform) throw $ERR_INVALID_THIS("TextEncoderStream"); return $getByIdDirectPrivate(transform, "writable"); } diff --git a/src/js/builtins/TransformStream.ts b/src/js/builtins/TransformStream.ts index f9d80b7cbd..f8bb7d3438 100644 --- a/src/js/builtins/TransformStream.ts +++ b/src/js/builtins/TransformStream.ts @@ -95,13 +95,13 @@ export function initializeTransformStream(this) { $getter; export function readable() { - if (!$isTransformStream(this)) throw $makeThisTypeError("TransformStream", "readable"); + if (!$isTransformStream(this)) throw $ERR_INVALID_THIS("TransformStream"); return $getByIdDirectPrivate(this, "readable"); } export function writable() { - if (!$isTransformStream(this)) throw $makeThisTypeError("TransformStream", "writable"); + if (!$isTransformStream(this)) throw $ERR_INVALID_THIS("TransformStream"); return $getByIdDirectPrivate(this, "writable"); } diff --git a/src/js/builtins/TransformStreamDefaultController.ts b/src/js/builtins/TransformStreamDefaultController.ts index 1045498b8d..84eb6ff6e9 100644 --- a/src/js/builtins/TransformStreamDefaultController.ts +++ b/src/js/builtins/TransformStreamDefaultController.ts @@ -29,8 +29,7 @@ export function initializeTransformStreamDefaultController(this) { $getter; export function desiredSize(this) { - if (!$isTransformStreamDefaultController(this)) - throw $makeThisTypeError("TransformStreamDefaultController", "enqueue"); + if (!$isTransformStreamDefaultController(this)) throw $ERR_INVALID_THIS("TransformStreamDefaultController"); const stream = $getByIdDirectPrivate(this, "stream"); const readable = $getByIdDirectPrivate(stream, "readable"); @@ -40,21 +39,19 @@ export function desiredSize(this) { } export function enqueue(this, chunk) { - if (!$isTransformStreamDefaultController(this)) - throw 
$makeThisTypeError("TransformStreamDefaultController", "enqueue"); + if (!$isTransformStreamDefaultController(this)) throw $ERR_INVALID_THIS("TransformStreamDefaultController"); $transformStreamDefaultControllerEnqueue(this, chunk); } export function error(this, e) { - if (!$isTransformStreamDefaultController(this)) throw $makeThisTypeError("TransformStreamDefaultController", "error"); + if (!$isTransformStreamDefaultController(this)) throw $ERR_INVALID_THIS("TransformStreamDefaultController"); $transformStreamDefaultControllerError(this, e); } export function terminate(this) { - if (!$isTransformStreamDefaultController(this)) - throw $makeThisTypeError("TransformStreamDefaultController", "terminate"); + if (!$isTransformStreamDefaultController(this)) throw $ERR_INVALID_THIS("TransformStreamDefaultController"); $transformStreamDefaultControllerTerminate(this); } diff --git a/src/js/builtins/WritableStreamDefaultController.ts b/src/js/builtins/WritableStreamDefaultController.ts index 1a3ddc2904..05cf16ba06 100644 --- a/src/js/builtins/WritableStreamDefaultController.ts +++ b/src/js/builtins/WritableStreamDefaultController.ts @@ -40,7 +40,7 @@ export function initializeWritableStreamDefaultController(this) { export function error(this, e) { if ($getByIdDirectPrivate(this, "abortSteps") === undefined) - throw $makeThisTypeError("WritableStreamDefaultController", "error"); + throw $ERR_INVALID_THIS("WritableStreamDefaultController"); const stream = $getByIdDirectPrivate(this, "stream"); if ($getByIdDirectPrivate(stream, "state") !== "writable") return; diff --git a/src/js/builtins/WritableStreamDefaultWriter.ts b/src/js/builtins/WritableStreamDefaultWriter.ts index ff3f38f006..87de5aa8e2 100644 --- a/src/js/builtins/WritableStreamDefaultWriter.ts +++ b/src/js/builtins/WritableStreamDefaultWriter.ts @@ -46,7 +46,7 @@ export function closed() { $getter; export function desiredSize() { - if (!$isWritableStreamDefaultWriter(this)) throw 
$makeThisTypeError("WritableStreamDefaultWriter", "desiredSize"); + if (!$isWritableStreamDefaultWriter(this)) throw $ERR_INVALID_THIS("WritableStreamDefaultWriter"); if ($getByIdDirectPrivate(this, "stream") === undefined) $throwTypeError("WritableStreamDefaultWriter has no stream"); @@ -55,15 +55,13 @@ export function desiredSize() { $getter; export function ready() { - if (!$isWritableStreamDefaultWriter(this)) - return Promise.$reject($makeThisTypeError("WritableStreamDefaultWriter", "ready")); + if (!$isWritableStreamDefaultWriter(this)) return Promise.$reject($ERR_INVALID_THIS("WritableStreamDefaultWriter")); return $getByIdDirectPrivate(this, "readyPromise").promise; } export function abort(reason) { - if (!$isWritableStreamDefaultWriter(this)) - return Promise.$reject($makeThisTypeError("WritableStreamDefaultWriter", "abort")); + if (!$isWritableStreamDefaultWriter(this)) return Promise.$reject($ERR_INVALID_THIS("WritableStreamDefaultWriter")); if ($getByIdDirectPrivate(this, "stream") === undefined) return Promise.$reject($makeTypeError("WritableStreamDefaultWriter has no stream")); @@ -72,8 +70,7 @@ export function abort(reason) { } export function close() { - if (!$isWritableStreamDefaultWriter(this)) - return Promise.$reject($makeThisTypeError("WritableStreamDefaultWriter", "close")); + if (!$isWritableStreamDefaultWriter(this)) return Promise.$reject($ERR_INVALID_THIS("WritableStreamDefaultWriter")); const stream = $getByIdDirectPrivate(this, "stream"); if (stream === undefined) return Promise.$reject($makeTypeError("WritableStreamDefaultWriter has no stream")); @@ -85,7 +82,7 @@ export function close() { } export function releaseLock() { - if (!$isWritableStreamDefaultWriter(this)) throw $makeThisTypeError("WritableStreamDefaultWriter", "releaseLock"); + if (!$isWritableStreamDefaultWriter(this)) throw $ERR_INVALID_THIS("WritableStreamDefaultWriter"); const stream = $getByIdDirectPrivate(this, "stream"); if (stream === undefined) return; @@ -95,8 
+92,7 @@ export function releaseLock() { } export function write(chunk) { - if (!$isWritableStreamDefaultWriter(this)) - return Promise.$reject($makeThisTypeError("WritableStreamDefaultWriter", "write")); + if (!$isWritableStreamDefaultWriter(this)) return Promise.$reject($ERR_INVALID_THIS("WritableStreamDefaultWriter")); if ($getByIdDirectPrivate(this, "stream") === undefined) return Promise.$reject($makeTypeError("WritableStreamDefaultWriter has no stream")); diff --git a/src/js/bun/ffi.ts b/src/js/bun/ffi.ts index d5e4d74da2..f7c3803758 100644 --- a/src/js/bun/ffi.ts +++ b/src/js/bun/ffi.ts @@ -433,7 +433,7 @@ function normalizePath(path) { // This is mostly for import.meta.resolve() // https://github.com/oven-sh/bun/issues/10304 path = Bun.fileURLToPath(path as URL); - } else if (path instanceof Blob) { + } else if ($inheritsBlob(path)) { // must be a Bun.file() blob // https://discord.com/channels/876711213126520882/1230114905898614794/1230114905898614794 path = path.name; @@ -447,7 +447,7 @@ function dlopen(path, options) { path = normalizePath(path); const result = nativeDLOpen(path, options); - if (result instanceof Error) throw result; + if (Error.isError(result)) throw result; for (let key in result.symbols) { var symbol = result.symbols[key]; @@ -495,7 +495,7 @@ function cc(options) { options.source = path; const result = ccFn(options); - if (result instanceof Error) throw result; + if (Error.isError(result)) throw result; for (let key in result.symbols) { var symbol = result.symbols[key]; diff --git a/src/js/internal/abort_listener.ts b/src/js/internal/abort_listener.ts new file mode 100644 index 0000000000..8a5ee71867 --- /dev/null +++ b/src/js/internal/abort_listener.ts @@ -0,0 +1,33 @@ +const { validateAbortSignal, validateFunction } = require("internal/validators"); +const { kResistStopPropagation } = require("internal/shared"); + +const SymbolDispose = Symbol.dispose; + +function addAbortListener(signal: AbortSignal, listener: EventListener): 
Disposable { + if (signal === undefined) { + throw $ERR_INVALID_ARG_TYPE("signal", "AbortSignal", signal); + } + validateAbortSignal(signal, "signal"); + validateFunction(listener, "listener"); + + let removeEventListener; + if (signal.aborted) { + queueMicrotask(() => listener()); + } else { + // TODO(atlowChemi) add { subscription: true } and return directly + signal.addEventListener("abort", listener, { __proto__: null, once: true, [kResistStopPropagation]: true }); + removeEventListener = () => { + signal.removeEventListener("abort", listener); + }; + } + return { + __proto__: null, + [SymbolDispose]() { + removeEventListener?.(); + }, + }; +} + +export default { + addAbortListener, +}; diff --git a/src/js/internal/errors.ts b/src/js/internal/errors.ts index f12e6a067c..b0e84f460e 100644 --- a/src/js/internal/errors.ts +++ b/src/js/internal/errors.ts @@ -1,7 +1,22 @@ +const { SafeArrayIterator } = require("internal/primordials"); + +const ArrayIsArray = Array.isArray; +const ArrayPrototypePush = Array.prototype.push; + +function aggregateTwoErrors(innerError, outerError) { + if (innerError && outerError && innerError !== outerError) { + if (ArrayIsArray(outerError.errors)) { + // If `outerError` is already an `AggregateError`. 
+ ArrayPrototypePush.$call(outerError.errors, innerError); + return outerError; + } + const err = new AggregateError(new SafeArrayIterator([outerError, innerError]), outerError.message); + err.code = outerError.code; + return err; + } + return innerError || outerError; +} + export default { - ERR_OUT_OF_RANGE: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_OUT_OF_RANGE", 3), - ERR_INVALID_PROTOCOL: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_INVALID_PROTOCOL", 0), - ERR_BROTLI_INVALID_PARAM: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_BROTLI_INVALID_PARAM", 0), - ERR_BUFFER_TOO_LARGE: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_BUFFER_TOO_LARGE", 0), - ERR_UNHANDLED_ERROR: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_UNHANDLED_ERROR", 0), + aggregateTwoErrors, }; diff --git a/src/js/internal/primordials.js b/src/js/internal/primordials.js index faff386124..046f44fa65 100644 --- a/src/js/internal/primordials.js +++ b/src/js/internal/primordials.js @@ -5,13 +5,13 @@ const ObjectSetPrototypeOf = Object.setPrototypeOf; const ObjectFreeze = Object.freeze; -const createSafeIterator = (factory, next) => { +const createSafeIterator = (factory, next_) => { class SafeIterator { constructor(iterable) { this._iterator = factory(iterable); } next() { - return next(this._iterator); + return next_(this._iterator); } [Symbol.iterator]() { return this; @@ -78,13 +78,27 @@ const makeSafe = (unsafe, safe) => { const StringIterator = uncurryThis(String.prototype[Symbol.iterator]); const StringIteratorPrototype = Reflect.getPrototypeOf(StringIterator("")); const ArrayPrototypeForEach = uncurryThis(Array.prototype.forEach); - const ArrayPrototypeSymbolIterator = uncurryThis(Array.prototype[Symbol.iterator]); -const ArrayIteratorPrototypeNext = uncurryThis(ArrayPrototypeSymbolIterator.next); +const ArrayIteratorPrototypeNext = uncurryThis(Array.prototype[Symbol.iterator]().next); +const SafeArrayIterator = createSafeIterator(ArrayPrototypeSymbolIterator, 
ArrayIteratorPrototypeNext); + +const ArrayPrototypeMap = Array.prototype.map; +const PromisePrototypeThen = Promise.prototype.then; + +const arrayToSafePromiseIterable = (promises, mapFn) => + new SafeArrayIterator( + ArrayPrototypeMap.$call( + promises, + (promise, i) => + new Promise((a, b) => PromisePrototypeThen.$call(mapFn == null ? promise : mapFn(promise, i), a, b)), + ), + ); +const PromiseAll = Promise.all; +const SafePromiseAll = (promises, mapFn) => PromiseAll(arrayToSafePromiseIterable(promises, mapFn)); export default { Array, - SafeArrayIterator: createSafeIterator(ArrayPrototypeSymbolIterator, ArrayIteratorPrototypeNext), + SafeArrayIterator, MapPrototypeGetSize: getGetter(Map, "size"), Number, Object, @@ -98,6 +112,7 @@ export default { } }, ), + SafePromiseAll, SafeSet: makeSafe( Set, class SafeSet extends Set { diff --git a/src/js/internal/shared.ts b/src/js/internal/shared.ts index dc1dcd93e0..af82b5c0ba 100644 --- a/src/js/internal/shared.ts +++ b/src/js/internal/shared.ts @@ -1,3 +1,5 @@ +const ObjectFreeze = Object.freeze; + class NotImplementedError extends Error { code: string; constructor(feature: string, issue?: number, extra?: string) { @@ -47,11 +49,12 @@ const fileSinkSymbol = Symbol("fileSink"); // -let util; +let util: typeof import("node:util"); class ExceptionWithHostPort extends Error { errno: number; syscall: string; port?: number; + address; constructor(err, syscall, address, port) { // TODO(joyeecheung): We have to use the type-checked @@ -79,6 +82,20 @@ class ExceptionWithHostPort extends Error { } } +function once(callback, { preserveReturnValue = false } = kEmptyObject) { + let called = false; + let returnValue; + return function (...args) { + if (called) return returnValue; + called = true; + const result = callback.$apply(this, args); + returnValue = preserveReturnValue ? 
result : undefined; + return result; + }; +} + +const kEmptyObject = ObjectFreeze({ __proto__: null }); + // export default { @@ -88,6 +105,13 @@ export default { warnNotImplementedOnce, fileSinkSymbol, ExceptionWithHostPort, + once, + kHandle: Symbol("kHandle"), kAutoDestroyed: Symbol("kAutoDestroyed"), + kResistStopPropagation: Symbol("kResistStopPropagation"), + kWeakHandler: Symbol("kWeak"), + kEnsureConstructed: Symbol("kEnsureConstructed"), + kGetNativeReadableProto: Symbol("kGetNativeReadableProto"), + kEmptyObject, }; diff --git a/src/js/internal/stream.promises.ts b/src/js/internal/stream.promises.ts new file mode 100644 index 0000000000..c25b026f65 --- /dev/null +++ b/src/js/internal/stream.promises.ts @@ -0,0 +1,45 @@ +"use strict"; + +const ArrayPrototypePop = Array.prototype.pop; + +const { isIterable, isNodeStream, isWebStream } = require("internal/streams/utils"); +const { pipelineImpl: pl } = require("internal/streams/pipeline"); +const { finished } = require("internal/streams/end-of-stream"); + +// require("internal/stream"); + +function pipeline(...streams) { + return new Promise((resolve, reject) => { + let signal; + let end; + const lastArg = streams[streams.length - 1]; + if ( + lastArg && + typeof lastArg === "object" && + !isNodeStream(lastArg) && + !isIterable(lastArg) && + !isWebStream(lastArg) + ) { + const options = ArrayPrototypePop.$call(streams); + signal = options.signal; + end = options.end; + } + + pl( + streams, + (err, value) => { + if (err) { + reject(err); + } else { + resolve(value); + } + }, + { signal, end }, + ); + }); +} + +export default { + finished, + pipeline, +}; diff --git a/src/js/internal/stream.ts b/src/js/internal/stream.ts new file mode 100644 index 0000000000..41c2aae63a --- /dev/null +++ b/src/js/internal/stream.ts @@ -0,0 +1,113 @@ +"use strict"; + +const ObjectKeys = Object.keys; +const ObjectDefineProperty = Object.defineProperty; + +const customPromisify = Symbol.for("nodejs.util.promisify.custom"); +const 
{ streamReturningOperators, promiseReturningOperators } = require("internal/streams/operators"); +const compose = require("internal/streams/compose"); +const { setDefaultHighWaterMark, getDefaultHighWaterMark } = require("internal/streams/state"); +const { pipeline } = require("internal/streams/pipeline"); +const { destroyer } = require("internal/streams/destroy"); +const eos = require("internal/streams/end-of-stream"); +const promises = require("internal/stream.promises"); +const utils = require("internal/streams/utils"); +const { isArrayBufferView, isUint8Array } = require("node:util/types"); +const Stream = require("internal/streams/legacy").Stream; + +Stream.isDestroyed = utils.isDestroyed; +Stream.isDisturbed = utils.isDisturbed; +Stream.isErrored = utils.isErrored; +Stream.isReadable = utils.isReadable; +Stream.isWritable = utils.isWritable; + +Stream.Readable = require("internal/streams/readable"); +const streamKeys = ObjectKeys(streamReturningOperators); +for (let i = 0; i < streamKeys.length; i++) { + const key = streamKeys[i]; + const op = streamReturningOperators[key]; + function fn(...args) { + if (new.target) { + throw $ERR_ILLEGAL_CONSTRUCTOR(); + } + return Stream.Readable.from(op.$apply(this, args)); + } + ObjectDefineProperty(fn, "name", { __proto__: null, value: op.name }); + ObjectDefineProperty(fn, "length", { __proto__: null, value: op.length }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn, + enumerable: false, + configurable: true, + writable: true, + }); +} +const promiseKeys = ObjectKeys(promiseReturningOperators); +for (let i = 0; i < promiseKeys.length; i++) { + const key = promiseKeys[i]; + const op = promiseReturningOperators[key]; + function fn(...args) { + if (new.target) { + throw $ERR_ILLEGAL_CONSTRUCTOR(); + } + return Promise.resolve().then(() => op.$apply(this, args)); + } + ObjectDefineProperty(fn, "name", { __proto__: null, value: op.name }); + ObjectDefineProperty(fn, "length", { 
__proto__: null, value: op.length }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn, + enumerable: false, + configurable: true, + writable: true, + }); +} +Stream.Writable = require("internal/streams/writable"); +Stream.Duplex = require("internal/streams/duplex"); +Stream.Transform = require("internal/streams/transform"); +Stream.PassThrough = require("internal/streams/passthrough"); +Stream.duplexPair = require("internal/streams/duplexpair"); +Stream.pipeline = pipeline; +const { addAbortSignal } = require("internal/streams/add-abort-signal"); +Stream.addAbortSignal = addAbortSignal; +Stream.finished = eos; +Stream.destroy = destroyer; +Stream.compose = compose; +Stream.setDefaultHighWaterMark = setDefaultHighWaterMark; +Stream.getDefaultHighWaterMark = getDefaultHighWaterMark; + +ObjectDefineProperty(Stream, "promises", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return promises; + }, +}); + +ObjectDefineProperty(pipeline, customPromisify, { + __proto__: null, + enumerable: true, + get() { + return promises.pipeline; + }, +}); + +ObjectDefineProperty(eos, customPromisify, { + __proto__: null, + enumerable: true, + get() { + return promises.finished; + }, +}); + +// Backwards-compat with node 0.4.x +Stream.Stream = Stream; + +Stream._isArrayBufferView = isArrayBufferView; +Stream._isUint8Array = isUint8Array; +Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { + return new $Buffer(chunk.buffer, chunk.byteOffset, chunk.byteLength); +}; + +export default Stream; diff --git a/src/js/internal/streams/add-abort-signal.ts b/src/js/internal/streams/add-abort-signal.ts new file mode 100644 index 0000000000..85007b81ce --- /dev/null +++ b/src/js/internal/streams/add-abort-signal.ts @@ -0,0 +1,51 @@ +"use strict"; + +const { isNodeStream, isWebStream, kControllerErrorFunction } = require("internal/streams/utils"); +const eos = require("internal/streams/end-of-stream"); + +const 
SymbolDispose = Symbol.dispose; + +let addAbortListener; + +// This method is inlined here for readable-stream +// It also does not allow for signal to not exist on the stream +// https://github.com/nodejs/node/pull/36061#discussion_r533718029 +const validateAbortSignal = (signal, name) => { + if (typeof signal !== "object" || !("aborted" in signal)) { + throw $ERR_INVALID_ARG_TYPE(name, "AbortSignal", signal); + } +}; + +function addAbortSignal(signal, stream) { + validateAbortSignal(signal, "signal"); + if (!isNodeStream(stream) && !isWebStream(stream)) { + throw $ERR_INVALID_ARG_TYPE("stream", ["ReadableStream", "WritableStream", "Stream"], stream); + } + return addAbortSignalNoValidate(signal, stream); +} + +function addAbortSignalNoValidate(signal, stream) { + if (typeof signal !== "object" || !("aborted" in signal)) { + return stream; + } + const onAbort = isNodeStream(stream) + ? () => { + stream.destroy($makeAbortError(undefined, { cause: signal.reason })); + } + : () => { + stream[kControllerErrorFunction]($makeAbortError(undefined, { cause: signal.reason })); + }; + if (signal.aborted) { + onAbort(); + } else { + addAbortListener ??= require("internal/abort_listener").addAbortListener; + const disposable = addAbortListener(signal, onAbort); + eos(stream, disposable[SymbolDispose]); + } + return stream; +} + +export default { + addAbortSignal, + addAbortSignalNoValidate, +}; diff --git a/src/js/internal/streams/compose.ts b/src/js/internal/streams/compose.ts new file mode 100644 index 0000000000..2a4df02339 --- /dev/null +++ b/src/js/internal/streams/compose.ts @@ -0,0 +1,221 @@ +"use strict"; + +const { pipeline } = require("internal/streams/pipeline"); +const Duplex = require("internal/streams/duplex"); +const { destroyer } = require("internal/streams/destroy"); +const { + isNodeStream, + isReadable, + isWritable, + isWebStream, + isTransformStream, + isWritableStream, + isReadableStream, +} = require("internal/streams/utils"); +const eos = 
require("internal/streams/end-of-stream"); + +const ArrayPrototypeSlice = Array.prototype.slice; + +export default function compose(...streams) { + if (streams.length === 0) { + throw $ERR_MISSING_ARGS("streams"); + } + + if (streams.length === 1) { + return Duplex.from(streams[0]); + } + + const orgStreams = ArrayPrototypeSlice.$call(streams); + + if (typeof streams[0] === "function") { + streams[0] = Duplex.from(streams[0]); + } + + if (typeof streams[streams.length - 1] === "function") { + const idx = streams.length - 1; + streams[idx] = Duplex.from(streams[idx]); + } + + for (let n = 0; n < streams.length; ++n) { + if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) { + // TODO(ronag): Add checks for non streams. + continue; + } + if ( + n < streams.length - 1 && + !(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n])) + ) { + throw $ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], "must be readable"); + } + if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) { + throw $ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], "must be writable"); + } + } + + let ondrain; + let onfinish; + let onreadable; + let onclose; + let d; + + function onfinished(err) { + const cb = onclose; + onclose = null; + + if (cb) { + cb(err); + } else if (err) { + d.destroy(err); + } else if (!readable && !writable) { + d.destroy(); + } + } + + const head = streams[0]; + const tail = pipeline(streams, onfinished); + + const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head)); + const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail)); + + // TODO(ronag): Avoid double buffering. + // Implement Writable/Readable/Duplex traits. + // See, https://github.com/nodejs/node/pull/33515. + d = new Duplex({ + // TODO (ronag): highWaterMark? 
+ writableObjectMode: !!head?.writableObjectMode, + readableObjectMode: !!tail?.readableObjectMode, + writable, + readable, + }); + + if (writable) { + if (isNodeStream(head)) { + d._write = function (chunk, encoding, callback) { + if (head.write(chunk, encoding)) { + callback(); + } else { + ondrain = callback; + } + }; + + d._final = function (callback) { + head.end(); + onfinish = callback; + }; + + head.on("drain", function () { + if (ondrain) { + const cb = ondrain; + ondrain = null; + cb(); + } + }); + } else if (isWebStream(head)) { + const writable = isTransformStream(head) ? head.writable : head; + const writer = writable.getWriter(); + + d._write = async function (chunk, encoding, callback) { + try { + await writer.ready; + writer.write(chunk).catch(() => {}); + callback(); + } catch (err) { + callback(err); + } + }; + + d._final = async function (callback) { + try { + await writer.ready; + writer.close().catch(() => {}); + onfinish = callback; + } catch (err) { + callback(err); + } + }; + } + + const toRead = isTransformStream(tail) ? tail.readable : tail; + + eos(toRead, () => { + if (onfinish) { + const cb = onfinish; + onfinish = null; + cb(); + } + }); + } + + if (readable) { + if (isNodeStream(tail)) { + tail.on("readable", function () { + if (onreadable) { + const cb = onreadable; + onreadable = null; + cb(); + } + }); + + tail.on("end", function () { + d.push(null); + }); + + d._read = function () { + while (true) { + const buf = tail.read(); + if (buf === null) { + onreadable = d._read; + return; + } + + if (!d.push(buf)) { + return; + } + } + }; + } else if (isWebStream(tail)) { + const readable = isTransformStream(tail) ? 
tail.readable : tail; + const reader = readable.getReader(); + d._read = async function () { + while (true) { + try { + const { value, done } = await reader.read(); + + if (!d.push(value)) { + return; + } + + if (done) { + d.push(null); + return; + } + } catch { + return; + } + } + }; + } + } + + d._destroy = function (err, callback) { + if (!err && onclose !== null) { + err = $makeAbortError(); + } + + onreadable = null; + ondrain = null; + onfinish = null; + + if (isNodeStream(tail)) { + destroyer(tail, err); + } + + if (onclose === null) { + callback(err); + } else { + onclose = callback; + } + }; + + return d; +} diff --git a/src/js/internal/streams/destroy.ts b/src/js/internal/streams/destroy.ts new file mode 100644 index 0000000000..b11e41161b --- /dev/null +++ b/src/js/internal/streams/destroy.ts @@ -0,0 +1,340 @@ +"use strict"; + +const { aggregateTwoErrors } = require("internal/errors"); +const { + kIsDestroyed, + isDestroyed, + isFinished, + isServerRequest, + kState, + kErrorEmitted, + kEmitClose, + kClosed, + kCloseEmitted, + kConstructed, + kDestroyed, + kAutoDestroy, + kErrored, +} = require("internal/streams/utils"); + +const ProcessNextTick = process.nextTick; + +const kDestroy = Symbol("kDestroy"); +const kConstruct = Symbol("kConstruct"); + +function checkError(err, w, r) { + if (err) { + // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 + err.stack; // eslint-disable-line no-unused-expressions + + if (w && !w.errored) { + w.errored = err; + } + if (r && !r.errored) { + r.errored = err; + } + } +} + +// Backwards compat. cb() is undocumented and unused in core but +// unfortunately might be used by modules. +function destroy(err, cb) { + const r = this._readableState; + const w = this._writableState; + // With duplex streams we use the writable side for state. 
+ const s = w || r; + + if ((w && (w[kState] & kDestroyed) !== 0) || (r && (r[kState] & kDestroyed) !== 0)) { + if (typeof cb === "function") { + cb(); + } + + return this; + } + + // We set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + checkError(err, w, r); + + if (w) { + w[kState] |= kDestroyed; + } + if (r) { + r[kState] |= kDestroyed; + } + + // If still constructing then defer calling _destroy. + if ((s[kState] & kConstructed) === 0) { + this.once(kDestroy, function (er) { + _destroy(this, aggregateTwoErrors(er, err), cb); + }); + } else { + _destroy(this, err, cb); + } + + return this; +} + +function _destroy(self, err, cb) { + let called = false; + + function onDestroy(err) { + if (called) { + return; + } + called = true; + + const r = self._readableState; + const w = self._writableState; + + checkError(err, w, r); + + if (w) { + w[kState] |= kClosed; + } + if (r) { + r[kState] |= kClosed; + } + + if (typeof cb === "function") { + cb(err); + } + + if (err) { + ProcessNextTick(emitErrorCloseNT, self, err); + } else { + ProcessNextTick(emitCloseNT, self); + } + } + try { + self._destroy(err || null, onDestroy); + } catch (err) { + onDestroy(err); + } +} + +function emitErrorCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} + +function emitCloseNT(self) { + const r = self._readableState; + const w = self._writableState; + + if (w) { + w[kState] |= kCloseEmitted; + } + if (r) { + r[kState] |= kCloseEmitted; + } + + if ((w && (w[kState] & kEmitClose) !== 0) || (r && (r[kState] & kEmitClose) !== 0)) { + self.emit("close"); + } +} + +function emitErrorNT(self, err) { + const r = self._readableState; + const w = self._writableState; + + if ((w && (w[kState] & kErrorEmitted) !== 0) || (r && (r[kState] & kErrorEmitted) !== 0)) { + return; + } + + if (w) { + w[kState] |= kErrorEmitted; + } + if (r) { + r[kState] |= kErrorEmitted; + } + + self.emit("error", 
err); +} + +function undestroy() { + const r = this._readableState; + const w = this._writableState; + + if (r) { + r.constructed = true; + r.closed = false; + r.closeEmitted = false; + r.destroyed = false; + r.errored = null; + r.errorEmitted = false; + r.reading = false; + r.ended = r.readable === false; + r.endEmitted = r.readable === false; + } + + if (w) { + w.constructed = true; + w.destroyed = false; + w.closed = false; + w.closeEmitted = false; + w.errored = null; + w.errorEmitted = false; + w.finalCalled = false; + w.prefinished = false; + w.ended = w.writable === false; + w.ending = w.writable === false; + w.finished = w.writable === false; + } +} + +function errorOrDestroy(stream, err, sync?) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + + const r = stream._readableState; + const w = stream._writableState; + + if ( + (w && (w[kState] ? (w[kState] & kDestroyed) !== 0 : w.destroyed)) || + (r && (r[kState] ? 
(r[kState] & kDestroyed) !== 0 : r.destroyed)) + ) { + return this; + } + + if ((r && (r[kState] & kAutoDestroy) !== 0) || (w && (w[kState] & kAutoDestroy) !== 0)) { + stream.destroy(err); + } else if (err) { + // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 + err.stack; // eslint-disable-line no-unused-expressions + + if (w && (w[kState] & kErrored) === 0) { + w.errored = err; + } + if (r && (r[kState] & kErrored) === 0) { + r.errored = err; + } + if (sync) { + ProcessNextTick(emitErrorNT, stream, err); + } else { + emitErrorNT(stream, err); + } + } +} + +function construct(stream, cb) { + if (typeof stream._construct !== "function") { + return; + } + + const r = stream._readableState; + const w = stream._writableState; + + if (r) { + r[kState] &= ~kConstructed; + } + if (w) { + w[kState] &= ~kConstructed; + } + + stream.once(kConstruct, cb); + + if (stream.listenerCount(kConstruct) > 1) { + // Duplex + return; + } + + ProcessNextTick(constructNT, stream); +} + +function constructNT(stream) { + let called = false; + + function onConstruct(err) { + if (called) { + errorOrDestroy(stream, err ?? 
$ERR_MULTIPLE_CALLBACK()); + return; + } + called = true; + + const r = stream._readableState; + const w = stream._writableState; + const s = w || r; + + if (r) { + r[kState] |= kConstructed; + } + if (w) { + w[kState] |= kConstructed; + } + + if (s.destroyed) { + stream.emit(kDestroy, err); + } else if (err) { + errorOrDestroy(stream, err, true); + } else { + stream.emit(kConstruct); + } + } + + try { + stream._construct(err => { + ProcessNextTick(onConstruct, err); + }); + } catch (err) { + ProcessNextTick(onConstruct, err); + } +} + +function isRequest(stream) { + return stream?.setHeader && typeof stream.abort === "function"; +} + +function emitCloseLegacy(stream) { + stream.emit("close"); +} + +function emitErrorCloseLegacy(stream, err) { + stream.emit("error", err); + ProcessNextTick(emitCloseLegacy, stream); +} + +// Normalize destroy for legacy. +function destroyer(stream, err) { + if (!stream || isDestroyed(stream)) { + return; + } + + if (!err && !isFinished(stream)) { + err = $makeAbortError(); + } + + // TODO: Remove isRequest branches. + if (isServerRequest(stream)) { + stream.socket = null; + stream.destroy(err); + } else if (isRequest(stream)) { + stream.abort(); + } else if (isRequest(stream.req)) { + stream.req.abort(); + } else if (typeof stream.destroy === "function") { + stream.destroy(err); + } else if (typeof stream.close === "function") { + // TODO: Don't lose err? + stream.close(); + } else if (err) { + ProcessNextTick(emitErrorCloseLegacy, stream, err); + } else { + ProcessNextTick(emitCloseLegacy, stream); + } + + if (!stream.destroyed) { + stream[kIsDestroyed] = true; + } +} + +export default { + construct, + destroyer, + destroy, + undestroy, + errorOrDestroy, +}; diff --git a/src/js/internal/streams/duplex.ts b/src/js/internal/streams/duplex.ts new file mode 100644 index 0000000000..c814b5b157 --- /dev/null +++ b/src/js/internal/streams/duplex.ts @@ -0,0 +1,153 @@ +// a duplex stream is just a stream that is both readable and writable. 
+// Since JS doesn't have multiple prototype inheritance, this class +// prototypically inherits from Readable, and then parasitically from +// Writable. + +"use strict"; + +const Stream = require("internal/streams/legacy").Stream; +const Readable = require("internal/streams/readable"); +const Writable = require("internal/streams/writable"); +const { addAbortSignal } = require("internal/streams/add-abort-signal"); +const destroyImpl = require("internal/streams/destroy"); +const { kOnConstructed } = require("internal/streams/utils"); + +const ObjectKeys = Object.keys; +const ObjectDefineProperties = Object.defineProperties; +const ObjectGetOwnPropertyDescriptor = Object.getOwnPropertyDescriptor; + +function Duplex(options) { + if (!(this instanceof Duplex)) return Reflect.construct(Duplex, [options]); + + this._events ??= { + close: undefined, + error: undefined, + prefinish: undefined, + finish: undefined, + drain: undefined, + data: undefined, + end: undefined, + readable: undefined, + // Skip uncommon events... 
+ // pause: undefined, + // resume: undefined, + // pipe: undefined, + // unpipe: undefined, + // [destroyImpl.kConstruct]: undefined, + // [destroyImpl.kDestroy]: undefined, + }; + + this._readableState = new Readable.ReadableState(options, this, true); + this._writableState = new Writable.WritableState(options, this, true); + + if (options) { + this.allowHalfOpen = options.allowHalfOpen !== false; + + if (options.readable === false) { + this._readableState.readable = false; + this._readableState.ended = true; + this._readableState.endEmitted = true; + } + + if (options.writable === false) { + this._writableState.writable = false; + this._writableState.ending = true; + this._writableState.ended = true; + this._writableState.finished = true; + } + + if (typeof options.read === "function") this._read = options.read; + + if (typeof options.write === "function") this._write = options.write; + + if (typeof options.writev === "function") this._writev = options.writev; + + if (typeof options.destroy === "function") this._destroy = options.destroy; + + if (typeof options.final === "function") this._final = options.final; + + if (typeof options.construct === "function") this._construct = options.construct; + + if (options.signal) addAbortSignal(options.signal, this); + } else { + this.allowHalfOpen = true; + } + + Stream.$call(this, options); + + if (this._construct != null) { + destroyImpl.construct(this, () => { + this._readableState[kOnConstructed](this); + this._writableState[kOnConstructed](this); + }); + } +} +$toClass(Duplex, "Duplex", Readable); + +// Use the `destroy` method of `Writable`. +Duplex.prototype.destroy = Writable.prototype.destroy; + +{ + const keys = ObjectKeys(Writable.prototype); + // Allow the keys array to be GC'ed. 
+ for (let i = 0; i < keys.length; i++) { + const method = keys[i]; + Duplex.prototype[method] ||= Writable.prototype[method]; + } +} + +ObjectDefineProperties(Duplex.prototype, { + writable: { __proto__: null, ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writable") }, + writableHighWaterMark: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableHighWaterMark"), + }, + writableObjectMode: { __proto__: null, ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableObjectMode") }, + writableBuffer: { __proto__: null, ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableBuffer") }, + writableLength: { __proto__: null, ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableLength") }, + writableFinished: { __proto__: null, ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableFinished") }, + writableCorked: { __proto__: null, ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableCorked") }, + writableEnded: { __proto__: null, ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableEnded") }, + writableNeedDrain: { __proto__: null, ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableNeedDrain") }, + + destroyed: { + __proto__: null, + get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set(value) { + // Backward compatibility, the user is explicitly + // managing destroyed. 
+ if (this._readableState && this._writableState) { + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } + }, + }, +}); + +// Lazy to avoid circular references +let webStreamsAdapters; +function lazyWebStreams() { + if (webStreamsAdapters === undefined) webStreamsAdapters = require("internal/webstreams_adapters"); + return webStreamsAdapters; +} + +Duplex.fromWeb = function (pair, options) { + return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options); +}; + +Duplex.toWeb = function (duplex) { + return lazyWebStreams().newReadableWritablePairFromDuplex(duplex); +}; + +let duplexify; +Duplex.from = function (body) { + duplexify ??= require("internal/streams/duplexify"); + return duplexify(body, "body"); +}; + +export default Duplex; diff --git a/src/js/internal/streams/duplexify.ts b/src/js/internal/streams/duplexify.ts new file mode 100644 index 0000000000..cb193e7895 --- /dev/null +++ b/src/js/internal/streams/duplexify.ts @@ -0,0 +1,369 @@ +"use strict"; + +const { + isReadable, + isWritable, + isIterable, + isNodeStream, + isReadableNodeStream, + isWritableNodeStream, + isDuplexNodeStream, + isReadableStream, + isWritableStream, +} = require("internal/streams/utils"); +const eos = require("internal/streams/end-of-stream"); +const { destroyer } = require("internal/streams/destroy"); +const Duplex = require("internal/streams/duplex"); +const Readable = require("internal/streams/readable"); +const Writable = require("internal/streams/writable"); +const from = require("internal/streams/from"); + +const PromiseWithResolvers = Promise.withResolvers.bind(Promise); + +class Duplexify extends Duplex { + constructor(options) { + super(options); + + // https://github.com/nodejs/node/pull/34385 + + if (options?.readable === false) { + this._readableState.readable = false; + this._readableState.ended = true; + this._readableState.endEmitted = true; + } + + if (options?.writable === false) { + this._writableState.writable = 
false; + this._writableState.ending = true; + this._writableState.ended = true; + this._writableState.finished = true; + } + } +} + +function duplexify(body, name?) { + if (isDuplexNodeStream(body)) { + return body; + } + + if (isReadableNodeStream(body)) { + return _duplexify({ readable: body }); + } + + if (isWritableNodeStream(body)) { + return _duplexify({ writable: body }); + } + + if (isNodeStream(body)) { + return _duplexify({ writable: false, readable: false }); + } + + if (isReadableStream(body)) { + return _duplexify({ readable: Readable.fromWeb(body) }); + } + + if (isWritableStream(body)) { + return _duplexify({ writable: Writable.fromWeb(body) }); + } + + if (typeof body === "function") { + const { value, write, final, destroy } = fromAsyncGen(body); + + // Body might be a constructor function instead of an async generator function. + if (isDuplexNodeStream(value)) { + return value; + } + + if (isIterable(value)) { + return from(Duplexify, value, { + // TODO (ronag): highWaterMark? + objectMode: true, + write, + final, + destroy, + }); + } + + const then = value?.then; + if (typeof then === "function") { + let d; + + const promise = then.$call( + value, + val => { + if (val != null) { + throw $ERR_INVALID_RETURN_VALUE("nully", "body", val); + } + }, + err => { + destroyer(d, err); + }, + ); + + return (d = new Duplexify({ + // TODO (ronag): highWaterMark? + objectMode: true, + readable: false, + write, + final(cb) { + final(async () => { + try { + await promise; + process.nextTick(cb, null); + } catch (err) { + process.nextTick(cb, err); + } + }); + }, + destroy, + })); + } + + throw $ERR_INVALID_RETURN_VALUE("Iterable, AsyncIterable or AsyncFunction", name, value); + } + + if ($inheritsBlob(body)) { + return duplexify(body.arrayBuffer()); + } + + if (isIterable(body)) { + return from(Duplexify, body, { + // TODO (ronag): highWaterMark? 
+ objectMode: true, + writable: false, + }); + } + + if (isReadableStream(body?.readable) && isWritableStream(body?.writable)) { + return Duplexify.fromWeb(body); + } + + if (typeof body?.writable === "object" || typeof body?.readable === "object") { + const readable = body?.readable + ? isReadableNodeStream(body?.readable) + ? body?.readable + : duplexify(body.readable) + : undefined; + + const writable = body?.writable + ? isWritableNodeStream(body?.writable) + ? body?.writable + : duplexify(body.writable) + : undefined; + + return _duplexify({ readable, writable }); + } + + const then = body?.then; + if (typeof then === "function") { + let d; + + then.$call( + body, + val => { + if (val != null) { + d.push(val); + } + d.push(null); + }, + err => { + destroyer(d, err); + }, + ); + + return (d = new Duplexify({ + objectMode: true, + writable: false, + read() {}, + })); + } + + throw $ERR_INVALID_ARG_TYPE( + name, + [ + "Blob", + "ReadableStream", + "WritableStream", + "Stream", + "Iterable", + "AsyncIterable", + "Function", + "{ readable, writable } pair", + "Promise", + ], + body, + ); +} + +function fromAsyncGen(fn) { + let { promise, resolve } = PromiseWithResolvers(); + const ac = new AbortController(); + const signal = ac.signal; + const value = fn( + (async function* () { + while (true) { + const _promise = promise; + promise = null; + const { chunk, done, cb } = await _promise; + process.nextTick(cb); + if (done) return; + if (signal.aborted) throw $makeAbortError(undefined, { cause: signal.reason }); + ({ promise, resolve } = PromiseWithResolvers()); + yield chunk; + } + })(), + { signal }, + ); + + return { + value, + write(chunk, encoding, cb) { + const _resolve = resolve; + resolve = null; + _resolve({ chunk, done: false, cb }); + }, + final(cb) { + const _resolve = resolve; + resolve = null; + _resolve({ done: true, cb }); + }, + destroy(err, cb) { + ac.abort(); + cb(err); + }, + }; +} + +function _duplexify(pair) { + const r = pair.readable && typeof 
pair.readable.read !== "function" ? Readable.wrap(pair.readable) : pair.readable; + const w = pair.writable; + + let readable = !!isReadable(r); + let writable = !!isWritable(w); + + let ondrain; + let onfinish; + let onreadable; + let onclose; + let d; + + function onfinished(err) { + const cb = onclose; + onclose = null; + + if (cb) { + cb(err); + } else if (err) { + d.destroy(err); + } + } + + // TODO(ronag): Avoid double buffering. + // Implement Writable/Readable/Duplex traits. + // See, https://github.com/nodejs/node/pull/33515. + d = new Duplexify({ + // TODO (ronag): highWaterMark? + readableObjectMode: !!r?.readableObjectMode, + writableObjectMode: !!w?.writableObjectMode, + readable, + writable, + }); + + if (writable) { + eos(w, err => { + writable = false; + if (err) { + destroyer(r, err); + } + onfinished(err); + }); + + d._write = function (chunk, encoding, callback) { + if (w.write(chunk, encoding)) { + callback(); + } else { + ondrain = callback; + } + }; + + d._final = function (callback) { + w.end(); + onfinish = callback; + }; + + w.on("drain", function () { + if (ondrain) { + const cb = ondrain; + ondrain = null; + cb(); + } + }); + + w.on("finish", function () { + if (onfinish) { + const cb = onfinish; + onfinish = null; + cb(); + } + }); + } + + if (readable) { + eos(r, err => { + readable = false; + if (err) { + destroyer(r, err); + } + onfinished(err); + }); + + r.on("readable", function () { + if (onreadable) { + const cb = onreadable; + onreadable = null; + cb(); + } + }); + + r.on("end", function () { + d.push(null); + }); + + d._read = function () { + while (true) { + const buf = r.read(); + + if (buf === null) { + onreadable = d._read; + return; + } + + if (!d.push(buf)) { + return; + } + } + }; + } + + d._destroy = function (err, callback) { + if (!err && onclose !== null) { + err = $makeAbortError(); + } + + onreadable = null; + ondrain = null; + onfinish = null; + + if (onclose === null) { + callback(err); + } else { + onclose = 
callback; + destroyer(w, err); + destroyer(r, err); + } + }; + + return d; +} + +export default duplexify; diff --git a/src/js/internal/streams/duplexpair.ts b/src/js/internal/streams/duplexpair.ts new file mode 100644 index 0000000000..63211b8032 --- /dev/null +++ b/src/js/internal/streams/duplexpair.ts @@ -0,0 +1,59 @@ +"use strict"; + +const Duplex = require("internal/streams/duplex"); + +const kCallback = Symbol("Callback"); +const kInitOtherSide = Symbol("InitOtherSide"); + +class DuplexSide extends Duplex { + #otherSide = null; + + constructor(options) { + super(options); + this[kCallback] = null; + this.#otherSide = null; + } + + [kInitOtherSide](otherSide) { + // Ensure this can only be set once, to enforce encapsulation. + if (this.#otherSide === null) { + this.#otherSide = otherSide; + } else { + $assert(this.#otherSide === null); + } + } + + _read() { + const callback = this[kCallback]; + if (callback) { + this[kCallback] = null; + callback(); + } + } + + _write(chunk, encoding, callback) { + $assert(this.#otherSide !== null); + $assert(this.#otherSide[kCallback] === null); + if (chunk.length === 0) { + process.nextTick(callback); + } else { + this.#otherSide.push(chunk); + this.#otherSide[kCallback] = callback; + } + } + + _final(callback) { + this.#otherSide.on("end", callback); + this.#otherSide.push(null); + } +} + +function duplexPair(options) { + const side0 = new DuplexSide(options); + const side1 = new DuplexSide(options); + side0[kInitOtherSide](side1); + side1[kInitOtherSide](side0); + return [side0, side1]; +} + +export default duplexPair; diff --git a/src/js/internal/streams/end-of-stream.ts b/src/js/internal/streams/end-of-stream.ts new file mode 100644 index 0000000000..fdf0b2d236 --- /dev/null +++ b/src/js/internal/streams/end-of-stream.ts @@ -0,0 +1,297 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). 
+ +"use strict"; + +const { kEmptyObject, once } = require("internal/shared"); +const { validateAbortSignal, validateFunction, validateObject, validateBoolean } = require("internal/validators"); +const { + isClosed, + isReadable, + isReadableNodeStream, + isReadableStream, + isReadableFinished, + isReadableErrored, + isWritable, + isWritableNodeStream, + isWritableStream, + isWritableFinished, + isWritableErrored, + isNodeStream, + willEmitClose: _willEmitClose, + kIsClosedPromise, +} = require("internal/streams/utils"); + +const SymbolDispose = Symbol.dispose; +const PromisePrototypeThen = Promise.prototype.then; + +let addAbortListener; + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === "function"; +} + +const nop = () => {}; + +function eos(stream, options, callback) { + if (arguments.length === 2) { + callback = options; + options = kEmptyObject; + } else if (options == null) { + options = kEmptyObject; + } else { + validateObject(options, "options"); + } + validateFunction(callback, "callback"); + validateAbortSignal(options.signal, "options.signal"); + + callback = once(callback); + + if (isReadableStream(stream) || isWritableStream(stream)) { + return eosWeb(stream, options, callback); + } + + if (!isNodeStream(stream)) { + throw $ERR_INVALID_ARG_TYPE("stream", ["ReadableStream", "WritableStream", "Stream"], stream); + } + + const readable = options.readable ?? isReadableNodeStream(stream); + const writable = options.writable ?? isWritableNodeStream(stream); + + const wState = stream._writableState; + const rState = stream._readableState; + + const onlegacyfinish = () => { + if (!stream.writable) { + onfinish(); + } + }; + + // TODO (ronag): Improve soft detection to include core modules and + // common ecosystem modules that do properly emit 'close' but fail + // this generic check. 
+ let willEmitClose = + _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable; + + let writableFinished = isWritableFinished(stream, false); + const onfinish = () => { + writableFinished = true; + // Stream should not be destroyed here. If it is that + // means that user space is doing something differently and + // we cannot trust willEmitClose. + if (stream.destroyed) { + willEmitClose = false; + } + + if (willEmitClose && (!stream.readable || readable)) { + return; + } + + if (!readable || readableFinished) { + callback.$call(stream); + } + }; + + let readableFinished = isReadableFinished(stream, false); + const onend = () => { + readableFinished = true; + // Stream should not be destroyed here. If it is that + // means that user space is doing something differently and + // we cannot trust willEmitClose. + if (stream.destroyed) { + willEmitClose = false; + } + + if (willEmitClose && (!stream.writable || writable)) { + return; + } + + if (!writable || writableFinished) { + callback.$call(stream); + } + }; + + const onerror = err => { + callback.$call(stream, err); + }; + + let closed = isClosed(stream); + + const onclose = () => { + closed = true; + + const errored = isWritableErrored(stream) || isReadableErrored(stream); + + if (errored && typeof errored !== "boolean") { + return callback.$call(stream, errored); + } + + if (readable && !readableFinished && isReadableNodeStream(stream, true)) { + if (!isReadableFinished(stream, false)) return callback.$call(stream, $ERR_STREAM_PREMATURE_CLOSE()); + } + if (writable && !writableFinished) { + if (!isWritableFinished(stream, false)) return callback.$call(stream, $ERR_STREAM_PREMATURE_CLOSE()); + } + + callback.$call(stream); + }; + + const onclosed = () => { + closed = true; + + const errored = isWritableErrored(stream) || isReadableErrored(stream); + + if (errored && typeof errored !== "boolean") { + return callback.$call(stream, errored); + } + + 
callback.$call(stream); + }; + + const onrequest = () => { + stream.req.on("finish", onfinish); + }; + + if (isRequest(stream)) { + stream.on("complete", onfinish); + if (!willEmitClose) { + stream.on("abort", onclose); + } + if (stream.req) { + onrequest(); + } else { + stream.on("request", onrequest); + } + } else if (writable && !wState) { + // legacy streams + stream.on("end", onlegacyfinish); + stream.on("close", onlegacyfinish); + } + + // Not all streams will emit 'close' after 'aborted'. + if (!willEmitClose && typeof stream.aborted === "boolean") { + stream.on("aborted", onclose); + } + + stream.on("end", onend); + stream.on("finish", onfinish); + if (options.error !== false) { + stream.on("error", onerror); + } + stream.on("close", onclose); + + if (closed) { + process.nextTick(onclose); + } else if (wState?.errorEmitted || rState?.errorEmitted) { + if (!willEmitClose) { + process.nextTick(onclosed); + } + } else if ( + !readable && + (!willEmitClose || isReadable(stream)) && + (writableFinished || isWritable(stream) === false) && + (wState == null || wState.pendingcb === undefined || wState.pendingcb === 0) + ) { + process.nextTick(onclosed); + } else if ( + !writable && + (!willEmitClose || isWritable(stream)) && + (readableFinished || isReadable(stream) === false) + ) { + process.nextTick(onclosed); + } else if (rState && stream.req && stream.aborted) { + process.nextTick(onclosed); + } + + const cleanup = () => { + callback = nop; + stream.removeListener("aborted", onclose); + stream.removeListener("complete", onfinish); + stream.removeListener("abort", onclose); + stream.removeListener("request", onrequest); + if (stream.req) stream.req.removeListener("finish", onfinish); + stream.removeListener("end", onlegacyfinish); + stream.removeListener("close", onlegacyfinish); + stream.removeListener("finish", onfinish); + stream.removeListener("end", onend); + stream.removeListener("error", onerror); + stream.removeListener("close", onclose); + }; + + if 
(options.signal && !closed) { + const abort = () => { + // Keep it because cleanup removes it. + const endCallback = callback; + cleanup(); + endCallback.$call(stream, $makeAbortError(undefined, { cause: options.signal.reason })); + }; + if (options.signal.aborted) { + process.nextTick(abort); + } else { + addAbortListener ??= require("internal/abort_listener").addAbortListener; + const disposable = addAbortListener(options.signal, abort); + const originalCallback = callback; + callback = once((...args) => { + disposable[SymbolDispose](); + originalCallback.$apply(stream, args); + }); + } + } + + return cleanup; +} + +function eosWeb(stream, options, callback) { + let isAborted = false; + let abort = nop; + if (options.signal) { + abort = () => { + isAborted = true; + callback.$call(stream, $makeAbortError(undefined, { cause: options.signal.reason })); + }; + if (options.signal.aborted) { + process.nextTick(abort); + } else { + addAbortListener ??= require("internal/abort_listener").addAbortListener; + const disposable = addAbortListener(options.signal, abort); + const originalCallback = callback; + callback = once((...args) => { + disposable[SymbolDispose](); + originalCallback.$apply(stream, args); + }); + } + } + const resolverFn = (...args) => { + if (!isAborted) { + process.nextTick(() => callback.$apply(stream, args)); + } + }; + PromisePrototypeThen.$call(stream[kIsClosedPromise].promise, resolverFn, resolverFn); + return nop; +} + +function finished(stream, opts) { + let autoCleanup = false; + if (opts === null) { + opts = kEmptyObject; + } + if (opts?.cleanup) { + validateBoolean(opts.cleanup, "cleanup"); + autoCleanup = opts.cleanup; + } + return new Promise((resolve, reject) => { + const cleanup = eos(stream, opts, err => { + if (autoCleanup) { + cleanup(); + } + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); +} + +eos.finished = finished; +export default eos; diff --git a/src/js/internal/streams/from.ts 
b/src/js/internal/streams/from.ts new file mode 100644 index 0000000000..46308d5d3d --- /dev/null +++ b/src/js/internal/streams/from.ts @@ -0,0 +1,197 @@ +"use strict"; + +const { Buffer } = require("node:buffer"); + +const SymbolIterator = Symbol.iterator; +const SymbolAsyncIterator = Symbol.asyncIterator; +const PromisePrototypeThen = Promise.prototype.then; + +function from(Readable, iterable, opts) { + let iterator; + if (typeof iterable === "string" || iterable instanceof Buffer) { + return new Readable({ + objectMode: true, + ...opts, + read() { + this.push(iterable); + this.push(null); + }, + }); + } + + let isAsync; + if (iterable?.[SymbolAsyncIterator]) { + isAsync = true; + iterator = iterable[SymbolAsyncIterator](); + } else if (iterable?.[SymbolIterator]) { + isAsync = false; + iterator = iterable[SymbolIterator](); + } else { + throw $ERR_INVALID_ARG_TYPE("iterable", ["Iterable"], iterable); + } + + const readable = new Readable({ + objectMode: true, + highWaterMark: 1, + // TODO(ronag): What options should be allowed? + ...opts, + }); + + // Flag to protect against _read + // being called before last iteration completion. 
+ let reading = false; + let isAsyncValues = false; + + readable._read = function () { + if (!reading) { + reading = true; + + if (isAsync) { + nextAsync(); + } else if (isAsyncValues) { + nextSyncWithAsyncValues(); + } else { + nextSyncWithSyncValues(); + } + } + }; + + readable._destroy = function (error, cb) { + PromisePrototypeThen.$call( + close(error), + () => process.nextTick(cb, error), // nextTick is here in case cb throws + e => process.nextTick(cb, e || error), + ); + }; + + async function close(error) { + const hadError = error !== undefined && error !== null; + const hasThrow = typeof iterator.throw === "function"; + if (hadError && hasThrow) { + const { value, done } = await iterator.throw(error); + await value; + if (done) { + return; + } + } + if (typeof iterator.return === "function") { + const { value } = await iterator.return(); + await value; + } + } + + // There are a lot of duplication here, it's done on purpose for performance + // reasons - avoid await when not needed. 
+ + function nextSyncWithSyncValues() { + for (;;) { + try { + const { value, done } = iterator.next(); + + if (done) { + readable.push(null); + return; + } + + if (value && typeof value.then === "function") { + return changeToAsyncValues(value); + } + + if (value === null) { + reading = false; + throw $ERR_STREAM_NULL_VALUES(); + } + + if (readable.push(value)) { + continue; + } + + reading = false; + } catch (err) { + readable.destroy(err); + } + break; + } + } + + async function changeToAsyncValues(value) { + isAsyncValues = true; + + try { + const res = await value; + + if (res === null) { + reading = false; + throw $ERR_STREAM_NULL_VALUES(); + } + + if (readable.push(res)) { + nextSyncWithAsyncValues(); + return; + } + + reading = false; + } catch (err) { + readable.destroy(err); + } + } + + async function nextSyncWithAsyncValues() { + for (;;) { + try { + const { value, done } = iterator.next(); + + if (done) { + readable.push(null); + return; + } + + const res = value && typeof value.then === "function" ? await value : value; + + if (res === null) { + reading = false; + throw $ERR_STREAM_NULL_VALUES(); + } + + if (readable.push(res)) { + continue; + } + + reading = false; + } catch (err) { + readable.destroy(err); + } + break; + } + } + + async function nextAsync() { + for (;;) { + try { + const { value, done } = await iterator.next(); + + if (done) { + readable.push(null); + return; + } + + if (value === null) { + reading = false; + throw $ERR_STREAM_NULL_VALUES(); + } + + if (readable.push(value)) { + continue; + } + + reading = false; + } catch (err) { + readable.destroy(err); + } + break; + } + } + return readable; +} + +export default from; diff --git a/src/js/internal/streams/lazy_transform.ts b/src/js/internal/streams/lazy_transform.ts new file mode 100644 index 0000000000..56bdb1f8d4 --- /dev/null +++ b/src/js/internal/streams/lazy_transform.ts @@ -0,0 +1,53 @@ +// LazyTransform is a special type of Transform stream that is lazily loaded. 
+// This is used for performance with bi-API-ship: when two APIs are available +// for the stream, one conventional and one non-conventional. +"use strict"; + +const Transform = require("internal/streams/transform"); + +const ObjectDefineProperty = Object.defineProperty; +const ObjectDefineProperties = Object.defineProperties; + +function LazyTransform(options) { + this._options = options; +} +$toClass(LazyTransform, "LazyTransform", Transform); + +function makeGetter(name) { + return function () { + Transform.$call(this, this._options); + this._writableState.decodeStrings = false; + return this[name]; + }; +} + +function makeSetter(name) { + return function (val) { + ObjectDefineProperty(this, name, { + __proto__: null, + value: val, + enumerable: true, + configurable: true, + writable: true, + }); + }; +} + +ObjectDefineProperties(LazyTransform.prototype, { + _readableState: { + __proto__: null, + get: makeGetter("_readableState"), + set: makeSetter("_readableState"), + configurable: true, + enumerable: true, + }, + _writableState: { + __proto__: null, + get: makeGetter("_writableState"), + set: makeSetter("_writableState"), + configurable: true, + enumerable: true, + }, +}); + +export default LazyTransform; diff --git a/src/js/internal/streams/legacy.ts b/src/js/internal/streams/legacy.ts new file mode 100644 index 0000000000..251cac6438 --- /dev/null +++ b/src/js/internal/streams/legacy.ts @@ -0,0 +1,116 @@ +"use strict"; + +const EE = require("node:events"); + +const ReflectOwnKeys = Reflect.ownKeys; +const ArrayIsArray = Array.isArray; + +function Stream(opts) { + EE.$call(this, opts); +} +$toClass(Stream, "Stream", EE); + +Stream.prototype.pipe = function (dest, options) { + const source = this; + + function ondata(chunk) { + if (dest.writable && dest.write(chunk) === false && source.pause) { + source.pause(); + } + } + + source.on("data", ondata); + + function ondrain() { + if (source.readable && source.resume) { + source.resume(); + } + } + + 
dest.on("drain", ondrain); + + // If the 'end' option is not supplied, dest.end() will be called when + // source gets the 'end' or 'close' events. Only dest.end() once. + if (!dest._isStdio && (!options || options.end !== false)) { + source.on("end", onend); + source.on("close", onclose); + } + + let didOnEnd = false; + function onend() { + if (didOnEnd) return; + didOnEnd = true; + + dest.end(); + } + + function onclose() { + if (didOnEnd) return; + didOnEnd = true; + + if (typeof dest.destroy === "function") dest.destroy(); + } + + // Don't leave dangling pipes when there are errors. + function onerror(er) { + cleanup(); + if (EE.listenerCount(this, "error") === 0) { + this.emit("error", er); + } + } + + prependListener(source, "error", onerror); + prependListener(dest, "error", onerror); + + // Remove all the event listeners that were added. + function cleanup() { + source.removeListener("data", ondata); + dest.removeListener("drain", ondrain); + + source.removeListener("end", onend); + source.removeListener("close", onclose); + + source.removeListener("error", onerror); + dest.removeListener("error", onerror); + + source.removeListener("end", cleanup); + source.removeListener("close", cleanup); + + dest.removeListener("close", cleanup); + } + + source.on("end", cleanup); + source.on("close", cleanup); + + dest.on("close", cleanup); + dest.emit("pipe", source); + + // Allow for unix-like usage: A.pipe(B).pipe(C) + return dest; +}; + +Stream.prototype.eventNames = function eventNames() { + const names = []; + for (const key of ReflectOwnKeys(this._events)) { + if (typeof this._events[key] === "function" || (ArrayIsArray(this._events[key]) && this._events[key].length > 0)) { + names.push(key); + } + } + return names; +}; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. 
+ if (typeof emitter.prependListener === "function") return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn); + else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn); + else emitter._events[event] = [fn, emitter._events[event]]; +} + +export default { Stream, prependListener }; diff --git a/src/js/internal/streams/nativereadable.ts b/src/js/internal/streams/nativereadable.ts new file mode 100644 index 0000000000..74f897c519 --- /dev/null +++ b/src/js/internal/streams/nativereadable.ts @@ -0,0 +1,246 @@ +const { kEnsureConstructed } = require("internal/shared"); +const { errorOrDestroy } = require("internal/streams/destroy"); + +const ProcessNextTick = process.nextTick; + +var DYNAMICALLY_ADJUST_CHUNK_SIZE = process.env.BUN_DISABLE_DYNAMIC_CHUNK_SIZE !== "1"; + +const MIN_BUFFER_SIZE = 512; + +const refCount = Symbol("refCount"); +const constructed = Symbol("constructed"); +const remainingChunk = Symbol("remainingChunk"); +const highWaterMark = Symbol("highWaterMark"); +const pendingRead = Symbol("pendingRead"); +const hasResized = Symbol("hasResized"); +const _onClose = Symbol("_onClose"); +const _onDrain = Symbol("_onDrain"); +const _internalConstruct = Symbol("_internalConstruct"); +const _getRemainingChunk = Symbol("_getRemainingChunk"); +const _adjustHighWaterMark = Symbol("_adjustHighWaterMark"); +const _handleResult = Symbol("_handleResult"); +const _internalRead = Symbol("_internalRead"); + +export default function () { + const Readable = require("internal/streams/readable"); + + var closer = [false]; + var handleNumberResult = function (nativeReadable, result, 
view, isClosed) { + if (result > 0) { + const slice = view.subarray(0, result); + view = slice.byteLength < view.byteLength ? view.subarray(result) : undefined; + if (slice.byteLength > 0) { + nativeReadable.push(slice); + } + } + + if (isClosed) { + ProcessNextTick(() => { + nativeReadable.push(null); + }); + } + + return view; + }; + + var handleArrayBufferViewResult = function (nativeReadable, result, view, isClosed) { + if (result.byteLength > 0) { + nativeReadable.push(result); + } + + if (isClosed) { + ProcessNextTick(() => { + nativeReadable.push(null); + }); + } + + return view; + }; + + function NativeReadable(ptr, options) { + if (!(this instanceof NativeReadable)) return Reflect.construct(NativeReadable, [ptr, options]); + + this[refCount] = 0; + this[constructed] = false; + this[remainingChunk] = undefined; + this[pendingRead] = false; + this[hasResized] = !DYNAMICALLY_ADJUST_CHUNK_SIZE; + + options ??= {}; + Readable.$apply(this, [options]); + + if (typeof options.highWaterMark === "number") { + this[highWaterMark] = options.highWaterMark; + } else { + this[highWaterMark] = 256 * 1024; + } + this.$bunNativePtr = ptr; + this[constructed] = false; + this[remainingChunk] = undefined; + this[pendingRead] = false; + ptr.onClose = this[_onClose].bind(this); + ptr.onDrain = this[_onDrain].bind(this); + } + $toClass(NativeReadable, "NativeReadable", Readable); + + NativeReadable.prototype[_onClose] = function () { + this.push(null); + }; + + NativeReadable.prototype[_onDrain] = function (chunk) { + this.push(chunk); + }; + + // maxToRead is by default the highWaterMark passed from the Readable.read call to this fn + // However, in the case of an fs.ReadStream, we can pass the number of bytes we want to read + // which may be significantly less than the actual highWaterMark + NativeReadable.prototype._read = function _read(maxToRead) { + $debug("NativeReadable._read", this.__id); + if (this[pendingRead]) { + $debug("pendingRead is true", this.__id); + return; + 
} + var ptr = this.$bunNativePtr; + $debug("ptr @ NativeReadable._read", ptr, this.__id); + if (!ptr) { + this.push(null); + return; + } + if (!this[constructed]) { + $debug("NativeReadable not constructed yet", this.__id); + this[_internalConstruct](ptr); + } + return this[_internalRead](this[_getRemainingChunk](maxToRead), ptr); + }; + + NativeReadable.prototype[_internalConstruct] = function (ptr) { + $assert(this[constructed] === false); + this[constructed] = true; + + const result = ptr.start(this[highWaterMark]); + + $debug("NativeReadable internal `start` result", result, this.__id); + + if (typeof result === "number" && result > 1) { + this[hasResized] = true; + $debug("NativeReadable resized", this.__id); + + this[highWaterMark] = Math.min(this[highWaterMark], result); + } + + const drainResult = ptr.drain(); + $debug("NativeReadable drain result", drainResult, this.__id); + if ((drainResult?.byteLength ?? 0) > 0) { + this.push(drainResult); + } + }; + + // maxToRead can be the highWaterMark (by default) or the remaining amount of the stream to read + // This is so the consumer of the stream can terminate the stream early if they know + // how many bytes they want to read (ie. when reading only part of a file) + // ObjectDefinePrivateProperty(NativeReadable.prototype, "_getRemainingChunk", ); + NativeReadable.prototype[_getRemainingChunk] = function (maxToRead) { + maxToRead ??= this[highWaterMark]; + var chunk = this[remainingChunk]; + $debug("chunk @ #getRemainingChunk", chunk, this.__id); + if (chunk?.byteLength ?? 0 < MIN_BUFFER_SIZE) { + var size = maxToRead > MIN_BUFFER_SIZE ? 
maxToRead : MIN_BUFFER_SIZE; + this[remainingChunk] = chunk = new Buffer(size); + } + return chunk; + }; + + // ObjectDefinePrivateProperty(NativeReadable.prototype, "_adjustHighWaterMark", ); + NativeReadable.prototype[_adjustHighWaterMark] = function () { + this[highWaterMark] = Math.min(this[highWaterMark] * 2, 1024 * 1024 * 2); + this[hasResized] = true; + $debug("Resized", this.__id); + }; + + // ObjectDefinePrivateProperty(NativeReadable.prototype, "_handleResult", ); + NativeReadable.prototype[_handleResult] = function (result, view, isClosed) { + $debug("result, isClosed @ #handleResult", result, isClosed, this.__id); + + if (typeof result === "number") { + if (result >= this[highWaterMark] && !this[hasResized] && !isClosed) { + this[_adjustHighWaterMark](); + } + return handleNumberResult(this, result, view, isClosed); + } else if (typeof result === "boolean") { + ProcessNextTick(() => { + this.push(null); + }); + return (view?.byteLength ?? 0 > 0) ? view : undefined; + } else if ($isTypedArrayView(result)) { + if (result.byteLength >= this[highWaterMark] && !this[hasResized] && !isClosed) { + this[_adjustHighWaterMark](); + } + + return handleArrayBufferViewResult(this, result, view, isClosed); + } else { + $debug("Unknown result type", result, this.__id); + throw new Error("Invalid result from pull"); + } + }; + + NativeReadable.prototype[_internalRead] = function (view, ptr) { + $debug("#internalRead()", this.__id); + closer[0] = false; + var result = ptr.pull(view, closer); + if ($isPromise(result)) { + this[pendingRead] = true; + return result.then( + result => { + this[pendingRead] = false; + $debug("pending no longerrrrrrrr (result returned from pull)", this.__id); + const isClosed = closer[0]; + this[remainingChunk] = this[_handleResult](result, view, isClosed); + }, + reason => { + $debug("error from pull", reason, this.__id); + errorOrDestroy(this, reason); + }, + ); + } else { + this[remainingChunk] = this[_handleResult](result, view, 
closer[0]); + } + }; + + NativeReadable.prototype._destroy = function (error, callback) { + var ptr = this.$bunNativePtr; + if (!ptr) { + callback(error); + return; + } + + this.$bunNativePtr = undefined; + ptr.updateRef(false); + + $debug("NativeReadable destroyed", this.__id); + ptr.cancel(error); + callback(error); + }; + + NativeReadable.prototype.ref = function () { + var ptr = this.$bunNativePtr; + if (ptr === undefined) return; + if (this[refCount]++ === 0) { + ptr.updateRef(true); + } + }; + + NativeReadable.prototype.unref = function () { + var ptr = this.$bunNativePtr; + if (ptr === undefined) return; + if (this[refCount]-- === 1) { + ptr.updateRef(false); + } + }; + + NativeReadable.prototype[kEnsureConstructed] = function () { + if (this[constructed]) return; + this[_internalConstruct](this.$bunNativePtr); + }; + + return NativeReadable; +} diff --git a/src/js/internal/streams/nativewritable.ts b/src/js/internal/streams/nativewritable.ts new file mode 100644 index 0000000000..fcc371efd2 --- /dev/null +++ b/src/js/internal/streams/nativewritable.ts @@ -0,0 +1,135 @@ +const Writable = require("internal/streams/writable"); + +const ProcessNextTick = process.nextTick; + +const _native = Symbol("native"); +const _pathOrFdOrSink = Symbol("pathOrFdOrSink"); +const { fileSinkSymbol: _fileSink } = require("internal/shared"); + +function NativeWritable(pathOrFdOrSink, options = {}) { + Writable.$call(this, options); + + this[_native] = true; + + this._construct = NativeWritable_internalConstruct; + this._final = NativeWritable_internalFinal; + this._write = NativeWritablePrototypeWrite; + + this[_pathOrFdOrSink] = pathOrFdOrSink; +} +$toClass(NativeWritable, "NativeWritable", Writable); + +// These are confusingly two different fns for construct which initially were the same thing because +// `_construct` is part of the lifecycle of Writable and is not called lazily, +// so we need to separate our _construct for Writable state and actual construction of the write 
stream +function NativeWritable_internalConstruct(cb) { + this._writableState.constructed = true; + this.constructed = true; + if (typeof cb === "function") ProcessNextTick(cb); + ProcessNextTick(() => { + this.emit("open", this.fd); + this.emit("ready"); + }); +} + +function NativeWritable_internalFinal(cb) { + var sink = this[_fileSink]; + if (sink) { + const end = sink.end(true); + if ($isPromise(end) && cb) { + end.then(() => { + if (cb) cb(); + }, cb); + } + } + if (cb) cb(); +} + +function NativeWritablePrototypeWrite(chunk, encoding, cb) { + var fileSink = this[_fileSink] ?? NativeWritable_lazyConstruct(this); + var result = fileSink.write(chunk); + + if (typeof encoding === "function") { + cb = encoding; + } + + if ($isPromise(result)) { + // var writePromises = this.#writePromises; + // var i = writePromises.length; + // writePromises[i] = result; + result + .then(result => { + this.emit("drain"); + if (cb) { + cb(null, result); + } + }) + .catch( + cb + ? err => { + cb(err); + } + : err => { + this.emit("error", err); + }, + ); + return false; + } + + // TODO: Should we just have a calculation based on encoding and length of chunk? + if (cb) cb(null, chunk.byteLength); + return true; +} + +function NativeWritable_lazyConstruct(stream) { + // TODO: Turn this check into check for instanceof FileSink + var sink = stream[_pathOrFdOrSink]; + if (typeof sink === "object") { + if (typeof sink.write === "function") { + return (stream[_fileSink] = sink); + } else { + throw new Error("Invalid FileSink"); + } + } else { + return (stream[_fileSink] = Bun.file(sink).writer()); + } +} + +const WritablePrototypeEnd = Writable.prototype.end; +NativeWritable.prototype.end = function end(chunk, encoding, cb, native) { + return WritablePrototypeEnd.$call(this, chunk, encoding, cb, native ?? 
this[_native]); +}; + +NativeWritable.prototype._destroy = function (error, cb) { + const w = this._writableState; + const r = this._readableState; + + if (w) { + w.destroyed = true; + w.closeEmitted = true; + } + if (r) { + r.destroyed = true; + r.closeEmitted = true; + } + + if (typeof cb === "function") cb(error); + + if (w?.closeEmitted || r?.closeEmitted) { + this.emit("close"); + } +}; + +NativeWritable.prototype.ref = function ref() { + const sink = (this[_fileSink] ||= NativeWritable_lazyConstruct(this)); + sink.ref(); + return this; +}; + +NativeWritable.prototype.unref = function unref() { + const sink = (this[_fileSink] ||= NativeWritable_lazyConstruct(this)); + sink.unref(); + return this; +}; + +export default NativeWritable; diff --git a/src/js/internal/streams/operators.ts b/src/js/internal/streams/operators.ts new file mode 100644 index 0000000000..c2ae0b0744 --- /dev/null +++ b/src/js/internal/streams/operators.ts @@ -0,0 +1,410 @@ +"use strict"; + +const { validateAbortSignal, validateInteger, validateObject } = require("internal/validators"); +const { kWeakHandler, kResistStopPropagation } = require("internal/shared"); +const { finished } = require("internal/streams/end-of-stream"); +const staticCompose = require("internal/streams/compose"); +const { addAbortSignalNoValidate } = require("internal/streams/add-abort-signal"); +const { isWritable, isNodeStream } = require("internal/streams/utils"); + +const MathFloor = Math.floor; +const PromiseResolve = Promise.resolve.bind(Promise); +const PromiseReject = Promise.reject.bind(Promise); +const PromisePrototypeThen = Promise.prototype.then; +const ArrayPrototypePush = Array.prototype.push; +const NumberIsNaN = Number.isNaN; +const ObjectDefineProperty = Object.defineProperty; + +const kEmpty = Symbol("kEmpty"); +const kEof = Symbol("kEof"); + +function compose(stream, options) { + if (options != null) { + validateObject(options, "options"); + } + if (options?.signal != null) { + 
validateAbortSignal(options.signal, "options.signal"); + } + + if (isNodeStream(stream) && !isWritable(stream)) { + throw $ERR_INVALID_ARG_VALUE("stream", stream, "must be writable"); + } + + const composedStream = staticCompose(this, stream); + + if (options?.signal) { + // Not validating as we already validated before + addAbortSignalNoValidate(options.signal, composedStream); + } + + return composedStream; +} + +function map(fn, options) { + if (typeof fn !== "function") { + throw $ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + if (options != null) { + validateObject(options, "options"); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, "options.signal"); + } + + let concurrency = 1; + if (options?.concurrency != null) { + concurrency = MathFloor(options.concurrency); + } + + let highWaterMark = concurrency - 1; + if (options?.highWaterMark != null) { + highWaterMark = MathFloor(options.highWaterMark); + } + + validateInteger(concurrency, "options.concurrency", 1); + validateInteger(highWaterMark, "options.highWaterMark", 0); + + highWaterMark += concurrency; + + return async function* map() { + const signal = AbortSignal.any([options?.signal].filter(Boolean)); + const stream = this; + const queue: (Promise | typeof kEof)[] = []; + const signalOpt = { signal }; + + let next; + let resume; + let done = false; + let cnt = 0; + + function onCatch() { + done = true; + afterItemProcessed(); + } + + function afterItemProcessed() { + cnt -= 1; + maybeResume(); + } + + function maybeResume() { + if (resume && !done && cnt < concurrency && queue.length < highWaterMark) { + resume(); + resume = null; + } + } + + async function pump() { + try { + for await (let val of stream) { + if (done) { + return; + } + + if (signal.aborted) { + throw $makeAbortError(); + } + + try { + val = fn(val, signalOpt); + + if (val === kEmpty) { + continue; + } + + val = PromiseResolve(val); + } catch (err) { + val = PromiseReject(err); + } + + cnt 
+= 1; + + PromisePrototypeThen.$call(val, afterItemProcessed, onCatch); + + queue.push(val); + if (next) { + next(); + next = null; + } + + if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) { + await new Promise(resolve => { + resume = resolve; + }); + } + } + queue.push(kEof); + } catch (err) { + const val = PromiseReject(err); + PromisePrototypeThen.$call(val, afterItemProcessed, onCatch); + queue.push(val); + } finally { + done = true; + if (next) { + next(); + next = null; + } + } + } + + pump(); + + try { + while (true) { + while (queue.length > 0) { + const val = await queue[0]; + + if (val === kEof) { + return; + } + + if (signal.aborted) { + throw $makeAbortError(); + } + + if (val !== kEmpty) { + yield val; + } + + queue.shift(); + maybeResume(); + } + + await new Promise(resolve => { + next = resolve; + }); + } + } finally { + done = true; + if (resume) { + resume(); + resume = null; + } + } + }.$call(this); +} + +async function some(fn, options = undefined) { + for await (const unused of filter.$call(this, fn, options)) { + return true; + } + return false; +} + +async function every(fn, options = undefined) { + if (typeof fn !== "function") { + throw $ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + // https://en.wikipedia.org/wiki/De_Morgan's_laws + return !(await some.$call( + this, + async (...args) => { + return !(await fn(...args)); + }, + options, + )); +} + +async function find(fn, options) { + for await (const result of filter.$call(this, fn, options)) { + return result; + } + return undefined; +} + +async function forEach(fn, options) { + if (typeof fn !== "function") { + throw $ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + async function forEachFn(value, options) { + await fn(value, options); + return kEmpty; + } + // eslint-disable-next-line no-unused-vars + for await (const unused of map.$call(this, forEachFn, options)); +} + +function filter(fn, options) { + if (typeof fn !== 
"function") { + throw $ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); + } + async function filterFn(value, options) { + if (await fn(value, options)) { + return value; + } + return kEmpty; + } + return map.$call(this, filterFn, options); +} + +// Specific to provide better error to reduce since the argument is only +// missing if the stream has no items in it - but the code is still appropriate +class ReduceAwareErrMissingArgs extends TypeError { + constructor() { + super("reduce"); + this.code = "ERR_MISSING_ARGS"; + this.message = "Reduce of an empty stream requires an initial value"; + } +} + +async function reduce(reducer, initialValue, options) { + if (typeof reducer !== "function") { + throw $ERR_INVALID_ARG_TYPE("reducer", ["Function", "AsyncFunction"], reducer); + } + if (options != null) { + validateObject(options, "options"); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, "options.signal"); + } + + let hasInitialValue = arguments.length > 1; + if (options?.signal?.aborted) { + const err = $makeAbortError(undefined, { cause: options.signal.reason }); + this.once("error", () => {}); // The error is already propagated + await finished(this.destroy(err)); + throw err; + } + const ac = new AbortController(); + const signal = ac.signal; + if (options?.signal) { + const opts = { once: true, [kWeakHandler]: this, [kResistStopPropagation]: true }; + options.signal.addEventListener("abort", () => ac.abort(), opts); + } + let gotAnyItemFromStream = false; + try { + for await (const value of this) { + gotAnyItemFromStream = true; + if (options?.signal?.aborted) { + throw $makeAbortError(); + } + if (!hasInitialValue) { + initialValue = value; + hasInitialValue = true; + } else { + initialValue = await reducer(initialValue, value, { signal }); + } + } + if (!gotAnyItemFromStream && !hasInitialValue) { + throw new ReduceAwareErrMissingArgs(); + } + } finally { + ac.abort(); + } + return initialValue; +} + +async function 
toArray(options) { + if (options != null) { + validateObject(options, "options"); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, "options.signal"); + } + + const result = []; + for await (const val of this) { + if (options?.signal?.aborted) { + throw $makeAbortError(undefined, { cause: options.signal.reason }); + } + ArrayPrototypePush.$call(result, val); + } + return result; +} + +function flatMap(fn, options) { + const values = map.$call(this, fn, options); + async function* flatMapInner() { + for await (const val of values) { + yield* val; + } + } + return flatMapInner.$call(this); +} + +function toIntegerOrInfinity(number) { + // We coerce here to align with the spec + // https://github.com/tc39/proposal-iterator-helpers/issues/169 + number = Number(number); + if (NumberIsNaN(number)) { + return 0; + } + if (number < 0) { + throw $ERR_OUT_OF_RANGE("number", ">= 0", number); + } + return number; +} + +function drop(number, options = undefined) { + if (options != null) { + validateObject(options, "options"); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, "options.signal"); + } + + number = toIntegerOrInfinity(number); + return async function* drop() { + if (options?.signal?.aborted) { + throw $makeAbortError(); + } + for await (const val of this) { + if (options?.signal?.aborted) { + throw $makeAbortError(); + } + if (number-- <= 0) { + yield val; + } + } + }.$call(this); +} +ObjectDefineProperty(drop, "length", { value: 1 }); + +function take(number, options?: { signal: AbortSignal }) { + if (options != null) { + validateObject(options, "options"); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, "options.signal"); + } + + number = toIntegerOrInfinity(number); + return async function* take() { + if (options?.signal?.aborted) { + throw $makeAbortError(); + } + for await (const val of this) { + if (options?.signal?.aborted) { + throw $makeAbortError(); + } + if (number-- > 0) { + 
yield val; + } + + // Don't get another item from iterator in case we reached the end + if (number <= 0) { + return; + } + } + }.$call(this); +} +ObjectDefineProperty(take, "length", { value: 1 }); + +export default { + streamReturningOperators: { + drop, + filter, + flatMap, + map, + take, + compose, + }, + promiseReturningOperators: { + every, + forEach, + reduce, + toArray, + some, + find, + }, +}; diff --git a/src/js/internal/streams/passthrough.ts b/src/js/internal/streams/passthrough.ts new file mode 100644 index 0000000000..7fb5fd901d --- /dev/null +++ b/src/js/internal/streams/passthrough.ts @@ -0,0 +1,20 @@ +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +"use strict"; + +const Transform = require("internal/streams/transform"); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return Reflect.construct(PassThrough, [options]); + + Transform.$call(this, options); +} +$toClass(PassThrough, "PassThrough", Transform); + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +export default PassThrough; diff --git a/src/js/internal/streams/pipeline.ts b/src/js/internal/streams/pipeline.ts new file mode 100644 index 0000000000..c771436558 --- /dev/null +++ b/src/js/internal/streams/pipeline.ts @@ -0,0 +1,448 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). 
+ +"use strict"; + +const eos = require("internal/streams/end-of-stream"); +const { once } = require("internal/shared"); +const destroyImpl = require("internal/streams/destroy"); +const Duplex = require("internal/streams/duplex"); +const { aggregateTwoErrors } = require("internal/errors"); +const { validateFunction, validateAbortSignal } = require("internal/validators"); +const { + isIterable, + isReadable, + isReadableNodeStream, + isNodeStream, + isTransformStream, + isWebStream, + isReadableStream, + isReadableFinished, +} = require("internal/streams/utils"); + +const SymbolAsyncIterator = Symbol.asyncIterator; +const ArrayIsArray = Array.isArray; +const SymbolDispose = Symbol.dispose; + +let PassThrough; +let Readable; +let addAbortListener; + +function destroyer(stream, reading, writing) { + let finished = false; + stream.on("close", () => { + finished = true; + }); + + const cleanup = eos(stream, { readable: reading, writable: writing }, err => { + finished = !err; + }); + + return { + destroy: err => { + if (finished) return; + finished = true; + destroyImpl.destroyer(stream, err || $ERR_STREAM_DESTROYED("pipe")); + }, + cleanup, + }; +} + +function popCallback(streams) { + // Streams should never be an empty array. It should always contain at least + // a single stream. Therefore optimize for the average case instead of + // checking for length === 0 as well. + validateFunction(streams[streams.length - 1], "streams[stream.length - 1]"); + return streams.pop(); +} + +function makeAsyncIterable(val) { + if (isIterable(val)) { + return val; + } else if (isReadableNodeStream(val)) { + // Legacy streams are not Iterable. 
+ return fromReadable(val); + } + throw $ERR_INVALID_ARG_TYPE("val", ["Readable", "Iterable", "AsyncIterable"], val); +} + +async function* fromReadable(val) { + Readable ??= require("internal/streams/readable"); + yield* Readable.prototype[SymbolAsyncIterator].$call(val); +} + +async function pumpToNode(iterable, writable, finish, { end }) { + let error; + let onresolve: (() => void) | null = null; + + const resume = err => { + if (err) { + error = err; + } + + if (onresolve) { + const callback = onresolve; + onresolve = null; + callback(); + } + }; + + const wait = () => + new Promise((resolve, reject) => { + if (error) { + reject(error); + } else { + onresolve = () => { + if (error) { + reject(error); + } else { + resolve(); + } + }; + } + }); + + writable.on("drain", resume); + const cleanup = eos(writable, { readable: false }, resume); + + try { + if (writable.writableNeedDrain) { + await wait(); + } + + for await (const chunk of iterable) { + if (!writable.write(chunk)) { + await wait(); + } + } + + if (end) { + writable.end(); + await wait(); + } + + finish(); + } catch (err) { + finish(error !== err ? aggregateTwoErrors(error, err) : err); + } finally { + cleanup(); + writable.off("drain", resume); + } +} + +async function pumpToWeb(readable, writable, finish, { end }) { + if (isTransformStream(writable)) { + writable = writable.writable; + } + // https://streams.spec.whatwg.org/#example-manual-write-with-backpressure + const writer = writable.getWriter(); + try { + for await (const chunk of readable) { + await writer.ready; + writer.write(chunk).catch(() => {}); + } + + await writer.ready; + + if (end) { + await writer.close(); + } + + finish(); + } catch (err) { + try { + await writer.abort(err); + finish(err); + } catch (err) { + finish(err); + } + } +} + +function pipeline(...streams) { + return pipelineImpl(streams, once(popCallback(streams))); +} + +function pipelineImpl(streams, callback, opts?) 
{ + if (streams.length === 1 && ArrayIsArray(streams[0])) { + streams = streams[0]; + } + + if (streams.length < 2) { + throw $ERR_MISSING_ARGS("streams"); + } + + const ac = new AbortController(); + const signal = ac.signal; + const outerSignal = opts?.signal; + + // Need to cleanup event listeners if last stream is readable + // https://github.com/nodejs/node/issues/35452 + const lastStreamCleanup: (() => void)[] = []; + + validateAbortSignal(outerSignal, "options.signal"); + + function abort() { + finishImpl($makeAbortError(undefined, { cause: outerSignal?.reason })); + } + + addAbortListener ??= require("internal/abort_listener").addAbortListener; + let disposable; + if (outerSignal) { + disposable = addAbortListener(outerSignal, abort); + } + + let error; + let value; + const destroys: ((err: Error) => void)[] = []; + + let finishCount = 0; + + function finish(err) { + finishImpl(err, --finishCount === 0); + } + + function finishOnlyHandleError(err) { + finishImpl(err, false); + } + + function finishImpl(err, final?) { + if (err && (!error || error.code === "ERR_STREAM_PREMATURE_CLOSE")) { + error = err; + } + + if (!error && !final) { + return; + } + + while (destroys.length) { + destroys.shift()?.(error); + } + + disposable?.[SymbolDispose](); + ac.abort(); + + if (final) { + if (!error) { + lastStreamCleanup.forEach(fn => fn()); + } + process.nextTick(callback, error, value); + } + } + + let ret; + for (let i = 0; i < streams.length; i++) { + const stream = streams[i]; + const reading = i < streams.length - 1; + const writing = i > 0; + const next = i + 1 < streams.length ? 
streams[i + 1] : null; + const end = reading || opts?.end !== false; + const isLastStream = i === streams.length - 1; + + if (isNodeStream(stream)) { + if (next !== null && (next?.closed || next?.destroyed)) { + throw $ERR_STREAM_UNABLE_TO_PIPE(); + } + + if (end) { + const { destroy, cleanup } = destroyer(stream, reading, writing); + destroys.push(destroy); + + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup); + } + } + + // Catch stream errors that occur after pipe/pump has completed. + function onError(err) { + if (err && err.name !== "AbortError" && err.code !== "ERR_STREAM_PREMATURE_CLOSE") { + finishOnlyHandleError(err); + } + } + stream.on("error", onError); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(() => { + stream.removeListener("error", onError); + }); + } + } + + if (i === 0) { + if (typeof stream === "function") { + ret = stream({ signal }); + if (!isIterable(ret)) { + throw $ERR_INVALID_RETURN_VALUE("Iterable, AsyncIterable or Stream", "source", ret); + } + } else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) { + ret = stream; + } else { + ret = Duplex.from(stream); + } + } else if (typeof stream === "function") { + if (isTransformStream(ret)) { + ret = makeAsyncIterable(ret?.readable); + } else { + ret = makeAsyncIterable(ret); + } + ret = stream(ret, { signal }); + + if (reading) { + if (!isIterable(ret, true)) { + throw $ERR_INVALID_RETURN_VALUE("AsyncIterable", `transform[${i - 1}]`, ret); + } + } else { + PassThrough ??= require("internal/streams/passthrough"); + + // If the last argument to pipeline is not a stream + // we must create a proxy stream so that pipeline(...) + // always returns a stream which can be further + // composed through `.pipe(stream)`. + + const pt = new PassThrough({ + objectMode: true, + }); + + // Handle Promises/A+ spec, `then` could be a getter that throws on + // second use. 
+ const then = ret?.then; + if (typeof then === "function") { + finishCount++; + then.$call( + ret, + val => { + value = val; + if (val != null) { + pt.write(val); + } + if (end) { + pt.end(); + } + process.nextTick(finish); + }, + err => { + pt.destroy(err); + process.nextTick(finish, err); + }, + ); + } else if (isIterable(ret, true)) { + finishCount++; + pumpToNode(ret, pt, finish, { end }); + } else if (isReadableStream(ret) || isTransformStream(ret)) { + const toRead = ret.readable || ret; + finishCount++; + pumpToNode(toRead, pt, finish, { end }); + } else { + throw $ERR_INVALID_RETURN_VALUE("AsyncIterable or Promise", "destination", ret); + } + + ret = pt; + + const { destroy, cleanup } = destroyer(ret, false, true); + destroys.push(destroy); + if (isLastStream) { + lastStreamCleanup.push(cleanup); + } + } + } else if (isNodeStream(stream)) { + if (isReadableNodeStream(ret)) { + finishCount += 2; + const cleanup = pipe(ret, stream, finish, finishOnlyHandleError, { end }); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup); + } + } else if (isTransformStream(ret) || isReadableStream(ret)) { + const toRead = ret.readable || ret; + finishCount++; + pumpToNode(toRead, stream, finish, { end }); + } else if (isIterable(ret)) { + finishCount++; + pumpToNode(ret, stream, finish, { end }); + } else { + throw $ERR_INVALID_ARG_TYPE( + "val", + ["Readable", "Iterable", "AsyncIterable", "ReadableStream", "TransformStream"], + ret, + ); + } + ret = stream; + } else if (isWebStream(stream)) { + if (isReadableNodeStream(ret)) { + finishCount++; + pumpToWeb(makeAsyncIterable(ret), stream, finish, { end }); + } else if (isReadableStream(ret) || isIterable(ret)) { + finishCount++; + pumpToWeb(ret, stream, finish, { end }); + } else if (isTransformStream(ret)) { + finishCount++; + pumpToWeb(ret.readable, stream, finish, { end }); + } else { + throw $ERR_INVALID_ARG_TYPE( + "val", + ["Readable", "Iterable", "AsyncIterable", "ReadableStream", 
"TransformStream"], + ret, + ); + } + ret = stream; + } else { + ret = Duplex.from(stream); + } + } + + if (signal?.aborted || outerSignal?.aborted) { + process.nextTick(abort); + } + + return ret; +} + +function pipe(src, dst, finish, finishOnlyHandleError, { end }) { + let ended = false; + dst.on("close", () => { + if (!ended) { + // Finish if the destination closes before the source has completed. + finishOnlyHandleError($ERR_STREAM_PREMATURE_CLOSE()); + } + }); + + src.pipe(dst, { end: false }); // If end is true we already will have a listener to end dst. + + if (end) { + // Compat. Before node v10.12.0 stdio used to throw an error so + // pipe() did/does not end() stdio destinations. + // Now they allow it but "secretly" don't close the underlying fd. + + function endFn() { + ended = true; + dst.end(); + } + + if (isReadableFinished(src)) { + // End the destination if the source has already ended. + process.nextTick(endFn); + } else { + src.once("end", endFn); + } + } else { + finish(); + } + + eos(src, { readable: true, writable: false }, err => { + const rState = src._readableState; + if (err && err.code === "ERR_STREAM_PREMATURE_CLOSE" && rState?.ended && !rState.errored && !rState.errorEmitted) { + // Some readable streams will emit 'close' before 'end'. However, since + // this is on the readable side 'end' should still be emitted if the + // stream has been ended and no error emitted. This should be allowed in + // favor of backwards compatibility. Since the stream is piped to a + // destination this should not result in any observable difference. + // We don't need to check if this is a writable premature close since + // eos will only fail with premature close on the reading side for + // duplex streams. 
+ src.once("end", finish).once("error", finish); + } else { + finish(err); + } + }); + return eos(dst, { readable: false, writable: true }, finish); +} + +export default { pipelineImpl, pipeline }; diff --git a/src/js/internal/streams/readable.ts b/src/js/internal/streams/readable.ts new file mode 100644 index 0000000000..c31c05fb9e --- /dev/null +++ b/src/js/internal/streams/readable.ts @@ -0,0 +1,1650 @@ +"use strict"; + +const EE = require("node:events"); +const { Stream, prependListener } = require("internal/streams/legacy"); +const { Buffer } = require("node:buffer"); +const { addAbortSignal } = require("internal/streams/add-abort-signal"); +const eos = require("internal/streams/end-of-stream"); +const destroyImpl = require("internal/streams/destroy"); +const { getHighWaterMark, getDefaultHighWaterMark } = require("internal/streams/state"); +const { + kOnConstructed, + kState, + // bitfields + kObjectMode, + kErrorEmitted, + kAutoDestroy, + kEmitClose, + kDestroyed, + kClosed, + kCloseEmitted, + kErrored, + kConstructed, +} = require("internal/streams/utils"); +const { aggregateTwoErrors } = require("internal/errors"); +const { validateObject } = require("internal/validators"); +const { StringDecoder } = require("node:string_decoder"); +const from = require("internal/streams/from"); +const { SafeSet } = require("internal/primordials"); +const { kAutoDestroyed } = require("internal/shared"); + +const ObjectDefineProperties = Object.defineProperties; +const SymbolAsyncDispose = Symbol.asyncDispose; +const NumberIsNaN = Number.isNaN; +const NumberIsInteger = Number.isInteger; +const NumberParseInt = Number.parseInt; +const ArrayPrototypeIndexOf = Array.prototype.indexOf; +const ObjectKeys = Object.keys; +const SymbolAsyncIterator = Symbol.asyncIterator; +const TypedArrayPrototypeSet = Uint8Array.prototype.set; + +const { errorOrDestroy } = destroyImpl; +const nop = () => {}; + +const kErroredValue = Symbol("kErroredValue"); +const kDefaultEncodingValue = 
Symbol("kDefaultEncodingValue"); +const kDecoderValue = Symbol("kDecoderValue"); +const kEncodingValue = Symbol("kEncodingValue"); + +const kEnded = 1 << 9; +const kEndEmitted = 1 << 10; +const kReading = 1 << 11; +const kSync = 1 << 12; +const kNeedReadable = 1 << 13; +const kEmittedReadable = 1 << 14; +const kReadableListening = 1 << 15; +const kResumeScheduled = 1 << 16; +const kMultiAwaitDrain = 1 << 17; +const kReadingMore = 1 << 18; +const kDataEmitted = 1 << 19; +const kDefaultUTF8Encoding = 1 << 20; +const kDecoder = 1 << 21; +const kEncoding = 1 << 22; +const kHasFlowing = 1 << 23; +const kFlowing = 1 << 24; +const kHasPaused = 1 << 25; +const kPaused = 1 << 26; +const kDataListening = 1 << 27; + +// TODO(benjamingr) it is likely slower to do it this way than with free functions +function makeBitMapDescriptor(bit) { + return { + enumerable: false, + get() { + return (this[kState] & bit) !== 0; + }, + set(value) { + if (value) this[kState] |= bit; + else this[kState] &= ~bit; + }, + }; +} + +function ReadableState(options, stream, isDuplex) { + // Bit map field to store ReadableState more efficiently with 1 bit per field + // instead of a V8 slot per field. + this[kState] = kEmitClose | kAutoDestroy | kConstructed | kSync; + + // Object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away. + if (options?.objectMode) this[kState] |= kObjectMode; + + if (isDuplex && options?.readableObjectMode) this[kState] |= kObjectMode; + + // The point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = options + ? getHighWaterMark(this, options, "readableHighWaterMark", isDuplex) + : getDefaultHighWaterMark(false); + + this.buffer = []; + this.bufferIndex = 0; + this.length = 0; + this.pipes = []; + + // Should close be emitted on destroy. Defaults to true. 
+ if (options && options.emitClose === false) this[kState] &= ~kEmitClose; + + // Should .destroy() be called after 'end' (and potentially 'finish'). + if (options && options.autoDestroy === false) this[kState] &= ~kAutoDestroy; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + const defaultEncoding = options?.defaultEncoding; + if (defaultEncoding == null || defaultEncoding === "utf8" || defaultEncoding === "utf-8") { + this[kState] |= kDefaultUTF8Encoding; + } else if (Buffer.isEncoding(defaultEncoding)) { + this.defaultEncoding = defaultEncoding; + } else { + throw $ERR_UNKNOWN_ENCODING(defaultEncoding); + } + + // Ref the piped dest which we need a drain event on it + // type: null | Writable | Set. + this.awaitDrainWriters = null; + + if (options?.encoding) { + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +ReadableState.prototype = {}; +ObjectDefineProperties(ReadableState.prototype, { + objectMode: makeBitMapDescriptor(kObjectMode), + ended: makeBitMapDescriptor(kEnded), + endEmitted: makeBitMapDescriptor(kEndEmitted), + reading: makeBitMapDescriptor(kReading), + // Stream is still being constructed and cannot be + // destroyed until construction finished or failed. + // Async construction is opt in, therefore we start as + // constructed. + constructed: makeBitMapDescriptor(kConstructed), + // A flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + sync: makeBitMapDescriptor(kSync), + // Whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. 
+ needReadable: makeBitMapDescriptor(kNeedReadable), + emittedReadable: makeBitMapDescriptor(kEmittedReadable), + readableListening: makeBitMapDescriptor(kReadableListening), + resumeScheduled: makeBitMapDescriptor(kResumeScheduled), + // True if the error was already emitted and should not be thrown again. + errorEmitted: makeBitMapDescriptor(kErrorEmitted), + emitClose: makeBitMapDescriptor(kEmitClose), + autoDestroy: makeBitMapDescriptor(kAutoDestroy), + // Has it been destroyed. + destroyed: makeBitMapDescriptor(kDestroyed), + // Indicates whether the stream has finished destroying. + closed: makeBitMapDescriptor(kClosed), + // True if close has been emitted or would have been emitted + // depending on emitClose. + closeEmitted: makeBitMapDescriptor(kCloseEmitted), + multiAwaitDrain: makeBitMapDescriptor(kMultiAwaitDrain), + // If true, a maybeReadMore has been scheduled. + readingMore: makeBitMapDescriptor(kReadingMore), + dataEmitted: makeBitMapDescriptor(kDataEmitted), + + // Indicates whether the stream has errored. When true no further + // _read calls, 'data' or 'readable' events should occur. This is needed + // since when autoDestroy is disabled we need a way to tell whether the + // stream has failed. + errored: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kErrored) !== 0 ? this[kErroredValue] : null; + }, + set(value) { + if (value) { + this[kErroredValue] = value; + this[kState] |= kErrored; + } else { + this[kState] &= ~kErrored; + } + }, + }, + + defaultEncoding: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kDefaultUTF8Encoding) !== 0 ? 
"utf8" : this[kDefaultEncodingValue]; + }, + set(value) { + if (value === "utf8" || value === "utf-8") { + this[kState] |= kDefaultUTF8Encoding; + } else { + this[kState] &= ~kDefaultUTF8Encoding; + this[kDefaultEncodingValue] = value; + } + }, + }, + + decoder: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kDecoder) !== 0 ? this[kDecoderValue] : null; + }, + set(value) { + if (value) { + this[kDecoderValue] = value; + this[kState] |= kDecoder; + } else { + this[kState] &= ~kDecoder; + } + }, + }, + + encoding: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kEncoding) !== 0 ? this[kEncodingValue] : null; + }, + set(value) { + if (value) { + this[kEncodingValue] = value; + this[kState] |= kEncoding; + } else { + this[kState] &= ~kEncoding; + } + }, + }, + + flowing: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kHasFlowing) !== 0 ? (this[kState] & kFlowing) !== 0 : null; + }, + set(value) { + if (value == null) { + this[kState] &= ~(kHasFlowing | kFlowing); + } else if (value) { + this[kState] |= kHasFlowing | kFlowing; + } else { + this[kState] |= kHasFlowing; + this[kState] &= ~kFlowing; + } + }, + }, +}); + +ReadableState.prototype[kOnConstructed] = function onConstructed(stream) { + if ((this[kState] & kNeedReadable) !== 0) { + maybeReadMore(stream, this); + } +}; + +function Readable(options) { + if (!(this instanceof Readable)) return Reflect.construct(Readable, [options]); + + this._events ??= { + close: undefined, + error: undefined, + data: undefined, + end: undefined, + readable: undefined, + // Skip uncommon events... 
+ // pause: undefined, + // resume: undefined, + // pipe: undefined, + // unpipe: undefined, + // [destroyImpl.kConstruct]: undefined, + // [destroyImpl.kDestroy]: undefined, + }; + + this._readableState = new ReadableState(options, this, false); + + if (options) { + if (typeof options.read === "function") this._read = options.read; + + if (typeof options.destroy === "function") this._destroy = options.destroy; + + if (typeof options.construct === "function") this._construct = options.construct; + + if (options.signal) addAbortSignal(options.signal, this); + } + + Stream.$call(this, options); + + if (this._construct != null) { + destroyImpl.construct(this, () => { + this._readableState[kOnConstructed](this); + }); + } +} +$toClass(Readable, "Readable", Stream); + +Readable.ReadableState = ReadableState; + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +Readable.prototype[EE.captureRejectionSymbol] = function (err) { + this.destroy(err); +}; + +Readable.prototype[SymbolAsyncDispose] = function () { + let error; + if (!this.destroyed) { + error = this.readableEnded ? null : $makeAbortError(); + this.destroy(error); + } + return new Promise((resolve, reject) => eos(this, err => (err && err !== error ? reject(err) : resolve(null)))); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. +Readable.prototype.push = function (chunk, encoding) { + $debug("push", chunk); + + const state = this._readableState; + return (state[kState] & kObjectMode) === 0 + ? readableAddChunkPushByteMode(this, state, chunk, encoding) + : readableAddChunkPushObjectMode(this, state, chunk, encoding); +}; + +// Unshift should *always* be something directly out of read(). 
+Readable.prototype.unshift = function (chunk, encoding) { + $debug("unshift", chunk); + const state = this._readableState; + return (state[kState] & kObjectMode) === 0 + ? readableAddChunkUnshiftByteMode(this, state, chunk, encoding) + : readableAddChunkUnshiftObjectMode(this, state, chunk); +}; + +function readableAddChunkUnshiftByteMode(stream, state, chunk, encoding) { + if (chunk === null) { + state[kState] &= ~kReading; + onEofChunk(stream, state); + + return false; + } + + if (typeof chunk === "string") { + encoding ||= state.defaultEncoding; + if (state.encoding !== encoding) { + if (state.encoding) { + // When unshifting, if state.encoding is set, we have to save + // the string in the BufferList with the state encoding. + chunk = Buffer.from(chunk, encoding).toString(state.encoding); + } else { + chunk = Buffer.from(chunk, encoding); + } + } + } else if (Stream._isArrayBufferView(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk); + } else if (chunk !== undefined && !(chunk instanceof Buffer)) { + errorOrDestroy(stream, $ERR_INVALID_ARG_TYPE("chunk", ["string", "Buffer", "TypedArray", "DataView"], chunk)); + return false; + } + + if (!(chunk && chunk.length > 0)) { + return canPushMore(state); + } + + return readableAddChunkUnshiftValue(stream, state, chunk); +} + +function readableAddChunkUnshiftObjectMode(stream, state, chunk) { + if (chunk === null) { + state[kState] &= ~kReading; + onEofChunk(stream, state); + + return false; + } + + return readableAddChunkUnshiftValue(stream, state, chunk); +} + +function readableAddChunkUnshiftValue(stream, state, chunk) { + if ((state[kState] & kEndEmitted) !== 0) errorOrDestroy(stream, $ERR_STREAM_UNSHIFT_AFTER_END_EVENT()); + else if ((state[kState] & (kDestroyed | kErrored)) !== 0) return false; + else addChunk(stream, state, chunk, true); + + return canPushMore(state); +} + +function readableAddChunkPushByteMode(stream, state, chunk, encoding) { + if (chunk === null) { + state[kState] &= ~kReading; + 
onEofChunk(stream, state); + return false; + } + + if (typeof chunk === "string") { + encoding ||= state.defaultEncoding; + if (state.encoding !== encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ""; + } + } else if (chunk instanceof Buffer) { + encoding = ""; + } else if (Stream._isArrayBufferView(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk); + encoding = ""; + } else if (chunk !== undefined) { + errorOrDestroy(stream, $ERR_INVALID_ARG_TYPE("chunk", ["string", "Buffer", "TypedArray", "DataView"], chunk)); + return false; + } + + if (!chunk || chunk.length <= 0) { + state[kState] &= ~kReading; + maybeReadMore(stream, state); + + return canPushMore(state); + } + + if ((state[kState] & kEnded) !== 0) { + errorOrDestroy(stream, $ERR_STREAM_PUSH_AFTER_EOF()); + return false; + } + + if ((state[kState] & (kDestroyed | kErrored)) !== 0) { + return false; + } + + state[kState] &= ~kReading; + if ((state[kState] & kDecoder) !== 0 && !encoding) { + chunk = state[kDecoderValue].write(chunk); + if (chunk.length === 0) { + maybeReadMore(stream, state); + return canPushMore(state); + } + } + + addChunk(stream, state, chunk, false); + return canPushMore(state); +} + +function readableAddChunkPushObjectMode(stream, state, chunk, encoding) { + if (chunk === null) { + state[kState] &= ~kReading; + onEofChunk(stream, state); + return false; + } + + if ((state[kState] & kEnded) !== 0) { + errorOrDestroy(stream, $ERR_STREAM_PUSH_AFTER_EOF()); + return false; + } + + if ((state[kState] & (kDestroyed | kErrored)) !== 0) { + return false; + } + + state[kState] &= ~kReading; + + if ((state[kState] & kDecoder) !== 0 && !encoding) { + chunk = state[kDecoderValue].write(chunk); + } + + addChunk(stream, state, chunk, false); + return canPushMore(state); +} + +function canPushMore(state) { + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. 
+ // This is to work around cases where hwm=0, such as the repl. + return (state[kState] & kEnded) === 0 && (state.length < state.highWaterMark || state.length === 0); +} + +function addChunk(stream, state, chunk, addToFront) { + if ((state[kState] & (kFlowing | kSync | kDataListening)) === (kFlowing | kDataListening) && state.length === 0) { + // Use the guard to avoid creating `Set()` repeatedly + // when we have multiple pipes. + if ((state[kState] & kMultiAwaitDrain) !== 0) { + state.awaitDrainWriters.clear(); + } else { + state.awaitDrainWriters = null; + } + + state[kState] |= kDataEmitted; + stream.emit("data", chunk); + } else { + // Update the buffer info. + state.length += (state[kState] & kObjectMode) !== 0 ? 1 : chunk.length; + if (addToFront) { + if (state.bufferIndex > 0) { + state.buffer[--state.bufferIndex] = chunk; + } else { + state.buffer.unshift(chunk); // Slow path + } + } else { + state.buffer.push(chunk); + } + + if ((state[kState] & kNeedReadable) !== 0) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +Readable.prototype.isPaused = function () { + const state = this._readableState; + return (state[kState] & kPaused) !== 0 || (state[kState] & (kHasFlowing | kFlowing)) === kHasFlowing; +}; + +// Backwards compatibility. +Readable.prototype.setEncoding = function (enc) { + const state = this._readableState; + + const decoder = new StringDecoder(enc); + state.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8. + state.encoding = state.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + let content = ""; + for (const data of state.buffer.slice(state.bufferIndex)) { + content += decoder.write(data); + } + state.buffer.length = 0; + state.bufferIndex = 0; + + if (content !== "") state.buffer.push(content); + state.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB. 
+const MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n > MAX_HWM) { + throw $ERR_OUT_OF_RANGE("size", "<= 1GiB", n); + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts. + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || (state.length === 0 && (state[kState] & kEnded) !== 0)) return 0; + if ((state[kState] & kObjectMode) !== 0) return 1; + if (NumberIsNaN(n)) { + // Only flow one buffer at a time. + if ((state[kState] & kFlowing) !== 0 && state.length) return state.buffer[state.bufferIndex].length; + return state.length; + } + if (n <= state.length) return n; + return (state[kState] & kEnded) !== 0 ? state.length : 0; +} + +// You can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + $debug("read", n); + // Same as parseInt(undefined, 10), however V8 7.3 performance regressed + // in this scenario, so we are doing it manually. + if (n === undefined) { + n = NaN; + } else if (!NumberIsInteger(n)) { + n = NumberParseInt(n, 10); + } + const state = this._readableState; + const nOrig = n; + + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + + if (n !== 0) state[kState] &= ~kEmittedReadable; + + // If we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if ( + n === 0 && + (state[kState] & kNeedReadable) !== 0 && + ((state.highWaterMark !== 0 ? 
state.length >= state.highWaterMark : state.length > 0) || + (state[kState] & kEnded) !== 0) + ) { + $debug("read: emitReadable"); + if (state.length === 0 && (state[kState] & kEnded) !== 0) endReadable(this); + else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // If we've ended, and we're now clear, then finish it up. + if (n === 0 && (state[kState] & kEnded) !== 0) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + let doRead = (state[kState] & kNeedReadable) !== 0; + $debug("need readable", doRead); + + // If we currently have less than the highWaterMark, then also read some. 
+ if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + $debug("length less than watermark", doRead); + } + + // However, if we've ended, then there's no point, if we're already + // reading, then it's unnecessary, if we're constructing we have to wait, + // and if we're destroyed or errored, then it's not allowed, + if ((state[kState] & (kReading | kEnded | kDestroyed | kErrored | kConstructed)) !== kConstructed) { + doRead = false; + $debug("reading, ended or constructing", doRead); + } else if (doRead) { + $debug("do read"); + state[kState] |= kReading | kSync; + // If the length is currently zero, then we *need* a readable event. + if (state.length === 0) state[kState] |= kNeedReadable; + + // Call internal read method + try { + this._read(state.highWaterMark); + } catch (err) { + errorOrDestroy(this, err); + } + state[kState] &= ~kSync; + + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if ((state[kState] & kReading) === 0) n = howMuchToRead(nOrig, state); + } + + let ret; + if (n > 0) ret = fromList(n, state); + else ret = null; + + if (ret === null) { + state[kState] |= state.length <= state.highWaterMark ? kNeedReadable : 0; + n = 0; + } else { + state.length -= n; + if ((state[kState] & kMultiAwaitDrain) !== 0) { + state.awaitDrainWriters.clear(); + } else { + state.awaitDrainWriters = null; + } + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if ((state[kState] & kEnded) === 0) state[kState] |= kNeedReadable; + + // If we tried to read() past the EOF, then emit end on the next tick. 
+ if (nOrig !== n && (state[kState] & kEnded) !== 0) endReadable(this); + } + + if (ret !== null && (state[kState] & (kErrorEmitted | kCloseEmitted)) === 0) { + state[kState] |= kDataEmitted; + this.emit("data", ret); + } + + return ret; +}; + +function onEofChunk(stream, state) { + $debug("onEofChunk"); + if ((state[kState] & kEnded) !== 0) return; + const decoder = (state[kState] & kDecoder) !== 0 ? state[kDecoderValue] : null; + if (decoder) { + const chunk = decoder.end(); + if (chunk?.length) { + state.buffer.push(chunk); + state.length += (state[kState] & kObjectMode) !== 0 ? 1 : chunk.length; + } + } + state[kState] |= kEnded; + + if ((state[kState] & kSync) !== 0) { + // If we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call. + emitReadable(stream); + } else { + // Emit 'readable' now to make sure it gets picked up. + state[kState] &= ~kNeedReadable; + state[kState] |= kEmittedReadable; + // We have to emit readable now that we are EOF. Modules + // in the ecosystem (e.g. dicer) rely on this event being sync. + emitReadable_(stream); + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. 
+function emitReadable(stream) { + const state = stream._readableState; + $debug("emitReadable"); + state[kState] &= ~kNeedReadable; + if ((state[kState] & kEmittedReadable) === 0) { + $debug("emitReadable", (state[kState] & kFlowing) !== 0); + state[kState] |= kEmittedReadable; + process.nextTick(emitReadable_, stream); + } +} + +function emitReadable_(stream) { + const state = stream._readableState; + $debug("emitReadable_"); + if ((state[kState] & (kDestroyed | kErrored)) === 0 && (state.length || (state[kState] & kEnded) !== 0)) { + stream.emit("readable"); + state[kState] &= ~kEmittedReadable; + } + + // The stream needs another readable event if: + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + state[kState] |= + (state[kState] & (kFlowing | kEnded)) === 0 && state.length <= state.highWaterMark ? kNeedReadable : 0; + flow(stream); +} + +// At this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if ((state[kState] & (kReadingMore | kConstructed)) === kConstructed) { + state[kState] |= kReadingMore; + process.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. 
We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while ( + (state[kState] & (kReading | kEnded)) === 0 && + (state.length < state.highWaterMark || ((state[kState] & kFlowing) !== 0 && state.length === 0)) + ) { + const len = state.length; + $debug("maybeReadMore read 0"); + stream.read(0); + if (len === state.length) + // Didn't get any data, stop spinning. + break; + } + state[kState] &= ~kReadingMore; +} + +// Abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + throw $ERR_METHOD_NOT_IMPLEMENTED("_read()"); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + const src = this; + const state = this._readableState; + + if (state.pipes.length === 1) { + if ((state[kState] & kMultiAwaitDrain) === 0) { + state[kState] |= kMultiAwaitDrain; + state.awaitDrainWriters = new SafeSet(state.awaitDrainWriters ? 
[state.awaitDrainWriters] : []); + } + } + + state.pipes.push(dest); + $debug("pipe count=%d opts=%j", state.pipes.length, pipeOpts); + + const doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + const endFn = doEnd ? onend : unpipe; + if ((state[kState] & kEndEmitted) !== 0) process.nextTick(endFn); + else src.once("end", endFn); + + dest.on("unpipe", onunpipe); + function onunpipe(readable, unpipeInfo) { + $debug("onunpipe"); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + $debug("onend"); + dest.end(); + } + + let ondrain; + + let cleanedUp = false; + function cleanup() { + $debug("cleanup"); + // Cleanup event handlers once the pipe is broken. + dest.removeListener("close", onclose); + dest.removeListener("finish", onfinish); + if (ondrain) { + dest.removeListener("drain", ondrain); + } + dest.removeListener("error", onerror); + dest.removeListener("unpipe", onunpipe); + src.removeListener("end", onend); + src.removeListener("end", unpipe); + src.removeListener("data", ondata); + + cleanedUp = true; + + // If the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (ondrain && state.awaitDrainWriters && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + function pause() { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. 
+ if (!cleanedUp) { + if (state.pipes.length === 1 && state.pipes[0] === dest) { + $debug("false write response, pause", 0); + state.awaitDrainWriters = dest; + state[kState] &= ~kMultiAwaitDrain; + } else if (state.pipes.length > 1 && state.pipes.includes(dest)) { + $debug("false write response, pause", state.awaitDrainWriters.size); + state.awaitDrainWriters.add(dest); + } + src.pause(); + } + if (!ondrain) { + // When the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + ondrain = pipeOnDrain(src, dest); + dest.on("drain", ondrain); + } + } + + src.on("data", ondata); + function ondata(chunk) { + $debug("ondata"); + const ret = dest.write(chunk); + $debug("dest.write", ret); + if (ret === false) { + pause(); + } + } + + // If the dest has an error, then stop piping into it. + // However, don't suppress the throwing behavior for this. + function onerror(er) { + $debug("onerror", er); + unpipe(); + dest.removeListener("error", onerror); + if (dest.listenerCount("error") === 0) { + const s = dest._writableState || dest._readableState; + if (s && !s.errorEmitted) { + // User incorrectly emitted 'error' directly on the stream. + errorOrDestroy(dest, er); + } else { + dest.emit("error", er); + } + } + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, "error", onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener("finish", onfinish); + unpipe(); + } + dest.once("close", onclose); + function onfinish() { + $debug("onfinish"); + dest.removeListener("close", onclose); + unpipe(); + } + dest.once("finish", onfinish); + + function unpipe() { + $debug("unpipe"); + src.unpipe(dest); + } + + // Tell the dest that it's being piped to. + dest.emit("pipe", src); + + // Start the flow if it hasn't been started already. 
+ + if (dest.writableNeedDrain === true) { + pause(); + } else if ((state[kState] & kFlowing) === 0) { + $debug("pipe resume"); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src, dest) { + return function pipeOnDrainFunctionResult() { + const state = src._readableState; + + // `ondrain` will call directly, + // `this` maybe not a reference to dest, + // so we use the real dest here. + if (state.awaitDrainWriters === dest) { + $debug("pipeOnDrain", 1); + state.awaitDrainWriters = null; + } else if ((state[kState] & kMultiAwaitDrain) !== 0) { + $debug("pipeOnDrain", state.awaitDrainWriters.size); + state.awaitDrainWriters.delete(dest); + } + + if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && (state[kState] & kDataListening) !== 0) { + src.resume(); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + const state = this._readableState; + const unpipeInfo = { hasUnpiped: false }; + + // If we're not piping anywhere, then do nothing. + if (state.pipes.length === 0) return this; + + if (!dest) { + // remove all. + const dests = state.pipes; + state.pipes = []; + this.pause(); + + for (let i = 0; i < dests.length; i++) dests[i].emit("unpipe", this, { hasUnpiped: false }); + return this; + } + + // Try to find the right one. + const index = ArrayPrototypeIndexOf.$call(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + if (state.pipes.length === 0) this.pause(); + + dest.emit("unpipe", this, unpipeInfo); + + return this; +}; + +// Set up data events if they are asked for +// Ensure readable listeners eventually get something. +Readable.prototype.on = function (ev, fn) { + const res = Stream.prototype.on.$call(this, ev, fn); + const state = this._readableState; + + if (ev === "data") { + state[kState] |= kDataListening; + + // Update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). 
+ state[kState] |= this.listenerCount("readable") > 0 ? kReadableListening : 0; + + // Try start flowing on next tick if stream isn't explicitly paused. + if ((state[kState] & (kHasFlowing | kFlowing)) !== kHasFlowing) { + this.resume(); + } + } else if (ev === "readable") { + if ((state[kState] & (kEndEmitted | kReadableListening)) === 0) { + state[kState] |= kReadableListening | kNeedReadable | kHasFlowing; + state[kState] &= ~(kFlowing | kEmittedReadable); + $debug("on readable"); + if (state.length) { + emitReadable(this); + } else if ((state[kState] & kReading) === 0) { + process.nextTick(nReadingNextTick, this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +Readable.prototype.removeListener = function (ev, fn) { + const state = this._readableState; + + const res = Stream.prototype.removeListener.$call(this, ev, fn); + + if (ev === "readable") { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } else if (ev === "data" && this.listenerCount("data") === 0) { + state[kState] &= ~kDataListening; + } + + return res; +}; +Readable.prototype.off = Readable.prototype.removeListener; + +Readable.prototype.removeAllListeners = function (ev) { + const res = Stream.prototype.removeAllListeners.$apply(this, arguments); + + if (ev === "readable" || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. 
+ process.nextTick(updateReadableListening, this); + } + + return res; +}; + +function updateReadableListening(self) { + const state = self._readableState; + + if (self.listenerCount("readable") > 0) { + state[kState] |= kReadableListening; + } else { + state[kState] &= ~kReadableListening; + } + + if ((state[kState] & (kHasPaused | kPaused | kResumeScheduled)) === (kHasPaused | kResumeScheduled)) { + // Flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state[kState] |= kHasFlowing | kFlowing; + + // Crude way to check if we should resume. + } else if ((state[kState] & kDataListening) !== 0) { + self.resume(); + } else if ((state[kState] & kReadableListening) === 0) { + state[kState] &= ~(kHasFlowing | kFlowing); + } +} + +function nReadingNextTick(self) { + $debug("readable nexttick read 0"); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + const state = this._readableState; + if ((state[kState] & kFlowing) === 0) { + $debug("resume"); + // We flow only if there is no one listening + // for readable, but we still have to call + // resume(). 
+ state[kState] |= kHasFlowing; + if ((state[kState] & kReadableListening) === 0) { + state[kState] |= kFlowing; + } else { + state[kState] &= ~kFlowing; + } + resume(this, state); + } + state[kState] |= kHasPaused; + state[kState] &= ~kPaused; + return this; +}; + +function resume(stream, state) { + if ((state[kState] & kResumeScheduled) === 0) { + state[kState] |= kResumeScheduled; + process.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + $debug("resume", (state[kState] & kReading) !== 0); + if ((state[kState] & kReading) === 0) { + stream.read(0); + } + + state[kState] &= ~kResumeScheduled; + stream.emit("resume"); + flow(stream); + if ((state[kState] & (kFlowing | kReading)) === kFlowing) stream.read(0); +} + +Readable.prototype.pause = function () { + const state = this._readableState; + $debug("call pause"); + if ((state[kState] & (kHasFlowing | kFlowing)) !== kHasFlowing) { + $debug("pause"); + state[kState] |= kHasFlowing; + state[kState] &= ~kFlowing; + this.emit("pause"); + } + state[kState] |= kHasPaused | kPaused; + return this; +}; + +function flow(stream) { + const state = stream._readableState; + $debug("flow"); + while ((state[kState] & kFlowing) !== 0 && stream.read() !== null); +} + +// Wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + let paused = false; + + // TODO (ronag): Should this.destroy(err) emit + // 'error' on the wrapped stream? Would require + // a static factory method, e.g. Readable.wrap(stream). 
+ + stream.on("data", chunk => { + if (!this.push(chunk) && stream.pause) { + paused = true; + stream.pause(); + } + }); + + stream.on("end", () => { + this.push(null); + }); + + stream.on("error", err => { + errorOrDestroy(this, err); + }); + + stream.on("close", () => { + this.destroy(); + }); + + stream.on("destroy", () => { + this.destroy(); + }); + + this._read = () => { + if (paused && stream.resume) { + paused = false; + stream.resume(); + } + }; + + // Proxy all the other methods. Important when wrapping filters and duplexes. + const streamKeys = ObjectKeys(stream); + for (let j = 1; j < streamKeys.length; j++) { + const i = streamKeys[j]; + if (this[i] === undefined && typeof stream[i] === "function") { + this[i] = stream[i].bind(stream); + } + } + + return this; +}; + +Readable.prototype[SymbolAsyncIterator] = function () { + return streamToAsyncIterator(this); +}; + +Readable.prototype.iterator = function (options) { + if (options !== undefined) { + validateObject(options, "options"); + } + return streamToAsyncIterator(this, options); +}; + +function streamToAsyncIterator(stream, options?) { + if (typeof stream.read !== "function") { + stream = Readable.wrap(stream, { objectMode: true }); + } + + const iter = createAsyncIterator(stream, options); + iter.stream = stream; + return iter; +} + +async function* createAsyncIterator(stream, options) { + let callback = nop; + + function next(resolve) { + if (this === stream) { + callback(); + callback = nop; + } else { + callback = resolve; + } + } + + stream.on("readable", next); + + let error; + const cleanup = eos(stream, { writable: false }, err => { + error = err ? aggregateTwoErrors(error, err) : null; + callback(); + callback = nop; + }); + + try { + while (true) { + const chunk = stream.destroyed ? 
null : stream.read(); + if (chunk !== null) { + yield chunk; + } else if (error) { + throw error; + } else if (error === null) { + return; + } else { + await new Promise(next); + } + } + } catch (err) { + error = aggregateTwoErrors(error, err); + throw error; + } finally { + if ((error || options?.destroyOnReturn !== false) && (error === undefined || stream._readableState.autoDestroy)) { + destroyImpl.destroyer(stream, null); + } else { + stream.off("readable", next); + cleanup(); + } + } +} + +// Making it explicit these properties are not enumerable +// because otherwise some prototype manipulation in +// userland will fail. +ObjectDefineProperties(Readable.prototype, { + readable: { + __proto__: null, + get() { + const r = this._readableState; + // r.readable === false means that this is part of a Duplex stream + // where the readable side was disabled upon construction. + // Compat. The user might manually disable readable side through + // deprecated setter. + return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && !r.endEmitted; + }, + set(val) { + // Backwards compat. 
+ if (this._readableState) { + this._readableState.readable = !!val; + } + }, + }, + + readableDidRead: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.dataEmitted; + }, + }, + + readableAborted: { + __proto__: null, + enumerable: false, + get: function () { + return !!( + this._readableState.readable !== false && + (this._readableState.destroyed || this._readableState.errored) && + !this._readableState.endEmitted + ); + }, + }, + + readableHighWaterMark: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + }, + }, + + readableBuffer: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState?.buffer; + }, + }, + + readableFlowing: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.flowing; + }, + set: function (state) { + if (this._readableState) { + this._readableState.flowing = state; + } + }, + }, + + readableLength: { + __proto__: null, + enumerable: false, + get() { + return this._readableState.length; + }, + }, + + readableObjectMode: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.objectMode : false; + }, + }, + + readableEncoding: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.encoding : null; + }, + }, + + errored: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.errored : null; + }, + }, + + closed: { + __proto__: null, + get() { + return this._readableState ? this._readableState.closed : false; + }, + }, + + destroyed: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.destroyed : false; + }, + set(value) { + // We ignore the value if the stream + // has not been initialized yet. 
+ if (!this._readableState) { + return; + } + + // Backward compatibility, the user is explicitly + // managing destroyed. + this._readableState.destroyed = value; + }, + }, + + readableEnded: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.endEmitted : false; + }, + }, +}); + +ObjectDefineProperties(ReadableState.prototype, { + // Legacy getter for `pipesCount`. + pipesCount: { + __proto__: null, + get() { + return this.pipes.length; + }, + }, + + // Legacy property for `paused`. + paused: { + __proto__: null, + get() { + return (this[kState] & kPaused) !== 0; + }, + set(value) { + this[kState] |= kHasPaused; + if (value) { + this[kState] |= kPaused; + } else { + this[kState] &= ~kPaused; + } + }, + }, +}); + +// Exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered. + if (state.length === 0) return null; + + let idx = state.bufferIndex; + let ret; + + const buf = state.buffer; + const len = buf.length; + + if ((state[kState] & kObjectMode) !== 0) { + ret = buf[idx]; + buf[idx++] = null; + } else if (!n || n >= state.length) { + // Read it all, truncate the list. + if ((state[kState] & kDecoder) !== 0) { + ret = ""; + while (idx < len) { + ret += buf[idx]; + buf[idx++] = null; + } + } else if (len - idx === 0) { + ret = Buffer.alloc(0); + } else if (len - idx === 1) { + ret = buf[idx]; + buf[idx++] = null; + } else { + ret = Buffer.allocUnsafe(state.length); + + let i = 0; + while (idx < len) { + TypedArrayPrototypeSet.$call(ret, buf[idx], i); + i += buf[idx].length; + buf[idx++] = null; + } + } + } else if (n < buf[idx].length) { + // `slice` is the same for buffers and strings. 
+ ret = buf[idx].slice(0, n); + buf[idx] = buf[idx].slice(n); + } else if (n === buf[idx].length) { + // First chunk is a perfect match. + ret = buf[idx]; + buf[idx++] = null; + } else if ((state[kState] & kDecoder) !== 0) { + ret = ""; + while (idx < len) { + const str = buf[idx]; + if (n > str.length) { + ret += str; + n -= str.length; + buf[idx++] = null; + } else { + if (n === buf.length) { + ret += str; + buf[idx++] = null; + } else { + ret += str.slice(0, n); + buf[idx] = str.slice(n); + } + break; + } + } + } else { + ret = Buffer.allocUnsafe(n); + + const retLen = n; + while (idx < len) { + const data = buf[idx]; + if (n > data.length) { + TypedArrayPrototypeSet.$call(ret, data, retLen - n); + n -= data.length; + buf[idx++] = null; + } else { + if (n === data.length) { + TypedArrayPrototypeSet.$call(ret, data, retLen - n); + buf[idx++] = null; + } else { + TypedArrayPrototypeSet.$call(ret, new $Buffer(data.buffer, data.byteOffset, n), retLen - n); + buf[idx] = new $Buffer(data.buffer, data.byteOffset + n, data.length - n); + } + break; + } + } + } + + if (idx === len) { + state.buffer.length = 0; + state.bufferIndex = 0; + } else if (idx > 1024) { + state.buffer.splice(0, idx); + state.bufferIndex = 0; + } else { + state.bufferIndex = idx; + } + + return ret; +} + +function endReadable(stream) { + const state = stream._readableState; + + $debug("endReadable"); + if ((state[kState] & kEndEmitted) === 0) { + state[kState] |= kEnded; + process.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + $debug("endReadableNT"); + + // Check that we didn't get one last unshift. 
+ if ((state[kState] & (kErrored | kCloseEmitted | kEndEmitted)) === 0 && state.length === 0) { + state[kState] |= kEndEmitted; + stream.emit("end"); + + if (stream.writable && stream.allowHalfOpen === false) { + process.nextTick(endWritableNT, stream); + } else if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well. + const wState = stream._writableState; + const autoDestroy = + !wState || + (wState.autoDestroy && + // We don't expect the writable to ever 'finish' + // if writable is explicitly set to false. + (wState.finished || wState.writable === false)); + + if (autoDestroy) { + stream[kAutoDestroyed] = true; // workaround for node:http Server not using node:net Server + stream.destroy(); + } + } + } +} + +function endWritableNT(stream) { + const writable = stream.writable && !stream.writableEnded && !stream.destroyed; + if (writable) { + stream.end(); + } +} + +Readable.from = function (iterable, opts) { + return from(Readable, iterable, opts); +}; + +// Lazy to avoid circular references +let webStreamsAdapters; +function lazyWebStreams() { + if (webStreamsAdapters === undefined) webStreamsAdapters = require("internal/webstreams_adapters"); + return webStreamsAdapters; +} + +Readable.fromWeb = function (readableStream, options) { + return lazyWebStreams().newStreamReadableFromReadableStream(readableStream, options); +}; + +Readable.toWeb = function (streamReadable, options) { + return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable, options); +}; + +Readable.wrap = function (src, options) { + return new Readable({ + objectMode: src.readableObjectMode ?? src.objectMode ?? 
true, + ...options, + destroy(err, callback) { + destroyImpl.destroyer(src, err); + callback(err); + }, + }).wrap(src); +}; + +export default Readable; diff --git a/src/js/internal/streams/state.ts b/src/js/internal/streams/state.ts new file mode 100644 index 0000000000..2fb3379b5a --- /dev/null +++ b/src/js/internal/streams/state.ts @@ -0,0 +1,47 @@ +"use strict"; + +const { validateInteger } = require("internal/validators"); + +const NumberIsInteger = Number.isInteger; +const MathFloor = Math.floor; + +// TODO (fix): For some reason Windows CI fails with bigger hwm. +let defaultHighWaterMarkBytes = process.platform === "win32" ? 16 * 1024 : 64 * 1024; +let defaultHighWaterMarkObjectMode = 16; + +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} + +function getDefaultHighWaterMark(objectMode) { + return objectMode ? defaultHighWaterMarkObjectMode : defaultHighWaterMarkBytes; +} + +function setDefaultHighWaterMark(objectMode, value) { + validateInteger(value, "value", 0); + if (objectMode) { + defaultHighWaterMarkObjectMode = value; + } else { + defaultHighWaterMarkBytes = value; + } +} + +function getHighWaterMark(state, options, duplexKey, isDuplex) { + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!NumberIsInteger(hwm) || hwm < 0) { + const name = isDuplex ? 
`options.${duplexKey}` : "options.highWaterMark"; + throw $ERR_INVALID_ARG_VALUE(name, hwm); + } + return MathFloor(hwm); + } + + // Default value + return getDefaultHighWaterMark(state.objectMode); +} + +export default { + getHighWaterMark, + getDefaultHighWaterMark, + setDefaultHighWaterMark, +}; diff --git a/src/js/internal/streams/transform.ts b/src/js/internal/streams/transform.ts new file mode 100644 index 0000000000..620b523e76 --- /dev/null +++ b/src/js/internal/streams/transform.ts @@ -0,0 +1,172 @@ +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. 
+// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +"use strict"; + +const Duplex = require("internal/streams/duplex"); +const { getHighWaterMark } = require("internal/streams/state"); + +const kCallback = Symbol("kCallback"); + +function Transform(options) { + if (!(this instanceof Transform)) return Reflect.construct(Transform, [options]); + + // TODO (ronag): This should preferably always be + // applied but would be semver-major. Or even better; + // make Transform a Readable with the Writable interface. + const readableHighWaterMark = options ? getHighWaterMark(this, options, "readableHighWaterMark", true) : null; + if (readableHighWaterMark === 0) { + // A Duplex will buffer both on the writable and readable side while + // a Transform just wants to buffer hwm number of elements. To avoid + // buffering twice we disable buffering on the writable side. 
+ options = { + ...options, + highWaterMark: null, + readableHighWaterMark, + writableHighWaterMark: options.writableHighWaterMark || 0, + }; + } + + Duplex.$call(this, options); + + // We have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + this[kCallback] = null; + + if (options) { + if (typeof options.transform === "function") this._transform = options.transform; + + if (typeof options.flush === "function") this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + // Backwards compat. Some Transform streams incorrectly implement _final + // instead of or in addition to _flush. By using 'prefinish' instead of + // implementing _final we continue supporting this unfortunate use case. + this.on("prefinish", prefinish); +} +$toClass(Transform, "Transform", Duplex); + +function final(cb?) { + if (typeof this._flush === "function" && !this.destroyed) { + this._flush((er, data) => { + if (er) { + if (cb) { + cb(er); + } else { + this.destroy(er); + } + return; + } + + if (data != null) { + this.push(data); + } + this.push(null); + if (cb) { + cb(); + } + }); + } else { + this.push(null); + if (cb) { + cb(); + } + } +} + +function prefinish() { + if (this._final !== final) { + final.$call(this); + } +} + +Transform.prototype._final = final; + +Transform.prototype._transform = function (chunk, encoding, callback) { + throw $ERR_METHOD_NOT_IMPLEMENTED("_transform()"); +}; + +Transform.prototype._write = function (chunk, encoding, callback) { + const rState = this._readableState; + const wState = this._writableState; + const length = rState.length; + + this._transform(chunk, encoding, (err, val) => { + if (err) { + callback(err); + return; + } + + if (val != null) { + this.push(val); + } + + if (rState.ended) { + // If user has called this.push(null) we have to + // delay the callback to 
properly propagate the new + // state. + process.nextTick(callback); + } else if ( + wState.ended || // Backwards compat. + length === rState.length || // Backwards compat. + rState.length < rState.highWaterMark + ) { + callback(); + } else { + this[kCallback] = callback; + } + }); +}; + +Transform.prototype._read = function () { + if (this[kCallback]) { + const callback = this[kCallback]; + this[kCallback] = null; + callback(); + } +}; + +export default Transform; diff --git a/src/js/internal/streams/utils.ts b/src/js/internal/streams/utils.ts new file mode 100644 index 0000000000..12f0e9ff3d --- /dev/null +++ b/src/js/internal/streams/utils.ts @@ -0,0 +1,321 @@ +"use strict"; + +const SymbolFor = Symbol.for; +const SymbolIterator = Symbol.iterator; +const SymbolAsyncIterator = Symbol.asyncIterator; + +// We need to use SymbolFor to make these globally available +// for interoperability with readable-stream, i.e. readable-stream +// and node core needs to be able to read/write private state +// from each other for proper interoperability. 
// Shared symbols. Symbol.for-registered keys are how the userland
// readable-stream package and core streams read/write each other's
// private state, so these names must not change.
const kIsDestroyed = SymbolFor("nodejs.stream.destroyed");
const kIsErrored = SymbolFor("nodejs.stream.errored");
const kIsReadable = SymbolFor("nodejs.stream.readable");
const kIsWritable = SymbolFor("nodejs.stream.writable");
const kIsDisturbed = SymbolFor("nodejs.stream.disturbed");

const kOnConstructed = Symbol("kOnConstructed");

const kIsClosedPromise = SymbolFor("nodejs.webstream.isClosedPromise");
const kControllerErrorFunction = SymbolFor("nodejs.webstream.controllerErrorFunction");

// Bitfield storage for Readable/Writable state: one bit per boolean flag,
// packed into a single integer slot keyed by kState.
const kState = Symbol("kState");
const kObjectMode = 1 << 0;
const kErrorEmitted = 1 << 1;
const kAutoDestroy = 1 << 2;
const kEmitClose = 1 << 3;
const kDestroyed = 1 << 4;
const kClosed = 1 << 5;
const kCloseEmitted = 1 << 6;
const kErrored = 1 << 7;
const kConstructed = 1 << 8;

// Duck-type check for a node Readable: needs pipe() and on(); with `strict`
// it must also expose pause()/resume(). A Duplex whose readable side was
// explicitly disabled is rejected.
function isReadableNodeStream(obj, strict = false) {
  if (!obj) return false;
  if (typeof obj.pipe !== "function" || typeof obj.on !== "function") return false;
  if (strict && (typeof obj.pause !== "function" || typeof obj.resume !== "function")) return false;
  // Duplex whose readable side is switched off.
  if (obj._writableState && obj._readableState?.readable === false) return false;
  // Writable also has .pipe; if a writable state exists, demand a readable one too.
  if (obj._writableState && !obj._readableState) return false;
  return true;
}

// Duck-type check for a node Writable; rejects a Duplex whose writable
// side was explicitly disabled.
function isWritableNodeStream(obj) {
  if (!obj) return false;
  if (typeof obj.write !== "function" || typeof obj.on !== "function") return false;
  // Duplex whose writable side is switched off.
  if (obj._readableState && obj._writableState?.writable === false) return false;
  return true;
}

// A Duplex quacks like both: readable state plus write()/pipe()/on().
function isDuplexNodeStream(obj) {
  if (!obj) return false;
  return (
    typeof obj.pipe === "function" &&
    typeof obj.on === "function" &&
    typeof obj.write === "function" &&
    !!obj._readableState
  );
}

// Loose "any node stream" check. NOTE: deliberately returns the matched
// truthy value (or the falsy input), not a coerced boolean — callers rely
// only on truthiness.
function isNodeStream(obj) {
  if (!obj) return obj;
  return (
    obj._readableState ||
    obj._writableState ||
    (typeof obj.write === "function" && typeof obj.on === "function") ||
    (typeof obj.pipe === "function" && typeof obj.on === "function")
  );
}

// Web stream brand checks are delegated to Bun engine intrinsics.
function isReadableStream(obj) {
  return $inheritsReadableStream(obj);
}

function isWritableStream(obj) {
  return $inheritsWritableStream(obj);
}

function isTransformStream(obj) {
  return $inheritsTransformStream(obj);
}

// Any of the three WHATWG stream brands.
function isWebStream(obj) {
  return isReadableStream(obj) || isWritableStream(obj) || isTransformStream(obj);
}

// isAsync === true  -> async-iterable only
// isAsync === false -> sync-iterable only
// otherwise         -> either. Property reads stay lazy so exotic getters
// fire exactly as often as before.
function isIterable(obj, isAsync) {
  if (obj == null) return false;
  const hasAsync = () => typeof obj[SymbolAsyncIterator] === "function";
  const hasSync = () => typeof obj[SymbolIterator] === "function";
  if (isAsync === true) return hasAsync();
  if (isAsync === false) return hasSync();
  return hasAsync() || hasSync();
}

// null when not a node stream; otherwise whether it has been destroyed,
// via the public flag, the interop symbol, or the internal state.
function isDestroyed(stream) {
  if (!isNodeStream(stream)) return null;
  const state = stream._writableState || stream._readableState;
  return !!(stream.destroyed || stream[kIsDestroyed] || state?.destroyed);
}

// Have been end():d. Returns null when the answer is unknowable.
function isWritableEnded(stream) {
  if (!isWritableNodeStream(stream)) return null;
  if (stream.writableEnded === true) return true;
  const state = stream._writableState;
  if (state?.errored) return false;
  if (typeof state?.ended !== "boolean") return null;
  return state.ended;
}

// Have emitted 'finish'.
+function isWritableFinished(stream, strict) { + if (!isWritableNodeStream(stream)) return null; + if (stream.writableFinished === true) return true; + const wState = stream._writableState; + if (wState?.errored) return false; + if (typeof wState?.finished !== "boolean") return null; + return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0)); +} + +// Have been push(null):d. +function isReadableEnded(stream) { + if (!isReadableNodeStream(stream)) return null; + if (stream.readableEnded === true) return true; + const rState = stream._readableState; + if (!rState || rState.errored) return false; + if (typeof rState?.ended !== "boolean") return null; + return rState.ended; +} + +// Have emitted 'end'. +function isReadableFinished(stream, strict?) { + if (!isReadableNodeStream(stream)) return null; + const rState = stream._readableState; + if (rState?.errored) return false; + if (typeof rState?.endEmitted !== "boolean") return null; + return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0)); +} + +function isReadable(stream) { + if (stream && stream[kIsReadable] != null) return stream[kIsReadable]; + if (typeof stream?.readable !== "boolean") return null; + if (isDestroyed(stream)) return false; + return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream); +} + +function isWritable(stream) { + if (stream && stream[kIsWritable] != null) return stream[kIsWritable]; + if (typeof stream?.writable !== "boolean") return null; + if (isDestroyed(stream)) return false; + return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream); +} + +function isFinished(stream, opts) { + if (!isNodeStream(stream)) { + return null; + } + + if (isDestroyed(stream)) { + return true; + } + + if (opts?.readable !== false && isReadable(stream)) { + return false; + } + + if (opts?.writable !== false && isWritable(stream)) { + return false; + } + + return true; +} + 
+function isWritableErrored(stream) { + if (!isNodeStream(stream)) { + return null; + } + + if (stream.writableErrored) { + return stream.writableErrored; + } + + return stream._writableState?.errored ?? null; +} + +function isReadableErrored(stream) { + if (!isNodeStream(stream)) { + return null; + } + + if (stream.readableErrored) { + return stream.readableErrored; + } + + return stream._readableState?.errored ?? null; +} + +function isClosed(stream) { + if (!isNodeStream(stream)) { + return null; + } + + if (typeof stream.closed === "boolean") { + return stream.closed; + } + + const wState = stream._writableState; + const rState = stream._readableState; + + if (typeof wState?.closed === "boolean" || typeof rState?.closed === "boolean") { + return wState?.closed || rState?.closed; + } + + if (typeof stream._closed === "boolean" && isOutgoingMessage(stream)) { + return stream._closed; + } + + return null; +} + +function isOutgoingMessage(stream) { + return ( + typeof stream._closed === "boolean" && + typeof stream._defaultKeepAlive === "boolean" && + typeof stream._removedConnection === "boolean" && + typeof stream._removedContLen === "boolean" + ); +} + +function isServerResponse(stream) { + return typeof stream._sent100 === "boolean" && isOutgoingMessage(stream); +} + +function isServerRequest(stream) { + return ( + typeof stream._consuming === "boolean" && + typeof stream._dumped === "boolean" && + stream.req?.upgradeOrConnect === undefined + ); +} + +function willEmitClose(stream) { + if (!isNodeStream(stream)) return null; + + const wState = stream._writableState; + const rState = stream._readableState; + const state = wState || rState; + + return (!state && isServerResponse(stream)) || !!(state?.autoDestroy && state.emitClose && state.closed === false); +} + +function isDisturbed(stream) { + return !!(stream && (stream[kIsDisturbed] ?? 
(stream.readableDidRead || stream.readableAborted))); +} + +function isErrored(stream) { + return !!( + stream && + (stream[kIsErrored] ?? + stream.readableErrored ?? + stream.writableErrored ?? + stream._readableState?.errorEmitted ?? + stream._writableState?.errorEmitted ?? + stream._readableState?.errored ?? + stream._writableState?.errored) + ); +} + +export default { + kOnConstructed, + isDestroyed, + kIsDestroyed, + isDisturbed, + kIsDisturbed, + isErrored, + kIsErrored, + isReadable, + kIsReadable, + kIsClosedPromise, + kControllerErrorFunction, + kIsWritable, + isClosed, + isDuplexNodeStream, + isFinished, + isIterable, + isReadableNodeStream, + isReadableStream, + isReadableEnded, + isReadableFinished, + isReadableErrored, + isNodeStream, + isWebStream, + isWritable, + isWritableNodeStream, + isWritableStream, + isWritableEnded, + isWritableFinished, + isWritableErrored, + isServerRequest, + isServerResponse, + willEmitClose, + isTransformStream, + kState, + // bitfields + kObjectMode, + kErrorEmitted, + kAutoDestroy, + kEmitClose, + kDestroyed, + kClosed, + kCloseEmitted, + kErrored, + kConstructed, +}; diff --git a/src/js/internal/streams/writable.ts b/src/js/internal/streams/writable.ts new file mode 100644 index 0000000000..3d86b0771b --- /dev/null +++ b/src/js/internal/streams/writable.ts @@ -0,0 +1,1123 @@ +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
+ +"use strict"; + +const EE = require("node:events"); +const { Stream } = require("internal/streams/legacy"); +const { Buffer } = require("node:buffer"); +const destroyImpl = require("internal/streams/destroy"); +const eos = require("internal/streams/end-of-stream"); +const { addAbortSignal } = require("internal/streams/add-abort-signal"); +const { getHighWaterMark, getDefaultHighWaterMark } = require("internal/streams/state"); +const { + kOnConstructed, + kState, + kObjectMode, + kErrorEmitted, + kAutoDestroy, + kEmitClose, + kDestroyed, + kClosed, + kCloseEmitted, + kErrored, + kConstructed, +}: { + readonly kState: unique symbol; + readonly kOnConstructed: unique symbol; + kObjectMode: number; + kErrorEmitted: number; + kAutoDestroy: number; + kEmitClose: number; + kDestroyed: number; + kClosed: number; + kCloseEmitted: number; + kErrored: number; + kConstructed: number; +} = require("internal/streams/utils"); + +const ObjectDefineProperties = Object.defineProperties; +const ArrayPrototypeSlice = Array.prototype.slice; +const ObjectDefineProperty = Object.defineProperty; +const SymbolHasInstance = Symbol.hasInstance; +const FunctionPrototypeSymbolHasInstance = Function.prototype[Symbol.hasInstance]; +const StringPrototypeToLowerCase = String.prototype.toLowerCase; +const SymbolAsyncDispose = Symbol.asyncDispose; + +const { errorOrDestroy } = destroyImpl; + +function nop() {} + +const kOnFinishedValue = Symbol("kOnFinishedValue"); +const kErroredValue = Symbol("kErroredValue"); +const kDefaultEncodingValue = Symbol("kDefaultEncodingValue"); +const kWriteCbValue = Symbol("kWriteCbValue"); +const kAfterWriteTickInfoValue = Symbol("kAfterWriteTickInfoValue"); +const kBufferedValue = Symbol("kBufferedValue"); + +const kSync = 1 << 9; +const kFinalCalled = 1 << 10; +const kNeedDrain = 1 << 11; +const kEnding = 1 << 12; +const kFinished = 1 << 13; +const kDecodeStrings = 1 << 14; +const kWriting = 1 << 15; +const kBufferProcessing = 1 << 16; +const kPrefinished = 1 << 
17; +const kAllBuffers = 1 << 18; +const kAllNoop = 1 << 19; +const kOnFinished = 1 << 20; +const kHasWritable = 1 << 21; +const kWritable = 1 << 22; +const kCorked = 1 << 23; +const kDefaultUTF8Encoding = 1 << 24; +const kWriteCb = 1 << 25; +const kExpectWriteCb = 1 << 26; +const kAfterWriteTickInfo = 1 << 27; +const kAfterWritePending = 1 << 28; +const kBuffered = 1 << 29; +const kEnded = 1 << 30; + +// TODO(benjamingr) it is likely slower to do it this way than with free functions +function makeBitMapDescriptor(bit) { + return { + enumerable: false, + get() { + return (this[kState] & bit) !== 0; + }, + set(value) { + if (value) this[kState] |= bit; + else this[kState] &= ~bit; + }, + }; +} +WritableState.prototype = {}; +ObjectDefineProperties(WritableState.prototype, { + // Object stream flag to indicate whether or not this stream + // contains buffers or objects. + objectMode: makeBitMapDescriptor(kObjectMode), + + // if _final has been called. + finalCalled: makeBitMapDescriptor(kFinalCalled), + + // drain event flag. + needDrain: makeBitMapDescriptor(kNeedDrain), + + // At the start of calling end() + ending: makeBitMapDescriptor(kEnding), + + // When end() has been called, and returned. + ended: makeBitMapDescriptor(kEnded), + + // When 'finish' is emitted. + finished: makeBitMapDescriptor(kFinished), + + // Has it been destroyed. + destroyed: makeBitMapDescriptor(kDestroyed), + + // Should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + decodeStrings: makeBitMapDescriptor(kDecodeStrings), + + // A flag to see when we're in the middle of a write. + writing: makeBitMapDescriptor(kWriting), + + // A flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. 
+ sync: makeBitMapDescriptor(kSync), + + // A flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + bufferProcessing: makeBitMapDescriptor(kBufferProcessing), + + // Stream is still being constructed and cannot be + // destroyed until construction finished or failed. + // Async construction is opt in, therefore we start as + // constructed. + constructed: makeBitMapDescriptor(kConstructed), + + // Emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams. + prefinished: makeBitMapDescriptor(kPrefinished), + + // True if the error was already emitted and should not be thrown again. + errorEmitted: makeBitMapDescriptor(kErrorEmitted), + + // Should close be emitted on destroy. Defaults to true. + emitClose: makeBitMapDescriptor(kEmitClose), + + // Should .destroy() be called after 'finish' (and potentially 'end'). + autoDestroy: makeBitMapDescriptor(kAutoDestroy), + + // Indicates whether the stream has finished destroying. + closed: makeBitMapDescriptor(kClosed), + + // True if close has been emitted or would have been emitted + // depending on emitClose. + closeEmitted: makeBitMapDescriptor(kCloseEmitted), + + allBuffers: makeBitMapDescriptor(kAllBuffers), + allNoop: makeBitMapDescriptor(kAllNoop), + + // Indicates whether the stream has errored. When true all write() calls + // should return false. This is needed since when autoDestroy + // is disabled we need a way to tell whether the stream has failed. + // This is/should be a cold path. + errored: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kErrored) !== 0 ? 
this[kErroredValue] : null; + }, + set(value) { + if (value) { + this[kErroredValue] = value; + this[kState] |= kErrored; + } else { + this[kState] &= ~kErrored; + } + }, + }, + + writable: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kHasWritable) !== 0 ? (this[kState] & kWritable) !== 0 : undefined; + }, + set(value) { + if (value == null) { + this[kState] &= ~(kHasWritable | kWritable); + } else if (value) { + this[kState] |= kHasWritable | kWritable; + } else { + this[kState] |= kHasWritable; + this[kState] &= ~kWritable; + } + }, + }, + + defaultEncoding: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kDefaultUTF8Encoding) !== 0 ? "utf8" : this[kDefaultEncodingValue]; + }, + set(value) { + if (value === "utf8" || value === "utf-8") { + this[kState] |= kDefaultUTF8Encoding; + } else { + this[kState] &= ~kDefaultUTF8Encoding; + this[kDefaultEncodingValue] = value; + } + }, + }, + + // The callback that the user supplies to write(chunk, encoding, cb). + writecb: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kWriteCb) !== 0 ? this[kWriteCbValue] : nop; + }, + set(value) { + this[kWriteCbValue] = value; + if (value) { + this[kState] |= kWriteCb; + } else { + this[kState] &= ~kWriteCb; + } + }, + }, + + // Storage for data passed to the afterWrite() callback in case of + // synchronous _write() completion. + afterWriteTickInfo: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kAfterWriteTickInfo) !== 0 ? this[kAfterWriteTickInfoValue] : null; + }, + set(value) { + this[kAfterWriteTickInfoValue] = value; + if (value) { + this[kState] |= kAfterWriteTickInfo; + } else { + this[kState] &= ~kAfterWriteTickInfo; + } + }, + }, + + buffered: { + __proto__: null, + enumerable: false, + get() { + return (this[kState] & kBuffered) !== 0 ? 
this[kBufferedValue] : []; + }, + set(value) { + this[kBufferedValue] = value; + if (value) { + this[kState] |= kBuffered; + } else { + this[kState] &= ~kBuffered; + } + }, + }, +}); + +function WritableState(options, stream, isDuplex) { + // Bit map field to store WritableState more efficiently with 1 bit per field + // instead of a V8 slot per field. + this[kState] = kSync | kConstructed | kEmitClose | kAutoDestroy; + + if (options?.objectMode) this[kState] |= kObjectMode; + + if (isDuplex && options?.writableObjectMode) this[kState] |= kObjectMode; + + // The point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write(). + this.highWaterMark = options + ? getHighWaterMark(this, options, "writableHighWaterMark", isDuplex) + : getDefaultHighWaterMark(false); + + if (!options || options.decodeStrings !== false) this[kState] |= kDecodeStrings; + + // Should close be emitted on destroy. Defaults to true. + if (options && options.emitClose === false) this[kState] &= ~kEmitClose; + + // Should .destroy() be called after 'end' (and potentially 'finish'). + if (options && options.autoDestroy === false) this[kState] &= ~kAutoDestroy; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + const defaultEncoding = options ? 
options.defaultEncoding : null; + if (defaultEncoding == null || defaultEncoding === "utf8" || defaultEncoding === "utf-8") { + this[kState] |= kDefaultUTF8Encoding; + } else if (Buffer.isEncoding(defaultEncoding)) { + this[kState] &= ~kDefaultUTF8Encoding; + this[kDefaultEncodingValue] = defaultEncoding; + } else { + throw $ERR_UNKNOWN_ENCODING(defaultEncoding); + } + + // Not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // When true all writes will be buffered until .uncork() call. + this.corked = 0; + + // The callback that's passed to _write(chunk, cb). + this.onwrite = onwrite.bind(undefined, stream); + + // The amount that is being written when _write is called. + this.writelen = 0; + + resetBuffer(this); + + // Number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted. + this.pendingcb = 0; +} + +function resetBuffer(state) { + state[kBufferedValue] = null; + state.bufferedIndex = 0; + state[kState] |= kAllBuffers | kAllNoop; + state[kState] &= ~kBuffered; +} + +WritableState.prototype.getBuffer = function getBuffer() { + return (this[kState] & kBuffered) === 0 ? [] : ArrayPrototypeSlice.$call(this.buffered, this.bufferedIndex); +}; + +ObjectDefineProperty(WritableState.prototype, "bufferedRequestCount", { + __proto__: null, + get() { + return (this[kState] & kBuffered) === 0 ? 
0 : this[kBufferedValue].length - this.bufferedIndex; + }, +}); + +WritableState.prototype[kOnConstructed] = function onConstructed(stream) { + if ((this[kState] & kWriting) === 0) { + clearBuffer(stream, this); + } + + if ((this[kState] & kEnding) !== 0) { + finishMaybe(stream, this); + } +}; + +function Writable(options) { + if (!(this instanceof Writable)) return Reflect.construct(Writable, [options]); + + this._events ??= { + close: undefined, + error: undefined, + prefinish: undefined, + finish: undefined, + drain: undefined, + // Skip uncommon events... + // [destroyImpl.kConstruct]: undefined, + // [destroyImpl.kDestroy]: undefined, + }; + + this._writableState = new WritableState(options, this, false); + + if (options) { + if (typeof options.write === "function") this._write = options.write; + + if (typeof options.writev === "function") this._writev = options.writev; + + if (typeof options.destroy === "function") this._destroy = options.destroy; + + if (typeof options.final === "function") this._final = options.final; + + if (typeof options.construct === "function") this._construct = options.construct; + + if (options.signal) addAbortSignal(options.signal, this); + } + + Stream.$call(this, options); + + if (this._construct != null) { + destroyImpl.construct(this, () => { + this._writableState[kOnConstructed](this); + }); + } +} +$toClass(Writable, "Writable", Stream); + +Writable.WritableState = WritableState; + +ObjectDefineProperty(Writable, SymbolHasInstance, { + __proto__: null, + value: function (object) { + if (FunctionPrototypeSymbolHasInstance.$call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + }, +}); + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, $ERR_STREAM_CANNOT_PIPE()); +}; + +function _write(stream, chunk, encoding, cb?) 
{ + const state = stream._writableState; + + if (cb == null || typeof cb !== "function") { + cb = nop; + } + + if (chunk === null) { + throw $ERR_STREAM_NULL_VALUES(); + } + + if ((state[kState] & kObjectMode) === 0) { + if (!encoding) { + encoding = (state[kState] & kDefaultUTF8Encoding) !== 0 ? "utf8" : state.defaultEncoding; + } else if (encoding !== "buffer" && !Buffer.isEncoding(encoding)) { + throw $ERR_UNKNOWN_ENCODING(encoding); + } + + if (typeof chunk === "string") { + if ((state[kState] & kDecodeStrings) !== 0) { + chunk = Buffer.from(chunk, encoding); + encoding = "buffer"; + } + } else if (chunk instanceof Buffer) { + encoding = "buffer"; + } else if (Stream._isArrayBufferView(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk); + encoding = "buffer"; + } else { + throw $ERR_INVALID_ARG_TYPE("chunk", ["string", "Buffer", "TypedArray", "DataView"], chunk); + } + } + + let err; + if ((state[kState] & kEnding) !== 0) { + err = $ERR_STREAM_WRITE_AFTER_END(); + } else if ((state[kState] & kDestroyed) !== 0) { + err = $ERR_STREAM_DESTROYED("write"); + } + + if (err) { + process.nextTick(cb, err); + errorOrDestroy(stream, err, true); + return err; + } + + state.pendingcb++; + return writeOrBuffer(stream, state, chunk, encoding, cb); +} + +Writable.prototype.write = function (chunk, encoding, cb) { + if (encoding != null && typeof encoding === "function") { + cb = encoding; + encoding = null; + } + + return _write(this, chunk, encoding, cb) === true; +}; + +Writable.prototype.cork = function () { + const state = this._writableState; + + state[kState] |= kCorked; + state.corked++; +}; + +Writable.prototype.uncork = function () { + const state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.corked) { + state[kState] &= ~kCorked; + } + + if ((state[kState] & kWriting) === 0) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires 
lower case. + if (typeof encoding === "string") encoding = StringPrototypeToLowerCase.$call(encoding); + if (!Buffer.isEncoding(encoding)) throw $ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +// If we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, callback) { + const len = (state[kState] & kObjectMode) !== 0 ? 1 : chunk.length; + + state.length += len; + + if ((state[kState] & (kWriting | kErrored | kCorked | kConstructed)) !== kConstructed) { + if ((state[kState] & kBuffered) === 0) { + state[kState] |= kBuffered; + state[kBufferedValue] = []; + } + + state[kBufferedValue].push({ chunk, encoding, callback }); + if ((state[kState] & kAllBuffers) !== 0 && encoding !== "buffer") { + state[kState] &= ~kAllBuffers; + } + if ((state[kState] & kAllNoop) !== 0 && callback !== nop) { + state[kState] &= ~kAllNoop; + } + } else { + state.writelen = len; + if (callback !== nop) { + state.writecb = callback; + } + state[kState] |= kWriting | kSync | kExpectWriteCb; + stream._write(chunk, encoding, state.onwrite); + state[kState] &= ~kSync; + } + + const ret = state.length < state.highWaterMark || state.length === 0; + + if (!ret) { + state[kState] |= kNeedDrain; + } + + // Return false if errored or destroyed in order to break + // any synchronous while(stream.write(data)) loops. 
+ return ret && (state[kState] & (kDestroyed | kErrored)) === 0; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + if (cb !== nop) { + state.writecb = cb; + } + state[kState] |= kWriting | kSync | kExpectWriteCb; + if ((state[kState] & kDestroyed) !== 0) state.onwrite($ERR_STREAM_DESTROYED("write")); + else if (writev) stream._writev(chunk, state.onwrite); + else stream._write(chunk, encoding, state.onwrite); + state[kState] &= ~kSync; +} + +function onwriteError(stream, state, er, cb) { + --state.pendingcb; + + cb(er); + // Ensure callbacks are invoked even when autoDestroy is + // not enabled. Passing `er` here doesn't make sense since + // it's related to one specific write, not to the buffered + // writes. + errorBuffer(state); + // This can emit error, but error must always follow cb. + errorOrDestroy(stream, er); +} + +function onwrite(stream, er) { + const state = stream._writableState; + + if ((state[kState] & kExpectWriteCb) === 0) { + errorOrDestroy(stream, $ERR_MULTIPLE_CALLBACK()); + return; + } + + const sync = (state[kState] & kSync) !== 0; + const cb = (state[kState] & kWriteCb) !== 0 ? state[kWriteCbValue] : nop; + + state.writecb = null; + state[kState] &= ~(kWriting | kExpectWriteCb); + state.length -= state.writelen; + state.writelen = 0; + + if (er) { + // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 + er.stack; // eslint-disable-line no-unused-expressions + + if ((state[kState] & kErrored) === 0) { + state[kErroredValue] = er; + state[kState] |= kErrored; + } + + // In case of duplex streams we need to notify the readable side of the + // error. 
+ if (stream._readableState && !stream._readableState.errored) { + stream._readableState.errored = er; + } + + if (sync) { + process.nextTick(onwriteError, stream, state, er, cb); + } else { + onwriteError(stream, state, er, cb); + } + } else { + if ((state[kState] & kBuffered) !== 0) { + clearBuffer(stream, state); + } + + if (sync) { + const needDrain = (state[kState] & kNeedDrain) !== 0 && state.length === 0; + const needTick = needDrain || state[kState] & Number(kDestroyed !== 0) || cb !== nop; + + // It is a common case that the callback passed to .write() is always + // the same. In that case, we do not schedule a new nextTick(), but + // rather just increase a counter, to improve performance and avoid + // memory allocations. + if (cb === nop) { + if ((state[kState] & kAfterWritePending) === 0 && needTick) { + process.nextTick(afterWrite, stream, state, 1, cb); + state[kState] |= kAfterWritePending; + } else { + state.pendingcb--; + if ((state[kState] & kEnding) !== 0) { + finishMaybe(stream, state, true); + } + } + } else if ((state[kState] & kAfterWriteTickInfo) !== 0 && state[kAfterWriteTickInfoValue].cb === cb) { + state[kAfterWriteTickInfoValue].count++; + } else if (needTick) { + state[kAfterWriteTickInfoValue] = { count: 1, cb, stream, state }; + process.nextTick(afterWriteTick, state[kAfterWriteTickInfoValue]); + state[kState] |= kAfterWritePending | kAfterWriteTickInfo; + } else { + state.pendingcb--; + if ((state[kState] & kEnding) !== 0) { + finishMaybe(stream, state, true); + } + } + } else { + afterWrite(stream, state, 1, cb); + } + } +} + +function afterWriteTick({ stream, state, count, cb }) { + state[kState] &= ~kAfterWriteTickInfo; + state[kAfterWriteTickInfoValue] = null; + return afterWrite(stream, state, count, cb); +} + +function afterWrite(stream, state, count, cb) { + state[kState] &= ~kAfterWritePending; + + const needDrain = (state[kState] & (kEnding | kNeedDrain | kDestroyed)) === kNeedDrain && state.length === 0; + if (needDrain) { 
+ state[kState] &= ~kNeedDrain; + stream.emit("drain"); + } + + while (count-- > 0) { + state.pendingcb--; + cb(null); + } + + if ((state[kState] & kDestroyed) !== 0) { + errorBuffer(state); + } + + if ((state[kState] & kEnding) !== 0) { + finishMaybe(stream, state, true); + } +} + +// If there's something in the buffer waiting, then invoke callbacks. +function errorBuffer(state) { + if ((state[kState] & kWriting) !== 0) { + return; + } + + if ((state[kState] & kBuffered) !== 0) { + for (let n = state.bufferedIndex; n < state.buffered.length; ++n) { + const { chunk, callback } = state[kBufferedValue][n]; + const len = (state[kState] & kObjectMode) !== 0 ? 1 : chunk.length; + state.length -= len; + callback(state.errored ?? $ERR_STREAM_DESTROYED("write")); + } + } + + callFinishedCallbacks(state, state.errored ?? $ERR_STREAM_DESTROYED("end")); + + resetBuffer(state); +} + +// If there's something in the buffer waiting, then process it. +function clearBuffer(stream, state) { + if ( + (state[kState] & (kDestroyed | kBufferProcessing | kCorked | kBuffered | kConstructed)) !== + (kBuffered | kConstructed) + ) { + return; + } + + const objectMode = (state[kState] & kObjectMode) !== 0; + const { [kBufferedValue]: buffered, bufferedIndex } = state; + const bufferedLength = buffered.length - bufferedIndex; + + if (!bufferedLength) { + return; + } + + let i = bufferedIndex; + + state[kState] |= kBufferProcessing; + if (bufferedLength > 1 && stream._writev) { + state.pendingcb -= bufferedLength - 1; + + const callback = + (state[kState] & kAllNoop) !== 0 + ? nop + : err => { + for (let n = i; n < buffered.length; ++n) { + buffered[n].callback(err); + } + }; + // Make a copy of `buffered` if it's going to be used by `callback` above, + // since `doWrite` will mutate the array. + const chunks = (state[kState] & kAllNoop) !== 0 && i === 0 ? 
buffered : ArrayPrototypeSlice.$call(buffered, i); + chunks.allBuffers = (state[kState] & kAllBuffers) !== 0; + + doWrite(stream, state, true, state.length, chunks, "", callback); + + resetBuffer(state); + } else { + do { + const { chunk, encoding, callback } = buffered[i]; + buffered[i++] = null; + const len = objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, callback); + } while (i < buffered.length && (state[kState] & kWriting) === 0); + + if (i === buffered.length) { + resetBuffer(state); + } else if (i > 256) { + buffered.splice(0, i); + state.bufferedIndex = 0; + } else { + state.bufferedIndex = i; + } + } + state[kState] &= ~kBufferProcessing; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + if (this._writev) { + this._writev([{ chunk, encoding }], cb); + } else { + throw $ERR_METHOD_NOT_IMPLEMENTED("_write()"); + } +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + const state = this._writableState; + + if (typeof chunk === "function") { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === "function") { + cb = encoding; + encoding = null; + } + + let err; + + if (chunk != null) { + const ret = _write(this, chunk, encoding); + if (Error.isError(ret)) { + err = ret; + } + } + + // .end() fully uncorks. + if ((state[kState] & kCorked) !== 0) { + state.corked = 1; + this.uncork(); + } + + if (err) { + // Do nothing... + } else if ((state[kState] & (kEnding | kErrored)) === 0) { + // This is forgiving in terms of unnecessary calls to end() and can hide + // logic errors. However, usually such errors are harmless and causing a + // hard error can be disproportionately destructive. It is not always + // trivial for the user to determine whether end() needs to be called + // or not. 
+ + state[kState] |= kEnding; + finishMaybe(this, state, true); + state[kState] |= kEnded; + } else if ((state[kState] & kFinished) !== 0) { + err = $ERR_STREAM_ALREADY_FINISHED("end"); + } else if ((state[kState] & kDestroyed) !== 0) { + err = $ERR_STREAM_DESTROYED("end"); + } + + if (typeof cb === "function") { + if (err) { + process.nextTick(cb, err); + } else if ((state[kState] & kErrored) !== 0) { + process.nextTick(cb, state[kErroredValue]); + } else if ((state[kState] & kFinished) !== 0) { + process.nextTick(cb, null); + } else { + state[kState] |= kOnFinished; + state[kOnFinishedValue] ??= []; + state[kOnFinishedValue].push(cb); + } + } + + return this; +}; + +function needFinish(state) { + return ( + // State is ended && constructed but not destroyed, finished, writing, errorEmitted or closedEmitted + (state[kState] & + (kEnding | + kDestroyed | + kConstructed | + kFinished | + kWriting | + kErrorEmitted | + kCloseEmitted | + kErrored | + kBuffered)) === + (kEnding | kConstructed) && state.length === 0 + ); +} + +function onFinish(stream, state, err) { + if ((state[kState] & kPrefinished) !== 0) { + errorOrDestroy(stream, err ?? $ERR_MULTIPLE_CALLBACK()); + return; + } + state.pendingcb--; + if (err) { + callFinishedCallbacks(state, err); + errorOrDestroy(stream, err, (state[kState] & kSync) !== 0); + } else if (needFinish(state)) { + state[kState] |= kPrefinished; + stream.emit("prefinish"); + // Backwards compat. Don't check state.sync here. + // Some streams assume 'finish' will be emitted + // asynchronously relative to _final callback. 
+ state.pendingcb++; + process.nextTick(finish, stream, state); + } +} + +function prefinish(stream, state) { + if ((state[kState] & (kPrefinished | kFinalCalled)) !== 0) { + return; + } + + if (typeof stream._final === "function" && (state[kState] & kDestroyed) === 0) { + state[kState] |= kFinalCalled | kSync; + state.pendingcb++; + + try { + stream._final(err => onFinish(stream, state, err)); + } catch (err) { + onFinish(stream, state, err); + } + + state[kState] &= ~kSync; + } else { + state[kState] |= kFinalCalled | kPrefinished; + stream.emit("prefinish"); + } +} + +function finishMaybe(stream, state, sync?) { + if (needFinish(state)) { + prefinish(stream, state); + if (state.pendingcb === 0) { + if (sync) { + state.pendingcb++; + process.nextTick( + (stream, state) => { + if (needFinish(state)) { + finish(stream, state); + } else { + state.pendingcb--; + } + }, + stream, + state, + ); + } else if (needFinish(state)) { + state.pendingcb++; + finish(stream, state); + } + } + } +} + +function finish(stream, state) { + state.pendingcb--; + state[kState] |= kFinished; + + callFinishedCallbacks(state, null); + + stream.emit("finish"); + + if ((state[kState] & kAutoDestroy) !== 0) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well. + const rState = stream._readableState; + const autoDestroy = + !rState || + (rState.autoDestroy && + // We don't expect the readable to ever 'end' + // if readable is explicitly set to false. 
+ (rState.endEmitted || rState.readable === false)); + if (autoDestroy) { + stream.destroy(); + } + } +} + +function callFinishedCallbacks(state, err) { + if ((state[kState] & kOnFinished) === 0) { + return; + } + + const onfinishCallbacks = state[kOnFinishedValue]; + state[kOnFinishedValue] = null; + state[kState] &= ~kOnFinished; + for (let i = 0; i < onfinishCallbacks.length; i++) { + onfinishCallbacks[i](err); + } +} + +ObjectDefineProperties(Writable.prototype, { + closed: { + __proto__: null, + get() { + return this._writableState ? (this._writableState[kState] & kClosed) !== 0 : false; + }, + }, + + destroyed: { + __proto__: null, + get() { + return this._writableState ? (this._writableState[kState] & kDestroyed) !== 0 : false; + }, + set(value) { + // Backward compatibility, the user is explicitly managing destroyed. + if (!this._writableState) return; + + if (value) this._writableState[kState] |= kDestroyed; + else this._writableState[kState] &= ~kDestroyed; + }, + }, + + writable: { + __proto__: null, + get() { + const w = this._writableState; + // w.writable === false means that this is part of a Duplex stream + // where the writable side was disabled upon construction. + // Compat. The user might manually disable writable side through + // deprecated setter. + return !!w && w.writable !== false && (w[kState] & (kEnding | kEnded | kDestroyed | kErrored)) === 0; + }, + set(val) { + // Backwards compatible. + if (this._writableState) { + this._writableState.writable = !!val; + } + }, + }, + + writableFinished: { + __proto__: null, + get() { + const state = this._writableState; + return state ? (state[kState] & kFinished) !== 0 : false; + }, + }, + + writableObjectMode: { + __proto__: null, + get() { + const state = this._writableState; + return state ? 
(state[kState] & kObjectMode) !== 0 : false; + }, + }, + + writableBuffer: { + __proto__: null, + get() { + const state = this._writableState; + return state && state.getBuffer(); + }, + }, + + writableEnded: { + __proto__: null, + get() { + const state = this._writableState; + return state ? (state[kState] & kEnding) !== 0 : false; + }, + }, + + writableNeedDrain: { + __proto__: null, + get() { + const state = this._writableState; + return state ? (state[kState] & (kDestroyed | kEnding | kNeedDrain)) === kNeedDrain : false; + }, + }, + + writableHighWaterMark: { + __proto__: null, + get() { + const state = this._writableState; + return state?.highWaterMark; + }, + }, + + writableCorked: { + __proto__: null, + get() { + const state = this._writableState; + return state ? state.corked : 0; + }, + }, + + writableLength: { + __proto__: null, + get() { + const state = this._writableState; + return state?.length; + }, + }, + + errored: { + __proto__: null, + enumerable: false, + get() { + const state = this._writableState; + return state ? state.errored : null; + }, + }, + + writableAborted: { + __proto__: null, + get: function () { + const state = this._writableState; + return ( + (state[kState] & (kHasWritable | kWritable)) !== kHasWritable && + (state[kState] & (kDestroyed | kErrored)) !== 0 && + (state[kState] & kFinished) === 0 + ); + }, + }, +}); + +const destroy = destroyImpl.destroy; +Writable.prototype.destroy = function (err, cb) { + const state = this._writableState; + + // Invoke pending callbacks. 
+ if ((state[kState] & (kBuffered | kOnFinished)) !== 0 && (state[kState] & kDestroyed) === 0) { + process.nextTick(errorBuffer, state); + } + + destroy.$call(this, err, cb); + return this; +}; + +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; + +Writable.prototype[EE.captureRejectionSymbol] = function (err) { + this.destroy(err); +}; + +// Lazy to avoid circular references +let webStreamsAdapters; +function lazyWebStreams() { + if (webStreamsAdapters === undefined) webStreamsAdapters = require("internal/webstreams_adapters"); + return webStreamsAdapters; +} + +Writable.fromWeb = function (writableStream, options) { + return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options); +}; + +Writable.toWeb = function (streamWritable) { + return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable); +}; + +Writable.prototype[SymbolAsyncDispose] = function () { + let error; + if (!this.destroyed) { + error = this.writableFinished ? null : $makeAbortError(); + this.destroy(error); + } + return new Promise((resolve, reject) => + eos(this, err => (err && err.name !== "AbortError" ? 
reject(err) : resolve(null))), + ); +}; + +export default Writable; diff --git a/src/js/internal/webstreams_adapters.ts b/src/js/internal/webstreams_adapters.ts new file mode 100644 index 0000000000..e8931e1ccb --- /dev/null +++ b/src/js/internal/webstreams_adapters.ts @@ -0,0 +1,785 @@ +"use strict"; + +const { + SafePromiseAll, + SafeSet, + TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteOffset, + TypedArrayPrototypeGetByteLength, +} = require("internal/primordials"); + +const Writable = require("internal/streams/writable"); +const Readable = require("internal/streams/readable"); +const Duplex = require("internal/streams/duplex"); +const { destroyer } = require("internal/streams/destroy"); +const { isDestroyed, isReadable, isWritable, isWritableEnded } = require("internal/streams/utils"); +const { Buffer } = require("node:buffer"); +const { kEmptyObject, kGetNativeReadableProto } = require("internal/shared"); +const { validateBoolean, validateObject } = require("internal/validators"); +const finished = require("internal/streams/end-of-stream"); + +const normalizeEncoding = $newZigFunction("node_util_binding.zig", "normalizeEncoding", 1); + +const ArrayPrototypeFilter = Array.prototype.filter; +const ArrayPrototypeMap = Array.prototype.map; +const ObjectEntries = Object.entries; +const PromiseWithResolvers = Promise.withResolvers.bind(Promise); +const PromiseResolve = Promise.resolve.bind(Promise); +const PromisePrototypeThen = Promise.prototype.then; +const SafePromisePrototypeFinally = Promise.prototype.finally; + +const constants_zlib = process.binding("constants").zlib; + +// +// +const transferToNativeReadable = $newCppFunction("ReadableStream.cpp", "jsFunctionTransferToNativeReadableStream", 1); + +function getNativeReadableStream(Readable, stream, options) { + const ptr = stream.$bunNativePtr; + if (!ptr || ptr === -1) { + $debug("no native readable stream"); + return undefined; + } + const type = stream.$bunNativeType; + $assert(typeof type === 
"number", "Invalid native type"); + $assert(typeof ptr === "object", "Invalid native ptr"); + + const NativeReadable = require("node:stream")[kGetNativeReadableProto](type); + // https://github.com/oven-sh/bun/pull/12801 + // https://github.com/oven-sh/bun/issues/9555 + // There may be a ReadableStream.Strong handle to the ReadableStream. + // We can't update those handles to point to the NativeReadable from JS + // So we instead mark it as no longer usable, and create a new NativeReadable + transferToNativeReadable(stream); + + return new NativeReadable(ptr, options); +} + +class ReadableFromWeb extends Readable { + #reader; + #closed; + #pendingChunks; + #stream; + + constructor(options, stream) { + const { objectMode, highWaterMark, encoding, signal } = options; + super({ + objectMode, + highWaterMark, + encoding, + signal, + }); + this.#pendingChunks = []; + this.#reader = undefined; + this.#stream = stream; + this.#closed = false; + } + + #drainPending() { + var pendingChunks = this.#pendingChunks, + pendingChunksI = 0, + pendingChunksCount = pendingChunks.length; + + for (; pendingChunksI < pendingChunksCount; pendingChunksI++) { + const chunk = pendingChunks[pendingChunksI]; + pendingChunks[pendingChunksI] = undefined; + if (!this.push(chunk, undefined)) { + this.#pendingChunks = pendingChunks.slice(pendingChunksI + 1); + return true; + } + } + + if (pendingChunksCount > 0) { + this.#pendingChunks = []; + } + + return false; + } + + #handleDone(reader) { + reader.releaseLock(); + this.#reader = undefined; + this.#closed = true; + this.push(null); + return; + } + + async _read() { + $debug("ReadableFromWeb _read()", this.__id); + var stream = this.#stream, + reader = this.#reader; + if (stream) { + reader = this.#reader = stream.getReader(); + this.#stream = undefined; + } else if (this.#drainPending()) { + return; + } + + var deferredError; + try { + do { + var done = false, + value; + const firstResult = reader.readMany(); + + if ($isPromise(firstResult)) { 
+ ({ done, value } = await firstResult); + + if (this.#closed) { + this.#pendingChunks.push(...value); + return; + } + } else { + ({ done, value } = firstResult); + } + + if (done) { + this.#handleDone(reader); + return; + } + + if (!this.push(value[0])) { + this.#pendingChunks = value.slice(1); + return; + } + + for (let i = 1, count = value.length; i < count; i++) { + if (!this.push(value[i])) { + this.#pendingChunks = value.slice(i + 1); + return; + } + } + } while (!this.#closed); + } catch (e) { + deferredError = e; + } finally { + if (deferredError) throw deferredError; + } + } + + _destroy(error, callback) { + if (!this.#closed) { + var reader = this.#reader; + if (reader) { + this.#reader = undefined; + reader.cancel(error).finally(() => { + this.#closed = true; + callback(error); + }); + } + + return; + } + try { + callback(error); + } catch (error) { + globalThis.reportError(error); + } + } +} +// +// + +const encoder = new TextEncoder(); + +// Collect all negative (error) ZLIB codes and Z_NEED_DICT +const ZLIB_FAILURES = new SafeSet([ + ...ArrayPrototypeFilter.$call( + ArrayPrototypeMap.$call(ObjectEntries(constants_zlib), ({ 0: code, 1: value }) => (value < 0 ? code : null)), + Boolean, + ), + "Z_NEED_DICT", +]); + +function handleKnownInternalErrors(cause: Error | null): Error | null { + switch (true) { + case cause?.code === "ERR_STREAM_PREMATURE_CLOSE": { + return $makeAbortError(undefined, { cause }); + } + case ZLIB_FAILURES.has(cause?.code): { + const error = new TypeError(undefined, { cause }); + error.code = cause.code; + return error; + } + default: + return cause; + } +} + +function newWritableStreamFromStreamWritable(streamWritable) { + // Not using the internal/streams/utils isWritableNodeStream utility + // here because it will return false if streamWritable is a Duplex + // whose writable option is false. For a Duplex that is not writable, + // we want it to pass this check but return a closed WritableStream. 
+ // We check if the given stream is a stream.Writable or http.OutgoingMessage + const checkIfWritableOrOutgoingMessage = + streamWritable && typeof streamWritable?.write === "function" && typeof streamWritable?.on === "function"; + if (!checkIfWritableOrOutgoingMessage) { + throw $ERR_INVALID_ARG_TYPE("streamWritable", "stream.Writable", streamWritable); + } + + if (isDestroyed(streamWritable) || !isWritable(streamWritable)) { + const writable = new WritableStream(); + writable.close(); + return writable; + } + + const highWaterMark = streamWritable.writableHighWaterMark; + const strategy = streamWritable.writableObjectMode ? new CountQueuingStrategy({ highWaterMark }) : { highWaterMark }; + + let controller; + let backpressurePromise; + let closed; + + function onDrain() { + if (backpressurePromise !== undefined) backpressurePromise.resolve(); + } + + const cleanup = finished(streamWritable, error => { + error = handleKnownInternalErrors(error); + + cleanup(); + // This is a protection against non-standard, legacy streams + // that happen to emit an error event again after finished is called. + streamWritable.on("error", () => {}); + if (error != null) { + if (backpressurePromise !== undefined) backpressurePromise.reject(error); + // If closed is not undefined, the error is happening + // after the WritableStream close has already started. + // We need to reject it here. 
+ if (closed !== undefined) { + closed.reject(error); + closed = undefined; + } + controller.error(error); + controller = undefined; + return; + } + + if (closed !== undefined) { + closed.resolve(); + closed = undefined; + return; + } + controller.error($makeAbortError()); + controller = undefined; + }); + + streamWritable.on("drain", onDrain); + + return new WritableStream( + { + start(c) { + controller = c; + }, + + write(chunk) { + if (streamWritable.writableNeedDrain || !streamWritable.write(chunk)) { + backpressurePromise = PromiseWithResolvers(); + return SafePromisePrototypeFinally.$call(backpressurePromise.promise, () => { + backpressurePromise = undefined; + }); + } + }, + + abort(reason) { + destroyer(streamWritable, reason); + }, + + close() { + if (closed === undefined && !isWritableEnded(streamWritable)) { + closed = PromiseWithResolvers(); + streamWritable.end(); + return closed.promise; + } + + controller = undefined; + return PromiseResolve(); + }, + }, + strategy, + ); +} + +function newStreamWritableFromWritableStream(writableStream, options = kEmptyObject) { + if (!$inheritsWritableStream(writableStream)) { + throw $ERR_INVALID_ARG_TYPE("writableStream", "WritableStream", writableStream); + } + + validateObject(options, "options"); + const { highWaterMark, decodeStrings = true, objectMode = false, signal } = options; + + validateBoolean(objectMode, "options.objectMode"); + validateBoolean(decodeStrings, "options.decodeStrings"); + + const writer = writableStream.getWriter(); + let closed = false; + + const writable = new Writable({ + highWaterMark, + objectMode, + decodeStrings, + signal, + + writev(chunks, callback) { + function done(error) { + error = error.filter(e => e); + try { + callback(error.length === 0 ? undefined : error); + } catch (error) { + // In a next tick because this is happening within + // a promise context, and if there are any errors + // thrown we don't want those to cause an unhandled + // rejection. 
Let's just escape the promise and + // handle it separately. + process.nextTick(() => destroyer(writable, error)); + } + } + + PromisePrototypeThen.$call( + writer.ready, + () => { + return PromisePrototypeThen.$call( + SafePromiseAll(chunks, data => writer.write(data.chunk)), + done, + done, + ); + }, + done, + ); + }, + + write(chunk, encoding, callback) { + if (typeof chunk === "string" && decodeStrings && !objectMode) { + const enc = normalizeEncoding(encoding); + + if (enc === "utf8") { + chunk = encoder.encode(chunk); + } else { + chunk = Buffer.from(chunk, encoding); + chunk = new Uint8Array( + TypedArrayPrototypeGetBuffer(chunk), + TypedArrayPrototypeGetByteOffset(chunk), + TypedArrayPrototypeGetByteLength(chunk), + ); + } + } + + function done(error) { + try { + callback(error); + } catch (error) { + destroyer(writable, error); + } + } + + PromisePrototypeThen.$call( + writer.ready, + () => { + return PromisePrototypeThen.$call(writer.write(chunk), done, done); + }, + done, + ); + }, + + destroy(error, callback) { + function done() { + try { + callback(error); + } catch (error) { + // In a next tick because this is happening within + // a promise context, and if there are any errors + // thrown we don't want those to cause an unhandled + // rejection. Let's just escape the promise and + // handle it separately. + process.nextTick(() => { + throw error; + }); + } + } + + if (!closed) { + if (error != null) { + PromisePrototypeThen.$call(writer.abort(error), done, done); + } else { + PromisePrototypeThen.$call(writer.close(), done, done); + } + return; + } + + done(); + }, + + final(callback) { + function done(error) { + try { + callback(error); + } catch (error) { + // In a next tick because this is happening within + // a promise context, and if there are any errors + // thrown we don't want those to cause an unhandled + // rejection. Let's just escape the promise and + // handle it separately. 
+ process.nextTick(() => destroyer(writable, error)); + } + } + + if (!closed) { + PromisePrototypeThen.$call(writer.close(), done, done); + } + }, + }); + + PromisePrototypeThen.$call( + writer.closed, + () => { + // If the WritableStream closes before the stream.Writable has been + // ended, we signal an error on the stream.Writable. + closed = true; + if (!isWritableEnded(writable)) destroyer(writable, $ERR_STREAM_PREMATURE_CLOSE()); + }, + error => { + // If the WritableStream errors before the stream.Writable has been + // destroyed, signal an error on the stream.Writable. + closed = true; + destroyer(writable, error); + }, + ); + + return writable; +} + +function newReadableStreamFromStreamReadable(streamReadable, options = kEmptyObject) { + // Not using the internal/streams/utils isReadableNodeStream utility + // here because it will return false if streamReadable is a Duplex + // whose readable option is false. For a Duplex that is not readable, + // we want it to pass this check but return a closed ReadableStream. 
+ if (typeof streamReadable?._readableState !== "object") { + throw $ERR_INVALID_ARG_TYPE("streamReadable", "stream.Readable", streamReadable); + } + + if (isDestroyed(streamReadable) || !isReadable(streamReadable)) { + const readable = new ReadableStream(); + readable.cancel(); + return readable; + } + + const objectMode = streamReadable.readableObjectMode; + const highWaterMark = streamReadable.readableHighWaterMark; + + const evaluateStrategyOrFallback = strategy => { + // If there is a strategy available, use it + if (strategy) return strategy; + + if (objectMode) { + // When running in objectMode explicitly but no strategy, we just fall + // back to CountQueuingStrategy + return new CountQueuingStrategy({ highWaterMark }); + } + + return new ByteLengthQueuingStrategy({ highWaterMark }); + }; + + const strategy = evaluateStrategyOrFallback(options?.strategy); + + let controller; + let wasCanceled = false; + + function onData(chunk) { + // Copy the Buffer to detach it from the pool. + if (Buffer.isBuffer(chunk) && !objectMode) chunk = new Uint8Array(chunk); + controller.enqueue(chunk); + if (controller.desiredSize <= 0) streamReadable.pause(); + } + + streamReadable.pause(); + + const cleanup = finished(streamReadable, error => { + error = handleKnownInternalErrors(error); + + cleanup(); + // This is a protection against non-standard, legacy streams + // that happen to emit an error event again after finished is called. 
+ streamReadable.on("error", () => {}); + if (error) return controller.error(error); + // Was already canceled + if (wasCanceled) { + return; + } + controller.close(); + }); + + streamReadable.on("data", onData); + + return new ReadableStream( + { + start(c) { + controller = c; + }, + + pull() { + streamReadable.resume(); + }, + + cancel(reason) { + wasCanceled = true; + destroyer(streamReadable, reason); + }, + }, + strategy, + ); +} + +function newStreamReadableFromReadableStream(readableStream, options = kEmptyObject) { + if (!$inheritsReadableStream(readableStream)) { + throw $ERR_INVALID_ARG_TYPE("readableStream", "ReadableStream", readableStream); + } + + validateObject(options, "options"); + const { highWaterMark, encoding, objectMode = false, signal } = options; + + if (encoding !== undefined && !Buffer.isEncoding(encoding)) + throw $ERR_INVALID_ARG_VALUE("options.encoding", encoding); + validateBoolean(objectMode, "options.objectMode"); + + const nativeStream = getNativeReadableStream(Readable, readableStream, options); + + return ( + nativeStream || + new ReadableFromWeb( + { + highWaterMark, + encoding, + objectMode, + signal, + }, + readableStream, + ) + ); +} + +function newReadableWritablePairFromDuplex(duplex) { + // Not using the internal/streams/utils isWritableNodeStream and + // isReadableNodeStream utilities here because they will return false + // if the duplex was created with writable or readable options set to + // false. Instead, we'll check the readable and writable state after + // and return closed WritableStream or closed ReadableStream as + // necessary. 
+ if (typeof duplex?._writableState !== "object" || typeof duplex?._readableState !== "object") { + throw $ERR_INVALID_ARG_TYPE("duplex", "stream.Duplex", duplex); + } + + if (isDestroyed(duplex)) { + const writable = new WritableStream(); + const readable = new ReadableStream(); + writable.close(); + readable.cancel(); + return { readable, writable }; + } + + const writable = isWritable(duplex) ? newWritableStreamFromStreamWritable(duplex) : new WritableStream(); + + if (!isWritable(duplex)) writable.close(); + + const readable = isReadable(duplex) ? newReadableStreamFromStreamReadable(duplex) : new ReadableStream(); + + if (!isReadable(duplex)) readable.cancel(); + + return { writable, readable }; +} + +function newStreamDuplexFromReadableWritablePair(pair = kEmptyObject, options = kEmptyObject) { + validateObject(pair, "pair"); + const { readable: readableStream, writable: writableStream } = pair; + + if (!$inheritsReadableStream(readableStream)) { + throw $ERR_INVALID_ARG_TYPE("pair.readable", "ReadableStream", readableStream); + } + if (!$inheritsWritableStream(writableStream)) { + throw $ERR_INVALID_ARG_TYPE("pair.writable", "WritableStream", writableStream); + } + + validateObject(options, "options"); + const { allowHalfOpen = false, objectMode = false, encoding, decodeStrings = true, highWaterMark, signal } = options; + + validateBoolean(objectMode, "options.objectMode"); + if (encoding !== undefined && !Buffer.isEncoding(encoding)) + throw $ERR_INVALID_ARG_VALUE(encoding, "options.encoding"); + + const writer = writableStream.getWriter(); + const reader = readableStream.getReader(); + let writableClosed = false; + let readableClosed = false; + + const duplex = new Duplex({ + allowHalfOpen, + highWaterMark, + objectMode, + encoding, + decodeStrings, + signal, + + writev(chunks, callback) { + function done(error) { + error = error.filter(e => e); + try { + callback(error.length === 0 ? 
undefined : error); + } catch (error) { + // In a next tick because this is happening within + // a promise context, and if there are any errors + // thrown we don't want those to cause an unhandled + // rejection. Let's just escape the promise and + // handle it separately. + process.nextTick(() => destroyer(duplex, error)); + } + } + + PromisePrototypeThen.$call( + writer.ready, + () => { + return PromisePrototypeThen.$call( + SafePromiseAll(chunks, data => writer.write(data.chunk)), + done, + done, + ); + }, + done, + ); + }, + + write(chunk, encoding, callback) { + if (typeof chunk === "string" && decodeStrings && !objectMode) { + const enc = normalizeEncoding(encoding); + + if (enc === "utf8") { + chunk = encoder.encode(chunk); + } else { + chunk = Buffer.from(chunk, encoding); + chunk = new Uint8Array( + TypedArrayPrototypeGetBuffer(chunk), + TypedArrayPrototypeGetByteOffset(chunk), + TypedArrayPrototypeGetByteLength(chunk), + ); + } + } + + function done(error) { + try { + callback(error); + } catch (error) { + destroyer(duplex, error); + } + } + + PromisePrototypeThen.$call( + writer.ready, + () => { + return PromisePrototypeThen.$call(writer.write(chunk), done, done); + }, + done, + ); + }, + + final(callback) { + function done(error) { + try { + callback(error); + } catch (error) { + // In a next tick because this is happening within + // a promise context, and if there are any errors + // thrown we don't want those to cause an unhandled + // rejection. Let's just escape the promise and + // handle it separately. 
+ process.nextTick(() => destroyer(duplex, error)); + } + } + + if (!writableClosed) { + PromisePrototypeThen.$call(writer.close(), done, done); + } + }, + + read() { + PromisePrototypeThen.$call( + reader.read(), + chunk => { + if (chunk.done) { + duplex.push(null); + } else { + duplex.push(chunk.value); + } + }, + error => destroyer(duplex, error), + ); + }, + + destroy(error, callback) { + function done() { + try { + callback(error); + } catch (error) { + // In a next tick because this is happening within + // a promise context, and if there are any errors + // thrown we don't want those to cause an unhandled + // rejection. Let's just escape the promise and + // handle it separately. + process.nextTick(() => { + throw error; + }); + } + } + + async function closeWriter() { + if (!writableClosed) await writer.abort(error); + } + + async function closeReader() { + if (!readableClosed) await reader.cancel(error); + } + + if (!writableClosed || !readableClosed) { + PromisePrototypeThen.$call(SafePromiseAll([closeWriter(), closeReader()]), done, done); + return; + } + + done(); + }, + }); + + PromisePrototypeThen.$call( + writer.closed, + () => { + writableClosed = true; + if (!isWritableEnded(duplex)) destroyer(duplex, $ERR_STREAM_PREMATURE_CLOSE()); + }, + error => { + writableClosed = true; + readableClosed = true; + destroyer(duplex, error); + }, + ); + + PromisePrototypeThen.$call( + reader.closed, + () => { + readableClosed = true; + }, + error => { + writableClosed = true; + readableClosed = true; + destroyer(duplex, error); + }, + ); + + return duplex; +} + +export default { + newWritableStreamFromStreamWritable, + newReadableStreamFromStreamReadable, + newStreamWritableFromWritableStream, + newStreamReadableFromReadableStream, + newReadableWritablePairFromDuplex, + newStreamDuplexFromReadableWritablePair, + _ReadableFromWeb: ReadableFromWeb, +}; diff --git a/src/js/node/_stream_duplex.ts b/src/js/node/_stream_duplex.ts new file mode 100644 index 
0000000000..ab61146db8 --- /dev/null +++ b/src/js/node/_stream_duplex.ts @@ -0,0 +1,3 @@ +"use strict"; + +export default require("internal/streams/duplex"); diff --git a/src/js/node/_stream_passthrough.ts b/src/js/node/_stream_passthrough.ts new file mode 100644 index 0000000000..357e647150 --- /dev/null +++ b/src/js/node/_stream_passthrough.ts @@ -0,0 +1,3 @@ +"use strict"; + +export default require("internal/streams/passthrough"); diff --git a/src/js/node/_stream_readable.ts b/src/js/node/_stream_readable.ts new file mode 100644 index 0000000000..b429f60158 --- /dev/null +++ b/src/js/node/_stream_readable.ts @@ -0,0 +1,3 @@ +"use strict"; + +export default require("internal/streams/readable"); diff --git a/src/js/node/_stream_transform.ts b/src/js/node/_stream_transform.ts new file mode 100644 index 0000000000..3ae986e613 --- /dev/null +++ b/src/js/node/_stream_transform.ts @@ -0,0 +1,3 @@ +"use strict"; + +export default require("internal/streams/transform"); diff --git a/src/js/node/_stream_wrap.ts b/src/js/node/_stream_wrap.ts new file mode 100644 index 0000000000..c754d0c71c --- /dev/null +++ b/src/js/node/_stream_wrap.ts @@ -0,0 +1,5 @@ +"use strict"; + +process.emitWarning("The _stream_wrap module is deprecated.", "DeprecationWarning", "DEP0125"); + +export default require("node:stream"); diff --git a/src/js/node/_stream_writable.ts b/src/js/node/_stream_writable.ts new file mode 100644 index 0000000000..7101582faa --- /dev/null +++ b/src/js/node/_stream_writable.ts @@ -0,0 +1,3 @@ +"use strict"; + +export default require("internal/streams/writable"); diff --git a/src/js/node/child_process.ts b/src/js/node/child_process.ts index fee02dcbd1..b5a7826351 100644 --- a/src/js/node/child_process.ts +++ b/src/js/node/child_process.ts @@ -1563,7 +1563,7 @@ function abortChildProcess(child, killSignal, reason) { if (!child) return; try { if (child.kill(killSignal)) { - child.emit("error", new AbortError(undefined, { cause: reason })); + child.emit("error", 
$makeAbortError(undefined, { cause: reason })); } } catch (err) { child.emit("error", err); @@ -1610,7 +1610,7 @@ class ShimmedStdioOutStream extends EventEmitter { function validateMaxBuffer(maxBuffer) { if (maxBuffer != null && !(typeof maxBuffer === "number" && maxBuffer >= 0)) { - throw ERR_OUT_OF_RANGE("options.maxBuffer", "a positive number", maxBuffer); + throw $ERR_OUT_OF_RANGE("options.maxBuffer", "a positive number", maxBuffer); } } @@ -1628,7 +1628,7 @@ function validateArgumentsNullCheck(args, propName) { function validateTimeout(timeout) { if (timeout != null && !(NumberIsInteger(timeout) && timeout >= 0)) { - throw ERR_OUT_OF_RANGE("timeout", "an unsigned integer", timeout); + throw $ERR_OUT_OF_RANGE("timeout", "an unsigned integer", timeout); } } @@ -1698,18 +1698,6 @@ var Error = globalThis.Error; var TypeError = globalThis.TypeError; var RangeError = globalThis.RangeError; -// Node uses a slightly different abort error than standard DOM. See: https://github.com/nodejs/node/blob/main/lib/internal/errors.js -class AbortError extends Error { - code = "ABORT_ERR"; - name = "AbortError"; - constructor(message = "The operation was aborted", options = undefined) { - if (options !== undefined && typeof options !== "object") { - throw $ERR_INVALID_ARG_TYPE("options", "object", options); - } - super(message, options); - } -} - function genericNodeError(message, options) { const err = new Error(message); err.code = options.code; @@ -1849,29 +1837,6 @@ function genericNodeError(message, options) { // TypeError // ); -function ERR_OUT_OF_RANGE(str, range, input, replaceDefaultBoolean = false) { - // Node implementation: - // assert(range, 'Missing "range" argument'); - // let msg = replaceDefaultBoolean - // ? 
str - // : `The value of "${str}" is out of range.`; - // let received; - // if (NumberIsInteger(input) && MathAbs(input) > 2 ** 32) { - // received = addNumericalSeparator(String(input)); - // } else if (typeof input === "bigint") { - // received = String(input); - // if (input > 2n ** 32n || input < -(2n ** 32n)) { - // received = addNumericalSeparator(received); - // } - // received += "n"; - // } else { - // received = lazyInternalUtilInspect().inspect(input); - // } - // msg += ` It must be ${range}. Received ${received}`; - // return new RangeError(msg); - return new RangeError(`The value of ${str} is out of range. It must be ${range}. Received ${input}`); -} - function ERR_CHILD_PROCESS_STDIO_MAXBUFFER(stdio) { const err = Error(`${stdio} maxBuffer length exceeded`); err.code = "ERR_CHILD_PROCESS_STDIO_MAXBUFFER"; diff --git a/src/js/node/dgram.ts b/src/js/node/dgram.ts index 4a9ecc1c4a..0ed940d47f 100644 --- a/src/js/node/dgram.ts +++ b/src/js/node/dgram.ts @@ -755,7 +755,7 @@ Socket.prototype.addMembership = function (multicastAddress, interfaceAddress) { throwNotImplemented("addMembership", 10381); /* if (!multicastAddress) { - throw new ERR_MISSING_ARGS('multicastAddress'); + throw $ERR_MISSING_ARGS('multicastAddress'); } const { handle } = this[kStateSymbol]; @@ -770,7 +770,7 @@ Socket.prototype.dropMembership = function (multicastAddress, interfaceAddress) throwNotImplemented("dropMembership", 10381); /* if (!multicastAddress) { - throw new ERR_MISSING_ARGS('multicastAddress'); + throw $ERR_MISSING_ARGS('multicastAddress'); } const { handle } = this[kStateSymbol]; diff --git a/src/js/node/diagnostics_channel.ts b/src/js/node/diagnostics_channel.ts index 2aa78dbb12..dfdca8b13d 100644 --- a/src/js/node/diagnostics_channel.ts +++ b/src/js/node/diagnostics_channel.ts @@ -12,8 +12,8 @@ const ArrayPrototypeSplice = Array.prototype.splice; const ObjectGetPrototypeOf = Object.getPrototypeOf; const ObjectSetPrototypeOf = Object.setPrototypeOf; const 
SymbolHasInstance = Symbol.hasInstance; -const PromiseResolve = Promise.resolve; -const PromiseReject = Promise.reject; +const PromiseResolve = Promise.resolve.bind(Promise); +const PromiseReject = Promise.reject.bind(Promise); const PromisePrototypeThen = (promise, onFulfilled, onRejected) => promise.then(onFulfilled, onRejected); // TODO: https://github.com/nodejs/node/blob/fb47afc335ef78a8cef7eac52b8ee7f045300696/src/node_util.h#L13 diff --git a/src/js/node/dns.ts b/src/js/node/dns.ts index 0900318df4..de7af10956 100644 --- a/src/js/node/dns.ts +++ b/src/js/node/dns.ts @@ -322,7 +322,7 @@ function lookup(hostname, options, callback) { function lookupService(address, port, callback) { if (arguments.length < 3) { - throw $ERR_MISSING_ARGS('The "address", "port", and "callback" arguments must be specified'); + throw $ERR_MISSING_ARGS("address", "port", "callback"); } if (typeof callback !== "function") { @@ -756,7 +756,7 @@ const promises = { lookupService(address, port) { if (arguments.length !== 2) { - throw $ERR_MISSING_ARGS('The "address" and "port" arguments must be specified'); + throw $ERR_MISSING_ARGS("address", "port"); } validateString(address); diff --git a/src/js/node/domain.ts b/src/js/node/domain.ts index 2789b87792..23198f15f7 100644 --- a/src/js/node/domain.ts +++ b/src/js/node/domain.ts @@ -1,5 +1,5 @@ +// Import Events let EventEmitter; -const { ERR_UNHANDLED_ERROR } = require("internal/errors"); const ObjectDefineProperty = Object.defineProperty; @@ -12,7 +12,7 @@ domain.createDomain = domain.create = function () { var d = new EventEmitter(); function emitError(e) { - e ||= ERR_UNHANDLED_ERROR(); + e ||= $ERR_UNHANDLED_ERROR(); if (typeof e === "object") { e.domainEmitter = this; ObjectDefineProperty(e, "domain", { diff --git a/src/js/node/events.ts b/src/js/node/events.ts index 64a14f8edb..268b198413 100644 --- a/src/js/node/events.ts +++ b/src/js/node/events.ts @@ -23,7 +23,6 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 
SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -const { ERR_UNHANDLED_ERROR } = require("internal/errors"); const { validateObject, validateInteger, @@ -118,7 +117,7 @@ function emitError(emitter, args) { } // At least give some kind of context to the user - const err = ERR_UNHANDLED_ERROR(stringifiedEr); + const err = $ERR_UNHANDLED_ERROR(stringifiedEr); err.context = er; throw err; // Unhandled 'error' event } @@ -424,7 +423,7 @@ function once(emitter, type, options = kEmptyObject) { var signal = options?.signal; validateAbortSignal(signal, "options.signal"); if (signal?.aborted) { - throw new AbortError(undefined, { cause: signal?.reason }); + throw $makeAbortError(undefined, { cause: signal?.reason }); } const { resolve, reject, promise } = $newPromiseCapability(Promise); const errorListener = err => { @@ -452,7 +451,7 @@ function once(emitter, type, options = kEmptyObject) { function abortListener() { eventTargetAgnosticRemoveListener(emitter, type, resolver); eventTargetAgnosticRemoveListener(emitter, "error", errorListener); - reject(new AbortError(undefined, { cause: signal?.reason })); + reject($makeAbortError(undefined, { cause: signal?.reason })); } if (signal != null) { eventTargetAgnosticAddListener(signal, "abort", abortListener, { once: true }); @@ -471,7 +470,7 @@ function on(emitter, event, options = kEmptyObject) { validateObject(options, "options"); const signal = options.signal; validateAbortSignal(signal, "options.signal"); - if (signal?.aborted) throw new AbortError(undefined, { cause: signal?.reason }); + if (signal?.aborted) throw $makeAbortError(undefined, { cause: signal?.reason }); // Support both highWaterMark and highWatermark for backward compatibility const highWatermark = options.highWaterMark ?? options.highWatermark ?? 
Number.MAX_SAFE_INTEGER; validateInteger(highWatermark, "options.highWaterMark", 1); @@ -578,7 +577,7 @@ function on(emitter, event, options = kEmptyObject) { return iterator; function abortListener() { - errorHandler(new AbortError(undefined, { cause: signal?.reason })); + errorHandler($makeAbortError(undefined, { cause: signal?.reason })); } function eventHandler(value) { @@ -676,10 +675,28 @@ function listenerCount(emitter, type) { return emitter.listenerCount(type); } - return jsEventTargetGetEventListenersCount(emitter, type); + // EventTarget + const evt_count = jsEventTargetGetEventListenersCount(emitter, type); + if (evt_count !== undefined) return evt_count; + + // EventEmitter's with no `.listenerCount` + return listenerCountSlow(emitter, type); } Object.defineProperty(listenerCount, "name", { value: "listenerCount" }); +function listenerCountSlow(emitter, type) { + const events = emitter._events; + if (events !== undefined) { + const evlistener = events[type]; + if (typeof evlistener === "function") { + return 1; + } else if (evlistener !== undefined) { + return evlistener.length; + } + } + return 0; +} + function eventTargetAgnosticRemoveListener(emitter, name, listener, flags) { if (typeof emitter.removeListener === "function") { emitter.removeListener(name, listener); @@ -704,17 +721,6 @@ function eventTargetAgnosticAddListener(emitter, name, listener, flags) { } } -class AbortError extends Error { - constructor(message = "The operation was aborted", options = undefined) { - if (options !== undefined && typeof options !== "object") { - throw $ERR_INVALID_ARG_TYPE("options", "object", options); - } - super(message, options); - this.code = "ABORT_ERR"; - this.name = "AbortError"; - } -} - function checkListener(listener) { validateFunction(listener, "listener"); } diff --git a/src/js/node/fs.ts b/src/js/node/fs.ts index e237064b55..4116d1efdc 100644 --- a/src/js/node/fs.ts +++ b/src/js/node/fs.ts @@ -5,8 +5,8 @@ const promises = 
require("node:fs/promises"); const Stream = require("node:stream"); const types = require("node:util/types"); -const { ERR_INVALID_ARG_TYPE, ERR_OUT_OF_RANGE } = require("internal/errors"); const { validateInteger } = require("internal/validators"); +const { kGetNativeReadableProto } = require("internal/shared"); const NumberIsFinite = Number.isFinite; const DatePrototypeGetTime = Date.prototype.getTime; @@ -24,9 +24,7 @@ var _fs = Symbol.for("#fs"); function ensureCallback(callback) { if (!$isCallable(callback)) { - const err = new TypeError('The "cb" argument must be of type function. Received ' + typeof callback); - err.code = "ERR_INVALID_ARG_TYPE"; - throw err; + throw $ERR_INVALID_ARG_TYPE("cb", "function", callback); } return callback; @@ -722,7 +720,7 @@ function createReadStream(path, options) { return new ReadStream(path, options); } -const NativeReadable = Stream._getNativeReadableStreamPrototype(2, Stream.Readable); +const NativeReadable = Stream[kGetNativeReadableProto](2); const NativeReadablePrototype = NativeReadable.prototype; const kFs = Symbol("kFs"); const kHandle = Symbol("kHandle"); @@ -830,7 +828,7 @@ function ReadStream(this: typeof ReadStream, pathOrFd, options) { } else if (end !== Infinity) { validateInteger(end, "end", 0); if (start !== undefined && start > end) { - throw new ERR_OUT_OF_RANGE("start", `<= "end" (here: ${end})`, start); + throw $ERR_OUT_OF_RANGE("start", `<= "end" (here: ${end})`, start); } } diff --git a/src/js/node/http.ts b/src/js/node/http.ts index 61c85e2250..2068f73ecb 100644 --- a/src/js/node/http.ts +++ b/src/js/node/http.ts @@ -1,8 +1,7 @@ // Hardcoded module "node:http" const EventEmitter = require("node:events"); -const { isTypedArray } = require("node:util/types"); +const { isTypedArray, isArrayBuffer } = require("node:util/types"); const { Duplex, Readable, Writable } = require("node:stream"); -const { ERR_INVALID_PROTOCOL } = require("internal/errors"); const { isPrimary } = 
require("internal/cluster/isPrimary"); const { kAutoDestroyed } = require("internal/shared"); const { urlToHttpOptions } = require("internal/url"); @@ -112,12 +111,11 @@ const kfakeSocket = Symbol("kfakeSocket"); const kEmptyBuffer = Buffer.alloc(0); function isValidTLSArray(obj) { - if (typeof obj === "string" || isTypedArray(obj) || obj instanceof ArrayBuffer || obj instanceof Blob) return true; + if (typeof obj === "string" || isTypedArray(obj) || isArrayBuffer(obj) || $inheritsBlob(obj)) return true; if (Array.isArray(obj)) { for (var i = 0; i < obj.length; i++) { const item = obj[i]; - if (typeof item !== "string" && !isTypedArray(item) && !(item instanceof ArrayBuffer) && !(item instanceof Blob)) - return false; + if (typeof item !== "string" && !isTypedArray(item) && !isArrayBuffer(item) && !$inheritsBlob(item)) return false; } return true; } @@ -1820,7 +1818,7 @@ class ClientRequest extends OutgoingMessage { expectedProtocol = this.agent.protocol; } if (protocol !== expectedProtocol) { - throw ERR_INVALID_PROTOCOL(protocol, expectedProtocol); + throw $ERR_INVALID_PROTOCOL(protocol, expectedProtocol); } this.#protocol = protocol; diff --git a/src/js/node/http2.ts b/src/js/node/http2.ts index f7f5418150..5ce20ef257 100644 --- a/src/js/node/http2.ts +++ b/src/js/node/http2.ts @@ -143,6 +143,10 @@ function onStreamDrain() { if (response !== undefined) response.emit("drain"); } +function onStreamAbortedResponse() { + // no-op for now +} + function onStreamAbortedRequest() { const request = this[kRequest]; if (request !== undefined && request[kState].closed === false) { @@ -388,6 +392,7 @@ class Http2ServerResponse extends Stream { this.writable = true; this.req = stream[kRequest]; stream.on("drain", onStreamDrain); + stream.on("aborted", onStreamAbortedResponse); stream.on("close", onStreamCloseResponse); stream.on("wantTrailers", onStreamTrailersReady); stream.on("timeout", onStreamTimeout); diff --git a/src/js/node/readline.ts b/src/js/node/readline.ts index 
7bea6f227e..07bb433a90 100644 --- a/src/js/node/readline.ts +++ b/src/js/node/readline.ts @@ -210,14 +210,6 @@ class ERR_USE_AFTER_CLOSE extends NodeError { } } -class AbortError extends Error { - code; - constructor() { - super("The operation was aborted"); - this.code = "ABORT_ERR"; - } -} - // ---------------------------------------------------------------------------- // Section: Utils // ---------------------------------------------------------------------------- @@ -2337,14 +2329,14 @@ Interface.prototype.question[promisify.custom] = function question(query, option var signal = options?.signal; if (signal && signal.aborted) { - return PromiseReject(new AbortError(undefined, { cause: signal.reason })); + return PromiseReject($makeAbortError(undefined, { cause: signal.reason })); } return new Promise((resolve, reject) => { var cb = resolve; if (signal) { var onAbort = () => { - reject(new AbortError(undefined, { cause: signal.reason })); + reject($makeAbortError(undefined, { cause: signal.reason })); }; signal.addEventListener("abort", onAbort, { once: true }); cb = answer => { @@ -2806,7 +2798,7 @@ var PromisesInterface = class Interface extends _Interface { if (signal) { validateAbortSignal(signal, "options.signal"); if (signal.aborted) { - return PromiseReject(new AbortError(undefined, { cause: signal.reason })); + return PromiseReject($makeAbortError(undefined, { cause: signal.reason })); } } const { promise, resolve, reject } = $newPromiseCapability(Promise); @@ -2814,7 +2806,7 @@ var PromisesInterface = class Interface extends _Interface { if (options?.signal) { var onAbort = () => { this[kQuestionCancel](); - reject(new AbortError(undefined, { cause: signal.reason })); + reject($makeAbortError(undefined, { cause: signal.reason })); }; signal.addEventListener("abort", onAbort, { once: true }); cb = answer => { diff --git a/src/js/node/stream.consumers.ts b/src/js/node/stream.consumers.ts index c03427bc09..84f3b0d03c 100644 --- 
a/src/js/node/stream.consumers.ts +++ b/src/js/node/stream.consumers.ts @@ -1,12 +1,35 @@ // Hardcoded module "node:stream/consumers" / "readable-stream/consumer" -export async function arrayBuffer(stream): Promise { - if ($isReadableStream(stream)) return Bun.readableStreamToArrayBuffer(stream); - const chunks: any[] = []; +"use strict"; + +const { Buffer } = require("node:buffer"); + +const JSONParse = JSON.parse; + +async function blob(stream): Promise { + if ($inheritsReadableStream(stream)) return Bun.readableStreamToBlob(stream); + const chunks: (Blob | ArrayBuffer | string | NodeJS.ArrayBufferView)[] = []; for await (const chunk of stream) chunks.push(chunk); - return Buffer.concat(chunks).buffer as ArrayBuffer; + return new Blob(chunks); } -export async function text(stream): Promise { - if ($isReadableStream(stream)) return Bun.readableStreamToText(stream); + +async function arrayBuffer(stream): Promise { + if ($inheritsReadableStream(stream)) return Bun.readableStreamToArrayBuffer(stream); + const ret = await blob(stream); + return ret.arrayBuffer(); +} + +async function bytes(stream): Promise { + if ($inheritsReadableStream(stream)) return Bun.readableStreamToBytes(stream); + const ret = await blob(stream); + return ret.bytes(); +} + +async function buffer(stream): Promise { + return Buffer.from(await arrayBuffer(stream)); +} + +async function text(stream): Promise { + if ($inheritsReadableStream(stream)) return Bun.readableStreamToText(stream); const dec = new TextDecoder(); let str = ""; for await (const chunk of stream) { @@ -18,22 +41,16 @@ export async function text(stream): Promise { str += dec.decode(undefined, { stream: false }); return str; } -export async function json(stream): Promise { - if ($isReadableStream(stream)) return Bun.readableStreamToJSON(stream).then(JSON.parse); - return JSON.parse(await text(stream)); -} -export async function buffer(stream): Promise { - return new Buffer(await arrayBuffer(stream)); -} -async function 
blob(stream) { - if ($isReadableStream(stream)) return Bun.readableStreamToBlob(stream).then(JSON.parse); - const chunks: any[] = []; - for await (const chunk of stream) chunks.push(chunk); - return new Blob(chunks); + +async function json(stream): Promise { + if ($inheritsReadableStream(stream)) return Bun.readableStreamToJSON(stream); + const str = await text(stream); + return JSONParse(str); } export default { arrayBuffer, + bytes, text, json, buffer, diff --git a/src/js/node/stream.ts b/src/js/node/stream.ts index fd12678fb2..9d261544c1 100644 --- a/src/js/node/stream.ts +++ b/src/js/node/stream.ts @@ -1,5501 +1,11 @@ // Hardcoded module "node:stream" / "readable-stream" -// "readable-stream" npm package -// just transpiled and debug logs added. - -// BEGIN moved from require_readable -// when we split this stuff up again, we can move this back -const kObjectMode = 1 << 0; -const kEnded = 1 << 1; -const kEndEmitted = 1 << 2; -const kReading = 1 << 3; -const kConstructed = 1 << 4; -const kSync = 1 << 5; -const kNeedReadable = 1 << 6; -const kEmittedReadable = 1 << 7; -const kReadableListening = 1 << 8; -const kResumeScheduled = 1 << 9; -const kErrorEmitted = 1 << 10; -const kEmitClose = 1 << 11; -const kAutoDestroy = 1 << 12; -const kDestroyed = 1 << 13; -const kClosed = 1 << 14; -const kCloseEmitted = 1 << 15; -const kMultiAwaitDrain = 1 << 16; -const kReadingMore = 1 << 17; -const kDataEmitted = 1 << 18; -const kPaused = Symbol("kPaused"); -// END moved from require_readable - -const StringDecoder = require("node:string_decoder").StringDecoder; -const transferToNativeReadable = $newCppFunction("ReadableStream.cpp", "jsFunctionTransferToNativeReadableStream", 1); -const { kAutoDestroyed } = require("internal/shared"); -const { - validateBoolean, - validateInteger, - validateInt32, - validateAbortSignal, - validateFunction, - validateObject, -} = require("internal/validators"); - -const ProcessNextTick = process.nextTick; +const { kEnsureConstructed, 
kGetNativeReadableProto } = require("internal/shared"); const EE = require("node:events").EventEmitter; - -var __getOwnPropNames = Object.getOwnPropertyNames; - -var __commonJS = (cb, mod: typeof module | undefined = undefined) => - function __require2() { - return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports; - }; - -function isReadableStream(value) { - return typeof value === "object" && value !== null && value instanceof ReadableStream; -} +const exports = require("internal/stream"); $debug("node:stream loaded"); -//------------------------------------------------------------------------------ -// Node error polyfills -//------------------------------------------------------------------------------ - -// node_modules/readable-stream/lib/ours/primordials.js -var require_primordials = __commonJS({ - "node_modules/readable-stream/lib/ours/primordials.js"(exports, module) { - "use strict"; - module.exports = { - ArrayPrototypeIncludes(self, el) { - return self.includes(el); - }, - ArrayPrototypeIndexOf(self, el) { - return self.indexOf(el); - }, - ArrayPrototypeJoin(self, sep) { - return self.join(sep); - }, - ArrayPrototypeMap(self, fn) { - return self.map(fn); - }, - ArrayPrototypePop(self, el) { - return self.pop(el); - }, - ArrayPrototypePush(self, el) { - return self.push(el); - }, - ArrayPrototypeSlice(self, start, end) { - return self.slice(start, end); - }, - Error, - FunctionPrototypeCall(fn, thisArgs, ...args) { - return fn.$call(thisArgs, ...args); - }, - FunctionPrototypeSymbolHasInstance(self, instance) { - return Function.prototype[Symbol.hasInstance].$call(self, instance); - }, - MathFloor: Math.floor, - Number, - NumberIsInteger: Number.isInteger, - NumberIsNaN: Number.isNaN, - NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, - NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, - NumberParseInt: Number.parseInt, - ObjectDefineProperties(self, props) { - return Object.defineProperties(self, props); - }, - 
ObjectDefineProperty(self, name, prop) { - return Object.defineProperty(self, name, prop); - }, - ObjectGetOwnPropertyDescriptor(self, name) { - return Object.getOwnPropertyDescriptor(self, name); - }, - ObjectKeys(obj) { - return Object.keys(obj); - }, - ObjectSetPrototypeOf(target, proto) { - return Object.setPrototypeOf(target, proto); - }, - Promise, - PromisePrototypeCatch(self, fn) { - return self.catch(fn); - }, - PromisePrototypeThen(self, thenFn, catchFn) { - return self.then(thenFn, catchFn); - }, - PromiseReject(err) { - return Promise.reject(err); - }, - RegExpPrototypeTest(self, value) { - return self.test(value); - }, - SafeSet: Set, - String, - StringPrototypeSlice(self, start, end) { - return self.slice(start, end); - }, - StringPrototypeToLowerCase(self) { - return self.toLowerCase(); - }, - StringPrototypeToUpperCase(self) { - return self.toUpperCase(); - }, - StringPrototypeTrim(self) { - return self.trim(); - }, - Symbol, - SymbolAsyncIterator: Symbol.asyncIterator, - SymbolHasInstance: Symbol.hasInstance, - SymbolIterator: Symbol.iterator, - TypedArrayPrototypeSet(self, buf, len) { - return self.set(buf, len); - }, - Uint8Array, - }; - }, -}); -// node_modules/readable-stream/lib/ours/util.js -var require_util = __commonJS({ - "node_modules/readable-stream/lib/ours/util.js"(exports, module) { - "use strict"; - - var AsyncFunction = Object.getPrototypeOf(async function () {}).constructor; - var isBlob = - typeof Blob !== "undefined" - ? 
function isBlob2(b) { - return b instanceof Blob; - } - : function isBlob2(b) { - return false; - }; - var AggregateError = class extends Error { - constructor(errors) { - if (!Array.isArray(errors)) { - throw new TypeError(`Expected input to be an Array, got ${typeof errors}`); - } - let message = ""; - for (let i = 0; i < errors.length; i++) { - message += ` ${errors[i].stack} -`; - } - super(message); - this.name = "AggregateError"; - this.errors = errors; - } - }; - module.exports = { - AggregateError, - once(callback) { - let called = false; - return function (...args) { - if (called) { - return; - } - called = true; - callback.$apply(this, args); - }; - }, - createDeferredPromise: function () { - let resolve; - let reject; - const promise = new Promise((res, rej) => { - resolve = res; - reject = rej; - }); - return { - promise, - resolve, - reject, - }; - }, - promisify(fn) { - return new Promise((resolve, reject) => { - fn((err, ...args) => { - if (err) { - return reject(err); - } - return resolve(...args); - }); - }); - }, - debuglog() { - return function () {}; - }, - format(format, ...args) { - return format.replace(/%([sdifj])/g, function (...[_unused, type]) { - const replacement = args.shift(); - if (type === "f") { - return replacement.toFixed(6); - } else if (type === "j") { - return JSON.stringify(replacement); - } else if (type === "s" && typeof replacement === "object") { - const ctor = replacement.constructor !== Object ? 
replacement.constructor.name : ""; - return `${ctor} {}`.trim(); - } else { - return replacement.toString(); - } - }); - }, - inspect(value) { - switch (typeof value) { - case "string": - if (value.includes("'")) { - if (!value.includes('"')) { - return `"${value}"`; - } else if (!value.includes("`") && !value.includes("${")) { - return `\`${value}\``; - } - } - return `'${value}'`; - case "number": - if (isNaN(value)) { - return "NaN"; - } else if (Object.is(value, -0)) { - return String(value); - } - return value; - case "bigint": - return `${String(value)}n`; - case "boolean": - case "undefined": - return String(value); - case "object": - return "{}"; - } - }, - types: { - isAsyncFunction(fn) { - return fn instanceof AsyncFunction; - }, - isArrayBufferView(arr) { - return ArrayBuffer.isView(arr); - }, - }, - isBlob, - }; - module.exports.promisify.custom = Symbol.for("nodejs.util.promisify.custom"); - }, -}); - -// node_modules/readable-stream/lib/ours/errors.js -var require_errors = __commonJS({ - "node_modules/readable-stream/lib/ours/errors.js"(exports, module) { - "use strict"; - var { format, inspect, AggregateError: CustomAggregateError } = require_util(); - var AggregateError = globalThis.AggregateError || CustomAggregateError; - var kIsNodeError = Symbol("kIsNodeError"); - var kTypes = ["string", "function", "number", "object", "Function", "Object", "boolean", "bigint", "symbol"]; - var classRegExp = /^([A-Z][a-z0-9]*)+$/; - var nodeInternalPrefix = "__node_internal_"; - var codes = {}; - function assert(value, message) { - if (!value) { - throw new codes.ERR_INTERNAL_ASSERTION(message); - } - } - function addNumericalSeparator(val) { - let res = ""; - let i = val.length; - const start = val[0] === "-" ? 
1 : 0; - for (; i >= start + 4; i -= 3) { - res = `_${val.slice(i - 3, i)}${res}`; - } - return `${val.slice(0, i)}${res}`; - } - function getMessage(key, msg, args) { - if (typeof msg === "function") { - assert( - msg.length <= args.length, - `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).`, - ); - return msg(...args); - } - const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length; - assert( - expectedLength === args.length, - `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).`, - ); - if (args.length === 0) { - return msg; - } - return format(msg, ...args); - } - function E(code, message, Base) { - if (!Base) { - Base = Error; - } - class NodeError extends Base { - constructor(...args) { - super(getMessage(code, message, args)); - } - toString() { - return `${this.name} [${code}]: ${this.message}`; - } - } - Object.defineProperties(NodeError.prototype, { - name: { - value: Base.name, - writable: true, - enumerable: false, - configurable: true, - }, - toString: { - value() { - return `${this.name} [${code}]: ${this.message}`; - }, - writable: true, - enumerable: false, - configurable: true, - }, - }); - NodeError.prototype.code = code; - NodeError.prototype[kIsNodeError] = true; - codes[code] = NodeError; - } - function hideStackFrames(fn) { - const hidden = nodeInternalPrefix + fn.name; - Object.defineProperty(fn, "name", { - value: hidden, - }); - return fn; - } - function aggregateTwoErrors(innerError, outerError) { - if (innerError && outerError && innerError !== outerError) { - if (Array.isArray(outerError.errors)) { - outerError.errors.push(innerError); - return outerError; - } - const err = new AggregateError([outerError, innerError], outerError.message); - err.code = outerError.code; - return err; - } - return innerError || outerError; - } - var AbortError = class extends Error { - constructor(message = "The operation was 
aborted", options = void 0) { - if (options !== void 0 && typeof options !== "object") { - throw new codes.ERR_INVALID_ARG_TYPE("options", "Object", options); - } - super(message, options); - this.code = "ABORT_ERR"; - this.name = "AbortError"; - } - }; - E("ERR_ASSERTION", "%s", Error); - E( - "ERR_INVALID_ARG_TYPE", - (name, expected, actual) => { - assert(typeof name === "string", "'name' must be a string"); - if (!Array.isArray(expected)) { - expected = [expected]; - } - let msg = "The "; - if (name.endsWith(" argument")) { - msg += `${name} `; - } else { - msg += `"${name}" ${name.includes(".") ? "property" : "argument"} `; - } - msg += "must be "; - const types = []; - const instances = []; - const other = []; - for (const value of expected) { - assert(typeof value === "string", "All expected entries have to be of type string"); - if (kTypes.includes(value)) { - types.push(value.toLowerCase()); - } else if (classRegExp.test(value)) { - instances.push(value); - } else { - assert(value !== "object", 'The value "object" should be written as "Object"'); - other.push(value); - } - } - if (instances.length > 0) { - const pos = types.indexOf("object"); - if (pos !== -1) { - types.splice(types, pos, 1); - instances.push("Object"); - } - } - if (types.length > 0) { - switch (types.length) { - case 1: - msg += `of type ${types[0]}`; - break; - case 2: - msg += `one of type ${types[0]} or ${types[1]}`; - break; - default: { - const last = types.pop(); - msg += `one of type ${types.join(", ")}, or ${last}`; - } - } - if (instances.length > 0 || other.length > 0) { - msg += " or "; - } - } - if (instances.length > 0) { - switch (instances.length) { - case 1: - msg += `an instance of ${instances[0]}`; - break; - case 2: - msg += `an instance of ${instances[0]} or ${instances[1]}`; - break; - default: { - const last = instances.pop(); - msg += `an instance of ${instances.join(", ")}, or ${last}`; - } - } - if (other.length > 0) { - msg += " or "; - } - } - switch 
(other.length) { - case 0: - break; - case 1: - if (other[0].toLowerCase() !== other[0]) { - msg += "an "; - } - msg += `${other[0]}`; - break; - case 2: - msg += `one of ${other[0]} or ${other[1]}`; - break; - default: { - const last = other.pop(); - msg += `one of ${other.join(", ")}, or ${last}`; - } - } - if (actual == null) { - msg += `. Received ${actual}`; - } else if (typeof actual === "function" && actual.name) { - msg += `. Received function ${actual.name}`; - } else if (typeof actual === "object") { - var _actual$constructor; - if ( - (_actual$constructor = actual.constructor) !== null && - _actual$constructor !== void 0 && - _actual$constructor.name - ) { - msg += `. Received an instance of ${actual.constructor.name}`; - } else { - const inspected = inspect(actual, { - depth: -1, - }); - msg += `. Received ${inspected}`; - } - } else { - let inspected = inspect(actual, { - colors: false, - }); - if (inspected.length > 25) { - inspected = `${inspected.slice(0, 25)}...`; - } - msg += `. Received type ${typeof actual} (${inspected})`; - } - return msg; - }, - TypeError, - ); - E( - "ERR_INVALID_RETURN_VALUE", - (input, name, value) => { - var _value$constructor; - const type = - value !== null && - value !== void 0 && - (_value$constructor = value.constructor) !== null && - _value$constructor !== void 0 && - _value$constructor.name - ? `instance of ${value.constructor.name}` - : `type ${typeof value}`; - return `Expected ${input} to be returned from the "${name}" function but got ${type}.`; - }, - TypeError, - ); - E( - "ERR_MISSING_ARGS", - (...args) => { - assert(args.length > 0, "At least one arg needs to be specified"); - let msg; - const len = args.length; - args = (Array.isArray(args) ? 
args : [args]).map(a => `"${a}"`).join(" or "); - switch (len) { - case 1: - msg += `The ${args[0]} argument`; - break; - case 2: - msg += `The ${args[0]} and ${args[1]} arguments`; - break; - default: - { - const last = args.pop(); - msg += `The ${args.join(", ")}, and ${last} arguments`; - } - break; - } - return `${msg} must be specified`; - }, - TypeError, - ); - E( - "ERR_OUT_OF_RANGE", - (str, range, input) => { - assert(range, 'Missing "range" argument'); - let received; - if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) { - received = addNumericalSeparator(String(input)); - } else if (typeof input === "bigint") { - received = String(input); - if (input > 2n ** 32n || input < -(2n ** 32n)) { - received = addNumericalSeparator(received); - } - received += "n"; - } else { - received = inspect(input); - } - return `The value of "${str}" is out of range. It must be ${range}. Received ${received}`; - }, - RangeError, - ); - E("ERR_MULTIPLE_CALLBACK", "Callback called multiple times", Error); - E("ERR_METHOD_NOT_IMPLEMENTED", "The %s method is not implemented", Error); - E("ERR_STREAM_ALREADY_FINISHED", "Cannot call %s after a stream was finished", Error); - E("ERR_STREAM_CANNOT_PIPE", "Cannot pipe, not readable", Error); - E("ERR_STREAM_DESTROYED", "Cannot call %s after a stream was destroyed", Error); - E("ERR_STREAM_NULL_VALUES", "May not write null values to stream", TypeError); - E("ERR_STREAM_PREMATURE_CLOSE", "Premature close", Error); - E("ERR_STREAM_PUSH_AFTER_EOF", "stream.push() after EOF", Error); - E("ERR_STREAM_UNSHIFT_AFTER_END_EVENT", "stream.unshift() after end event", Error); - E("ERR_STREAM_WRITE_AFTER_END", "write after end", Error); - E("ERR_UNKNOWN_ENCODING", "Unknown encoding: %s", TypeError); - module.exports = { - AbortError, - aggregateTwoErrors: hideStackFrames(aggregateTwoErrors), - hideStackFrames, - codes, - }; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/utils.js -var require_utils = __commonJS({ - 
"node_modules/readable-stream/lib/internal/streams/utils.js"(exports, module) { - "use strict"; - var { Symbol: Symbol2, SymbolAsyncIterator, SymbolIterator } = require_primordials(); - var kDestroyed = Symbol2("kDestroyed"); - var kIsErrored = Symbol2("kIsErrored"); - var kIsReadable = Symbol2("kIsReadable"); - var kIsDisturbed = Symbol2("kIsDisturbed"); - function isReadableNodeStream(obj, strict = false) { - var _obj$_readableState; - return !!( - obj && - typeof obj.pipe === "function" && - typeof obj.on === "function" && - (!strict || (typeof obj.pause === "function" && typeof obj.resume === "function")) && - (!obj._writableState || - ((_obj$_readableState = obj._readableState) === null || _obj$_readableState === void 0 - ? void 0 - : _obj$_readableState.readable) !== false) && - (!obj._writableState || obj._readableState) - ); - } - function isWritableNodeStream(obj) { - var _obj$_writableState; - return !!( - obj && - typeof obj.write === "function" && - typeof obj.on === "function" && - (!obj._readableState || - ((_obj$_writableState = obj._writableState) === null || _obj$_writableState === void 0 - ? 
void 0 - : _obj$_writableState.writable) !== false) - ); - } - function isDuplexNodeStream(obj) { - return !!( - obj && - typeof obj.pipe === "function" && - obj._readableState && - typeof obj.on === "function" && - typeof obj.write === "function" - ); - } - function isNodeStream(obj) { - return ( - obj && - (obj._readableState || - obj._writableState || - (typeof obj.write === "function" && typeof obj.on === "function") || - (typeof obj.pipe === "function" && typeof obj.on === "function")) - ); - } - function isIterable(obj, isAsync) { - if (obj == null) return false; - if (isAsync === true) return typeof obj[SymbolAsyncIterator] === "function"; - if (isAsync === false) return typeof obj[SymbolIterator] === "function"; - return typeof obj[SymbolAsyncIterator] === "function" || typeof obj[SymbolIterator] === "function"; - } - function isDestroyed(stream) { - if (!isNodeStream(stream)) return null; - const wState = stream._writableState; - const rState = stream._readableState; - const state = wState || rState; - return !!(stream.destroyed || stream[kDestroyed] || (state !== null && state !== void 0 && state.destroyed)); - } - function isWritableEnded(stream) { - if (!isWritableNodeStream(stream)) return null; - if (stream.writableEnded === true) return true; - const wState = stream._writableState; - if (wState !== null && wState !== void 0 && wState.errored) return false; - if (typeof (wState === null || wState === void 0 ? void 0 : wState.ended) !== "boolean") return null; - return wState.ended; - } - function isWritableFinished(stream, strict) { - if (!isWritableNodeStream(stream)) return null; - if (stream.writableFinished === true) return true; - const wState = stream._writableState; - if (wState !== null && wState !== void 0 && wState.errored) return false; - if (typeof (wState === null || wState === void 0 ? 
void 0 : wState.finished) !== "boolean") return null; - return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0)); - } - function isReadableEnded(stream) { - if (!isReadableNodeStream(stream)) return null; - if (stream.readableEnded === true) return true; - const rState = stream._readableState; - if (!rState || rState.errored) return false; - if (typeof (rState === null || rState === void 0 ? void 0 : rState.ended) !== "boolean") return null; - return rState.ended; - } - function isReadableFinished(stream, strict?: boolean) { - if (!isReadableNodeStream(stream)) return null; - const rState = stream._readableState; - if (rState !== null && rState !== void 0 && rState.errored) return false; - if (typeof (rState === null || rState === void 0 ? void 0 : rState.endEmitted) !== "boolean") return null; - return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0)); - } - function isReadable(stream) { - if (stream && stream[kIsReadable] != null) return stream[kIsReadable]; - if (typeof (stream === null || stream === void 0 ? void 0 : stream.readable) !== "boolean") return null; - if (isDestroyed(stream)) return false; - return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream); - } - function isWritable(stream) { - if (typeof (stream === null || stream === void 0 ? void 0 : stream.writable) !== "boolean") return null; - if (isDestroyed(stream)) return false; - return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream); - } - function isFinished(stream, opts) { - if (!isNodeStream(stream)) { - return null; - } - if (isDestroyed(stream)) { - return true; - } - if ((opts === null || opts === void 0 ? void 0 : opts.readable) !== false && isReadable(stream)) { - return false; - } - if ((opts === null || opts === void 0 ? 
void 0 : opts.writable) !== false && isWritable(stream)) { - return false; - } - return true; - } - function isWritableErrored(stream) { - var _stream$_writableStat, _stream$_writableStat2; - if (!isNodeStream(stream)) { - return null; - } - if (stream.writableErrored) { - return stream.writableErrored; - } - return (_stream$_writableStat = - (_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === void 0 - ? void 0 - : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== void 0 - ? _stream$_writableStat - : null; - } - function isReadableErrored(stream) { - var _stream$_readableStat, _stream$_readableStat2; - if (!isNodeStream(stream)) { - return null; - } - if (stream.readableErrored) { - return stream.readableErrored; - } - return (_stream$_readableStat = - (_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === void 0 - ? void 0 - : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== void 0 - ? _stream$_readableStat - : null; - } - function isClosed(stream) { - if (!isNodeStream(stream)) { - return null; - } - if (typeof stream.closed === "boolean") { - return stream.closed; - } - const wState = stream._writableState; - const rState = stream._readableState; - if ( - typeof (wState === null || wState === void 0 ? void 0 : wState.closed) === "boolean" || - typeof (rState === null || rState === void 0 ? void 0 : rState.closed) === "boolean" - ) { - return ( - (wState === null || wState === void 0 ? void 0 : wState.closed) || - (rState === null || rState === void 0 ? 
void 0 : rState.closed) - ); - } - if (typeof stream._closed === "boolean" && isOutgoingMessage(stream)) { - return stream._closed; - } - return null; - } - function isOutgoingMessage(stream) { - return ( - typeof stream._closed === "boolean" && - typeof stream._defaultKeepAlive === "boolean" && - typeof stream._removedConnection === "boolean" && - typeof stream._removedContLen === "boolean" - ); - } - function isServerResponse(stream) { - return typeof stream._sent100 === "boolean" && isOutgoingMessage(stream); - } - function isServerRequest(stream) { - var _stream$req; - return ( - typeof stream._consuming === "boolean" && - typeof stream._dumped === "boolean" && - ((_stream$req = stream.req) === null || _stream$req === void 0 ? void 0 : _stream$req.upgradeOrConnect) === - void 0 - ); - } - function willEmitClose(stream) { - if (!isNodeStream(stream)) return null; - const wState = stream._writableState; - const rState = stream._readableState; - const state = wState || rState; - return ( - (!state && isServerResponse(stream)) || - !!(state && state.autoDestroy && state.emitClose && state.closed === false) - ); - } - function isDisturbed(stream) { - var _stream$kIsDisturbed; - return !!( - stream && - ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== void 0 - ? _stream$kIsDisturbed - : stream.readableDidRead || stream.readableAborted) - ); - } - function isErrored(stream) { - var _ref, - _ref2, - _ref3, - _ref4, - _ref5, - _stream$kIsErrored, - _stream$_readableStat3, - _stream$_writableStat3, - _stream$_readableStat4, - _stream$_writableStat4; - return !!( - stream && - ((_ref = - (_ref2 = - (_ref3 = - (_ref4 = - (_ref5 = - (_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== void 0 - ? _stream$kIsErrored - : stream.readableErrored) !== null && _ref5 !== void 0 - ? _ref5 - : stream.writableErrored) !== null && _ref4 !== void 0 - ? 
_ref4 - : (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === void 0 - ? void 0 - : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== void 0 - ? _ref3 - : (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === void 0 - ? void 0 - : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== void 0 - ? _ref2 - : (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === void 0 - ? void 0 - : _stream$_readableStat4.errored) !== null && _ref !== void 0 - ? _ref - : (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === void 0 - ? void 0 - : _stream$_writableStat4.errored) - ); - } - module.exports = { - kDestroyed, - isDisturbed, - kIsDisturbed, - isErrored, - kIsErrored, - isReadable, - kIsReadable, - isClosed, - isDestroyed, - isDuplexNodeStream, - isFinished, - isIterable, - isReadableNodeStream, - isReadableEnded, - isReadableFinished, - isReadableErrored, - isNodeStream, - isWritable, - isWritableNodeStream, - isWritableEnded, - isWritableFinished, - isWritableErrored, - isServerRequest, - isServerResponse, - willEmitClose, - }; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/end-of-stream.js -var require_end_of_stream = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/end-of-stream.js"(exports, module) { - "use strict"; - var { AbortError, codes } = require_errors(); - var { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes; - var { once } = require_util(); - var { Promise: Promise2 } = require_primordials(); - var { - isClosed, - isReadable, - isReadableNodeStream, - isReadableFinished, - isReadableErrored, - isWritable, - isWritableNodeStream, - isWritableFinished, - isWritableErrored, - isNodeStream, - willEmitClose: _willEmitClose, - } = require_utils(); - function isRequest(stream) { - return stream.setHeader && typeof stream.abort === "function"; - } - var nop = () => {}; - 
function eos(stream, options, callback) { - var _options$readable, _options$writable; - if (arguments.length === 2) { - callback = options; - options = {}; - } else if (options == null) { - options = {}; - } else { - validateObject(options, "options"); - } - validateFunction(callback, "callback"); - validateAbortSignal(options.signal, "options.signal"); - callback = once(callback); - const readable = - (_options$readable = options.readable) !== null && _options$readable !== void 0 - ? _options$readable - : isReadableNodeStream(stream); - const writable = - (_options$writable = options.writable) !== null && _options$writable !== void 0 - ? _options$writable - : isWritableNodeStream(stream); - if (!isNodeStream(stream)) { - throw new ERR_INVALID_ARG_TYPE("stream", "Stream", stream); - } - const wState = stream._writableState; - const rState = stream._readableState; - const onlegacyfinish = () => { - if (!stream.writable) { - onfinish(); - } - }; - let willEmitClose = - _willEmitClose(stream) && - isReadableNodeStream(stream) === readable && - isWritableNodeStream(stream) === writable; - let writableFinished = isWritableFinished(stream, false); - const onfinish = () => { - writableFinished = true; - if (stream.destroyed) { - willEmitClose = false; - } - if (willEmitClose && (!stream.readable || readable)) { - return; - } - if (!readable || readableFinished) { - callback.$call(stream); - } - }; - let readableFinished = isReadableFinished(stream, false); - const onend = () => { - readableFinished = true; - if (stream.destroyed) { - willEmitClose = false; - } - if (willEmitClose && (!stream.writable || writable)) { - return; - } - if (!writable || writableFinished) { - callback.$call(stream); - } - }; - const onerror = err => { - callback.$call(stream, err); - }; - let closed = isClosed(stream); - const onclose = () => { - closed = true; - const errored = isWritableErrored(stream) || isReadableErrored(stream); - if (errored && typeof errored !== "boolean") { - return 
callback.$call(stream, errored); - } - if (readable && !readableFinished && isReadableNodeStream(stream, true)) { - if (!isReadableFinished(stream, false)) return callback.$call(stream, new ERR_STREAM_PREMATURE_CLOSE()); - } - if (writable && !writableFinished) { - if (!isWritableFinished(stream, false)) return callback.$call(stream, new ERR_STREAM_PREMATURE_CLOSE()); - } - callback.$call(stream); - }; - const onrequest = () => { - stream.req.on("finish", onfinish); - }; - if (isRequest(stream)) { - stream.on("complete", onfinish); - if (!willEmitClose) { - stream.on("abort", onclose); - } - if (stream.req) { - onrequest(); - } else { - stream.on("request", onrequest); - } - } else if (writable && !wState) { - stream.on("end", onlegacyfinish); - stream.on("close", onlegacyfinish); - } - if (!willEmitClose && typeof stream.aborted === "boolean") { - stream.on("aborted", onclose); - } - stream.on("end", onend); - stream.on("finish", onfinish); - if (options.error !== false) { - stream.on("error", onerror); - } - stream.on("close", onclose); - if (closed) { - ProcessNextTick(onclose); - } else if ( - (wState !== null && wState !== void 0 && wState.errorEmitted) || - (rState !== null && rState !== void 0 && rState.errorEmitted) - ) { - if (!willEmitClose) { - ProcessNextTick(onclose); - } - } else if ( - !readable && - (!willEmitClose || isReadable(stream)) && - (writableFinished || isWritable(stream) === false) - ) { - ProcessNextTick(onclose); - } else if ( - !writable && - (!willEmitClose || isWritable(stream)) && - (readableFinished || isReadable(stream) === false) - ) { - ProcessNextTick(onclose); - } else if (rState && stream.req && stream.aborted) { - ProcessNextTick(onclose); - } - const cleanup = () => { - callback = nop; - stream.removeListener("aborted", onclose); - stream.removeListener("complete", onfinish); - stream.removeListener("abort", onclose); - stream.removeListener("request", onrequest); - if (stream.req) stream.req.removeListener("finish", 
onfinish); - stream.removeListener("end", onlegacyfinish); - stream.removeListener("close", onlegacyfinish); - stream.removeListener("finish", onfinish); - stream.removeListener("end", onend); - stream.removeListener("error", onerror); - stream.removeListener("close", onclose); - }; - if (options.signal && !closed) { - const abort = () => { - const endCallback = callback; - cleanup(); - endCallback.$call( - stream, - new AbortError(void 0, { - cause: options.signal.reason, - }), - ); - }; - if (options.signal.aborted) { - ProcessNextTick(abort); - } else { - const originalCallback = callback; - callback = once((...args) => { - options.signal.removeEventListener("abort", abort); - originalCallback.$apply(stream, args); - }); - options.signal.addEventListener("abort", abort); - } - } - return cleanup; - } - function finished(stream, opts) { - const { promise, resolve, reject } = $newPromiseCapability(Promise); - eos(stream, opts, err => { - if (err) { - reject(err); - } else { - resolve(); - } - }); - return promise; - } - module.exports = eos; - module.exports.finished = finished; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/operators.js -var require_operators = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/operators.js"(exports, module) { - "use strict"; - var { - codes: { ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, - AbortError, - } = require_errors(); - var kWeakHandler = require_primordials().Symbol("kWeak"); - var { finished } = require_end_of_stream(); - var { - ArrayPrototypePush, - MathFloor, - Number: Number2, - NumberIsNaN, - Promise: Promise2, - PromiseReject, - PromisePrototypeCatch, - Symbol: Symbol2, - } = require_primordials(); - var kEmpty = Symbol2("kEmpty"); - var kEof = Symbol2("kEof"); - function map(fn, options) { - if (typeof fn !== "function") { - throw new ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); - } - if (options != null) { - validateObject(options, "options"); - 
} - if ((options === null || options === void 0 ? void 0 : options.signal) != null) { - validateAbortSignal(options.signal, "options.signal"); - } - let concurrency = 1; - if ((options === null || options === void 0 ? void 0 : options.concurrency) != null) { - concurrency = MathFloor(options.concurrency); - } - validateInteger(concurrency, "concurrency", 1); - return async function* map2() { - var _options$signal, _options$signal2; - const ac = new AbortController(); - const stream = this; - const queue = []; - const signal = ac.signal; - const signalOpt = { - signal, - }; - const abort = () => ac.abort(); - if ( - options !== null && - options !== void 0 && - (_options$signal = options.signal) !== null && - _options$signal !== void 0 && - _options$signal.aborted - ) { - abort(); - } - options === null || options === void 0 - ? void 0 - : (_options$signal2 = options.signal) === null || _options$signal2 === void 0 - ? void 0 - : _options$signal2.addEventListener("abort", abort); - let next; - let resume; - let done = false; - function onDone() { - done = true; - } - async function pump() { - try { - for await (let val of stream) { - var _val; - if (done) { - return; - } - if (signal.aborted) { - throw new AbortError(); - } - try { - val = fn(val, signalOpt); - } catch (err) { - val = PromiseReject(err); - } - if (val === kEmpty) { - continue; - } - if (typeof ((_val = val) === null || _val === void 0 ? void 0 : _val.catch) === "function") { - val.catch(onDone); - } - queue.push(val); - if (next) { - next(); - next = null; - } - if (!done && queue.length && queue.length >= concurrency) { - await new Promise2(resolve => { - resume = resolve; - }); - } - } - queue.push(kEof); - } catch (err) { - const val = PromiseReject(err); - PromisePrototypeCatch(val, onDone); - queue.push(val); - } finally { - var _options$signal3; - done = true; - if (next) { - next(); - next = null; - } - options === null || options === void 0 - ? 
void 0 - : (_options$signal3 = options.signal) === null || _options$signal3 === void 0 - ? void 0 - : _options$signal3.removeEventListener("abort", abort); - } - } - pump(); - try { - while (true) { - while (queue.length > 0) { - const val = await queue[0]; - if (val === kEof) { - return; - } - if (signal.aborted) { - throw new AbortError(); - } - if (val !== kEmpty) { - yield val; - } - queue.shift(); - if (resume) { - resume(); - resume = null; - } - } - await new Promise2(resolve => { - next = resolve; - }); - } - } finally { - ac.abort(); - done = true; - if (resume) { - resume(); - resume = null; - } - } - }.$call(this); - } - function asIndexedPairs(options = void 0) { - if (options != null) { - validateObject(options, "options"); - } - if ((options === null || options === void 0 ? void 0 : options.signal) != null) { - validateAbortSignal(options.signal, "options.signal"); - } - return async function* asIndexedPairs2() { - let index = 0; - for await (const val of this) { - var _options$signal4; - if ( - options !== null && - options !== void 0 && - (_options$signal4 = options.signal) !== null && - _options$signal4 !== void 0 && - _options$signal4.aborted - ) { - throw new AbortError({ - cause: options.signal.reason, - }); - } - yield [index++, val]; - } - }.$call(this); - } - async function some(fn, options = void 0) { - for await (const unused of filter.$call(this, fn, options)) { - return true; - } - return false; - } - async function every(fn, options = void 0) { - if (typeof fn !== "function") { - throw new ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); - } - return !(await some.$call( - this, - async (...args) => { - return !(await fn(...args)); - }, - options, - )); - } - async function find(fn, options) { - for await (const result of filter.$call(this, fn, options)) { - return result; - } - return void 0; - } - async function forEach(fn, options) { - if (typeof fn !== "function") { - throw new ERR_INVALID_ARG_TYPE("fn", ["Function", 
"AsyncFunction"], fn); - } - async function forEachFn(value, options2) { - await fn(value, options2); - return kEmpty; - } - for await (const unused of map.$call(this, forEachFn, options)); - } - function filter(fn, options) { - if (typeof fn !== "function") { - throw new ERR_INVALID_ARG_TYPE("fn", ["Function", "AsyncFunction"], fn); - } - async function filterFn(value, options2) { - if (await fn(value, options2)) { - return value; - } - return kEmpty; - } - return map.$call(this, filterFn, options); - } - var ReduceAwareErrMissingArgs = class extends ERR_MISSING_ARGS { - constructor() { - super("reduce"); - this.message = "Reduce of an empty stream requires an initial value"; - } - }; - async function reduce(reducer, initialValue, options) { - var _options$signal5; - if (typeof reducer !== "function") { - throw new ERR_INVALID_ARG_TYPE("reducer", ["Function", "AsyncFunction"], reducer); - } - if (options != null) { - validateObject(options, "options"); - } - if ((options === null || options === void 0 ? 
void 0 : options.signal) != null) { - validateAbortSignal(options.signal, "options.signal"); - } - let hasInitialValue = arguments.length > 1; - if ( - options !== null && - options !== void 0 && - (_options$signal5 = options.signal) !== null && - _options$signal5 !== void 0 && - _options$signal5.aborted - ) { - const err = new AbortError(void 0, { - cause: options.signal.reason, - }); - this.once("error", () => {}); - await finished(this.destroy(err)); - throw err; - } - const ac = new AbortController(); - const signal = ac.signal; - if (options !== null && options !== void 0 && options.signal) { - const opts = { - once: true, - [kWeakHandler]: this, - }; - options.signal.addEventListener("abort", () => ac.abort(), opts); - } - let gotAnyItemFromStream = false; - try { - for await (const value of this) { - var _options$signal6; - gotAnyItemFromStream = true; - if ( - options !== null && - options !== void 0 && - (_options$signal6 = options.signal) !== null && - _options$signal6 !== void 0 && - _options$signal6.aborted - ) { - throw new AbortError(); - } - if (!hasInitialValue) { - initialValue = value; - hasInitialValue = true; - } else { - initialValue = await reducer(initialValue, value, { - signal, - }); - } - } - if (!gotAnyItemFromStream && !hasInitialValue) { - throw new ReduceAwareErrMissingArgs(); - } - } finally { - ac.abort(); - } - return initialValue; - } - async function toArray(options) { - if (options != null) { - validateObject(options, "options"); - } - if ((options === null || options === void 0 ? 
void 0 : options.signal) != null) { - validateAbortSignal(options.signal, "options.signal"); - } - const result = []; - for await (const val of this) { - var _options$signal7; - if ( - options !== null && - options !== void 0 && - (_options$signal7 = options.signal) !== null && - _options$signal7 !== void 0 && - _options$signal7.aborted - ) { - throw new AbortError(void 0, { - cause: options.signal.reason, - }); - } - ArrayPrototypePush(result, val); - } - return result; - } - function flatMap(fn, options) { - const values = map.$call(this, fn, options); - return async function* flatMap2() { - for await (const val of values) { - yield* val; - } - }.$call(this); - } - function toIntegerOrInfinity(number) { - number = Number2(number); - if (NumberIsNaN(number)) { - return 0; - } - if (number < 0) { - throw new ERR_OUT_OF_RANGE("number", ">= 0", number); - } - return number; - } - function drop(number, options = void 0) { - if (options != null) { - validateObject(options, "options"); - } - if ((options === null || options === void 0 ? void 0 : options.signal) != null) { - validateAbortSignal(options.signal, "options.signal"); - } - number = toIntegerOrInfinity(number); - return async function* drop2() { - var _options$signal8; - if ( - options !== null && - options !== void 0 && - (_options$signal8 = options.signal) !== null && - _options$signal8 !== void 0 && - _options$signal8.aborted - ) { - throw new AbortError(); - } - for await (const val of this) { - var _options$signal9; - if ( - options !== null && - options !== void 0 && - (_options$signal9 = options.signal) !== null && - _options$signal9 !== void 0 && - _options$signal9.aborted - ) { - throw new AbortError(); - } - if (number-- <= 0) { - yield val; - } - } - }.$call(this); - } - function take(number, options = void 0) { - if (options != null) { - validateObject(options, "options"); - } - if ((options === null || options === void 0 ? 
void 0 : options.signal) != null) { - validateAbortSignal(options.signal, "options.signal"); - } - number = toIntegerOrInfinity(number); - return async function* take2() { - var _options$signal10; - if ( - options !== null && - options !== void 0 && - (_options$signal10 = options.signal) !== null && - _options$signal10 !== void 0 && - _options$signal10.aborted - ) { - throw new AbortError(); - } - for await (const val of this) { - var _options$signal11; - if ( - options !== null && - options !== void 0 && - (_options$signal11 = options.signal) !== null && - _options$signal11 !== void 0 && - _options$signal11.aborted - ) { - throw new AbortError(); - } - if (number-- > 0) { - yield val; - } else { - return; - } - } - }.$call(this); - } - module.exports.streamReturningOperators = { - asIndexedPairs, - drop, - filter, - flatMap, - map, - take, - }; - module.exports.promiseReturningOperators = { - every, - forEach, - reduce, - toArray, - some, - find, - }; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/destroy.js -var require_destroy = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/destroy.js"(exports, module) { - "use strict"; - var { - aggregateTwoErrors, - codes: { ERR_MULTIPLE_CALLBACK }, - AbortError, - } = require_errors(); - var { Symbol: Symbol2 } = require_primordials(); - var { kDestroyed, isDestroyed, isFinished, isServerRequest } = require_utils(); - var kDestroy = Symbol.for("kDestroy"); - var kConstruct = Symbol.for("kConstruct"); - function checkError(err, w, r) { - if (err) { - err.stack; - if (w && !w.errored) { - w.errored = err; - } - if (r && !r.errored) { - r.errored = err; - } - } - } - function destroy(err, cb) { - const r = this._readableState; - const w = this._writableState; - const s = w || r; - if ((w && w.destroyed) || (r && r.destroyed)) { - if (typeof cb === "function") { - cb(); - } - return this; - } - checkError(err, w, r); - if (w) { - w.destroyed = true; - } - if (r) { - r.destroyed = true; - } - 
if (!s.constructed) { - this.once(kDestroy, er => { - _destroy(this, aggregateTwoErrors(er, err), cb); - }); - } else { - _destroy(this, err, cb); - } - return this; - } - function _destroy(self, err, cb) { - let called = false; - function onDestroy(err2) { - if (called) { - return; - } - called = true; - const r = self._readableState; - const w = self._writableState; - checkError(err2, w, r); - if (w) { - w.closed = true; - } - if (r) { - r.closed = true; - } - if (typeof cb === "function") { - cb(err2); - } - if (err2) { - ProcessNextTick(emitErrorCloseNT, self, err2); - } else { - ProcessNextTick(emitCloseNT, self); - } - } - try { - self._destroy(err || null, onDestroy); - } catch (err2) { - onDestroy(err2); - } - } - function emitErrorCloseNT(self, err) { - emitErrorNT(self, err); - emitCloseNT(self); - } - function emitCloseNT(self) { - const r = self._readableState; - const w = self._writableState; - if (w) { - w.closeEmitted = true; - } - if (r) { - r.closeEmitted = true; - } - if ((w && w.emitClose) || (r && r.emitClose)) { - self.emit("close"); - } - } - function emitErrorNT(self, err) { - const r = self?._readableState; - const w = self?._writableState; - if (w?.errorEmitted || r?.errorEmitted) { - return; - } - if (w) { - w.errorEmitted = true; - } - if (r) { - r.errorEmitted = true; - } - self?.emit?.("error", err); - } - function undestroy() { - const r = this._readableState; - const w = this._writableState; - if (r) { - r.constructed = true; - r.closed = false; - r.closeEmitted = false; - r.destroyed = false; - r.errored = null; - r.errorEmitted = false; - r.reading = false; - r.ended = r.readable === false; - r.endEmitted = r.readable === false; - } - if (w) { - w.constructed = true; - w.destroyed = false; - w.closed = false; - w.closeEmitted = false; - w.errored = null; - w.errorEmitted = false; - w.finalCalled = false; - w.prefinished = false; - w.ended = w.writable === false; - w.ending = w.writable === false; - w.finished = w.writable === false; 
- } - } - function errorOrDestroy(stream, err, sync?: boolean) { - const r = stream?._readableState; - const w = stream?._writableState; - if ((w && w.destroyed) || (r && r.destroyed)) { - return this; - } - if ((r && r.autoDestroy) || (w && w.autoDestroy)) stream.destroy(err); - else if (err) { - Error.captureStackTrace(err); - if (w && !w.errored) { - w.errored = err; - } - if (r && !r.errored) { - r.errored = err; - } - if (sync) { - ProcessNextTick(emitErrorNT, stream, err); - } else { - emitErrorNT(stream, err); - } - } - } - function construct(stream, cb) { - if (typeof stream._construct !== "function") { - return; - } - const r = stream._readableState; - const w = stream._writableState; - if (r) { - r.constructed = false; - } - if (w) { - w.constructed = false; - } - stream.once(kConstruct, cb); - if (stream.listenerCount(kConstruct) > 1) { - return; - } - ProcessNextTick(constructNT, stream); - } - function constructNT(stream) { - let called = false; - function onConstruct(err) { - if (called) { - errorOrDestroy(stream, err !== null && err !== void 0 ? 
err : new ERR_MULTIPLE_CALLBACK()); - return; - } - called = true; - const r = stream._readableState; - const w = stream._writableState; - const s = w || r; - if (r) { - r.constructed = true; - } - if (w) { - w.constructed = true; - } - if (s.destroyed) { - stream.emit(kDestroy, err); - } else if (err) { - errorOrDestroy(stream, err, true); - } else { - ProcessNextTick(emitConstructNT, stream); - } - } - try { - stream._construct(onConstruct); - } catch (err) { - onConstruct(err); - } - } - function emitConstructNT(stream) { - stream.emit(kConstruct); - } - function isRequest(stream) { - return stream && stream.setHeader && typeof stream.abort === "function"; - } - function emitCloseLegacy(stream) { - stream.emit("close"); - } - function emitErrorCloseLegacy(stream, err) { - stream.emit("error", err); - ProcessNextTick(emitCloseLegacy, stream); - } - function destroyer(stream, err) { - if (!stream || isDestroyed(stream)) { - return; - } - if (!err && !isFinished(stream)) { - err = new AbortError(); - } - if (isServerRequest(stream)) { - stream.socket = null; - stream.destroy(err); - } else if (isRequest(stream)) { - stream.abort(); - } else if (isRequest(stream.req)) { - stream.req.abort(); - } else if (typeof stream.destroy === "function") { - stream.destroy(err); - } else if (typeof stream.close === "function") { - stream.close(); - } else if (err) { - ProcessNextTick(emitErrorCloseLegacy, stream); - } else { - ProcessNextTick(emitCloseLegacy, stream); - } - if (!stream.destroyed) { - stream[kDestroyed] = true; - } - } - module.exports = { - construct, - destroyer, - destroy, - undestroy, - errorOrDestroy, - }; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/legacy.js -var require_legacy = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/legacy.js"(exports, module) { - "use strict"; - var { ObjectSetPrototypeOf } = require_primordials(); - - function Stream(options) { - if (!(this instanceof Stream)) return new Stream(options); 
- EE.$call(this, options); - } - Stream.prototype = {}; - ObjectSetPrototypeOf(Stream.prototype, EE.prototype); - Stream.prototype.constructor = Stream; // Re-add constructor which got lost when setting prototype - ObjectSetPrototypeOf(Stream, EE); - - Stream.prototype.pipe = function (dest, options) { - const source = this; - function ondata(chunk) { - if (dest.writable && dest.write(chunk) === false && source.pause) { - source.pause(); - } - } - source.on("data", ondata); - function ondrain() { - if (source.readable && source.resume) { - source.resume(); - } - } - dest.on("drain", ondrain); - if (!dest._isStdio && (!options || options.end !== false)) { - source.on("end", onend); - source.on("close", onclose); - } - let didOnEnd = false; - function onend() { - if (didOnEnd) return; - didOnEnd = true; - dest.end(); - } - function onclose() { - if (didOnEnd) return; - didOnEnd = true; - if (typeof dest.destroy === "function") dest.destroy(); - } - function onerror(er) { - cleanup(); - if (EE.listenerCount(this, "error") === 0) { - this.emit("error", er); - } - } - prependListener(source, "error", onerror); - prependListener(dest, "error", onerror); - function cleanup() { - source.removeListener("data", ondata); - dest.removeListener("drain", ondrain); - source.removeListener("end", onend); - source.removeListener("close", onclose); - source.removeListener("error", onerror); - dest.removeListener("error", onerror); - source.removeListener("end", cleanup); - source.removeListener("close", cleanup); - dest.removeListener("close", cleanup); - } - source.on("end", cleanup); - source.on("close", cleanup); - dest.on("close", cleanup); - dest.emit("pipe", source); - return dest; - }; - function prependListener(emitter, event, fn) { - if (typeof emitter.prependListener === "function") return emitter.prependListener(event, fn); - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn); - else if ($isJSArray(emitter._events[event])) 
emitter._events[event].unshift(fn); - else emitter._events[event] = [fn, emitter._events[event]]; - } - module.exports = { - Stream, - prependListener, - }; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/add-abort-signal.js -var require_add_abort_signal = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/add-abort-signal.js"(exports, module) { - "use strict"; - var { AbortError, codes } = require_errors(); - var eos = require_end_of_stream(); - var { ERR_INVALID_ARG_TYPE } = codes; - function isNodeStream(obj) { - return !!(obj && typeof obj.pipe === "function"); - } - module.exports.addAbortSignal = function addAbortSignal(signal, stream) { - validateAbortSignal(signal, "signal"); - if (!isNodeStream(stream)) { - throw new ERR_INVALID_ARG_TYPE("stream", "stream.Stream", stream); - } - return module.exports.addAbortSignalNoValidate(signal, stream); - }; - module.exports.addAbortSignalNoValidate = function (signal, stream) { - if (typeof signal !== "object" || !("aborted" in signal)) { - return stream; - } - const onAbort = () => { - stream.destroy( - new AbortError(void 0, { - cause: signal.reason, - }), - ); - }; - if (signal.aborted) { - onAbort(); - } else { - signal.addEventListener("abort", onAbort); - eos(stream, () => signal.removeEventListener("abort", onAbort)); - } - return stream; - }; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/state.js -var { MathFloor, NumberIsInteger } = require_primordials(); -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; -} - -let hwm_object = 16; -let hwm_bytes = 16 * 1024; - -function getDefaultHighWaterMark(objectMode) { - return objectMode ? 
hwm_object : hwm_bytes; -} - -function setDefaultHighWaterMark(objectMode, value) { - if (objectMode) { - hwm_object = value; - } else { - hwm_bytes = value; - } -} - -function getHighWaterMark(state, options, duplexKey, isDuplex) { - const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); - if (hwm != null) { - if (!NumberIsInteger(hwm) || hwm < 0) { - const name = isDuplex ? `options.${duplexKey}` : "options.highWaterMark"; - throw $ERR_INVALID_ARG_VALUE(name, hwm); - } - return MathFloor(hwm); - } - return getDefaultHighWaterMark(state.objectMode); -} - -// node_modules/readable-stream/lib/internal/streams/from.js -var require_from = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/from.js"(exports, module) { - "use strict"; - var { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require_primordials(); - var { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = require_errors().codes; - function from(Readable, iterable, opts) { - let iterator; - if (typeof iterable === "string" || iterable instanceof Buffer) { - return new Readable({ - objectMode: true, - ...opts, - read() { - this.push(iterable); - this.push(null); - }, - }); - } - let isAsync; - if (iterable && iterable[SymbolAsyncIterator]) { - isAsync = true; - iterator = iterable[SymbolAsyncIterator](); - } else if (iterable && iterable[SymbolIterator]) { - isAsync = false; - iterator = iterable[SymbolIterator](); - } else { - throw new ERR_INVALID_ARG_TYPE("iterable", ["Iterable"], iterable); - } - const readable = new Readable({ - objectMode: true, - highWaterMark: 1, - ...opts, - }); - let reading = false; - readable._read = function () { - if (!reading) { - reading = true; - next(); - } - }; - readable._destroy = function (error, cb) { - PromisePrototypeThen( - close(error), - () => ProcessNextTick(cb, error), - e => ProcessNextTick(cb, e || error), - ); - }; - async function close(error) { - const hadError = error !== void 0 && error !== null; - const hasThrow = typeof 
iterator.throw === "function"; - if (hadError && hasThrow) { - const { value, done } = await iterator.throw(error); - await value; - if (done) { - return; - } - } - if (typeof iterator.return === "function") { - const { value } = await iterator.return(); - await value; - } - } - async function next() { - for (;;) { - try { - const { value, done } = isAsync ? await iterator.next() : iterator.next(); - if (done) { - readable.push(null); - } else { - const res = value && typeof value.then === "function" ? await value : value; - if (res === null) { - reading = false; - throw new ERR_STREAM_NULL_VALUES(); - } else if (readable.push(res)) { - continue; - } else { - reading = false; - } - } - } catch (err) { - readable.destroy(err); - } - break; - } - } - return readable; - } - module.exports = from; - }, -}); - -var _ReadableFromWeb; -var _ReadableFromWebForUndici; - -// node_modules/readable-stream/lib/internal/streams/readable.js -var require_readable = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/readable.js"(exports, module) { - "use strict"; - var { - ArrayPrototypeIndexOf, - NumberIsInteger, - NumberIsNaN, - NumberParseInt, - ObjectDefineProperties, - ObjectKeys, - ObjectSetPrototypeOf, - Promise: Promise2, - SafeSet, - SymbolAsyncIterator, - Promise, - SymbolAsyncDispose, - Symbol, - } = require_primordials(); - - var { Stream, prependListener } = require_legacy(); - - const BufferList = $cpp("JSBufferList.cpp", "getBufferList"); - - const { AbortError } = require_errors(); - - // TODO(benjamingr) it is likely slower to do it this way than with free functions - function makeBitMapDescriptor(bit) { - return { - enumerable: false, - get() { - return (this.state & bit) !== 0; - }, - set(value) { - if (value) this.state |= bit; - else this.state &= ~bit; - }, - }; - } - function ReadableState(options, stream, isDuplex) { - // Duplex streams are both readable and writable, but share - // the same options object. 
- // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - if (typeof isDuplex !== "boolean") isDuplex = stream instanceof require_duplex(); - - // Bit map field to store ReadableState more effciently with 1 bit per field - // instead of a V8 slot per field. - this.state = kEmitClose | kAutoDestroy | kConstructed | kSync; - // Object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away. - if (options && options.objectMode) this.state |= kObjectMode; - if (isDuplex && options && options.readableObjectMode) this.state |= kObjectMode; - - // The point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - this.highWaterMark = options - ? getHighWaterMark(this, options, "readableHighWaterMark", isDuplex) - : getDefaultHighWaterMark(false); - - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift(). - this.buffer = new BufferList(); - this.length = 0; - this.pipes = []; - this.flowing = null; - this[kPaused] = null; - - // Should close be emitted on destroy. Defaults to true. - if (options && options.emitClose === false) this.state &= ~kEmitClose; - - // Should .destroy() be called after 'end' (and potentially 'finish'). - if (options && options.autoDestroy === false) this.state &= ~kAutoDestroy; - - // Indicates whether the stream has errored. When true no further - // _read calls, 'data' or 'readable' events should occur. This is needed - // since when autoDestroy is disabled we need a way to tell whether the - // stream has failed. - this.errored = null; - - // Crypto is kind of old and crusty. 
Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = (options && options.defaultEncoding) || "utf8"; - - // Ref the piped dest which we need a drain event on it - // type: null | Writable | Set. - this.awaitDrainWriters = null; - this.decoder = null; - this.encoding = null; - if (options && options.encoding) { - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } - } - ReadableState.prototype = {}; - ObjectDefineProperties(ReadableState.prototype, { - objectMode: makeBitMapDescriptor(kObjectMode), - ended: makeBitMapDescriptor(kEnded), - endEmitted: makeBitMapDescriptor(kEndEmitted), - reading: makeBitMapDescriptor(kReading), - // Stream is still being constructed and cannot be - // destroyed until construction finished or failed. - // Async construction is opt in, therefore we start as - // constructed. - constructed: makeBitMapDescriptor(kConstructed), - // A flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - sync: makeBitMapDescriptor(kSync), - // Whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - needReadable: makeBitMapDescriptor(kNeedReadable), - emittedReadable: makeBitMapDescriptor(kEmittedReadable), - readableListening: makeBitMapDescriptor(kReadableListening), - resumeScheduled: makeBitMapDescriptor(kResumeScheduled), - // True if the error was already emitted and should not be thrown again. - errorEmitted: makeBitMapDescriptor(kErrorEmitted), - emitClose: makeBitMapDescriptor(kEmitClose), - autoDestroy: makeBitMapDescriptor(kAutoDestroy), - // Has it been destroyed. 
- destroyed: makeBitMapDescriptor(kDestroyed), - // Indicates whether the stream has finished destroying. - closed: makeBitMapDescriptor(kClosed), - // True if close has been emitted or would have been emitted - // depending on emitClose. - closeEmitted: makeBitMapDescriptor(kCloseEmitted), - multiAwaitDrain: makeBitMapDescriptor(kMultiAwaitDrain), - // If true, a maybeReadMore has been scheduled. - readingMore: makeBitMapDescriptor(kReadingMore), - dataEmitted: makeBitMapDescriptor(kDataEmitted), - }); - - function Readable(options) { - if (!(this instanceof Readable)) return new Readable(options); - const isDuplex = this instanceof require_duplex(); - - this._readableState = new ReadableState(options, this, isDuplex); - if (options) { - const { read, destroy, construct, signal } = options; - if (typeof read === "function") this._read = read; - if (typeof destroy === "function") this._destroy = destroy; - if (typeof construct === "function") this._construct = construct; - if (signal && !isDuplex) addAbortSignal(signal, this); - } - Stream.$call(this, options); - - destroyImpl.construct(this, () => { - if (this._readableState.needReadable) { - maybeReadMore(this, this._readableState); - } - }); - } - Readable.prototype = {}; - ObjectSetPrototypeOf(Readable.prototype, Stream.prototype); - Readable.prototype.constructor = Readable; // Re-add constructor which got lost when setting prototype - ObjectSetPrototypeOf(Readable, Stream); - Readable.ReadableState = ReadableState; - - Readable.prototype.on = function (ev, fn) { - const res = Stream.prototype.on.$call(this, ev, fn); - const state = this._readableState; - if (ev === "data") { - state.readableListening = this.listenerCount("readable") > 0; - if (state.flowing !== false) { - $debug("in flowing mode!", this.__id); - this.resume(); - } else { - $debug("in readable mode!", this.__id); - } - } else if (ev === "readable") { - $debug("readable listener added!", this.__id); - if (!state.endEmitted && 
!state.readableListening) { - state.readableListening = state.needReadable = true; - state.flowing = false; - state.emittedReadable = false; - $debug( - "on readable - state.length, reading, emittedReadable", - state.length, - state.reading, - state.emittedReadable, - this.__id, - ); - if (state.length) { - emitReadable(this); - } else if (!state.reading) { - ProcessNextTick(nReadingNextTick, this); - } - } else if (state.endEmitted) { - $debug("end already emitted...", this.__id); - } - } - return res; - }; - - class ReadableFromWeb extends Readable { - #reader; - #closed; - #pendingChunks; - #stream; - - constructor(options, stream) { - const { objectMode, highWaterMark, encoding, signal } = options; - super({ - objectMode, - highWaterMark, - encoding, - signal, - }); - this.#pendingChunks = []; - this.#reader = undefined; - this.#stream = stream; - this.#closed = false; - } - - #drainPending() { - var pendingChunks = this.#pendingChunks, - pendingChunksI = 0, - pendingChunksCount = pendingChunks.length; - - for (; pendingChunksI < pendingChunksCount; pendingChunksI++) { - const chunk = pendingChunks[pendingChunksI]; - pendingChunks[pendingChunksI] = undefined; - if (!this.push(chunk, undefined)) { - this.#pendingChunks = pendingChunks.slice(pendingChunksI + 1); - return true; - } - } - - if (pendingChunksCount > 0) { - this.#pendingChunks = []; - } - - return false; - } - - #handleDone(reader) { - reader.releaseLock(); - this.#reader = undefined; - this.#closed = true; - this.push(null); - return; - } - - async _read() { - $debug("ReadableFromWeb _read()", this.__id); - var stream = this.#stream, - reader = this.#reader; - if (stream) { - reader = this.#reader = stream.getReader(); - this.#stream = undefined; - } else if (this.#drainPending()) { - return; - } - - var deferredError; - try { - do { - var done = false, - value; - const firstResult = reader.readMany(); - - if ($isPromise(firstResult)) { - ({ done, value } = await firstResult); - - if (this.#closed) 
{ - this.#pendingChunks.push(...value); - return; - } - } else { - ({ done, value } = firstResult); - } - - if (done) { - this.#handleDone(reader); - return; - } - - if (!this.push(value[0])) { - this.#pendingChunks = value.slice(1); - return; - } - - for (let i = 1, count = value.length; i < count; i++) { - if (!this.push(value[i])) { - this.#pendingChunks = value.slice(i + 1); - return; - } - } - } while (!this.#closed); - } catch (e) { - deferredError = e; - } finally { - if (deferredError) throw deferredError; - } - } - - _destroy(error, callback) { - if (!this.#closed) { - var reader = this.#reader; - if (reader) { - this.#reader = undefined; - reader.cancel(error).finally(() => { - this.#closed = true; - callback(error); - }); - } - - return; - } - try { - callback(error); - } catch (error) { - globalThis.reportError(error); - } - } - } - - _ReadableFromWebForUndici = ReadableFromWeb; - - /** - * @param {ReadableStream} readableStream - * @param {{ - * highWaterMark? : number, - * encoding? : string, - * objectMode? : boolean, - * signal? 
: AbortSignal, - * }} [options] - * @returns {Readable} - */ - function newStreamReadableFromReadableStream(readableStream, options = {}) { - if (!isReadableStream(readableStream)) { - throw new ERR_INVALID_ARG_TYPE("readableStream", "ReadableStream", readableStream); - } - - validateObject(options, "options"); - const { - highWaterMark, - encoding, - objectMode = false, - signal, - // native = true, - } = options; - - if (encoding !== undefined && !Buffer.isEncoding(encoding)) - throw $ERR_INVALID_ARG_VALUE("options.encoding", encoding); - validateBoolean(objectMode, "options.objectMode"); - - // validateBoolean(native, "options.native"); - - // if (!native) { - // return new ReadableFromWeb( - // { - // highWaterMark, - // encoding, - // objectMode, - // signal, - // }, - // readableStream, - // ); - // } - - const nativeStream = getNativeReadableStream(Readable, readableStream, options); - - return ( - nativeStream || - new ReadableFromWeb( - { - highWaterMark, - encoding, - objectMode, - signal, - }, - readableStream, - ) - ); - } - - module.exports = Readable; - _ReadableFromWeb = newStreamReadableFromReadableStream; - - var { addAbortSignal } = require_add_abort_signal(); - var eos = require_end_of_stream(); - // function maybeReadMore(stream, state) { - // ProcessNextTick(_maybeReadMore, stream, state); - // } - - function maybeReadMore(stream, state) { - if (!state.readingMore && state.constructed) { - state.readingMore = true; - process.nextTick(maybeReadMore_, stream, state); - } - } - function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. 
We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while ( - !state.reading && - !state.ended && - (state.length < state.highWaterMark || (state.flowing && state.length === 0)) - ) { - const len = state.length; - stream.read(0); - if (len === state.length) - // Didn't get any data, stop spinning. - break; - } - state.readingMore = false; - } - - function emitReadable(stream) { - const state = stream._readableState; - $debug("emitReadable", state.needReadable, state.emittedReadable); - state.needReadable = false; - if (!state.emittedReadable) { - $debug("emitReadable", state.flowing); - state.emittedReadable = true; - process.nextTick(emitReadable_, stream); - } - } - function emitReadable_(stream) { - const state = stream._readableState; - $debug("emitReadable_", state.destroyed, state.length, state.ended); - if (!state.destroyed && !state.errored && (state.length || state.ended)) { - stream.emit("readable"); - state.emittedReadable = false; - } - - // The stream needs another readable event if: - // 1. 
It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; - flow(stream); - } - - var destroyImpl = require_destroy(); - var { - aggregateTwoErrors, - codes: { - ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED, - ERR_OUT_OF_RANGE, - ERR_STREAM_PUSH_AFTER_EOF, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT, - }, - } = require_errors(); - var from = require_from(); - var nop = () => {}; - var { errorOrDestroy } = destroyImpl; - - Readable.prototype.destroy = destroyImpl.destroy; - Readable.prototype._undestroy = destroyImpl.undestroy; - Readable.prototype._destroy = function (err, cb) { - cb(err); - }; - Readable.prototype[EE.captureRejectionSymbol] = function (err) { - this.destroy(err); - }; - Readable.prototype.push = function (chunk, encoding) { - return readableAddChunk(this, chunk, encoding, false); - }; - Readable.prototype.unshift = function (chunk, encoding) { - return readableAddChunk(this, chunk, encoding, true); - }; - function readableAddChunk(stream, chunk, encoding, addToFront) { - $debug("readableAddChunk", chunk, stream.__id); - const state = stream._readableState; - let err; - if (!state.objectMode) { - if (typeof chunk === "string") { - encoding = encoding || state.defaultEncoding; - if (state.encoding !== encoding) { - if (addToFront && state.encoding) { - chunk = Buffer.from(chunk, encoding).toString(state.encoding); - } else { - chunk = Buffer.from(chunk, encoding); - encoding = ""; - } - } - } else if (chunk instanceof Buffer) { - encoding = ""; - } else if (Stream._isUint8Array(chunk)) { - if (addToFront || !state.decoder) { - chunk = Stream._uint8ArrayToBuffer(chunk); - } - encoding = ""; - } else if (chunk != null) { - err = new ERR_INVALID_ARG_TYPE("chunk", ["string", "Buffer", "Uint8Array"], chunk); - } - } - if (err) { - 
errorOrDestroy(stream, err); - } else if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else if (state.objectMode || (chunk && chunk.length > 0)) { - if (addToFront) { - if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()); - else if (state.destroyed || state.errored) return false; - else addChunk(stream, state, chunk, true); - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); - } else if (state.destroyed || state.errored) { - return false; - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false); - else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - maybeReadMore(stream, state); - } - return !state.ended && (state.length < state.highWaterMark || state.length === 0); - } - function addChunk(stream, state, chunk, addToFront) { - $debug("adding chunk", stream.__id); - $debug("chunk", chunk.toString(), stream.__id); - if (state.flowing && state.length === 0 && !state.sync && stream.listenerCount("data") > 0) { - if (state.multiAwaitDrain) { - state.awaitDrainWriters.clear(); - } else { - state.awaitDrainWriters = null; - } - state.dataEmitted = true; - stream.emit("data", chunk); - } else { - state.length += state.objectMode ? 
1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk); - else state.buffer.push(chunk); - $debug("needReadable @ addChunk", state.needReadable, stream.__id); - if (state.needReadable) emitReadable(stream); - } - $debug("about to maybereadmore"); - maybeReadMore(stream, state); - } - function onEofChunk(stream, state) { - if (state.ended) return; - - const decoder = state.decoder; - if (decoder) { - const chunk = decoder.end(); - const chunkLength = chunk?.length; - if (chunkLength) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunkLength; - } - } - state.ended = true; - if (state.sync) { - emitReadable(stream); - } else { - state.needReadable = false; - state.emittedReadable = true; - emitReadable_(stream); - } - } - Readable.prototype.isPaused = function () { - const state = this._readableState; - return state.paused === true || state.flowing === false; - }; - Readable.prototype.setEncoding = function (enc) { - const decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; - this._readableState.encoding = this._readableState.decoder.encoding; - const buffer = this._readableState.buffer; - let content = ""; - // BufferList does not support iterator now, and iterator is slow in JSC. 
- // for (const data of buffer) { - // content += decoder.write(data); - // } - // buffer.clear(); - for (let i = buffer.length; i > 0; i--) { - content += decoder.write(buffer.shift()); - } - if (content !== "") buffer.push(content); - this._readableState.length = content.length; - return this; - }; - var MAX_HWM = 1073741824; - function computeNewHighWaterMark(n) { - if (n > MAX_HWM) { - throw new ERR_OUT_OF_RANGE("size", "<= 1GiB", n); - } else { - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; - } - function howMuchToRead(n, state) { - if (n <= 0 || (state.length === 0 && state.ended)) return 0; - if (state.objectMode) return 1; - if (NumberIsNaN(n)) { - if (state.flowing && state.length) return state.buffer.first().length; - return state.length; - } - if (n <= state.length) return n; - return state.ended ? state.length : 0; - } - // You can override either this method, or the async _read(n) below. - Readable.prototype.read = function (n) { - $debug("read - n =", n, this.__id); - if (!NumberIsInteger(n)) { - n = NumberParseInt(n, 10); - } - const state = this._readableState; - const nOrig = n; - - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - - if (n !== 0) state.emittedReadable = false; - - // If we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if ( - n === 0 && - state.needReadable && - ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended) - ) { - $debug("read: emitReadable or endReadable", state.length, state.ended, this.__id); - if (state.length === 0 && state.ended) endReadable(this); - else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); - - // If we've ended, and we're now clear, then finish it up. 
- if (n === 0 && state.ended) { - $debug("read: calling endReadable if length 0 -- length, state.ended", state.length, state.ended, this.__id); - if (state.length === 0) endReadable(this); - return null; - } - - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - - // if we need a readable event, then we need to do some reading. - let doRead = state.needReadable; - $debug("need readable", doRead, this.__id); - - // If we currently have less than the highWaterMark, then also read some. 
- if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - $debug("length less than watermark", doRead, this.__id); - } - - // However, if we've ended, then there's no point, if we're already - // reading, then it's unnecessary, if we're constructing we have to wait, - // and if we're destroyed or errored, then it's not allowed, - if (state.ended || state.reading || state.destroyed || state.errored || !state.constructed) { - $debug("state.constructed?", state.constructed, this.__id); - doRead = false; - $debug("reading, ended or constructing", doRead, this.__id); - } else if (doRead) { - $debug("do read", this.__id); - state.reading = true; - state.sync = true; - // If the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - - // Call internal read method - try { - var result = this._read(state.highWaterMark); - if ($isPromise(result)) { - $debug("async _read", this.__id); - const peeked = Bun.peek(result); - $debug("peeked promise", peeked, this.__id); - if (peeked !== result) { - result = peeked; - } - } - - if ($isPromise(result) && result?.then && $isCallable(result.then)) { - $debug("async _read result.then setup", this.__id); - result.then(nop, function (err) { - errorOrDestroy(this, err); - }); - } - } catch (err) { - errorOrDestroy(this, err); - } - - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. 
- if (!state.reading) n = howMuchToRead(nOrig, state); - } - - $debug("n @ fromList", n, this.__id); - let ret; - if (n > 0) ret = fromList(n, state); - else ret = null; - - $debug("ret @ read", ret, this.__id); - - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark; - $debug("state.length while ret = null", state.length, this.__id); - n = 0; - } else { - state.length -= n; - if (state.multiAwaitDrain) { - state.awaitDrainWriters.clear(); - } else { - state.awaitDrainWriters = null; - } - } - - $debug("length", state.length, state.ended, nOrig, n); - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; - - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); - } - - if (ret !== null && !state.errorEmitted && !state.closeEmitted) { - state.dataEmitted = true; - this.emit("data", ret); - } - - return ret; - }; - Readable.prototype._read = function (n) { - throw new ERR_METHOD_NOT_IMPLEMENTED("_read()"); - }; - Readable.prototype.pipe = function (dest, pipeOpts) { - const src = this; - const state = this._readableState; - if (state.pipes.length === 1) { - if (!state.multiAwaitDrain) { - state.multiAwaitDrain = true; - state.awaitDrainWriters = new SafeSet(state.awaitDrainWriters ? [state.awaitDrainWriters] : []); - } - } - state.pipes.push(dest); - $debug("pipe count=%d opts=%j", state.pipes.length, pipeOpts, src.__id); - const doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - const endFn = doEnd ? 
onend : unpipe; - if (state.endEmitted) ProcessNextTick(endFn); - else src.once("end", endFn); - dest.on("unpipe", onunpipe); - function onunpipe(readable, unpipeInfo) { - $debug("onunpipe", src.__id); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } - function onend() { - $debug("onend", src.__id); - dest.end(); - } - let ondrain; - let cleanedUp = false; - function cleanup() { - $debug("cleanup", src.__id); - dest.removeListener("close", onclose); - dest.removeListener("finish", onfinish); - if (ondrain) { - dest.removeListener("drain", ondrain); - } - dest.removeListener("error", onerror); - dest.removeListener("unpipe", onunpipe); - src.removeListener("end", onend); - src.removeListener("end", unpipe); - src.removeListener("data", ondata); - cleanedUp = true; - if (ondrain && state.awaitDrainWriters && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - function pause() { - if (!cleanedUp) { - if (state.pipes.length === 1 && state.pipes[0] === dest) { - $debug("false write response, pause", 0, src.__id); - state.awaitDrainWriters = dest; - state.multiAwaitDrain = false; - } else if (state.pipes.length > 1 && state.pipes.includes(dest)) { - $debug("false write response, pause", state.awaitDrainWriters.size, src.__id); - state.awaitDrainWriters.add(dest); - } - src.pause(); - } - if (!ondrain) { - ondrain = pipeOnDrain(src, dest); - dest.on("drain", ondrain); - } - } - src.on("data", ondata); - function ondata(chunk) { - $debug("ondata", src.__id); - const ret = dest.write(chunk); - $debug("dest.write", ret, src.__id); - if (ret === false) { - pause(); - } - } - function onerror(er) { - $debug("onerror", er); - unpipe(); - dest.removeListener("error", onerror); - if (dest.listenerCount("error") === 0) { - const s = dest._writableState || dest._readableState; - if (s && !s.errorEmitted) { - errorOrDestroy(dest, er); - } else { - dest.emit("error", er); - 
} - } - } - prependListener(dest, "error", onerror); - function onclose() { - dest.removeListener("finish", onfinish); - unpipe(); - } - dest.once("close", onclose); - function onfinish() { - $debug("onfinish"); - dest.removeListener("close", onclose); - unpipe(); - } - dest.once("finish", onfinish); - function unpipe() { - $debug("unpipe"); - src.unpipe(dest); - } - dest.emit("pipe", src); - if (dest.writableNeedDrain === true) { - if (state.flowing) { - pause(); - } - } else if (!state.flowing) { - $debug("pipe resume"); - src.resume(); - } - return dest; - }; - function pipeOnDrain(src, dest) { - return function pipeOnDrainFunctionResult() { - const state = src._readableState; - if (state.awaitDrainWriters === dest) { - $debug("pipeOnDrain", 1); - state.awaitDrainWriters = null; - } else if (state.multiAwaitDrain) { - $debug("pipeOnDrain", state.awaitDrainWriters.size); - state.awaitDrainWriters.delete(dest); - } - if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && src.listenerCount("data")) { - src.resume(); - } - }; - } - Readable.prototype.unpipe = function (dest) { - const state = this._readableState; - const unpipeInfo = { - hasUnpiped: false, - }; - if (state.pipes.length === 0) return this; - if (!dest) { - const dests = state.pipes; - state.pipes = []; - this.pause(); - for (let i = 0; i < dests.length; i++) - dests[i].emit("unpipe", this, { - hasUnpiped: false, - }); - return this; - } - const index = ArrayPrototypeIndexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - if (state.pipes.length === 0) this.pause(); - dest.emit("unpipe", this, unpipeInfo); - return this; - }; - Readable.prototype.addListener = Readable.prototype.on; - Readable.prototype.removeListener = function (ev, fn) { - const res = Stream.prototype.removeListener.$call(this, ev, fn); - if (ev === "readable") { - ProcessNextTick(updateReadableListening, this); - } - return res; - }; - Readable.prototype.off = 
Readable.prototype.removeListener; - Readable.prototype.removeAllListeners = function (ev) { - const res = Stream.prototype.removeAllListeners.$apply(this, arguments); - if (ev === "readable" || ev === void 0) { - ProcessNextTick(updateReadableListening, this); - } - return res; - }; - function updateReadableListening(self) { - const state = self._readableState; - state.readableListening = self.listenerCount("readable") > 0; - if (state.resumeScheduled && state.paused === false) { - state.flowing = true; - } else if (self.listenerCount("data") > 0) { - self.resume(); - } else if (!state.readableListening) { - state.flowing = null; - } - } - function nReadingNextTick(self) { - $debug("on readable nextTick, calling read(0)", self.__id); - self.read(0); - } - // Readable.prototype.resume = function () { - // const state = this._readableState; - // if (!state.flowing) { - // $debug("resume", this.__id); - // state.flowing = !state.readableListening; - // resume(this, state); - // } - // state.paused = false; - // return this; - // }; - Readable.prototype.resume = function () { - const state = this._readableState; - if (!state.flowing) { - $debug("resume"); - // We flow only if there is no one listening - // for readable, but we still have to call - // resume(). 
- state.flowing = !state.readableListening; - resume(this, state); - } - state[kPaused] = false; - return this; - }; - function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - process.nextTick(resume_, stream, state); - } - } - function resume_(stream, state) { - $debug("resume", state.reading); - if (!state.reading) { - stream.read(0); - } - state.resumeScheduled = false; - stream.emit("resume"); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); - } - Readable.prototype.pause = function () { - $debug("call pause flowing=%j", this._readableState.flowing, this.__id); - if (this._readableState.flowing !== false) { - $debug("pause", this.__id); - this._readableState.flowing = false; - this.emit("pause"); - } - this._readableState.paused = true; - return this; - }; - function flow(stream) { - const state = stream._readableState; - while (state.flowing && stream.read() !== null); - } - Readable.prototype.wrap = function (stream) { - let paused = false; - stream.on("data", chunk => { - if (!this.push(chunk) && stream.pause) { - paused = true; - stream.pause(); - } - }); - stream.on("end", () => { - this.push(null); - }); - stream.on("error", err => { - errorOrDestroy(this, err); - }); - stream.on("close", () => { - this.destroy(); - }); - stream.on("destroy", () => { - this.destroy(); - }); - this._read = () => { - if (paused && stream.resume) { - paused = false; - stream.resume(); - } - }; - const streamKeys = ObjectKeys(stream); - for (let j = 1; j < streamKeys.length; j++) { - const i = streamKeys[j]; - if (this[i] === void 0 && typeof stream[i] === "function") { - this[i] = stream[i].bind(stream); - } - } - return this; - }; - Readable.prototype[SymbolAsyncIterator] = function () { - return streamToAsyncIterator(this); - }; - Readable.prototype.iterator = function (options) { - if (options !== void 0) { - validateObject(options, "options"); - } - return streamToAsyncIterator(this, options); - }; - 
function streamToAsyncIterator(stream, options) { - if (typeof stream.read !== "function") { - stream = Readable.wrap(stream, { - objectMode: true, - }); - } - const iter = createAsyncIterator(stream, options); - iter.stream = stream; - return iter; - } - async function* createAsyncIterator(stream, options) { - let callback = nop; - function next(resolve) { - if (this === stream) { - callback(); - callback = nop; - } else { - callback = resolve; - } - } - stream.on("readable", next); - let error; - const cleanup = eos( - stream, - { - writable: false, - }, - err => { - error = err ? aggregateTwoErrors(error, err) : null; - callback(); - callback = nop; - }, - ); - try { - while (true) { - const chunk = stream.destroyed ? null : stream.read(); - if (chunk !== null) { - yield chunk; - } else if (error) { - throw error; - } else if (error === null) { - return; - } else { - await new Promise2(next); - } - } - } catch (err) { - error = aggregateTwoErrors(error, err); - throw error; - } finally { - if ( - (error || (options === null || options === void 0 ? 
void 0 : options.destroyOnReturn) !== false) && - (error === void 0 || stream._readableState.autoDestroy) - ) { - destroyImpl.destroyer(stream, null); - } else { - stream.off("readable", next); - cleanup(); - } - } - } - ObjectDefineProperties(Readable.prototype, { - readable: { - get() { - const r = this._readableState; - return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && !r.endEmitted; - }, - set(val) { - if (this._readableState) { - this._readableState.readable = !!val; - } - }, - }, - readableDidRead: { - enumerable: false, - get: function () { - return this._readableState.dataEmitted; - }, - }, - readableAborted: { - enumerable: false, - get: function () { - return !!( - this._readableState.readable !== false && - (this._readableState.destroyed || this._readableState.errored) && - !this._readableState.endEmitted - ); - }, - }, - readableHighWaterMark: { - enumerable: false, - get: function () { - return this._readableState.highWaterMark; - }, - }, - readableBuffer: { - enumerable: false, - get: function () { - return this._readableState && this._readableState.buffer; - }, - }, - readableFlowing: { - enumerable: false, - get: function () { - return this._readableState.flowing; - }, - set: function (state) { - if (this._readableState) { - this._readableState.flowing = state; - } - }, - }, - readableLength: { - enumerable: false, - get() { - return this._readableState.length; - }, - }, - readableObjectMode: { - enumerable: false, - get() { - return this._readableState ? this._readableState.objectMode : false; - }, - }, - readableEncoding: { - enumerable: false, - get() { - return this._readableState ? this._readableState.encoding : null; - }, - }, - errored: { - enumerable: false, - get() { - return this._readableState ? this._readableState.errored : null; - }, - }, - closed: { - get() { - return this._readableState ? this._readableState.closed : false; - }, - }, - destroyed: { - enumerable: false, - get() { - return this._readableState ? 
this._readableState.destroyed : false; - }, - set(value) { - if (!this._readableState) { - return; - } - this._readableState.destroyed = value; - }, - }, - readableEnded: { - enumerable: false, - get() { - return this._readableState ? this._readableState.endEmitted : false; - }, - }, - }); - Readable._fromList = fromList; - function fromList(n, state) { - if (state.length === 0) return null; - let ret; - if (state.objectMode) ret = state.buffer.shift(); - else if (!n || n >= state.length) { - if (state.decoder) ret = state.buffer.join(""); - else if (state.buffer.length === 1) ret = state.buffer.first(); - else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - ret = state.buffer.consume(n, state.decoder); - } - return ret; - } - function endReadable(stream) { - const state = stream._readableState; - $debug("endEmitted @ endReadable", state.endEmitted, stream.__id); - if (!state.endEmitted) { - state.ended = true; - ProcessNextTick(endReadableNT, state, stream); - } - } - function endReadableNT(state, stream) { - $debug("endReadableNT -- endEmitted, state.length", state.endEmitted, state.length, stream.__id); - if (!state.errored && !state.closeEmitted && !state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.emit("end"); - $debug("end emitted @ endReadableNT", stream.__id); - if (stream.writable && stream.allowHalfOpen === false) { - ProcessNextTick(endWritableNT, stream); - } else if (state.autoDestroy) { - const wState = stream._writableState; - const autoDestroy = !wState || (wState.autoDestroy && (wState.finished || wState.writable === false)); - if (autoDestroy) { - stream[kAutoDestroyed] = true; // workaround for node:http Server not using node:net Server - stream.destroy(); - } - } - } - } - function endWritableNT(stream) { - const writable = stream.writable && !stream.writableEnded && !stream.destroyed; - if (writable) { - stream.end(); - } - } - Readable.from = function (iterable, opts) { - return 
from(Readable, iterable, opts); - }; - var webStreamsAdapters = { - newStreamReadableFromReadableStream, - - newReadableStreamFromStreamReadable(streamReadable, options = {}) { - // Not using the internal/streams/utils isReadableNodeStream utility - // here because it will return false if streamReadable is a Duplex - // whose readable option is false. For a Duplex that is not readable, - // we want it to pass this check but return a closed ReadableStream. - if (typeof streamReadable?._readableState !== "object") { - throw new ERR_INVALID_ARG_TYPE("streamReadable", "stream.Readable", streamReadable); - } - var { isDestroyed, isReadable } = require_utils(); - - if (isDestroyed(streamReadable) || !isReadable(streamReadable)) { - const readable = new ReadableStream(); - readable.cancel(); - return readable; - } - - const objectMode = streamReadable.readableObjectMode; - const highWaterMark = streamReadable.readableHighWaterMark; - - const evaluateStrategyOrFallback = strategy => { - // If there is a strategy available, use it - if (strategy) return strategy; - - if (objectMode) { - // When running in objectMode explicitly but no strategy, we just fall - // back to CountQueuingStrategy - return new CountQueuingStrategy({ highWaterMark }); - } - - // When not running in objectMode explicitly, we just fall - // back to a minimal strategy that just specifies the highWaterMark - // and no size algorithm. Using a ByteLengthQueuingStrategy here - // is unnecessary. 
- return { highWaterMark }; - }; - - const strategy = evaluateStrategyOrFallback(options?.strategy); - - let controller; - - function onData(chunk) { - controller.enqueue(chunk); - if (controller.desiredSize <= 0) streamReadable.pause(); - } - - streamReadable.pause(); - - const cleanup = eos(streamReadable, error => { - if (error?.code === "ERR_STREAM_PREMATURE_CLOSE") { - const err = new AbortError(undefined, { cause: error }); - error = err; - } - - cleanup(); - // This is a protection against non-standard, legacy streams - // that happen to emit an error event again after finished is called. - streamReadable.on("error", () => {}); - if (error) return controller.error(error); - controller.close(); - }); - - streamReadable.on("data", onData); - - return new ReadableStream( - { - start(c) { - controller = c; - }, - - pull() { - streamReadable.resume(); - }, - - cancel(reason) { - destroy(streamReadable, reason); - }, - }, - strategy, - ); - }, - }; - - Readable.fromWeb = function (readableStream, options) { - // We cache .stream() calls for file descriptors - // This won't create a new ReadableStream each time. - let bunStdinStream = Bun.stdin.stream(); - if (readableStream === bunStdinStream) { - return bunStdinStream; - } - - return webStreamsAdapters.newStreamReadableFromReadableStream(readableStream, options); - }; - Readable.toWeb = function (streamReadable, options) { - // Workaround for https://github.com/oven-sh/bun/issues/9041 - if (streamReadable === process.stdin) { - return Bun.stdin.stream(); - } - - return webStreamsAdapters.newReadableStreamFromStreamReadable(streamReadable, options); - }; - Readable.wrap = function (src, options) { - var _ref, _src$readableObjectMo; - return new Readable({ - objectMode: - (_ref = - (_src$readableObjectMo = src.readableObjectMode) !== null && _src$readableObjectMo !== void 0 - ? _src$readableObjectMo - : src.objectMode) !== null && _ref !== void 0 - ? 
_ref - : true, - ...options, - destroy(err, callback) { - destroyImpl.destroyer(src, err); - callback(err); - }, - }).wrap(src); - }; - }, -}); -const Readable = require_readable(); - -// node_modules/readable-stream/lib/internal/streams/writable.js -var errorOrDestroy; -var require_writable = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/writable.js"(exports, module) { - "use strict"; - var { - ArrayPrototypeSlice, - Error: Error2, - FunctionPrototypeSymbolHasInstance, - ObjectDefineProperty, - ObjectDefineProperties, - ObjectSetPrototypeOf, - StringPrototypeToLowerCase, - Symbol: Symbol2, - SymbolHasInstance, - } = require_primordials(); - - var Stream = require_legacy().Stream; - var destroyImpl = require_destroy(); - var { addAbortSignal } = require_add_abort_signal(); - var { - ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED, - ERR_STREAM_ALREADY_FINISHED, - ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING, - } = require_errors().codes; - ({ errorOrDestroy } = destroyImpl); - - function Writable(options = {}) { - const isDuplex = this instanceof require_duplex(); - if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options); - - // this._events ??= { - // close: undefined, - // error: undefined, - // prefinish: undefined, - // finish: undefined, - // drain: undefined, - // }; - - this._writableState = new WritableState(options, this, isDuplex); - if (options) { - if (typeof options.write === "function") this._write = options.write; - if (typeof options.writev === "function") this._writev = options.writev; - if (typeof options.destroy === "function") this._destroy = options.destroy; - if (typeof options.final === "function") this._final = options.final; - if (typeof options.construct === "function") this._construct = options.construct; - if (options.signal) addAbortSignal(options.signal, this); - } 
- Stream.$call(this, options); - - destroyImpl.construct(this, () => { - const state = this._writableState; - if (!state.writing) { - clearBuffer(this, state); - } - finishMaybe(this, state); - }); - } - Writable.prototype = {}; - ObjectSetPrototypeOf(Writable.prototype, Stream.prototype); - Writable.prototype.constructor = Writable; // Re-add constructor which got lost when setting prototype - ObjectSetPrototypeOf(Writable, Stream); - module.exports = Writable; - - function nop() {} - var kOnFinished = Symbol2("kOnFinished"); - function WritableState(options, stream, isDuplex) { - if (typeof isDuplex !== "boolean") isDuplex = stream instanceof require_duplex(); - this.objectMode = !!(options && options.objectMode); - if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode); - this.highWaterMark = options - ? getHighWaterMark(this, options, "writableHighWaterMark", isDuplex) - : getDefaultHighWaterMark(false); - this.finalCalled = false; - this.needDrain = false; - this.ending = false; - this.ended = false; - this.finished = false; - this.destroyed = false; - const noDecode = !!(options && options.decodeStrings === false); - this.decodeStrings = !noDecode; - this.defaultEncoding = (options && options.defaultEncoding) || "utf8"; - this.length = 0; - this.writing = false; - this.corked = 0; - this.sync = true; - this.bufferProcessing = false; - this.onwrite = onwrite.bind(void 0, stream); - this.writecb = null; - this.writelen = 0; - this.afterWriteTickInfo = null; - resetBuffer(this); - this.pendingcb = 0; - this.constructed = true; - this.prefinished = false; - this.errorEmitted = false; - this.emitClose = !options || options.emitClose !== false; - this.autoDestroy = !options || options.autoDestroy !== false; - this.errored = null; - this.closed = false; - this.closeEmitted = false; - this[kOnFinished] = []; - } - WritableState.prototype = {}; - function resetBuffer(state) { - state.buffered = []; - state.bufferedIndex = 0; - 
state.allBuffers = true; - state.allNoop = true; - } - WritableState.prototype.getBuffer = function getBuffer() { - return ArrayPrototypeSlice(this.buffered, this.bufferedIndex); - }; - ObjectDefineProperty(WritableState.prototype, "bufferedRequestCount", { - get() { - return this.buffered.length - this.bufferedIndex; - }, - }); - - ObjectDefineProperty(Writable, SymbolHasInstance, { - value: function (object) { - if (FunctionPrototypeSymbolHasInstance(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; - }, - }); - Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); - }; - function _write(stream, chunk, encoding, cb) { - const state = stream._writableState; - if (typeof encoding === "function") { - cb = encoding; - encoding = state.defaultEncoding; - } else { - if (!encoding) encoding = state.defaultEncoding; - else if (encoding !== "buffer" && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding); - if (typeof cb !== "function") cb = nop; - } - if (chunk === null) { - throw new ERR_STREAM_NULL_VALUES(); - } else if (!state.objectMode) { - if (typeof chunk === "string") { - if (state.decodeStrings !== false) { - chunk = Buffer.from(chunk, encoding); - encoding = "buffer"; - } - } else if (chunk instanceof Buffer) { - encoding = "buffer"; - } else if (Stream._isUint8Array(chunk)) { - chunk = Stream._uint8ArrayToBuffer(chunk); - encoding = "buffer"; - } else { - throw new ERR_INVALID_ARG_TYPE("chunk", ["string", "Buffer", "Uint8Array"], chunk); - } - } - let err; - if (state.ending) { - err = new ERR_STREAM_WRITE_AFTER_END(); - } else if (state.destroyed) { - err = new ERR_STREAM_DESTROYED("write"); - } - if (err) { - ProcessNextTick(cb, err); - errorOrDestroy(stream, err, true); - return err; - } - state.pendingcb++; - return writeOrBuffer(stream, state, chunk, encoding, cb); - } - Writable.prototype.write = function (chunk, 
encoding, cb) { - if ($isCallable(encoding)) { - cb = encoding; - encoding = null; - } - return _write(this, chunk, encoding, cb) === true; - }; - Writable.prototype.cork = function () { - this._writableState.corked++; - }; - Writable.prototype.uncork = function () { - const state = this._writableState; - if (state.corked) { - state.corked--; - if (!state.writing) clearBuffer(this, state); - } - }; - Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - if (typeof encoding === "string") encoding = StringPrototypeToLowerCase(encoding); - if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding); - this._writableState.defaultEncoding = encoding; - return this; - }; - function writeOrBuffer(stream, state, chunk, encoding, callback) { - const len = state.objectMode ? 1 : chunk.length; - state.length += len; - const ret = state.length < state.highWaterMark; - if (!ret) state.needDrain = true; - if (state.writing || state.corked || state.errored || !state.constructed) { - state.buffered.push({ - chunk, - encoding, - callback, - }); - if (state.allBuffers && encoding !== "buffer") { - state.allBuffers = false; - } - if (state.allNoop && callback !== nop) { - state.allNoop = false; - } - } else { - state.writelen = len; - state.writecb = callback; - state.writing = true; - state.sync = true; - stream._write(chunk, encoding, state.onwrite); - state.sync = false; - } - return ret && !state.errored && !state.destroyed; - } - function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED("write")); - else if (writev) stream._writev(chunk, state.onwrite); - else stream._write(chunk, encoding, state.onwrite); - state.sync = false; - } - function onwriteError(stream, state, er, cb) { - --state.pendingcb; - cb(er); - errorBuffer(state); - errorOrDestroy(stream, er); - } - function 
onwrite(stream, er) { - const state = stream._writableState; - const sync = state.sync; - const cb = state.writecb; - if (typeof cb !== "function") { - errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK()); - return; - } - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; - if (er) { - Error.captureStackTrace(er); - if (!state.errored) { - state.errored = er; - } - if (stream._readableState && !stream._readableState.errored) { - stream._readableState.errored = er; - } - if (sync) { - ProcessNextTick(onwriteError, stream, state, er, cb); - } else { - onwriteError(stream, state, er, cb); - } - } else { - if (state.buffered.length > state.bufferedIndex) { - clearBuffer(stream, state); - } - if (sync) { - if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) { - state.afterWriteTickInfo.count++; - } else { - state.afterWriteTickInfo = { - count: 1, - cb, - stream, - state, - }; - ProcessNextTick(afterWriteTick, state.afterWriteTickInfo); - } - } else { - afterWrite(stream, state, 1, cb); - } - } - } - function afterWriteTick({ stream, state, count, cb }) { - state.afterWriteTickInfo = null; - return afterWrite(stream, state, count, cb); - } - function afterWrite(stream, state, count, cb) { - const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain; - if (needDrain) { - state.needDrain = false; - stream.emit("drain"); - } - while (count-- > 0) { - state.pendingcb--; - cb(); - } - if (state.destroyed) { - errorBuffer(state); - } - finishMaybe(stream, state); - } - function errorBuffer(state) { - if (state.writing) { - return; - } - for (let n = state.bufferedIndex; n < state.buffered.length; ++n) { - var _state$errored; - const { chunk, callback } = state.buffered[n]; - const len = state.objectMode ? 1 : chunk.length; - state.length -= len; - callback( - (_state$errored = state.errored) !== null && _state$errored !== void 0 - ? 
_state$errored - : new ERR_STREAM_DESTROYED("write"), - ); - } - const onfinishCallbacks = state[kOnFinished].splice(0); - for (let i = 0; i < onfinishCallbacks.length; i++) { - var _state$errored2; - onfinishCallbacks[i]( - (_state$errored2 = state.errored) !== null && _state$errored2 !== void 0 - ? _state$errored2 - : new ERR_STREAM_DESTROYED("end"), - ); - } - resetBuffer(state); - } - function clearBuffer(stream, state) { - if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) { - return; - } - const { buffered, bufferedIndex, objectMode } = state; - const bufferedLength = buffered.length - bufferedIndex; - if (!bufferedLength) { - return; - } - let i = bufferedIndex; - state.bufferProcessing = true; - if (bufferedLength > 1 && stream._writev) { - state.pendingcb -= bufferedLength - 1; - const callback = state.allNoop - ? nop - : err => { - for (let n = i; n < buffered.length; ++n) { - buffered[n].callback(err); - } - }; - const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i); - chunks.allBuffers = state.allBuffers; - doWrite(stream, state, true, state.length, chunks, "", callback); - resetBuffer(state); - } else { - do { - const { chunk, encoding, callback } = buffered[i]; - buffered[i++] = null; - const len = objectMode ? 
1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, callback); - } while (i < buffered.length && !state.writing); - if (i === buffered.length) { - resetBuffer(state); - } else if (i > 256) { - buffered.splice(0, i); - state.bufferedIndex = 0; - } else { - state.bufferedIndex = i; - } - } - state.bufferProcessing = false; - } - Writable.prototype._write = function (chunk, encoding, cb) { - if (this._writev) { - this._writev( - [ - { - chunk, - encoding, - }, - ], - cb, - ); - } else { - throw new ERR_METHOD_NOT_IMPLEMENTED("_write()"); - } - }; - Writable.prototype._writev = null; - Writable.prototype.end = function (chunk, encoding, cb, native = false) { - const state = this._writableState; - $debug("end", state, this.__id); - if (typeof chunk === "function") { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === "function") { - cb = encoding; - encoding = null; - } - let err; - if (chunk !== null && chunk !== void 0) { - let ret; - if (!native) { - ret = _write(this, chunk, encoding); - } else { - ret = this.write(chunk, encoding); - } - if (ret instanceof Error2) { - err = ret; - } - } - if (state.corked) { - state.corked = 1; - this.uncork(); - } - if (err) { - this.emit("error", err); - } else if (!state.errored && !state.ending) { - state.ending = true; - finishMaybe(this, state, true); - state.ended = true; - } else if (state.finished) { - err = new ERR_STREAM_ALREADY_FINISHED("end"); - } else if (state.destroyed) { - err = new ERR_STREAM_DESTROYED("end"); - } - if (typeof cb === "function") { - if (err || state.finished) { - ProcessNextTick(cb, err); - } else { - state[kOnFinished].push(cb); - } - } - return this; - }; - function needFinish(state, tag?) 
{ - var needFinish = - state.ending && - !state.destroyed && - state.constructed && - state.length === 0 && - !state.errored && - state.buffered.length === 0 && - !state.finished && - !state.writing && - !state.errorEmitted && - !state.closeEmitted; - $debug("needFinish", needFinish, tag); - return needFinish; - } - function callFinal(stream, state) { - let called = false; - function onFinish(err) { - if (called) { - errorOrDestroy(stream, err !== null && err !== void 0 ? err : new ERR_MULTIPLE_CALLBACK()); - return; - } - called = true; - state.pendingcb--; - if (err) { - const onfinishCallbacks = state[kOnFinished].splice(0); - for (let i = 0; i < onfinishCallbacks.length; i++) { - onfinishCallbacks[i](err); - } - errorOrDestroy(stream, err, state.sync); - } else if (needFinish(state)) { - state.prefinished = true; - stream.emit("prefinish"); - state.pendingcb++; - ProcessNextTick(finish, stream, state); - } - } - state.sync = true; - state.pendingcb++; - try { - stream._final(onFinish); - } catch (err) { - onFinish(err); - } - state.sync = false; - } - function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === "function" && !state.destroyed) { - state.finalCalled = true; - callFinal(stream, state); - } else { - state.prefinished = true; - stream.emit("prefinish"); - } - } - } - function finishMaybe(stream, state, sync) { - $debug("finishMaybe -- state, sync", state, sync, stream.__id); - - if (!needFinish(state, stream.__id)) return; - - prefinish(stream, state); - if (state.pendingcb === 0) { - if (sync) { - state.pendingcb++; - ProcessNextTick.$call( - null, - (stream2, state2) => { - if (needFinish(state2)) { - finish(stream2, state2); - } else { - state2.pendingcb--; - } - }, - stream, - state, - ); - } else if (needFinish(state)) { - state.pendingcb++; - finish(stream, state); - } - } - } - function finish(stream, state) { - state.pendingcb--; - state.finished = true; - const onfinishCallbacks = 
state[kOnFinished].splice(0); - for (let i = 0; i < onfinishCallbacks.length; i++) { - onfinishCallbacks[i](); - } - stream.emit("finish"); - if (state.autoDestroy) { - const rState = stream._readableState; - const autoDestroy = !rState || (rState.autoDestroy && (rState.endEmitted || rState.readable === false)); - if (autoDestroy) { - stream.destroy(); - } - } - } - ObjectDefineProperties(Writable.prototype, { - closed: { - get() { - return this._writableState ? this._writableState.closed : false; - }, - }, - destroyed: { - get() { - return this._writableState ? this._writableState.destroyed : false; - }, - set(value) { - if (this._writableState) { - this._writableState.destroyed = value; - } - }, - }, - writable: { - get() { - const w = this._writableState; - return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended; - }, - set(val) { - if (this._writableState) { - this._writableState.writable = !!val; - } - }, - }, - writableFinished: { - get() { - return this._writableState ? this._writableState.finished : false; - }, - }, - writableObjectMode: { - get() { - return this._writableState ? this._writableState.objectMode : false; - }, - }, - writableBuffer: { - get() { - return this._writableState && this._writableState.getBuffer(); - }, - }, - writableEnded: { - get() { - return this._writableState ? this._writableState.ending : false; - }, - }, - writableNeedDrain: { - get() { - const wState = this._writableState; - if (!wState) return false; - return !wState.destroyed && !wState.ending && wState.needDrain; - }, - }, - writableHighWaterMark: { - get() { - return this._writableState && this._writableState.highWaterMark; - }, - }, - writableCorked: { - get() { - return this._writableState ? this._writableState.corked : 0; - }, - }, - writableLength: { - get() { - return this._writableState && this._writableState.length; - }, - }, - errored: { - enumerable: false, - get() { - return this._writableState ? 
this._writableState.errored : null; - }, - }, - writableAborted: { - enumerable: false, - get: function () { - return !!( - this._writableState.writable !== false && - (this._writableState.destroyed || this._writableState.errored) && - !this._writableState.finished - ); - }, - }, - }); - var destroy = destroyImpl.destroy; - Writable.prototype.destroy = function (err, cb) { - const state = this._writableState; - if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) { - ProcessNextTick(errorBuffer, state); - } - destroy.$call(this, err, cb); - return this; - }; - Writable.prototype._undestroy = destroyImpl.undestroy; - Writable.prototype._destroy = function (err, cb) { - cb(err); - }; - Writable.prototype[EE.captureRejectionSymbol] = function (err) { - this.destroy(err); - }; - var webStreamsAdapters; - function lazyWebStreams() { - if (webStreamsAdapters === void 0) webStreamsAdapters = {}; - return webStreamsAdapters; - } - Writable.fromWeb = function (writableStream, options) { - return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options); - }; - Writable.toWeb = function (streamWritable) { - return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable); - }; - }, -}); -const Writable = require_writable(); - -// node_modules/readable-stream/lib/internal/streams/duplexify.js -var require_duplexify = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/duplexify.js"(exports, module) { - "use strict"; - var { - isReadable, - isWritable, - isIterable, - isNodeStream, - isReadableNodeStream, - isWritableNodeStream, - isDuplexNodeStream, - } = require_utils(); - var eos = require_end_of_stream(); - var { - AbortError, - codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE }, - } = require_errors(); - var { destroyer } = require_destroy(); - var Duplex = require_duplex(); - var { createDeferredPromise } = require_util(); - var from = require_from(); - var isBlob = - 
typeof Blob !== "undefined" - ? function isBlob2(b) { - return b instanceof Blob; - } - : function isBlob2(b) { - return false; - }; - var { FunctionPrototypeCall } = require_primordials(); - class Duplexify extends Duplex { - constructor(options) { - super(options); - - // https://github.com/nodejs/node/pull/34385 - - if ((options === null || options === undefined ? undefined : options.readable) === false) { - this._readableState.readable = false; - this._readableState.ended = true; - this._readableState.endEmitted = true; - } - if ((options === null || options === undefined ? undefined : options.writable) === false) { - this._writableState.writable = false; - this._writableState.ending = true; - this._writableState.ended = true; - this._writableState.finished = true; - } - } - } - module.exports = function duplexify(body, name) { - if (isDuplexNodeStream(body)) { - return body; - } - if (isReadableNodeStream(body)) { - return _duplexify({ - readable: body, - }); - } - if (isWritableNodeStream(body)) { - return _duplexify({ - writable: body, - }); - } - if (isNodeStream(body)) { - return _duplexify({ - writable: false, - readable: false, - }); - } - if (typeof body === "function") { - const { value, write, final, destroy } = fromAsyncGen(body); - if (isIterable(value)) { - return from(Duplexify, value, { - objectMode: true, - write, - final, - destroy, - }); - } - const then2 = value === null || value === void 0 ? 
void 0 : value.then; - if (typeof then2 === "function") { - let d; - const promise = FunctionPrototypeCall( - then2, - value, - val => { - if (val != null) { - throw new ERR_INVALID_RETURN_VALUE("nully", "body", val); - } - }, - err => { - destroyer(d, err); - }, - ); - return (d = new Duplexify({ - objectMode: true, - readable: false, - write, - final(cb) { - final(async () => { - try { - await promise; - ProcessNextTick(cb, null); - } catch (err) { - ProcessNextTick(cb, err); - } - }); - }, - destroy, - })); - } - throw new ERR_INVALID_RETURN_VALUE("Iterable, AsyncIterable or AsyncFunction", name, value); - } - if (isBlob(body)) { - return duplexify(body.arrayBuffer()); - } - if (isIterable(body)) { - return from(Duplexify, body, { - objectMode: true, - writable: false, - }); - } - if ( - typeof (body === null || body === void 0 ? void 0 : body.writable) === "object" || - typeof (body === null || body === void 0 ? void 0 : body.readable) === "object" - ) { - const readable = - body !== null && body !== void 0 && body.readable - ? isReadableNodeStream(body === null || body === void 0 ? void 0 : body.readable) - ? body === null || body === void 0 - ? void 0 - : body.readable - : duplexify(body.readable) - : void 0; - const writable = - body !== null && body !== void 0 && body.writable - ? isWritableNodeStream(body === null || body === void 0 ? void 0 : body.writable) - ? body === null || body === void 0 - ? void 0 - : body.writable - : duplexify(body.writable) - : void 0; - return _duplexify({ - readable, - writable, - }); - } - const then = body === null || body === void 0 ? 
void 0 : body.then; - if (typeof then === "function") { - let d; - FunctionPrototypeCall( - then, - body, - val => { - if (val != null) { - d.push(val); - } - d.push(null); - }, - err => { - destroyer(d, err); - }, - ); - return (d = new Duplexify({ - objectMode: true, - writable: false, - read() {}, - })); - } - throw new ERR_INVALID_ARG_TYPE( - name, - [ - "Blob", - "ReadableStream", - "WritableStream", - "Stream", - "Iterable", - "AsyncIterable", - "Function", - "{ readable, writable } pair", - "Promise", - ], - body, - ); - }; - function fromAsyncGen(fn) { - let { promise, resolve } = createDeferredPromise(); - const ac = new AbortController(); - const signal = ac.signal; - const value = fn( - (async function* () { - while (true) { - const _promise = promise; - promise = null; - const { chunk, done, cb } = await _promise; - ProcessNextTick(cb); - if (done) return; - if (signal.aborted) - throw new AbortError(void 0, { - cause: signal.reason, - }); - ({ promise, resolve } = createDeferredPromise()); - yield chunk; - } - })(), - { - signal, - }, - ); - return { - value, - write(chunk, encoding, cb) { - const _resolve = resolve; - resolve = null; - _resolve({ - chunk, - done: false, - cb, - }); - }, - final(cb) { - const _resolve = resolve; - resolve = null; - _resolve({ - done: true, - cb, - }); - }, - destroy(err, cb) { - ac.abort(); - cb(err); - }, - }; - } - function _duplexify(pair) { - const r = - pair.readable && typeof pair.readable.read !== "function" ? 
Readable.wrap(pair.readable) : pair.readable; - const w = pair.writable; - let readable = !!isReadable(r); - let writable = !!isWritable(w); - let ondrain; - let onfinish; - let onreadable; - let onclose; - let d; - function onfinished(err) { - const cb = onclose; - onclose = null; - if (cb) { - cb(err); - } else if (err) { - d.destroy(err); - } else if (!readable && !writable) { - d.destroy(); - } - } - d = new Duplexify({ - readableObjectMode: !!(r !== null && r !== void 0 && r.readableObjectMode), - writableObjectMode: !!(w !== null && w !== void 0 && w.writableObjectMode), - readable, - writable, - }); - if (writable) { - eos(w, err => { - writable = false; - if (err) { - destroyer(r, err); - } - onfinished(err); - }); - d._write = function (chunk, encoding, callback) { - if (w.write(chunk, encoding)) { - callback(); - } else { - ondrain = callback; - } - }; - d._final = function (callback) { - w.end(); - onfinish = callback; - }; - w.on("drain", function () { - if (ondrain) { - const cb = ondrain; - ondrain = null; - cb(); - } - }); - w.on("finish", function () { - if (onfinish) { - const cb = onfinish; - onfinish = null; - cb(); - } - }); - } - if (readable) { - eos(r, err => { - readable = false; - if (err) { - destroyer(r, err); - } - onfinished(err); - }); - r.on("readable", function () { - if (onreadable) { - const cb = onreadable; - onreadable = null; - cb(); - } - }); - r.on("end", function () { - d.push(null); - }); - d._read = function () { - while (true) { - const buf = r.read(); - if (buf === null) { - onreadable = d._read; - return; - } - if (!d.push(buf)) { - return; - } - } - }; - } - d._destroy = function (err, callback) { - if (!err && onclose !== null) { - err = new AbortError(); - } - onreadable = null; - ondrain = null; - onfinish = null; - if (onclose === null) { - callback(err); - } else { - onclose = callback; - destroyer(w, err); - destroyer(r, err); - } - }; - return d; - } - }, -}); - -// 
node_modules/readable-stream/lib/internal/streams/duplex.js -var require_duplex = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/duplex.js"(exports, module) { - "use strict"; - var { ObjectDefineProperties, ObjectGetOwnPropertyDescriptor, ObjectKeys, ObjectSetPrototypeOf } = - require_primordials(); - - function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - - // this._events ??= { - // close: undefined, - // error: undefined, - // prefinish: undefined, - // finish: undefined, - // drain: undefined, - // data: undefined, - // end: undefined, - // readable: undefined, - // }; - - Readable.$call(this, options); - Writable.$call(this, options); - - if (options) { - this.allowHalfOpen = options.allowHalfOpen !== false; - if (options.readable === false) { - this._readableState.readable = false; - this._readableState.ended = true; - this._readableState.endEmitted = true; - } - if (options.writable === false) { - this._writableState.writable = false; - this._writableState.ending = true; - this._writableState.ended = true; - this._writableState.finished = true; - } - } else { - this.allowHalfOpen = true; - } - } - Duplex.prototype = {}; - module.exports = Duplex; - ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype); - Duplex.prototype.constructor = Duplex; // Re-add constructor which got lost when setting prototype - ObjectSetPrototypeOf(Duplex, Readable); - - { - for (var method in Writable.prototype) { - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; - } - } - - ObjectDefineProperties(Duplex.prototype, { - writable: ObjectGetOwnPropertyDescriptor(Writable.prototype, "writable"), - writableHighWaterMark: ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableHighWaterMark"), - writableObjectMode: ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableObjectMode"), - writableBuffer: ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableBuffer"), - 
writableLength: ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableLength"), - writableFinished: ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableFinished"), - writableCorked: ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableCorked"), - writableEnded: ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableEnded"), - writableNeedDrain: ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableNeedDrain"), - destroyed: { - get() { - if (this._readableState === void 0 || this._writableState === void 0) { - return false; - } - return this._readableState.destroyed && this._writableState.destroyed; - }, - set(value) { - if (this._readableState && this._writableState) { - this._readableState.destroyed = value; - this._writableState.destroyed = value; - } - }, - }, - }); - var webStreamsAdapters; - function lazyWebStreams() { - if (webStreamsAdapters === void 0) webStreamsAdapters = {}; - return webStreamsAdapters; - } - Duplex.fromWeb = function (pair, options) { - return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options); - }; - Duplex.toWeb = function (duplex) { - return lazyWebStreams().newReadableWritablePairFromDuplex(duplex); - }; - var duplexify; - Duplex.from = function (body) { - if (!duplexify) { - duplexify = require_duplexify(); - } - return duplexify(body, "body"); - }; - }, -}); -const Duplex = require_duplex(); - -// node_modules/readable-stream/lib/internal/streams/transform.js -var require_transform = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/transform.js"(exports, module) { - "use strict"; - var { ObjectSetPrototypeOf, Symbol: Symbol2 } = require_primordials(); - var { ERR_METHOD_NOT_IMPLEMENTED } = require_errors().codes; - function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - - Duplex.$call(this, options); - - this._readableState.sync = false; - this[kCallback] = null; - - if (options) { - if (typeof options.transform === 
"function") this._transform = options.transform; - if (typeof options.flush === "function") this._flush = options.flush; - } else { - this.allowHalfOpen = true; - } - - this.on("prefinish", prefinish.bind(this)); - } - Transform.prototype = {}; - ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype); - Transform.prototype.constructor = Transform; // Re-add constructor which got lost when setting prototype - ObjectSetPrototypeOf(Transform, Duplex); - - module.exports = Transform; - var kCallback = Symbol2("kCallback"); - function final(cb) { - if (typeof this._flush === "function" && !this.destroyed) { - this._flush((er, data) => { - if (er) { - if (cb) { - cb(er); - } else { - this.destroy(er); - } - return; - } - if (data != null) { - this.push(data); - } - this.push(null); - if (cb) { - cb(); - } - }); - } else { - this.push(null); - if (cb) { - cb(); - } - } - } - function prefinish() { - if (this._final !== final) { - final.$call(this); - } - } - Transform.prototype._final = final; - Transform.prototype._transform = function (chunk, encoding, callback) { - throw new ERR_METHOD_NOT_IMPLEMENTED("_transform()"); - }; - Transform.prototype._write = function (chunk, encoding, callback) { - const rState = this._readableState; - const wState = this._writableState; - const length = rState.length; - this._transform(chunk, encoding, (err, val) => { - if (err) { - callback(err); - return; - } - if (val != null) { - this.push(val); - } - if ( - wState.ended || - length === rState.length || - rState.length < rState.highWaterMark || - rState.highWaterMark === 0 || - rState.length === 0 - ) { - callback(); - } else { - this[kCallback] = callback; - } - }); - }; - Transform.prototype._read = function () { - if (this[kCallback]) { - const callback = this[kCallback]; - this[kCallback] = null; - callback(); - } - }; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/passthrough.js -var require_passthrough = __commonJS({ - 
"node_modules/readable-stream/lib/internal/streams/passthrough.js"(exports, module) { - "use strict"; - var { ObjectSetPrototypeOf } = require_primordials(); - var Transform = require_transform(); - - function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - Transform.$call(this, options); - } - PassThrough.prototype = {}; - - ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype); - PassThrough.prototype.constructor = PassThrough; // Re-add constructor which got lost when setting prototype - ObjectSetPrototypeOf(PassThrough, Transform); - - PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); - }; - - module.exports = PassThrough; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/pipeline.js -var require_pipeline = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/pipeline.js"(exports, module) { - "use strict"; - var { Promise: Promise2, SymbolAsyncIterator } = require_primordials(); - var eos = require_end_of_stream(); - var { once } = require_util(); - var destroyImpl = require_destroy(); - var { - aggregateTwoErrors, - codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE, ERR_MISSING_ARGS, ERR_STREAM_DESTROYED }, - AbortError, - } = require_errors(); - var { isIterable, isReadable, isReadableNodeStream, isNodeStream } = require_utils(); - var PassThrough; - function destroyer(stream, reading, writing) { - let finished = false; - stream.on("close", () => { - finished = true; - }); - const cleanup = eos( - stream, - { - readable: reading, - writable: writing, - }, - err => { - finished = !err; - }, - ); - return { - destroy: err => { - if (finished) return; - finished = true; - destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED("pipe")); - }, - cleanup, - }; - } - function popCallback(streams) { - validateFunction(streams[streams.length - 1], "streams[stream.length - 1]"); - return streams.pop(); - } - function 
makeAsyncIterable(val) { - if (isIterable(val)) { - return val; - } else if (isReadableNodeStream(val)) { - return fromReadable(val); - } - throw new ERR_INVALID_ARG_TYPE("val", ["Readable", "Iterable", "AsyncIterable"], val); - } - async function* fromReadable(val) { - yield* Readable.prototype[SymbolAsyncIterator].$call(val); - } - async function pump(iterable, writable, finish, { end }) { - let error; - let onresolve = null; - const resume = err => { - if (err) { - error = err; - } - if (onresolve) { - const callback = onresolve; - onresolve = null; - callback(); - } - }; - const wait = () => - new Promise2((resolve, reject) => { - if (error) { - reject(error); - } else { - onresolve = () => { - if (error) { - reject(error); - } else { - resolve(); - } - }; - } - }); - writable.on("drain", resume); - const cleanup = eos( - writable, - { - readable: false, - }, - resume, - ); - try { - if (writable.writableNeedDrain) { - await wait(); - } - for await (const chunk of iterable) { - if (!writable.write(chunk)) { - await wait(); - } - } - if (end) { - writable.end(); - } - await wait(); - finish(); - } catch (err) { - finish(error !== err ? aggregateTwoErrors(error, err) : err); - } finally { - cleanup(); - writable.off("drain", resume); - } - } - function pipeline(...streams) { - return pipelineImpl(streams, once(popCallback(streams))); - } - function pipelineImpl(streams, callback, opts) { - if (streams.length === 1 && $isJSArray(streams[0])) { - streams = streams[0]; - } - if (streams.length < 2) { - throw new ERR_MISSING_ARGS("streams"); - } - const ac = new AbortController(); - const signal = ac.signal; - const outerSignal = opts === null || opts === void 0 ? void 0 : opts.signal; - const lastStreamCleanup = []; - validateAbortSignal(outerSignal, "options.signal"); - function abort() { - finishImpl(new AbortError()); - } - outerSignal === null || outerSignal === void 0 ? 
void 0 : outerSignal.addEventListener("abort", abort); - let error; - let value; - const destroys = []; - let finishCount = 0; - function finish(err) { - finishImpl(err, --finishCount === 0); - } - function finishImpl(err, final) { - if (err && (!error || error.code === "ERR_STREAM_PREMATURE_CLOSE")) { - error = err; - } - if (!error && !final) { - return; - } - while (destroys.length) { - destroys.shift()(error); - } - outerSignal === null || outerSignal === void 0 ? void 0 : outerSignal.removeEventListener("abort", abort); - ac.abort(); - if (final) { - if (!error) { - lastStreamCleanup.forEach(fn => fn()); - } - ProcessNextTick(callback, error, value); - } - } - let ret; - for (let i = 0; i < streams.length; i++) { - const stream = streams[i]; - const reading = i < streams.length - 1; - const writing = i > 0; - const end = reading || (opts === null || opts === void 0 ? void 0 : opts.end) !== false; - const isLastStream = i === streams.length - 1; - if (isNodeStream(stream)) { - let onError = function (err) { - if (err && err.name !== "AbortError" && err.code !== "ERR_STREAM_PREMATURE_CLOSE") { - finish(err); - } - }; - if (end) { - const { destroy, cleanup } = destroyer(stream, reading, writing); - destroys.push(destroy); - if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(cleanup); - } - } - stream.on("error", onError); - if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(() => { - stream.removeListener("error", onError); - }); - } - } - if (i === 0) { - if (typeof stream === "function") { - ret = stream({ - signal, - }); - if (!isIterable(ret)) { - throw new ERR_INVALID_RETURN_VALUE("Iterable, AsyncIterable or Stream", "source", ret); - } - } else if (isIterable(stream) || isReadableNodeStream(stream)) { - ret = stream; - } else { - ret = Duplex.from(stream); - } - } else if (typeof stream === "function") { - ret = makeAsyncIterable(ret); - ret = stream(ret, { - signal, - }); - if (reading) { - if (!isIterable(ret, true)) { - 
throw new ERR_INVALID_RETURN_VALUE("AsyncIterable", `transform[${i - 1}]`, ret); - } - } else { - var _ret; - if (!PassThrough) { - PassThrough = require_passthrough(); - } - const pt = new PassThrough({ - objectMode: true, - }); - const then = (_ret = ret) === null || _ret === void 0 ? void 0 : _ret.then; - if (typeof then === "function") { - finishCount++; - then.$call( - ret, - val => { - value = val; - if (val != null) { - pt.write(val); - } - if (end) { - pt.end(); - } - ProcessNextTick(finish); - }, - err => { - pt.destroy(err); - ProcessNextTick(finish, err); - }, - ); - } else if (isIterable(ret, true)) { - finishCount++; - pump(ret, pt, finish, { - end, - }); - } else { - throw new ERR_INVALID_RETURN_VALUE("AsyncIterable or Promise", "destination", ret); - } - ret = pt; - const { destroy, cleanup } = destroyer(ret, false, true); - destroys.push(destroy); - if (isLastStream) { - lastStreamCleanup.push(cleanup); - } - } - } else if (isNodeStream(stream)) { - if (isReadableNodeStream(ret)) { - finishCount += 2; - const cleanup = pipe(ret, stream, finish, { - end, - }); - if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(cleanup); - } - } else if (isIterable(ret)) { - finishCount++; - pump(ret, stream, finish, { - end, - }); - } else { - throw new ERR_INVALID_ARG_TYPE("val", ["Readable", "Iterable", "AsyncIterable"], ret); - } - ret = stream; - } else { - ret = Duplex.from(stream); - } - } - if ( - (signal !== null && signal !== void 0 && signal.aborted) || - (outerSignal !== null && outerSignal !== void 0 && outerSignal.aborted) - ) { - ProcessNextTick(abort); - } - return ret; - } - function pipe(src, dst, finish, { end }) { - src.pipe(dst, { - end, - }); - if (end) { - src.once("end", () => dst.end()); - } else { - finish(); - } - eos( - src, - { - readable: true, - writable: false, - }, - err => { - const rState = src._readableState; - if ( - err && - err.code === "ERR_STREAM_PREMATURE_CLOSE" && - rState && - rState.ended && - 
!rState.errored && - !rState.errorEmitted - ) { - src.once("end", finish).once("error", finish); - } else { - finish(err); - } - }, - ); - return eos( - dst, - { - readable: false, - writable: true, - }, - finish, - ); - } - module.exports = { - pipelineImpl, - pipeline, - }; - }, -}); - -// node_modules/readable-stream/lib/internal/streams/compose.js -var require_compose = __commonJS({ - "node_modules/readable-stream/lib/internal/streams/compose.js"(exports, module) { - "use strict"; - var { pipeline } = require_pipeline(); - var Duplex = require_duplex(); - var { destroyer } = require_destroy(); - var { isNodeStream, isReadable, isWritable } = require_utils(); - var { - AbortError, - codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }, - } = require_errors(); - module.exports = function compose(...streams) { - if (streams.length === 0) { - throw new ERR_MISSING_ARGS("streams"); - } - if (streams.length === 1) { - return Duplex.from(streams[0]); - } - const orgStreams = [...streams]; - if (typeof streams[0] === "function") { - streams[0] = Duplex.from(streams[0]); - } - if (typeof streams[streams.length - 1] === "function") { - const idx = streams.length - 1; - streams[idx] = Duplex.from(streams[idx]); - } - for (let n = 0; n < streams.length; ++n) { - if (!isNodeStream(streams[n])) { - continue; - } - if (n < streams.length - 1 && !isReadable(streams[n])) { - throw $ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], "must be readable"); - } - if (n > 0 && !isWritable(streams[n])) { - throw $ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], "must be writable"); - } - } - let ondrain; - let onfinish; - let onreadable; - let onclose; - let d; - function onfinished(err) { - const cb = onclose; - onclose = null; - if (cb) { - cb(err); - } else if (err) { - d.destroy(err); - } else if (!readable && !writable) { - d.destroy(); - } - } - const head = streams[0]; - const tail = pipeline(streams, onfinished); - const writable = !!isWritable(head); - const readable = 
!!isReadable(tail); - d = new Duplex({ - writableObjectMode: !!(head !== null && head !== void 0 && head.writableObjectMode), - readableObjectMode: !!(tail !== null && tail !== void 0 && tail.writableObjectMode), - writable, - readable, - }); - if (writable) { - d._write = function (chunk, encoding, callback) { - if (head.write(chunk, encoding)) { - callback(); - } else { - ondrain = callback; - } - }; - d._final = function (callback) { - head.end(); - onfinish = callback; - }; - head.on("drain", function () { - if (ondrain) { - const cb = ondrain; - ondrain = null; - cb(); - } - }); - tail.on("finish", function () { - if (onfinish) { - const cb = onfinish; - onfinish = null; - cb(); - } - }); - } - if (readable) { - tail.on("readable", function () { - if (onreadable) { - const cb = onreadable; - onreadable = null; - cb(); - } - }); - tail.on("end", function () { - d.push(null); - }); - d._read = function () { - while (true) { - const buf = tail.read(); - if (buf === null) { - onreadable = d._read; - return; - } - if (!d.push(buf)) { - return; - } - } - }; - } - d._destroy = function (err, callback) { - if (!err && onclose !== null) { - err = new AbortError(); - } - onreadable = null; - ondrain = null; - onfinish = null; - if (onclose === null) { - callback(err); - } else { - onclose = callback; - destroyer(tail, err); - } - }; - return d; - }; - }, -}); - -// node_modules/readable-stream/lib/stream/promises.js -var require_promises = __commonJS({ - "node_modules/readable-stream/lib/stream/promises.js"(exports, module) { - "use strict"; - var { ArrayPrototypePop, Promise: Promise2 } = require_primordials(); - var { isIterable, isNodeStream } = require_utils(); - var { pipelineImpl: pl } = require_pipeline(); - var { finished } = require_end_of_stream(); - function pipeline(...streams) { - const { promise, resolve, reject } = $newPromiseCapability(Promise); - let signal; - let end; - const lastArg = streams[streams.length - 1]; - if (lastArg && typeof lastArg === 
"object" && !isNodeStream(lastArg) && !isIterable(lastArg)) { - const options = ArrayPrototypePop(streams); - signal = options.signal; - end = options.end; - } - pl( - streams, - (err, value) => { - if (err) { - reject(err); - } else { - resolve(value); - } - }, - { - signal, - end, - }, - ); - return promise; - } - module.exports = { - finished, - pipeline, - }; - }, -}); -// node_modules/readable-stream/lib/stream.js -var require_stream = __commonJS({ - "node_modules/readable-stream/lib/stream.js"(exports, module) { - "use strict"; - var { ObjectDefineProperty, ObjectKeys } = require_primordials(); - var { - promisify: { custom: customPromisify }, - } = require_util(); - - var { streamReturningOperators, promiseReturningOperators } = require_operators(); - var { - codes: { ERR_ILLEGAL_CONSTRUCTOR }, - } = require_errors(); - var compose = require_compose(); - var { pipeline } = require_pipeline(); - var { destroyer } = require_destroy(); - var eos = require_end_of_stream(); - var promises = require_promises(); - var utils = require_utils(); - var Stream = (module.exports = require_legacy().Stream); - Stream.isDisturbed = utils.isDisturbed; - Stream.isErrored = utils.isErrored; - Stream.isWritable = utils.isWritable; - Stream.isReadable = utils.isReadable; - Stream.Readable = require_readable(); - for (const key of ObjectKeys(streamReturningOperators)) { - let fn = function (...args) { - if (new.target) { - throw ERR_ILLEGAL_CONSTRUCTOR(); - } - return Stream.Readable.from(op.$apply(this, args)); - }; - const op = streamReturningOperators[key]; - ObjectDefineProperty(fn, "name", { - value: op.name, - }); - ObjectDefineProperty(fn, "length", { - value: op.length, - }); - ObjectDefineProperty(Stream.Readable.prototype, key, { - value: fn, - enumerable: false, - configurable: true, - writable: true, - }); - } - for (const key of ObjectKeys(promiseReturningOperators)) { - let fn = function (...args) { - if (new.target) { - throw ERR_ILLEGAL_CONSTRUCTOR(); - } - return 
op.$apply(this, args); - }; - const op = promiseReturningOperators[key]; - ObjectDefineProperty(fn, "name", { - value: op.name, - }); - ObjectDefineProperty(fn, "length", { - value: op.length, - }); - ObjectDefineProperty(Stream.Readable.prototype, key, { - value: fn, - enumerable: false, - configurable: true, - writable: true, - }); - } - Stream.Writable = require_writable(); - Stream.Duplex = require_duplex(); - Stream.Transform = require_transform(); - Stream.PassThrough = require_passthrough(); - Stream.pipeline = pipeline; - var { addAbortSignal } = require_add_abort_signal(); - Stream.addAbortSignal = addAbortSignal; - Stream.finished = eos; - Stream.destroy = destroyer; - Stream.compose = compose; - ObjectDefineProperty(Stream, "promises", { - configurable: true, - enumerable: true, - get() { - return promises; - }, - }); - ObjectDefineProperty(pipeline, customPromisify, { - enumerable: true, - get() { - return promises.pipeline; - }, - }); - ObjectDefineProperty(eos, customPromisify, { - enumerable: true, - get() { - return promises.finished; - }, - }); - Stream.Stream = Stream; - Stream._isUint8Array = function isUint8Array(value) { - return value instanceof Uint8Array; - }; - Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { - return new Buffer(chunk.buffer, chunk.byteOffset, chunk.byteLength); - }; - Stream.setDefaultHighWaterMark = setDefaultHighWaterMark; - Stream.getDefaultHighWaterMark = getDefaultHighWaterMark; - }, -}); - -var kEnsureConstructed = Symbol("kEnsureConstructed"); - -/** - * Bun native stream wrapper - * - * This glue code lets us avoid using ReadableStreams to wrap Bun internal streams - */ -function createNativeStreamReadable(Readable) { - var closer = [false]; - var handleNumberResult = function (nativeReadable, result, view, isClosed) { - if (result > 0) { - const slice = view.subarray(0, result); - view = slice.byteLength < view.byteLength ? 
view.subarray(result) : undefined; - if (slice.byteLength > 0) { - nativeReadable.push(slice); - } - } - - if (isClosed) { - ProcessNextTick(() => { - nativeReadable.push(null); - }); - } - - return view; - }; - - var handleArrayBufferViewResult = function (nativeReadable, result, view, isClosed) { - if (result.byteLength > 0) { - nativeReadable.push(result); - } - - if (isClosed) { - ProcessNextTick(() => { - nativeReadable.push(null); - }); - } - - return view; - }; - - var DYNAMICALLY_ADJUST_CHUNK_SIZE = process.env.BUN_DISABLE_DYNAMIC_CHUNK_SIZE !== "1"; - - const MIN_BUFFER_SIZE = 512; - - const refCount = Symbol("refCount"); - const constructed = Symbol("constructed"); - const remainingChunk = Symbol("remainingChunk"); - const highWaterMark = Symbol("highWaterMark"); - const pendingRead = Symbol("pendingRead"); - const hasResized = Symbol("hasResized"); - - const _onClose = Symbol("_onClose"); - const _onDrain = Symbol("_onDrain"); - const _internalConstruct = Symbol("_internalConstruct"); - const _getRemainingChunk = Symbol("_getRemainingChunk"); - const _adjustHighWaterMark = Symbol("_adjustHighWaterMark"); - const _handleResult = Symbol("_handleResult"); - const _internalRead = Symbol("_internalRead"); - - function NativeReadable(this: typeof NativeReadable, ptr, options) { - if (!(this instanceof NativeReadable)) { - return new NativeReadable(path, options); - } - - this[refCount] = 0; - this[constructed] = false; - this[remainingChunk] = undefined; - this[pendingRead] = false; - this[hasResized] = !DYNAMICALLY_ADJUST_CHUNK_SIZE; - - options ??= {}; - Readable.$apply(this, [options]); - - if (typeof options.highWaterMark === "number") { - this[highWaterMark] = options.highWaterMark; - } else { - this[highWaterMark] = 256 * 1024; - } - this.$bunNativePtr = ptr; - this[constructed] = false; - this[remainingChunk] = undefined; - this[pendingRead] = false; - ptr.onClose = this[_onClose].bind(this); - ptr.onDrain = this[_onDrain].bind(this); - } - 
$toClass(NativeReadable, "NativeReadable", Readable); - - NativeReadable.prototype[_onClose] = function () { - this.push(null); - }; - - NativeReadable.prototype[_onDrain] = function (chunk) { - this.push(chunk); - }; - - // maxToRead is by default the highWaterMark passed from the Readable.read call to this fn - // However, in the case of an fs.ReadStream, we can pass the number of bytes we want to read - // which may be significantly less than the actual highWaterMark - NativeReadable.prototype._read = function _read(maxToRead) { - $debug("NativeReadable._read", this.__id); - if (this[pendingRead]) { - $debug("pendingRead is true", this.__id); - return; - } - var ptr = this.$bunNativePtr; - $debug("ptr @ NativeReadable._read", ptr, this.__id); - if (!ptr) { - this.push(null); - return; - } - if (!this[constructed]) { - $debug("NativeReadable not constructed yet", this.__id); - this[_internalConstruct](ptr); - } - return this[_internalRead](this[_getRemainingChunk](maxToRead), ptr); - }; - - NativeReadable.prototype[_internalConstruct] = function (ptr) { - $assert(this[constructed] === false); - this[constructed] = true; - - const result = ptr.start(this[highWaterMark]); - - $debug("NativeReadable internal `start` result", result, this.__id); - - if (typeof result === "number" && result > 1) { - this[hasResized] = true; - $debug("NativeReadable resized", this.__id); - - this[highWaterMark] = Math.min(this[highWaterMark], result); - } - - const drainResult = ptr.drain(); - $debug("NativeReadable drain result", drainResult, this.__id); - if ((drainResult?.byteLength ?? 0) > 0) { - this.push(drainResult); - } - }; - - // maxToRead can be the highWaterMark (by default) or the remaining amount of the stream to read - // This is so the consumer of the stream can terminate the stream early if they know - // how many bytes they want to read (ie. 
when reading only part of a file) - // ObjectDefinePrivateProperty(NativeReadable.prototype, "_getRemainingChunk", ); - NativeReadable.prototype[_getRemainingChunk] = function (maxToRead) { - maxToRead ??= this[highWaterMark]; - var chunk = this[remainingChunk]; - $debug("chunk @ #getRemainingChunk", chunk, this.__id); - if (chunk?.byteLength ?? 0 < MIN_BUFFER_SIZE) { - var size = maxToRead > MIN_BUFFER_SIZE ? maxToRead : MIN_BUFFER_SIZE; - this[remainingChunk] = chunk = new Buffer(size); - } - return chunk; - }; - - // ObjectDefinePrivateProperty(NativeReadable.prototype, "_adjustHighWaterMark", ); - NativeReadable.prototype[_adjustHighWaterMark] = function () { - this[highWaterMark] = Math.min(this[highWaterMark] * 2, 1024 * 1024 * 2); - this[hasResized] = true; - $debug("Resized", this.__id); - }; - - // ObjectDefinePrivateProperty(NativeReadable.prototype, "_handleResult", ); - NativeReadable.prototype[_handleResult] = function (result, view, isClosed) { - $debug("result, isClosed @ #handleResult", result, isClosed, this.__id); - - if (typeof result === "number") { - if (result >= this[highWaterMark] && !this[hasResized] && !isClosed) { - this[_adjustHighWaterMark](); - } - return handleNumberResult(this, result, view, isClosed); - } else if (typeof result === "boolean") { - ProcessNextTick(() => { - this.push(null); - }); - return (view?.byteLength ?? 0 > 0) ? 
view : undefined; - } else if ($isTypedArrayView(result)) { - if (result.byteLength >= this[highWaterMark] && !this[hasResized] && !isClosed) { - this[_adjustHighWaterMark](); - } - - return handleArrayBufferViewResult(this, result, view, isClosed); - } else { - $debug("Unknown result type", result, this.__id); - throw new Error("Invalid result from pull"); - } - }; - - NativeReadable.prototype[_internalRead] = function (view, ptr) { - $debug("#internalRead()", this.__id); - closer[0] = false; - var result = ptr.pull(view, closer); - if ($isPromise(result)) { - this[pendingRead] = true; - return result.then( - result => { - this[pendingRead] = false; - $debug("pending no longerrrrrrrr (result returned from pull)", this.__id); - const isClosed = closer[0]; - this[remainingChunk] = this[_handleResult](result, view, isClosed); - }, - reason => { - $debug("error from pull", reason, this.__id); - errorOrDestroy(this, reason); - }, - ); - } else { - this[remainingChunk] = this[_handleResult](result, view, closer[0]); - } - }; - - NativeReadable.prototype._destroy = function (error, callback) { - var ptr = this.$bunNativePtr; - if (!ptr) { - callback(error); - return; - } - - this.$bunNativePtr = undefined; - ptr.updateRef(false); - - $debug("NativeReadable destroyed", this.__id); - ptr.cancel(error); - callback(error); - }; - - NativeReadable.prototype.ref = function () { - var ptr = this.$bunNativePtr; - if (ptr === undefined) return; - if (this[refCount]++ === 0) { - ptr.updateRef(true); - } - }; - - NativeReadable.prototype.unref = function () { - var ptr = this.$bunNativePtr; - if (ptr === undefined) return; - if (this[refCount]-- === 1) { - ptr.updateRef(false); - } - }; - - NativeReadable.prototype[kEnsureConstructed] = function () { - if (this[constructed]) return; - this[_internalConstruct](this.$bunNativePtr); - }; - - return NativeReadable; -} - var nativeReadableStreamPrototypes = { 0: undefined, 1: undefined, @@ -5506,176 +16,21 @@ var 
nativeReadableStreamPrototypes = { }; function getNativeReadableStreamPrototype(nativeType, Readable) { - return (nativeReadableStreamPrototypes[nativeType] ??= createNativeStreamReadable(Readable)); -} - -function getNativeReadableStream(Readable, stream, options) { - const ptr = stream.$bunNativePtr; - if (!ptr || ptr === -1) { - $debug("no native readable stream"); - return undefined; - } - const type = stream.$bunNativeType; - $assert(typeof type === "number", "Invalid native type"); - $assert(typeof ptr === "object", "Invalid native ptr"); - - const NativeReadable = getNativeReadableStreamPrototype(type, Readable); - // https://github.com/oven-sh/bun/pull/12801 - // https://github.com/oven-sh/bun/issues/9555 - // There may be a ReadableStream.Strong handle to the ReadableStream. - // We can't update those handles to point to the NativeReadable from JS - // So we instead mark it as no longer usable, and create a new NativeReadable - transferToNativeReadable(stream); - - return new NativeReadable(ptr, options); + return (nativeReadableStreamPrototypes[nativeType] ??= require("internal/streams/nativereadable")()); } /** --- Bun native stream wrapper --- */ -const _pathOrFdOrSink = Symbol("pathOrFdOrSink"); -const { fileSinkSymbol: _fileSink } = require("internal/shared"); -const _native = Symbol("native"); +exports[kGetNativeReadableProto] = getNativeReadableStreamPrototype; +exports.NativeWritable = require("internal/streams/nativewritable"); -function NativeWritable(pathOrFdOrSink, options = {}) { - Writable.$call(this, options); - - this[_native] = true; - - this._construct = NativeWritable_internalConstruct; - this._final = NativeWritable_internalFinal; - this._write = NativeWritablePrototypeWrite; - - this[_pathOrFdOrSink] = pathOrFdOrSink; -} -$toClass(NativeWritable, "NativeWritable", Writable); - -// These are confusingly two different fns for construct which initially were the same thing because -// `_construct` is part of the lifecycle of Writable and 
is not called lazily, -// so we need to separate our _construct for Writable state and actual construction of the write stream -function NativeWritable_internalConstruct(cb) { - this._writableState.constructed = true; - this.constructed = true; - if (typeof cb === "function") ProcessNextTick(cb); - ProcessNextTick(() => { - this.emit("open", this.fd); - this.emit("ready"); - }); -} - -function NativeWritable_lazyConstruct(stream) { - // TODO: Turn this check into check for instanceof FileSink - var sink = stream[_pathOrFdOrSink]; - if (typeof sink === "object") { - if (typeof sink.write === "function") { - return (stream[_fileSink] = sink); - } else { - throw new Error("Invalid FileSink"); - } - } else { - return (stream[_fileSink] = Bun.file(sink).writer()); - } -} - -function NativeWritablePrototypeWrite(chunk, encoding, cb) { - var fileSink = this[_fileSink] ?? NativeWritable_lazyConstruct(this); - var result = fileSink.write(chunk); - - if (typeof encoding === "function") { - cb = encoding; - } - - if ($isPromise(result)) { - // var writePromises = this.#writePromises; - // var i = writePromises.length; - // writePromises[i] = result; - result - .then(result => { - this.emit("drain"); - if (cb) { - cb(null, result); - } - }) - .catch( - cb - ? err => { - cb(err); - } - : err => { - this.emit("error", err); - }, - ); - return false; - } - - // TODO: Should we just have a calculation based on encoding and length of chunk? - if (cb) cb(null, chunk.byteLength); - return true; -} - -const WritablePrototypeEnd = Writable.prototype.end; -NativeWritable.prototype.end = function end(chunk, encoding, cb, native) { - return WritablePrototypeEnd.$call(this, chunk, encoding, cb, native ?? 
this[_native]); -}; - -NativeWritable.prototype._destroy = function (error, cb) { - const w = this._writableState; - const r = this._readableState; - - if (w) { - w.destroyed = true; - w.closeEmitted = true; - } - if (r) { - r.destroyed = true; - r.closeEmitted = true; - } - - if (typeof cb === "function") cb(error); - - if (w?.closeEmitted || r?.closeEmitted) { - this.emit("close"); - } -}; - -function NativeWritable_internalFinal(cb) { - var sink = this[_fileSink]; - if (sink) { - const end = sink.end(true); - if ($isPromise(end) && cb) { - end.then(() => { - if (cb) cb(); - }, cb); - } - } - if (cb) cb(); -} - -NativeWritable.prototype.ref = function ref() { - const sink = (this[_fileSink] ||= NativeWritable_lazyConstruct(this)); - sink.ref(); - return this; -}; - -NativeWritable.prototype.unref = function unref() { - const sink = (this[_fileSink] ||= NativeWritable_lazyConstruct(this)); - sink.unref(); - return this; -}; - -const exports = require_stream(); -const promises = require_promises(); -exports._getNativeReadableStreamPrototype = getNativeReadableStreamPrototype; -exports.NativeWritable = NativeWritable; -Object.defineProperty(exports, "promises", { - configurable: true, - enumerable: true, - get() { - return promises; - }, -}); +const { + newStreamReadableFromReadableStream: _ReadableFromWeb, + _ReadableFromWeb: _ReadableFromWebForUndici, +} = require("internal/webstreams_adapters"); exports[Symbol.for("::bunternal::")] = { _ReadableFromWeb, _ReadableFromWebForUndici, kEnsureConstructed }; -exports.eos = require_end_of_stream(); +exports.eos = require("internal/streams/end-of-stream"); exports.EventEmitter = EE; export default exports; diff --git a/src/js/node/timers.promises.ts b/src/js/node/timers.promises.ts index 6d011ec78a..97c302d1ca 100644 --- a/src/js/node/timers.promises.ts +++ b/src/js/node/timers.promises.ts @@ -5,13 +5,6 @@ const { validateBoolean, validateAbortSignal, validateObject } = require("intern const symbolAsyncIterator = 
Symbol.asyncIterator; -class AbortError extends Error { - constructor() { - super("The operation was aborted"); - this.code = "ABORT_ERR"; - } -} - function asyncIterator({ next: nextFunction, return: returnFunction }) { const result = {}; if (typeof nextFunction === "function") { @@ -46,7 +39,7 @@ function setTimeoutPromise(after = 1, value, options = {}) { return Promise.reject(error); } if (signal?.aborted) { - return Promise.reject(new AbortError()); + return Promise.reject($makeAbortError()); } let onCancel; const returnValue = new Promise((resolve, reject) => { @@ -57,7 +50,7 @@ function setTimeoutPromise(after = 1, value, options = {}) { if (signal) { onCancel = () => { clearTimeout(timeout); - reject(new AbortError()); + reject($makeAbortError()); }; signal.addEventListener("abort", onCancel); } @@ -85,7 +78,7 @@ function setImmediatePromise(value, options = {}) { return Promise.reject(error); } if (signal?.aborted) { - return Promise.reject(new AbortError()); + return Promise.reject($makeAbortError()); } let onCancel; const returnValue = new Promise((resolve, reject) => { @@ -96,7 +89,7 @@ function setImmediatePromise(value, options = {}) { if (signal) { onCancel = () => { clearImmediate(immediate); - reject(new AbortError()); + reject($makeAbortError()); }; signal.addEventListener("abort", onCancel); } @@ -139,7 +132,7 @@ function setIntervalPromise(after = 1, value, options = {}) { if (signal?.aborted) { return asyncIterator({ next: function () { - return Promise.reject(new AbortError()); + return Promise.reject($makeAbortError()); }, }); } @@ -180,7 +173,7 @@ function setIntervalPromise(after = 1, value, options = {}) { resolve(); } } else if (notYielded === 0) { - reject(new AbortError()); + reject($makeAbortError()); } else { resolve(); } diff --git a/src/js/node/tls.ts b/src/js/node/tls.ts index 46d0fa1113..403b09a5c5 100644 --- a/src/js/node/tls.ts +++ b/src/js/node/tls.ts @@ -1,5 +1,5 @@ // Hardcoded module "node:tls" -const { isArrayBufferView, 
isTypedArray } = require("node:util/types"); +const { isArrayBufferView, isArrayBuffer, isTypedArray } = require("node:util/types"); const { addServerName } = require("../internal/net"); const net = require("node:net"); const { Duplex } = require("node:stream"); @@ -38,12 +38,11 @@ function parseCertString() { const rejectUnauthorizedDefault = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== "0" && process.env.NODE_TLS_REJECT_UNAUTHORIZED !== "false"; function isValidTLSArray(obj) { - if (typeof obj === "string" || isTypedArray(obj) || obj instanceof ArrayBuffer || obj instanceof Blob) return true; + if (typeof obj === "string" || isTypedArray(obj) || isArrayBuffer(obj) || $inheritsBlob(obj)) return true; if (Array.isArray(obj)) { for (var i = 0; i < obj.length; i++) { const item = obj[i]; - if (typeof item !== "string" && !isTypedArray(item) && !(item instanceof ArrayBuffer) && !(item instanceof Blob)) - return false; + if (typeof item !== "string" && !isTypedArray(item) && !isArrayBuffer(item) && !$inheritsBlob(item)) return false; } return true; } diff --git a/src/js/node/trace_events.ts b/src/js/node/trace_events.ts index 762a565b78..37fe030ae5 100644 --- a/src/js/node/trace_events.ts +++ b/src/js/node/trace_events.ts @@ -5,16 +5,10 @@ class Tracing { categories = ""; } -function ERR_INVALID_ARG_TYPE(name, type, value) { - const err = new TypeError(`The "${name}" argument must be of type ${type}. 
Received ${value}`); - err.code = "ERR_INVALID_ARG_TYPE"; - return err; -} - function createTracing(opts) { if (typeof opts !== "object" || opts == null) { // @ts-ignore - throw new ERR_INVALID_ARG_TYPE("options", "object", opts); + throw $ERR_INVALID_ARG_TYPE("options", "object", opts); } // TODO: validate categories diff --git a/src/js/node/util.ts b/src/js/node/util.ts index e2498b85e1..e5f67d73e4 100644 --- a/src/js/node/util.ts +++ b/src/js/node/util.ts @@ -2,7 +2,6 @@ const types = require("node:util/types"); /** @type {import('node-inspect-extracted')} */ const utl = require("internal/util/inspect"); -const { ERR_OUT_OF_RANGE } = require("internal/errors"); const { promisify } = require("internal/promisify"); const { validateString, validateOneOf } = require("internal/validators"); @@ -256,7 +255,7 @@ function styleText(format, text) { function getSystemErrorName(err: any) { if (typeof err !== "number") throw $ERR_INVALID_ARG_TYPE("err", "number", err); - if (err >= 0 || !NumberIsSafeInteger(err)) throw ERR_OUT_OF_RANGE("err", "a negative integer", err); + if (err >= 0 || !NumberIsSafeInteger(err)) throw $ERR_OUT_OF_RANGE("err", "a negative integer", err); return internalErrorName(err); } diff --git a/src/js/node/zlib.ts b/src/js/node/zlib.ts index b8bd4feadc..34235ac71c 100644 --- a/src/js/node/zlib.ts +++ b/src/js/node/zlib.ts @@ -24,7 +24,6 @@ const isArrayBufferView = ArrayBufferIsView; const isAnyArrayBuffer = b => b instanceof ArrayBuffer || b instanceof SharedArrayBuffer; const kMaxLength = $requireMap.$get("buffer")?.exports.kMaxLength ?? 
BufferModule.kMaxLength; -const { ERR_BROTLI_INVALID_PARAM, ERR_BUFFER_TOO_LARGE, ERR_OUT_OF_RANGE } = require("internal/errors"); const { Transform, finished } = require("node:stream"); const owner_symbol = Symbol("owner_symbol"); const { @@ -91,7 +90,7 @@ function zlibBufferOnData(chunk) { if (this.nread > this._maxOutputLength) { this.close(); this.removeAllListeners("end"); - this.cb(ERR_BUFFER_TOO_LARGE(this._maxOutputLength)); + this.cb($ERR_BUFFER_TOO_LARGE(this._maxOutputLength)); } } @@ -165,7 +164,7 @@ function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) { if (!validateFiniteNumber(chunkSize, "options.chunkSize")) { chunkSize = Z_DEFAULT_CHUNK; } else if (chunkSize < Z_MIN_CHUNK) { - throw ERR_OUT_OF_RANGE("options.chunkSize", `>= ${Z_MIN_CHUNK}`, chunkSize); + throw $ERR_OUT_OF_RANGE("options.chunkSize", `>= ${Z_MIN_CHUNK}`, chunkSize); } // prettier-ignore @@ -358,7 +357,7 @@ function processChunkSync(self, chunk, flushFlag) { if (nread > self._maxOutputLength) { _close(self); - throw ERR_BUFFER_TOO_LARGE(self._maxOutputLength); + throw $ERR_BUFFER_TOO_LARGE(self._maxOutputLength); } } else { assert(have === 0, "have should not go down"); @@ -675,7 +674,7 @@ function Brotli(opts, mode) { ArrayPrototypeForEach.$call(ObjectKeys(opts.params), origKey => { const key = +origKey; if (NumberIsNaN(key) || key < 0 || key > kMaxBrotliParam || (brotliInitParamsArray[key] | 0) !== -1) { - throw ERR_BROTLI_INVALID_PARAM(origKey); + throw $ERR_BROTLI_INVALID_PARAM(origKey); } const value = opts.params[origKey]; diff --git a/src/output.zig b/src/output.zig index 20abdd9cde..53c09fd79f 100644 --- a/src/output.zig +++ b/src/output.zig @@ -842,27 +842,31 @@ pub fn scoped(comptime tag: anytype, comptime disabled: bool) LogFunction { // // // +// // // +// // // // - bold // - dim // - reset // - reset -const ED = "\x1b["; +const CSI = "\x1b["; pub const color_map = ComptimeStringMap(string, .{ - &.{ "black", ED ++ "30m" }, - &.{ "blue", ED ++ "34m" }, 
- &.{ "b", ED ++ "1m" }, - &.{ "d", ED ++ "2m" }, - &.{ "i", ED ++ "3m" }, - &.{ "cyan", ED ++ "36m" }, - &.{ "green", ED ++ "32m" }, - &.{ "magenta", ED ++ "35m" }, - &.{ "red", ED ++ "31m" }, - &.{ "white", ED ++ "37m" }, - &.{ "yellow", ED ++ "33m" }, + &.{ "b", CSI ++ "1m" }, + &.{ "d", CSI ++ "2m" }, + &.{ "i", CSI ++ "3m" }, + &.{ "black", CSI ++ "30m" }, + &.{ "red", CSI ++ "31m" }, + &.{ "green", CSI ++ "32m" }, + &.{ "yellow", CSI ++ "33m" }, + &.{ "blue", CSI ++ "34m" }, + &.{ "magenta", CSI ++ "35m" }, + &.{ "cyan", CSI ++ "36m" }, + &.{ "white", CSI ++ "37m" }, + &.{ "bgred", CSI ++ "41m" }, + &.{ "bggreen", CSI ++ "42m" }, }); const RESET: string = "\x1b[0m"; pub fn prettyFmt(comptime fmt: string, comptime is_enabled: bool) [:0]const u8 { diff --git a/test/bundler/native-plugin.test.ts b/test/bundler/native-plugin.test.ts index 942461228f..9348a80995 100644 --- a/test/bundler/native-plugin.test.ts +++ b/test/bundler/native-plugin.test.ts @@ -548,7 +548,9 @@ const many_foo = ["foo","foo","foo","foo","foo","foo","foo"] }); expect.unreachable(); } catch (e) { - expect(e.toString()).toContain('TypeError: Could not find the symbol "OOGA_BOOGA_420" in the given napi module.'); + expect(e.toString()).toContain( + 'TypeError [ERR_INVALID_ARG_TYPE]: Could not find the symbol "OOGA_BOOGA_420" in the given napi module.', + ); } }); diff --git a/test/js/node/http2/node-http2.test.js b/test/js/node/http2/node-http2.test.js index e7079bd2e9..403472a040 100644 --- a/test/js/node/http2/node-http2.test.js +++ b/test/js/node/http2/node-http2.test.js @@ -783,7 +783,7 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) { resolve(); }); await promise; - // expect(response_headers[":status"]).toBe(200); // TODO: + expect(response_headers[":status"]).toBe(200); const settings = client.remoteSettings; const localSettings = client.localSettings; assertSettings(settings); diff --git a/test/js/node/readline/readline.node.test.ts b/test/js/node/readline/readline.node.test.ts 
index fecce0f34d..caa38dcfa5 100644 --- a/test/js/node/readline/readline.node.test.ts +++ b/test/js/node/readline/readline.node.test.ts @@ -306,15 +306,15 @@ describe("readline.cursorTo()", () => { // Verify that cursorTo() throws if x or y is NaN. assert.throws(() => { readline.cursorTo(writable, NaN); - }, "ERR_INVALID_ARG_VALUE"); + }, /ERR_INVALID_ARG_VALUE/); assert.throws(() => { readline.cursorTo(writable, 1, NaN); - }, "ERR_INVALID_ARG_VALUE"); + }, /ERR_INVALID_ARG_VALUE/); assert.throws(() => { readline.cursorTo(writable, NaN, NaN); - }, "ERR_INVALID_ARG_VALUE"); + }, /ERR_INVALID_ARG_VALUE/); }); }); diff --git a/test/js/node/stream/bufferlist.test.ts b/test/js/node/stream/bufferlist.test.ts deleted file mode 100644 index 240c54935d..0000000000 --- a/test/js/node/stream/bufferlist.test.ts +++ /dev/null @@ -1,247 +0,0 @@ -import { expect, it } from "bun:test"; -import { Readable } from "stream"; - -function makeUint8Array(str: string) { - return new Uint8Array( - [].map.call(str, function (ch: string) { - return ch.charCodeAt(0); - }) as number[], - ); -} - -it("should work with .clear()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.push({})).toBeUndefined(); - expect(list.length).toBe(1); - expect(list.push({})).toBeUndefined(); - expect(list.length).toBe(2); - expect(list.clear()).toBeUndefined(); - expect(list.length).toBe(0); -}); - -it("should work with .concat()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.push(makeUint8Array("foo"))).toBeUndefined(); - expect(list.length).toBe(1); - expect(list.concat(3)).toEqual(new Uint8Array([102, 111, 111])); - expect(list.push(makeUint8Array("bar"))).toBeUndefined(); - expect(list.length).toBe(2); - expect(list.concat(10)).toEqual(new Uint8Array([102, 111, 111, 98, 97, 114, 0, 0, 0, 0])); -}); - -it("should fail on .concat() with invalid items", () => { - // @ts-ignore 
- const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.push("foo")).toBeUndefined(); - expect(() => { - list.concat(42); - }).toThrow(TypeError); -}); - -it("should fail on .concat() buffer overflow", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.push(makeUint8Array("foo"))).toBeUndefined(); - expect(list.length).toBe(1); - expect(() => { - list.concat(2); - }).toThrow(RangeError); - expect(list.push(makeUint8Array("bar"))).toBeUndefined(); - expect(list.length).toBe(2); - expect(() => { - list.concat(5); - }).toThrow(RangeError); -}); - -it("should work with .consume() on strings", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.consume(42, true)).toBe(""); - expect(list.push("foo")).toBeUndefined(); - expect(list.push("bar")).toBeUndefined(); - expect(list.push("baz")).toBeUndefined(); - expect(list.push("moo")).toBeUndefined(); - expect(list.push("moz")).toBeUndefined(); - expect(list.length).toBe(5); - expect(list.consume(3, true)).toBe("foo"); - expect(list.length).toBe(4); - expect(list.consume(4, true)).toBe("barb"); - expect(list.length).toBe(3); - expect(list.consume(256, true)).toBe("azmoomoz"); - expect(list.length).toBe(0); -}); - -it("should work with .consume() on buffers", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.consume(42, false)).toEqual(new Uint8Array()); - expect(list.push(makeUint8Array("foo"))).toBeUndefined(); - expect(list.push(makeUint8Array("bar"))).toBeUndefined(); - expect(list.push(makeUint8Array("baz"))).toBeUndefined(); - expect(list.push(makeUint8Array("moo"))).toBeUndefined(); - expect(list.push(makeUint8Array("moz"))).toBeUndefined(); - expect(list.length).toBe(5); - expect(list.consume(3, false)).toEqual(makeUint8Array("foo")); - expect(list.length).toBe(4); - expect(list.consume(2, 
false)).toEqual(makeUint8Array("ba")); - expect(list.length).toBe(4); - expect(list.consume(4, false)).toEqual(makeUint8Array("rbaz")); - expect(list.length).toBe(2); - expect(list.consume(10, false)).toEqual(new Uint8Array([109, 111, 111, 109, 111, 122, 0, 0, 0, 0])); - expect(list.length).toBe(0); -}); - -it("should fail on .consume() with invalid items", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.push("foo")).toBeUndefined(); - expect(list.length).toBe(1); - expect(list.consume(0, false)).toEqual(new Uint8Array([])); - expect(() => { - list.consume(1, false); - }).toThrow(TypeError); - expect(list.consume(3, true)).toBe("foo"); - expect(list.length).toBe(0); - expect(list.push(makeUint8Array("bar"))).toBeUndefined(); - expect(list.length).toBe(1); - expect(list.consume(0, true)).toEqual(""); - expect(() => { - list.consume(1, true); - }).toThrow(TypeError); - expect(list.consume(3, false)).toEqual(new Uint8Array([98, 97, 114])); -}); - -it("should work with .first()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.first()).toBeUndefined(); - const item = {}; - expect(list.push(item)).toBeUndefined(); - expect(list.length).toBe(1); - expect(list.first()).toBe(item); -}); - -it("should work with .join()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.push(42)).toBeUndefined(); - expect(list.push(null)).toBeUndefined(); - expect(list.push("foo")).toBeUndefined(); - expect(list.push(makeUint8Array("bar"))).toBeUndefined(); - expect(list.length).toBe(4); - expect(list.join("")).toBe("42nullfoo98,97,114"); - expect(list.join(",")).toBe("42,null,foo,98,97,114"); - expect(list.join(" baz ")).toBe("42 baz null baz foo baz 98,97,114"); -}); - -it("should work with .push()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - 
expect(list.length).toBe(0); - const item1 = {}; - expect(list.push(item1)).toBeUndefined(); - expect(list.length).toBe(1); - expect(list.first()).toBe(item1); - const item2 = {}; - expect(list.push(item2)).toBeUndefined(); - expect(list.length).toBe(2); - expect(list.shift()).toBe(item1); - expect(list.shift()).toBe(item2); - expect(list.shift()).toBeUndefined(); -}); - -it("should work with .shift()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.shift()).toBeUndefined(); - const item = {}; - expect(list.push(item)).toBeUndefined(); - expect(list.length).toBe(1); - expect(list.shift()).toBe(item); - expect(list.shift()).toBeUndefined(); -}); - -it("should work with .unshift()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - const item1 = {}; - expect(list.unshift(item1)).toBeUndefined(); - expect(list.length).toBe(1); - expect(list.first()).toBe(item1); - const item2 = {}; - expect(list.push(item2)).toBeUndefined(); - expect(list.length).toBe(2); - expect(list.first()).toBe(item1); - const item3 = {}; - expect(list.unshift(item3)).toBeUndefined(); - expect(list.length).toBe(3); - expect(list.shift()).toBe(item3); - expect(list.shift()).toBe(item1); - expect(list.shift()).toBe(item2); - expect(list.shift()).toBeUndefined(); -}); - -it("should work with multiple partial .consume() from buffers", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.push(Buffer.from("f000baaa", "hex"))).toBeUndefined(); - expect(list.length).toBe(1); - expect(list.consume(2, undefined)).toEqual(Buffer.from("f000", "hex")); - expect(list.consume(1, undefined)).toEqual(Buffer.from("ba", "hex")); - expect(list.length).toBe(1); -}); - -it("should work with partial .consume() followed by .first()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - 
expect(list.push("foo")).toBeUndefined(); - expect(list.push("bar")).toBeUndefined(); - expect(list.length).toBe(2); - expect(list.consume(4, true)).toEqual("foob"); - expect(list.length).toBe(1); - expect(list.first()).toEqual("ar"); - expect(list.length).toBe(1); -}); - -it("should work with partial .consume() followed by .shift()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.push(makeUint8Array("foo"))).toBeUndefined(); - expect(list.push(makeUint8Array("bar"))).toBeUndefined(); - expect(list.length).toBe(2); - expect(list.consume(4, false)).toEqual(makeUint8Array("foob")); - expect(list.length).toBe(1); - expect(list.shift()).toEqual(makeUint8Array("ar")); - expect(list.length).toBe(0); -}); - -it("should work with partial .consume() followed by .unshift()", () => { - // @ts-ignore - const list = new Readable().readableBuffer; - expect(list.length).toBe(0); - expect(list.push(makeUint8Array("😋😋😋"))).toBeUndefined(); - expect(list.push(makeUint8Array("📋📋📋"))).toBeUndefined(); - expect(list.length).toBe(2); - expect(list.consume(7, false)).toEqual(new Uint8Array([61, 11, 61, 11, 61, 11, 61])); - expect(list.length).toBe(1); - expect(list.unshift(makeUint8Array("👌👌👌"))).toBeUndefined(); - expect(list.length).toBe(2); - expect(list.consume(12, false)).toEqual(new Uint8Array([61, 76, 61, 76, 61, 76, 203, 61, 203, 61, 203, 0])); - expect(list.length).toBe(0); -}); diff --git a/test/js/node/stream/node-stream-uint8array.test.ts b/test/js/node/stream/node-stream-uint8array.test.ts index fd27592240..5072706bd9 100644 --- a/test/js/node/stream/node-stream-uint8array.test.ts +++ b/test/js/node/stream/node-stream-uint8array.test.ts @@ -47,7 +47,7 @@ describe("Writable", () => { expect(chunk instanceof Buffer).toBe(false); expect(chunk instanceof Uint8Array).toBe(true); expect(chunk).toStrictEqual(ABC); - expect(encoding).toBe("utf8"); + expect(encoding).toBeUndefined(); cb(); }, 0), }); diff --git 
a/test/js/node/test/parallel/test-http2-compat-serverresponse-drain.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-drain.js deleted file mode 100644 index 7ccbb1f4d2..0000000000 --- a/test/js/node/test/parallel/test-http2-compat-serverresponse-drain.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -const common = require('../common'); -if (!common.hasCrypto) - common.skip('missing crypto'); -const assert = require('assert'); -const h2 = require('http2'); - -// Check that drain event is passed from Http2Stream - -const testString = 'tests'; - -const server = h2.createServer(); - -server.on('request', common.mustCall((req, res) => { - res.stream._writableState.highWaterMark = testString.length; - assert.strictEqual(res.write(testString), false); - res.on('drain', common.mustCall(() => res.end(testString))); -})); - -server.listen(0, common.mustCall(() => { - const port = server.address().port; - - const client = h2.connect(`http://localhost:${port}`); - const request = client.request({ - ':path': '/foobar', - ':method': 'POST', - ':scheme': 'http', - ':authority': `localhost:${port}` - }); - request.resume(); - request.end(); - - let data = ''; - request.setEncoding('utf8'); - request.on('data', (chunk) => (data += chunk)); - - request.on('end', common.mustCall(function() { - assert.strictEqual(data, testString.repeat(2)); - client.close(); - server.close(); - })); -})); diff --git a/test/js/node/test/parallel/test-readable-from-web-enqueue-then-close.js b/test/js/node/test/parallel/test-readable-from-web-enqueue-then-close.js new file mode 100644 index 0000000000..e96df70c9e --- /dev/null +++ b/test/js/node/test/parallel/test-readable-from-web-enqueue-then-close.js @@ -0,0 +1,26 @@ +'use strict'; +const { mustCall } = require('../common'); +const { Readable, Duplex } = require('stream'); +const { strictEqual } = require('assert'); + +function start(controller) { + controller.enqueue(new Uint8Array(1)); + controller.close(); +} + 
+Readable.fromWeb(new ReadableStream({ start })) +.on('data', mustCall((d) => { + strictEqual(d.length, 1); +})) +.on('end', mustCall()) +.resume(); + +Duplex.fromWeb({ + readable: new ReadableStream({ start }), + writable: new WritableStream({ write(chunk) {} }) +}) +.on('data', mustCall((d) => { + strictEqual(d.length, 1); +})) +.on('end', mustCall()) +.resume(); diff --git a/test/js/node/test/parallel/test-stream-aliases-legacy.js b/test/js/node/test/parallel/test-stream-aliases-legacy.js new file mode 100644 index 0000000000..2c87f0ad0f --- /dev/null +++ b/test/js/node/test/parallel/test-stream-aliases-legacy.js @@ -0,0 +1,14 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const stream = require('stream'); + +// Verify that all individual aliases are left in place. + +assert.strictEqual(stream.Readable, require('_stream_readable')); +assert.strictEqual(stream.Writable, require('_stream_writable')); +assert.strictEqual(stream.Duplex, require('_stream_duplex')); +assert.strictEqual(stream.Transform, require('_stream_transform')); +assert.strictEqual(stream.PassThrough, require('_stream_passthrough')); diff --git a/test/js/node/test/parallel/test-stream-compose-operator.js b/test/js/node/test/parallel/test-stream-compose-operator.js new file mode 100644 index 0000000000..4fefb004f5 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-compose-operator.js @@ -0,0 +1,127 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, Transform, +} = require('stream'); +const assert = require('assert'); + +{ + // with async generator + const stream = Readable.from(['a', 'b', 'c', 'd']).compose(async function *(stream) { + let str = ''; + for await (const chunk of stream) { + str += chunk; + + if (str.length === 2) { + yield str; + str = ''; + } + } + }); + const result = ['ab', 'cd']; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + 
})().then(common.mustCall()); +} + +{ + // With Transformer + const stream = Readable.from(['a', 'b', 'c', 'd']).compose(new Transform({ + objectMode: true, + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk); + }, 4) + })); + const result = ['a', 'b', 'c', 'd']; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + })().then(common.mustCall()); +} + +{ + // Throwing an error during `compose` (before waiting for data) + const stream = Readable.from([1, 2, 3, 4, 5]).compose(async function *(stream) { // eslint-disable-line require-yield + + throw new Error('boom'); + }); + + assert.rejects(async () => { + for await (const item of stream) { + assert.fail('should not reach here, got ' + item); + } + }, /boom/).then(common.mustCall()); +} + +{ + // Throwing an error during `compose` (when waiting for data) + const stream = Readable.from([1, 2, 3, 4, 5]).compose(async function *(stream) { + for await (const chunk of stream) { + if (chunk === 3) { + throw new Error('boom'); + } + yield chunk; + } + }); + + assert.rejects( + stream.toArray(), + /boom/, + ).then(common.mustCall()); +} + +{ + // Throwing an error during `compose` (after finishing all readable data) + const stream = Readable.from([1, 2, 3, 4, 5]).compose(async function *(stream) { // eslint-disable-line require-yield + + // eslint-disable-next-line no-unused-vars,no-empty + for await (const chunk of stream) { + } + + throw new Error('boom'); + }); + assert.rejects( + stream.toArray(), + /boom/, + ).then(common.mustCall()); +} + +{ + // AbortSignal + const ac = new AbortController(); + const stream = Readable.from([1, 2, 3, 4, 5]) + .compose(async function *(source) { + // Should not reach here + for await (const chunk of source) { + yield chunk; + } + }, { signal: ac.signal }); + + ac.abort(); + + assert.rejects(async () => { + for await (const item of stream) { + assert.fail('should not reach here, got ' + item); + } + }, { 
+ name: 'AbortError', + }).then(common.mustCall()); +} + +{ + assert.throws( + () => Readable.from(['a']).compose(Readable.from(['b'])), + { code: 'ERR_INVALID_ARG_VALUE' } + ); +} + +{ + assert.throws( + () => Readable.from(['a']).compose(), + { code: 'ERR_INVALID_ARG_TYPE' } + ); +} diff --git a/test/js/node/test/parallel/test-stream-compose.js b/test/js/node/test/parallel/test-stream-compose.js new file mode 100644 index 0000000000..d7a54e1776 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-compose.js @@ -0,0 +1,539 @@ +'use strict'; + +const common = require('../common'); +const { + Duplex, + Readable, + Transform, + Writable, + finished, + compose, + PassThrough +} = require('stream'); +const assert = require('assert'); + +{ + let res = ''; + compose( + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk + chunk); + }) + }), + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }) + ) + .end('asd') + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'ASDASD'); + })); +} + +{ + let res = ''; + compose( + async function*(source) { + for await (const chunk of source) { + yield chunk + chunk; + } + }, + async function*(source) { + for await (const chunk of source) { + yield chunk.toString().toUpperCase(); + } + } + ) + .end('asd') + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'ASDASD'); + })); +} + +{ + let res = ''; + compose( + async function*(source) { + for await (const chunk of source) { + yield chunk + chunk; + } + } + ) + .end('asd') + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'asdasd'); + })); +} + +{ + let res = ''; + compose( + Readable.from(['asd']), + new Transform({ + transform: 
common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }) + ) + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'ASD'); + })); +} + +{ + let res = ''; + compose( + async function* () { + yield 'asd'; + }(), + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }) + ) + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'ASD'); + })); +} + +{ + let res = ''; + compose( + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }), + async function*(source) { + for await (const chunk of source) { + yield chunk; + } + }, + new Writable({ + write: common.mustCall((chunk, encoding, callback) => { + res += chunk; + callback(null); + }) + }) + ) + .end('asd') + .on('finish', common.mustCall(() => { + assert.strictEqual(res, 'ASD'); + })); +} + +{ + let res = ''; + compose( + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }), + async function*(source) { + for await (const chunk of source) { + yield chunk; + } + }, + async function(source) { + for await (const chunk of source) { + res += chunk; + } + } + ) + .end('asd') + .on('finish', common.mustCall(() => { + assert.strictEqual(res, 'ASD'); + })); +} + +{ + let res; + compose( + new Transform({ + objectMode: true, + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, { chunk }); + }) + }), + async function*(source) { + for await (const chunk of source) { + yield chunk; + } + }, + new Transform({ + objectMode: true, + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, { chunk }); + }) + }) + ) + .end(true) + .on('data', 
common.mustCall((buf) => { + res = buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res.chunk.chunk, true); + })); +} + +{ + const _err = new Error('asd'); + compose( + new Transform({ + objectMode: true, + transform: common.mustCall((chunk, encoding, callback) => { + callback(_err); + }) + }), + async function*(source) { + for await (const chunk of source) { + yield chunk; + } + }, + new Transform({ + objectMode: true, + transform: common.mustNotCall((chunk, encoding, callback) => { + callback(null, { chunk }); + }) + }) + ) + .end(true) + .on('data', common.mustNotCall()) + .on('end', common.mustNotCall()) + .on('error', (err) => { + assert.strictEqual(err, _err); + }); +} + +{ + const _err = new Error('asd'); + compose( + new Transform({ + objectMode: true, + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk); + }) + }), + async function*(source) { // eslint-disable-line require-yield + let tmp = ''; + for await (const chunk of source) { + tmp += chunk; + throw _err; + } + return tmp; + }, + new Transform({ + objectMode: true, + transform: common.mustNotCall((chunk, encoding, callback) => { + callback(null, { chunk }); + }) + }) + ) + .end(true) + .on('data', common.mustNotCall()) + .on('end', common.mustNotCall()) + .on('error', (err) => { + assert.strictEqual(err, _err); + }); +} + +{ + let buf = ''; + + // Convert into readable Duplex. + const s1 = compose(async function* () { + yield 'Hello'; + yield 'World'; + }(), async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }, async function(source) { + for await (const chunk of source) { + buf += chunk; + } + }); + + assert.strictEqual(s1.writable, false); + assert.strictEqual(s1.readable, false); + + finished(s1.resume(), common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + +{ + let buf = ''; + // Convert into transform duplex. 
+ const s2 = compose(async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }); + s2.end('helloworld'); + s2.resume(); + s2.on('data', (chunk) => { + buf += chunk; + }); + + finished(s2.resume(), common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + +{ + let buf = ''; + + // Convert into readable Duplex. + const s1 = compose(async function* () { + yield 'Hello'; + yield 'World'; + }()); + + // Convert into transform duplex. + const s2 = compose(async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }); + + // Convert into writable duplex. + const s3 = compose(async function(source) { + for await (const chunk of source) { + buf += chunk; + } + }); + + const s4 = compose(s1, s2, s3); + + finished(s4, common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + +{ + let buf = ''; + + // Convert into readable Duplex. + const s1 = compose(async function* () { + yield 'Hello'; + yield 'World'; + }(), async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }, async function(source) { + for await (const chunk of source) { + buf += chunk; + } + }); + + finished(s1, common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + +{ + assert.throws( + () => compose(), + { code: 'ERR_MISSING_ARGS' } + ); +} + +{ + assert.throws( + () => compose(new Writable(), new PassThrough()), + { code: 'ERR_INVALID_ARG_VALUE' } + ); +} + +{ + assert.throws( + () => compose(new PassThrough(), new Readable({ read() {} }), new PassThrough()), + { code: 'ERR_INVALID_ARG_VALUE' } + ); +} + +{ + let buf = ''; + + // Convert into readable Duplex. 
+ const s1 = compose(async function* () { + yield 'Hello'; + yield 'World'; + }(), async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }, async function(source) { + for await (const chunk of source) { + buf += chunk; + } + return buf; + }); + + finished(s1, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_INVALID_RETURN_VALUE'); + })); +} + +{ + let buf = ''; + + // Convert into readable Duplex. + const s1 = compose('HelloWorld', async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }, async function(source) { + for await (const chunk of source) { + buf += chunk; + } + }); + + finished(s1, common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + +{ + // In the new stream than should use the writeable of the first stream and readable of the last stream + // #46829 + (async () => { + const newStream = compose( + new PassThrough({ + // reading FROM you in object mode or not + readableObjectMode: false, + + // writing TO you in object mode or not + writableObjectMode: false, + }), + new Transform({ + // reading FROM you in object mode or not + readableObjectMode: true, + + // writing TO you in object mode or not + writableObjectMode: false, + transform: (chunk, encoding, callback) => { + callback(null, { + value: chunk.toString() + }); + } + }) + ); + + assert.strictEqual(newStream.writableObjectMode, false); + assert.strictEqual(newStream.readableObjectMode, true); + + newStream.write('Steve Rogers'); + newStream.write('On your left'); + + newStream.end(); + + assert.deepStrictEqual(await newStream.toArray(), [{ value: 'Steve Rogers' }, { value: 'On your left' }]); + })().then(common.mustCall()); +} + +{ + // In the new stream than should use the writeable of the first stream and readable of the last stream + // #46829 + (async () => { + const newStream = compose( + new PassThrough({ + // reading FROM you in 
object mode or not + readableObjectMode: true, + + // writing TO you in object mode or not + writableObjectMode: true, + }), + new Transform({ + // reading FROM you in object mode or not + readableObjectMode: false, + + // writing TO you in object mode or not + writableObjectMode: true, + transform: (chunk, encoding, callback) => { + callback(null, chunk.value); + } + }) + ); + + assert.strictEqual(newStream.writableObjectMode, true); + assert.strictEqual(newStream.readableObjectMode, false); + + newStream.write({ value: 'Steve Rogers' }); + newStream.write({ value: 'On your left' }); + + newStream.end(); + + assert.deepStrictEqual(await newStream.toArray(), [Buffer.from('Steve RogersOn your left')]); + })().then(common.mustCall()); +} + +{ + class DuplexProcess extends Duplex { + constructor(options) { + super({ ...options, objectMode: true }); + this.stuff = []; + } + + _write(message, _, callback) { + this.stuff.push(message); + callback(); + } + + _destroy(err, cb) { + cb(err); + } + + _read() { + if (this.stuff.length) { + this.push(this.stuff.shift()); + } else if (this.writableEnded) { + this.push(null); + } else { + this._read(); + } + } + } + + const pass = new PassThrough({ objectMode: true }); + const duplex = new DuplexProcess(); + + const composed = compose( + pass, + duplex + ).on('error', () => {}); + + composed.write('hello'); + composed.write('world'); + composed.end(); + + composed.destroy(new Error('an unexpected error')); + assert.strictEqual(duplex.destroyed, true); + +} diff --git a/test/js/node/test/parallel/test-stream-consumers.js b/test/js/node/test/parallel/test-stream-consumers.js new file mode 100644 index 0000000000..883d55dc6f --- /dev/null +++ b/test/js/node/test/parallel/test-stream-consumers.js @@ -0,0 +1,262 @@ +// Flags: --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + arrayBuffer, + blob, + buffer, + text, + json, +} = require('stream/consumers'); + +const { + 
Readable, + PassThrough +} = require('stream'); + +const { + TransformStream, +} = require('stream/web'); + +const buf = Buffer.from('hellothere'); +const kArrayBuffer = + buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + +{ + const passthrough = new PassThrough(); + + blob(passthrough).then(common.mustCall(async (blob) => { + assert.strictEqual(blob.size, 10); + assert.deepStrictEqual(await blob.arrayBuffer(), kArrayBuffer); + })); + + passthrough.write('hello'); + setTimeout(() => passthrough.end('there'), 10); +} + +{ + const passthrough = new PassThrough(); + + arrayBuffer(passthrough).then(common.mustCall(async (ab) => { + assert.strictEqual(ab.byteLength, 10); + assert.deepStrictEqual(ab, kArrayBuffer); + })); + + passthrough.write('hello'); + setTimeout(() => passthrough.end('there'), 10); +} + +{ + const passthrough = new PassThrough(); + + buffer(passthrough).then(common.mustCall(async (buf) => { + assert.strictEqual(buf.byteLength, 10); + assert.deepStrictEqual(buf.buffer, kArrayBuffer); + })); + + passthrough.write('hello'); + setTimeout(() => passthrough.end('there'), 10); +} + + +{ + const passthrough = new PassThrough(); + + text(passthrough).then(common.mustCall(async (str) => { + assert.strictEqual(str.length, 10); + assert.strictEqual(str, 'hellothere'); + })); + + passthrough.write('hello'); + setTimeout(() => passthrough.end('there'), 10); +} + +{ + const readable = new Readable({ + read() {} + }); + + text(readable).then((data) => { + assert.strictEqual(data, 'foo\ufffd\ufffd\ufffd'); + }); + + readable.push(new Uint8Array([0x66, 0x6f, 0x6f, 0xed, 0xa0, 0x80])); + readable.push(null); +} + +{ + const passthrough = new PassThrough(); + + json(passthrough).then(common.mustCall(async (str) => { + assert.strictEqual(str.length, 10); + assert.strictEqual(str, 'hellothere'); + })); + + passthrough.write('"hello'); + setTimeout(() => passthrough.end('there"'), 10); +} + +{ + const { writable, readable } = new TransformStream(); + + 
blob(readable).then(common.mustCall(async (blob) => { + assert.strictEqual(blob.size, 10); + assert.deepStrictEqual(await blob.arrayBuffer(), kArrayBuffer); + })); + + const writer = writable.getWriter(); + writer.write('hello'); + setTimeout(() => { + writer.write('there'); + writer.close(); + }, 10); + + assert.rejects(blob(readable), { code: 'ERR_INVALID_STATE' }).then(common.mustCall()); +} + +{ + const { writable, readable } = new TransformStream(); + + arrayBuffer(readable).then(common.mustCall(async (ab) => { + assert.strictEqual(ab.byteLength, 10); + assert.deepStrictEqual(ab, kArrayBuffer); + })); + + const writer = writable.getWriter(); + writer.write('hello'); + setTimeout(() => { + writer.write('there'); + writer.close(); + }, 10); + + assert.rejects(arrayBuffer(readable), { code: 'ERR_INVALID_STATE' }).then(common.mustCall()); +} + +{ + const { writable, readable } = new TransformStream(); + + text(readable).then(common.mustCall(async (str) => { + assert.strictEqual(str.length, 10); + assert.strictEqual(str, 'hellothere'); + })); + + const writer = writable.getWriter(); + writer.write('hello'); + setTimeout(() => { + writer.write('there'); + writer.close(); + }, 10); + + assert.rejects(text(readable), { code: 'ERR_INVALID_STATE' }).then(common.mustCall()); +} + +{ + const { writable, readable } = new TransformStream(); + + json(readable).then(common.mustCall(async (str) => { + assert.strictEqual(str.length, 10); + assert.strictEqual(str, 'hellothere'); + })); + + const writer = writable.getWriter(); + writer.write('"hello'); + setTimeout(() => { + writer.write('there"'); + writer.close(); + }, 10); + + assert.rejects(json(readable), { code: 'ERR_INVALID_STATE' }).then(common.mustCall()); +} + +{ + const stream = new PassThrough({ + readableObjectMode: true, + writableObjectMode: true, + }); + + blob(stream).then(common.mustCall((blob) => { + assert.strictEqual(blob.size, 30); + })); + + stream.write({}); + stream.end({}); +} + +{ + const stream = new 
PassThrough({ + readableObjectMode: true, + writableObjectMode: true, + }); + + arrayBuffer(stream).then(common.mustCall((ab) => { + assert.strictEqual(ab.byteLength, 30); + assert.strictEqual( + Buffer.from(ab).toString(), + '[object Object][object Object]'); + })); + + stream.write({}); + stream.end({}); +} + +{ + const stream = new PassThrough({ + readableObjectMode: true, + writableObjectMode: true, + }); + + buffer(stream).then(common.mustCall((buf) => { + assert.strictEqual(buf.byteLength, 30); + assert.strictEqual( + buf.toString(), + '[object Object][object Object]'); + })); + + stream.write({}); + stream.end({}); +} + +{ + const stream = new PassThrough({ + readableObjectMode: true, + writableObjectMode: true, + }); + + assert.rejects(text(stream), { + code: 'ERR_INVALID_ARG_TYPE', + }).then(common.mustCall()); + + stream.write({}); + stream.end({}); +} + +{ + const stream = new PassThrough({ + readableObjectMode: true, + writableObjectMode: true, + }); + + assert.rejects(json(stream), { + code: 'ERR_INVALID_ARG_TYPE', + }).then(common.mustCall()); + + stream.write({}); + stream.end({}); +} + +{ + const stream = new TransformStream(); + text(stream.readable).then(common.mustCall((str) => { + // Incomplete utf8 character is flushed as a replacement char + assert.strictEqual(str.charCodeAt(0), 0xfffd); + })); + const writer = stream.writable.getWriter(); + Promise.all([ + writer.write(new Uint8Array([0xe2])), + writer.write(new Uint8Array([0x82])), + writer.close(), + ]).then(common.mustCall()); +} diff --git a/test/js/node/test/parallel/test-stream-drop-take.js b/test/js/node/test/parallel/test-stream-drop-take.js new file mode 100644 index 0000000000..97e6c74dfa --- /dev/null +++ b/test/js/node/test/parallel/test-stream-drop-take.js @@ -0,0 +1,124 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const { deepStrictEqual, rejects, throws, strictEqual } = require('assert'); + +const { from } = Readable; + 
+const fromAsync = (...args) => from(...args).map(async (x) => x); + +const naturals = () => from(async function*() { + let i = 1; + while (true) { + yield i++; + } +}()); + +{ + // Synchronous streams + (async () => { + deepStrictEqual(await from([1, 2, 3]).drop(2).toArray(), [3]); + deepStrictEqual(await from([1, 2, 3]).take(1).toArray(), [1]); + deepStrictEqual(await from([]).drop(2).toArray(), []); + deepStrictEqual(await from([]).take(1).toArray(), []); + deepStrictEqual(await from([1, 2, 3]).drop(1).take(1).toArray(), [2]); + deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2]); + deepStrictEqual(await from([1, 2]).take(0).toArray(), []); + })().then(common.mustCall()); + // Asynchronous streams + (async () => { + deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3]); + deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1]); + deepStrictEqual(await fromAsync([]).drop(2).toArray(), []); + deepStrictEqual(await fromAsync([]).take(1).toArray(), []); + deepStrictEqual(await fromAsync([1, 2, 3]).drop(1).take(1).toArray(), [2]); + deepStrictEqual(await fromAsync([1, 2]).drop(0).toArray(), [1, 2]); + deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), []); + })().then(common.mustCall()); + // Infinite streams + // Asynchronous streams + (async () => { + deepStrictEqual(await naturals().take(1).toArray(), [1]); + deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2]); + const next10 = [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]; + deepStrictEqual(await naturals().drop(10).take(10).toArray(), next10); + deepStrictEqual(await naturals().take(5).take(1).toArray(), [1]); + })().then(common.mustCall()); +} + + +// Don't wait for next item in the original stream when already consumed the requested take amount +{ + let reached = false; + let resolve; + const promise = new Promise((res) => resolve = res); + + const stream = from((async function *() { + yield 1; + await promise; + reached = true; + yield 2; + })()); + + 
stream.take(1) + .toArray() + .then(common.mustCall(() => { + strictEqual(reached, false); + })) + .finally(() => resolve()); +} + +{ + // Coercion + (async () => { + // The spec made me do this ^^ + deepStrictEqual(await naturals().take('cat').toArray(), []); + deepStrictEqual(await naturals().take('2').toArray(), [1, 2]); + deepStrictEqual(await naturals().take(true).toArray(), [1]); + })().then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + rejects( + Readable.from([1, 2, 3]).take(1, { signal: ac.signal }).toArray(), { + name: 'AbortError', + }).then(common.mustCall()); + rejects( + Readable.from([1, 2, 3]).drop(1, { signal: ac.signal }).toArray(), { + name: 'AbortError', + }).then(common.mustCall()); + ac.abort(); +} + +{ + // Support for AbortSignal, already aborted + const signal = AbortSignal.abort(); + rejects( + Readable.from([1, 2, 3]).take(1, { signal }).toArray(), { + name: 'AbortError', + }).then(common.mustCall()); +} + +{ + // Error cases + const invalidArgs = [ + -1, + -Infinity, + -40, + ]; + + for (const example of invalidArgs) { + throws(() => from([]).take(example).toArray(), /ERR_OUT_OF_RANGE/); + } + + throws(() => Readable.from([1]).drop(1, 1), /ERR_INVALID_ARG_TYPE/); + throws(() => Readable.from([1]).drop(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); + + throws(() => Readable.from([1]).take(1, 1), /ERR_INVALID_ARG_TYPE/); + throws(() => Readable.from([1]).take(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); +} diff --git a/test/js/node/test/parallel/test-stream-duplex-destroy.js b/test/js/node/test/parallel/test-stream-duplex-destroy.js new file mode 100644 index 0000000000..5286738405 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-duplex-destroy.js @@ -0,0 +1,286 @@ +'use strict'; + +const common = require('../common'); +const { Duplex } = require('stream'); +const assert = require('assert'); + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + 
+ duplex.resume(); + + duplex.on('end', common.mustNotCall()); + duplex.on('finish', common.mustNotCall()); + duplex.on('close', common.mustCall()); + + duplex.destroy(); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + duplex.resume(); + + const expected = new Error('kaboom'); + + duplex.on('end', common.mustNotCall()); + duplex.on('finish', common.mustNotCall()); + duplex.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + duplex.destroy(expected); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + + duplex._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(err); + }); + + const expected = new Error('kaboom'); + + duplex.on('finish', common.mustNotCall('no finish event')); + duplex.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + duplex.destroy(expected); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const expected = new Error('kaboom'); + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {}, + destroy: common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(); + }) + }); + duplex.resume(); + + duplex.on('end', common.mustNotCall('no end event')); + duplex.on('finish', common.mustNotCall('no finish event')); + + // Error is swallowed by the custom _destroy + duplex.on('error', common.mustNotCall('no error event')); + duplex.on('close', common.mustCall()); + + duplex.destroy(expected); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + + duplex._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(); + }); + + duplex.destroy(); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ 
+ write(chunk, enc, cb) { cb(); }, + read() {} + }); + duplex.resume(); + + duplex._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + process.nextTick(() => { + this.push(null); + this.end(); + cb(); + }); + }); + + const fail = common.mustNotCall('no finish or end event'); + + duplex.on('finish', fail); + duplex.on('end', fail); + + duplex.destroy(); + + duplex.removeListener('end', fail); + duplex.removeListener('finish', fail); + duplex.on('end', common.mustNotCall()); + duplex.on('finish', common.mustNotCall()); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + + const expected = new Error('kaboom'); + + duplex._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(expected); + }); + + duplex.on('finish', common.mustNotCall('no finish event')); + duplex.on('end', common.mustNotCall('no end event')); + duplex.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + duplex.destroy(); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {}, + allowHalfOpen: true + }); + duplex.resume(); + + duplex.on('finish', common.mustNotCall()); + duplex.on('end', common.mustNotCall()); + + duplex.destroy(); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {}, + }); + + duplex.destroyed = true; + assert.strictEqual(duplex.destroyed, true); + + // The internal destroy() mechanism should not be triggered + duplex.on('finish', common.mustNotCall()); + duplex.on('end', common.mustNotCall()); + duplex.destroy(); +} + +{ + function MyDuplex() { + assert.strictEqual(this.destroyed, false); + this.destroyed = false; + Duplex.call(this); + } + + Object.setPrototypeOf(MyDuplex.prototype, Duplex.prototype); + Object.setPrototypeOf(MyDuplex, Duplex); + + new 
MyDuplex(); +} + +{ + const duplex = new Duplex({ + writable: false, + autoDestroy: true, + write(chunk, enc, cb) { cb(); }, + read() {}, + }); + duplex.push(null); + duplex.resume(); + duplex.on('close', common.mustCall()); +} + +{ + const duplex = new Duplex({ + readable: false, + autoDestroy: true, + write(chunk, enc, cb) { cb(); }, + read() {}, + }); + duplex.end(); + duplex.on('close', common.mustCall()); +} + +{ + const duplex = new Duplex({ + allowHalfOpen: false, + autoDestroy: true, + write(chunk, enc, cb) { cb(); }, + read() {}, + }); + duplex.push(null); + duplex.resume(); + const orgEnd = duplex.end; + duplex.end = common.mustNotCall(); + duplex.on('end', () => { + // Ensure end() is called in next tick to allow + // any pending writes to be invoked first. + process.nextTick(() => { + duplex.end = common.mustCall(orgEnd); + }); + }); + duplex.on('close', common.mustCall()); +} + +{ + // Check abort signal + const controller = new AbortController(); + const { signal } = controller; + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {}, + signal, + }); + let count = 0; + duplex.on('error', common.mustCall((e) => { + assert.strictEqual(count++, 0); // Ensure not called twice + assert.strictEqual(e.name, 'AbortError'); + })); + duplex.on('close', common.mustCall()); + controller.abort(); +} + +{ + const duplex = new Duplex({ + read() {}, + write(chunk, enc, cb) { cb(); } + }); + + duplex.cork(); + duplex.write('foo', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); + })); + duplex.destroy(); +} + +{ + // Check Symbol.asyncDispose + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {}, + }); + let count = 0; + duplex.on('error', common.mustCall((e) => { + assert.strictEqual(count++, 0); // Ensure not called twice + assert.strictEqual(e.name, 'AbortError'); + })); + duplex.on('close', common.mustCall()); + duplex[Symbol.asyncDispose]().then(common.mustCall()); +} diff --git 
a/test/js/node/test/parallel/test-stream-duplex-from.js b/test/js/node/test/parallel/test-stream-duplex-from.js new file mode 100644 index 0000000000..e3c117ff8d --- /dev/null +++ b/test/js/node/test/parallel/test-stream-duplex-from.js @@ -0,0 +1,403 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { Duplex, Readable, Writable, pipeline, PassThrough } = require('stream'); +const { ReadableStream, WritableStream } = require('stream/web'); +const { Blob } = require('buffer'); + +{ + const d = Duplex.from({ + readable: new Readable({ + read() { + this.push('asd'); + this.push(null); + } + }) + }); + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, false); + d.once('readable', common.mustCall(function() { + assert.strictEqual(d.read().toString(), 'asd'); + })); + d.once('end', common.mustCall(function() { + assert.strictEqual(d.readable, false); + })); +} + +{ + const d = Duplex.from(new Readable({ + read() { + this.push('asd'); + this.push(null); + } + })); + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, false); + d.once('readable', common.mustCall(function() { + assert.strictEqual(d.read().toString(), 'asd'); + })); + d.once('end', common.mustCall(function() { + assert.strictEqual(d.readable, false); + })); +} + +{ + let ret = ''; + const d = Duplex.from(new Writable({ + write(chunk, encoding, callback) { + ret += chunk; + callback(); + } + })); + assert.strictEqual(d.readable, false); + assert.strictEqual(d.writable, true); + d.end('asd'); + d.on('finish', common.mustCall(function() { + assert.strictEqual(d.writable, false); + assert.strictEqual(ret, 'asd'); + })); +} + +{ + let ret = ''; + const d = Duplex.from({ + writable: new Writable({ + write(chunk, encoding, callback) { + ret += chunk; + callback(); + } + }) + }); + assert.strictEqual(d.readable, false); + assert.strictEqual(d.writable, true); + d.end('asd'); + d.on('finish', common.mustCall(function() { + 
assert.strictEqual(d.writable, false); + assert.strictEqual(ret, 'asd'); + })); +} + +{ + let ret = ''; + const d = Duplex.from({ + readable: new Readable({ + read() { + this.push('asd'); + this.push(null); + } + }), + writable: new Writable({ + write(chunk, encoding, callback) { + ret += chunk; + callback(); + } + }) + }); + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, true); + d.once('readable', common.mustCall(function() { + assert.strictEqual(d.read().toString(), 'asd'); + })); + d.once('end', common.mustCall(function() { + assert.strictEqual(d.readable, false); + })); + d.end('asd'); + d.once('finish', common.mustCall(function() { + assert.strictEqual(d.writable, false); + assert.strictEqual(ret, 'asd'); + })); +} + +{ + const d = Duplex.from(Promise.resolve('asd')); + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, false); + d.once('readable', common.mustCall(function() { + assert.strictEqual(d.read().toString(), 'asd'); + })); + d.once('end', common.mustCall(function() { + assert.strictEqual(d.readable, false); + })); +} + +{ + // https://github.com/nodejs/node/issues/40497 + pipeline( + ['abc\ndef\nghi'], + Duplex.from(async function * (source) { + let rest = ''; + for await (const chunk of source) { + const lines = (rest + chunk.toString()).split('\n'); + rest = lines.pop(); + for (const line of lines) { + yield line; + } + } + yield rest; + }), + async function * (source) { // eslint-disable-line require-yield + let ret = ''; + for await (const x of source) { + ret += x; + } + assert.strictEqual(ret, 'abcdefghi'); + }, + common.mustSucceed(), + ); +} + +// Ensure that isDuplexNodeStream was called +{ + const duplex = new Duplex(); + assert.strictEqual(Duplex.from(duplex), duplex); +} + +// Ensure that Duplex.from works for blobs +{ + const blob = new Blob(['blob']); + const expectedByteLength = blob.size; + const duplex = Duplex.from(blob); + duplex.on('data', common.mustCall((arrayBuffer) => { + 
assert.strictEqual(arrayBuffer.byteLength, expectedByteLength); + })); +} + +// Ensure that given a promise rejection it emits an error +{ + const myErrorMessage = 'myCustomError'; + Duplex.from(Promise.reject(myErrorMessage)) + .on('error', common.mustCall((error) => { + assert.strictEqual(error, myErrorMessage); + })); +} + +// Ensure that given a promise rejection on an async function it emits an error +{ + const myErrorMessage = 'myCustomError'; + async function asyncFn() { + return Promise.reject(myErrorMessage); + } + + Duplex.from(asyncFn) + .on('error', common.mustCall((error) => { + assert.strictEqual(error, myErrorMessage); + })); +} + +// Ensure that Duplex.from throws an Invalid return value when function is void +{ + assert.throws(() => Duplex.from(() => {}), { + code: 'ERR_INVALID_RETURN_VALUE', + }); +} + +// Ensure data if a sub object has a readable stream it's duplexified +{ + const msg = Buffer.from('hello'); + const duplex = Duplex.from({ + readable: Readable({ + read() { + this.push(msg); + this.push(null); + } + }) + }).on('data', common.mustCall((data) => { + assert.strictEqual(data, msg); + })); + + assert.strictEqual(duplex.writable, false); +} + +// Ensure data if a sub object has a writable stream it's duplexified +{ + const msg = Buffer.from('hello'); + const duplex = Duplex.from({ + writable: Writable({ + write: common.mustCall((data) => { + assert.strictEqual(data, msg); + }) + }) + }); + + duplex.write(msg); + assert.strictEqual(duplex.readable, false); +} + +// Ensure data if a sub object has a writable and readable stream it's duplexified +{ + const msg = Buffer.from('hello'); + + const duplex = Duplex.from({ + readable: Readable({ + read() { + this.push(msg); + this.push(null); + } + }), + writable: Writable({ + write: common.mustCall((data) => { + assert.strictEqual(data, msg); + }) + }) + }); + + duplex.pipe(duplex) + .on('data', common.mustCall((data) => { + assert.strictEqual(data, msg); + assert.strictEqual(duplex.readable, 
true); + assert.strictEqual(duplex.writable, true); + })) + .on('end', common.mustCall()); +} + +// Ensure that given readable stream that throws an error it calls destroy +{ + const myErrorMessage = 'error!'; + const duplex = Duplex.from(Readable({ + read() { + throw new Error(myErrorMessage); + } + })); + duplex.on('error', common.mustCall((msg) => { + assert.strictEqual(msg.message, myErrorMessage); + })); +} + +// Ensure that given writable stream that throws an error it calls destroy +{ + const myErrorMessage = 'error!'; + const duplex = Duplex.from(Writable({ + write(chunk, enc, cb) { + cb(myErrorMessage); + } + })); + + duplex.on('error', common.mustCall((msg) => { + assert.strictEqual(msg, myErrorMessage); + })); + + duplex.write('test'); +} + +{ + const through = new PassThrough({ objectMode: true }); + + let res = ''; + const d = Readable.from(['foo', 'bar'], { objectMode: true }) + .pipe(Duplex.from({ + writable: through, + readable: through + })); + + d.on('data', (data) => { + d.pause(); + setImmediate(() => { + d.resume(); + }); + res += data; + }).on('end', common.mustCall(() => { + assert.strictEqual(res, 'foobar'); + })).on('close', common.mustCall()); +} + +function makeATestReadableStream(value) { + return new ReadableStream({ + start(controller) { + controller.enqueue(value); + controller.close(); + } + }); +} + +function makeATestWritableStream(writeFunc) { + return new WritableStream({ + write(chunk) { + writeFunc(chunk); + } + }); +} + +{ + const d = Duplex.from({ + readable: makeATestReadableStream('foo'), + }); + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, false); + + d.on('data', common.mustCall((data) => { + assert.strictEqual(data.toString(), 'foo'); + })); + + d.on('end', common.mustCall(() => { + assert.strictEqual(d.readable, false); + })); +} + +{ + const d = Duplex.from(makeATestReadableStream('foo')); + + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, false); + + d.on('data', 
common.mustCall((data) => { + assert.strictEqual(data.toString(), 'foo'); + })); + + d.on('end', common.mustCall(() => { + assert.strictEqual(d.readable, false); + })); +} + +{ + let ret = ''; + const d = Duplex.from({ + writable: makeATestWritableStream((chunk) => ret += chunk), + }); + + assert.strictEqual(d.readable, false); + assert.strictEqual(d.writable, true); + + d.end('foo'); + d.on('finish', common.mustCall(() => { + assert.strictEqual(ret, 'foo'); + assert.strictEqual(d.writable, false); + })); +} + +{ + let ret = ''; + const d = Duplex.from(makeATestWritableStream((chunk) => ret += chunk)); + + assert.strictEqual(d.readable, false); + assert.strictEqual(d.writable, true); + + d.end('foo'); + d.on('finish', common.mustCall(() => { + assert.strictEqual(ret, 'foo'); + assert.strictEqual(d.writable, false); + })); +} + +{ + let ret = ''; + const d = Duplex.from({ + readable: makeATestReadableStream('foo'), + writable: makeATestWritableStream((chunk) => ret += chunk), + }); + + d.end('bar'); + + d.on('data', common.mustCall((data) => { + assert.strictEqual(data.toString(), 'foo'); + })); + + d.on('end', common.mustCall(() => { + assert.strictEqual(d.readable, false); + })); + + d.on('finish', common.mustCall(() => { + assert.strictEqual(ret, 'bar'); + assert.strictEqual(d.writable, false); + })); +} diff --git a/test/js/node/test/parallel/test-stream-duplex.js b/test/js/node/test/parallel/test-stream-duplex.js new file mode 100644 index 0000000000..490744910c --- /dev/null +++ b/test/js/node/test/parallel/test-stream-duplex.js @@ -0,0 +1,133 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const Duplex = require('stream').Duplex; +const { ReadableStream, WritableStream } = require('stream/web'); + +const stream = new Duplex({ objectMode: true }); + +assert(Duplex() instanceof Duplex); +assert(stream._readableState.objectMode); +assert(stream._writableState.objectMode); +assert(stream.allowHalfOpen); +assert.strictEqual(stream.listenerCount('end'), 0); + +let written; +let read; + +stream._write = (obj, _, cb) => { + written = obj; + cb(); +}; + +stream._read = () => {}; + +stream.on('data', (obj) => { + read = obj; +}); + +stream.push({ val: 1 }); +stream.end({ val: 2 }); + +process.on('exit', () => { + assert.strictEqual(read.val, 1); + assert.strictEqual(written.val, 2); +}); + +// Duplex.fromWeb +{ + const dataToRead = Buffer.from('hello'); + const dataToWrite = Buffer.from('world'); + + const readable = new ReadableStream({ + start(controller) { + controller.enqueue(dataToRead); + }, + }); + + const writable = new WritableStream({ + write: common.mustCall((chunk) => { + assert.strictEqual(chunk, dataToWrite); + }) + }); + + const pair = { readable, writable }; + const duplex = Duplex.fromWeb(pair); + + duplex.write(dataToWrite); + duplex.once('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, dataToRead); + })); +} + +// Duplex.fromWeb - using utf8 and objectMode +{ + const dataToRead = 'hello'; + const dataToWrite = 'world'; + + const readable = new ReadableStream({ + start(controller) { + controller.enqueue(dataToRead); + }, + }); + + const writable = new WritableStream({ + write: common.mustCall((chunk) => { + assert.strictEqual(chunk, dataToWrite); + }) + }); + + const pair = { + readable, + writable + }; + const duplex = Duplex.fromWeb(pair, { encoding: 'utf8', objectMode: true }); + + duplex.write(dataToWrite); + duplex.once('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, dataToRead); + })); +} +// Duplex.toWeb +{ + 
const dataToRead = Buffer.from('hello'); + const dataToWrite = Buffer.from('world'); + + const duplex = Duplex({ + read() { + this.push(dataToRead); + this.push(null); + }, + write: common.mustCall((chunk) => { + assert.strictEqual(chunk, dataToWrite); + }) + }); + + const { writable, readable } = Duplex.toWeb(duplex); + writable.getWriter().write(dataToWrite); + + readable.getReader().read().then(common.mustCall((result) => { + assert.deepStrictEqual(Buffer.from(result.value), dataToRead); + })); +} diff --git a/test/js/node/test/parallel/test-stream-duplexpair.js b/test/js/node/test/parallel/test-stream-duplexpair.js new file mode 100644 index 0000000000..3e1b3044dd --- /dev/null +++ b/test/js/node/test/parallel/test-stream-duplexpair.js @@ -0,0 +1,74 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { Duplex, duplexPair } = require('stream'); + +{ + const pair = duplexPair(); + + assert(pair[0] instanceof Duplex); + assert(pair[1] instanceof Duplex); + assert.notStrictEqual(pair[0], pair[1]); +} + +{ + // Verify that the iterable for array assignment works + const [ clientSide, serverSide ] = duplexPair(); + assert(clientSide instanceof Duplex); + assert(serverSide instanceof Duplex); + clientSide.on( + 'data', + common.mustCall((d) => assert.strictEqual(`${d}`, 'foo')) + ); + clientSide.on('end', common.mustNotCall()); + serverSide.write('foo'); +} + +{ + const [ clientSide, serverSide ] = duplexPair(); + assert(clientSide instanceof Duplex); + assert(serverSide instanceof Duplex); + serverSide.on( + 'data', + common.mustCall((d) => assert.strictEqual(`${d}`, 'foo')) + ); + serverSide.on('end', common.mustCall()); + clientSide.end('foo'); +} + +{ + const [ serverSide, clientSide ] = duplexPair(); + serverSide.cork(); + serverSide.write('abc'); + serverSide.write('12'); + serverSide.end('\n'); + serverSide.uncork(); + let characters = ''; + clientSide.on('readable', function() { + for (let segment; (segment = 
this.read()) !== null;) + characters += segment; + }); + clientSide.on('end', common.mustCall(function() { + assert.strictEqual(characters, 'abc12\n'); + })); +} + +// Test the case where the the _write never calls [kCallback] +// because a zero-size push doesn't trigger a _read +{ + const [ serverSide, clientSide ] = duplexPair(); + serverSide.write(''); + serverSide.write('12'); + serverSide.write(''); + serverSide.write(''); + serverSide.end('\n'); + let characters = ''; + clientSide.on('readable', function() { + for (let segment; (segment = this.read()) !== null;) + characters += segment; + }); + clientSide.on('end', common.mustCall(function() { + assert.strictEqual(characters, '12\n'); + })); +} diff --git a/test/js/node/test/parallel/test-stream-event-names.js b/test/js/node/test/parallel/test-stream-event-names.js new file mode 100644 index 0000000000..e9eab40088 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-event-names.js @@ -0,0 +1,42 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); +const { Readable, Writable, Duplex } = require('stream'); + +{ + const stream = new Readable(); + assert.strictEqual(stream.eventNames().length, 0); +} + +{ + const stream = new Readable(); + stream.on('foo', () => {}); + stream.on('data', () => {}); + stream.on('error', () => {}); + assert.deepStrictEqual(stream.eventNames(), ['error', 'data', 'foo']); +} + +{ + const stream = new Writable(); + assert.strictEqual(stream.eventNames().length, 0); +} + +{ + const stream = new Writable(); + stream.on('foo', () => {}); + stream.on('drain', () => {}); + stream.on('prefinish', () => {}); + assert.deepStrictEqual(stream.eventNames(), ['prefinish', 'drain', 'foo']); +} +{ + const stream = new Duplex(); + assert.strictEqual(stream.eventNames().length, 0); +} + +{ + const stream = new Duplex(); + stream.on('foo', () => {}); + stream.on('finish', () => {}); + assert.deepStrictEqual(stream.eventNames(), ['finish', 'foo']); +} diff --git 
a/test/js/node/test/parallel/test-stream-filter.js b/test/js/node/test/parallel/test-stream-filter.js new file mode 100644 index 0000000000..e7711012bb --- /dev/null +++ b/test/js/node/test/parallel/test-stream-filter.js @@ -0,0 +1,174 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); +const { once } = require('events'); +const { setTimeout } = require('timers/promises'); + +{ + // Filter works on synchronous streams with a synchronous predicate + const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => x < 3); + const result = [1, 2]; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + })().then(common.mustCall()); +} + +{ + // Filter works on synchronous streams with an asynchronous predicate + const stream = Readable.from([1, 2, 3, 4, 5]).filter(async (x) => { + await Promise.resolve(); + return x > 3; + }); + const result = [4, 5]; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + })().then(common.mustCall()); +} + +{ + // Map works on asynchronous streams with a asynchronous mapper + const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { + await Promise.resolve(); + return x + x; + }).filter((x) => x > 5); + const result = [6, 8, 10]; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + })().then(common.mustCall()); +} + +{ + // Filter works on an infinite stream + const stream = Readable.from(async function* () { + while (true) yield 1; + }()).filter(common.mustCall(async (x) => { + return x < 3; + }, 5)); + (async () => { + let i = 1; + for await (const item of stream) { + assert.strictEqual(item, 1); + if (++i === 5) break; + } + })().then(common.mustCall()); +} + +{ + // Filter works on constructor created streams + let i = 0; + const stream = new Readable({ + read() { + if (i === 10) { + 
this.push(null); + return; + } + this.push(Uint8Array.from([i])); + i++; + }, + highWaterMark: 0, + }).filter(common.mustCall(async ([x]) => { + return x !== 5; + }, 10)); + (async () => { + const result = (await stream.toArray()).map((x) => x[0]); + const expected = [...Array(10).keys()].filter((x) => x !== 5); + assert.deepStrictEqual(result, expected); + })().then(common.mustCall()); +} + +{ + // Throwing an error during `filter` (sync) + const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => { + if (x === 3) { + throw new Error('boom'); + } + return true; + }); + assert.rejects( + stream.map((x) => x + x).toArray(), + /boom/, + ).then(common.mustCall()); +} + +{ + // Throwing an error during `filter` (async) + const stream = Readable.from([1, 2, 3, 4, 5]).filter(async (x) => { + if (x === 3) { + throw new Error('boom'); + } + return true; + }); + assert.rejects( + stream.filter(() => true).toArray(), + /boom/, + ).then(common.mustCall()); +} + +{ + // Concurrency + AbortSignal + const ac = new AbortController(); + let calls = 0; + const stream = Readable.from([1, 2, 3, 4]).filter(async (_, { signal }) => { + calls++; + await once(signal, 'abort'); + }, { signal: ac.signal, concurrency: 2 }); + // pump + assert.rejects(async () => { + for await (const item of stream) { + // nope + console.log(item); + } + }, { + name: 'AbortError', + }).then(common.mustCall()); + + setImmediate(() => { + ac.abort(); + assert.strictEqual(calls, 2); + }); +} + +{ + // Concurrency result order + const stream = Readable.from([1, 2]).filter(async (item, { signal }) => { + await setTimeout(10 - item, { signal }); + return true; + }, { concurrency: 2 }); + + (async () => { + const expected = [1, 2]; + for await (const item of stream) { + assert.strictEqual(item, expected.shift()); + } + })().then(common.mustCall()); +} + +{ + // Error cases + assert.throws(() => Readable.from([1]).filter(1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).filter((x) => x, { 
concurrency: 'Foo' }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).filter((x) => x, 1), /ERR_INVALID_ARG_TYPE/); +} +{ + // Test result is a Readable + const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => true); + assert.strictEqual(stream.readable, true); +} +{ + const stream = Readable.from([1, 2, 3, 4, 5]); + Object.defineProperty(stream, 'map', { + value: common.mustNotCall(), + }); + // Check that map isn't getting called. + stream.filter(() => true); +} diff --git a/test/js/node/test/parallel/test-stream-flatMap.js b/test/js/node/test/parallel/test-stream-flatMap.js new file mode 100644 index 0000000000..9295b8a0f8 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-flatMap.js @@ -0,0 +1,129 @@ +'use strict'; + +const common = require('../common'); +const fixtures = require('../common/fixtures'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); +const { setTimeout } = require('timers/promises'); +const { createReadStream } = require('fs'); + +function oneTo5() { + return Readable.from([1, 2, 3, 4, 5]); +} + +{ + // flatMap works on synchronous streams with a synchronous mapper + (async () => { + assert.deepStrictEqual( + await oneTo5().flatMap((x) => [x + x]).toArray(), + [2, 4, 6, 8, 10] + ); + assert.deepStrictEqual( + await oneTo5().flatMap(() => []).toArray(), + [] + ); + assert.deepStrictEqual( + await oneTo5().flatMap((x) => [x, x]).toArray(), + [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] + ); + })().then(common.mustCall()); +} + + +{ + // flatMap works on sync/async streams with an asynchronous mapper + (async () => { + assert.deepStrictEqual( + await oneTo5().flatMap(async (x) => [x, x]).toArray(), + [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] + ); + const asyncOneTo5 = oneTo5().map(async (x) => x); + assert.deepStrictEqual( + await asyncOneTo5.flatMap(async (x) => [x, x]).toArray(), + [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] + ); + })().then(common.mustCall()); +} +{ + // flatMap works on a stream where mapping returns a 
stream + (async () => { + const result = await oneTo5().flatMap(async (x) => { + return Readable.from([x, x]); + }).toArray(); + assert.deepStrictEqual(result, [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]); + })().then(common.mustCall()); + // flatMap works on an objectMode stream where mappign returns a stream + (async () => { + const result = await oneTo5().flatMap(() => { + return createReadStream(fixtures.path('x.txt')); + }).toArray(); + // The resultant stream is in object mode so toArray shouldn't flatten + assert.strictEqual(result.length, 5); + assert.deepStrictEqual( + Buffer.concat(result).toString(), + 'xyz\n'.repeat(5) + ); + + })().then(common.mustCall()); + +} + +{ + // Concurrency + AbortSignal + const ac = new AbortController(); + const stream = oneTo5().flatMap(common.mustNotCall(async (_, { signal }) => { + await setTimeout(100, { signal }); + }), { signal: ac.signal, concurrency: 2 }); + // pump + assert.rejects(async () => { + for await (const item of stream) { + // nope + console.log(item); + } + }, { + name: 'AbortError', + }).then(common.mustCall()); + + queueMicrotask(() => { + ac.abort(); + }); +} + +{ + // Already aborted AbortSignal + const stream = oneTo5().flatMap(common.mustNotCall(async (_, { signal }) => { + await setTimeout(100, { signal }); + }), { signal: AbortSignal.abort() }); + // pump + assert.rejects(async () => { + for await (const item of stream) { + // nope + console.log(item); + } + }, { + name: 'AbortError', + }).then(common.mustCall()); +} + +{ + // Error cases + assert.throws(() => Readable.from([1]).flatMap(1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).flatMap((x) => x, { concurrency: 'Foo' }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).flatMap((x) => x, 1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).flatMap((x) => x, { signal: true }), /ERR_INVALID_ARG_TYPE/); +} +{ + // Test result is a Readable + const stream = oneTo5().flatMap((x) => x); + 
assert.strictEqual(stream.readable, true); +} +{ + const stream = oneTo5(); + Object.defineProperty(stream, 'map', { + value: common.mustNotCall(), + }); + // Check that map isn't getting called. + stream.flatMap(() => true); +} diff --git a/test/js/node/test/parallel/test-stream-forEach.js b/test/js/node/test/parallel/test-stream-forEach.js new file mode 100644 index 0000000000..627ea0ccf1 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-forEach.js @@ -0,0 +1,139 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); +const { once } = require('events'); + +{ + // forEach works on synchronous streams with a synchronous predicate + const stream = Readable.from([1, 2, 3]); + const result = [1, 2, 3]; + (async () => { + await stream.forEach((value) => assert.strictEqual(value, result.shift())); + })().then(common.mustCall()); +} + +{ + // forEach works an asynchronous streams + const stream = Readable.from([1, 2, 3]).filter(async (x) => { + await Promise.resolve(); + return true; + }); + const result = [1, 2, 3]; + (async () => { + await stream.forEach((value) => assert.strictEqual(value, result.shift())); + })().then(common.mustCall()); +} + +{ + // forEach works on asynchronous streams with a asynchronous forEach fn + const stream = Readable.from([1, 2, 3]).filter(async (x) => { + await Promise.resolve(); + return true; + }); + const result = [1, 2, 3]; + (async () => { + await stream.forEach(async (value) => { + await Promise.resolve(); + assert.strictEqual(value, result.shift()); + }); + })().then(common.mustCall()); +} + +{ + // forEach works on an infinite stream + const ac = new AbortController(); + const { signal } = ac; + const stream = Readable.from(async function* () { + while (true) yield 1; + }(), { signal }); + let i = 0; + assert.rejects(stream.forEach(common.mustCall((x) => { + i++; + if (i === 10) ac.abort(); + assert.strictEqual(x, 1); + }, 10)), { name: 
'AbortError' }).then(common.mustCall()); +} + +{ + // Emitting an error during `forEach` + const stream = Readable.from([1, 2, 3, 4, 5]); + assert.rejects(stream.forEach(async (x) => { + if (x === 3) { + stream.emit('error', new Error('boom')); + } + }), /boom/).then(common.mustCall()); +} + +{ + // Throwing an error during `forEach` (sync) + const stream = Readable.from([1, 2, 3, 4, 5]); + assert.rejects(stream.forEach((x) => { + if (x === 3) { + throw new Error('boom'); + } + }), /boom/).then(common.mustCall()); +} + +{ + // Throwing an error during `forEach` (async) + const stream = Readable.from([1, 2, 3, 4, 5]); + assert.rejects(stream.forEach(async (x) => { + if (x === 3) { + return Promise.reject(new Error('boom')); + } + }), /boom/).then(common.mustCall()); +} + +{ + // Concurrency + AbortSignal + const ac = new AbortController(); + let calls = 0; + const forEachPromise = + Readable.from([1, 2, 3, 4]).forEach(async (_, { signal }) => { + calls++; + await once(signal, 'abort'); + }, { signal: ac.signal, concurrency: 2, highWaterMark: 0 }); + // pump + assert.rejects(async () => { + await forEachPromise; + }, { + name: 'AbortError', + }).then(common.mustCall()); + + setImmediate(() => { + ac.abort(); + assert.strictEqual(calls, 2); + }); +} + +{ + // Error cases + assert.rejects(async () => { + await Readable.from([1]).forEach(1); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1]).forEach((x) => x, { + concurrency: 'Foo' + }); + }, /ERR_OUT_OF_RANGE/).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1]).forEach((x) => x, 1); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); +} +{ + // Test result is a Promise + const stream = Readable.from([1, 2, 3, 4, 5]).forEach((_) => true); + assert.strictEqual(typeof stream.then, 'function'); +} +{ + const stream = Readable.from([1, 2, 3, 4, 5]); + Object.defineProperty(stream, 'map', { + value: common.mustNotCall(), + }); + // 
Check that map isn't getting called. + stream.forEach(() => true); +} diff --git a/test/js/node/test/parallel/test-stream-ispaused.js b/test/js/node/test/parallel/test-stream-ispaused.js new file mode 100644 index 0000000000..a57928f934 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-ispaused.js @@ -0,0 +1,44 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; +require('../common'); +const assert = require('assert'); +const stream = require('stream'); + +const readable = new stream.Readable(); + +// _read is a noop, here. +readable._read = Function(); + +// Default state of a stream is not "paused" +assert.ok(!readable.isPaused()); + +// Make the stream start flowing... +readable.on('data', Function()); + +// still not paused. 
+assert.ok(!readable.isPaused()); + +readable.pause(); +assert.ok(readable.isPaused()); +readable.resume(); +assert.ok(!readable.isPaused()); diff --git a/test/js/node/test/parallel/test-stream-map.js b/test/js/node/test/parallel/test-stream-map.js new file mode 100644 index 0000000000..4a7a53c559 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-map.js @@ -0,0 +1,360 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); +const { once } = require('events'); +const { setTimeout } = require('timers/promises'); + +function createDependentPromises(n) { + const promiseAndResolveArray = []; + + for (let i = 0; i < n; i++) { + let res; + const promise = new Promise((resolve) => { + if (i === 0) { + res = resolve; + return; + } + res = () => promiseAndResolveArray[i - 1][0].then(resolve); + }); + + promiseAndResolveArray.push([promise, res]); + } + + return promiseAndResolveArray; +} + +{ + // Map works on synchronous streams with a synchronous mapper + const stream = Readable.from([1, 2, 3, 4, 5]).map((x) => x + x); + (async () => { + assert.deepStrictEqual(await stream.toArray(), [2, 4, 6, 8, 10]); + })().then(common.mustCall()); +} + +{ + // Map works on synchronous streams with an asynchronous mapper + const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { + await Promise.resolve(); + return x + x; + }); + (async () => { + assert.deepStrictEqual(await stream.toArray(), [2, 4, 6, 8, 10]); + })().then(common.mustCall()); +} + +{ + // Map works on asynchronous streams with a asynchronous mapper + const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { + return x + x; + }).map((x) => x + x); + (async () => { + assert.deepStrictEqual(await stream.toArray(), [4, 8, 12, 16, 20]); + })().then(common.mustCall()); +} + +{ + // Map works on an infinite stream + const stream = Readable.from(async function* () { + while (true) yield 1; + }()).map(common.mustCall(async (x) 
=> { + return x + x; + }, 5)); + (async () => { + let i = 1; + for await (const item of stream) { + assert.strictEqual(item, 2); + if (++i === 5) break; + } + })().then(common.mustCall()); +} + +{ + // Map works on non-objectMode streams + const stream = new Readable({ + read() { + this.push(Uint8Array.from([1])); + this.push(Uint8Array.from([2])); + this.push(null); + } + }).map(async ([x]) => { + return x + x; + }).map((x) => x + x); + const result = [4, 8]; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + })().then(common.mustCall()); +} + +{ + // Does not care about data events + const source = new Readable({ + read() { + this.push(Uint8Array.from([1])); + this.push(Uint8Array.from([2])); + this.push(null); + } + }); + setImmediate(() => stream.emit('data', Uint8Array.from([1]))); + const stream = source.map(async ([x]) => { + return x + x; + }).map((x) => x + x); + const result = [4, 8]; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + })().then(common.mustCall()); +} + +{ + // Emitting an error during `map` + const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { + if (x === 3) { + stream.emit('error', new Error('boom')); + } + return x + x; + }); + assert.rejects( + stream.map((x) => x + x).toArray(), + /boom/, + ).then(common.mustCall()); +} + +{ + // Throwing an error during `map` (sync) + const stream = Readable.from([1, 2, 3, 4, 5]).map((x) => { + if (x === 3) { + throw new Error('boom'); + } + return x + x; + }); + assert.rejects( + stream.map((x) => x + x).toArray(), + /boom/, + ).then(common.mustCall()); +} + + +{ + // Throwing an error during `map` (async) + const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { + if (x === 3) { + throw new Error('boom'); + } + return x + x; + }); + assert.rejects( + stream.map((x) => x + x).toArray(), + /boom/, + ).then(common.mustCall()); +} + +{ + // Concurrency + AbortSignal + const ac 
= new AbortController(); + const range = Readable.from([1, 2, 3, 4, 5]); + const stream = range.map(common.mustCall(async (_, { signal }) => { + await once(signal, 'abort'); + throw signal.reason; + }, 2), { signal: ac.signal, concurrency: 2, highWaterMark: 0 }); + // pump + assert.rejects(async () => { + for await (const item of stream) { + assert.fail('should not reach here, got ' + item); + } + }, { + name: 'AbortError', + }).then(common.mustCall()); + + setImmediate(() => { + ac.abort(); + }); +} + +{ + // Concurrency result order + const stream = Readable.from([1, 2]).map(async (item, { signal }) => { + await setTimeout(10 - item, { signal }); + return item; + }, { concurrency: 2 }); + + (async () => { + const expected = [1, 2]; + for await (const item of stream) { + assert.strictEqual(item, expected.shift()); + } + })().then(common.mustCall()); +} + + +{ + // highWaterMark with small concurrency + const finishOrder = []; + + const promises = createDependentPromises(4); + + const raw = Readable.from([2, 0, 1, 3]); + const stream = raw.map(async (item) => { + const [promise, resolve] = promises[item]; + resolve(); + + await promise; + finishOrder.push(item); + return item; + }, { concurrency: 2 }); + + (async () => { + await stream.toArray(); + + assert.deepStrictEqual(finishOrder, [0, 1, 2, 3]); + })().then(common.mustCall(), common.mustNotCall()); +} + +{ + // highWaterMark with a lot of items and large concurrency + const finishOrder = []; + + const promises = createDependentPromises(20); + + const input = [10, 1, 0, 3, 4, 2, 5, 7, 8, 9, 6, 11, 12, 13, 18, 15, 16, 17, 14, 19]; + const raw = Readable.from(input); + // Should be + // 10, 1, 0, 3, 4, 2 | next: 0 + // 10, 1, 3, 4, 2, 5 | next: 1 + // 10, 3, 4, 2, 5, 7 | next: 2 + // 10, 3, 4, 5, 7, 8 | next: 3 + // 10, 4, 5, 7, 8, 9 | next: 4 + // 10, 5, 7, 8, 9, 6 | next: 5 + // 10, 7, 8, 9, 6, 11 | next: 6 + // 10, 7, 8, 9, 11, 12 | next: 7 + // 10, 8, 9, 11, 12, 13 | next: 8 + // 10, 9, 11, 12, 13, 18 | next: 
9 + // 10, 11, 12, 13, 18, 15 | next: 10 + // 11, 12, 13, 18, 15, 16 | next: 11 + // 12, 13, 18, 15, 16, 17 | next: 12 + // 13, 18, 15, 16, 17, 14 | next: 13 + // 18, 15, 16, 17, 14, 19 | next: 14 + // 18, 15, 16, 17, 19 | next: 15 + // 18, 16, 17, 19 | next: 16 + // 18, 17, 19 | next: 17 + // 18, 19 | next: 18 + // 19 | next: 19 + // + + const stream = raw.map(async (item) => { + const [promise, resolve] = promises[item]; + resolve(); + + await promise; + finishOrder.push(item); + return item; + }, { concurrency: 6 }); + + (async () => { + const outputOrder = await stream.toArray(); + + assert.deepStrictEqual(outputOrder, input); + assert.deepStrictEqual(finishOrder, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]); + })().then(common.mustCall(), common.mustNotCall()); +} + +{ + // Custom highWaterMark with a lot of items and large concurrency + const finishOrder = []; + + const promises = createDependentPromises(20); + + const input = [11, 1, 0, 3, 4, 2, 5, 7, 8, 9, 6, 10, 12, 13, 18, 15, 16, 17, 14, 19]; + const raw = Readable.from(input); + // Should be + // 11, 1, 0, 3, 4 | next: 0, buffer: [] + // 11, 1, 3, 4, 2 | next: 1, buffer: [0] + // 11, 3, 4, 2, 5 | next: 2, buffer: [0, 1] + // 11, 3, 4, 5, 7 | next: 3, buffer: [0, 1, 2] + // 11, 4, 5, 7, 8 | next: 4, buffer: [0, 1, 2, 3] + // 11, 5, 7, 8, 9 | next: 5, buffer: [0, 1, 2, 3, 4] + // 11, 7, 8, 9, 6 | next: 6, buffer: [0, 1, 2, 3, 4, 5] + // 11, 7, 8, 9, 10 | next: 7, buffer: [0, 1, 2, 3, 4, 5, 6] -- buffer full + // 11, 8, 9, 10, 12 | next: 8, buffer: [0, 1, 2, 3, 4, 5, 6] + // 11, 9, 10, 12, 13 | next: 9, buffer: [0, 1, 2, 3, 4, 5, 6] + // 11, 10, 12, 13, 18 | next: 10, buffer: [0, 1, 2, 3, 4, 5, 6] + // 11, 12, 13, 18, 15 | next: 11, buffer: [0, 1, 2, 3, 4, 5, 6] + // 12, 13, 18, 15, 16 | next: 12, buffer: [] -- all items flushed as 11 is consumed and all the items wait for it + // 13, 18, 15, 16, 17 | next: 13, buffer: [] + // 18, 15, 16, 17, 14 | next: 14, buffer: [] + // 18, 
15, 16, 17, 19 | next: 15, buffer: [14] + // 18, 16, 17, 19 | next: 16, buffer: [14, 15] + // 18, 17, 19 | next: 17, buffer: [14, 15, 16] + // 18, 19 | next: 18, buffer: [14, 15, 16, 17] + // 19 | next: 19, buffer: [] -- all items flushed + // + + const stream = raw.map(async (item) => { + const [promise, resolve] = promises[item]; + resolve(); + + await promise; + finishOrder.push(item); + return item; + }, { concurrency: 5, highWaterMark: 7 }); + + (async () => { + const outputOrder = await stream.toArray(); + + assert.deepStrictEqual(outputOrder, input); + assert.deepStrictEqual(finishOrder, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]); + })().then(common.mustCall(), common.mustNotCall()); +} + +{ + // Where there is a delay between the first and the next item it should not wait for filled queue + // before yielding to the user + const promises = createDependentPromises(3); + + const raw = Readable.from([0, 1, 2]); + + const stream = raw + .map(async (item) => { + if (item !== 0) { + await promises[item][0]; + } + + return item; + }, { concurrency: 2 }) + .map((item) => { + // eslint-disable-next-line no-unused-vars + for (const [_, resolve] of promises) { + resolve(); + } + + return item; + }); + + (async () => { + await stream.toArray(); + })().then(common.mustCall(), common.mustNotCall()); +} + +{ + // Error cases + assert.throws(() => Readable.from([1]).map(1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).map((x) => x, { + concurrency: 'Foo' + }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).map((x) => x, { + concurrency: -1 + }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).map((x) => x, 1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).map((x) => x, { signal: true }), /ERR_INVALID_ARG_TYPE/); +} +{ + // Test result is a Readable + const stream = Readable.from([1, 2, 3, 4, 5]).map((x) => x); + assert.strictEqual(stream.readable, true); +} diff --git 
a/test/js/node/test/parallel/test-stream-objectmode-undefined.js b/test/js/node/test/parallel/test-stream-objectmode-undefined.js new file mode 100644 index 0000000000..64b960f92b --- /dev/null +++ b/test/js/node/test/parallel/test-stream-objectmode-undefined.js @@ -0,0 +1,44 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const { Readable, Writable, Transform } = require('stream'); + +{ + const stream = new Readable({ + objectMode: true, + read: common.mustCall(() => { + stream.push(undefined); + stream.push(null); + }) + }); + + stream.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, undefined); + })); +} + +{ + const stream = new Writable({ + objectMode: true, + write: common.mustCall((chunk) => { + assert.strictEqual(chunk, undefined); + }) + }); + + stream.write(undefined); +} + +{ + const stream = new Transform({ + objectMode: true, + transform: common.mustCall((chunk) => { + stream.push(chunk); + }) + }); + + stream.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, undefined); + })); + + stream.write(undefined); +} diff --git a/test/js/node/test/parallel/test-stream-pipe-deadlock.js b/test/js/node/test/parallel/test-stream-pipe-deadlock.js new file mode 100644 index 0000000000..bf75445877 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-pipe-deadlock.js @@ -0,0 +1,27 @@ +'use strict'; + +const common = require('../common'); +const { Readable, Writable } = require('stream'); + +// https://github.com/nodejs/node/issues/48666 +(async () => { + // Prepare src that is internally ended, with buffered data pending + const src = new Readable({ read() {} }); + src.push(Buffer.alloc(100)); + src.push(null); + src.pause(); + + // Give it time to settle + await new Promise((resolve) => setImmediate(resolve)); + + const dst = new Writable({ + highWaterMark: 1000, + write(buf, enc, cb) { + process.nextTick(cb); + } + }); + + dst.write(Buffer.alloc(1000)); // Fill write buffer + 
dst.on('finish', common.mustCall()); + src.pipe(dst); +})().then(common.mustCall()); diff --git a/test/js/node/test/parallel/test-stream-pipe-without-listenerCount.js b/test/js/node/test/parallel/test-stream-pipe-without-listenerCount.js new file mode 100644 index 0000000000..c2b73c74b1 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-pipe-without-listenerCount.js @@ -0,0 +1,17 @@ +'use strict'; +const common = require('../common'); +const stream = require('stream'); + +const r = new stream.Stream(); +r.listenerCount = undefined; + +const w = new stream.Stream(); +w.listenerCount = undefined; + +w.on('pipe', function() { + r.emit('error', new Error('Readable Error')); + w.emit('error', new Error('Writable Error')); +}); +r.on('error', common.mustCall()); +w.on('error', common.mustCall()); +r.pipe(w); diff --git a/test/js/node/test/parallel/test-stream-pipeline-duplex.js b/test/js/node/test/parallel/test-stream-pipeline-duplex.js new file mode 100644 index 0000000000..0dbd27a717 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-pipeline-duplex.js @@ -0,0 +1,21 @@ +'use strict'; + +const common = require('../common'); +const { pipeline, Duplex, PassThrough } = require('stream'); +const assert = require('assert'); + +const remote = new PassThrough(); +const local = new Duplex({ + read() {}, + write(chunk, enc, callback) { + callback(); + } +}); + +pipeline(remote, local, remote, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); +})); + +setImmediate(() => { + remote.end(); +}); diff --git a/test/js/node/test/parallel/test-stream-pipeline-process.js b/test/js/node/test/parallel/test-stream-pipeline-process.js index a535e7263e..2212c702ff 100644 --- a/test/js/node/test/parallel/test-stream-pipeline-process.js +++ b/test/js/node/test/parallel/test-stream-pipeline-process.js @@ -13,14 +13,7 @@ if (process.argv[2] === 'child') { ); } else { const cp = require('child_process'); - cp.exec([ - 'echo', - 'hello', - '|', 
- `"${process.execPath}"`, - `"${__filename}"`, - 'child', - ].join(' '), common.mustSucceed((stdout) => { + cp.exec(...common.escapePOSIXShell`echo hello | "${process.execPath}" "${__filename}" child`, common.mustSucceed((stdout) => { assert.strictEqual(stdout.split(os.EOL).shift().trim(), 'hello'); })); } diff --git a/test/js/node/test/parallel/test-stream-readable-default-encoding.js b/test/js/node/test/parallel/test-stream-readable-default-encoding.js new file mode 100644 index 0000000000..954f1643ba --- /dev/null +++ b/test/js/node/test/parallel/test-stream-readable-default-encoding.js @@ -0,0 +1,37 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const { Readable } = require('stream'); + +{ + assert.throws(() => { + new Readable({ + read: () => {}, + defaultEncoding: 'my invalid encoding', + }); + }, { + code: 'ERR_UNKNOWN_ENCODING', + }); +} + +{ + const r = new Readable({ + read() {}, + defaultEncoding: 'hex' + }); + + r.push('ab'); + + r.on('data', common.mustCall((chunk) => assert.strictEqual(chunk.toString('hex'), 'ab')), 1); +} + +{ + const r = new Readable({ + read() {}, + defaultEncoding: 'hex', + }); + + r.push('xy', 'utf-8'); + + r.on('data', common.mustCall((chunk) => assert.strictEqual(chunk.toString('utf-8'), 'xy')), 1); +} diff --git a/test/js/node/test/parallel/test-stream-readable-dispose.js b/test/js/node/test/parallel/test-stream-readable-dispose.js new file mode 100644 index 0000000000..e940bf1688 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-readable-dispose.js @@ -0,0 +1,23 @@ +'use strict'; + +const common = require('../common'); +const { Readable } = require('stream'); +const assert = require('assert'); + +{ + const read = new Readable({ + read() {} + }); + read.resume(); + + read.on('end', common.mustNotCall('no end event')); + read.on('close', common.mustCall()); + read.on('error', common.mustCall((err) => { + assert.strictEqual(err.name, 'AbortError'); + })); + + 
read[Symbol.asyncDispose]().then(common.mustCall(() => { + assert.strictEqual(read.errored.name, 'AbortError'); + assert.strictEqual(read.destroyed, true); + })); +} diff --git a/test/js/node/test/parallel/test-stream-readable-from-web-termination.js b/test/js/node/test/parallel/test-stream-readable-from-web-termination.js new file mode 100644 index 0000000000..68ed7d6969 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-readable-from-web-termination.js @@ -0,0 +1,15 @@ +'use strict'; +require('../common'); +const { Readable } = require('stream'); + +{ + const r = Readable.from(['data']); + + const wrapper = Readable.fromWeb(Readable.toWeb(r)); + + wrapper.on('data', () => { + // Destroying wrapper while emitting data should not cause uncaught + // exceptions + wrapper.destroy(); + }); +} diff --git a/test/js/node/test/parallel/test-stream-readable-pause-and-resume.js b/test/js/node/test/parallel/test-stream-readable-pause-and-resume.js new file mode 100644 index 0000000000..53229ec333 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-readable-pause-and-resume.js @@ -0,0 +1,74 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { Readable } = require('stream'); + +let ticks = 18; +let expectedData = 19; + +const rs = new Readable({ + objectMode: true, + read: () => { + if (ticks-- > 0) + return process.nextTick(() => rs.push({})); + rs.push({}); + rs.push(null); + } +}); + +rs.on('end', common.mustCall()); +readAndPause(); + +function readAndPause() { + // Does a on(data) -> pause -> wait -> resume -> on(data) ... loop. + // Expects on(data) to never fire if the stream is paused. 
+ const ondata = common.mustCall((data) => { + rs.pause(); + + expectedData--; + if (expectedData <= 0) + return; + + setImmediate(function() { + rs.removeListener('data', ondata); + readAndPause(); + rs.resume(); + }); + }, 1); // Only call ondata once + + rs.on('data', ondata); +} + +{ + const readable = new Readable({ + read() {} + }); + + function read() {} + + readable.setEncoding('utf8'); + readable.on('readable', read); + readable.removeListener('readable', read); + readable.pause(); + + process.nextTick(function() { + assert(readable.isPaused()); + }); +} + +{ + const { PassThrough } = require('stream'); + + const source3 = new PassThrough(); + const target3 = new PassThrough(); + + const chunk = Buffer.allocUnsafe(1000); + while (target3.write(chunk)); + + source3.pipe(target3); + target3.on('drain', common.mustCall(() => { + assert(!source3.isPaused()); + })); + target3.on('data', () => {}); +} diff --git a/test/js/node/test/parallel/test-stream-readable-readable-then-resume.js b/test/js/node/test/parallel/test-stream-readable-readable-then-resume.js new file mode 100644 index 0000000000..63dbc306e7 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-readable-readable-then-resume.js @@ -0,0 +1,31 @@ +'use strict'; + +const common = require('../common'); +const { Readable } = require('stream'); +const assert = require('assert'); + +// This test verifies that a stream could be resumed after +// removing the readable event in the same tick + +check(new Readable({ + objectMode: true, + highWaterMark: 1, + read() { + if (!this.first) { + this.push('hello'); + this.first = true; + return; + } + + this.push(null); + } +})); + +function check(s) { + const readableListener = common.mustNotCall(); + s.on('readable', readableListener); + s.on('end', common.mustCall()); + assert.strictEqual(s.removeListener, s.off); + s.removeListener('readable', readableListener); + s.resume(); +} diff --git 
a/test/js/node/test/parallel/test-stream-readable-to-web-termination.js b/test/js/node/test/parallel/test-stream-readable-to-web-termination.js new file mode 100644 index 0000000000..13fce9bc71 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-readable-to-web-termination.js @@ -0,0 +1,12 @@ +'use strict'; +require('../common'); +const { Readable } = require('stream'); + +{ + const r = Readable.from([]); + // Cancelling reader while closing should not cause uncaught exceptions + r.on('close', () => reader.cancel()); + + const reader = Readable.toWeb(r).getReader(); + reader.read(); +} diff --git a/test/js/node/test/parallel/test-stream-readable-unpipe-resume.js b/test/js/node/test/parallel/test-stream-readable-unpipe-resume.js new file mode 100644 index 0000000000..b40f724bcc --- /dev/null +++ b/test/js/node/test/parallel/test-stream-readable-unpipe-resume.js @@ -0,0 +1,20 @@ +'use strict'; + +const common = require('../common'); +const stream = require('stream'); +const fs = require('fs'); + +const readStream = fs.createReadStream(process.execPath); + +const transformStream = new stream.Transform({ + transform: common.mustCall(() => { + readStream.unpipe(); + readStream.resume(); + }) +}); + +readStream.on('end', common.mustCall()); + +readStream + .pipe(transformStream) + .resume(); diff --git a/test/js/node/test/parallel/test-stream-readable-unshift.js b/test/js/node/test/parallel/test-stream-readable-unshift.js index cccc834fc1..e39a9abf36 100644 --- a/test/js/node/test/parallel/test-stream-readable-unshift.js +++ b/test/js/node/test/parallel/test-stream-readable-unshift.js @@ -156,9 +156,9 @@ const { Readable } = require('stream'); // Remove the 'readable' listener before unshifting stream.removeListener('readable', onRead); stream.unshift('a'); - stream.on('data', (chunk) => { - console.log(chunk.length); - }); + stream.on('data', common.mustCall((chunk) => { + // console.log(chunk.length); + }, 50)); break; } } diff --git 
a/test/js/node/test/parallel/test-stream-reduce.js b/test/js/node/test/parallel/test-stream-reduce.js new file mode 100644 index 0000000000..4cee2b5d71 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-reduce.js @@ -0,0 +1,132 @@ +'use strict'; + +const common = require('../common'); +const { + Readable, +} = require('stream'); +const assert = require('assert'); + +function sum(p, c) { + return p + c; +} + +{ + // Does the same thing as `(await stream.toArray()).reduce(...)` + (async () => { + const tests = [ + [[], sum, 0], + [[1], sum, 0], + [[1, 2, 3, 4, 5], sum, 0], + [[...Array(100).keys()], sum, 0], + [['a', 'b', 'c'], sum, ''], + [[1, 2], sum], + [[1, 2, 3], (x, y) => y], + ]; + for (const [values, fn, initial] of tests) { + const streamReduce = await Readable.from(values) + .reduce(fn, initial); + const arrayReduce = values.reduce(fn, initial); + assert.deepStrictEqual(streamReduce, arrayReduce); + } + // Does the same thing as `(await stream.toArray()).reduce(...)` with an + // asynchronous reducer + for (const [values, fn, initial] of tests) { + const streamReduce = await Readable.from(values) + .map(async (x) => x) + .reduce(fn, initial); + const arrayReduce = values.reduce(fn, initial); + assert.deepStrictEqual(streamReduce, arrayReduce); + } + })().then(common.mustCall()); +} +{ + // Works with an async reducer, with or without initial value + (async () => { + const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c, 0); + assert.strictEqual(six, 6); + })().then(common.mustCall()); + (async () => { + const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c); + assert.strictEqual(six, 6); + })().then(common.mustCall()); +} +{ + // Works lazily + assert.rejects(Readable.from([1, 2, 3, 4, 5, 6]) + .map(common.mustCall((x) => { + return x; + }, 3)) // Two consumed and one buffered by `map` due to default concurrency + .reduce(async (p, c) => { + if (p === 1) { + throw new Error('boom'); + } + return c; + }, 0) + , 
/boom/).then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + assert.rejects(async () => { + await Readable.from([1, 2, 3]).reduce(async (p, c) => { + if (c === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(); + }, 0, { signal: ac.signal }); + }, { + name: 'AbortError', + }).then(common.mustCall()); + ac.abort(); +} + + +{ + // Support for AbortSignal - pre aborted + const stream = Readable.from([1, 2, 3]); + assert.rejects(async () => { + await stream.reduce(async (p, c) => { + if (c === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(); + }, 0, { signal: AbortSignal.abort() }); + }, { + name: 'AbortError', + }).then(common.mustCall(() => { + assert.strictEqual(stream.destroyed, true); + })); +} + +{ + // Support for AbortSignal - deep + const stream = Readable.from([1, 2, 3]); + assert.rejects(async () => { + await stream.reduce(async (p, c, { signal }) => { + signal.addEventListener('abort', common.mustCall(), { once: true }); + if (c === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(); + }, 0, { signal: AbortSignal.abort() }); + }, { + name: 'AbortError', + }).then(common.mustCall(() => { + assert.strictEqual(stream.destroyed, true); + })); +} + +{ + // Error cases + assert.rejects(() => Readable.from([]).reduce(1), /TypeError/).then(common.mustCall()); + assert.rejects(() => Readable.from([]).reduce('5'), /TypeError/).then(common.mustCall()); + assert.rejects(() => Readable.from([]).reduce((x, y) => x + y, 0, 1), /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + assert.rejects(() => Readable.from([]).reduce((x, y) => x + y, 0, { signal: true }), /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); +} + +{ + // Test result is a Promise + const result = Readable.from([1, 2, 3, 4, 5]).reduce(sum, 0); + assert.ok(result instanceof Promise); +} diff --git 
a/test/js/node/test/parallel/test-stream-some-find-every.mjs b/test/js/node/test/parallel/test-stream-some-find-every.mjs new file mode 100644 index 0000000000..0617102bc4 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-some-find-every.mjs @@ -0,0 +1,172 @@ +import * as common from '../common/index.mjs'; +import { setTimeout } from 'timers/promises'; +import { Readable } from 'stream'; +import assert from 'assert'; + + +function oneTo5() { + return Readable.from([1, 2, 3, 4, 5]); +} + +function oneTo5Async() { + return oneTo5().map(async (x) => { + await Promise.resolve(); + return x; + }); +} +{ + // Some, find, and every work with a synchronous stream and predicate + assert.strictEqual(await oneTo5().some((x) => x > 3), true); + assert.strictEqual(await oneTo5().every((x) => x > 3), false); + assert.strictEqual(await oneTo5().find((x) => x > 3), 4); + assert.strictEqual(await oneTo5().some((x) => x > 6), false); + assert.strictEqual(await oneTo5().every((x) => x < 6), true); + assert.strictEqual(await oneTo5().find((x) => x > 6), undefined); + assert.strictEqual(await Readable.from([]).some(() => true), false); + assert.strictEqual(await Readable.from([]).every(() => true), true); + assert.strictEqual(await Readable.from([]).find(() => true), undefined); +} + +{ + // Some, find, and every work with an asynchronous stream and synchronous predicate + assert.strictEqual(await oneTo5Async().some((x) => x > 3), true); + assert.strictEqual(await oneTo5Async().every((x) => x > 3), false); + assert.strictEqual(await oneTo5Async().find((x) => x > 3), 4); + assert.strictEqual(await oneTo5Async().some((x) => x > 6), false); + assert.strictEqual(await oneTo5Async().every((x) => x < 6), true); + assert.strictEqual(await oneTo5Async().find((x) => x > 6), undefined); +} + +{ + // Some, find, and every work on synchronous streams with an asynchronous predicate + assert.strictEqual(await oneTo5().some(async (x) => x > 3), true); + assert.strictEqual(await 
oneTo5().every(async (x) => x > 3), false); + assert.strictEqual(await oneTo5().find(async (x) => x > 3), 4); + assert.strictEqual(await oneTo5().some(async (x) => x > 6), false); + assert.strictEqual(await oneTo5().every(async (x) => x < 6), true); + assert.strictEqual(await oneTo5().find(async (x) => x > 6), undefined); +} + +{ + // Some, find, and every work on asynchronous streams with an asynchronous predicate + assert.strictEqual(await oneTo5Async().some(async (x) => x > 3), true); + assert.strictEqual(await oneTo5Async().every(async (x) => x > 3), false); + assert.strictEqual(await oneTo5Async().find(async (x) => x > 3), 4); + assert.strictEqual(await oneTo5Async().some(async (x) => x > 6), false); + assert.strictEqual(await oneTo5Async().every(async (x) => x < 6), true); + assert.strictEqual(await oneTo5Async().find(async (x) => x > 6), undefined); +} + +{ + async function checkDestroyed(stream) { + await setTimeout(); + assert.strictEqual(stream.destroyed, true); + } + + { + // Some, find, and every short circuit + const someStream = oneTo5(); + await someStream.some(common.mustCall((x) => x > 2, 3)); + await checkDestroyed(someStream); + + const everyStream = oneTo5(); + await everyStream.every(common.mustCall((x) => x < 3, 3)); + await checkDestroyed(everyStream); + + const findStream = oneTo5(); + await findStream.find(common.mustCall((x) => x > 1, 2)); + await checkDestroyed(findStream); + + // When short circuit isn't possible the whole stream is iterated + await oneTo5().some(common.mustCall(() => false, 5)); + await oneTo5().every(common.mustCall(() => true, 5)); + await oneTo5().find(common.mustCall(() => false, 5)); + } + + { + // Some, find, and every short circuit async stream/predicate + const someStream = oneTo5Async(); + await someStream.some(common.mustCall(async (x) => x > 2, 3)); + await checkDestroyed(someStream); + + const everyStream = oneTo5Async(); + await everyStream.every(common.mustCall(async (x) => x < 3, 3)); + await 
checkDestroyed(everyStream); + + const findStream = oneTo5Async(); + await findStream.find(common.mustCall(async (x) => x > 1, 2)); + await checkDestroyed(findStream); + + // When short circuit isn't possible the whole stream is iterated + await oneTo5Async().some(common.mustCall(async () => false, 5)); + await oneTo5Async().every(common.mustCall(async () => true, 5)); + await oneTo5Async().find(common.mustCall(async () => false, 5)); + } +} + +{ + // Concurrency doesn't affect which value is found. + const found = await Readable.from([1, 2]).find(async (val) => { + if (val === 1) { + await setTimeout(100); + } + return true; + }, { concurrency: 2 }); + assert.strictEqual(found, 1); +} + +{ + // Support for AbortSignal + for (const op of ['some', 'every', 'find']) { + { + const ac = new AbortController(); + assert.rejects(Readable.from([1, 2, 3])[op]( + () => new Promise(() => { }), + { signal: ac.signal } + ), { + name: 'AbortError', + }, `${op} should abort correctly with sync abort`).then(common.mustCall()); + ac.abort(); + } + { + // Support for pre-aborted AbortSignal + assert.rejects(Readable.from([1, 2, 3])[op]( + () => new Promise(() => { }), + { signal: AbortSignal.abort() } + ), { + name: 'AbortError', + }, `${op} should abort with pre-aborted abort controller`).then(common.mustCall()); + } + } +} +{ + // Error cases + for (const op of ['some', 'every', 'find']) { + assert.rejects(async () => { + await Readable.from([1])[op](1); + }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid function`).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1])[op]((x) => x, { + concurrency: 'Foo' + }); + }, /ERR_OUT_OF_RANGE/, `${op} should throw for invalid concurrency`).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1])[op]((x) => x, 1); + }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid concurrency`).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1])[op]((x) 
=> x, { + signal: true + }); + }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid signal`).then(common.mustCall()); + } +} +{ + for (const op of ['some', 'every', 'find']) { + const stream = oneTo5(); + Object.defineProperty(stream, 'map', { + value: common.mustNotCall(), + }); + // Check that map isn't getting called. + stream[op](() => {}); + } +} diff --git a/test/js/node/test/parallel/test-stream-toArray.js b/test/js/node/test/parallel/test-stream-toArray.js new file mode 100644 index 0000000000..690b3c4b08 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-toArray.js @@ -0,0 +1,91 @@ +'use strict'; + +const common = require('../common'); +const { Readable } = require('stream'); +const assert = require('assert'); + +{ + // Works on a synchronous stream + (async () => { + const tests = [ + [], + [1], + [1, 2, 3], + Array(100).fill().map((_, i) => i), + ]; + for (const test of tests) { + const stream = Readable.from(test); + const result = await stream.toArray(); + assert.deepStrictEqual(result, test); + } + })().then(common.mustCall()); +} + +{ + // Works on a non-object-mode stream + (async () => { + const firstBuffer = Buffer.from([1, 2, 3]); + const secondBuffer = Buffer.from([4, 5, 6]); + const stream = Readable.from( + [firstBuffer, secondBuffer], + { objectMode: false }); + const result = await stream.toArray(); + assert.strictEqual(Array.isArray(result), true); + assert.deepStrictEqual(result, [firstBuffer, secondBuffer]); + })().then(common.mustCall()); +} + +{ + // Works on an asynchronous stream + (async () => { + const tests = [ + [], + [1], + [1, 2, 3], + Array(100).fill().map((_, i) => i), + ]; + for (const test of tests) { + const stream = Readable.from(test).map((x) => Promise.resolve(x)); + const result = await stream.toArray(); + assert.deepStrictEqual(result, test); + } + })().then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + let stream; + assert.rejects(async () => { + stream = 
Readable.from([1, 2, 3]).map(async (x) => { + if (x === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(x); + }); + await stream.toArray({ signal: ac.signal }); + }, { + name: 'AbortError', + }).then(common.mustCall(() => { + // Only stops toArray, does not destroy the stream + assert(stream.destroyed, false); + })); + ac.abort(); +} +{ + // Test result is a Promise + const result = Readable.from([1, 2, 3, 4, 5]).toArray(); + assert.strictEqual(result instanceof Promise, true); +} +{ + // Error cases + assert.rejects(async () => { + await Readable.from([1]).toArray(1); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + + assert.rejects(async () => { + await Readable.from([1]).toArray({ + signal: true + }); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); +} diff --git a/test/js/node/test/parallel/test-stream-toWeb-allows-server-response.js b/test/js/node/test/parallel/test-stream-toWeb-allows-server-response.js new file mode 100644 index 0000000000..fd7a14d596 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-toWeb-allows-server-response.js @@ -0,0 +1,29 @@ +'use strict'; +const common = require('../common'); +const { Writable } = require('stream'); + +const assert = require('assert'); +const http = require('http'); + +// Check if Writable.toWeb works on the response object after creating a server. 
+const server = http.createServer( + common.mustCall((req, res) => { + const webStreamResponse = Writable.toWeb(res); + assert.strictEqual(webStreamResponse instanceof WritableStream, true); + res.end(); + }) +); + +server.listen( + 0, + common.mustCall(() => { + http.get( + { + port: server.address().port, + }, + common.mustCall(() => { + server.close(); + }) + ); + }) +); diff --git a/test/js/node/test/parallel/test-stream-transform-destroy.js b/test/js/node/test/parallel/test-stream-transform-destroy.js new file mode 100644 index 0000000000..428bab9ce3 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-transform-destroy.js @@ -0,0 +1,154 @@ +'use strict'; + +const common = require('../common'); +const { Transform } = require('stream'); +const assert = require('assert'); + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + + transform.resume(); + + transform.on('end', common.mustNotCall()); + transform.on('close', common.mustCall()); + transform.on('finish', common.mustNotCall()); + + transform.destroy(); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + transform.resume(); + + const expected = new Error('kaboom'); + + transform.on('end', common.mustNotCall()); + transform.on('finish', common.mustNotCall()); + transform.on('close', common.mustCall()); + transform.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + transform.destroy(expected); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + + transform._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(err); + }, 1); + + const expected = new Error('kaboom'); + + transform.on('finish', common.mustNotCall('no finish event')); + transform.on('close', common.mustCall()); + transform.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + transform.destroy(expected); +} + +{ + const expected = new Error('kaboom'); + 
const transform = new Transform({ + transform(chunk, enc, cb) {}, + destroy: common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(); + }, 1) + }); + transform.resume(); + + transform.on('end', common.mustNotCall('no end event')); + transform.on('close', common.mustCall()); + transform.on('finish', common.mustNotCall('no finish event')); + + // Error is swallowed by the custom _destroy + transform.on('error', common.mustNotCall('no error event')); + + transform.destroy(expected); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + + transform._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(); + }, 1); + + transform.destroy(); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + transform.resume(); + + transform._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + process.nextTick(() => { + this.push(null); + this.end(); + cb(); + }); + }, 1); + + const fail = common.mustNotCall('no event'); + + transform.on('finish', fail); + transform.on('end', fail); + transform.on('close', common.mustCall()); + + transform.destroy(); + + transform.removeListener('end', fail); + transform.removeListener('finish', fail); + transform.on('end', common.mustCall()); + transform.on('finish', common.mustNotCall()); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + + const expected = new Error('kaboom'); + + transform._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(expected); + }, 1); + + transform.on('close', common.mustCall()); + transform.on('finish', common.mustNotCall('no finish event')); + transform.on('end', common.mustNotCall('no end event')); + transform.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + transform.destroy(); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + 
transform.on('error', common.mustCall((err) => { + assert.strictEqual(err.name, 'AbortError'); + })); + transform.on('close', common.mustCall()); + transform[Symbol.asyncDispose]().then(common.mustCall()); +} diff --git a/test/js/node/test/parallel/test-stream-transform-hwm0.js b/test/js/node/test/parallel/test-stream-transform-hwm0.js new file mode 100644 index 0000000000..8e8971f21f --- /dev/null +++ b/test/js/node/test/parallel/test-stream-transform-hwm0.js @@ -0,0 +1,28 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { Transform } = require('stream'); + +const t = new Transform({ + objectMode: true, highWaterMark: 0, + transform(chunk, enc, callback) { + process.nextTick(() => callback(null, chunk, enc)); + } +}); + +assert.strictEqual(t.write(1), false); +t.on('drain', common.mustCall(() => { + assert.strictEqual(t.write(2), false); + t.end(); +})); + +t.once('readable', common.mustCall(() => { + assert.strictEqual(t.read(), 1); + setImmediate(common.mustCall(() => { + assert.strictEqual(t.read(), null); + t.once('readable', common.mustCall(() => { + assert.strictEqual(t.read(), 2); + })); + })); +})); diff --git a/test/js/node/test/parallel/test-stream-transform-split-highwatermark.js b/test/js/node/test/parallel/test-stream-transform-split-highwatermark.js new file mode 100644 index 0000000000..290c7d957c --- /dev/null +++ b/test/js/node/test/parallel/test-stream-transform-split-highwatermark.js @@ -0,0 +1,73 @@ +'use strict'; +require('../common'); +const assert = require('assert'); + +const { Transform, Readable, Writable, getDefaultHighWaterMark } = require('stream'); + +const DEFAULT = getDefaultHighWaterMark(); + +function testTransform(expectedReadableHwm, expectedWritableHwm, options) { + const t = new Transform(options); + assert.strictEqual(t._readableState.highWaterMark, expectedReadableHwm); + assert.strictEqual(t._writableState.highWaterMark, expectedWritableHwm); +} + +// Test overriding 
defaultHwm +testTransform(666, DEFAULT, { readableHighWaterMark: 666 }); +testTransform(DEFAULT, 777, { writableHighWaterMark: 777 }); +testTransform(666, 777, { + readableHighWaterMark: 666, + writableHighWaterMark: 777, +}); + +// Test highWaterMark overriding +testTransform(555, 555, { + highWaterMark: 555, + readableHighWaterMark: 666, +}); +testTransform(555, 555, { + highWaterMark: 555, + writableHighWaterMark: 777, +}); +testTransform(555, 555, { + highWaterMark: 555, + readableHighWaterMark: 666, + writableHighWaterMark: 777, +}); + +// Test undefined, null +[undefined, null].forEach((v) => { + testTransform(DEFAULT, DEFAULT, { readableHighWaterMark: v }); + testTransform(DEFAULT, DEFAULT, { writableHighWaterMark: v }); + testTransform(666, DEFAULT, { highWaterMark: v, readableHighWaterMark: 666 }); + testTransform(DEFAULT, 777, { highWaterMark: v, writableHighWaterMark: 777 }); +}); + +// test NaN +{ + assert.throws(() => { + new Transform({ readableHighWaterMark: NaN }); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.readableHighWaterMark' is invalid. " + + 'Received NaN' + }); + + assert.throws(() => { + new Transform({ writableHighWaterMark: NaN }); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.writableHighWaterMark' is invalid. 
" + + 'Received NaN' + }); +} + +// Test non Duplex streams ignore the options +{ + const r = new Readable({ readableHighWaterMark: 666 }); + assert.strictEqual(r._readableState.highWaterMark, DEFAULT); + const w = new Writable({ writableHighWaterMark: 777 }); + assert.strictEqual(w._writableState.highWaterMark, DEFAULT); +} diff --git a/test/js/node/test/parallel/test-stream-transform-split-objectmode.js b/test/js/node/test/parallel/test-stream-transform-split-objectmode.js new file mode 100644 index 0000000000..f1341290d2 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-transform-split-objectmode.js @@ -0,0 +1,83 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; +require('../common'); +const assert = require('assert'); + +const Transform = require('stream').Transform; + +const parser = new Transform({ + readableObjectMode: true +}); + +assert(parser._readableState.objectMode); +assert(!parser._writableState.objectMode); +assert.strictEqual(parser.readableHighWaterMark, 16); +assert.strictEqual(parser.writableHighWaterMark, process.platform === 'win32' ? 16 * 1024 : 64 * 1024); +assert.strictEqual(parser.readableHighWaterMark, + parser._readableState.highWaterMark); +assert.strictEqual(parser.writableHighWaterMark, + parser._writableState.highWaterMark); + +parser._transform = function(chunk, enc, callback) { + callback(null, { val: chunk[0] }); +}; + +let parsed; + +parser.on('data', function(obj) { + parsed = obj; +}); + +parser.end(Buffer.from([42])); + +process.on('exit', function() { + assert.strictEqual(parsed.val, 42); +}); + + +const serializer = new Transform({ writableObjectMode: true }); + +assert(!serializer._readableState.objectMode); +assert(serializer._writableState.objectMode); +assert.strictEqual(serializer.readableHighWaterMark, process.platform === 'win32' ? 
16 * 1024 : 64 * 1024); +assert.strictEqual(serializer.writableHighWaterMark, 16); +assert.strictEqual(parser.readableHighWaterMark, + parser._readableState.highWaterMark); +assert.strictEqual(parser.writableHighWaterMark, + parser._writableState.highWaterMark); + +serializer._transform = function(obj, _, callback) { + callback(null, Buffer.from([obj.val])); +}; + +let serialized; + +serializer.on('data', function(chunk) { + serialized = chunk; +}); + +serializer.write({ val: 42 }); + +process.on('exit', function() { + assert.strictEqual(serialized[0], 42); +}); diff --git a/test/js/node/test/parallel/test-stream-typedarray.js b/test/js/node/test/parallel/test-stream-typedarray.js new file mode 100644 index 0000000000..a374989276 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-typedarray.js @@ -0,0 +1,105 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); + +const { Readable, Writable } = require('stream'); + +const buffer = Buffer.from('ABCD'); +const views = common.getArrayBufferViews(buffer); + +{ + // Simple Writable test. + let n = 0; + const writable = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert(chunk instanceof Buffer); + assert(ArrayBuffer.isView(chunk)); + assert.deepStrictEqual(common.getBufferSources(chunk)[n], views[n]); + n++; + cb(); + }, views.length), + }); + + views.forEach((msg) => writable.write(msg)); + writable.end(); +} + +{ + // Writable test with object mode True. + let n = 0; + const writable = new Writable({ + objectMode: true, + write: common.mustCall((chunk, encoding, cb) => { + assert(!(chunk instanceof Buffer)); + assert(ArrayBuffer.isView(chunk)); + assert.deepStrictEqual(common.getBufferSources(chunk)[n], views[n]); + n++; + cb(); + }, views.length), + }); + + views.forEach((msg) => writable.write(msg)); + writable.end(); +} + + +{ + // Writable test, multiple writes carried out via writev. 
+ let n = 0; + let callback; + const writable = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert(chunk instanceof Buffer); + assert(ArrayBuffer.isView(chunk)); + assert.deepStrictEqual(common.getBufferSources(chunk)[n], views[n]); + n++; + callback = cb; + }), + + writev: common.mustCall((chunks, cb) => { + assert.strictEqual(chunks.length, views.length); + let res = ''; + for (const chunk of chunks) { + assert.strictEqual(chunk.encoding, 'buffer'); + res += chunk.chunk; + } + assert.strictEqual(res, 'ABCD'.repeat(9)); + }), + + }); + views.forEach((msg) => writable.write(msg)); + writable.end(views[0]); + callback(); +} + + +{ + // Simple Readable test. + const readable = new Readable({ + read() {} + }); + + readable.push(views[1]); + readable.push(views[2]); + readable.unshift(views[0]); + + const buf = readable.read(); + assert(buf instanceof Buffer); + assert.deepStrictEqual([...buf], [...views[0], ...views[1], ...views[2]]); +} + +{ + // Readable test, setEncoding. + const readable = new Readable({ + read() {} + }); + + readable.setEncoding('utf8'); + + readable.push(views[1]); + readable.push(views[2]); + readable.unshift(views[0]); + + const out = readable.read(); + assert.strictEqual(out, 'ABCD'.repeat(3)); +} diff --git a/test/js/node/test/parallel/test-stream-uint8array.js b/test/js/node/test/parallel/test-stream-uint8array.js new file mode 100644 index 0000000000..f1de4c873f --- /dev/null +++ b/test/js/node/test/parallel/test-stream-uint8array.js @@ -0,0 +1,101 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); + +const { Readable, Writable } = require('stream'); + +const ABC = new Uint8Array([0x41, 0x42, 0x43]); +const DEF = new Uint8Array([0x44, 0x45, 0x46]); +const GHI = new Uint8Array([0x47, 0x48, 0x49]); + +{ + // Simple Writable test. 
+ + let n = 0; + const writable = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert(chunk instanceof Buffer); + if (n++ === 0) { + assert.strictEqual(String(chunk), 'ABC'); + } else { + assert.strictEqual(String(chunk), 'DEF'); + } + + cb(); + }, 2) + }); + + writable.write(ABC); + writable.end(DEF); +} + +{ + // Writable test, pass in Uint8Array in object mode. + + const writable = new Writable({ + objectMode: true, + write: common.mustCall((chunk, encoding, cb) => { + assert(!(chunk instanceof Buffer)); + assert(chunk instanceof Uint8Array); + assert.strictEqual(chunk, ABC); + assert.strictEqual(encoding, undefined); + cb(); + }) + }); + + writable.end(ABC); +} + +{ + // Writable test, multiple writes carried out via writev. + let callback; + + const writable = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert(chunk instanceof Buffer); + assert.strictEqual(encoding, 'buffer'); + assert.strictEqual(String(chunk), 'ABC'); + callback = cb; + }), + writev: common.mustCall((chunks, cb) => { + assert.strictEqual(chunks.length, 2); + assert.strictEqual(chunks[0].encoding, 'buffer'); + assert.strictEqual(chunks[1].encoding, 'buffer'); + assert.strictEqual(chunks[0].chunk + chunks[1].chunk, 'DEFGHI'); + }) + }); + + writable.write(ABC); + writable.write(DEF); + writable.end(GHI); + callback(); +} + +{ + // Simple Readable test. + const readable = new Readable({ + read() {} + }); + + readable.push(DEF); + readable.unshift(ABC); + + const buf = readable.read(); + assert(buf instanceof Buffer); + assert.deepStrictEqual([...buf], [...ABC, ...DEF]); +} + +{ + // Readable test, setEncoding. 
+ const readable = new Readable({ + read() {} + }); + + readable.setEncoding('utf8'); + + readable.push(DEF); + readable.unshift(ABC); + + const out = readable.read(); + assert.strictEqual(out, 'ABCDEF'); +} diff --git a/test/js/node/test/parallel/test-stream-writable-change-default-encoding.js b/test/js/node/test/parallel/test-stream-writable-change-default-encoding.js index 94a892567c..9a9482cb62 100644 --- a/test/js/node/test/parallel/test-stream-writable-change-default-encoding.js +++ b/test/js/node/test/parallel/test-stream-writable-change-default-encoding.js @@ -65,7 +65,7 @@ assert.throws(() => { }, { name: 'TypeError', code: 'ERR_UNKNOWN_ENCODING', - message: 'Unknown encoding: {}' + message: 'Unknown encoding: [object Object]' }); (function checkVariableCaseEncoding() { diff --git a/test/js/node/test/parallel/test-stream-writable-decoded-encoding.js b/test/js/node/test/parallel/test-stream-writable-decoded-encoding.js new file mode 100644 index 0000000000..e3caa9928f --- /dev/null +++ b/test/js/node/test/parallel/test-stream-writable-decoded-encoding.js @@ -0,0 +1,105 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; +require('../common'); +const assert = require('assert'); + +const stream = require('stream'); + +class MyWritable extends stream.Writable { + constructor(fn, options) { + super(options); + this.fn = fn; + } + + _write(chunk, encoding, callback) { + this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding); + callback(); + } +} + +{ + const m = new MyWritable(function(isBuffer, type, enc) { + assert(isBuffer); + assert.strictEqual(type, 'object'); + assert.strictEqual(enc, 'buffer'); + }, { decodeStrings: true }); + m.write('some-text', 'utf8'); + m.end(); +} + +{ + const m = new MyWritable(function(isBuffer, type, enc) { + assert(!isBuffer); + assert.strictEqual(type, 'string'); + assert.strictEqual(enc, 'utf8'); + }, { decodeStrings: false }); + m.write('some-text', 'utf8'); + m.end(); +} + +{ + assert.throws(() => { + const m = new MyWritable(null, { + defaultEncoding: 'my invalid encoding', + }); + m.end(); + }, { + code: 'ERR_UNKNOWN_ENCODING', + }); +} + +{ + const w = new MyWritable(function(isBuffer, type, enc) { + assert(!isBuffer); + assert.strictEqual(type, 'string'); + assert.strictEqual(enc, 'hex'); + }, { + defaultEncoding: 'hex', + decodeStrings: false + }); + w.write('asd'); + w.end(); +} + +{ + const w = new MyWritable(function(isBuffer, type, enc) { + assert(!isBuffer); + assert.strictEqual(type, 'string'); + assert.strictEqual(enc, 'utf8'); + }, { + defaultEncoding: null, + decodeStrings: false + }); + w.write('asd'); + w.end(); +} + +{ + const m = new MyWritable(function(isBuffer, type, enc) { + assert.strictEqual(type, 'object'); + assert.strictEqual(enc, 'utf8'); + }, { defaultEncoding: 'hex', + objectMode: true }); + m.write({ foo: 'bar' }, 'utf8'); + 
m.end(); +} diff --git a/test/js/node/test/parallel/test-stream-writable-destroy.js b/test/js/node/test/parallel/test-stream-writable-destroy.js new file mode 100644 index 0000000000..05d7932b88 --- /dev/null +++ b/test/js/node/test/parallel/test-stream-writable-destroy.js @@ -0,0 +1,501 @@ +'use strict'; + +const common = require('../common'); +const { Writable, addAbortSignal } = require('stream'); +const assert = require('assert'); + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.on('finish', common.mustNotCall()); + write.on('close', common.mustCall()); + + write.destroy(); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { + this.destroy(new Error('asd')); + cb(); + } + }); + + write.on('error', common.mustCall()); + write.on('finish', common.mustNotCall()); + write.end('asd'); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + const expected = new Error('kaboom'); + + write.on('finish', common.mustNotCall()); + write.on('close', common.mustCall()); + write.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + write.destroy(expected); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write._destroy = function(err, cb) { + assert.strictEqual(err, expected); + cb(err); + }; + + const expected = new Error('kaboom'); + + write.on('finish', common.mustNotCall('no finish event')); + write.on('close', common.mustCall()); + write.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + write.destroy(expected); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); }, + destroy: common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(); + }) + }); + + const expected = new 
Error('kaboom'); + + write.on('finish', common.mustNotCall('no finish event')); + write.on('close', common.mustCall()); + + // Error is swallowed by the custom _destroy + write.on('error', common.mustNotCall('no error event')); + + write.destroy(expected); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(); + }); + + write.destroy(); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + process.nextTick(() => { + this.end(); + cb(); + }); + }); + + const fail = common.mustNotCall('no finish event'); + + write.on('finish', fail); + write.on('close', common.mustCall()); + + write.destroy(); + + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + const expected = new Error('kaboom'); + + write._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(expected); + }); + + write.on('close', common.mustCall()); + write.on('finish', common.mustNotCall('no finish event')); + write.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + write.destroy(); + assert.strictEqual(write.destroyed, true); +} + +{ + // double error case + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + let ticked = false; + write.on('close', common.mustCall(() => { + assert.strictEqual(ticked, true); + })); + write.on('error', common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.message, 'kaboom 1'); + assert.strictEqual(write._writableState.errorEmitted, true); + })); + + const expected = new Error('kaboom 1'); + write.destroy(expected); + write.destroy(new Error('kaboom 2')); + 
assert.strictEqual(write._writableState.errored, expected); + assert.strictEqual(write._writableState.errorEmitted, false); + assert.strictEqual(write.destroyed, true); + ticked = true; +} + +{ + const writable = new Writable({ + destroy: common.mustCall(function(err, cb) { + process.nextTick(cb, new Error('kaboom 1')); + }), + write(chunk, enc, cb) { + cb(); + } + }); + + let ticked = false; + writable.on('close', common.mustCall(() => { + writable.on('error', common.mustNotCall()); + writable.destroy(new Error('hello')); + assert.strictEqual(ticked, true); + assert.strictEqual(writable._writableState.errorEmitted, true); + })); + writable.on('error', common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.message, 'kaboom 1'); + assert.strictEqual(writable._writableState.errorEmitted, true); + })); + + writable.destroy(); + assert.strictEqual(writable.destroyed, true); + assert.strictEqual(writable._writableState.errored, null); + assert.strictEqual(writable._writableState.errorEmitted, false); + + // Test case where `writable.destroy()` is called again with an error before + // the `_destroy()` callback is called. 
+ writable.destroy(new Error('kaboom 2')); + assert.strictEqual(writable._writableState.errorEmitted, false); + assert.strictEqual(writable._writableState.errored, null); + + ticked = true; +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.destroyed = true; + assert.strictEqual(write.destroyed, true); + + // The internal destroy() mechanism should not be triggered + write.on('close', common.mustNotCall()); + write.destroy(); +} + +{ + function MyWritable() { + assert.strictEqual(this.destroyed, false); + this.destroyed = false; + Writable.call(this); + } + + Object.setPrototypeOf(MyWritable.prototype, Writable.prototype); + Object.setPrototypeOf(MyWritable, Writable); + + new MyWritable(); +} + +{ + // Destroy and destroy callback + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.destroy(); + + const expected = new Error('kaboom'); + + write.destroy(expected, common.mustCall((err) => { + assert.strictEqual(err, undefined); + })); +} + +{ + // Checks that `._undestroy()` restores the state so that `final` will be + // called again. 
+ const write = new Writable({ + write: common.mustNotCall(), + final: common.mustCall((cb) => cb(), 2), + autoDestroy: true + }); + + write.end(); + write.once('close', common.mustCall(() => { + write._undestroy(); + write.end(); + })); +} + +{ + const write = new Writable(); + + write.destroy(); + write.on('error', common.mustNotCall()); + write.write('asd', common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_DESTROYED', + message: 'Cannot call write after a stream was destroyed' + })); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.on('error', common.mustNotCall()); + + write.cork(); + write.write('asd', common.mustCall()); + write.uncork(); + + write.cork(); + write.write('asd', common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_DESTROYED', + message: 'Cannot call write after a stream was destroyed' + })); + write.destroy(); + write.write('asd', common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_DESTROYED', + message: 'Cannot call write after a stream was destroyed' + })); + write.uncork(); +} + +{ + // Call end(cb) after error & destroy + + const write = new Writable({ + write(chunk, enc, cb) { cb(new Error('asd')); } + }); + write.on('error', common.mustCall(() => { + write.destroy(); + let ticked = false; + write.end(common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); + })); + ticked = true; + })); + write.write('asd'); +} + +{ + // Call end(cb) after finish & destroy + + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + write.on('finish', common.mustCall(() => { + write.destroy(); + let ticked = false; + write.end(common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED'); + })); + ticked = true; + })); + write.end(); +} + +{ + // Call end(cb) after error & destroy and don't trigger + // unhandled exception. 
+ + const write = new Writable({ + write(chunk, enc, cb) { process.nextTick(cb); } + }); + const _err = new Error('asd'); + write.once('error', common.mustCall((err) => { + assert.strictEqual(err.message, 'asd'); + })); + write.end('asd', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + write.destroy(_err); +} + +{ + // Call buffered write callback with error + + const _err = new Error('asd'); + const write = new Writable({ + write(chunk, enc, cb) { + process.nextTick(cb, _err); + }, + autoDestroy: false + }); + write.cork(); + write.write('asd', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + write.write('asd', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + write.on('error', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + write.uncork(); +} + +{ + // Ensure callback order. + + let state = 0; + const write = new Writable({ + write(chunk, enc, cb) { + // `setImmediate()` is used on purpose to ensure the callback is called + // after `process.nextTick()` callbacks. 
+ setImmediate(cb); + } + }); + write.write('asd', common.mustCall(() => { + assert.strictEqual(state++, 0); + })); + write.write('asd', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); + assert.strictEqual(state++, 1); + })); + write.destroy(); +} + +{ + const write = new Writable({ + autoDestroy: false, + write(chunk, enc, cb) { + cb(); + cb(); + } + }); + + write.on('error', common.mustCall(() => { + assert(write._writableState.errored); + })); + write.write('asd'); +} + +{ + const ac = new AbortController(); + const write = addAbortSignal(ac.signal, new Writable({ + write(chunk, enc, cb) { cb(); } + })); + + write.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + assert.strictEqual(write.destroyed, true); + })); + write.write('asd'); + ac.abort(); +} + +{ + const ac = new AbortController(); + const write = new Writable({ + signal: ac.signal, + write(chunk, enc, cb) { cb(); } + }); + + write.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + assert.strictEqual(write.destroyed, true); + })); + write.write('asd'); + ac.abort(); +} + +{ + const signal = AbortSignal.abort(); + + const write = new Writable({ + signal, + write(chunk, enc, cb) { cb(); } + }); + + write.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + assert.strictEqual(write.destroyed, true); + })); +} + +{ + // Destroy twice + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.end(common.mustCall()); + write.destroy(); + write.destroy(); +} + +{ + // https://github.com/nodejs/node/issues/39356 + const s = new Writable({ + final() {} + }); + const _err = new Error('oh no'); + // Remove `callback` and it works + s.end(common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + s.on('error', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + s.destroy(_err); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) 
{ cb(); } + }); + + write.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + assert.strictEqual(write.destroyed, true); + })); + write[Symbol.asyncDispose]().then(common.mustCall()); +} diff --git a/test/js/node/test/parallel/test-stream-writable-end-cb-error.js b/test/js/node/test/parallel/test-stream-writable-end-cb-error.js new file mode 100644 index 0000000000..f140d939bc --- /dev/null +++ b/test/js/node/test/parallel/test-stream-writable-end-cb-error.js @@ -0,0 +1,78 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const stream = require('stream'); + +{ + // Invoke end callback on failure. + const writable = new stream.Writable(); + + const _err = new Error('kaboom'); + writable._write = (chunk, encoding, cb) => { + process.nextTick(cb, _err); + }; + + writable.on('error', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + writable.write('asd'); + writable.end(common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + writable.end(common.mustCall((err) => { + assert.strictEqual(err, _err); + })); +} + +{ + // Don't invoke end callback twice + const writable = new stream.Writable(); + + writable._write = (chunk, encoding, cb) => { + process.nextTick(cb); + }; + + let called = false; + writable.end('asd', common.mustCall((err) => { + called = true; + assert.strictEqual(err, null); + })); + + writable.on('error', common.mustCall((err) => { + assert.strictEqual(err.message, 'kaboom'); + })); + writable.on('finish', common.mustCall(() => { + assert.strictEqual(called, true); + writable.emit('error', new Error('kaboom')); + })); +} + +{ + const w = new stream.Writable({ + write(chunk, encoding, callback) { + setImmediate(callback); + }, + finish(callback) { + setImmediate(callback); + } + }); + w.end('testing ended state', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + assert.strictEqual(w.destroyed, false); + 
assert.strictEqual(w.writableEnded, true); + w.end(common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + assert.strictEqual(w.destroyed, false); + assert.strictEqual(w.writableEnded, true); + w.end('end', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + assert.strictEqual(w.destroyed, true); + w.on('error', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + w.on('finish', common.mustNotCall()); +} diff --git a/test/js/node/test/parallel/test-stream2-large-read-stall.js b/test/js/node/test/parallel/test-stream2-large-read-stall.js index 2d44bb7f78..5f5618ce73 100644 --- a/test/js/node/test/parallel/test-stream2-large-read-stall.js +++ b/test/js/node/test/parallel/test-stream2-large-read-stall.js @@ -45,11 +45,11 @@ r.on('readable', function() { do { console.error(` > read(${READSIZE})`); ret = r.read(READSIZE); - console.error(` < ${ret && ret.length} (${rs.length} remain)`); + console.error(` < ${ret?.length} (${rs.length} remain)`); } while (ret && ret.length === READSIZE); console.error('<< after read()', - ret && ret.length, + ret?.length, rs.needReadable, rs.length); }); diff --git a/test/js/node/test/parallel/test-stream2-transform.js b/test/js/node/test/parallel/test-stream2-transform.js new file mode 100644 index 0000000000..f222f1c03b --- /dev/null +++ b/test/js/node/test/parallel/test-stream2-transform.js @@ -0,0 +1,492 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const { PassThrough, Transform } = require('stream'); + +{ + // Verify writable side consumption + const tx = new Transform({ + highWaterMark: 10 + }); + + let transformed = 0; + tx._transform = function(chunk, encoding, cb) { + transformed += chunk.length; + tx.push(chunk); + cb(); + }; + + for (let i = 1; i <= 10; i++) { + tx.write(Buffer.allocUnsafe(i)); + } + tx.end(); + + assert.strictEqual(tx.readableLength, 10); + assert.strictEqual(transformed, 10); + assert.deepStrictEqual(tx.writableBuffer.map(function(c) { + return c.chunk.length; + }), [5, 6, 7, 8, 9, 10]); +} + +{ + // Verify passthrough behavior + const pt = new PassThrough(); + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + pt.end(); + + assert.strictEqual(pt.read(5).toString(), 'foogb'); + assert.strictEqual(pt.read(5).toString(), 'arkba'); + assert.strictEqual(pt.read(5).toString(), 'zykue'); + assert.strictEqual(pt.read(5).toString(), 'l'); +} + +{ + // Verify object passthrough behavior + const pt = new PassThrough({ objectMode: true }); + + pt.write(1); + pt.write(true); + pt.write(false); + pt.write(0); + pt.write('foo'); + pt.write(''); + pt.write({ a: 'b' }); + pt.end(); + + assert.strictEqual(pt.read(), 1); + assert.strictEqual(pt.read(), true); + assert.strictEqual(pt.read(), false); + assert.strictEqual(pt.read(), 0); + assert.strictEqual(pt.read(), 'foo'); + assert.strictEqual(pt.read(), ''); + assert.deepStrictEqual(pt.read(), { a: 'b' }); +} + +{ + // Verify passthrough constructor behavior + const pt = PassThrough(); + + assert(pt instanceof PassThrough); +} + +{ + // Verify transform constructor behavior + const pt = Transform(); + + assert(pt instanceof Transform); +} + +{ + // Perform a simple transform + const pt = new Transform(); + pt._transform = function(c, e, cb) { + const ret = Buffer.alloc(c.length, 
'x'); + pt.push(ret); + cb(); + }; + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + pt.end(); + + assert.strictEqual(pt.read(5).toString(), 'xxxxx'); + assert.strictEqual(pt.read(5).toString(), 'xxxxx'); + assert.strictEqual(pt.read(5).toString(), 'xxxxx'); + assert.strictEqual(pt.read(5).toString(), 'x'); +} + +{ + // Verify simple object transform + const pt = new Transform({ objectMode: true }); + pt._transform = function(c, e, cb) { + pt.push(JSON.stringify(c)); + cb(); + }; + + pt.write(1); + pt.write(true); + pt.write(false); + pt.write(0); + pt.write('foo'); + pt.write(''); + pt.write({ a: 'b' }); + pt.end(); + + assert.strictEqual(pt.read(), '1'); + assert.strictEqual(pt.read(), 'true'); + assert.strictEqual(pt.read(), 'false'); + assert.strictEqual(pt.read(), '0'); + assert.strictEqual(pt.read(), '"foo"'); + assert.strictEqual(pt.read(), '""'); + assert.strictEqual(pt.read(), '{"a":"b"}'); +} + +{ + // Verify async passthrough + const pt = new Transform(); + pt._transform = function(chunk, encoding, cb) { + setTimeout(function() { + pt.push(chunk); + cb(); + }, 10); + }; + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + pt.end(); + + pt.on('finish', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'foogb'); + assert.strictEqual(pt.read(5).toString(), 'arkba'); + assert.strictEqual(pt.read(5).toString(), 'zykue'); + assert.strictEqual(pt.read(5).toString(), 'l'); + })); +} + +{ + // Verify asymmetric transform (expand) + const pt = new Transform(); + + // Emit each chunk 2 times. 
+ pt._transform = function(chunk, encoding, cb) { + setTimeout(function() { + pt.push(chunk); + setTimeout(function() { + pt.push(chunk); + cb(); + }, 10); + }, 10); + }; + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + pt.end(); + + pt.on('finish', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'foogf'); + assert.strictEqual(pt.read(5).toString(), 'oogba'); + assert.strictEqual(pt.read(5).toString(), 'rkbar'); + assert.strictEqual(pt.read(5).toString(), 'kbazy'); + assert.strictEqual(pt.read(5).toString(), 'bazyk'); + assert.strictEqual(pt.read(5).toString(), 'uelku'); + assert.strictEqual(pt.read(5).toString(), 'el'); + })); +} + +{ + // Verify asymmetric transform (compress) + const pt = new Transform(); + + // Each output is the first char of 3 consecutive chunks, + // or whatever's left. + pt.state = ''; + + pt._transform = function(chunk, encoding, cb) { + const s = (chunk ||= '').toString(); + setTimeout(() => { + this.state += s.charAt(0); + if (this.state.length === 3) { + pt.push(Buffer.from(this.state)); + this.state = ''; + } + cb(); + }, 10); + }; + + pt._flush = function(cb) { + // Just output whatever we have. 
+ pt.push(Buffer.from(this.state)); + this.state = ''; + cb(); + }; + + pt.write(Buffer.from('aaaa')); + pt.write(Buffer.from('bbbb')); + pt.write(Buffer.from('cccc')); + pt.write(Buffer.from('dddd')); + pt.write(Buffer.from('eeee')); + pt.write(Buffer.from('aaaa')); + pt.write(Buffer.from('bbbb')); + pt.write(Buffer.from('cccc')); + pt.write(Buffer.from('dddd')); + pt.write(Buffer.from('eeee')); + pt.write(Buffer.from('aaaa')); + pt.write(Buffer.from('bbbb')); + pt.write(Buffer.from('cccc')); + pt.write(Buffer.from('dddd')); + pt.end(); + + // 'abcdeabcdeabcd' + pt.on('finish', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'abcde'); + assert.strictEqual(pt.read(5).toString(), 'abcde'); + assert.strictEqual(pt.read(5).toString(), 'abcd'); + })); +} + +// This tests for a stall when data is written to a full stream +// that has empty transforms. +{ + // Verify complex transform behavior + let count = 0; + let saved = null; + const pt = new Transform({ highWaterMark: 3 }); + pt._transform = function(c, e, cb) { + if (count++ === 1) + saved = c; + else { + if (saved) { + pt.push(saved); + saved = null; + } + pt.push(c); + } + + cb(); + }; + + pt.once('readable', function() { + process.nextTick(function() { + pt.write(Buffer.from('d')); + pt.write(Buffer.from('ef'), common.mustCall(function() { + pt.end(); + })); + assert.strictEqual(pt.read().toString(), 'abcdef'); + assert.strictEqual(pt.read(), null); + }); + }); + + pt.write(Buffer.from('abc')); +} + + +{ + // Verify passthrough event emission + const pt = new PassThrough(); + let emits = 0; + pt.on('readable', function() { + emits++; + }); + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + + assert.strictEqual(emits, 0); + assert.strictEqual(pt.read(5).toString(), 'foogb'); + assert.strictEqual(String(pt.read(5)), 'null'); + assert.strictEqual(emits, 0); + + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + + assert.strictEqual(emits, 0); + 
assert.strictEqual(pt.read(5).toString(), 'arkba'); + assert.strictEqual(pt.read(5).toString(), 'zykue'); + assert.strictEqual(pt.read(5), null); + + pt.end(); + + assert.strictEqual(emits, 1); + assert.strictEqual(pt.read(5).toString(), 'l'); + assert.strictEqual(pt.read(5), null); + assert.strictEqual(emits, 1); +} + +{ + // Verify passthrough event emission reordering + const pt = new PassThrough(); + let emits = 0; + pt.on('readable', function() { + emits++; + }); + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + + assert.strictEqual(emits, 0); + assert.strictEqual(pt.read(5).toString(), 'foogb'); + assert.strictEqual(pt.read(5), null); + + pt.once('readable', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'arkba'); + assert.strictEqual(pt.read(5), null); + + pt.once('readable', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'zykue'); + assert.strictEqual(pt.read(5), null); + pt.once('readable', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'l'); + assert.strictEqual(pt.read(5), null); + assert.strictEqual(emits, 3); + })); + pt.end(); + })); + pt.write(Buffer.from('kuel')); + })); + + pt.write(Buffer.from('bazy')); +} + +{ + // Verify passthrough facade + const pt = new PassThrough(); + const datas = []; + pt.on('data', function(chunk) { + datas.push(chunk.toString()); + }); + + pt.on('end', common.mustCall(function() { + assert.deepStrictEqual(datas, ['foog', 'bark', 'bazy', 'kuel']); + })); + + pt.write(Buffer.from('foog')); + setTimeout(function() { + pt.write(Buffer.from('bark')); + setTimeout(function() { + pt.write(Buffer.from('bazy')); + setTimeout(function() { + pt.write(Buffer.from('kuel')); + setTimeout(function() { + pt.end(); + }, 10); + }, 10); + }, 10); + }, 10); +} + +{ + // Verify object transform (JSON parse) + const jp = new Transform({ objectMode: true }); + jp._transform = function(data, encoding, cb) { + try { + jp.push(JSON.parse(data)); + 
cb(); + } catch (er) { + cb(er); + } + }; + + // Anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. + const objects = [ + { foo: 'bar' }, + 100, + 'string', + { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }, + ]; + + let ended = false; + jp.on('end', function() { + ended = true; + }); + + for (const obj of objects) { + jp.write(JSON.stringify(obj)); + const res = jp.read(); + assert.deepStrictEqual(res, obj); + } + + jp.end(); + // Read one more time to get the 'end' event + jp.read(); + + process.nextTick(common.mustCall(function() { + assert.strictEqual(ended, true); + })); +} + +{ + // Verify object transform (JSON stringify) + const js = new Transform({ objectMode: true }); + js._transform = function(data, encoding, cb) { + try { + js.push(JSON.stringify(data)); + cb(); + } catch (er) { + cb(er); + } + }; + + // Anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. + const objects = [ + { foo: 'bar' }, + 100, + 'string', + { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }, + ]; + + let ended = false; + js.on('end', function() { + ended = true; + }); + + for (const obj of objects) { + js.write(obj); + const res = js.read(); + assert.strictEqual(res, JSON.stringify(obj)); + } + + js.end(); + // Read one more time to get the 'end' event + js.read(); + + process.nextTick(common.mustCall(function() { + assert.strictEqual(ended, true); + })); +} + +{ + const s = new Transform({ + objectMode: true, + construct(callback) { + this.push('header from constructor'); + callback(); + }, + transform: (row, encoding, callback) => { + callback(null, row); + }, + }); + + const expected = [ + 'header from constructor', + 'firstLine', + 'secondLine', + ]; + s.on('data', common.mustCall((data) => { + assert.strictEqual(data.toString(), expected.shift()); + }, 3)); + s.write('firstLine'); + process.nextTick(() => s.write('secondLine')); +} diff --git 
a/test/js/node/test/parallel/test-stream2-unpipe-leak.js b/test/js/node/test/parallel/test-stream2-unpipe-leak.js new file mode 100644 index 0000000000..52c16368f5 --- /dev/null +++ b/test/js/node/test/parallel/test-stream2-unpipe-leak.js @@ -0,0 +1,73 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; +require('../common'); +const assert = require('assert'); +const stream = require('stream'); + +const chunk = Buffer.from('hallo'); + +class TestWriter extends stream.Writable { + _write(buffer, encoding, callback) { + callback(null); + } +} + +const dest = new TestWriter(); + +// Set this high so that we'd trigger a nextTick warning +// and/or RangeError if we do maybeReadMore wrong. 
+class TestReader extends stream.Readable { + constructor() { + super({ + highWaterMark: 0x10000 + }); + } + + _read(size) { + this.push(chunk); + } +} + +const src = new TestReader(); + +for (let i = 0; i < 10; i++) { + src.pipe(dest); + src.unpipe(dest); +} + +assert.strictEqual(src.listeners('end').length, 0); +assert.strictEqual(src.listeners('readable').length, 0); + +assert.strictEqual(dest.listeners('unpipe').length, 0); +assert.strictEqual(dest.listeners('drain').length, 0); +assert.strictEqual(dest.listeners('error').length, 0); +assert.strictEqual(dest.listeners('close').length, 0); +assert.strictEqual(dest.listeners('finish').length, 0); + +console.error(src._readableState); +process.on('exit', function() { + src.readableBuffer.length = 0; + console.error(src._readableState); + assert(src.readableLength >= src.readableHighWaterMark); + console.log('ok'); +}); diff --git a/test/js/node/test/parallel/test-stream2-writable.js b/test/js/node/test/parallel/test-stream2-writable.js new file mode 100644 index 0000000000..6d233ae6b6 --- /dev/null +++ b/test/js/node/test/parallel/test-stream2-writable.js @@ -0,0 +1,464 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +const common = require('../common'); +const { Writable: W, Duplex: D } = require('stream'); +const assert = require('assert'); + +class TestWriter extends W { + constructor(opts) { + super(opts); + this.buffer = []; + this.written = 0; + } + + _write(chunk, encoding, cb) { + // Simulate a small unpredictable latency + setTimeout(() => { + this.buffer.push(chunk.toString()); + this.written += chunk.length; + cb(); + }, Math.floor(Math.random() * 10)); + } +} + +const chunks = new Array(50); +for (let i = 0; i < chunks.length; i++) { + chunks[i] = 'x'.repeat(i); +} + +{ + // Verify fast writing + const tw = new TestWriter({ + highWaterMark: 100 + }); + + tw.on('finish', common.mustCall(function() { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks); + })); + + chunks.forEach(function(chunk) { + // Ignore backpressure. Just buffer it all up. 
+ tw.write(chunk); + }); + tw.end(); +} + +{ + // Verify slow writing + const tw = new TestWriter({ + highWaterMark: 100 + }); + + tw.on('finish', common.mustCall(function() { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks); + })); + + let i = 0; + (function W() { + tw.write(chunks[i++]); + if (i < chunks.length) + setTimeout(W, 10); + else + tw.end(); + })(); +} + +{ + // Verify write backpressure + const tw = new TestWriter({ + highWaterMark: 50 + }); + + let drains = 0; + + tw.on('finish', common.mustCall(function() { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks); + assert.strictEqual(drains, 17); + })); + + tw.on('drain', function() { + drains++; + }); + + let i = 0; + (function W() { + let ret; + do { + ret = tw.write(chunks[i++]); + } while (ret !== false && i < chunks.length); + + if (i < chunks.length) { + assert(tw.writableLength >= 50); + tw.once('drain', W); + } else { + tw.end(); + } + })(); +} + +{ + // Verify write buffersize + const tw = new TestWriter({ + highWaterMark: 100 + }); + + const encodings = + [ 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'latin1', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined ]; + + tw.on('finish', function() { + // Got the expected chunks + assert.deepStrictEqual(tw.buffer, chunks); + }); + + chunks.forEach(function(chunk, i) { + const enc = encodings[i % encodings.length]; + chunk = Buffer.from(chunk); + tw.write(chunk.toString(enc), enc); + }); +} + +{ + // Verify write with no buffersize + const tw = new TestWriter({ + highWaterMark: 100, + decodeStrings: false + }); + + tw._write = function(chunk, encoding, cb) { + assert.strictEqual(typeof chunk, 'string'); + chunk = Buffer.from(chunk, encoding); + return TestWriter.prototype._write.call(this, chunk, encoding, cb); + }; + + const encodings = + [ 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'latin1', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + 
undefined ]; + + tw.on('finish', function() { + // Got the expected chunks + assert.deepStrictEqual(tw.buffer, chunks); + }); + + chunks.forEach(function(chunk, i) { + const enc = encodings[i % encodings.length]; + chunk = Buffer.from(chunk); + tw.write(chunk.toString(enc), enc); + }); +} + +{ + // Verify write callbacks + const callbacks = chunks.map(function(chunk, i) { + return [i, function(err) { + assert.strictEqual(err, null); + callbacks._called[i] = chunk; + }]; + }).reduce(function(set, x) { + set[`callback-${x[0]}`] = x[1]; + return set; + }, {}); + callbacks._called = []; + + const tw = new TestWriter({ + highWaterMark: 100 + }); + + tw.on('finish', common.mustCall(function() { + process.nextTick(common.mustCall(function() { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks); + // Called all callbacks + assert.deepStrictEqual(callbacks._called, chunks); + })); + })); + + chunks.forEach(function(chunk, i) { + tw.write(chunk, callbacks[`callback-${i}`]); + }); + tw.end(); +} + +{ + // Verify end() callback + const tw = new TestWriter(); + tw.end(common.mustCall(function(err) { + assert.strictEqual(err, null); + })); +} + +const helloWorldBuffer = Buffer.from('hello world'); + +{ + // Verify end() callback with chunk + const tw = new TestWriter(); + tw.end(helloWorldBuffer, common.mustCall(function(err) { + assert.strictEqual(err, null); + })); +} + +{ + // Verify end() callback with chunk and encoding + const tw = new TestWriter(); + tw.end('hello world', 'ascii', common.mustCall()); +} + +{ + // Verify end() callback after write() call + const tw = new TestWriter(); + tw.write(helloWorldBuffer); + tw.end(common.mustCall()); +} + +{ + // Verify end() callback after write() callback + const tw = new TestWriter(); + let writeCalledback = false; + tw.write(helloWorldBuffer, function() { + writeCalledback = true; + }); + tw.end(common.mustCall(function() { + assert.strictEqual(writeCalledback, true); + })); +} + +{ + // Verify 
encoding is ignored for buffers + const tw = new W(); + const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'; + tw._write = common.mustCall(function(chunk) { + assert.strictEqual(chunk.toString('hex'), hex); + }); + const buf = Buffer.from(hex, 'hex'); + tw.write(buf, 'latin1'); +} + +{ + // Verify writables cannot be piped + const w = new W({ autoDestroy: false }); + w._write = common.mustNotCall(); + let gotError = false; + w.on('error', function() { + gotError = true; + }); + w.pipe(process.stdout); + assert.strictEqual(gotError, true); +} + +{ + // Verify that duplex streams cannot be piped + const d = new D(); + d._read = common.mustCall(); + d._write = common.mustNotCall(); + let gotError = false; + d.on('error', function() { + gotError = true; + }); + d.pipe(process.stdout); + assert.strictEqual(gotError, false); +} + +{ + // Verify that end(chunk) twice is an error + const w = new W(); + w._write = common.mustCall((msg) => { + assert.strictEqual(msg.toString(), 'this is the end'); + }); + let gotError = false; + w.on('error', function(er) { + gotError = true; + assert.strictEqual(er.message, 'write after end'); + }); + w.end('this is the end'); + w.end('and so is this'); + process.nextTick(common.mustCall(function() { + assert.strictEqual(gotError, true); + })); +} + +{ + // Verify stream doesn't end while writing + const w = new W(); + let wrote = false; + w._write = function(chunk, e, cb) { + assert.strictEqual(this.writing, undefined); + wrote = true; + this.writing = true; + setTimeout(() => { + this.writing = false; + cb(); + }, 1); + }; + w.on('finish', common.mustCall(function() { + assert.strictEqual(wrote, true); + assert.strictEqual(this.writing, false); + })); + w.write(Buffer.alloc(0)); + w.end(); +} + +{ + // Verify finish does not come before write() callback + const w = new W(); + let writeCb = false; + w._write = function(chunk, e, cb) { + setTimeout(function() { + writeCb = true; + cb(); + }, 10); + }; + w.on('finish', 
common.mustCall(function() { + assert.strictEqual(writeCb, true); + })); + w.write(Buffer.alloc(0)); + w.end(); +} + +{ + // Verify finish does not come before synchronous _write() callback + const w = new W(); + let writeCb = false; + w._write = function(chunk, e, cb) { + cb(); + }; + w.on('finish', common.mustCall(function() { + assert.strictEqual(writeCb, true); + })); + w.write(Buffer.alloc(0), function() { + writeCb = true; + }); + w.end(); +} + +{ + // Verify finish is emitted if the last chunk is empty + const w = new W(); + w._write = function(chunk, e, cb) { + process.nextTick(cb); + }; + w.on('finish', common.mustCall()); + w.write(Buffer.allocUnsafe(1)); + w.end(Buffer.alloc(0)); +} + +{ + // Verify that finish is emitted after shutdown + const w = new W(); + let shutdown = false; + + w._final = common.mustCall(function(cb) { + assert.strictEqual(this, w); + setTimeout(function() { + shutdown = true; + cb(); + }, 100); + }); + w._write = function(chunk, e, cb) { + process.nextTick(cb); + }; + w.on('finish', common.mustCall(function() { + assert.strictEqual(shutdown, true); + })); + w.write(Buffer.allocUnsafe(1)); + w.end(Buffer.allocUnsafe(0)); +} + +{ + // Verify that error is only emitted once when failing in _finish. + const w = new W(); + + w._final = common.mustCall(function(cb) { + cb(new Error('test')); + }); + w.on('error', common.mustCall((err) => { + assert.strictEqual(w._writableState.errorEmitted, true); + assert.strictEqual(err.message, 'test'); + w.on('error', common.mustNotCall()); + w.destroy(new Error()); + })); + w.end(); +} + +{ + // Verify that error is only emitted once when failing in write. + const w = new W(); + w.on('error', common.mustNotCall()); + assert.throws(() => { + w.write(null); + }, { + code: 'ERR_STREAM_NULL_VALUES' + }); +} + +{ + // Verify that error is only emitted once when failing in write after end. 
+ const w = new W(); + w.on('error', common.mustCall((err) => { + assert.strictEqual(w._writableState.errorEmitted, true); + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + w.end(); + w.write('hello'); + w.destroy(new Error()); +} + +{ + // Verify that finish is not emitted after error + const w = new W(); + + w._final = common.mustCall(function(cb) { + cb(new Error()); + }); + w._write = function(chunk, e, cb) { + process.nextTick(cb); + }; + w.on('error', common.mustCall()); + w.on('prefinish', common.mustNotCall()); + w.on('finish', common.mustNotCall()); + w.write(Buffer.allocUnsafe(1)); + w.end(Buffer.allocUnsafe(0)); +} diff --git a/test/js/node/test/parallel/test-streams-highwatermark.js b/test/js/node/test/parallel/test-streams-highwatermark.js new file mode 100644 index 0000000000..e5c2f0b597 --- /dev/null +++ b/test/js/node/test/parallel/test-streams-highwatermark.js @@ -0,0 +1,111 @@ +'use strict'; +const common = require('../common'); + +const assert = require('assert'); +const stream = require('stream'); +const { inspect } = Bun; + +{ + // This test ensures that the stream implementation correctly handles values + // for highWaterMark which exceed the range of signed 32 bit integers and + // rejects invalid values. + + // This number exceeds the range of 32 bit integer arithmetic but should still + // be handled correctly. + const ovfl = Number.MAX_SAFE_INTEGER; + + const readable = stream.Readable({ highWaterMark: ovfl }); + assert.strictEqual(readable._readableState.highWaterMark, ovfl); + + const writable = stream.Writable({ highWaterMark: ovfl }); + assert.strictEqual(writable._writableState.highWaterMark, ovfl); + + for (const invalidHwm of [true, false, '5', {}, -5, NaN]) { + for (const type of [stream.Readable, stream.Writable]) { + assert.throws(() => { + type({ highWaterMark: invalidHwm }); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.highWaterMark' is invalid. 
" + + `Received ${inspect(invalidHwm)}` + }); + } + } +} + +{ + // This test ensures that the push method's implementation + // correctly handles the edge case where the highWaterMark and + // the state.length are both zero + + const readable = stream.Readable({ highWaterMark: 0 }); + + for (let i = 0; i < 3; i++) { + const needMoreData = readable.push(); + assert.strictEqual(needMoreData, true); + } +} + +{ + // This test ensures that the read(n) method's implementation + // correctly handles the edge case where the highWaterMark, state.length + // and n are all zero + + const readable = stream.Readable({ highWaterMark: 0 }); + + readable._read = common.mustCall(); + readable.read(0); +} + +{ + // Parse size as decimal integer + ['1', '1.0', 1].forEach((size) => { + const readable = new stream.Readable({ + read: common.mustCall(), + highWaterMark: 0, + }); + readable.read(size); + + assert.strictEqual(readable._readableState.highWaterMark, Number(size)); + }); +} + +{ + // Test highwatermark limit + const hwm = 0x40000000 + 1; + const readable = stream.Readable({ + read() {}, + }); + + assert.throws(() => readable.read(hwm), common.expectsError({ + code: 'ERR_OUT_OF_RANGE', + message: 'The value of "size" is out of range.' + + ' It must be <= 1GiB. Received ' + + hwm, + })); +} + +{ + const res = []; + const r = new stream.Readable({ + read() {}, + }); + const w = new stream.Writable({ + highWaterMark: 0, + write(chunk, encoding, callback) { + res.push(chunk.toString()); + callback(); + }, + }); + + r.pipe(w); + r.push('a'); + r.push('b'); + r.push('c'); + r.push(null); + + r.on('end', common.mustCall(() => { + assert.deepStrictEqual(res, ['a', 'b', 'c']); + })); +} diff --git a/test/js/node/test/sequential/test-stream2-fs.js b/test/js/node/test/sequential/test-stream2-fs.js new file mode 100644 index 0000000000..3d06abc921 --- /dev/null +++ b/test/js/node/test/sequential/test-stream2-fs.js @@ -0,0 +1,70 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; +require('../common'); +const fixtures = require('../common/fixtures'); +const assert = require('assert'); + +const fs = require('fs'); +const FSReadable = fs.ReadStream; + +const path = require('path'); +const file = path.resolve(fixtures.path('x1024.txt')); + +const size = fs.statSync(file).size; + +const expectLengths = [1024]; + +const Stream = require('stream'); + +class TestWriter extends Stream { + constructor() { + super(); + this.buffer = []; + this.length = 0; + } + + write(c) { + this.buffer.push(c.toString()); + this.length += c.length; + return true; + } + + end(c) { + if (c) this.buffer.push(c.toString()); + this.emit('results', this.buffer); + } +} + +const r = new FSReadable(file); +const w = new TestWriter(); + +w.on('results', function(res) { + console.error(res, w.length); + assert.strictEqual(w.length, size); + assert.deepStrictEqual(res.map(function(c) { + return c.length; + }), expectLengths); + console.log('ok'); +}); + +r.pipe(w);