diff --git a/CLAUDE.md b/CLAUDE.md index b83fd0dfa6..09a8499345 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -4,18 +4,14 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed ### Build Commands -- **Build debug version**: `bun bd` +- **Build Bun**: `bun bd` - Creates a debug build at `./build/debug/bun-debug` - - **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient. + - **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient. - **Run tests with your debug build**: `bun bd test ` - **CRITICAL**: Never use `bun test` directly - it won't include your changes - **Run any command with debug build**: `bun bd ` -### Other Build Variants - -- `bun run build:release` - Release build - -Address sanitizer is enabled by default in debug builds of Bun. +Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script. ## Testing diff --git a/cmake/sources/CxxSources.txt b/cmake/sources/CxxSources.txt deleted file mode 100644 index 03ba693fb5..0000000000 --- a/cmake/sources/CxxSources.txt +++ /dev/null @@ -1,506 +0,0 @@ -packages/bun-usockets/src/crypto/root_certs.cpp -packages/bun-usockets/src/crypto/sni_tree.cpp -src/bake/BakeGlobalObject.cpp -src/bake/BakeProduction.cpp -src/bake/BakeSourceProvider.cpp -src/bun.js/bindings/ActiveDOMCallback.cpp -src/bun.js/bindings/AsymmetricKeyValue.cpp -src/bun.js/bindings/AsyncContextFrame.cpp -src/bun.js/bindings/Base64Helpers.cpp -src/bun.js/bindings/bindings.cpp -src/bun.js/bindings/blob.cpp -src/bun.js/bindings/bun-simdutf.cpp -src/bun.js/bindings/bun-spawn.cpp -src/bun.js/bindings/BunClientData.cpp -src/bun.js/bindings/BunCommonStrings.cpp -src/bun.js/bindings/BunDebugger.cpp -src/bun.js/bindings/BunGCOutputConstraint.cpp -src/bun.js/bindings/BunGlobalScope.cpp -src/bun.js/bindings/BunHttp2CommonStrings.cpp -src/bun.js/bindings/BunInjectedScriptHost.cpp -src/bun.js/bindings/BunInspector.cpp -src/bun.js/bindings/BunJSCEventLoop.cpp 
-src/bun.js/bindings/BunObject.cpp -src/bun.js/bindings/BunPlugin.cpp -src/bun.js/bindings/BunProcess.cpp -src/bun.js/bindings/BunString.cpp -src/bun.js/bindings/BunWorkerGlobalScope.cpp -src/bun.js/bindings/c-bindings.cpp -src/bun.js/bindings/CallSite.cpp -src/bun.js/bindings/CallSitePrototype.cpp -src/bun.js/bindings/CatchScopeBinding.cpp -src/bun.js/bindings/CodeCoverage.cpp -src/bun.js/bindings/ConsoleObject.cpp -src/bun.js/bindings/Cookie.cpp -src/bun.js/bindings/CookieMap.cpp -src/bun.js/bindings/coroutine.cpp -src/bun.js/bindings/CPUFeatures.cpp -src/bun.js/bindings/decodeURIComponentSIMD.cpp -src/bun.js/bindings/DOMException.cpp -src/bun.js/bindings/DOMFormData.cpp -src/bun.js/bindings/DOMURL.cpp -src/bun.js/bindings/DOMWrapperWorld.cpp -src/bun.js/bindings/DoubleFormatter.cpp -src/bun.js/bindings/EncodeURIComponent.cpp -src/bun.js/bindings/EncodingTables.cpp -src/bun.js/bindings/ErrorCode.cpp -src/bun.js/bindings/ErrorStackFrame.cpp -src/bun.js/bindings/ErrorStackTrace.cpp -src/bun.js/bindings/EventLoopTaskNoContext.cpp -src/bun.js/bindings/ExposeNodeModuleGlobals.cpp -src/bun.js/bindings/ffi.cpp -src/bun.js/bindings/helpers.cpp -src/bun.js/bindings/highway_strings.cpp -src/bun.js/bindings/HTMLEntryPoint.cpp -src/bun.js/bindings/ImportMetaObject.cpp -src/bun.js/bindings/inlines.cpp -src/bun.js/bindings/InspectorBunFrontendDevServerAgent.cpp -src/bun.js/bindings/InspectorHTTPServerAgent.cpp -src/bun.js/bindings/InspectorLifecycleAgent.cpp -src/bun.js/bindings/InspectorTestReporterAgent.cpp -src/bun.js/bindings/InternalForTesting.cpp -src/bun.js/bindings/InternalModuleRegistry.cpp -src/bun.js/bindings/IPC.cpp -src/bun.js/bindings/isBuiltinModule.cpp -src/bun.js/bindings/JS2Native.cpp -src/bun.js/bindings/JSBigIntBinding.cpp -src/bun.js/bindings/JSBuffer.cpp -src/bun.js/bindings/JSBufferEncodingType.cpp -src/bun.js/bindings/JSBufferList.cpp -src/bun.js/bindings/JSBundlerPlugin.cpp -src/bun.js/bindings/JSBunRequest.cpp 
-src/bun.js/bindings/JSCommonJSExtensions.cpp -src/bun.js/bindings/JSCommonJSModule.cpp -src/bun.js/bindings/JSCTaskScheduler.cpp -src/bun.js/bindings/JSCTestingHelpers.cpp -src/bun.js/bindings/JSDOMExceptionHandling.cpp -src/bun.js/bindings/JSDOMFile.cpp -src/bun.js/bindings/JSDOMGlobalObject.cpp -src/bun.js/bindings/JSDOMWrapper.cpp -src/bun.js/bindings/JSDOMWrapperCache.cpp -src/bun.js/bindings/JSEnvironmentVariableMap.cpp -src/bun.js/bindings/JSFFIFunction.cpp -src/bun.js/bindings/JSMockFunction.cpp -src/bun.js/bindings/JSNextTickQueue.cpp -src/bun.js/bindings/JSNodePerformanceHooksHistogram.cpp -src/bun.js/bindings/JSNodePerformanceHooksHistogramConstructor.cpp -src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp -src/bun.js/bindings/JSPropertyIterator.cpp -src/bun.js/bindings/JSS3File.cpp -src/bun.js/bindings/JSSecrets.cpp -src/bun.js/bindings/JSSocketAddressDTO.cpp -src/bun.js/bindings/JSStringDecoder.cpp -src/bun.js/bindings/JSWrappingFunction.cpp -src/bun.js/bindings/JSX509Certificate.cpp -src/bun.js/bindings/JSX509CertificateConstructor.cpp -src/bun.js/bindings/JSX509CertificatePrototype.cpp -src/bun.js/bindings/linux_perf_tracing.cpp -src/bun.js/bindings/MarkedArgumentBufferBinding.cpp -src/bun.js/bindings/MarkingConstraint.cpp -src/bun.js/bindings/ModuleLoader.cpp -src/bun.js/bindings/napi_external.cpp -src/bun.js/bindings/napi_finalizer.cpp -src/bun.js/bindings/napi_handle_scope.cpp -src/bun.js/bindings/napi_type_tag.cpp -src/bun.js/bindings/napi.cpp -src/bun.js/bindings/NapiClass.cpp -src/bun.js/bindings/NapiRef.cpp -src/bun.js/bindings/NapiWeakValue.cpp -src/bun.js/bindings/ncrpyto_engine.cpp -src/bun.js/bindings/ncrypto.cpp -src/bun.js/bindings/node/crypto/CryptoDhJob.cpp -src/bun.js/bindings/node/crypto/CryptoGenDhKeyPair.cpp -src/bun.js/bindings/node/crypto/CryptoGenDsaKeyPair.cpp -src/bun.js/bindings/node/crypto/CryptoGenEcKeyPair.cpp -src/bun.js/bindings/node/crypto/CryptoGenKeyPair.cpp 
-src/bun.js/bindings/node/crypto/CryptoGenNidKeyPair.cpp -src/bun.js/bindings/node/crypto/CryptoGenRsaKeyPair.cpp -src/bun.js/bindings/node/crypto/CryptoHkdf.cpp -src/bun.js/bindings/node/crypto/CryptoKeygen.cpp -src/bun.js/bindings/node/crypto/CryptoKeys.cpp -src/bun.js/bindings/node/crypto/CryptoPrimes.cpp -src/bun.js/bindings/node/crypto/CryptoSignJob.cpp -src/bun.js/bindings/node/crypto/CryptoUtil.cpp -src/bun.js/bindings/node/crypto/JSCipher.cpp -src/bun.js/bindings/node/crypto/JSCipherConstructor.cpp -src/bun.js/bindings/node/crypto/JSCipherPrototype.cpp -src/bun.js/bindings/node/crypto/JSDiffieHellman.cpp -src/bun.js/bindings/node/crypto/JSDiffieHellmanConstructor.cpp -src/bun.js/bindings/node/crypto/JSDiffieHellmanGroup.cpp -src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupConstructor.cpp -src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupPrototype.cpp -src/bun.js/bindings/node/crypto/JSDiffieHellmanPrototype.cpp -src/bun.js/bindings/node/crypto/JSECDH.cpp -src/bun.js/bindings/node/crypto/JSECDHConstructor.cpp -src/bun.js/bindings/node/crypto/JSECDHPrototype.cpp -src/bun.js/bindings/node/crypto/JSHash.cpp -src/bun.js/bindings/node/crypto/JSHmac.cpp -src/bun.js/bindings/node/crypto/JSKeyObject.cpp -src/bun.js/bindings/node/crypto/JSKeyObjectConstructor.cpp -src/bun.js/bindings/node/crypto/JSKeyObjectPrototype.cpp -src/bun.js/bindings/node/crypto/JSPrivateKeyObject.cpp -src/bun.js/bindings/node/crypto/JSPrivateKeyObjectConstructor.cpp -src/bun.js/bindings/node/crypto/JSPrivateKeyObjectPrototype.cpp -src/bun.js/bindings/node/crypto/JSPublicKeyObject.cpp -src/bun.js/bindings/node/crypto/JSPublicKeyObjectConstructor.cpp -src/bun.js/bindings/node/crypto/JSPublicKeyObjectPrototype.cpp -src/bun.js/bindings/node/crypto/JSSecretKeyObject.cpp -src/bun.js/bindings/node/crypto/JSSecretKeyObjectConstructor.cpp -src/bun.js/bindings/node/crypto/JSSecretKeyObjectPrototype.cpp -src/bun.js/bindings/node/crypto/JSSign.cpp -src/bun.js/bindings/node/crypto/JSVerify.cpp 
-src/bun.js/bindings/node/crypto/KeyObject.cpp -src/bun.js/bindings/node/crypto/node_crypto_binding.cpp -src/bun.js/bindings/node/http/JSConnectionsList.cpp -src/bun.js/bindings/node/http/JSConnectionsListConstructor.cpp -src/bun.js/bindings/node/http/JSConnectionsListPrototype.cpp -src/bun.js/bindings/node/http/JSHTTPParser.cpp -src/bun.js/bindings/node/http/JSHTTPParserConstructor.cpp -src/bun.js/bindings/node/http/JSHTTPParserPrototype.cpp -src/bun.js/bindings/node/http/NodeHTTPParser.cpp -src/bun.js/bindings/node/NodeTimers.cpp -src/bun.js/bindings/NodeAsyncHooks.cpp -src/bun.js/bindings/NodeDirent.cpp -src/bun.js/bindings/NodeFetch.cpp -src/bun.js/bindings/NodeFSStatBinding.cpp -src/bun.js/bindings/NodeFSStatFSBinding.cpp -src/bun.js/bindings/NodeHTTP.cpp -src/bun.js/bindings/NodeTimerObject.cpp -src/bun.js/bindings/NodeTLS.cpp -src/bun.js/bindings/NodeURL.cpp -src/bun.js/bindings/NodeValidator.cpp -src/bun.js/bindings/NodeVM.cpp -src/bun.js/bindings/NodeVMModule.cpp -src/bun.js/bindings/NodeVMScript.cpp -src/bun.js/bindings/NodeVMSourceTextModule.cpp -src/bun.js/bindings/NodeVMSyntheticModule.cpp -src/bun.js/bindings/NoOpForTesting.cpp -src/bun.js/bindings/ObjectBindings.cpp -src/bun.js/bindings/objects.cpp -src/bun.js/bindings/OsBinding.cpp -src/bun.js/bindings/Path.cpp -src/bun.js/bindings/ProcessBindingBuffer.cpp -src/bun.js/bindings/ProcessBindingConstants.cpp -src/bun.js/bindings/ProcessBindingFs.cpp -src/bun.js/bindings/ProcessBindingHTTPParser.cpp -src/bun.js/bindings/ProcessBindingNatives.cpp -src/bun.js/bindings/ProcessBindingTTYWrap.cpp -src/bun.js/bindings/ProcessBindingUV.cpp -src/bun.js/bindings/ProcessIdentifier.cpp -src/bun.js/bindings/RegularExpression.cpp -src/bun.js/bindings/S3Error.cpp -src/bun.js/bindings/ScriptExecutionContext.cpp -src/bun.js/bindings/SecretsDarwin.cpp -src/bun.js/bindings/SecretsLinux.cpp -src/bun.js/bindings/SecretsWindows.cpp -src/bun.js/bindings/Serialization.cpp -src/bun.js/bindings/ServerRouteList.cpp 
-src/bun.js/bindings/spawn.cpp -src/bun.js/bindings/SQLClient.cpp -src/bun.js/bindings/sqlite/JSSQLStatement.cpp -src/bun.js/bindings/StringBuilderBinding.cpp -src/bun.js/bindings/stripANSI.cpp -src/bun.js/bindings/Strong.cpp -src/bun.js/bindings/TextCodec.cpp -src/bun.js/bindings/TextCodecCJK.cpp -src/bun.js/bindings/TextCodecReplacement.cpp -src/bun.js/bindings/TextCodecSingleByte.cpp -src/bun.js/bindings/TextCodecUserDefined.cpp -src/bun.js/bindings/TextCodecWrapper.cpp -src/bun.js/bindings/TextEncoding.cpp -src/bun.js/bindings/TextEncodingRegistry.cpp -src/bun.js/bindings/Uint8Array.cpp -src/bun.js/bindings/Undici.cpp -src/bun.js/bindings/URLDecomposition.cpp -src/bun.js/bindings/URLSearchParams.cpp -src/bun.js/bindings/UtilInspect.cpp -src/bun.js/bindings/v8/node.cpp -src/bun.js/bindings/v8/shim/Function.cpp -src/bun.js/bindings/v8/shim/FunctionTemplate.cpp -src/bun.js/bindings/v8/shim/GlobalInternals.cpp -src/bun.js/bindings/v8/shim/Handle.cpp -src/bun.js/bindings/v8/shim/HandleScopeBuffer.cpp -src/bun.js/bindings/v8/shim/InternalFieldObject.cpp -src/bun.js/bindings/v8/shim/Map.cpp -src/bun.js/bindings/v8/shim/ObjectTemplate.cpp -src/bun.js/bindings/v8/shim/Oddball.cpp -src/bun.js/bindings/v8/shim/TaggedPointer.cpp -src/bun.js/bindings/v8/v8_api_internal.cpp -src/bun.js/bindings/v8/v8_internal.cpp -src/bun.js/bindings/v8/V8Array.cpp -src/bun.js/bindings/v8/V8Boolean.cpp -src/bun.js/bindings/v8/V8Context.cpp -src/bun.js/bindings/v8/V8EscapableHandleScope.cpp -src/bun.js/bindings/v8/V8EscapableHandleScopeBase.cpp -src/bun.js/bindings/v8/V8External.cpp -src/bun.js/bindings/v8/V8Function.cpp -src/bun.js/bindings/v8/V8FunctionCallbackInfo.cpp -src/bun.js/bindings/v8/V8FunctionTemplate.cpp -src/bun.js/bindings/v8/V8HandleScope.cpp -src/bun.js/bindings/v8/V8Isolate.cpp -src/bun.js/bindings/v8/V8Local.cpp -src/bun.js/bindings/v8/V8Maybe.cpp -src/bun.js/bindings/v8/V8Number.cpp -src/bun.js/bindings/v8/V8Object.cpp -src/bun.js/bindings/v8/V8ObjectTemplate.cpp 
-src/bun.js/bindings/v8/V8String.cpp -src/bun.js/bindings/v8/V8Template.cpp -src/bun.js/bindings/v8/V8Value.cpp -src/bun.js/bindings/Weak.cpp -src/bun.js/bindings/webcore/AbortController.cpp -src/bun.js/bindings/webcore/AbortSignal.cpp -src/bun.js/bindings/webcore/ActiveDOMObject.cpp -src/bun.js/bindings/webcore/BroadcastChannel.cpp -src/bun.js/bindings/webcore/BunBroadcastChannelRegistry.cpp -src/bun.js/bindings/webcore/CloseEvent.cpp -src/bun.js/bindings/webcore/CommonAtomStrings.cpp -src/bun.js/bindings/webcore/ContextDestructionObserver.cpp -src/bun.js/bindings/webcore/CustomEvent.cpp -src/bun.js/bindings/webcore/CustomEventCustom.cpp -src/bun.js/bindings/webcore/DOMJITHelpers.cpp -src/bun.js/bindings/webcore/ErrorCallback.cpp -src/bun.js/bindings/webcore/ErrorEvent.cpp -src/bun.js/bindings/webcore/Event.cpp -src/bun.js/bindings/webcore/EventContext.cpp -src/bun.js/bindings/webcore/EventDispatcher.cpp -src/bun.js/bindings/webcore/EventEmitter.cpp -src/bun.js/bindings/webcore/EventFactory.cpp -src/bun.js/bindings/webcore/EventListenerMap.cpp -src/bun.js/bindings/webcore/EventNames.cpp -src/bun.js/bindings/webcore/EventPath.cpp -src/bun.js/bindings/webcore/EventTarget.cpp -src/bun.js/bindings/webcore/EventTargetConcrete.cpp -src/bun.js/bindings/webcore/EventTargetFactory.cpp -src/bun.js/bindings/webcore/FetchHeaders.cpp -src/bun.js/bindings/webcore/HeaderFieldTokenizer.cpp -src/bun.js/bindings/webcore/HTTPHeaderField.cpp -src/bun.js/bindings/webcore/HTTPHeaderIdentifiers.cpp -src/bun.js/bindings/webcore/HTTPHeaderMap.cpp -src/bun.js/bindings/webcore/HTTPHeaderNames.cpp -src/bun.js/bindings/webcore/HTTPHeaderStrings.cpp -src/bun.js/bindings/webcore/HTTPHeaderValues.cpp -src/bun.js/bindings/webcore/HTTPParsers.cpp -src/bun.js/bindings/webcore/IdentifierEventListenerMap.cpp -src/bun.js/bindings/webcore/InternalWritableStream.cpp -src/bun.js/bindings/webcore/JSAbortAlgorithm.cpp -src/bun.js/bindings/webcore/JSAbortController.cpp 
-src/bun.js/bindings/webcore/JSAbortSignal.cpp -src/bun.js/bindings/webcore/JSAbortSignalCustom.cpp -src/bun.js/bindings/webcore/JSAddEventListenerOptions.cpp -src/bun.js/bindings/webcore/JSBroadcastChannel.cpp -src/bun.js/bindings/webcore/JSByteLengthQueuingStrategy.cpp -src/bun.js/bindings/webcore/JSCallbackData.cpp -src/bun.js/bindings/webcore/JSCloseEvent.cpp -src/bun.js/bindings/webcore/JSCookie.cpp -src/bun.js/bindings/webcore/JSCookieMap.cpp -src/bun.js/bindings/webcore/JSCountQueuingStrategy.cpp -src/bun.js/bindings/webcore/JSCustomEvent.cpp -src/bun.js/bindings/webcore/JSDOMBindingInternalsBuiltins.cpp -src/bun.js/bindings/webcore/JSDOMBuiltinConstructorBase.cpp -src/bun.js/bindings/webcore/JSDOMConstructorBase.cpp -src/bun.js/bindings/webcore/JSDOMConvertDate.cpp -src/bun.js/bindings/webcore/JSDOMConvertNumbers.cpp -src/bun.js/bindings/webcore/JSDOMConvertStrings.cpp -src/bun.js/bindings/webcore/JSDOMConvertWebGL.cpp -src/bun.js/bindings/webcore/JSDOMException.cpp -src/bun.js/bindings/webcore/JSDOMFormData.cpp -src/bun.js/bindings/webcore/JSDOMGuardedObject.cpp -src/bun.js/bindings/webcore/JSDOMIterator.cpp -src/bun.js/bindings/webcore/JSDOMOperation.cpp -src/bun.js/bindings/webcore/JSDOMPromise.cpp -src/bun.js/bindings/webcore/JSDOMPromiseDeferred.cpp -src/bun.js/bindings/webcore/JSDOMURL.cpp -src/bun.js/bindings/webcore/JSErrorCallback.cpp -src/bun.js/bindings/webcore/JSErrorEvent.cpp -src/bun.js/bindings/webcore/JSErrorEventCustom.cpp -src/bun.js/bindings/webcore/JSErrorHandler.cpp -src/bun.js/bindings/webcore/JSEvent.cpp -src/bun.js/bindings/webcore/JSEventCustom.cpp -src/bun.js/bindings/webcore/JSEventDOMJIT.cpp -src/bun.js/bindings/webcore/JSEventEmitter.cpp -src/bun.js/bindings/webcore/JSEventEmitterCustom.cpp -src/bun.js/bindings/webcore/JSEventInit.cpp -src/bun.js/bindings/webcore/JSEventListener.cpp -src/bun.js/bindings/webcore/JSEventListenerOptions.cpp -src/bun.js/bindings/webcore/JSEventModifierInit.cpp 
-src/bun.js/bindings/webcore/JSEventTarget.cpp -src/bun.js/bindings/webcore/JSEventTargetCustom.cpp -src/bun.js/bindings/webcore/JSEventTargetNode.cpp -src/bun.js/bindings/webcore/JSFetchHeaders.cpp -src/bun.js/bindings/webcore/JSMessageChannel.cpp -src/bun.js/bindings/webcore/JSMessageChannelCustom.cpp -src/bun.js/bindings/webcore/JSMessageEvent.cpp -src/bun.js/bindings/webcore/JSMessageEventCustom.cpp -src/bun.js/bindings/webcore/JSMessagePort.cpp -src/bun.js/bindings/webcore/JSMessagePortCustom.cpp -src/bun.js/bindings/webcore/JSMIMEBindings.cpp -src/bun.js/bindings/webcore/JSMIMEParams.cpp -src/bun.js/bindings/webcore/JSMIMEType.cpp -src/bun.js/bindings/webcore/JSPerformance.cpp -src/bun.js/bindings/webcore/JSPerformanceEntry.cpp -src/bun.js/bindings/webcore/JSPerformanceEntryCustom.cpp -src/bun.js/bindings/webcore/JSPerformanceMark.cpp -src/bun.js/bindings/webcore/JSPerformanceMarkOptions.cpp -src/bun.js/bindings/webcore/JSPerformanceMeasure.cpp -src/bun.js/bindings/webcore/JSPerformanceMeasureOptions.cpp -src/bun.js/bindings/webcore/JSPerformanceObserver.cpp -src/bun.js/bindings/webcore/JSPerformanceObserverCallback.cpp -src/bun.js/bindings/webcore/JSPerformanceObserverCustom.cpp -src/bun.js/bindings/webcore/JSPerformanceObserverEntryList.cpp -src/bun.js/bindings/webcore/JSPerformanceResourceTiming.cpp -src/bun.js/bindings/webcore/JSPerformanceServerTiming.cpp -src/bun.js/bindings/webcore/JSPerformanceTiming.cpp -src/bun.js/bindings/webcore/JSReadableByteStreamController.cpp -src/bun.js/bindings/webcore/JSReadableStream.cpp -src/bun.js/bindings/webcore/JSReadableStreamBYOBReader.cpp -src/bun.js/bindings/webcore/JSReadableStreamBYOBRequest.cpp -src/bun.js/bindings/webcore/JSReadableStreamDefaultController.cpp -src/bun.js/bindings/webcore/JSReadableStreamDefaultReader.cpp -src/bun.js/bindings/webcore/JSReadableStreamSink.cpp -src/bun.js/bindings/webcore/JSReadableStreamSource.cpp -src/bun.js/bindings/webcore/JSReadableStreamSourceCustom.cpp 
-src/bun.js/bindings/webcore/JSStructuredSerializeOptions.cpp -src/bun.js/bindings/webcore/JSTextDecoderStream.cpp -src/bun.js/bindings/webcore/JSTextEncoder.cpp -src/bun.js/bindings/webcore/JSTextEncoderStream.cpp -src/bun.js/bindings/webcore/JSTransformStream.cpp -src/bun.js/bindings/webcore/JSTransformStreamDefaultController.cpp -src/bun.js/bindings/webcore/JSURLSearchParams.cpp -src/bun.js/bindings/webcore/JSWasmStreamingCompiler.cpp -src/bun.js/bindings/webcore/JSWebSocket.cpp -src/bun.js/bindings/webcore/JSWorker.cpp -src/bun.js/bindings/webcore/JSWorkerOptions.cpp -src/bun.js/bindings/webcore/JSWritableStream.cpp -src/bun.js/bindings/webcore/JSWritableStreamDefaultController.cpp -src/bun.js/bindings/webcore/JSWritableStreamDefaultWriter.cpp -src/bun.js/bindings/webcore/JSWritableStreamSink.cpp -src/bun.js/bindings/webcore/MessageChannel.cpp -src/bun.js/bindings/webcore/MessageEvent.cpp -src/bun.js/bindings/webcore/MessagePort.cpp -src/bun.js/bindings/webcore/MessagePortChannel.cpp -src/bun.js/bindings/webcore/MessagePortChannelProvider.cpp -src/bun.js/bindings/webcore/MessagePortChannelProviderImpl.cpp -src/bun.js/bindings/webcore/MessagePortChannelRegistry.cpp -src/bun.js/bindings/webcore/NetworkLoadMetrics.cpp -src/bun.js/bindings/webcore/Performance.cpp -src/bun.js/bindings/webcore/PerformanceEntry.cpp -src/bun.js/bindings/webcore/PerformanceMark.cpp -src/bun.js/bindings/webcore/PerformanceMeasure.cpp -src/bun.js/bindings/webcore/PerformanceObserver.cpp -src/bun.js/bindings/webcore/PerformanceObserverEntryList.cpp -src/bun.js/bindings/webcore/PerformanceResourceTiming.cpp -src/bun.js/bindings/webcore/PerformanceServerTiming.cpp -src/bun.js/bindings/webcore/PerformanceTiming.cpp -src/bun.js/bindings/webcore/PerformanceUserTiming.cpp -src/bun.js/bindings/webcore/ReadableStream.cpp -src/bun.js/bindings/webcore/ReadableStreamDefaultController.cpp -src/bun.js/bindings/webcore/ReadableStreamSink.cpp -src/bun.js/bindings/webcore/ReadableStreamSource.cpp 
-src/bun.js/bindings/webcore/ResourceTiming.cpp -src/bun.js/bindings/webcore/RFC7230.cpp -src/bun.js/bindings/webcore/SerializedScriptValue.cpp -src/bun.js/bindings/webcore/ServerTiming.cpp -src/bun.js/bindings/webcore/ServerTimingParser.cpp -src/bun.js/bindings/webcore/StructuredClone.cpp -src/bun.js/bindings/webcore/TextEncoder.cpp -src/bun.js/bindings/webcore/WebCoreTypedArrayController.cpp -src/bun.js/bindings/webcore/WebSocket.cpp -src/bun.js/bindings/webcore/Worker.cpp -src/bun.js/bindings/webcore/WritableStream.cpp -src/bun.js/bindings/webcrypto/CommonCryptoDERUtilities.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithm.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBC.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBCOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFB.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFBOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTR.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTROpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCM.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCMOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KW.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KWOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmECDH.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmECDHOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSA.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSAOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmEd25519.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDF.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDFOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmHMAC.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2OpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.cpp 
-src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistryOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEP.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEPOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSS.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSSOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5OpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5OpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA1.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.cpp -src/bun.js/bindings/webcrypto/CryptoAlgorithmX25519.cpp -src/bun.js/bindings/webcrypto/CryptoDigest.cpp -src/bun.js/bindings/webcrypto/CryptoKey.cpp -src/bun.js/bindings/webcrypto/CryptoKeyAES.cpp -src/bun.js/bindings/webcrypto/CryptoKeyEC.cpp -src/bun.js/bindings/webcrypto/CryptoKeyECOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoKeyHMAC.cpp -src/bun.js/bindings/webcrypto/CryptoKeyOKP.cpp -src/bun.js/bindings/webcrypto/CryptoKeyOKPOpenSSL.cpp -src/bun.js/bindings/webcrypto/CryptoKeyRaw.cpp -src/bun.js/bindings/webcrypto/CryptoKeyRSA.cpp -src/bun.js/bindings/webcrypto/CryptoKeyRSAComponents.cpp -src/bun.js/bindings/webcrypto/CryptoKeyRSAOpenSSL.cpp -src/bun.js/bindings/webcrypto/JSAesCbcCfbParams.cpp -src/bun.js/bindings/webcrypto/JSAesCtrParams.cpp -src/bun.js/bindings/webcrypto/JSAesGcmParams.cpp -src/bun.js/bindings/webcrypto/JSAesKeyParams.cpp -src/bun.js/bindings/webcrypto/JSCryptoAesKeyAlgorithm.cpp -src/bun.js/bindings/webcrypto/JSCryptoAlgorithmParameters.cpp -src/bun.js/bindings/webcrypto/JSCryptoEcKeyAlgorithm.cpp -src/bun.js/bindings/webcrypto/JSCryptoHmacKeyAlgorithm.cpp 
-src/bun.js/bindings/webcrypto/JSCryptoKey.cpp -src/bun.js/bindings/webcrypto/JSCryptoKeyAlgorithm.cpp -src/bun.js/bindings/webcrypto/JSCryptoKeyPair.cpp -src/bun.js/bindings/webcrypto/JSCryptoKeyUsage.cpp -src/bun.js/bindings/webcrypto/JSCryptoRsaHashedKeyAlgorithm.cpp -src/bun.js/bindings/webcrypto/JSCryptoRsaKeyAlgorithm.cpp -src/bun.js/bindings/webcrypto/JSEcdhKeyDeriveParams.cpp -src/bun.js/bindings/webcrypto/JSEcdsaParams.cpp -src/bun.js/bindings/webcrypto/JSEcKeyParams.cpp -src/bun.js/bindings/webcrypto/JSHkdfParams.cpp -src/bun.js/bindings/webcrypto/JSHmacKeyParams.cpp -src/bun.js/bindings/webcrypto/JSJsonWebKey.cpp -src/bun.js/bindings/webcrypto/JSPbkdf2Params.cpp -src/bun.js/bindings/webcrypto/JSRsaHashedImportParams.cpp -src/bun.js/bindings/webcrypto/JSRsaHashedKeyGenParams.cpp -src/bun.js/bindings/webcrypto/JSRsaKeyGenParams.cpp -src/bun.js/bindings/webcrypto/JSRsaOaepParams.cpp -src/bun.js/bindings/webcrypto/JSRsaOtherPrimesInfo.cpp -src/bun.js/bindings/webcrypto/JSRsaPssParams.cpp -src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp -src/bun.js/bindings/webcrypto/JSX25519Params.cpp -src/bun.js/bindings/webcrypto/OpenSSLUtilities.cpp -src/bun.js/bindings/webcrypto/PhonyWorkQueue.cpp -src/bun.js/bindings/webcrypto/SerializedCryptoKeyWrapOpenSSL.cpp -src/bun.js/bindings/webcrypto/SubtleCrypto.cpp -src/bun.js/bindings/workaround-missing-symbols.cpp -src/bun.js/bindings/wtf-bindings.cpp -src/bun.js/bindings/ZigGeneratedCode.cpp -src/bun.js/bindings/ZigGlobalObject.cpp -src/bun.js/bindings/ZigSourceProvider.cpp -src/bun.js/modules/NodeModuleModule.cpp -src/bun.js/modules/NodeTTYModule.cpp -src/bun.js/modules/NodeUtilTypesModule.cpp -src/bun.js/modules/ObjectModule.cpp -src/deps/libuwsockets.cpp -src/io/io_darwin.cpp -src/vm/Semaphore.cpp -src/vm/SigintWatcher.cpp diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index 197788ca78..ca700a2ce4 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ 
option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION f474428677de1fafaf13bb3b9a050fe3504dda25) + set(WEBKIT_VERSION 0ddf6f47af0a9782a354f61e06d7f83d097d9f84) endif() string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX) diff --git a/docs/api/sql.md b/docs/api/sql.md index 385fc6c3d1..01d4d8acd3 100644 --- a/docs/api/sql.md +++ b/docs/api/sql.md @@ -604,13 +604,12 @@ const db = new SQL({ connectionTimeout: 30, // Timeout when establishing new connections // SSL/TLS options - ssl: "prefer", // or "disable", "require", "verify-ca", "verify-full" - // tls: { - // rejectUnauthorized: true, - // ca: "path/to/ca.pem", - // key: "path/to/key.pem", - // cert: "path/to/cert.pem", - // }, + tls: { + rejectUnauthorized: true, + ca: "path/to/ca.pem", + key: "path/to/key.pem", + cert: "path/to/cert.pem", + }, // Callbacks onconnect: client => { diff --git a/docs/nav.ts b/docs/nav.ts index 90514e1eca..0e7c91bed0 100644 --- a/docs/nav.ts +++ b/docs/nav.ts @@ -407,6 +407,9 @@ export default { page("api/cc", "C Compiler", { description: `Build & run native C from JavaScript with Bun's native C compiler API`, }), // "`bun:ffi`"), + page("api/secrets", "Secrets", { + description: `Store and retrieve sensitive credentials securely using the operating system's native credential storage APIs.`, + }), // "`Bun.secrets`"), page("cli/test", "Testing", { description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`, }), // "`bun:test`"), diff --git a/docs/test/writing.md b/docs/test/writing.md index f61e911426..1f21bfaa5d 100644 --- a/docs/test/writing.md +++ b/docs/test/writing.md @@ -756,3 +756,76 @@ Bun implements the following matchers. 
Full Jest compatibility is on the roadmap - [`.toThrowErrorMatchingInlineSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchinginlinesnapshotinlinesnapshot) {% /table %} + +## TypeScript Type Safety + +Bun's test runner provides enhanced TypeScript support with intelligent type checking for your test assertions. The type system helps catch potential bugs at compile time while still allowing flexibility when needed. + +### Strict Type Checking by Default + +By default, Bun's test matchers enforce strict type checking between the actual value and expected value: + +```ts +import { expect, test } from "bun:test"; + +test("strict typing", () => { + const str = "hello"; + const num = 42; + + expect(str).toBe("hello"); // ✅ OK: string to string + expect(num).toBe(42); // ✅ OK: number to number + expect(str).toBe(42); // ❌ TypeScript error: string vs number +}); +``` + +This helps catch common mistakes where you might accidentally compare values of different types. + +### Relaxed Type Checking with Type Parameters + +Sometimes you need more flexibility in your tests, especially when working with: + +- Dynamic data from APIs +- Polymorphic functions that can return multiple types +- Generic utility functions +- Migration of existing test suites + +For these cases, you can "opt out" of strict type checking by providing an explicit type parameter to matcher methods: + +```ts +import { expect, test } from "bun:test"; + +test("relaxed typing with type parameters", () => { + const value: unknown = getSomeValue(); + + // These would normally cause TypeScript errors, but type parameters allow them: + expect(value).toBe(42); // No TS error, runtime check still works + expect(value).toEqual("hello"); // No TS error, runtime check still works + expect(value).toStrictEqual(true); // No TS error, runtime check still works +}); + +test("useful for dynamic data", () => { + const apiResponse: any = { status: "success" }; + + // Without type parameter: TypeScript error (any vs 
string) + // expect(apiResponse.status).toBe("success"); + + // With type parameter: No TypeScript error, runtime assertion still enforced + expect(apiResponse.status).toBe("success"); // ✅ OK +}); +``` + +### Migration from Looser Type Systems + +If migrating from a test framework with looser TypeScript integration, you can use type parameters as a stepping stone: + +```ts +// Old Jest test that worked but wasn't type-safe +expect(response.data).toBe(200); // No type error in some setups + +// Bun equivalent with explicit typing during migration +expect(response.data).toBe(200); // Explicit about expected type + +// Ideal Bun test after refactoring +const statusCode: number = response.data; +expect(statusCode).toBe(200); // Type-safe without explicit parameter +``` diff --git a/packages/bun-types/globals.d.ts b/packages/bun-types/globals.d.ts index ab3685cfff..56fb8515d9 100644 --- a/packages/bun-types/globals.d.ts +++ b/packages/bun-types/globals.d.ts @@ -1564,6 +1564,12 @@ declare var AbortController: Bun.__internal.UseLibDomIfAvailable< } >; +interface AbortSignal extends EventTarget { + readonly aborted: boolean; + onabort: ((this: AbortSignal, ev: Event) => any) | null; + readonly reason: any; + throwIfAborted(): void; +} declare var AbortSignal: Bun.__internal.UseLibDomIfAvailable< "AbortSignal", { @@ -1948,3 +1954,21 @@ declare namespace fetch { ): void; } //#endregion + +interface RegExpConstructor { + /** + * Escapes any potential regex syntax characters in a string, and returns a + * new string that can be safely used as a literal pattern for the RegExp() + * constructor. 
+ * + * [MDN Reference](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/escape) + * + * @example + * ```ts + * const re = new RegExp(RegExp.escape("foo.bar")); + * re.test("foo.bar"); // true + * re.test("foo!bar"); // false + * ``` + */ + escape(string: string): string; +} diff --git a/packages/bun-types/sql.d.ts b/packages/bun-types/sql.d.ts index b792170381..7b0526b380 100644 --- a/packages/bun-types/sql.d.ts +++ b/packages/bun-types/sql.d.ts @@ -41,22 +41,22 @@ declare module "bun" { class PostgresError extends SQLError { public readonly code: string; - public readonly errno: string | undefined; - public readonly detail: string | undefined; - public readonly hint: string | undefined; - public readonly severity: string | undefined; - public readonly position: string | undefined; - public readonly internalPosition: string | undefined; - public readonly internalQuery: string | undefined; - public readonly where: string | undefined; - public readonly schema: string | undefined; - public readonly table: string | undefined; - public readonly column: string | undefined; - public readonly dataType: string | undefined; - public readonly constraint: string | undefined; - public readonly file: string | undefined; - public readonly line: string | undefined; - public readonly routine: string | undefined; + public readonly errno?: string | undefined; + public readonly detail?: string | undefined; + public readonly hint?: string | undefined; + public readonly severity?: string | undefined; + public readonly position?: string | undefined; + public readonly internalPosition?: string | undefined; + public readonly internalQuery?: string | undefined; + public readonly where?: string | undefined; + public readonly schema?: string | undefined; + public readonly table?: string | undefined; + public readonly column?: string | undefined; + public readonly dataType?: string | undefined; + public readonly constraint?: string | undefined; + public 
readonly file?: string | undefined; + public readonly line?: string | undefined; + public readonly routine?: string | undefined; constructor( message: string, @@ -84,8 +84,8 @@ declare module "bun" { class MySQLError extends SQLError { public readonly code: string; - public readonly errno: number | undefined; - public readonly sqlState: string | undefined; + public readonly errno?: number | undefined; + public readonly sqlState?: string | undefined; constructor(message: string, options: { code: string; errno: number | undefined; sqlState: string | undefined }); } @@ -143,13 +143,13 @@ declare module "bun" { /** * Database server hostname + * @deprecated Prefer {@link hostname} * @default "localhost" */ host?: string | undefined; /** - * Database server hostname (alias for host) - * @deprecated Prefer {@link host} + * Database server hostname * @default "localhost" */ hostname?: string | undefined; @@ -264,13 +264,14 @@ declare module "bun" { * Whether to use TLS/SSL for the connection * @default false */ - tls?: TLSOptions | boolean | undefined; + tls?: Bun.BunFile | TLSOptions | boolean | undefined; /** * Whether to use TLS/SSL for the connection (alias for tls) + * @deprecated Prefer {@link tls} * @default false */ - ssl?: TLSOptions | boolean | undefined; + ssl?: Bun.BunFile | TLSOptions | boolean | undefined; /** * Unix domain socket path for connection diff --git a/packages/bun-types/test.d.ts b/packages/bun-types/test.d.ts index 4bfa684980..9bd2ddaa81 100644 --- a/packages/bun-types/test.d.ts +++ b/packages/bun-types/test.d.ts @@ -14,11 +14,6 @@ * ``` */ declare module "bun:test" { - /** - * -- Mocks -- - * - * @category Testing - */ export type Mock any> = JestMock.Mock; export const mock: { @@ -588,7 +583,9 @@ declare module "bun:test" { * @param customFailMessage an optional custom message to display if the test fails. 
* */ - (actual?: T, customFailMessage?: string): Matchers; + (actual?: never, customFailMessage?: string): Matchers; + (actual: T, customFailMessage?: string): Matchers; + (actual?: T, customFailMessage?: string): Matchers; /** * Access to negated asymmetric matchers. @@ -906,6 +903,7 @@ declare module "bun:test" { * @param message the message to display if the test fails (optional) */ pass: (message?: string) => void; + /** * Assertion which fails. * @@ -917,6 +915,7 @@ declare module "bun:test" { * expect().not.fail("hi"); */ fail: (message?: string) => void; + /** * Asserts that a value equals what is expected. * @@ -930,9 +929,15 @@ declare module "bun:test" { * expect([123]).toBe([123]); // fail, use toEqual() * expect(3 + 0.14).toBe(3.14); // fail, use toBeCloseTo() * + * // TypeScript errors: + * expect("hello").toBe(3.14); // typescript error + fail + * expect("hello").toBe(3.14); // no typescript error, but still fails + * * @param expected the expected value */ toBe(expected: T): void; + toBe(expected: NoInfer): void; + /** * Asserts that a number is odd. * @@ -942,6 +947,7 @@ declare module "bun:test" { * expect(2).not.toBeOdd(); */ toBeOdd(): void; + /** * Asserts that a number is even. * @@ -951,6 +957,7 @@ declare module "bun:test" { * expect(1).not.toBeEven(); */ toBeEven(): void; + /** * Asserts that value is close to the expected by floating point precision. * @@ -969,6 +976,7 @@ declare module "bun:test" { * @param numDigits the number of digits to check after the decimal point. Default is `2` */ toBeCloseTo(expected: number, numDigits?: number): void; + /** * Asserts that a value is deeply equal to what is expected. * @@ -981,6 +989,8 @@ declare module "bun:test" { * @param expected the expected value */ toEqual(expected: T): void; + toEqual(expected: NoInfer): void; + /** * Asserts that a value is deeply and strictly equal to * what is expected. 
@@ -1005,6 +1015,8 @@ declare module "bun:test" { * @param expected the expected value */ toStrictEqual(expected: T): void; + toStrictEqual(expected: NoInfer): void; + /** * Asserts that the value is deep equal to an element in the expected array. * @@ -1017,7 +1029,9 @@ declare module "bun:test" { * * @param expected the expected value */ - toBeOneOf(expected: Array | Iterable): void; + toBeOneOf(expected: Iterable): void; + toBeOneOf(expected: NoInfer>): void; + /** * Asserts that a value contains what is expected. * @@ -1031,7 +1045,9 @@ declare module "bun:test" { * * @param expected the expected value */ - toContain(expected: unknown): void; + toContain(expected: T extends Iterable ? U : T): void; + toContain(expected: NoInfer ? U : X>): void; + /** * Asserts that an `object` contains a key. * @@ -1045,7 +1061,9 @@ declare module "bun:test" { * * @param expected the expected value */ - toContainKey(expected: unknown): void; + toContainKey(expected: keyof T): void; + toContainKey(expected: NoInfer): void; + /** * Asserts that an `object` contains all the provided keys. * @@ -1060,7 +1078,9 @@ declare module "bun:test" { * * @param expected the expected value */ - toContainAllKeys(expected: unknown): void; + toContainAllKeys(expected: Array): void; + toContainAllKeys(expected: NoInfer>): void; + /** * Asserts that an `object` contains at least one of the provided keys. * Asserts that an `object` contains all the provided keys. @@ -1075,12 +1095,16 @@ declare module "bun:test" { * * @param expected the expected value */ - toContainAnyKeys(expected: unknown): void; + toContainAnyKeys(expected: Array): void; + toContainAnyKeys(expected: NoInfer>): void; /** * Asserts that an `object` contain the provided value. * - * The value must be an object + * This method is deep and will look through child properties to find the + * expected value. + * + * The input value must be an object. 
* * @example * const shallow = { hello: "world" }; @@ -1104,11 +1128,16 @@ declare module "bun:test" { * * @param expected the expected value */ + // Contributor note: In theory we could type this better but it would be a + // slow union to compute... toContainValue(expected: unknown): void; /** * Asserts that an `object` contain the provided value. * + * This is the same as {@link toContainValue}, but accepts an array of + * values instead. + * * The value must be an object * * @example @@ -1118,7 +1147,7 @@ declare module "bun:test" { * expect(o).not.toContainValues(['qux', 'foo']); * @param expected the expected value */ - toContainValues(expected: unknown): void; + toContainValues(expected: Array): void; /** * Asserts that an `object` contain all the provided values. @@ -1132,7 +1161,7 @@ declare module "bun:test" { * expect(o).not.toContainAllValues(['bar', 'foo']); * @param expected the expected value */ - toContainAllValues(expected: unknown): void; + toContainAllValues(expected: Array): void; /** * Asserts that an `object` contain any provided value. @@ -1147,7 +1176,7 @@ declare module "bun:test" { * expect(o).not.toContainAnyValues(['qux']); * @param expected the expected value */ - toContainAnyValues(expected: unknown): void; + toContainAnyValues(expected: Array): void; /** * Asserts that an `object` contains all the provided keys. @@ -1159,7 +1188,9 @@ declare module "bun:test" { * * @param expected the expected value */ - toContainKeys(expected: unknown): void; + toContainKeys(expected: Array): void; + toContainKeys(expected: NoInfer>): void; + /** * Asserts that a value contains and equals what is expected. * @@ -1172,7 +1203,9 @@ declare module "bun:test" { * * @param expected the expected value */ - toContainEqual(expected: unknown): void; + toContainEqual(expected: T extends Iterable ? U : T): void; + toContainEqual(expected: NoInfer ? U : X>): void; + /** * Asserts that a value has a `.length` property * that is equal to the expected length. 
@@ -1184,6 +1217,7 @@ declare module "bun:test" { * @param length the expected length */ toHaveLength(length: number): void; + /** * Asserts that a value has a property with the * expected name, and value if provided. @@ -1198,6 +1232,7 @@ declare module "bun:test" { * @param value the expected property value, if provided */ toHaveProperty(keyPath: string | number | Array, value?: unknown): void; + /** * Asserts that a value is "truthy". * @@ -1210,6 +1245,7 @@ declare module "bun:test" { * expect({}).toBeTruthy(); */ toBeTruthy(): void; + /** * Asserts that a value is "falsy". * @@ -1222,6 +1258,7 @@ declare module "bun:test" { * expect({}).toBeTruthy(); */ toBeFalsy(): void; + /** * Asserts that a value is defined. (e.g. is not `undefined`) * @@ -1230,6 +1267,7 @@ declare module "bun:test" { * expect(undefined).toBeDefined(); // fail */ toBeDefined(): void; + /** * Asserts that the expected value is an instance of value * @@ -1238,6 +1276,7 @@ declare module "bun:test" { * expect(null).toBeInstanceOf(Array); // fail */ toBeInstanceOf(value: unknown): void; + /** * Asserts that a value is `undefined`. * @@ -1246,6 +1285,7 @@ declare module "bun:test" { * expect(null).toBeUndefined(); // fail */ toBeUndefined(): void; + /** * Asserts that a value is `null`. * @@ -1254,6 +1294,7 @@ declare module "bun:test" { * expect(undefined).toBeNull(); // fail */ toBeNull(): void; + /** * Asserts that a value is `NaN`. * @@ -1265,6 +1306,7 @@ declare module "bun:test" { * expect("notanumber").toBeNaN(); // fail */ toBeNaN(): void; + /** * Asserts that a value is a `number` and is greater than the expected value. * @@ -1276,6 +1318,7 @@ declare module "bun:test" { * @param expected the expected number */ toBeGreaterThan(expected: number | bigint): void; + /** * Asserts that a value is a `number` and is greater than or equal to the expected value. 
* @@ -1287,6 +1330,7 @@ declare module "bun:test" { * @param expected the expected number */ toBeGreaterThanOrEqual(expected: number | bigint): void; + /** * Asserts that a value is a `number` and is less than the expected value. * @@ -1298,6 +1342,7 @@ declare module "bun:test" { * @param expected the expected number */ toBeLessThan(expected: number | bigint): void; + /** * Asserts that a value is a `number` and is less than or equal to the expected value. * @@ -1309,6 +1354,7 @@ declare module "bun:test" { * @param expected the expected number */ toBeLessThanOrEqual(expected: number | bigint): void; + /** * Asserts that a function throws an error. * @@ -1329,6 +1375,7 @@ declare module "bun:test" { * @param expected the expected error, error message, or error pattern */ toThrow(expected?: unknown): void; + /** * Asserts that a function throws an error. * @@ -1350,6 +1397,7 @@ declare module "bun:test" { * @alias toThrow */ toThrowError(expected?: unknown): void; + /** * Asserts that a value matches a regular expression or includes a substring. * @@ -1360,6 +1408,7 @@ declare module "bun:test" { * @param expected the expected substring or pattern. */ toMatch(expected: string | RegExp): void; + /** * Asserts that a value matches the most recent snapshot. * @@ -1368,6 +1417,7 @@ declare module "bun:test" { * @param hint Hint used to identify the snapshot in the snapshot file. */ toMatchSnapshot(hint?: string): void; + /** * Asserts that a value matches the most recent snapshot. * @@ -1380,6 +1430,7 @@ declare module "bun:test" { * @param hint Hint used to identify the snapshot in the snapshot file. */ toMatchSnapshot(propertyMatchers?: object, hint?: string): void; + /** * Asserts that a value matches the most recent inline snapshot. * @@ -1390,6 +1441,7 @@ declare module "bun:test" { * @param value The latest automatically-updated snapshot value. */ toMatchInlineSnapshot(value?: string): void; + /** * Asserts that a value matches the most recent inline snapshot. 
* @@ -1405,6 +1457,7 @@ declare module "bun:test" { * @param value The latest automatically-updated snapshot value. */ toMatchInlineSnapshot(propertyMatchers?: object, value?: string): void; + /** * Asserts that a function throws an error matching the most recent snapshot. * @@ -1418,6 +1471,7 @@ declare module "bun:test" { * @param value The latest automatically-updated snapshot value. */ toThrowErrorMatchingSnapshot(hint?: string): void; + /** * Asserts that a function throws an error matching the most recent snapshot. * @@ -1431,6 +1485,7 @@ declare module "bun:test" { * @param value The latest automatically-updated snapshot value. */ toThrowErrorMatchingInlineSnapshot(value?: string): void; + /** * Asserts that an object matches a subset of properties. * @@ -1441,6 +1496,7 @@ declare module "bun:test" { * @param subset Subset of properties to match with. */ toMatchObject(subset: object): void; + /** * Asserts that a value is empty. * @@ -1451,6 +1507,7 @@ declare module "bun:test" { * expect(new Set()).toBeEmpty(); */ toBeEmpty(): void; + /** * Asserts that a value is an empty `object`. * @@ -1459,6 +1516,7 @@ declare module "bun:test" { * expect({ a: 'hello' }).not.toBeEmptyObject(); */ toBeEmptyObject(): void; + /** * Asserts that a value is `null` or `undefined`. * @@ -1467,6 +1525,7 @@ declare module "bun:test" { * expect(undefined).toBeNil(); */ toBeNil(): void; + /** * Asserts that a value is a `array`. * @@ -1477,6 +1536,7 @@ declare module "bun:test" { * expect({}).not.toBeArray(); */ toBeArray(): void; + /** * Asserts that a value is a `array` of a certain length. * @@ -1488,6 +1548,7 @@ declare module "bun:test" { * expect({}).not.toBeArrayOfSize(0); */ toBeArrayOfSize(size: number): void; + /** * Asserts that a value is a `boolean`. * @@ -1498,6 +1559,7 @@ declare module "bun:test" { * expect(0).not.toBeBoolean(); */ toBeBoolean(): void; + /** * Asserts that a value is `true`. 
* @@ -1507,6 +1569,7 @@ declare module "bun:test" { * expect(1).not.toBeTrue(); */ toBeTrue(): void; + /** * Asserts that a value matches a specific type. * @@ -1517,6 +1580,7 @@ declare module "bun:test" { * expect([]).not.toBeTypeOf("boolean"); */ toBeTypeOf(type: "bigint" | "boolean" | "function" | "number" | "object" | "string" | "symbol" | "undefined"): void; + /** * Asserts that a value is `false`. * @@ -1526,6 +1590,7 @@ declare module "bun:test" { * expect(0).not.toBeFalse(); */ toBeFalse(): void; + /** * Asserts that a value is a `number`. * @@ -1536,6 +1601,7 @@ declare module "bun:test" { * expect(BigInt(1)).not.toBeNumber(); */ toBeNumber(): void; + /** * Asserts that a value is a `number`, and is an integer. * @@ -1545,6 +1611,7 @@ declare module "bun:test" { * expect(NaN).not.toBeInteger(); */ toBeInteger(): void; + /** * Asserts that a value is an `object`. * @@ -1554,6 +1621,7 @@ declare module "bun:test" { * expect(NaN).not.toBeObject(); */ toBeObject(): void; + /** * Asserts that a value is a `number`, and is not `NaN` or `Infinity`. * @@ -1564,6 +1632,7 @@ declare module "bun:test" { * expect(Infinity).not.toBeFinite(); */ toBeFinite(): void; + /** * Asserts that a value is a positive `number`. * @@ -1573,6 +1642,7 @@ declare module "bun:test" { * expect(NaN).not.toBePositive(); */ toBePositive(): void; + /** * Asserts that a value is a negative `number`. * @@ -1582,6 +1652,7 @@ declare module "bun:test" { * expect(NaN).not.toBeNegative(); */ toBeNegative(): void; + /** * Asserts that a value is a number between a start and end value. * @@ -1589,6 +1660,7 @@ declare module "bun:test" { * @param end the end number (exclusive) */ toBeWithin(start: number, end: number): void; + /** * Asserts that a value is equal to the expected string, ignoring any whitespace. * @@ -1599,6 +1671,7 @@ declare module "bun:test" { * @param expected the expected string */ toEqualIgnoringWhitespace(expected: string): void; + /** * Asserts that a value is a `symbol`. 
* @@ -1607,6 +1680,7 @@ declare module "bun:test" { * expect("foo").not.toBeSymbol(); */ toBeSymbol(): void; + /** * Asserts that a value is a `function`. * @@ -1614,6 +1688,7 @@ declare module "bun:test" { * expect(() => {}).toBeFunction(); */ toBeFunction(): void; + /** * Asserts that a value is a `Date` object. * @@ -1625,6 +1700,7 @@ declare module "bun:test" { * expect("2020-03-01").not.toBeDate(); */ toBeDate(): void; + /** * Asserts that a value is a valid `Date` object. * @@ -1634,6 +1710,7 @@ declare module "bun:test" { * expect("2020-03-01").not.toBeValidDate(); */ toBeValidDate(): void; + /** * Asserts that a value is a `string`. * @@ -1643,6 +1720,7 @@ declare module "bun:test" { * expect(123).not.toBeString(); */ toBeString(): void; + /** * Asserts that a value includes a `string`. * @@ -1651,12 +1729,14 @@ declare module "bun:test" { * @param expected the expected substring */ toInclude(expected: string): void; + /** * Asserts that a value includes a `string` {times} times. * @param expected the expected substring * @param times the number of times the substring should occur */ toIncludeRepeated(expected: string, times: number): void; + /** * Checks whether a value satisfies a custom condition. * @param {Function} predicate - The custom condition to be satisfied. It should be a function that takes a value as an argument (in this case the value from expect) and returns a boolean. @@ -1668,18 +1748,21 @@ declare module "bun:test" { * @link https://jest-extended.jestcommunity.dev/docs/matchers/toSatisfy */ toSatisfy(predicate: (value: T) => boolean): void; + /** * Asserts that a value starts with a `string`. * * @param expected the string to start with */ toStartWith(expected: string): void; + /** * Asserts that a value ends with a `string`. * * @param expected the string to end with */ toEndWith(expected: string): void; + /** * Ensures that a mock function has returned successfully at least once. 
* @@ -1720,42 +1803,51 @@ declare module "bun:test" { * Ensures that a mock function is called. */ toHaveBeenCalled(): void; + /** * Ensures that a mock function is called an exact number of times. * @alias toHaveBeenCalled */ toBeCalled(): void; + /** * Ensures that a mock function is called an exact number of times. */ toHaveBeenCalledTimes(expected: number): void; + /** * Ensure that a mock function is called with specific arguments. * @alias toHaveBeenCalledTimes */ toBeCalledTimes(expected: number): void; + /** * Ensure that a mock function is called with specific arguments. */ toHaveBeenCalledWith(...expected: unknown[]): void; + /** * Ensure that a mock function is called with specific arguments. * @alias toHaveBeenCalledWith */ toBeCalledWith(...expected: unknown[]): void; + /** * Ensure that a mock function is called with specific arguments for the last call. */ toHaveBeenLastCalledWith(...expected: unknown[]): void; + /** * Ensure that a mock function is called with specific arguments for the nth call. * @alias toHaveBeenCalledWith */ lastCalledWith(...expected: unknown[]): void; + /** * Ensure that a mock function is called with specific arguments for the nth call. */ toHaveBeenNthCalledWith(n: number, ...expected: unknown[]): void; + /** * Ensure that a mock function is called with specific arguments for the nth call. 
* @alias toHaveBeenCalledWith diff --git a/scripts/buildkite-slow-tests.js b/scripts/buildkite-slow-tests.js new file mode 100755 index 0000000000..ccbbde9678 --- /dev/null +++ b/scripts/buildkite-slow-tests.js @@ -0,0 +1,107 @@ +#!/usr/bin/env bun + +import { readFileSync } from "fs"; + +function parseLogFile(filename) { + const testDetails = new Map(); // Track individual attempts and total for each test + let currentTest = null; + let startTime = null; + + // Pattern to match test group start: --- [90m[N/TOTAL][0m test/path + // Note: there are escape sequences before _bk + const startPattern = /_bk;t=(\d+).*?--- .*?\[90m\[(\d+)\/(\d+)\].*?\[0m (.+)/; + + const content = readFileSync(filename, "utf-8"); + const lines = content.split("\n"); + + for (const line of lines) { + const match = line.match(startPattern); + if (match) { + // If we have a previous test, calculate its duration + if (currentTest && startTime) { + const endTime = parseInt(match[1]); + const duration = endTime - startTime; + + // Extract attempt info - match the actual ANSI pattern + const attemptMatch = currentTest.match(/\s+\x1b\[90m\[attempt #(\d+)\]\x1b\[0m$/); + const cleanName = currentTest.replace(/\s+\x1b\[90m\[attempt #\d+\]\x1b\[0m$/, "").trim(); + const attemptNum = attemptMatch ? 
parseInt(attemptMatch[1]) : 1; + + if (!testDetails.has(cleanName)) { + testDetails.set(cleanName, { total: 0, attempts: [] }); + } + + const testInfo = testDetails.get(cleanName); + testInfo.total += duration; + testInfo.attempts.push({ attempt: attemptNum, duration }); + } + + // Start new test + startTime = parseInt(match[1]); + currentTest = match[4].trim(); + } + } + + // Convert to array and sort by total duration + const testGroups = Array.from(testDetails.entries()) + .map(([name, info]) => ({ + name, + totalDuration: info.total, + attempts: info.attempts.sort((a, b) => a.attempt - b.attempt), + })) + .sort((a, b) => b.totalDuration - a.totalDuration); + + return testGroups; +} + +function formatAttempts(attempts) { + if (attempts.length <= 1) return ""; + + const attemptStrings = attempts.map( + ({ attempt, duration }) => `${(duration / 1000).toFixed(1)}s attempt #${attempt}`, + ); + return ` [${attemptStrings.join(", ")}]`; +} + +if (process.argv.length !== 3) { + console.log("Usage: bun parse_test_logs.js "); + process.exit(1); +} + +const filename = process.argv[2]; +const testGroups = parseLogFile(filename); + +const totalTime = testGroups.reduce((sum, group) => sum + group.totalDuration, 0) / 1000; +const avgTime = testGroups.length > 0 ? 
totalTime / testGroups.length : 0; + +console.log( + `## Slowest Tests Analysis - ${testGroups.length} tests (${totalTime.toFixed(1)}s total, ${avgTime.toFixed(2)}s avg)`, +); +console.log(""); + +// Top 10 summary +console.log("**Top 10 slowest tests:**"); +for (let i = 0; i < Math.min(10, testGroups.length); i++) { + const { name, totalDuration, attempts } = testGroups[i]; + const durationSec = totalDuration / 1000; + const testName = name.replace("test/", "").replace(".test.ts", "").replace(".test.js", ""); + const attemptInfo = formatAttempts(attempts); + console.log(`- **${durationSec.toFixed(1)}s** ${testName}${attemptInfo}`); +} + +console.log(""); + +// Filter tests > 1 second +const slowTests = testGroups.filter(test => test.totalDuration > 1000); + +console.log("```"); +console.log(`All tests > 1s (${slowTests.length} tests):`); + +for (let i = 0; i < slowTests.length; i++) { + const { name, totalDuration, attempts } = slowTests[i]; + const durationSec = totalDuration / 1000; + const attemptInfo = formatAttempts(attempts); + console.log(`${(i + 1).toString().padStart(3)}. 
${durationSec.toFixed(2).padStart(7)}s ${name}${attemptInfo}`); +} + +console.log("```"); diff --git a/src/HTMLScanner.zig b/src/HTMLScanner.zig index f9edb06d0d..7e305c7b4c 100644 --- a/src/HTMLScanner.zig +++ b/src/HTMLScanner.zig @@ -18,7 +18,7 @@ pub fn deinit(this: *HTMLScanner) void { for (this.import_records.slice()) |*record| { this.allocator.free(record.path.text); } - this.import_records.deinitWithAllocator(this.allocator); + this.import_records.deinit(this.allocator); } fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKind) !void { @@ -44,7 +44,7 @@ fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKi .range = logger.Range.None, }; - try this.import_records.push(this.allocator, record); + try this.import_records.append(this.allocator, record); } const debug = bun.Output.scoped(.HTMLScanner, .hidden); diff --git a/src/StandaloneModuleGraph.zig b/src/StandaloneModuleGraph.zig index b42b5182ac..40af49d457 100644 --- a/src/StandaloneModuleGraph.zig +++ b/src/StandaloneModuleGraph.zig @@ -44,11 +44,20 @@ pub const StandaloneModuleGraph = struct { }; } - pub fn isBunStandaloneFilePath(str: []const u8) bool { + pub fn isBunStandaloneFilePathCanonicalized(str: []const u8) bool { return bun.strings.hasPrefixComptime(str, base_path) or (Environment.isWindows and bun.strings.hasPrefixComptime(str, base_public_path)); } + pub fn isBunStandaloneFilePath(str: []const u8) bool { + if (Environment.isWindows) { + // On Windows, remove NT path prefixes before checking + const canonicalized = strings.withoutNTPrefix(u8, str); + return isBunStandaloneFilePathCanonicalized(canonicalized); + } + return isBunStandaloneFilePathCanonicalized(str); + } + pub fn entryPoint(this: *const StandaloneModuleGraph) *File { return &this.files.values()[this.entry_point_id]; } @@ -980,27 +989,54 @@ pub const StandaloneModuleGraph = struct { } if (Environment.isWindows) { - var outfile_buf: bun.OSPathBuffer = undefined; - const 
outfile_slice = brk: { - const outfile_w = bun.strings.toWPathNormalized(&outfile_buf, std.fs.path.basenameWindows(outfile)); - bun.assert(outfile_w.ptr == &outfile_buf); - const outfile_buf_u16 = bun.reinterpretSlice(u16, &outfile_buf); - outfile_buf_u16[outfile_w.len] = 0; - break :brk outfile_buf_u16[0..outfile_w.len :0]; + // Get the current path of the temp file + var temp_buf: bun.PathBuffer = undefined; + const temp_path = bun.getFdPath(fd, &temp_buf) catch |err| { + return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get temp file path: {s}", .{@errorName(err)}) catch "Failed to get temp file path"); }; - bun.windows.moveOpenedFileAtLoose(fd, .fromStdDir(root_dir), outfile_slice, true).unwrap() catch |err| { - _ = bun.windows.deleteOpenedFile(fd); - if (err == error.EISDIR) { - return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory"); - } else { - return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to result path: {s}", .{@errorName(err)}) catch "failed to move executable"); - } + // Build the absolute destination path + // On Windows, we need an absolute path for MoveFileExW + // Get the current working directory and join with outfile + var cwd_buf: bun.PathBuffer = undefined; + const cwd_path = bun.getcwd(&cwd_buf) catch |err| { + return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get current directory: {s}", .{@errorName(err)}) catch "Failed to get current directory"); }; + const dest_path = if (std.fs.path.isAbsolute(outfile)) + outfile + else + bun.path.joinAbsString(cwd_path, &[_][]const u8{outfile}, .auto); + // Convert paths to Windows UTF-16 + var temp_buf_w: bun.OSPathBuffer = undefined; + var dest_buf_w: bun.OSPathBuffer = undefined; + const temp_w = bun.strings.toWPathNormalized(&temp_buf_w, temp_path); + const dest_w = 
bun.strings.toWPathNormalized(&dest_buf_w, dest_path); + + // Ensure null termination + const temp_buf_u16 = bun.reinterpretSlice(u16, &temp_buf_w); + const dest_buf_u16 = bun.reinterpretSlice(u16, &dest_buf_w); + temp_buf_u16[temp_w.len] = 0; + dest_buf_u16[dest_w.len] = 0; + + // Close the file handle before moving (Windows requires this) fd.close(); fd = bun.invalid_fd; + // Move the file using MoveFileExW + if (bun.windows.kernel32.MoveFileExW(temp_buf_u16[0..temp_w.len :0].ptr, dest_buf_u16[0..dest_w.len :0].ptr, bun.windows.MOVEFILE_COPY_ALLOWED | bun.windows.MOVEFILE_REPLACE_EXISTING | bun.windows.MOVEFILE_WRITE_THROUGH) == bun.windows.FALSE) { + const err = bun.windows.Win32Error.get(); + if (err.toSystemErrno()) |sys_err| { + if (sys_err == .EISDIR) { + return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory"); + } else { + return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to {s}: {s}", .{ dest_path, @tagName(sys_err) }) catch "failed to move executable"); + } + } else { + return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to {s}", .{dest_path}) catch "failed to move executable"); + } + } + // Set Windows icon and/or metadata using unified function if (windows_options.icon != null or windows_options.title != null or @@ -1009,25 +1045,9 @@ pub const StandaloneModuleGraph = struct { windows_options.description != null or windows_options.copyright != null) { - // Need to get the full path to the executable - var full_path_buf: bun.OSPathBuffer = undefined; - const full_path = brk: { - // Get the directory path - var dir_buf: bun.PathBuffer = undefined; - const dir_path = bun.getFdPath(bun.FD.fromStdDir(root_dir), &dir_buf) catch |err| { - return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get directory path: {s}", .{@errorName(err)}) catch "Failed to get 
directory path"); - }; - - // Join with the outfile name - const full_path_str = bun.path.joinAbsString(dir_path, &[_][]const u8{outfile}, .auto); - const full_path_w = bun.strings.toWPathNormalized(&full_path_buf, full_path_str); - const buf_u16 = bun.reinterpretSlice(u16, &full_path_buf); - buf_u16[full_path_w.len] = 0; - break :brk buf_u16[0..full_path_w.len :0]; - }; - + // The file has been moved to dest_path bun.windows.rescle.setWindowsMetadata( - full_path.ptr, + dest_buf_u16[0..dest_w.len :0].ptr, windows_options.icon, windows_options.title, windows_options.publisher, diff --git a/src/allocators/MimallocArena.zig b/src/allocators/MimallocArena.zig index 0588a34821..0b6a646b86 100644 --- a/src/allocators/MimallocArena.zig +++ b/src/allocators/MimallocArena.zig @@ -78,6 +78,15 @@ pub const Borrowed = struct { else null; } + + pub fn downcast(std_alloc: std.mem.Allocator) Borrowed { + bun.assertf( + isInstance(std_alloc), + "not a MimallocArena (vtable is {*})", + .{std_alloc.vtable}, + ); + return .fromOpaque(std_alloc.ptr); + } }; const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap; diff --git a/src/ast/Ast.zig b/src/ast/Ast.zig index 9f619e4a60..9aa1386f1a 100644 --- a/src/ast/Ast.zig +++ b/src/ast/Ast.zig @@ -83,14 +83,14 @@ pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged pub fn fromParts(parts: []Part) Ast { return Ast{ - .parts = Part.List.init(parts), + .parts = Part.List.fromOwnedSlice(parts), .runtime_imports = .{}, }; } -pub fn initTest(parts: []Part) Ast { +pub fn initTest(parts: []const Part) Ast { return Ast{ - .parts = Part.List.init(parts), + .parts = Part.List.fromBorrowedSliceDangerous(parts), .runtime_imports = .{}, }; } @@ -107,9 +107,9 @@ pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void { /// Do not call this if it wasn't globally allocated! 
pub fn deinit(this: *Ast) void { // TODO: assert mimalloc-owned memory - if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator); - if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator); - if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator); + this.parts.deinit(bun.default_allocator); + this.symbols.deinit(bun.default_allocator); + this.import_records.deinit(bun.default_allocator); } pub const Class = G.Class; diff --git a/src/ast/Binding.zig b/src/ast/Binding.zig index 1b484725c7..349bd2ae99 100644 --- a/src/ast/Binding.zig +++ b/src/ast/Binding.zig @@ -56,7 +56,14 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr { }; } - return Expr.init(E.Array, E.Array{ .items = ExprNodeList.init(exprs), .is_single_line = b.is_single_line }, loc); + return Expr.init( + E.Array, + E.Array{ + .items = ExprNodeList.fromOwnedSlice(exprs), + .is_single_line = b.is_single_line, + }, + loc, + ); }, .b_object => |b| { const properties = wrapper @@ -77,7 +84,7 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr { return Expr.init( E.Object, E.Object{ - .properties = G.Property.List.init(properties), + .properties = G.Property.List.fromOwnedSlice(properties), .is_single_line = b.is_single_line, }, loc, diff --git a/src/ast/ConvertESMExportsForHmr.zig b/src/ast/ConvertESMExportsForHmr.zig index 561fdeb18c..117b170f0c 100644 --- a/src/ast/ConvertESMExportsForHmr.zig +++ b/src/ast/ConvertESMExportsForHmr.zig @@ -121,7 +121,7 @@ pub fn convertStmt(ctx: *ConvertESMExportsForHmr, p: anytype, stmt: Stmt) !void const temp_id = p.generateTempRef("default_export"); try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true }); try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 }); - try p.current_scope.generated.push(p.allocator, temp_id); + try p.current_scope.generated.append(p.allocator, temp_id); try 
ctx.export_props.append(p.allocator, .{ .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc), @@ -395,7 +395,7 @@ fn visitRefToExport( const arg1 = p.generateTempRef(symbol.original_name); try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = arg1, .is_top_level = true }); try ctx.last_part.symbol_uses.putNoClobber(p.allocator, arg1, .{ .count_estimate = 1 }); - try p.current_scope.generated.push(p.allocator, arg1); + try p.current_scope.generated.append(p.allocator, arg1); // 'get abc() { return abc }' try ctx.export_props.append(p.allocator, .{ @@ -438,7 +438,7 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P if (ctx.export_props.items.len > 0) { const obj = Expr.init(E.Object, .{ - .properties = G.Property.List.fromList(ctx.export_props), + .properties = G.Property.List.moveFromList(&ctx.export_props), }, logger.Loc.Empty); // `hmr.exports = ...` @@ -466,7 +466,7 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P .name = "reactRefreshAccept", .name_loc = .Empty, }, .Empty), - .args = .init(&.{}), + .args = .empty, }, .Empty), }, .Empty)); } @@ -474,7 +474,10 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P // Merge all part metadata into the first part. for (all_parts[0 .. 
all_parts.len - 1]) |*part| { try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols); - try ctx.last_part.import_record_indices.append(p.allocator, part.import_record_indices.slice()); + try ctx.last_part.import_record_indices.appendSlice( + p.allocator, + part.import_record_indices.slice(), + ); for (part.symbol_uses.keys(), part.symbol_uses.values()) |k, v| { const gop = try ctx.last_part.symbol_uses.getOrPut(p.allocator, k); if (!gop.found_existing) { @@ -487,13 +490,16 @@ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.P part.declared_symbols.entries.len = 0; part.tag = .dead_due_to_inlining; part.dependencies.clearRetainingCapacity(); - try part.dependencies.push(p.allocator, .{ + try part.dependencies.append(p.allocator, .{ .part_index = @intCast(all_parts.len - 1), .source_index = p.source.index, }); } - try ctx.last_part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items); + try ctx.last_part.import_record_indices.appendSlice( + p.allocator, + p.import_records_for_current_part.items, + ); try ctx.last_part.declared_symbols.appendList(p.allocator, p.declared_symbols); ctx.last_part.stmts = ctx.stmts.items; diff --git a/src/ast/E.zig b/src/ast/E.zig index a787f836f8..c6cf7cf413 100644 --- a/src/ast/E.zig +++ b/src/ast/E.zig @@ -18,7 +18,7 @@ pub const Array = struct { close_bracket_loc: logger.Loc = logger.Loc.Empty, pub fn push(this: *Array, allocator: std.mem.Allocator, item: Expr) !void { - try this.items.push(allocator, item); + try this.items.append(allocator, item); } pub inline fn slice(this: Array) []Expr { @@ -30,12 +30,13 @@ pub const Array = struct { allocator: std.mem.Allocator, estimated_count: usize, ) !ExprNodeList { - var out = try allocator.alloc( - Expr, + var out: bun.BabyList(Expr) = try .initCapacity( + allocator, // This over-allocates a little but it's fine estimated_count + @as(usize, this.items.len), ); - var remain = out; + 
out.expandToCapacity(); + var remain = out.slice(); for (this.items.slice()) |item| { switch (item.data) { .e_spread => |val| { @@ -63,7 +64,8 @@ pub const Array = struct { remain = remain[1..]; } - return ExprNodeList.init(out[0 .. out.len - remain.len]); + out.shrinkRetainingCapacity(out.len - remain.len); + return out; } pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *jsc.JSGlobalObject) ToJSError!jsc.JSValue { @@ -98,6 +100,43 @@ pub const Array = struct { pub const Unary = struct { op: Op.Code, value: ExprNodeIndex, + flags: Unary.Flags = .{}, + + pub const Flags = packed struct(u8) { + /// The expression "typeof (0, x)" must not become "typeof x" if "x" + /// is unbound because that could suppress a ReferenceError from "x". + /// + /// Also if we know a typeof operator was originally an identifier, then + /// we know that this typeof operator always has no side effects (even if + /// we consider the identifier by itself to have a side effect). + /// + /// Note that there *is* actually a case where "typeof x" can throw an error: + /// when "x" is being referenced inside of its TDZ (temporal dead zone). TDZ + /// checks are not yet handled correctly by Bun, so this possibility is + /// currently ignored. + was_originally_typeof_identifier: bool = false, + + /// Similarly the expression "delete (0, x)" must not become "delete x" + /// because that syntax is invalid in strict mode. 
We also need to make sure + /// we don't accidentally change the return value: + /// + /// Returns false: + /// "var a; delete (a)" + /// "var a = Object.freeze({b: 1}); delete (a.b)" + /// "var a = Object.freeze({b: 1}); delete (a?.b)" + /// "var a = Object.freeze({b: 1}); delete (a['b'])" + /// "var a = Object.freeze({b: 1}); delete (a?.['b'])" + /// + /// Returns true: + /// "var a; delete (0, a)" + /// "var a = Object.freeze({b: 1}); delete (true && a.b)" + /// "var a = Object.freeze({b: 1}); delete (false || a?.b)" + /// "var a = Object.freeze({b: 1}); delete (null ?? a?.['b'])" + /// + /// "var a = Object.freeze({b: 1}); delete (true ? a['b'] : a['b'])" + was_originally_delete_of_identifier_or_property_access: bool = false, + _: u6 = 0, + }; }; pub const Binary = struct { @@ -536,7 +575,7 @@ pub const Object = struct { if (asProperty(self, key)) |query| { self.properties.ptr[query.i].value = expr; } else { - try self.properties.push(allocator, .{ + try self.properties.append(allocator, .{ .key = Expr.init(E.String, E.String.init(key), expr.loc), .value = expr, }); @@ -551,7 +590,7 @@ pub const Object = struct { pub fn set(self: *const Object, key: Expr, allocator: std.mem.Allocator, value: Expr) SetError!void { if (self.hasProperty(key.data.e_string.data)) return error.Clobber; - try self.properties.push(allocator, .{ + try self.properties.append(allocator, .{ .key = key, .value = value, }); @@ -605,7 +644,7 @@ pub const Object = struct { value_ = obj; } - try self.properties.push(allocator, .{ + try self.properties.append(allocator, .{ .key = rope.head, .value = value_, }); @@ -646,7 +685,7 @@ pub const Object = struct { if (rope.next) |next| { var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc); const out = try obj.data.e_object.getOrPutObject(next, allocator); - try self.properties.push(allocator, .{ + try self.properties.append(allocator, .{ .key = rope.head, .value = obj, }); @@ -654,7 +693,7 @@ pub const Object = struct { } const 
out = Expr.init(E.Object, E.Object{}, rope.head.loc); - try self.properties.push(allocator, .{ + try self.properties.append(allocator, .{ .key = rope.head, .value = out, }); @@ -695,7 +734,7 @@ pub const Object = struct { if (rope.next) |next| { var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc); const out = try obj.data.e_object.getOrPutArray(next, allocator); - try self.properties.push(allocator, .{ + try self.properties.append(allocator, .{ .key = rope.head, .value = obj, }); @@ -703,7 +742,7 @@ pub const Object = struct { } const out = Expr.init(E.Array, E.Array{}, rope.head.loc); - try self.properties.push(allocator, .{ + try self.properties.append(allocator, .{ .key = rope.head, .value = out, }); @@ -940,6 +979,30 @@ pub const String = struct { return bun.handleOom(this.string(allocator)); } + fn stringCompareForJavaScript(comptime T: type, a: []const T, b: []const T) std.math.Order { + const a_slice = a[0..@min(a.len, b.len)]; + const b_slice = b[0..@min(a.len, b.len)]; + for (a_slice, b_slice) |a_char, b_char| { + const delta: i32 = @as(i32, a_char) - @as(i32, b_char); + if (delta != 0) { + return if (delta < 0) .lt else .gt; + } + } + return std.math.order(a.len, b.len); + } + + /// Compares two strings lexicographically for JavaScript semantics. + /// Both strings must share the same encoding (UTF-8 vs UTF-16). 
+ pub inline fn order(this: *const String, other: *const String) std.math.Order { + bun.debugAssert(this.isUTF8() == other.isUTF8()); + + if (this.isUTF8()) { + return stringCompareForJavaScript(u8, this.data, other.data); + } else { + return stringCompareForJavaScript(u16, this.slice16(), other.slice16()); + } + } + pub var empty = String{}; pub var @"true" = String{ .data = "true" }; pub var @"false" = String{ .data = "false" }; diff --git a/src/ast/Expr.zig b/src/ast/Expr.zig index 7f4ed148ec..b1814aab95 100644 --- a/src/ast/Expr.zig +++ b/src/ast/Expr.zig @@ -273,13 +273,10 @@ pub fn set(expr: *Expr, allocator: std.mem.Allocator, name: string, value: Expr) } } - var new_props = expr.data.e_object.properties.listManaged(allocator); - try new_props.append(.{ + try expr.data.e_object.properties.append(allocator, .{ .key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty), .value = value, }); - - expr.data.e_object.properties = BabyList(G.Property).fromList(new_props); } /// Don't use this if you care about performance. 
@@ -298,13 +295,10 @@ pub fn setString(expr: *Expr, allocator: std.mem.Allocator, name: string, value: } } - var new_props = expr.data.e_object.properties.listManaged(allocator); - try new_props.append(.{ + try expr.data.e_object.properties.append(allocator, .{ .key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty), .value = Expr.init(E.String, .{ .data = value }, logger.Loc.Empty), }); - - expr.data.e_object.properties = BabyList(G.Property).fromList(new_props); } pub fn getObject(expr: *const Expr, name: string) ?Expr { @@ -647,6 +641,29 @@ pub fn jsonStringify(self: *const @This(), writer: anytype) !void { return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "expr", .value = self.data, .loc = self.loc }); } +pub fn extractNumericValuesInSafeRange(left: Expr.Data, right: Expr.Data) ?[2]f64 { + const l_value = left.extractNumericValue() orelse return null; + const r_value = right.extractNumericValue() orelse return null; + + // Check for NaN and return null if either value is NaN + if (std.math.isNan(l_value) or std.math.isNan(r_value)) { + return null; + } + + if (std.math.isInf(l_value) or std.math.isInf(r_value)) { + return .{ l_value, r_value }; + } + + if (l_value > bun.jsc.MAX_SAFE_INTEGER or r_value > bun.jsc.MAX_SAFE_INTEGER) { + return null; + } + if (l_value < bun.jsc.MIN_SAFE_INTEGER or r_value < bun.jsc.MIN_SAFE_INTEGER) { + return null; + } + + return .{ l_value, r_value }; +} + pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 { return .{ left.extractNumericValue() orelse return null, @@ -654,6 +671,20 @@ pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 { }; } +pub fn extractStringValues(left: Expr.Data, right: Expr.Data, allocator: std.mem.Allocator) ?[2]*E.String { + const l_string = left.extractStringValue() orelse return null; + const r_string = right.extractStringValue() orelse return null; + l_string.resolveRopeIfNeeded(allocator); + 
r_string.resolveRopeIfNeeded(allocator); + + if (l_string.isUTF8() != r_string.isUTF8()) return null; + + return .{ + l_string, + r_string, + }; +} + pub var icount: usize = 0; // We don't need to dynamically allocate booleans @@ -1407,11 +1438,17 @@ pub fn init(comptime Type: type, st: Type, loc: logger.Loc) Expr { } } -pub fn isPrimitiveLiteral(this: Expr) bool { +/// If this returns true, then calling this expression captures the target of +/// the property access as "this" when calling the function in the property. +pub inline fn isPropertyAccess(this: *const Expr) bool { + return this.hasValueForThisInCall(); +} + +pub inline fn isPrimitiveLiteral(this: *const Expr) bool { return @as(Tag, this.data).isPrimitiveLiteral(); } -pub fn isRef(this: Expr, ref: Ref) bool { +pub inline fn isRef(this: *const Expr, ref: Ref) bool { return switch (this.data) { .e_import_identifier => |import_identifier| import_identifier.ref.eql(ref), .e_identifier => |ident| ident.ref.eql(ref), @@ -1873,36 +1910,19 @@ pub const Tag = enum { } }; -pub fn isBoolean(a: Expr) bool { - switch (a.data) { - .e_boolean => { - return true; +pub fn isBoolean(a: *const Expr) bool { + return switch (a.data) { + .e_boolean => true, + .e_if => |ex| ex.yes.isBoolean() and ex.no.isBoolean(), + .e_unary => |ex| ex.op == .un_not or ex.op == .un_delete, + .e_binary => |ex| switch (ex.op) { + .bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => true, + .bin_logical_or => ex.left.isBoolean() and ex.right.isBoolean(), + .bin_logical_and => ex.left.isBoolean() and ex.right.isBoolean(), + else => false, }, - - .e_if => |ex| { - return isBoolean(ex.yes) and isBoolean(ex.no); - }, - .e_unary => |ex| { - return ex.op == .un_not or ex.op == .un_delete; - }, - .e_binary => |ex| { - switch (ex.op) { - .bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => { - return true; - 
}, - .bin_logical_or => { - return isBoolean(ex.left) and isBoolean(ex.right); - }, - .bin_logical_and => { - return isBoolean(ex.left) and isBoolean(ex.right); - }, - else => {}, - } - }, - else => {}, - } - - return false; + else => false, + }; } pub fn assign(a: Expr, b: Expr) Expr { @@ -1912,7 +1932,7 @@ pub fn assign(a: Expr, b: Expr) Expr { .right = b, }, a.loc); } -pub inline fn at(expr: Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr { +pub inline fn at(expr: *const Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr { return init(Type, t, expr.loc); } @@ -1920,21 +1940,19 @@ pub inline fn at(expr: Expr, comptime Type: type, t: Type, _: std.mem.Allocator) // will potentially be simplified to avoid generating unnecessary extra "!" // operators. For example, calling this with "!!x" will return "!x" instead // of returning "!!!x". -pub fn not(expr: Expr, allocator: std.mem.Allocator) Expr { - return maybeSimplifyNot( - expr, - allocator, - ) orelse Expr.init( - E.Unary, - E.Unary{ - .op = .un_not, - .value = expr, - }, - expr.loc, - ); +pub fn not(expr: *const Expr, allocator: std.mem.Allocator) Expr { + return expr.maybeSimplifyNot(allocator) orelse + Expr.init( + E.Unary, + E.Unary{ + .op = .un_not, + .value = expr.*, + }, + expr.loc, + ); } -pub fn hasValueForThisInCall(expr: Expr) bool { +pub inline fn hasValueForThisInCall(expr: *const Expr) bool { return switch (expr.data) { .e_dot, .e_index => true, else => false, @@ -1946,7 +1964,7 @@ pub fn hasValueForThisInCall(expr: Expr) bool { /// whole operator (i.e. the "!x") if it can be simplified, or false if not. /// It's separate from "Not()" above to avoid allocation on failure in case /// that is undesired. 
-pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr { +pub fn maybeSimplifyNot(expr: *const Expr, allocator: std.mem.Allocator) ?Expr { switch (expr.data) { .e_null, .e_undefined => { return expr.at(E.Boolean, E.Boolean{ .value = true }, allocator); @@ -1968,7 +1986,7 @@ pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr { }, // "!!!a" => "!a" .e_unary => |un| { - if (un.op == Op.Code.un_not and knownPrimitive(un.value) == .boolean) { + if (un.op == Op.Code.un_not and un.value.knownPrimitive() == .boolean) { return un.value; } }, @@ -1981,33 +1999,33 @@ pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr { Op.Code.bin_loose_eq => { // "!(a == b)" => "a != b" ex.op = .bin_loose_ne; - return expr; + return expr.*; }, Op.Code.bin_loose_ne => { // "!(a != b)" => "a == b" ex.op = .bin_loose_eq; - return expr; + return expr.*; }, Op.Code.bin_strict_eq => { // "!(a === b)" => "a !== b" ex.op = .bin_strict_ne; - return expr; + return expr.*; }, Op.Code.bin_strict_ne => { // "!(a !== b)" => "a === b" ex.op = .bin_strict_eq; - return expr; + return expr.*; }, Op.Code.bin_comma => { // "!(a, b)" => "a, !b" ex.right = ex.right.not(allocator); - return expr; + return expr.*; }, else => {}, } }, .e_inlined_enum => |inlined| { - return maybeSimplifyNot(inlined.value, allocator); + return inlined.value.maybeSimplifyNot(allocator); }, else => {}, @@ -2016,11 +2034,11 @@ pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr { return null; } -pub fn toStringExprWithoutSideEffects(expr: Expr, allocator: std.mem.Allocator) ?Expr { +pub fn toStringExprWithoutSideEffects(expr: *const Expr, allocator: std.mem.Allocator) ?Expr { const unwrapped = expr.unwrapInlined(); const slice = switch (unwrapped.data) { .e_null => "null", - .e_string => return expr, + .e_string => return expr.*, .e_undefined => "undefined", .e_boolean => |data| if (data.value) "true" else "false", .e_big_int => |bigint| bigint.value, @@ 
-2054,7 +2072,7 @@ pub fn isOptionalChain(self: *const @This()) bool { }; } -pub inline fn knownPrimitive(self: @This()) PrimitiveType { +pub inline fn knownPrimitive(self: *const @This()) PrimitiveType { return self.data.knownPrimitive(); } @@ -2294,6 +2312,7 @@ pub const Data = union(Tag) { const item = bun.create(allocator, E.Unary, .{ .op = el.op, .value = try el.value.deepClone(allocator), + .flags = el.flags, }); return .{ .e_unary = item }; }, @@ -2506,6 +2525,7 @@ pub const Data = union(Tag) { } }, .e_unary => |e| { + writeAnyToHasher(hasher, @as(u8, @bitCast(e.flags))); writeAnyToHasher(hasher, .{e.op}); e.value.data.writeToHasher(hasher, symbol_table); }, @@ -2537,7 +2557,7 @@ pub const Data = union(Tag) { inline .e_spread, .e_await => |e| { e.value.data.writeToHasher(hasher, symbol_table); }, - inline .e_yield => |e| { + .e_yield => |e| { writeAnyToHasher(hasher, .{ e.is_star, e.value }); if (e.value) |value| value.data.writeToHasher(hasher, symbol_table); @@ -2860,6 +2880,17 @@ pub const Data = union(Tag) { }; } + pub fn extractStringValue(data: Expr.Data) ?*E.String { + return switch (data) { + .e_string => data.e_string, + .e_inlined_enum => |inlined| switch (inlined.value.data) { + .e_string => |str| str, + else => null, + }, + else => null, + }; + } + pub const Equality = struct { equal: bool = false, ok: bool = false, @@ -3208,7 +3239,6 @@ const JSPrinter = @import("../js_printer.zig"); const std = @import("std"); const bun = @import("bun"); -const BabyList = bun.BabyList; const Environment = bun.Environment; const JSONParser = bun.json; const MutableString = bun.MutableString; diff --git a/src/ast/KnownGlobal.zig b/src/ast/KnownGlobal.zig index 76eaf465c0..35fb4c8190 100644 --- a/src/ast/KnownGlobal.zig +++ b/src/ast/KnownGlobal.zig @@ -8,18 +8,158 @@ pub const KnownGlobal = enum { Response, TextEncoder, TextDecoder, + Error, + TypeError, + SyntaxError, + RangeError, + ReferenceError, + EvalError, + URIError, + AggregateError, + Array, + Object, + 
Function, + RegExp, pub const map = bun.ComptimeEnumMap(KnownGlobal); - pub noinline fn maybeMarkConstructorAsPure(noalias e: *E.New, symbols: []const Symbol) void { - const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return; + inline fn callFromNew(e: *E.New, loc: logger.Loc) js_ast.Expr { + const call = E.Call{ + .target = e.target, + .args = e.args, + .close_paren_loc = e.close_parens_loc, + .can_be_unwrapped_if_unused = e.can_be_unwrapped_if_unused, + }; + return js_ast.Expr.init(E.Call, call, loc); + } + + pub noinline fn minifyGlobalConstructor(allocator: std.mem.Allocator, noalias e: *E.New, symbols: []const Symbol, loc: logger.Loc, minify_whitespace: bool) ?js_ast.Expr { + const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return null; const symbol = &symbols[id.innerIndex()]; if (symbol.kind != .unbound) - return; + return null; - const constructor = map.get(symbol.original_name) orelse return; + const constructor = map.get(symbol.original_name) orelse return null; - switch (constructor) { + return switch (constructor) { + // Error constructors can be called without 'new' with identical behavior + .Error, .TypeError, .SyntaxError, .RangeError, .ReferenceError, .EvalError, .URIError, .AggregateError => { + // Convert `new Error(...)` to `Error(...)` to save bytes + return callFromNew(e, loc); + }, + + .Object => { + const n = e.args.len; + + if (n == 0) { + // new Object() -> {} + return js_ast.Expr.init(E.Object, E.Object{}, loc); + } + + if (n == 1) { + const arg = e.args.ptr[0]; + switch (arg.data) { + .e_object, .e_array => { + // new Object({a: 1}) -> {a: 1} + // new Object([1, 2]) -> [1, 2] + return arg; + }, + .e_null, .e_undefined => { + // new Object(null) -> {} + // new Object(undefined) -> {} + return js_ast.Expr.init(E.Object, E.Object{}, loc); + }, + else => {}, + } + } + + // For other cases, just remove 'new' + return callFromNew(e, loc); + }, + + .Array => { + const n = 
e.args.len; + + return switch (n) { + 0 => { + // new Array() -> [] + return js_ast.Expr.init(E.Array, E.Array{}, loc); + }, + 1 => { + // For single argument, only convert to literal if we're SURE it's not a number + const arg = e.args.ptr[0]; + + // Check if it's an object or array literal first + switch (arg.data) { + .e_object, .e_array => { + // new Array({}) -> [{}], new Array([1]) -> [[1]] + // These are definitely not numbers, safe to convert + return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc); + }, + else => {}, + } + + // For other types, check via knownPrimitive + const primitive = arg.knownPrimitive(); + // Only convert if we know for certain it's not a number + // unknown could be a number at runtime, so we must preserve Array() call + switch (primitive) { + .null, .undefined, .boolean, .string, .bigint => { + // These are definitely not numbers, safe to convert + return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc); + }, + .number => { + const val = arg.data.e_number.value; + if ( + // only want this with whitespace minification + minify_whitespace and + (val == 0 or + val == 1 or + val == 2 or + val == 3 or + val == 4 or + val == 5 or + val == 6 or + val == 7 or + val == 8 or + val == 9 or + val == 10)) + { + const arg_loc = arg.loc; + var list = e.args.moveToListManaged(allocator); + list.clearRetainingCapacity(); + bun.handleOom(list.appendNTimes(js_ast.Expr{ .data = js_parser.Prefill.Data.EMissing, .loc = arg_loc }, @intFromFloat(val))); + return js_ast.Expr.init(E.Array, .{ .items = .moveFromList(&list) }, loc); + } + return callFromNew(e, loc); + }, + .unknown, .mixed => { + // Could be a number, preserve Array() call + return callFromNew(e, loc); + }, + } + }, + // > 1 + else => { + // new Array(1, 2, 3) -> [1, 2, 3] + // But NOT new Array(3) which creates an array with 3 empty slots + return js_ast.Expr.init(E.Array, .{ .items = e.args }, loc); + }, + }; + }, + + .Function => { + // Just remove 'new' for Function + return 
callFromNew(e, loc); + }, + .RegExp => { + // Don't optimize RegExp - the semantics are too complex: + // - new RegExp(re) creates a copy, but RegExp(re) returns the same instance + // - This affects object identity and lastIndex behavior + // - The difference only applies when flags are undefined + // Keep the original new RegExp() call to preserve correct semantics + return null; + }, .WeakSet, .WeakMap => { const n = e.args.len; @@ -27,7 +167,7 @@ pub const KnownGlobal = enum { // "new WeakSet()" is pure e.can_be_unwrapped_if_unused = .if_unused; - return; + return null; } if (n == 1) { @@ -50,6 +190,7 @@ pub const KnownGlobal = enum { }, } } + return null; }, .Date => { const n = e.args.len; @@ -58,7 +199,7 @@ pub const KnownGlobal = enum { // "new Date()" is pure e.can_be_unwrapped_if_unused = .if_unused; - return; + return null; } if (n == 1) { @@ -78,6 +219,7 @@ pub const KnownGlobal = enum { }, } } + return null; }, .Set => { @@ -86,7 +228,7 @@ pub const KnownGlobal = enum { if (n == 0) { // "new Set()" is pure e.can_be_unwrapped_if_unused = .if_unused; - return; + return null; } if (n == 1) { @@ -102,6 +244,7 @@ pub const KnownGlobal = enum { }, } } + return null; }, .Headers => { @@ -111,8 +254,9 @@ pub const KnownGlobal = enum { // "new Headers()" is pure e.can_be_unwrapped_if_unused = .if_unused; - return; + return null; } + return null; }, .Response => { @@ -122,7 +266,7 @@ pub const KnownGlobal = enum { // "new Response()" is pure e.can_be_unwrapped_if_unused = .if_unused; - return; + return null; } if (n == 1) { @@ -142,6 +286,7 @@ pub const KnownGlobal = enum { }, } } + return null; }, .TextDecoder, .TextEncoder => { const n = e.args.len; @@ -151,11 +296,12 @@ pub const KnownGlobal = enum { // "new TextDecoder()" is pure e.can_be_unwrapped_if_unused = .if_unused; - return; + return null; } // We _could_ validate the encoding argument // But let's not bother + return null; }, .Map => { @@ -164,7 +310,7 @@ pub const KnownGlobal = enum { if (n == 0) { 
// "new Map()" is pure e.can_be_unwrapped_if_unused = .if_unused; - return; + return null; } if (n == 1) { @@ -193,18 +339,20 @@ pub const KnownGlobal = enum { }, } } + return null; }, - } + }; } }; const string = []const u8; +const std = @import("std"); + const bun = @import("bun"); +const js_parser = bun.js_parser; +const logger = bun.logger; const js_ast = bun.ast; const E = js_ast.E; const Symbol = js_ast.Symbol; - -const std = @import("std"); -const Map = std.AutoHashMapUnmanaged; diff --git a/src/ast/Macro.zig b/src/ast/Macro.zig index b4b3f6dbd4..620fa2ed8f 100644 --- a/src/ast/Macro.zig +++ b/src/ast/Macro.zig @@ -386,7 +386,7 @@ pub const Runner = struct { const result = Expr.init( E.Array, E.Array{ - .items = ExprNodeList.init(&[_]Expr{}), + .items = ExprNodeList.empty, .was_originally_macro = true, }, this.caller.loc, @@ -398,7 +398,7 @@ pub const Runner = struct { var out = Expr.init( E.Array, E.Array{ - .items = ExprNodeList.init(array[0..0]), + .items = ExprNodeList.empty, .was_originally_macro = true, }, this.caller.loc, @@ -413,7 +413,7 @@ pub const Runner = struct { continue; i += 1; } - out.data.e_array.items = ExprNodeList.init(array); + out.data.e_array.items = ExprNodeList.fromOwnedSlice(array); _entry.value_ptr.* = out; return out; }, @@ -438,27 +438,37 @@ pub const Runner = struct { .include_value = true, }).init(this.global, obj); defer object_iter.deinit(); - var properties = this.allocator.alloc(G.Property, object_iter.len) catch unreachable; - errdefer this.allocator.free(properties); - var out = Expr.init( + + const out = _entry.value_ptr; + out.* = Expr.init( E.Object, E.Object{ - .properties = BabyList(G.Property).init(properties), + .properties = bun.handleOom( + G.Property.List.initCapacity(this.allocator, object_iter.len), + ), .was_originally_macro = true, }, this.caller.loc, ); - _entry.value_ptr.* = out; + const properties = &out.data.e_object.properties; + errdefer properties.clearAndFree(this.allocator); while (try 
object_iter.next()) |prop| { - properties[object_iter.i] = G.Property{ - .key = Expr.init(E.String, E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable), this.caller.loc), + bun.assertf( + object_iter.i == properties.len, + "`properties` unexpectedly modified (length {d}, expected {d})", + .{ properties.len, object_iter.i }, + ); + properties.appendAssumeCapacity(G.Property{ + .key = Expr.init( + E.String, + E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable), + this.caller.loc, + ), .value = try this.run(object_iter.value), - }; + }); } - out.data.e_object.properties = BabyList(G.Property).init(properties[0..object_iter.i]); - _entry.value_ptr.* = out; - return out; + return out.*; }, .JSON => { @@ -644,7 +654,6 @@ const Resolver = @import("../resolver/resolver.zig").Resolver; const isPackagePath = @import("../resolver/resolver.zig").isPackagePath; const bun = @import("bun"); -const BabyList = bun.BabyList; const Environment = bun.Environment; const Output = bun.Output; const Transpiler = bun.Transpiler; diff --git a/src/ast/P.zig b/src/ast/P.zig index b321106fd5..3dd1b87160 100644 --- a/src/ast/P.zig +++ b/src/ast/P.zig @@ -536,7 +536,7 @@ pub fn NewParser_( return p.newExpr(E.Call{ .target = require_resolve_ref, - .args = ExprNodeList.init(args), + .args = ExprNodeList.fromOwnedSlice(args), }, arg.loc); } @@ -570,7 +570,7 @@ pub fn NewParser_( return p.newExpr( E.Call{ .target = p.valueForRequire(arg.loc), - .args = ExprNodeList.init(args), + .args = ExprNodeList.fromOwnedSlice(args), }, arg.loc, ); @@ -648,7 +648,7 @@ pub fn NewParser_( return p.newExpr( E.Call{ .target = p.valueForRequire(arg.loc), - .args = ExprNodeList.init(args), + .args = ExprNodeList.fromOwnedSlice(args), }, arg.loc, ); @@ -955,7 +955,7 @@ pub fn NewParser_( .e_identifier => |ident| { // is this a require("something") if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args.ptr[0].data) == 
.e_string) { - _ = p.addImportRecord(.require, loc, call.args.first_().data.e_string.string(p.allocator) catch unreachable); + _ = p.addImportRecord(.require, loc, call.args.at(0).data.e_string.string(p.allocator) catch unreachable); } }, else => {}, @@ -971,7 +971,7 @@ pub fn NewParser_( .e_identifier => |ident| { // is this a require("something") if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args.ptr[0].data) == .e_string) { - _ = p.addImportRecord(.require, loc, call.args.first_().data.e_string.string(p.allocator) catch unreachable); + _ = p.addImportRecord(.require, loc, call.args.at(0).data.e_string.string(p.allocator) catch unreachable); } }, else => {}, @@ -1250,7 +1250,7 @@ pub fn NewParser_( .ref = namespace_ref, .is_top_level = true, }); - try p.module_scope.generated.push(allocator, namespace_ref); + try p.module_scope.generated.append(allocator, namespace_ref); for (imports, clause_items) |alias, *clause_item| { const ref = symbols.get(alias) orelse unreachable; const alias_name = if (@TypeOf(symbols) == RuntimeImports) RuntimeImports.all[alias] else alias; @@ -1305,7 +1305,7 @@ pub fn NewParser_( parts.append(js_ast.Part{ .stmts = stmts, .declared_symbols = declared_symbols, - .import_record_indices = bun.BabyList(u32).init(import_records), + .import_record_indices = bun.BabyList(u32).fromOwnedSlice(import_records), .tag = .runtime, }) catch unreachable; } @@ -1360,7 +1360,7 @@ pub fn NewParser_( .ref = namespace_ref, .is_top_level = true, }); - try p.module_scope.generated.push(allocator, namespace_ref); + try p.module_scope.generated.append(allocator, namespace_ref); for (clauses) |entry| { if (entry.enabled) { @@ -1374,7 +1374,7 @@ pub fn NewParser_( .name = LocRef{ .ref = entry.ref, .loc = logger.Loc{} }, }); declared_symbols.appendAssumeCapacity(.{ .ref = entry.ref, .is_top_level = true }); - try p.module_scope.generated.push(allocator, entry.ref); + try 
p.module_scope.generated.append(allocator, entry.ref); try p.is_import_item.put(allocator, entry.ref, {}); try p.named_imports.put(allocator, entry.ref, .{ .alias = entry.name, @@ -2113,7 +2113,7 @@ pub fn NewParser_( // const hoisted_ref = p.newSymbol(.hoisted, symbol.original_name) catch unreachable; symbols = p.symbols.items; - scope.generated.push(p.allocator, hoisted_ref) catch unreachable; + bun.handleOom(scope.generated.append(p.allocator, hoisted_ref)); p.hoisted_ref_for_sloppy_mode_block_fn.put(p.allocator, value.ref, hoisted_ref) catch unreachable; value.ref = hoisted_ref; symbol = &symbols[hoisted_ref.innerIndex()]; @@ -2258,7 +2258,7 @@ pub fn NewParser_( .generated = .{}, }; - try parent.children.push(allocator, scope); + try parent.children.append(allocator, scope); scope.strict_mode = parent.strict_mode; p.current_scope = scope; @@ -2569,7 +2569,7 @@ pub fn NewParser_( const name = try strings.append(p.allocator, "import_", try path_name.nonUniqueNameString(p.allocator)); stmt.namespace_ref = try p.newSymbol(.other, name); var scope: *Scope = p.current_scope; - try scope.generated.push(p.allocator, stmt.namespace_ref); + try scope.generated.append(p.allocator, stmt.namespace_ref); } var item_refs = ImportItemForNamespaceMap.init(p.allocator); @@ -2761,7 +2761,7 @@ pub fn NewParser_( var scope = p.current_scope; - try scope.generated.push(p.allocator, name.ref.?); + try scope.generated.append(p.allocator, name.ref.?); return name; } @@ -3067,7 +3067,7 @@ pub fn NewParser_( // this module will be unable to reference this symbol. However, we must // still add the symbol to the scope so it gets minified (automatically- // generated code may still reference the symbol). 
- try p.module_scope.generated.push(p.allocator, ref); + try p.module_scope.generated.append(p.allocator, ref); return ref; } @@ -3141,7 +3141,7 @@ pub fn NewParser_( entry.key_ptr.* = name; entry.value_ptr.* = js_ast.Scope.Member{ .ref = ref, .loc = loc }; if (comptime is_generated) { - try p.module_scope.generated.push(p.allocator, ref); + try p.module_scope.generated.append(p.allocator, ref); } return ref; } @@ -3448,7 +3448,10 @@ pub fn NewParser_( decls[0] = Decl{ .binding = p.b(B.Identifier{ .ref = ref }, local.loc), }; - try partStmts.append(p.s(S.Local{ .decls = G.Decl.List.init(decls) }, local.loc)); + try partStmts.append(p.s( + S.Local{ .decls = G.Decl.List.fromOwnedSlice(decls) }, + local.loc, + )); try p.declared_symbols.append(p.allocator, .{ .ref = ref, .is_top_level = true }); } } @@ -3463,7 +3466,7 @@ pub fn NewParser_( .symbol_uses = p.symbol_uses, .import_symbol_property_uses = p.import_symbol_property_uses, .declared_symbols = p.declared_symbols.toOwnedSlice(), - .import_record_indices = bun.BabyList(u32).init( + .import_record_indices = bun.BabyList(u32).fromOwnedSlice( p.import_records_for_current_part.toOwnedSlice( p.allocator, ) catch unreachable, @@ -3975,7 +3978,7 @@ pub fn NewParser_( // checks are not yet handled correctly by bun or esbuild, so this possibility is // currently ignored. 
.un_typeof => { - if (ex.value.data == .e_identifier) { + if (ex.value.data == .e_identifier and ex.flags.was_originally_typeof_identifier) { return true; } @@ -4014,6 +4017,18 @@ pub fn NewParser_( ex.right.data, ) and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.left) and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.right), + + // Special-case "<" and ">" with string, number, or bigint arguments + .bin_lt, .bin_gt, .bin_le, .bin_ge => { + const left = ex.left.knownPrimitive(); + const right = ex.right.knownPrimitive(); + switch (left) { + .string, .number, .bigint => { + return right == left and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.left) and p.exprCanBeRemovedIfUnusedWithoutDCECheck(&ex.right); + }, + else => {}, + } + }, else => {}, } }, @@ -4234,13 +4249,14 @@ pub fn NewParser_( // return false; // } - fn isSideEffectFreeUnboundIdentifierRef(p: *P, value: Expr, guard_condition: Expr, is_yes_branch: bool) bool { + fn isSideEffectFreeUnboundIdentifierRef(p: *P, value: Expr, guard_condition: Expr, is_yes_branch_: bool) bool { if (value.data != .e_identifier or p.symbols.items[value.data.e_identifier.ref.innerIndex()].kind != .unbound or guard_condition.data != .e_binary) return false; const binary = guard_condition.data.e_binary.*; + var is_yes_branch = is_yes_branch_; switch (binary.op) { .bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne => { @@ -4269,6 +4285,39 @@ pub fn NewParser_( (binary.op == .bin_strict_ne or binary.op == .bin_loose_ne)) and id.eql(id2); }, + .bin_lt, .bin_gt, .bin_le, .bin_ge => { + // Pattern match for "typeof x < " + var typeof: Expr.Data = binary.left.data; + var str: Expr.Data = binary.right.data; + + // Check if order is flipped: 'u' >= typeof x + if (typeof == .e_string) { + typeof = binary.right.data; + str = binary.left.data; + is_yes_branch = !is_yes_branch; + } + + if (typeof == .e_unary and str == .e_string) { + const unary = typeof.e_unary.*; + if (unary.op == .un_typeof and + unary.value.data == 
.e_identifier and + unary.flags.was_originally_typeof_identifier and + str.e_string.eqlComptime("u")) + { + // In "typeof x < 'u' ? x : null", the reference to "x" is side-effect free + // In "typeof x > 'u' ? x : null", the reference to "x" is side-effect free + if (is_yes_branch == (binary.op == .bin_lt or binary.op == .bin_le)) { + const id = value.data.e_identifier.ref; + const id2 = unary.value.data.e_identifier.ref; + if (id.eql(id2)) { + return true; + } + } + } + } + + return false; + }, else => return false, } } @@ -4297,7 +4346,7 @@ pub fn NewParser_( .ref = (p.declareGeneratedSymbol(.other, symbol_name) catch unreachable), }; - p.module_scope.generated.push(p.allocator, loc_ref.ref.?) catch unreachable; + bun.handleOom(p.module_scope.generated.append(p.allocator, loc_ref.ref.?)); p.is_import_item.put(p.allocator, loc_ref.ref.?, {}) catch unreachable; @field(p.jsx_imports, @tagName(field)) = loc_ref; break :brk loc_ref.ref.?; @@ -4399,7 +4448,7 @@ pub fn NewParser_( var local = p.s( S.Local{ .is_export = true, - .decls = Decl.List.init(decls), + .decls = Decl.List.fromOwnedSlice(decls), }, loc, ); @@ -4420,7 +4469,7 @@ pub fn NewParser_( var local = p.s( S.Local{ .is_export = true, - .decls = Decl.List.init(decls), + .decls = Decl.List.fromOwnedSlice(decls), }, loc, ); @@ -4542,7 +4591,7 @@ pub fn NewParser_( stmts.append( p.s(S.Local{ .kind = .k_var, - .decls = G.Decl.List.init(decls), + .decls = G.Decl.List.fromOwnedSlice(decls), .is_export = is_export, }, stmt_loc), ) catch |err| bun.handleOom(err); @@ -4551,7 +4600,7 @@ pub fn NewParser_( stmts.append( p.s(S.Local{ .kind = .k_let, - .decls = G.Decl.List.init(decls), + .decls = G.Decl.List.fromOwnedSlice(decls), }, stmt_loc), ) catch |err| bun.handleOom(err); } @@ -4636,7 +4685,7 @@ pub fn NewParser_( const call = p.newExpr( E.Call{ .target = target, - .args = ExprNodeList.init(args_list), + .args = ExprNodeList.fromOwnedSlice(args_list), // TODO: make these fully tree-shakable. 
this annotation // as-is is incorrect. This would be done by changing all // enum wrappers into `var Enum = ...` instead of two @@ -4691,18 +4740,16 @@ pub fn NewParser_( for (func.func.args, 0..) |arg, i| { for (arg.ts_decorators.ptr[0..arg.ts_decorators.len]) |arg_decorator| { var decorators = if (is_constructor) - class.ts_decorators.listManaged(p.allocator) + &class.ts_decorators else - prop.ts_decorators.listManaged(p.allocator); + &prop.ts_decorators; const args = p.allocator.alloc(Expr, 2) catch unreachable; args[0] = p.newExpr(E.Number{ .value = @as(f64, @floatFromInt(i)) }, arg_decorator.loc); args[1] = arg_decorator; - decorators.append(p.callRuntime(arg_decorator.loc, "__legacyDecorateParamTS", args)) catch unreachable; - if (is_constructor) { - class.ts_decorators.update(decorators); - } else { - prop.ts_decorators.update(decorators); - } + decorators.append( + p.allocator, + p.callRuntime(arg_decorator.loc, "__legacyDecorateParamTS", args), + ) catch |err| bun.handleOom(err); } } }, @@ -4732,7 +4779,7 @@ pub fn NewParser_( target = p.newExpr(E.Dot{ .target = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? 
}, class.class_name.?.loc), .name = "prototype", .name_loc = loc }, loc); } - var array = prop.ts_decorators.listManaged(p.allocator); + var array: std.ArrayList(Expr) = .init(p.allocator); if (p.options.features.emit_decorator_metadata) { switch (prop.kind) { @@ -4757,7 +4804,7 @@ pub fn NewParser_( entry.* = p.serializeMetadata(method_arg.ts_metadata) catch unreachable; } - args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(args_array) }, logger.Loc.Empty); + args[1] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(args_array) }, logger.Loc.Empty); array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable; } @@ -4782,7 +4829,7 @@ pub fn NewParser_( { var args = p.allocator.alloc(Expr, 2) catch unreachable; args[0] = p.newExpr(E.String{ .data = "design:paramtypes" }, logger.Loc.Empty); - args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(&[_]Expr{}) }, logger.Loc.Empty); + args[1] = p.newExpr(E.Array{ .items = ExprNodeList.empty }, logger.Loc.Empty); array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable; } } @@ -4802,7 +4849,7 @@ pub fn NewParser_( entry.* = p.serializeMetadata(method_arg.ts_metadata) catch unreachable; } - args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(args_array) }, logger.Loc.Empty); + args[1] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(args_array) }, logger.Loc.Empty); array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable; } @@ -4819,8 +4866,9 @@ pub fn NewParser_( } } + bun.handleOom(array.insertSlice(0, prop.ts_decorators.slice())); const args = p.allocator.alloc(Expr, 4) catch unreachable; - args[0] = p.newExpr(E.Array{ .items = ExprNodeList.init(array.items) }, loc); + args[0] = p.newExpr(E.Array{ .items = ExprNodeList.moveFromList(&array) }, loc); args[1] = target; args[2] = descriptor_key; args[3] = descriptor_kind; @@ -4882,10 +4930,10 @@ pub fn NewParser_( if (class.extends != null) { const target = p.newExpr(E.Super{}, 
stmt.loc); const arguments_ref = p.newSymbol(.unbound, arguments_str) catch unreachable; - p.current_scope.generated.push(p.allocator, arguments_ref) catch unreachable; + bun.handleOom(p.current_scope.generated.append(p.allocator, arguments_ref)); const super = p.newExpr(E.Spread{ .value = p.newExpr(E.Identifier{ .ref = arguments_ref }, stmt.loc) }, stmt.loc); - const args = ExprNodeList.one(p.allocator, super) catch unreachable; + const args = bun.handleOom(ExprNodeList.initOne(p.allocator, super)); constructor_stmts.append(p.s(S.SExpr{ .value = p.newExpr(E.Call{ .target = target, .args = args }, stmt.loc) }, stmt.loc)) catch unreachable; } @@ -4933,7 +4981,7 @@ pub fn NewParser_( stmts.appendSliceAssumeCapacity(instance_decorators.items); stmts.appendSliceAssumeCapacity(static_decorators.items); if (class.ts_decorators.len > 0) { - var array = class.ts_decorators.listManaged(p.allocator); + var array = class.ts_decorators.moveToListManaged(p.allocator); if (p.options.features.emit_decorator_metadata) { if (constructor_function != null) { @@ -4949,9 +4997,9 @@ pub fn NewParser_( param_array[i] = p.serializeMetadata(constructor_arg.ts_metadata) catch unreachable; } - args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(param_array) }, logger.Loc.Empty); + args[1] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(param_array) }, logger.Loc.Empty); } else { - args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(&[_]Expr{}) }, logger.Loc.Empty); + args[1] = p.newExpr(E.Array{ .items = ExprNodeList.empty }, logger.Loc.Empty); } array.append(p.callRuntime(stmt.loc, "__legacyMetadataTS", args)) catch unreachable; @@ -4959,7 +5007,7 @@ pub fn NewParser_( } const args = p.allocator.alloc(Expr, 2) catch unreachable; - args[0] = p.newExpr(E.Array{ .items = ExprNodeList.init(array.items) }, stmt.loc); + args[0] = p.newExpr(E.Array{ .items = ExprNodeList.fromOwnedSlice(array.items) }, stmt.loc); args[1] = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? 
}, class.class_name.?.loc); stmts.appendAssumeCapacity(Stmt.assign( @@ -5369,7 +5417,7 @@ pub fn NewParser_( name, loc_ref.ref.?, ); - p.module_scope.generated.push(p.allocator, loc_ref.ref.?) catch unreachable; + bun.handleOom(p.module_scope.generated.append(p.allocator, loc_ref.ref.?)); return loc_ref.ref.?; } } else { @@ -5393,7 +5441,7 @@ pub fn NewParser_( return p.newExpr( E.Call{ .target = p.runtimeIdentifier(loc, name), - .args = ExprNodeList.init(args), + .args = ExprNodeList.fromOwnedSlice(args), }, loc, ); @@ -5453,7 +5501,7 @@ pub fn NewParser_( for (to_flatten.children.slice()) |item| { item.parent = parent; - parent.children.push(p.allocator, item) catch unreachable; + bun.handleOom(parent.children.append(p.allocator, item)); } } @@ -5474,7 +5522,7 @@ pub fn NewParser_( .ref = ref, }) catch |err| bun.handleOom(err); - bun.handleOom(scope.generated.append(p.allocator, &.{ref})); + bun.handleOom(scope.generated.append(p.allocator, ref)); return ref; } @@ -5664,7 +5712,7 @@ pub fn NewParser_( } const is_top_level = scope == p.module_scope; - scope.generated.append(p.allocator, &.{ + scope.generated.appendSlice(p.allocator, &.{ ctx.stack_ref, caught_ref, err_ref, @@ -5704,7 +5752,7 @@ pub fn NewParser_( const finally_stmts = finally: { if (ctx.has_await_using) { const promise_ref = p.generateTempRef("_promise"); - bun.handleOom(scope.generated.append(p.allocator, &.{promise_ref})); + bun.handleOom(scope.generated.append(p.allocator, promise_ref)); p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = promise_ref }); const promise_ref_expr = p.newExpr(E.Identifier{ .ref = promise_ref }, loc); @@ -5722,7 +5770,7 @@ pub fn NewParser_( .binding = p.b(B.Identifier{ .ref = promise_ref }, loc), .value = call_dispose, }; - break :decls G.Decl.List.init(decls); + break :decls G.Decl.List.fromOwnedSlice(decls); }, }, loc); @@ -5758,7 +5806,7 @@ pub fn NewParser_( .binding = p.b(B.Identifier{ .ref = ctx.stack_ref }, loc), .value = 
p.newExpr(E.Array{}, loc), }; - break :decls G.Decl.List.init(decls); + break :decls G.Decl.List.fromOwnedSlice(decls); }, .kind = .k_let, }, loc)); @@ -5780,7 +5828,7 @@ pub fn NewParser_( .binding = p.b(B.Identifier{ .ref = has_err_ref }, loc), .value = p.newExpr(E.Number{ .value = 1 }, loc), }; - break :decls G.Decl.List.init(decls); + break :decls G.Decl.List.fromOwnedSlice(decls); }, }, loc); break :catch_body statements; @@ -6057,7 +6105,7 @@ pub fn NewParser_( .body = .{ .stmts = p.allocator.dupe(Stmt, &.{ p.s(S.Return{ .value = p.newExpr(E.Array{ - .items = ExprNodeList.init(ctx.user_hooks.values()), + .items = ExprNodeList.fromBorrowedSliceDangerous(ctx.user_hooks.values()), }, loc) }, loc), }) catch |err| bun.handleOom(err), .loc = loc, @@ -6069,7 +6117,7 @@ pub fn NewParser_( // _s(func, "", force, () => [useCustom]) return p.newExpr(E.Call{ .target = Expr.initIdentifier(ctx.signature_cb, loc), - .args = ExprNodeList.init(args), + .args = ExprNodeList.fromOwnedSlice(args), }, loc); } @@ -6150,11 +6198,14 @@ pub fn NewParser_( } if (part.import_record_indices.len == 0) { - part.import_record_indices = @TypeOf(part.import_record_indices).init( - (p.import_records_for_current_part.clone(p.allocator) catch unreachable).items, - ); + part.import_record_indices = .fromOwnedSlice(bun.handleOom( + p.allocator.dupe(u32, p.import_records_for_current_part.items), + )); } else { - part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items) catch unreachable; + part.import_record_indices.appendSlice( + p.allocator, + p.import_records_for_current_part.items, + ) catch |err| bun.handleOom(err); } parts.items[parts_end] = part; @@ -6295,7 +6346,7 @@ pub fn NewParser_( entry.value_ptr.* = .{}; } - entry.value_ptr.push(ctx.allocator, @as(u32, @truncate(ctx.part_index))) catch unreachable; + bun.handleOom(entry.value_ptr.append(ctx.allocator, @as(u32, @truncate(ctx.part_index)))); } }; @@ -6321,7 +6372,7 @@ pub fn NewParser_( entry.value_ptr.* = 
.{}; } - entry.value_ptr.push(p.allocator, js_ast.namespace_export_part_index) catch unreachable; + bun.handleOom(entry.value_ptr.append(p.allocator, js_ast.namespace_export_part_index)); } } @@ -6344,17 +6395,12 @@ pub fn NewParser_( break :brk Ref.None; }; - const parts_list = bun.BabyList(js_ast.Part).fromList(parts); - return .{ .runtime_imports = p.runtime_imports, - .parts = parts_list, .module_scope = p.module_scope.*, - .symbols = js_ast.Symbol.List.fromList(p.symbols), .exports_ref = p.exports_ref, .wrapper_ref = wrapper_ref, .module_ref = p.module_ref, - .import_records = ImportRecord.List.fromList(p.import_records), .export_star_import_records = p.export_star_import_records.items, .approximate_newline_count = p.lexer.approximate_newline_count, .exports_kind = exports_kind, @@ -6394,12 +6440,14 @@ pub fn NewParser_( .has_commonjs_export_names = p.has_commonjs_export_names, .hashbang = hashbang, - // TODO: cross-module constant inlining // .const_values = p.const_values, .ts_enums = try p.computeTsEnumsMap(allocator), - .import_meta_ref = p.import_meta_ref, + + .symbols = js_ast.Symbol.List.moveFromList(&p.symbols), + .parts = bun.BabyList(js_ast.Part).moveFromList(parts), + .import_records = ImportRecord.List.moveFromList(&p.import_records), }; } diff --git a/src/ast/Parser.zig b/src/ast/Parser.zig index ff68c2e000..ed8461d8c9 100644 --- a/src/ast/Parser.zig +++ b/src/ast/Parser.zig @@ -188,7 +188,7 @@ pub const Parser = struct { // in the `symbols` array. 
bun.assert(p.symbols.items.len == 0); var symbols_ = symbols; - p.symbols = symbols_.listManaged(p.allocator); + p.symbols = symbols_.moveToListManaged(p.allocator); try p.prepareForVisitPass(); @@ -550,10 +550,7 @@ pub const Parser = struct { var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1); sliced.items.len = 1; var _local = local.*; - var list = try ListManaged(G.Decl).initCapacity(p.allocator, 1); - list.items.len = 1; - list.items[0] = decl; - _local.decls.update(list); + _local.decls = try .initOne(p.allocator, decl); sliced.items[0] = p.s(_local, stmt.loc); try p.appendPart(&parts, sliced.items); } @@ -686,7 +683,7 @@ pub const Parser = struct { var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable; part_stmts[0] = p.s(S.Local{ .kind = .k_var, - .decls = Decl.List.init(decls), + .decls = Decl.List.fromOwnedSlice(decls), }, logger.Loc.Empty); before.append(js_ast.Part{ .stmts = part_stmts, @@ -713,7 +710,7 @@ pub const Parser = struct { var import_part_stmts = remaining_stmts[0..1]; remaining_stmts = remaining_stmts[1..]; - bun.handleOom(p.module_scope.generated.push(p.allocator, deferred_import.namespace.ref.?)); + bun.handleOom(p.module_scope.generated.append(p.allocator, deferred_import.namespace.ref.?)); import_part_stmts[0] = Stmt.alloc( S.Import, @@ -835,7 +832,7 @@ pub const Parser = struct { part.symbol_uses = .{}; return js_ast.Result{ .ast = js_ast.Ast{ - .import_records = ImportRecord.List.init(p.import_records.items), + .import_records = ImportRecord.List.moveFromList(&p.import_records), .redirect_import_record_index = id, .named_imports = p.named_imports, .named_exports = p.named_exports, @@ -905,7 +902,10 @@ pub const Parser = struct { break :brk new_stmts.items; }; - part.import_record_indices.push(p.allocator, right.data.e_require_string.import_record_index) catch unreachable; + part.import_record_indices.append( + p.allocator, + right.data.e_require_string.import_record_index, + ) catch |err| bun.handleOom(err); 
p.symbols.items[p.module_ref.innerIndex()].use_count_estimate = 0; p.symbols.items[namespace_ref.innerIndex()].use_count_estimate -|= 1; _ = part.symbol_uses.swapRemove(namespace_ref); @@ -1165,7 +1165,7 @@ pub const Parser = struct { var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable; part_stmts[0] = p.s(S.Local{ .kind = .k_var, - .decls = Decl.List.init(decls), + .decls = Decl.List.fromOwnedSlice(decls), }, logger.Loc.Empty); before.append(js_ast.Part{ .stmts = part_stmts, @@ -1245,7 +1245,7 @@ pub const Parser = struct { before.append(js_ast.Part{ .stmts = part_stmts, .declared_symbols = declared_symbols, - .import_record_indices = bun.BabyList(u32).init(import_record_indices), + .import_record_indices = bun.BabyList(u32).fromOwnedSlice(import_record_indices), .tag = .bun_test, }) catch unreachable; diff --git a/src/ast/SideEffects.zig b/src/ast/SideEffects.zig index 39909204e5..1b0f023c3a 100644 --- a/src/ast/SideEffects.zig +++ b/src/ast/SideEffects.zig @@ -153,7 +153,7 @@ pub const SideEffects = enum(u1) { // "typeof x" must not be transformed into if "x" since doing so could // cause an exception to be thrown. Instead we can just remove it since // "typeof x" is special-cased in the standard to never throw. - if (std.meta.activeTag(un.value.data) == .e_identifier) { + if (un.value.data == .e_identifier and un.flags.was_originally_typeof_identifier) { return null; } @@ -199,6 +199,10 @@ pub const SideEffects = enum(u1) { // "toString" and/or "valueOf" to be called. .bin_loose_eq, .bin_loose_ne, + .bin_lt, + .bin_gt, + .bin_le, + .bin_ge, => { if (isPrimitiveWithSideEffects(bin.left.data) and isPrimitiveWithSideEffects(bin.right.data)) { return Expr.joinWithComma( @@ -207,13 +211,23 @@ pub const SideEffects = enum(u1) { p.allocator, ); } - // If one side is a number, the number can be printed as - // `0` since the result being unused doesnt matter, we - // only care to invoke the coercion. 
- if (bin.left.data == .e_number) { - bin.left.data = .{ .e_number = .{ .value = 0.0 } }; - } else if (bin.right.data == .e_number) { - bin.right.data = .{ .e_number = .{ .value = 0.0 } }; + + switch (bin.op) { + .bin_loose_eq, + .bin_loose_ne, + => { + // If one side is a number and the other side is a known primitive with side effects, + // the number can be printed as `0` since the result being unused doesn't matter, + // we only care to invoke the coercion. + // We only do this optimization if the other side is a known primitive with side effects + // to avoid corrupting shared nodes when the other side is an undefined identifier + if (bin.left.data == .e_number) { + bin.left.data = .{ .e_number = .{ .value = 0.0 } }; + } else if (bin.right.data == .e_number) { + bin.right.data = .{ .e_number = .{ .value = 0.0 } }; + } + }, + else => {}, } }, @@ -259,7 +273,8 @@ pub const SideEffects = enum(u1) { } properties_slice = properties_slice[0..end]; - expr.data.e_object.properties = G.Property.List.init(properties_slice); + expr.data.e_object.properties = + G.Property.List.fromBorrowedSliceDangerous(properties_slice); return expr; } } @@ -297,16 +312,14 @@ pub const SideEffects = enum(u1) { for (items) |item| { if (item.data == .e_spread) { var end: usize = 0; - for (items) |item__| { - const item_ = item__; + for (items) |item_| { if (item_.data != .e_missing) { items[end] = item_; end += 1; } - - expr.data.e_array.items = ExprNodeList.init(items[0..end]); - return expr; } + expr.data.e_array.items.shrinkRetainingCapacity(end); + return expr; } } @@ -443,7 +456,7 @@ pub const SideEffects = enum(u1) { findIdentifiers(decl.binding, &decls); } - local.decls.update(decls); + local.decls = .moveFromList(&decls); return true; }, @@ -875,7 +888,6 @@ const js_ast = bun.ast; const Binding = js_ast.Binding; const E = js_ast.E; const Expr = js_ast.Expr; -const ExprNodeList = js_ast.ExprNodeList; const Stmt = js_ast.Stmt; const G = js_ast.G; diff --git a/src/ast/Symbol.zig 
b/src/ast/Symbol.zig index eada199e7d..1a8d31d5d4 100644 --- a/src/ast/Symbol.zig +++ b/src/ast/Symbol.zig @@ -412,7 +412,7 @@ pub const Map = struct { } pub fn initWithOneList(list: List) Map { - const baby_list = BabyList(List).init((&list)[0..1]); + const baby_list = BabyList(List).fromBorrowedSliceDangerous((&list)[0..1]); return initList(baby_list); } diff --git a/src/ast/maybe.zig b/src/ast/maybe.zig index 0c620c2962..1c461b3099 100644 --- a/src/ast/maybe.zig +++ b/src/ast/maybe.zig @@ -68,7 +68,7 @@ pub fn AstMaybe( .loc = name_loc, .ref = p.newSymbol(.import, name) catch unreachable, }; - p.module_scope.generated.push(p.allocator, new_item.ref.?) catch unreachable; + bun.handleOom(p.module_scope.generated.append(p.allocator, new_item.ref.?)); import_items.put(name, new_item) catch unreachable; p.is_import_item.put(p.allocator, new_item.ref.?, {}) catch unreachable; @@ -214,7 +214,7 @@ pub fn AstMaybe( .other, std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(key)}) catch unreachable, ) catch unreachable; - p.module_scope.generated.push(p.allocator, new_ref) catch unreachable; + bun.handleOom(p.module_scope.generated.append(p.allocator, new_ref)); named_export_entry.value_ptr.* = .{ .loc_ref = LocRef{ .loc = name_loc, @@ -320,7 +320,7 @@ pub fn AstMaybe( .other, std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable, ) catch unreachable; - p.module_scope.generated.push(p.allocator, new_ref) catch unreachable; + bun.handleOom(p.module_scope.generated.append(p.allocator, new_ref)); named_export_entry.value_ptr.* = .{ .loc_ref = LocRef{ .loc = name_loc, @@ -493,7 +493,7 @@ pub fn AstMaybe( .other, std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable, ) catch unreachable; - p.module_scope.generated.push(p.allocator, new_ref) catch unreachable; + bun.handleOom(p.module_scope.generated.append(p.allocator, new_ref)); named_export_entry.value_ptr.* = .{ .loc_ref = LocRef{ 
.loc = name_loc, @@ -650,6 +650,9 @@ pub fn AstMaybe( E.Unary{ .op = .un_typeof, .value = expr, + .flags = .{ + .was_originally_typeof_identifier = expr.data == .e_identifier, + }, }, logger.Loc.Empty, ), diff --git a/src/ast/parse.zig b/src/ast/parse.zig index c7c026f091..2582d5bd40 100644 --- a/src/ast/parse.zig +++ b/src/ast/parse.zig @@ -200,7 +200,7 @@ pub fn Parse( .class_name = name, .extends = extends, .close_brace_loc = close_brace_loc, - .ts_decorators = ExprNodeList.init(class_opts.ts_decorators), + .ts_decorators = ExprNodeList.fromOwnedSlice(class_opts.ts_decorators), .class_keyword = class_keyword, .body_loc = body_loc, .properties = properties.items, @@ -283,7 +283,7 @@ pub fn Parse( } const close_paren_loc = p.lexer.loc(); try p.lexer.expect(.t_close_paren); - return ExprListLoc{ .list = ExprNodeList.fromList(args), .loc = close_paren_loc }; + return ExprListLoc{ .list = ExprNodeList.moveFromList(&args), .loc = close_paren_loc }; } pub fn parseJSXPropValueIdentifier(noalias p: *P, previous_string_with_backslash_loc: *logger.Loc) !Expr { @@ -474,7 +474,10 @@ pub fn Parse( if (opts.is_async) { p.logExprErrors(&errors); const async_expr = p.newExpr(E.Identifier{ .ref = try p.storeNameInRef("async") }, loc); - return p.newExpr(E.Call{ .target = async_expr, .args = ExprNodeList.init(items) }, loc); + return p.newExpr(E.Call{ + .target = async_expr, + .args = ExprNodeList.fromOwnedSlice(items), + }, loc); } // Is this a chain of expressions and comma operators? 
@@ -621,16 +624,17 @@ pub fn Parse( try p.forbidLexicalDecl(token_range.loc); } - const decls = try p.parseAndDeclareDecls(.other, opts); + var decls_list = try p.parseAndDeclareDecls(.other, opts); + const decls: G.Decl.List = .moveFromList(&decls_list); return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .stmt = p.s(S.Local{ .kind = .k_let, - .decls = G.Decl.List.fromList(decls), + .decls = decls, .is_export = opts.is_export, }, token_range.loc), }, - .decls = decls.items, + .decls = decls.slice(), }; } }, @@ -650,19 +654,20 @@ pub fn Parse( } // p.markSyntaxFeature(.using, token_range.loc); opts.is_using_statement = true; - const decls = try p.parseAndDeclareDecls(.constant, opts); + var decls_list = try p.parseAndDeclareDecls(.constant, opts); + const decls: G.Decl.List = .moveFromList(&decls_list); if (!opts.is_for_loop_init) { - try p.requireInitializers(.k_using, decls.items); + try p.requireInitializers(.k_using, decls.slice()); } return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .stmt = p.s(S.Local{ .kind = .k_using, - .decls = G.Decl.List.fromList(decls), + .decls = decls, .is_export = false, }, token_range.loc), }, - .decls = decls.items, + .decls = decls.slice(), }; } } else if (p.fn_or_arrow_data_parse.allow_await == .allow_expr and strings.eqlComptime(raw, "await")) { @@ -689,19 +694,20 @@ pub fn Parse( } // p.markSyntaxFeature(.using, using_range.loc); opts.is_using_statement = true; - const decls = try p.parseAndDeclareDecls(.constant, opts); + var decls_list = try p.parseAndDeclareDecls(.constant, opts); + const decls: G.Decl.List = .moveFromList(&decls_list); if (!opts.is_for_loop_init) { - try p.requireInitializers(.k_await_using, decls.items); + try p.requireInitializers(.k_await_using, decls.slice()); } return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .stmt = p.s(S.Local{ .kind = .k_await_using, - .decls = G.Decl.List.fromList(decls), + .decls = decls, .is_export = false, }, token_range.loc), }, - .decls = decls.items, + 
.decls = decls.slice(), }; } break :value Expr{ diff --git a/src/ast/parseFn.zig b/src/ast/parseFn.zig index 9e53368ada..bf27bc2d31 100644 --- a/src/ast/parseFn.zig +++ b/src/ast/parseFn.zig @@ -281,7 +281,7 @@ pub fn ParseFn( } args.append(p.allocator, G.Arg{ - .ts_decorators = ExprNodeList.init(ts_decorators), + .ts_decorators = ExprNodeList.fromOwnedSlice(ts_decorators), .binding = arg, .default = default_value, diff --git a/src/ast/parseJSXElement.zig b/src/ast/parseJSXElement.zig index 3faa412c96..5aadd4bbdb 100644 --- a/src/ast/parseJSXElement.zig +++ b/src/ast/parseJSXElement.zig @@ -148,7 +148,7 @@ pub fn ParseJSXElement( const is_key_after_spread = key_prop_i > -1 and first_spread_prop_i > -1 and key_prop_i > first_spread_prop_i; flags.setPresent(.is_key_after_spread, is_key_after_spread); - properties = G.Property.List.fromList(props); + properties = G.Property.List.moveFromList(&props); if (is_key_after_spread and p.options.jsx.runtime == .automatic and !p.has_classic_runtime_warned) { try p.log.addWarning(p.source, spread_loc, "\"key\" prop after a {...spread} is deprecated in JSX. 
Falling back to classic runtime."); p.has_classic_runtime_warned = true; @@ -268,7 +268,7 @@ pub fn ParseJSXElement( return p.newExpr(E.JSXElement{ .tag = end_tag.data.asExpr(), - .children = ExprNodeList.fromList(children), + .children = ExprNodeList.moveFromList(&children), .properties = properties, .key_prop_index = key_prop_i, .flags = flags, diff --git a/src/ast/parsePrefix.zig b/src/ast/parsePrefix.zig index 671fef3045..14eae7eb71 100644 --- a/src/ast/parsePrefix.zig +++ b/src/ast/parsePrefix.zig @@ -262,7 +262,16 @@ pub fn ParsePrefix( return error.SyntaxError; } - return p.newExpr(E.Unary{ .op = .un_typeof, .value = value }, loc); + return p.newExpr( + E.Unary{ + .op = .un_typeof, + .value = value, + .flags = .{ + .was_originally_typeof_identifier = value.data == .e_identifier, + }, + }, + loc, + ); } fn t_delete(noalias p: *P) anyerror!Expr { const loc = p.lexer.loc(); @@ -281,7 +290,14 @@ pub fn ParsePrefix( } } - return p.newExpr(E.Unary{ .op = .un_delete, .value = value }, loc); + return p.newExpr(E.Unary{ + .op = .un_delete, + .value = value, + .flags = .{ + .was_originally_delete_of_identifier_or_property_access = value.data == .e_identifier or + value.isPropertyAccess(), + }, + }, loc); } fn t_plus(noalias p: *P) anyerror!Expr { const loc = p.lexer.loc(); @@ -500,7 +516,7 @@ pub fn ParsePrefix( self_errors.mergeInto(errors.?); } return p.newExpr(E.Array{ - .items = ExprNodeList.fromList(items), + .items = ExprNodeList.moveFromList(&items), .comma_after_spread = comma_after_spread.toNullable(), .is_single_line = is_single_line, .close_bracket_loc = close_bracket_loc, @@ -584,7 +600,7 @@ pub fn ParsePrefix( } return p.newExpr(E.Object{ - .properties = G.Property.List.fromList(properties), + .properties = G.Property.List.moveFromList(&properties), .comma_after_spread = if (comma_after_spread.start > 0) comma_after_spread else diff --git a/src/ast/parseProperty.zig b/src/ast/parseProperty.zig index 9ca95c0b74..01586cdc60 100644 --- 
a/src/ast/parseProperty.zig +++ b/src/ast/parseProperty.zig @@ -119,7 +119,7 @@ pub fn ParseProperty( } return G.Property{ - .ts_decorators = ExprNodeList.init(opts.ts_decorators), + .ts_decorators = try ExprNodeList.fromSlice(p.allocator, opts.ts_decorators), .kind = kind, .flags = Flags.Property.init(.{ .is_computed = is_computed, @@ -333,7 +333,7 @@ pub fn ParseProperty( ) catch unreachable; block.* = G.ClassStaticBlock{ - .stmts = js_ast.BabyList(Stmt).init(stmts), + .stmts = js_ast.BabyList(Stmt).fromOwnedSlice(stmts), .loc = loc, }; @@ -506,7 +506,7 @@ pub fn ParseProperty( try p.lexer.expectOrInsertSemicolon(); return G.Property{ - .ts_decorators = ExprNodeList.init(opts.ts_decorators), + .ts_decorators = try ExprNodeList.fromSlice(p.allocator, opts.ts_decorators), .kind = kind, .flags = Flags.Property.init(.{ .is_computed = is_computed, diff --git a/src/ast/parseStmt.zig b/src/ast/parseStmt.zig index 274f64eaf9..b8bce67462 100644 --- a/src/ast/parseStmt.zig +++ b/src/ast/parseStmt.zig @@ -493,9 +493,13 @@ pub fn ParseStmt( } fn t_var(p: *P, opts: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt { try p.lexer.next(); - const decls = try p.parseAndDeclareDecls(.hoisted, opts); + var decls = try p.parseAndDeclareDecls(.hoisted, opts); try p.lexer.expectOrInsertSemicolon(); - return p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc); + return p.s(S.Local{ + .kind = .k_var, + .decls = Decl.List.moveFromList(&decls), + .is_export = opts.is_export, + }, loc); } fn t_const(p: *P, opts: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt { if (opts.lexical_decl != .allow_all) { @@ -509,14 +513,18 @@ pub fn ParseStmt( return p.parseTypescriptEnumStmt(loc, opts); } - const decls = try p.parseAndDeclareDecls(.constant, opts); + var decls = try p.parseAndDeclareDecls(.constant, opts); try p.lexer.expectOrInsertSemicolon(); if (!opts.is_typescript_declare) { try p.requireInitializers(.k_const, decls.items); } - 
return p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc); + return p.s(S.Local{ + .kind = .k_const, + .decls = Decl.List.moveFromList(&decls), + .is_export = opts.is_export, + }, loc); } fn t_if(p: *P, _: *ParseStatementOptions, loc: logger.Loc) anyerror!Stmt { var current_loc = loc; @@ -795,15 +803,17 @@ pub fn ParseStmt( is_var = true; try p.lexer.next(); var stmtOpts = ParseStatementOptions{}; - decls.update(try p.parseAndDeclareDecls(.hoisted, &stmtOpts)); - init_ = p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls) }, init_loc); + var decls_list = try p.parseAndDeclareDecls(.hoisted, &stmtOpts); + decls = .moveFromList(&decls_list); + init_ = p.s(S.Local{ .kind = .k_var, .decls = decls }, init_loc); }, // for (const ) .t_const => { try p.lexer.next(); var stmtOpts = ParseStatementOptions{}; - decls.update(try p.parseAndDeclareDecls(.constant, &stmtOpts)); - init_ = p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls) }, init_loc); + var decls_list = try p.parseAndDeclareDecls(.constant, &stmtOpts); + decls = .moveFromList(&decls_list); + init_ = p.s(S.Local{ .kind = .k_const, .decls = decls }, init_loc); }, // for (;) .t_semicolon => {}, @@ -1293,7 +1303,7 @@ pub fn ParseStmt( for (local.decls.slice()) |decl| { try extractDeclsForBinding(decl.binding, &_decls); } - decls.update(_decls); + decls = .moveFromList(&_decls); }, else => {}, } diff --git a/src/ast/parseTypescript.zig b/src/ast/parseTypescript.zig index bf6793aa25..35432904e3 100644 --- a/src/ast/parseTypescript.zig +++ b/src/ast/parseTypescript.zig @@ -201,7 +201,7 @@ pub fn ParseTypescript( // run the renamer. For external-facing things the renamer will avoid // collisions automatically so this isn't important for correctness. 
arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable; - p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable; + bun.handleOom(p.current_scope.generated.append(p.allocator, arg_ref)); } else { arg_ref = p.newSymbol(.hoisted, name_text) catch unreachable; } @@ -238,7 +238,7 @@ pub fn ParseTypescript( try p.lexer.expect(.t_string_literal); try p.lexer.expect(.t_close_paren); if (!opts.is_typescript_declare) { - const args = try ExprNodeList.one(p.allocator, path); + const args = try ExprNodeList.initOne(p.allocator, path); value = p.newExpr(E.Call{ .target = target, .close_paren_loc = p.lexer.loc(), .args = args }, loc); } } else { @@ -266,7 +266,12 @@ pub fn ParseTypescript( .binding = p.b(B.Identifier{ .ref = ref }, default_name_loc), .value = value, }; - return p.s(S.Local{ .kind = kind, .decls = Decl.List.init(decls), .is_export = opts.is_export, .was_ts_import_equals = true }, loc); + return p.s(S.Local{ + .kind = kind, + .decls = Decl.List.fromOwnedSlice(decls), + .is_export = opts.is_export, + .was_ts_import_equals = true, + }, loc); } pub fn parseTypescriptEnumStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions) anyerror!Stmt { @@ -372,7 +377,7 @@ pub fn ParseTypescript( // run the renamer. For external-facing things the renamer will avoid // collisions automatically so this isn't important for correctness. 
arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable; - p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable; + bun.handleOom(p.current_scope.generated.append(p.allocator, arg_ref)); } else { arg_ref = p.declareSymbol(.hoisted, name_loc, name_text) catch unreachable; } diff --git a/src/ast/visit.zig b/src/ast/visit.zig index e37b74e4f2..18ee06ec63 100644 --- a/src/ast/visit.zig +++ b/src/ast/visit.zig @@ -567,9 +567,9 @@ pub fn Visit( // Make it an error to use "arguments" in a static class block p.current_scope.forbid_arguments = true; - var list = property.class_static_block.?.stmts.listManaged(p.allocator); + var list = property.class_static_block.?.stmts.moveToListManaged(p.allocator); p.visitStmts(&list, .fn_body) catch unreachable; - property.class_static_block.?.stmts = js_ast.BabyList(Stmt).fromList(list); + property.class_static_block.?.stmts = js_ast.BabyList(Stmt).moveFromList(&list); p.popScope(); p.fn_or_arrow_data_visit = old_fn_or_arrow_data; @@ -912,12 +912,13 @@ pub fn Visit( before.ensureUnusedCapacity(@as(usize, @intFromBool(let_decls.items.len > 0)) + @as(usize, @intFromBool(var_decls.items.len > 0)) + non_fn_stmts.items.len) catch unreachable; if (let_decls.items.len > 0) { + const decls: Decl.List = .moveFromList(&let_decls); before.appendAssumeCapacity(p.s( S.Local{ .kind = .k_let, - .decls = Decl.List.fromList(let_decls), + .decls = decls, }, - let_decls.items[0].value.?.loc, + decls.at(0).value.?.loc, )); } @@ -928,12 +929,13 @@ pub fn Visit( before.appendAssumeCapacity(new); } } else { + const decls: Decl.List = .moveFromList(&var_decls); before.appendAssumeCapacity(p.s( S.Local{ .kind = .k_var, - .decls = Decl.List.fromList(var_decls), + .decls = decls, }, - var_decls.items[0].value.?.loc, + decls.at(0).value.?.loc, )); } } @@ -1166,7 +1168,10 @@ pub fn Visit( if (prev_stmt.data == .s_local and local.canMergeWith(prev_stmt.data.s_local)) { - 
prev_stmt.data.s_local.decls.append(p.allocator, local.decls.slice()) catch unreachable; + prev_stmt.data.s_local.decls.appendSlice( + p.allocator, + local.decls.slice(), + ) catch |err| bun.handleOom(err); continue; } } diff --git a/src/ast/visitBinaryExpression.zig b/src/ast/visitBinaryExpression.zig index d6cd5885f0..12c3de8786 100644 --- a/src/ast/visitBinaryExpression.zig +++ b/src/ast/visitBinaryExpression.zig @@ -6,6 +6,50 @@ pub fn CreateBinaryExpressionVisitor( return struct { const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only); + /// Try to optimize "typeof x === 'undefined'" to "typeof x > 'u'" or similar + /// Returns the optimized expression if successful, null otherwise + fn tryOptimizeTypeofUndefined(e_: *E.Binary, p: *P, replacement_op: js_ast.Op.Code) ?Expr { + // Check if this is a typeof comparison with "undefined" + const typeof_expr, const string_expr, const flip_comparison = exprs: { + // Try left side as typeof, right side as string + if (e_.left.data == .e_unary and e_.left.data.e_unary.op == .un_typeof) { + if (e_.right.data == .e_string and + e_.right.data.e_string.eqlComptime("undefined")) + { + break :exprs .{ e_.left, e_.right, false }; + } + + return null; + } + + // Try right side as typeof, left side as string + if (e_.right.data == .e_unary and e_.right.data.e_unary.op == .un_typeof) { + if (e_.left.data == .e_string and + e_.left.data.e_string.eqlComptime("undefined")) + { + break :exprs .{ e_.right, e_.left, true }; + } + + return null; + } + + return null; + }; + + // Create new string with "u" + const u_string = p.newExpr(E.String{ .data = "u" }, string_expr.loc); + + // Create the optimized comparison + const left = if (flip_comparison) u_string else typeof_expr; + const right = if (flip_comparison) typeof_expr else u_string; + + return p.newExpr(E.Binary{ + .left = left, + .right = right, + .op = replacement_op, + }, e_.left.loc); + } + pub const BinaryExpressionVisitor = 
struct { e: *E.Binary, loc: logger.Loc, @@ -121,6 +165,11 @@ pub fn CreateBinaryExpressionVisitor( } if (p.options.features.minify_syntax) { + // "typeof x == 'undefined'" => "typeof x > 'u'" + if (tryOptimizeTypeofUndefined(e_, p, .bin_gt)) |optimized| { + return optimized; + } + // "x == void 0" => "x == null" if (e_.left.data == .e_undefined) { e_.left.data = .{ .e_null = E.Null{} }; @@ -146,6 +195,13 @@ pub fn CreateBinaryExpressionVisitor( return p.newExpr(E.Boolean{ .value = equality.equal }, v.loc); } + if (p.options.features.minify_syntax) { + // "typeof x === 'undefined'" => "typeof x > 'u'" + if (tryOptimizeTypeofUndefined(e_, p, .bin_gt)) |optimized| { + return optimized; + } + } + // const after_op_loc = locAfterOp(e_.); // TODO: warn about equality check // TODO: warn about typeof string @@ -161,6 +217,13 @@ pub fn CreateBinaryExpressionVisitor( return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc); } + if (p.options.features.minify_syntax) { + // "typeof x != 'undefined'" => "typeof x < 'u'" + if (tryOptimizeTypeofUndefined(e_, p, .bin_lt)) |optimized| { + return optimized; + } + } + // const after_op_loc = locAfterOp(e_.); // TODO: warn about equality check // TODO: warn about typeof string @@ -181,6 +244,13 @@ pub fn CreateBinaryExpressionVisitor( return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc); } + + if (p.options.features.minify_syntax) { + // "typeof x !== 'undefined'" => "typeof x < 'u'" + if (tryOptimizeTypeofUndefined(e_, p, .bin_lt)) |optimized| { + return optimized; + } + } }, .bin_nullish_coalescing => { const nullorUndefined = SideEffects.toNullOrUndefined(p, e_.left.data); @@ -360,6 +430,70 @@ pub fn CreateBinaryExpressionVisitor( } } }, + + .bin_lt => { + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValuesInSafeRange(e_.left.data, e_.right.data)) |vals| { + return p.newExpr(E.Boolean{ + .value = vals[0] < vals[1], + }, v.loc); + } + if (Expr.extractStringValues(e_.left.data, 
e_.right.data, p.allocator)) |vals| { + return p.newExpr(E.Boolean{ + .value = vals[0].order(vals[1]) == .lt, + }, v.loc); + } + } + }, + .bin_gt => { + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValuesInSafeRange(e_.left.data, e_.right.data)) |vals| { + return p.newExpr(E.Boolean{ + .value = vals[0] > vals[1], + }, v.loc); + } + if (Expr.extractStringValues(e_.left.data, e_.right.data, p.allocator)) |vals| { + return p.newExpr(E.Boolean{ + .value = vals[0].order(vals[1]) == .gt, + }, v.loc); + } + } + }, + .bin_le => { + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValuesInSafeRange(e_.left.data, e_.right.data)) |vals| { + return p.newExpr(E.Boolean{ + .value = vals[0] <= vals[1], + }, v.loc); + } + if (Expr.extractStringValues(e_.left.data, e_.right.data, p.allocator)) |vals| { + return p.newExpr(E.Boolean{ + .value = switch (vals[0].order(vals[1])) { + .eq, .lt => true, + .gt => false, + }, + }, v.loc); + } + } + }, + .bin_ge => { + if (p.should_fold_typescript_constant_expressions) { + if (Expr.extractNumericValuesInSafeRange(e_.left.data, e_.right.data)) |vals| { + return p.newExpr(E.Boolean{ + .value = vals[0] >= vals[1], + }, v.loc); + } + if (Expr.extractStringValues(e_.left.data, e_.right.data, p.allocator)) |vals| { + return p.newExpr(E.Boolean{ + .value = switch (vals[0].order(vals[1])) { + .eq, .gt => true, + .lt => false, + }, + }, v.loc); + } + } + }, + // --------------------------------------------------------------------------------------------------- .bin_assign => { // Optionally preserve the name diff --git a/src/ast/visitExpr.zig b/src/ast/visitExpr.zig index d77ae9fe31..a01a185b1d 100644 --- a/src/ast/visitExpr.zig +++ b/src/ast/visitExpr.zig @@ -228,26 +228,30 @@ pub fn VisitExpr( // That would reduce the amount of allocations a little if (runtime == .classic or is_key_after_spread) { // Arguments to createElement() - const args = p.allocator.alloc(Expr, 2 + children_count) 
catch unreachable; - // There are at least two args: - // - name of the tag - // - props - var i: usize = 2; - args[0] = tag; + var args = bun.BabyList(Expr).initCapacity( + p.allocator, + 2 + children_count, + ) catch |err| bun.handleOom(err); + args.appendAssumeCapacity(tag); const num_props = e_.properties.len; if (num_props > 0) { const props = p.allocator.alloc(G.Property, num_props) catch unreachable; bun.copy(G.Property, props, e_.properties.slice()); - args[1] = p.newExpr(E.Object{ .properties = G.Property.List.init(props) }, expr.loc); + args.appendAssumeCapacity(p.newExpr( + E.Object{ .properties = G.Property.List.fromOwnedSlice(props) }, + expr.loc, + )); } else { - args[1] = p.newExpr(E.Null{}, expr.loc); + args.appendAssumeCapacity(p.newExpr(E.Null{}, expr.loc)); } const children_elements = e_.children.slice()[0..children_count]; for (children_elements) |child| { - args[i] = p.visitExpr(child); - i += @as(usize, @intCast(@intFromBool(args[i].data != .e_missing))); + const arg = p.visitExpr(child); + if (arg.data != .e_missing) { + args.appendAssumeCapacity(arg); + } } const target = p.jsxStringsToMemberExpression(expr.loc, p.options.jsx.factory) catch unreachable; @@ -255,7 +259,7 @@ pub fn VisitExpr( // Call createElement() return p.newExpr(E.Call{ .target = if (runtime == .classic) target else p.jsxImport(.createElement, expr.loc), - .args = ExprNodeList.init(args[0..i]), + .args = args, // Enable tree shaking .can_be_unwrapped_if_unused = if (!p.options.ignore_dce_annotations and !p.options.jsx.side_effects) .if_unused else .never, .close_paren_loc = e_.close_tag_loc, @@ -265,7 +269,7 @@ pub fn VisitExpr( else if (runtime == .automatic) { // --- These must be done in all cases -- const allocator = p.allocator; - var props: std.ArrayListUnmanaged(G.Property) = e_.properties.list(); + var props = &e_.properties; const maybe_key_value: ?ExprNodeIndex = if (e_.key_prop_index > -1) props.orderedRemove(@intCast(e_.key_prop_index)).value else null; @@ 
-296,8 +300,8 @@ pub fn VisitExpr( // -> //
// jsx("div", {...foo}) - while (props.items.len == 1 and props.items[0].kind == .spread and props.items[0].value.?.data == .e_object) { - props = props.items[0].value.?.data.e_object.properties.list(); + while (props.len == 1 and props.at(0).kind == .spread and props.at(0).value.?.data == .e_object) { + props = &props.at(0).value.?.data.e_object.properties; } // Typescript defines static jsx as children.len > 1 or single spread @@ -326,7 +330,7 @@ pub fn VisitExpr( args[0] = tag; args[1] = p.newExpr(E.Object{ - .properties = G.Property.List.fromList(props), + .properties = props.*, }, expr.loc); if (maybe_key_value) |key| { @@ -360,7 +364,7 @@ pub fn VisitExpr( return p.newExpr(E.Call{ .target = p.jsxImportAutomatic(expr.loc, is_static_jsx), - .args = ExprNodeList.init(args), + .args = ExprNodeList.fromOwnedSlice(args), // Enable tree shaking .can_be_unwrapped_if_unused = if (!p.options.ignore_dce_annotations and !p.options.jsx.side_effects) .if_unused else .never, .was_jsx_element = true, @@ -804,6 +808,7 @@ pub fn VisitExpr( E.Unary{ .op = e_.op, .value = comma.right, + .flags = e_.flags, }, comma.right.loc, ), @@ -1278,7 +1283,7 @@ pub fn VisitExpr( // the try/catch statement is there to handle the potential run-time // error from the unbundled require() call failing. 
if (e_.args.len == 1) { - const first = e_.args.first_(); + const first = e_.args.slice()[0]; const state = TransposeState{ .is_require_immediately_assigned_to_decl = in.is_immediately_assigned_to_decl and first.data == .e_string, @@ -1323,7 +1328,7 @@ pub fn VisitExpr( } if (e_.args.len == 1) { - const first = e_.args.first_(); + const first = e_.args.slice()[0]; switch (first.data) { .e_string => { // require.resolve(FOO) => require.resolve(FOO) @@ -1491,7 +1496,9 @@ pub fn VisitExpr( } if (p.options.features.minify_syntax) { - KnownGlobal.maybeMarkConstructorAsPure(e_, p.symbols.items); + if (KnownGlobal.minifyGlobalConstructor(p.allocator, e_, p.symbols.items, expr.loc, p.options.features.minify_whitespace)) |minified| { + return minified; + } } return expr; } diff --git a/src/ast/visitStmt.zig b/src/ast/visitStmt.zig index 4de08b6d57..9a6e7b879a 100644 --- a/src/ast/visitStmt.zig +++ b/src/ast/visitStmt.zig @@ -126,7 +126,7 @@ pub fn VisitStmt( const name = p.loadNameFromRef(data.namespace_ref); data.namespace_ref = try p.newSymbol(.other, name); - try p.current_scope.generated.push(p.allocator, data.namespace_ref); + try p.current_scope.generated.append(p.allocator, data.namespace_ref); try p.recordDeclaredSymbol(data.namespace_ref); if (p.options.features.replace_exports.count() > 0) { @@ -146,7 +146,7 @@ pub fn VisitStmt( const _name = p.loadNameFromRef(old_ref); const ref = try p.newSymbol(.import, _name); - try p.current_scope.generated.push(p.allocator, ref); + try p.current_scope.generated.append(p.allocator, ref); try p.recordDeclaredSymbol(ref); data.items[j] = item; data.items[j].name.ref = ref; @@ -163,7 +163,7 @@ pub fn VisitStmt( for (data.items) |*item| { const _name = p.loadNameFromRef(item.name.ref.?); const ref = try p.newSymbol(.import, _name); - try p.current_scope.generated.push(p.allocator, ref); + try p.current_scope.generated.append(p.allocator, ref); try p.recordDeclaredSymbol(ref); item.name.ref = ref; } @@ -176,7 +176,7 @@ pub fn 
VisitStmt( // "export * from 'path'" const name = p.loadNameFromRef(data.namespace_ref); data.namespace_ref = try p.newSymbol(.other, name); - try p.current_scope.generated.push(p.allocator, data.namespace_ref); + try p.current_scope.generated.append(p.allocator, data.namespace_ref); try p.recordDeclaredSymbol(data.namespace_ref); // "export * as ns from 'path'" @@ -262,7 +262,7 @@ pub fn VisitStmt( }) { // declare a temporary ref for this const temp_id = p.generateTempRef("default_export"); - try p.current_scope.generated.push(p.allocator, temp_id); + try p.current_scope.generated.append(p.allocator, temp_id); try stmts.append(Stmt.alloc(S.Local, .{ .kind = .k_const, @@ -293,7 +293,7 @@ pub fn VisitStmt( .value = data.value.expr, }; stmts.appendAssumeCapacity(p.s(S.Local{ - .decls = G.Decl.List.init(decls), + .decls = G.Decl.List.fromOwnedSlice(decls), }, stmt.loc)); const items = bun.handleOom(p.allocator.alloc(js_ast.ClauseItem, 1)); items[0] = js_ast.ClauseItem{ @@ -390,7 +390,7 @@ pub fn VisitStmt( } const temp_id = p.generateTempRef("default_export"); - try p.current_scope.generated.push(p.allocator, temp_id); + try p.current_scope.generated.append(p.allocator, temp_id); break :brk temp_id; }; @@ -865,7 +865,7 @@ pub fn VisitStmt( .kind = .k_var, .is_export = false, .was_commonjs_export = true, - .decls = G.Decl.List.init(decls), + .decls = G.Decl.List.fromOwnedSlice(decls), }, stmt.loc, ), @@ -1205,7 +1205,7 @@ pub fn VisitStmt( .binding = p.b(B.Identifier{ .ref = id.ref }, loc), .value = p.newExpr(E.Identifier{ .ref = temp_ref }, loc), }; - break :bindings G.Decl.List.init(decls); + break :bindings G.Decl.List.fromOwnedSlice(decls); }, }, loc); diff --git a/src/bun.js/ModuleLoader.zig b/src/bun.js/ModuleLoader.zig index 378a6bb5c2..5bdfc5cb88 100644 --- a/src/bun.js/ModuleLoader.zig +++ b/src/bun.js/ModuleLoader.zig @@ -1111,7 +1111,7 @@ pub fn transpileSourceCode( .allocator = null, .specifier = input_specifier, .source_url = 
input_specifier.createIfDifferent(path.text), - .jsvalue_for_export = parse_result.ast.parts.@"[0]"().stmts[0].data.s_expr.value.toJS(allocator, globalObject orelse jsc_vm.global) catch |e| panic("Unexpected JS error: {s}", .{@errorName(e)}), + .jsvalue_for_export = parse_result.ast.parts.at(0).stmts[0].data.s_expr.value.toJS(allocator, globalObject orelse jsc_vm.global) catch |e| panic("Unexpected JS error: {s}", .{@errorName(e)}), .tag = .exports_object, }; } @@ -1947,7 +1947,8 @@ export fn Bun__transpileVirtualModule( ) bool { jsc.markBinding(@src()); const jsc_vm = globalObject.bunVM(); - bun.assert(jsc_vm.plugin_runner != null); + // Plugin runner is not required for virtual modules created via build.module() + // bun.assert(jsc_vm.plugin_runner != null); var specifier_slice = specifier_ptr.toUTF8(jsc_vm.allocator); const specifier = specifier_slice.slice(); diff --git a/src/bun.js/RuntimeTranspilerCache.zig b/src/bun.js/RuntimeTranspilerCache.zig index a39006121f..b951af8bdf 100644 --- a/src/bun.js/RuntimeTranspilerCache.zig +++ b/src/bun.js/RuntimeTranspilerCache.zig @@ -12,7 +12,8 @@ /// Version 13: Hoist `import.meta.require` definition, see #15738 /// Version 14: Updated global defines table list. /// Version 15: Updated global defines table list. -const expected_version = 15; +/// Version 16: Added typeof undefined minification optimization. 
+const expected_version = 16; const debug = Output.scoped(.cache, .visible); const MINIMUM_CACHE_SIZE = 50 * 1024; diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig index 2f2a3d4f80..bafedf0888 100644 --- a/src/bun.js/api/JSTranspiler.zig +++ b/src/bun.js/api/JSTranspiler.zig @@ -649,8 +649,12 @@ pub fn constructor(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) b .transpiler = undefined, .scan_pass_result = ScanPassResult.init(bun.default_allocator), }); - errdefer bun.destroy(this); - errdefer this.arena.deinit(); + errdefer { + this.config.log.deinit(); + this.arena.deinit(); + this.ref_count.clearWithoutDestructor(); + bun.destroy(this); + } const config_arg = if (arguments.len > 0) arguments.ptr[0] else .js_undefined; const allocator = this.arena.allocator(); diff --git a/src/bun.js/api/Timer.zig b/src/bun.js/api/Timer.zig index 8345724837..0e66247057 100644 --- a/src/bun.js/api/Timer.zig +++ b/src/bun.js/api/Timer.zig @@ -31,6 +31,9 @@ pub const All = struct { immediate_ref_count: i32 = 0, uv_idle: if (Environment.isWindows) uv.uv_idle_t else void = if (Environment.isWindows) std.mem.zeroes(uv.uv_idle_t), + // Event loop delay monitoring (not exposed to JS) + event_loop_delay: EventLoopDelayMonitor = .{}, + // We split up the map here to avoid storing an extra "repeat" boolean maps: struct { setTimeout: TimeoutMap = .{}, @@ -597,6 +600,8 @@ pub const WTFTimer = @import("./Timer/WTFTimer.zig"); pub const DateHeaderTimer = @import("./Timer/DateHeaderTimer.zig"); +pub const EventLoopDelayMonitor = @import("./Timer/EventLoopDelayMonitor.zig"); + pub const internal_bindings = struct { /// Node.js has some tests that check whether timers fire at the right time. They check this /// with the internal binding `getLibuvNow()`, which returns an integer in milliseconds. 
This diff --git a/src/bun.js/api/Timer/EventLoopDelayMonitor.zig b/src/bun.js/api/Timer/EventLoopDelayMonitor.zig new file mode 100644 index 0000000000..c546ede802 --- /dev/null +++ b/src/bun.js/api/Timer/EventLoopDelayMonitor.zig @@ -0,0 +1,83 @@ +const EventLoopDelayMonitor = @This(); + +/// We currently only globally share the same instance, which is kept alive by +/// the existence of the src/js/internal/perf_hooks/monitorEventLoopDelay.ts +/// function's scope. +/// +/// I don't think having a single event loop delay monitor histogram instance +/// /will cause any issues? Let's find out. +js_histogram: jsc.JSValue = jsc.JSValue.zero, + +event_loop_timer: jsc.API.Timer.EventLoopTimer = .{ + .next = .epoch, + .tag = .EventLoopDelayMonitor, +}, +resolution_ms: i32 = 10, +last_fire_ns: u64 = 0, +enabled: bool = false, + +pub fn enable(this: *EventLoopDelayMonitor, vm: *VirtualMachine, histogram: jsc.JSValue, resolution_ms: i32) void { + if (this.enabled) return; + this.js_histogram = histogram; + this.resolution_ms = resolution_ms; + + this.enabled = true; + + // Schedule timer + const now = bun.timespec.now(); + this.event_loop_timer.next = now.addMs(@intCast(resolution_ms)); + vm.timer.insert(&this.event_loop_timer); +} + +pub fn disable(this: *EventLoopDelayMonitor, vm: *VirtualMachine) void { + if (!this.enabled) return; + + this.enabled = false; + this.js_histogram = jsc.JSValue.zero; + this.last_fire_ns = 0; + vm.timer.remove(&this.event_loop_timer); +} + +pub fn isEnabled(this: *const EventLoopDelayMonitor) bool { + return this.enabled and this.js_histogram != jsc.JSValue.zero; +} + +pub fn onFire(this: *EventLoopDelayMonitor, vm: *VirtualMachine, now: *const bun.timespec) void { + if (!this.enabled or this.js_histogram == jsc.JSValue.zero) { + return; + } + + const now_ns = now.ns(); + if (this.last_fire_ns > 0) { + const expected_ns = @as(u64, @intCast(this.resolution_ms)) *| 1_000_000; + const actual_ns = now_ns - this.last_fire_ns; + + if (actual_ns > 
expected_ns) { + const delay_ns = @as(i64, @intCast(actual_ns -| expected_ns)); + JSNodePerformanceHooksHistogram_recordDelay(this.js_histogram, delay_ns); + } + } + + this.last_fire_ns = now_ns; + + // Reschedule + this.event_loop_timer.next = now.addMs(@intCast(this.resolution_ms)); + vm.timer.insert(&this.event_loop_timer); +} + +// Record delay to histogram +extern fn JSNodePerformanceHooksHistogram_recordDelay(histogram: jsc.JSValue, delay_ns: i64) void; + +// Export functions for C++ +export fn Timer_enableEventLoopDelayMonitoring(vm: *VirtualMachine, histogram: jsc.JSValue, resolution_ms: i32) void { + vm.timer.event_loop_delay.enable(vm, histogram, resolution_ms); +} + +export fn Timer_disableEventLoopDelayMonitoring(vm: *VirtualMachine) void { + vm.timer.event_loop_delay.disable(vm); +} + +const bun = @import("bun"); + +const jsc = bun.jsc; +const VirtualMachine = jsc.VirtualMachine; diff --git a/src/bun.js/api/Timer/EventLoopTimer.zig b/src/bun.js/api/Timer/EventLoopTimer.zig index e4fb58ab22..eb5a73d5bb 100644 --- a/src/bun.js/api/Timer/EventLoopTimer.zig +++ b/src/bun.js/api/Timer/EventLoopTimer.zig @@ -68,6 +68,7 @@ pub const Tag = if (Environment.isWindows) enum { DevServerMemoryVisualizerTick, AbortSignalTimeout, DateHeaderTimer, + EventLoopDelayMonitor, pub fn Type(comptime T: Tag) type { return switch (T) { @@ -92,6 +93,7 @@ pub const Tag = if (Environment.isWindows) enum { => bun.bake.DevServer, .AbortSignalTimeout => jsc.WebCore.AbortSignal.Timeout, .DateHeaderTimer => jsc.API.Timer.DateHeaderTimer, + .EventLoopDelayMonitor => jsc.API.Timer.EventLoopDelayMonitor, }; } } else enum { @@ -114,6 +116,7 @@ pub const Tag = if (Environment.isWindows) enum { DevServerMemoryVisualizerTick, AbortSignalTimeout, DateHeaderTimer, + EventLoopDelayMonitor, pub fn Type(comptime T: Tag) type { return switch (T) { @@ -137,6 +140,7 @@ pub const Tag = if (Environment.isWindows) enum { => bun.bake.DevServer, .AbortSignalTimeout => jsc.WebCore.AbortSignal.Timeout, 
.DateHeaderTimer => jsc.API.Timer.DateHeaderTimer, + .EventLoopDelayMonitor => jsc.API.Timer.EventLoopDelayMonitor, }; } }; @@ -213,6 +217,11 @@ pub fn fire(self: *Self, now: *const timespec, vm: *VirtualMachine) Arm { date_header_timer.run(vm); return .disarm; }, + .EventLoopDelayMonitor => { + const monitor = @as(*jsc.API.Timer.EventLoopDelayMonitor, @fieldParentPtr("event_loop_timer", self)); + monitor.onFire(vm, now); + return .disarm; + }, inline else => |t| { if (@FieldType(t.Type(), "event_loop_timer") != Self) { @compileError(@typeName(t.Type()) ++ " has wrong type for 'event_loop_timer'"); diff --git a/src/bun.js/api/bun/h2_frame_parser.zig b/src/bun.js/api/bun/h2_frame_parser.zig index 856d82b853..8f032d36e0 100644 --- a/src/bun.js/api/bun/h2_frame_parser.zig +++ b/src/bun.js/api/bun/h2_frame_parser.zig @@ -4465,13 +4465,9 @@ pub const H2FrameParser = struct { this.detachNativeSocket(); this.readBuffer.deinit(); - - { - var writeBuffer = this.writeBuffer; - this.writeBuffer = .{}; - writeBuffer.deinitWithAllocator(this.allocator); - } + this.writeBuffer.clearAndFree(this.allocator); this.writeBufferOffset = 0; + if (this.hpack) |hpack| { hpack.deinit(); this.hpack = null; diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 73ffa0fee5..dad7b94373 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -285,7 +285,7 @@ pub fn NewSocket(comptime ssl: bool) type { // Ensure the socket is still alive for any defer's we have this.ref(); defer this.deref(); - this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); + this.buffered_data_for_node_net.clearAndFree(bun.default_allocator); const needs_deref = !this.socket.isDetached(); this.socket = Socket.detached; @@ -368,7 +368,7 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn closeAndDetach(this: *This, code: uws.Socket.CloseCode) void { const socket = this.socket; - 
this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); + this.buffered_data_for_node_net.clearAndFree(bun.default_allocator); this.socket.detach(); this.detachNativeCallback(); @@ -883,7 +883,7 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn writeBuffered(this: *This, globalObject: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue { if (this.socket.isDetached()) { - this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); + this.buffered_data_for_node_net.clearAndFree(bun.default_allocator); // TODO: should we separate unattached and detached? unattached shouldn't throw here const err: jsc.SystemError = .{ .errno = @intFromEnum(bun.sys.SystemErrno.EBADF), @@ -904,7 +904,7 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn endBuffered(this: *This, globalObject: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue { if (this.socket.isDetached()) { - this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); + this.buffered_data_for_node_net.clearAndFree(bun.default_allocator); return .false; } @@ -987,8 +987,7 @@ pub fn NewSocket(comptime ssl: bool) type { const written: usize = @intCast(@max(rc, 0)); const leftover = total_to_write -| written; if (leftover == 0) { - this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); - this.buffered_data_for_node_net = .{}; + this.buffered_data_for_node_net.clearAndFree(bun.default_allocator); break :brk rc; } @@ -1004,7 +1003,10 @@ pub fn NewSocket(comptime ssl: bool) type { } if (remaining_in_input_data.len > 0) { - bun.handleOom(this.buffered_data_for_node_net.append(bun.default_allocator, remaining_in_input_data)); + bun.handleOom(this.buffered_data_for_node_net.appendSlice( + bun.default_allocator, + remaining_in_input_data, + )); } break :brk rc; @@ -1012,15 +1014,17 @@ pub fn NewSocket(comptime ssl: bool) type { } // slower-path: clone the data, do one write. 
- bun.handleOom(this.buffered_data_for_node_net.append(bun.default_allocator, buffer.slice())); + bun.handleOom(this.buffered_data_for_node_net.appendSlice( + bun.default_allocator, + buffer.slice(), + )); const rc = this.writeMaybeCorked(this.buffered_data_for_node_net.slice()); if (rc > 0) { const wrote: usize = @intCast(@max(rc, 0)); // did we write everything? // we can free this temporary buffer. if (wrote == this.buffered_data_for_node_net.len) { - this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); - this.buffered_data_for_node_net = .{}; + this.buffered_data_for_node_net.clearAndFree(bun.default_allocator); } else { // Otherwise, let's move the temporary buffer back. const len = @as(usize, @intCast(this.buffered_data_for_node_net.len)) - wrote; @@ -1166,7 +1170,10 @@ pub fn NewSocket(comptime ssl: bool) type { if (buffer_unwritten_data) { const remaining = bytes[uwrote..]; if (remaining.len > 0) { - bun.handleOom(this.buffered_data_for_node_net.append(bun.default_allocator, remaining)); + bun.handleOom(this.buffered_data_for_node_net.appendSlice( + bun.default_allocator, + remaining, + )); } } @@ -1203,8 +1210,7 @@ pub fn NewSocket(comptime ssl: bool) type { _ = bun.c.memmove(this.buffered_data_for_node_net.ptr, remaining.ptr, remaining.len); this.buffered_data_for_node_net.len = @truncate(remaining.len); } else { - this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); - this.buffered_data_for_node_net = .{}; + this.buffered_data_for_node_net.clearAndFree(bun.default_allocator); } } } @@ -1293,7 +1299,7 @@ pub fn NewSocket(comptime ssl: bool) type { this.markInactive(); this.detachNativeCallback(); - this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); + this.buffered_data_for_node_net.deinit(bun.default_allocator); this.poll_ref.unref(jsc.VirtualMachine.get()); // need to deinit event without being attached diff --git a/src/bun.js/api/bun/socket/Listener.zig 
b/src/bun.js/api/bun/socket/Listener.zig index cfed0821e4..755bcb16b6 100644 --- a/src/bun.js/api/bun/socket/Listener.zig +++ b/src/bun.js/api/bun/socket/Listener.zig @@ -437,6 +437,7 @@ pub fn stop(this: *Listener, _: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) fn doStop(this: *Listener, force_close: bool) void { if (this.listener == .none) return; const listener = this.listener; + defer switch (listener) { .uws => |socket| socket.close(this.ssl), .namedPipe => |namedPipe| if (Environment.isWindows) namedPipe.closePipeAndDeinit(), diff --git a/src/bun.js/api/html_rewriter.zig b/src/bun.js/api/html_rewriter.zig index c2a4f2d720..b928bb7e8d 100644 --- a/src/bun.js/api/html_rewriter.zig +++ b/src/bun.js/api/html_rewriter.zig @@ -277,7 +277,7 @@ pub const HTMLRewriter = struct { return; } - const write_result = this.output.write(.{ .temporary = bun.ByteList.init(bytes) }); + const write_result = this.output.write(.{ .temporary = bun.ByteList.fromBorrowedSliceDangerous(bytes) }); switch (write_result) { .err => |err| { @@ -346,7 +346,7 @@ pub const HTMLRewriter = struct { .path = bun.handleOom(bun.default_allocator.dupe(u8, LOLHTML.HTMLString.lastError().slice())), }; }; - if (comptime deinit_) bytes.listManaged(bun.default_allocator).deinit(); + if (comptime deinit_) bytes.deinit(bun.default_allocator); return null; } diff --git a/src/bun.js/api/server/NodeHTTPResponse.zig b/src/bun.js/api/server/NodeHTTPResponse.zig index 2e0f8883ed..8207977a07 100644 --- a/src/bun.js/api/server/NodeHTTPResponse.zig +++ b/src/bun.js/api/server/NodeHTTPResponse.zig @@ -257,7 +257,7 @@ pub fn shouldRequestBePending(this: *const NodeHTTPResponse) bool { pub fn dumpRequestBody(this: *NodeHTTPResponse, globalObject: *jsc.JSGlobalObject, _: *jsc.CallFrame, thisValue: jsc.JSValue) bun.JSError!jsc.JSValue { if (this.buffered_request_body_data_during_pause.cap > 0) { - this.buffered_request_body_data_during_pause.deinitWithAllocator(bun.default_allocator); + 
this.buffered_request_body_data_during_pause.clearAndFree(bun.default_allocator); } if (!this.flags.request_has_completed) { this.clearOnDataCallback(thisValue, globalObject); @@ -273,7 +273,7 @@ fn markRequestAsDone(this: *NodeHTTPResponse) void { this.clearOnDataCallback(this.getThisValue(), jsc.VirtualMachine.get().global); this.upgrade_context.deinit(); - this.buffered_request_body_data_during_pause.deinitWithAllocator(bun.default_allocator); + this.buffered_request_body_data_during_pause.clearAndFree(bun.default_allocator); const server = this.server; this.js_ref.unref(jsc.VirtualMachine.get()); this.deref(); @@ -705,7 +705,10 @@ pub fn abort(this: *NodeHTTPResponse, _: *jsc.JSGlobalObject, _: *jsc.CallFrame) fn onBufferRequestBodyWhilePaused(this: *NodeHTTPResponse, chunk: []const u8, last: bool) void { log("onBufferRequestBodyWhilePaused({d}, {})", .{ chunk.len, last }); - bun.handleOom(this.buffered_request_body_data_during_pause.append(bun.default_allocator, chunk)); + bun.handleOom(this.buffered_request_body_data_during_pause.appendSlice( + bun.default_allocator, + chunk, + )); if (last) { this.flags.is_data_buffered_during_pause_last = true; if (this.body_read_ref.has) { @@ -743,7 +746,7 @@ fn onDataOrAborted(this: *NodeHTTPResponse, chunk: []const u8, last: bool, event const bytes: jsc.JSValue = brk: { if (chunk.len > 0 and this.buffered_request_body_data_during_pause.len > 0) { const buffer = jsc.JSValue.createBufferFromLength(globalThis, chunk.len + this.buffered_request_body_data_during_pause.len) catch return; // TODO: properly propagate exception upwards - this.buffered_request_body_data_during_pause.deinitWithAllocator(bun.default_allocator); + this.buffered_request_body_data_during_pause.clearAndFree(bun.default_allocator); if (buffer.asArrayBuffer(globalThis)) |array_buffer| { var input = array_buffer.slice(); @memcpy(input[0..this.buffered_request_body_data_during_pause.len], this.buffered_request_body_data_during_pause.slice()); @@ -1134,7 
+1137,7 @@ fn deinit(this: *NodeHTTPResponse) void { bun.debugAssert(!this.flags.is_request_pending); bun.debugAssert(this.flags.socket_closed or this.flags.request_has_completed); - this.buffered_request_body_data_during_pause.deinitWithAllocator(bun.default_allocator); + this.buffered_request_body_data_during_pause.deinit(bun.default_allocator); this.js_ref.unref(jsc.VirtualMachine.get()); this.body_read_ref.unref(jsc.VirtualMachine.get()); diff --git a/src/bun.js/api/server/RequestContext.zig b/src/bun.js/api/server/RequestContext.zig index 849303e32e..e1c0097107 100644 --- a/src/bun.js/api/server/RequestContext.zig +++ b/src/bun.js/api/server/RequestContext.zig @@ -1761,7 +1761,7 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, // we can avoid streaming it and just send it all at once. if (byte_stream.has_received_last_chunk) { var byte_list = byte_stream.drain(); - this.blob = .fromArrayList(byte_list.listManaged(bun.default_allocator)); + this.blob = .fromArrayList(byte_list.moveToListManaged(bun.default_allocator)); this.readable_stream_ref.deinit(); this.doRenderBlob(); return; @@ -1771,7 +1771,8 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, this.readable_stream_ref = jsc.WebCore.ReadableStream.Strong.init(stream, globalThis); this.byte_stream = byte_stream; - this.response_buf_owned = byte_stream.drain().list(); + var response_buf = byte_stream.drain(); + this.response_buf_owned = response_buf.moveToList(); // we don't set size here because even if we have a hint // uWebSockets won't let us partially write streaming content @@ -1817,8 +1818,8 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, if (is_done) this.deref(); if (stream_needs_deinit) { switch (stream_) { - .owned_and_done => |*owned| owned.listManaged(allocator).deinit(), - .owned => |*owned| owned.listManaged(allocator).deinit(), + .owned_and_done => |*owned| owned.deinit(allocator), + .owned 
=> |*owned| owned.deinit(allocator), else => unreachable, } } @@ -2240,7 +2241,7 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, if (!last) { readable.ptr.Bytes.onData( .{ - .temporary = bun.ByteList.initConst(chunk), + .temporary = bun.ByteList.fromBorrowedSliceDangerous(chunk), }, bun.default_allocator, ); @@ -2256,7 +2257,7 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, readable.value.ensureStillAlive(); readable.ptr.Bytes.onData( .{ - .temporary_and_done = bun.ByteList.initConst(chunk), + .temporary_and_done = bun.ByteList.fromBorrowedSliceDangerous(chunk), }, bun.default_allocator, ); diff --git a/src/bun.js/api/server/ServerConfig.zig b/src/bun.js/api/server/ServerConfig.zig index 907e878bf9..5e347924d5 100644 --- a/src/bun.js/api/server/ServerConfig.zig +++ b/src/bun.js/api/server/ServerConfig.zig @@ -260,11 +260,11 @@ pub fn deinit(this: *ServerConfig) void { ssl_config.deinit(); this.ssl_config = null; } - if (this.sni) |sni| { + if (this.sni) |*sni| { for (sni.slice()) |*ssl_config| { ssl_config.deinit(); } - this.sni.?.deinitWithAllocator(bun.default_allocator); + sni.deinit(bun.default_allocator); this.sni = null; } @@ -939,7 +939,7 @@ pub fn fromJS( args.sni = bun.handleOom(bun.BabyList(SSLConfig).initCapacity(bun.default_allocator, value_iter.len - 1)); } - bun.handleOom(args.sni.?.push(bun.default_allocator, ssl_config)); + bun.handleOom(args.sni.?.append(bun.default_allocator, ssl_config)); } } } diff --git a/src/bun.js/bindings/BunPlugin.cpp b/src/bun.js/bindings/BunPlugin.cpp index 3ca7f114f4..c0f830030a 100644 --- a/src/bun.js/bindings/BunPlugin.cpp +++ b/src/bun.js/bindings/BunPlugin.cpp @@ -150,9 +150,14 @@ static EncodedJSValue jsFunctionAppendVirtualModulePluginBody(JSC::JSGlobalObjec virtualModules->set(moduleId, JSC::Strong { vm, jsCast(functionValue) }); - global->requireMap()->remove(globalObject, moduleIdValue); + auto* requireMap = global->requireMap(); 
RETURN_IF_EXCEPTION(scope, {}); - global->esmRegistryMap()->remove(globalObject, moduleIdValue); + requireMap->remove(globalObject, moduleIdValue); + RETURN_IF_EXCEPTION(scope, {}); + + auto* esmRegistry = global->esmRegistryMap(); + RETURN_IF_EXCEPTION(scope, {}); + esmRegistry->remove(globalObject, moduleIdValue); RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(callframe->thisValue()); diff --git a/src/bun.js/bindings/BunProcess.cpp b/src/bun.js/bindings/BunProcess.cpp index d0b0610852..cd04ea9ee0 100644 --- a/src/bun.js/bindings/BunProcess.cpp +++ b/src/bun.js/bindings/BunProcess.cpp @@ -1644,7 +1644,7 @@ bool setProcessExitCodeInner(JSC::JSGlobalObject* lexicalGlobalObject, Process* auto num = code.toNumber(lexicalGlobalObject); RETURN_IF_EXCEPTION(throwScope, {}); if (!std::isnan(num)) { - code = jsDoubleNumber(num); + code = jsNumber(num); } } ssize_t exitCodeInt; @@ -1723,11 +1723,9 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb struct rlimit limit; getrlimit(resourceLimits[i], &limit); - JSValue soft = limit.rlim_cur == RLIM_INFINITY ? JSC::jsString(vm, String("unlimited"_s)) : limit.rlim_cur > INT32_MAX ? JSC::jsNumber(limit.rlim_cur) - : JSC::jsDoubleNumber(static_cast(limit.rlim_cur)); + JSValue soft = limit.rlim_cur == RLIM_INFINITY ? JSC::jsString(vm, String("unlimited"_s)) : JSC::jsNumber(limit.rlim_cur); - JSValue hard = limit.rlim_max == RLIM_INFINITY ? JSC::jsString(vm, String("unlimited"_s)) : limit.rlim_max > INT32_MAX ? JSC::jsNumber(limit.rlim_max) - : JSC::jsDoubleNumber(static_cast(limit.rlim_max)); + JSValue hard = limit.rlim_max == RLIM_INFINITY ? 
JSC::jsString(vm, String("unlimited"_s)) : JSC::jsNumber(limit.rlim_max); limitObject->putDirect(vm, JSC::Identifier::fromString(vm, "soft"_s), soft, 0); limitObject->putDirect(vm, JSC::Identifier::fromString(vm, "hard"_s), hard, 0); @@ -1885,7 +1883,7 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb heapSpaces->putDirect(vm, JSC::Identifier::fromString(vm, "shared_large_object_space"_s), JSC::constructEmptyObject(globalObject), 0); RETURN_IF_EXCEPTION(scope, {}); - heap->putDirect(vm, JSC::Identifier::fromString(vm, "totalMemory"_s), JSC::jsDoubleNumber(static_cast(WTF::ramSize())), 0); + heap->putDirect(vm, JSC::Identifier::fromString(vm, "totalMemory"_s), JSC::jsNumber(WTF::ramSize()), 0); heap->putDirect(vm, JSC::Identifier::fromString(vm, "executableMemory"_s), jsNumber(0), 0); heap->putDirect(vm, JSC::Identifier::fromString(vm, "totalCommittedMemory"_s), jsNumber(0), 0); heap->putDirect(vm, JSC::Identifier::fromString(vm, "availableMemory"_s), jsNumber(0), 0); @@ -1894,7 +1892,7 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb heap->putDirect(vm, JSC::Identifier::fromString(vm, "usedMemory"_s), jsNumber(0), 0); heap->putDirect(vm, JSC::Identifier::fromString(vm, "memoryLimit"_s), jsNumber(0), 0); heap->putDirect(vm, JSC::Identifier::fromString(vm, "mallocedMemory"_s), jsNumber(0), 0); - heap->putDirect(vm, JSC::Identifier::fromString(vm, "externalMemory"_s), JSC::jsDoubleNumber(static_cast(vm.heap.externalMemorySize())), 0); + heap->putDirect(vm, JSC::Identifier::fromString(vm, "externalMemory"_s), JSC::jsNumber(vm.heap.externalMemorySize()), 0); heap->putDirect(vm, JSC::Identifier::fromString(vm, "peakMallocedMemory"_s), jsNumber(0), 0); heap->putDirect(vm, JSC::Identifier::fromString(vm, "nativeContextCount"_s), JSC::jsNumber(1), 0); heap->putDirect(vm, JSC::Identifier::fromString(vm, "detachedContextCount"_s), JSC::jsNumber(0), 0); @@ -2756,7 +2754,7 @@ 
JSC_DEFINE_HOST_FUNCTION(Process_functionAssert, (JSGlobalObject * globalObject, extern "C" uint64_t Bun__Os__getFreeMemory(void); JSC_DEFINE_HOST_FUNCTION(Process_availableMemory, (JSGlobalObject * globalObject, CallFrame* callFrame)) { - return JSValue::encode(jsDoubleNumber(Bun__Os__getFreeMemory())); + return JSValue::encode(jsNumber(Bun__Os__getFreeMemory())); } #define PROCESS_BINDING_NOT_IMPLEMENTED_ISSUE(str, issue) \ @@ -2962,7 +2960,7 @@ static Process* getProcessObject(JSC::JSGlobalObject* lexicalGlobalObject, JSVal JSC_DEFINE_HOST_FUNCTION(Process_functionConstrainedMemory, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { - return JSValue::encode(jsDoubleNumber(static_cast(WTF::ramSize()))); + return JSValue::encode(jsNumber(WTF::ramSize())); } JSC_DEFINE_HOST_FUNCTION(Process_functionResourceUsage, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) @@ -3086,8 +3084,8 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionCpuUsage, (JSC::JSGlobalObject * global JSC::JSObject* result = JSC::constructEmptyObject(vm, cpuUsageStructure); RETURN_IF_EXCEPTION(throwScope, JSC::JSValue::encode(JSC::jsUndefined())); - result->putDirectOffset(vm, 0, JSC::jsDoubleNumber(user)); - result->putDirectOffset(vm, 1, JSC::jsDoubleNumber(system)); + result->putDirectOffset(vm, 0, JSC::jsNumber(user)); + result->putDirectOffset(vm, 1, JSC::jsNumber(system)); RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(result)); } @@ -3203,14 +3201,14 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionMemoryUsage, (JSC::JSGlobalObject * glo // arrayBuffers: 9386 // } - result->putDirectOffset(vm, 0, JSC::jsDoubleNumber(current_rss)); - result->putDirectOffset(vm, 1, JSC::jsDoubleNumber(vm.heap.blockBytesAllocated())); + result->putDirectOffset(vm, 0, JSC::jsNumber(current_rss)); + result->putDirectOffset(vm, 1, JSC::jsNumber(vm.heap.blockBytesAllocated())); // heap.size() loops through every cell... 
// TODO: add a binding for heap.sizeAfterLastCollection() - result->putDirectOffset(vm, 2, JSC::jsDoubleNumber(vm.heap.sizeAfterLastEdenCollection())); + result->putDirectOffset(vm, 2, JSC::jsNumber(vm.heap.sizeAfterLastEdenCollection())); - result->putDirectOffset(vm, 3, JSC::jsDoubleNumber(vm.heap.extraMemorySize() + vm.heap.externalMemorySize())); + result->putDirectOffset(vm, 3, JSC::jsNumber(vm.heap.extraMemorySize() + vm.heap.externalMemorySize())); // JSC won't count this number until vm.heap.addReference() is called. // That will only happen in cases like: @@ -3223,7 +3221,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionMemoryUsage, (JSC::JSGlobalObject * glo // - new Uint8Array(42) // - Buffer.alloc(42) // - new Uint8Array(42).slice() - result->putDirectOffset(vm, 4, JSC::jsDoubleNumber(vm.heap.arrayBufferSize())); + result->putDirectOffset(vm, 4, JSC::jsNumber(vm.heap.arrayBufferSize())); RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(result)); } diff --git a/src/bun.js/bindings/Cookie.cpp b/src/bun.js/bindings/Cookie.cpp index 3859e591a7..3a8ef1f4b3 100644 --- a/src/bun.js/bindings/Cookie.cpp +++ b/src/bun.js/bindings/Cookie.cpp @@ -132,12 +132,14 @@ ExceptionOr> Cookie::parse(StringView cookieString) } else if (attributeName == "expires"_s && !hasMaxAge && !attributeValue.isEmpty()) { if (!attributeValue.is8Bit()) [[unlikely]] { auto asLatin1 = attributeValue.latin1(); - if (auto parsed = WTF::parseDate({ reinterpret_cast(asLatin1.data()), asLatin1.length() })) { + double parsed = WTF::parseDate({ reinterpret_cast(asLatin1.data()), asLatin1.length() }); + if (std::isfinite(parsed)) { expires = static_cast(parsed); } } else { auto nullTerminated = attributeValue.utf8(); - if (auto parsed = WTF::parseDate(std::span(reinterpret_cast(nullTerminated.data()), nullTerminated.length()))) { + double parsed = WTF::parseDate(std::span(reinterpret_cast(nullTerminated.data()), nullTerminated.length())); + if (std::isfinite(parsed)) { expires = 
static_cast(parsed); } } @@ -168,7 +170,7 @@ ExceptionOr> Cookie::parse(StringView cookieString) bool Cookie::isExpired() const { - if (m_expires == Cookie::emptyExpiresAtValue || m_expires < 1) + if (m_expires == Cookie::emptyExpiresAtValue) return false; // Session cookie auto currentTime = WTF::WallTime::now().secondsSinceEpoch().seconds() * 1000.0; diff --git a/src/bun.js/bindings/JSBundlerPlugin.cpp b/src/bun.js/bindings/JSBundlerPlugin.cpp index 0f24159f72..9b129148a1 100644 --- a/src/bun.js/bindings/JSBundlerPlugin.cpp +++ b/src/bun.js/bindings/JSBundlerPlugin.cpp @@ -29,7 +29,7 @@ #include #include "ErrorCode.h" #include "napi_external.h" -#include + #include #if OS(WINDOWS) @@ -117,9 +117,9 @@ static const HashTableValue JSBundlerPluginHashTable[] = { { "generateDeferPromise"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_generateDeferPromise, 0 } }, }; -class JSBundlerPlugin final : public JSC::JSNonFinalObject { +class JSBundlerPlugin final : public JSC::JSDestructibleObject { public: - using Base = JSC::JSNonFinalObject; + using Base = JSC::JSDestructibleObject; static JSBundlerPlugin* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, @@ -156,6 +156,9 @@ public: } DECLARE_VISIT_CHILDREN; + DECLARE_VISIT_OUTPUT_CONSTRAINTS; + + template void visitAdditionalChildren(Visitor&); Bun::BundlerPlugin plugin; /// These are defined in BundlerPlugin.ts @@ -165,30 +168,53 @@ public: JSC::JSGlobalObject* m_globalObject; + static void destroy(JSC::JSCell* cell) + { + JSBundlerPlugin* thisObject = static_cast(cell); + thisObject->~JSBundlerPlugin(); + } + private: JSBundlerPlugin(JSC::VM& vm, JSC::JSGlobalObject* global, JSC::Structure* structure, void* config, BunPluginTarget target, JSBundlerPluginAddErrorCallback addError, JSBundlerPluginOnLoadAsyncCallback onLoadAsync, 
JSBundlerPluginOnResolveAsyncCallback onResolveAsync) - : JSC::JSNonFinalObject(vm, structure) + : Base(vm, structure) , plugin(BundlerPlugin(config, target, addError, onLoadAsync, onResolveAsync)) , m_globalObject(global) { } + ~JSBundlerPlugin() = default; void finishCreation(JSC::VM&); }; +template +void JSBundlerPlugin::visitAdditionalChildren(Visitor& visitor) +{ + this->onLoadFunction.visit(visitor); + this->onResolveFunction.visit(visitor); + this->setupFunction.visit(visitor); + this->plugin.deferredPromises.visit(this, visitor); +} + template void JSBundlerPlugin::visitChildrenImpl(JSCell* cell, Visitor& visitor) { JSBundlerPlugin* thisObject = jsCast(cell); ASSERT_GC_OBJECT_INHERITS(thisObject, info()); Base::visitChildren(thisObject, visitor); - thisObject->onLoadFunction.visit(visitor); - thisObject->onResolveFunction.visit(visitor); - thisObject->setupFunction.visit(visitor); + thisObject->visitAdditionalChildren(visitor); } DEFINE_VISIT_CHILDREN(JSBundlerPlugin); +template +void JSBundlerPlugin::visitOutputConstraintsImpl(JSCell* cell, Visitor& visitor) +{ + JSBundlerPlugin* thisObject = jsCast(cell); + ASSERT_GC_OBJECT_INHERITS(thisObject, info()); + thisObject->visitAdditionalChildren(visitor); +} +DEFINE_VISIT_OUTPUT_CONSTRAINTS(JSBundlerPlugin); + const JSC::ClassInfo JSBundlerPlugin::s_info = { "BundlerPlugin"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSBundlerPlugin) }; /// `BundlerPlugin.prototype.addFilter(filter: RegExp, namespace: string, isOnLoad: 0 | 1): void` @@ -425,10 +451,11 @@ JSC_DEFINE_HOST_FUNCTION(jsBundlerPluginFunction_onResolveAsync, (JSC::JSGlobalO extern "C" JSC::EncodedJSValue JSBundlerPlugin__appendDeferPromise(Bun::JSBundlerPlugin* pluginObject) { - JSC::JSGlobalObject* globalObject = pluginObject->globalObject(); - Strong strong_promise = JSC::Strong(globalObject->vm(), JSPromise::create(globalObject->vm(), globalObject->promiseStructure())); - JSPromise* ret = strong_promise.get(); - 
pluginObject->plugin.deferredPromises.append(strong_promise); + auto* vm = &pluginObject->vm(); + auto* globalObject = pluginObject->globalObject(); + + JSPromise* ret = JSPromise::create(*vm, globalObject->promiseStructure()); + pluginObject->plugin.deferredPromises.append(*vm, pluginObject, ret); return JSC::JSValue::encode(ret); } @@ -638,15 +665,22 @@ extern "C" void JSBundlerPlugin__setConfig(Bun::JSBundlerPlugin* plugin, void* c extern "C" void JSBundlerPlugin__drainDeferred(Bun::JSBundlerPlugin* pluginObject, bool rejected) { - auto deferredPromises = std::exchange(pluginObject->plugin.deferredPromises, {}); - for (auto& promise : deferredPromises) { + auto* globalObject = pluginObject->globalObject(); + MarkedArgumentBuffer arguments; + pluginObject->plugin.deferredPromises.moveTo(pluginObject, arguments); + ASSERT(!arguments.hasOverflowed()); + + auto scope = DECLARE_THROW_SCOPE(pluginObject->vm()); + for (auto promiseValue : arguments) { + JSPromise* promise = jsCast(JSValue::decode(promiseValue)); if (rejected) { - promise->reject(pluginObject->globalObject(), JSC::jsUndefined()); + promise->reject(globalObject, JSC::jsUndefined()); } else { - promise->resolve(pluginObject->globalObject(), JSC::jsUndefined()); + promise->resolve(globalObject, JSC::jsUndefined()); } - promise.clear(); + RETURN_IF_EXCEPTION(scope, ); } + RETURN_IF_EXCEPTION(scope, ); } extern "C" void JSBundlerPlugin__tombstone(Bun::JSBundlerPlugin* plugin) diff --git a/src/bun.js/bindings/JSBundlerPlugin.h b/src/bun.js/bindings/JSBundlerPlugin.h index 7bef5769fa..34fb313075 100644 --- a/src/bun.js/bindings/JSBundlerPlugin.h +++ b/src/bun.js/bindings/JSBundlerPlugin.h @@ -7,6 +7,7 @@ #include #include "napi_external.h" #include +#include "WriteBarrierList.h" typedef void (*JSBundlerPluginAddErrorCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue); typedef void (*JSBundlerPluginOnLoadAsyncCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue); @@ -134,7 +135,7 @@ 
public: NativePluginList onBeforeParse = {}; BunPluginTarget target { BunPluginTargetBrowser }; - Vector> deferredPromises = {}; + WriteBarrierList deferredPromises = {}; JSBundlerPluginAddErrorCallback addError; JSBundlerPluginOnLoadAsyncCallback onLoadAsync; diff --git a/src/bun.js/bindings/JSNodePerformanceHooksHistogram.h b/src/bun.js/bindings/JSNodePerformanceHooksHistogram.h index 51a2bd9604..14c7b838ea 100644 --- a/src/bun.js/bindings/JSNodePerformanceHooksHistogram.h +++ b/src/bun.js/bindings/JSNodePerformanceHooksHistogram.h @@ -48,6 +48,9 @@ JSC_DECLARE_HOST_FUNCTION(jsNodePerformanceHooksHistogramProtoFuncPercentile); JSC_DECLARE_HOST_FUNCTION(jsNodePerformanceHooksHistogramProtoFuncPercentileBigInt); JSC_DECLARE_HOST_FUNCTION(jsFunction_createHistogram); +JSC_DECLARE_HOST_FUNCTION(jsFunction_monitorEventLoopDelay); +JSC_DECLARE_HOST_FUNCTION(jsFunction_enableEventLoopDelay); +JSC_DECLARE_HOST_FUNCTION(jsFunction_disableEventLoopDelay); class HistogramData { public: diff --git a/src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp b/src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp index e7363a94b0..1dddbd727a 100644 --- a/src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp +++ b/src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp @@ -1,5 +1,6 @@ #include "ErrorCode.h" #include "JSDOMExceptionHandling.h" +#include "NodeValidator.h" #include "root.h" #include "JSNodePerformanceHooksHistogramPrototype.h" @@ -140,6 +141,20 @@ JSC_DEFINE_HOST_FUNCTION(jsNodePerformanceHooksHistogramProtoFuncReset, (JSGloba return JSValue::encode(jsUndefined()); } +static double toPercentile(JSC::ThrowScope& scope, JSGlobalObject* globalObject, JSValue value) +{ + Bun::V::validateNumber(scope, globalObject, value, "percentile"_s, jsNumber(0), jsNumber(100)); + RETURN_IF_EXCEPTION(scope, {}); + + // TODO: rewrite validateNumber to return the validated value. 
+ double percentile = value.toNumber(globalObject); + scope.assertNoException(); + if (percentile <= 0 || percentile > 100 || std::isnan(percentile)) { + Bun::ERR::OUT_OF_RANGE(scope, globalObject, "percentile"_s, "> 0 && <= 100"_s, value); + return {}; + } + return percentile; +} JSC_DEFINE_HOST_FUNCTION(jsNodePerformanceHooksHistogramProtoFuncPercentile, (JSGlobalObject * globalObject, CallFrame* callFrame)) { VM& vm = globalObject->vm(); @@ -156,12 +171,8 @@ JSC_DEFINE_HOST_FUNCTION(jsNodePerformanceHooksHistogramProtoFuncPercentile, (JS return {}; } - double percentile = callFrame->uncheckedArgument(0).toNumber(globalObject); + double percentile = toPercentile(scope, globalObject, callFrame->uncheckedArgument(0)); RETURN_IF_EXCEPTION(scope, {}); - if (percentile <= 0 || percentile > 100 || std::isnan(percentile)) { - Bun::ERR::OUT_OF_RANGE(scope, globalObject, "percentile"_s, "> 0 && <= 100"_s, jsNumber(percentile)); - return {}; - } return JSValue::encode(jsNumber(static_cast(thisObject->getPercentile(percentile)))); } @@ -182,12 +193,8 @@ JSC_DEFINE_HOST_FUNCTION(jsNodePerformanceHooksHistogramProtoFuncPercentileBigIn return {}; } - double percentile = callFrame->uncheckedArgument(0).toNumber(globalObject); + double percentile = toPercentile(scope, globalObject, callFrame->uncheckedArgument(0)); RETURN_IF_EXCEPTION(scope, {}); - if (percentile <= 0 || percentile > 100 || std::isnan(percentile)) { - Bun::ERR::OUT_OF_RANGE(scope, globalObject, "percentile"_s, "> 0 && <= 100"_s, jsNumber(percentile)); - return {}; - } RELEASE_AND_RETURN(scope, JSValue::encode(JSBigInt::createFrom(globalObject, thisObject->getPercentile(percentile)))); } @@ -415,4 +422,107 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_createHistogram, (JSGlobalObject * globalObj return JSValue::encode(histogram); } +// Extern declarations for Timer.zig +extern "C" void Timer_enableEventLoopDelayMonitoring(void* vm, JSC::EncodedJSValue histogram, int32_t resolution); +extern "C" void 
Timer_disableEventLoopDelayMonitoring(void* vm); + +// Create histogram for event loop delay monitoring +JSC_DEFINE_HOST_FUNCTION(jsFunction_monitorEventLoopDelay, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + int32_t resolution = 10; // default 10ms + if (callFrame->argumentCount() > 0) { + resolution = callFrame->argument(0).toInt32(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + if (resolution < 1) { + throwRangeError(globalObject, scope, "Resolution must be >= 1"_s); + return JSValue::encode(jsUndefined()); + } + } + + // Create histogram with range for event loop delays (1ns to 1 hour) + auto* zigGlobalObject = defaultGlobalObject(globalObject); + Structure* structure = zigGlobalObject->m_JSNodePerformanceHooksHistogramClassStructure.get(zigGlobalObject); + RETURN_IF_EXCEPTION(scope, {}); + + JSNodePerformanceHooksHistogram* histogram = JSNodePerformanceHooksHistogram::create( + vm, structure, globalObject, + 1, // lowest: 1 nanosecond + 3600000000000LL, // highest: 1 hour in nanoseconds + 3 // figures: 3 significant digits + ); + + RETURN_IF_EXCEPTION(scope, {}); + + return JSValue::encode(histogram); +} + +// Enable event loop delay monitoring +JSC_DEFINE_HOST_FUNCTION(jsFunction_enableEventLoopDelay, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + if (callFrame->argumentCount() < 2) { + throwTypeError(globalObject, scope, "Missing arguments"_s); + return JSValue::encode(jsUndefined()); + } + + JSValue histogramValue = callFrame->argument(0); + JSNodePerformanceHooksHistogram* histogram = jsDynamicCast(histogramValue); + + if (!histogram) { + throwTypeError(globalObject, scope, "Invalid histogram"_s); + return JSValue::encode(jsUndefined()); + } + + int32_t resolution = callFrame->argument(1).toInt32(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + // Reset histogram data on 
enable + histogram->reset(); + + // Enable the event loop delay monitor in Timer.zig + Timer_enableEventLoopDelayMonitoring(bunVM(globalObject), JSValue::encode(histogram), resolution); + + RELEASE_AND_RETURN(scope, JSValue::encode(jsUndefined())); +} + +// Disable event loop delay monitoring +JSC_DEFINE_HOST_FUNCTION(jsFunction_disableEventLoopDelay, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + if (callFrame->argumentCount() < 1) { + throwTypeError(globalObject, scope, "Missing histogram argument"_s); + return JSValue::encode(jsUndefined()); + } + + JSValue histogramValue = callFrame->argument(0); + JSNodePerformanceHooksHistogram* histogram = jsDynamicCast(histogramValue); + + if (!histogram) { + throwTypeError(globalObject, scope, "Invalid histogram"_s); + return JSValue::encode(jsUndefined()); + } + + // Call into Zig to disable monitoring + Timer_disableEventLoopDelayMonitoring(bunVM(globalObject)); + + return JSValue::encode(jsUndefined()); +} + +// Extern function for Zig to record delays +extern "C" void JSNodePerformanceHooksHistogram_recordDelay(JSC::EncodedJSValue histogram, int64_t delay_ns) +{ + if (!histogram || delay_ns <= 0) return; + + auto* hist = jsCast(JSValue::decode(histogram)); + hist->record(delay_ns); +} + } // namespace Bun diff --git a/src/bun.js/bindings/JSValue.zig b/src/bun.js/bindings/JSValue.zig index b9f096f5fd..4887d49f85 100644 --- a/src/bun.js/bindings/JSValue.zig +++ b/src/bun.js/bindings/JSValue.zig @@ -584,7 +584,12 @@ pub const JSValue = enum(i64) { return switch (comptime Number) { JSValue => number, u0 => jsNumberFromInt32(0), - f32, f64 => jsDoubleNumber(@as(f64, number)), + f32, f64 => { + if (canBeStrictInt32(number)) { + return jsNumberFromInt32(@intFromFloat(number)); + } + return jsDoubleNumber(number); + }, u31, c_ushort, u8, i16, i32, c_int, i8, u16 => jsNumberFromInt32(@as(i32, @intCast(number))), c_long, u32, u52, c_uint, i64, 
isize => jsNumberFromInt64(@as(i64, @intCast(number))), usize, u64 => jsNumberFromUint64(@as(u64, @intCast(number))), @@ -782,20 +787,24 @@ pub const JSValue = enum(i64) { return jsDoubleNumber(@floatFromInt(i)); } - pub inline fn toJS(this: JSValue, _: *const JSGlobalObject) JSValue { - return this; - } - pub fn jsNumberFromUint64(i: u64) JSValue { if (i <= std.math.maxInt(i32)) { return jsNumberFromInt32(@as(i32, @intCast(i))); } - return jsNumberFromPtrSize(i); + return jsDoubleNumber(@floatFromInt(i)); } - pub fn jsNumberFromPtrSize(i: usize) JSValue { - return jsDoubleNumber(@floatFromInt(i)); + // https://github.com/oven-sh/WebKit/blob/df8aa4c4d01a1c2fe22ac599adfe0a582fce2b20/Source/JavaScriptCore/runtime/MathCommon.h#L243-L249 + pub fn canBeStrictInt32(value: f64) bool { + if (std.math.isInf(value) or std.math.isNan(value)) { + return false; + } + const int: i32 = int: { + @setRuntimeSafety(false); + break :int @intFromFloat(value); + }; + return !(@as(f64, @floatFromInt(int)) != value or (int == 0 and std.math.signbit(value))); // true for -0.0 } fn coerceJSValueDoubleTruncatingT(comptime T: type, num: f64) T { @@ -1853,6 +1862,11 @@ pub const JSValue = enum(i64) { return JSC__JSValue__createRangeError(message, code, global); } + extern fn JSC__JSValue__isStrictEqual(JSValue, JSValue, *JSGlobalObject) bool; + pub fn isStrictEqual(this: JSValue, other: JSValue, global: *JSGlobalObject) JSError!bool { + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__isStrictEqual, .{ this, other, global }); + } + extern fn JSC__JSValue__isSameValue(this: JSValue, other: JSValue, global: *JSGlobalObject) bool; /// Object.is() diff --git a/src/bun.js/bindings/NodeFSStatBinding.cpp b/src/bun.js/bindings/NodeFSStatBinding.cpp index 3ca16d8abe..7a1ee3586b 100644 --- a/src/bun.js/bindings/NodeFSStatBinding.cpp +++ b/src/bun.js/bindings/NodeFSStatBinding.cpp @@ -604,20 +604,20 @@ extern "C" JSC::EncodedJSValue Bun__createJSStatsObject(Zig::GlobalObject* globa { 
auto& vm = globalObject->vm(); - JSC::JSValue js_dev = JSC::jsDoubleNumber(dev); - JSC::JSValue js_ino = JSC::jsDoubleNumber(ino); - JSC::JSValue js_mode = JSC::jsDoubleNumber(mode); - JSC::JSValue js_nlink = JSC::jsDoubleNumber(nlink); - JSC::JSValue js_uid = JSC::jsDoubleNumber(uid); - JSC::JSValue js_gid = JSC::jsDoubleNumber(gid); - JSC::JSValue js_rdev = JSC::jsDoubleNumber(rdev); - JSC::JSValue js_size = JSC::jsDoubleNumber(size); - JSC::JSValue js_blksize = JSC::jsDoubleNumber(blksize); - JSC::JSValue js_blocks = JSC::jsDoubleNumber(blocks); - JSC::JSValue js_atimeMs = JSC::jsDoubleNumber(atimeMs); - JSC::JSValue js_mtimeMs = JSC::jsDoubleNumber(mtimeMs); - JSC::JSValue js_ctimeMs = JSC::jsDoubleNumber(ctimeMs); - JSC::JSValue js_birthtimeMs = JSC::jsDoubleNumber(birthtimeMs); + JSC::JSValue js_dev = JSC::jsNumber(dev); + JSC::JSValue js_ino = JSC::jsNumber(ino); + JSC::JSValue js_mode = JSC::jsNumber(mode); + JSC::JSValue js_nlink = JSC::jsNumber(nlink); + JSC::JSValue js_uid = JSC::jsNumber(uid); + JSC::JSValue js_gid = JSC::jsNumber(gid); + JSC::JSValue js_rdev = JSC::jsNumber(rdev); + JSC::JSValue js_size = JSC::jsNumber(size); + JSC::JSValue js_blksize = JSC::jsNumber(blksize); + JSC::JSValue js_blocks = JSC::jsNumber(blocks); + JSC::JSValue js_atimeMs = JSC::jsNumber(atimeMs); + JSC::JSValue js_mtimeMs = JSC::jsNumber(mtimeMs); + JSC::JSValue js_ctimeMs = JSC::jsNumber(ctimeMs); + JSC::JSValue js_birthtimeMs = JSC::jsNumber(birthtimeMs); auto* structure = getStructure(globalObject); auto* object = JSC::JSFinalObject::create(vm, structure); diff --git a/src/bun.js/bindings/SQLClient.cpp b/src/bun.js/bindings/SQLClient.cpp index a1f881876b..c9c8f41313 100644 --- a/src/bun.js/bindings/SQLClient.cpp +++ b/src/bun.js/bindings/SQLClient.cpp @@ -152,7 +152,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel return jsEmptyString(vm); } case DataCellTag::Double: - return jsDoubleNumber(cell.value.number); + return 
jsNumber(cell.value.number); break; case DataCellTag::Integer: return jsNumber(cell.value.integer); diff --git a/src/bun.js/bindings/WriteBarrierList.h b/src/bun.js/bindings/WriteBarrierList.h new file mode 100644 index 0000000000..0db6c5811c --- /dev/null +++ b/src/bun.js/bindings/WriteBarrierList.h @@ -0,0 +1,92 @@ +#pragma once + +#include +#include +#include + +namespace Bun { + +/** + * A variable-length list of JSValue objects with garbage collection support. + * + * This class provides a thread-safe container for WriteBarrier objects that can + * dynamically grow and shrink. It includes helper methods for visiting contained + * objects during garbage collection traversal. + * + * Use this class when: + * - The number of items may change at runtime (append/remove operations) + * - You need thread-safe access to the list + * - You need automatic garbage collection support for contained JSValues + * + * For better performance when the length is known and fixed, prefer + * FixedVector> instead. 
+ * + * @tparam T The type of JSC objects to store (must inherit from JSC::JSCell) + */ +template +class WriteBarrierList { +public: + WriteBarrierList() + { + } + + void append(JSC::VM& vm, JSC::JSCell* owner, T* value) + { + WTF::Locker locker { owner->cellLock() }; + m_list.append(JSC::WriteBarrier(vm, owner, value)); + } + + std::span> list() + { + return m_list.mutableSpan(); + } + + void moveTo(JSC::JSCell* owner, JSC::MarkedArgumentBuffer& arguments) + { + WTF::Locker locker { owner->cellLock() }; + for (JSC::WriteBarrier& value : m_list) { + if (auto* cell = value.get()) { + arguments.append(cell); + value.clear(); + } + } + } + + template + void visit(JSC::JSCell* owner, Visitor& visitor) + { + WTF::Locker locker { owner->cellLock() }; + for (auto& value : m_list) { + visitor.append(value); + } + } + + bool isEmpty() const + { + return m_list.isEmpty(); + } + + T* takeFirst(JSC::JSCell* owner) + { + WTF::Locker locker { owner->cellLock() }; + if (m_list.isEmpty()) { + return nullptr; + } + + T* value = m_list.first().get(); + m_list.removeAt(0); + return value; + } + + template + bool removeFirstMatching(JSC::JSCell* owner, const MatchFunction& matches) + { + WTF::Locker locker { owner->cellLock() }; + return m_list.removeFirstMatching(matches); + } + +private: + WTF::Vector> m_list; +}; + +} diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index 7e7a3c0270..fc584871ca 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -1354,10 +1354,10 @@ void GlobalObject::promiseRejectionTracker(JSGlobalObject* obj, JSC::JSPromise* auto* globalObj = static_cast(obj); switch (operation) { case JSPromiseRejectionOperation::Reject: - globalObj->m_aboutToBeNotifiedRejectedPromises.append(JSC::Strong(obj->vm(), promise)); + globalObj->m_aboutToBeNotifiedRejectedPromises.append(obj->vm(), globalObj, promise); break; case JSPromiseRejectionOperation::Handle: - bool removed = 
globalObj->m_aboutToBeNotifiedRejectedPromises.removeFirstMatching([&](Strong& unhandledPromise) { + bool removed = globalObj->m_aboutToBeNotifiedRejectedPromises.removeFirstMatching(globalObj, [&](JSC::WriteBarrier& unhandledPromise) { return unhandledPromise.get() == promise; }); if (removed) break; @@ -4048,16 +4048,13 @@ void GlobalObject::handleRejectedPromises() { JSC::VM& virtual_machine = vm(); auto scope = DECLARE_CATCH_SCOPE(virtual_machine); - do { - auto unhandledRejections = WTFMove(m_aboutToBeNotifiedRejectedPromises); - for (auto& promise : unhandledRejections) { - if (promise->isHandled(virtual_machine)) - continue; + while (auto* promise = m_aboutToBeNotifiedRejectedPromises.takeFirst(this)) { + if (promise->isHandled(virtual_machine)) + continue; - Bun__handleRejectedPromise(this, promise.get()); - if (auto ex = scope.exception()) this->reportUncaughtExceptionAtEventLoop(this, ex); - } - } while (!m_aboutToBeNotifiedRejectedPromises.isEmpty()); + Bun__handleRejectedPromise(this, promise); + if (auto ex = scope.exception()) this->reportUncaughtExceptionAtEventLoop(this, ex); + } } DEFINE_VISIT_CHILDREN(GlobalObject); @@ -4070,6 +4067,9 @@ void GlobalObject::visitAdditionalChildren(Visitor& visitor) thisObject->globalEventScope->visitJSEventListeners(visitor); + thisObject->m_aboutToBeNotifiedRejectedPromises.visit(thisObject, visitor); + thisObject->m_ffiFunctions.visit(thisObject, visitor); + ScriptExecutionContext* context = thisObject->scriptExecutionContext(); visitor.addOpaqueRoot(context); } @@ -4625,18 +4625,13 @@ void GlobalObject::setNodeWorkerEnvironmentData(JSMap* data) { m_nodeWorkerEnvir void GlobalObject::trackFFIFunction(JSC::JSFunction* function) { - this->m_ffiFunctions.append(JSC::Strong { vm(), function }); + this->m_ffiFunctions.append(vm(), this, function); } bool GlobalObject::untrackFFIFunction(JSC::JSFunction* function) { - for (size_t i = 0; i < this->m_ffiFunctions.size(); ++i) { - if (this->m_ffiFunctions[i].get() == 
function) { - this->m_ffiFunctions[i].clear(); - this->m_ffiFunctions.removeAt(i); - return true; - } - } - return false; + return this->m_ffiFunctions.removeFirstMatching(this, [&](JSC::WriteBarrier& untrackedFunction) -> bool { + return untrackedFunction.get() == function; + }); } extern "C" void Zig__GlobalObject__destructOnExit(Zig::GlobalObject* globalObject) diff --git a/src/bun.js/bindings/ZigGlobalObject.h b/src/bun.js/bindings/ZigGlobalObject.h index 55fb45fd77..7f9f3412b0 100644 --- a/src/bun.js/bindings/ZigGlobalObject.h +++ b/src/bun.js/bindings/ZigGlobalObject.h @@ -57,6 +57,7 @@ class GlobalInternals; #include "BunGlobalScope.h" #include #include +#include "WriteBarrierList.h" namespace Bun { class JSCommonJSExtensions; @@ -735,8 +736,8 @@ private: DOMGuardedObjectSet m_guardedObjects WTF_GUARDED_BY_LOCK(m_gcLock); WebCore::SubtleCrypto* m_subtleCrypto = nullptr; - WTF::Vector> m_aboutToBeNotifiedRejectedPromises; - WTF::Vector> m_ffiFunctions; + Bun::WriteBarrierList m_aboutToBeNotifiedRejectedPromises; + Bun::WriteBarrierList m_ffiFunctions; }; class EvalGlobalObject : public GlobalObject { diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index 4e518ea354..aff6f36740 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -2607,6 +2607,13 @@ size_t JSC__VM__heapSize(JSC::VM* arg0) return arg0->heap.size(); } +bool JSC__JSValue__isStrictEqual(JSC::EncodedJSValue l, JSC::EncodedJSValue r, JSC::JSGlobalObject* globalObject) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + RELEASE_AND_RETURN(scope, JSC::JSValue::strictEqual(globalObject, JSC::JSValue::decode(l), JSC::JSValue::decode(r))); +} + bool JSC__JSValue__isSameValue(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* globalObject) { @@ -3800,10 +3807,6 @@ void JSC__JSValue__forEach(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* ar { return 
JSC::JSValue::encode(JSC::jsEmptyString(arg0->vm())); } -JSC::EncodedJSValue JSC__JSValue__jsNull() -{ - return JSC::JSValue::encode(JSC::jsNull()); -} [[ZIG_EXPORT(nothrow)]] JSC::EncodedJSValue JSC__JSValue__jsNumberFromChar(unsigned char arg0) { return JSC::JSValue::encode(JSC::jsNumber(arg0)); @@ -4301,8 +4304,11 @@ JSC::EncodedJSValue JSC__JSValue__getErrorsProperty(JSC::EncodedJSValue JSValue0 return JSC::JSValue::encode(obj->getDirect(global->vm(), global->vm().propertyNames->errors)); } -[[ZIG_EXPORT(nothrow)]] JSC::EncodedJSValue JSC__JSValue__jsTDZValue() { return JSC::JSValue::encode(JSC::jsTDZValue()); }; -JSC::EncodedJSValue JSC__JSValue__jsUndefined() { return JSC::JSValue::encode(JSC::jsUndefined()); }; +[[ZIG_EXPORT(nothrow)]] JSC::EncodedJSValue JSC__JSValue__jsTDZValue() +{ + return JSC::JSValue::encode(JSC::jsTDZValue()); +}; + JSC::JSObject* JSC__JSValue__toObject(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1) { JSC::JSValue value = JSC::JSValue::decode(JSValue0); diff --git a/src/bun.js/bindings/headers-handwritten.h b/src/bun.js/bindings/headers-handwritten.h index b52e538af1..8d15002abd 100644 --- a/src/bun.js/bindings/headers-handwritten.h +++ b/src/bun.js/bindings/headers-handwritten.h @@ -225,18 +225,19 @@ const JSErrorCode JSErrorCodeUserErrorCode = 254; // Must be kept in sync. 
typedef uint8_t BunLoaderType; const BunLoaderType BunLoaderTypeNone = 254; -const BunLoaderType BunLoaderTypeJSX = 0; -const BunLoaderType BunLoaderTypeJS = 1; -const BunLoaderType BunLoaderTypeTS = 2; -const BunLoaderType BunLoaderTypeTSX = 3; -const BunLoaderType BunLoaderTypeCSS = 4; -const BunLoaderType BunLoaderTypeFILE = 5; -const BunLoaderType BunLoaderTypeJSON = 6; -const BunLoaderType BunLoaderTypeJSONC = 7; -const BunLoaderType BunLoaderTypeTOML = 8; -const BunLoaderType BunLoaderTypeWASM = 9; -const BunLoaderType BunLoaderTypeNAPI = 10; -const BunLoaderType BunLoaderTypeYAML = 18; +// Must match api/schema.zig Loader enum values +const BunLoaderType BunLoaderTypeJSX = 1; +const BunLoaderType BunLoaderTypeJS = 2; +const BunLoaderType BunLoaderTypeTS = 3; +const BunLoaderType BunLoaderTypeTSX = 4; +const BunLoaderType BunLoaderTypeCSS = 5; +const BunLoaderType BunLoaderTypeFILE = 6; +const BunLoaderType BunLoaderTypeJSON = 7; +const BunLoaderType BunLoaderTypeJSONC = 8; +const BunLoaderType BunLoaderTypeTOML = 9; +const BunLoaderType BunLoaderTypeWASM = 10; +const BunLoaderType BunLoaderTypeNAPI = 11; +const BunLoaderType BunLoaderTypeYAML = 19; #pragma mark - Stream diff --git a/src/bun.js/bindings/headers.h b/src/bun.js/bindings/headers.h index d9f3418338..0428366adb 100644 --- a/src/bun.js/bindings/headers.h +++ b/src/bun.js/bindings/headers.h @@ -260,8 +260,6 @@ CPP_DECL bool JSC__JSValue__isUInt32AsAnyInt(JSC::EncodedJSValue JSValue0); CPP_DECL bool JSC__JSValue__jestDeepEquals(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* arg2); CPP_DECL bool JSC__JSValue__jestDeepMatch(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* arg2, bool arg3); CPP_DECL bool JSC__JSValue__jestStrictDeepEquals(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* arg2); -CPP_DECL JSC::EncodedJSValue JSC__JSValue__jsDoubleNumber(double arg0); -CPP_DECL JSC::EncodedJSValue 
JSC__JSValue__jsNull(); CPP_DECL JSC::EncodedJSValue JSC__JSValue__jsNumberFromChar(unsigned char arg0); CPP_DECL JSC::EncodedJSValue JSC__JSValue__jsNumberFromDouble(double arg0); CPP_DECL JSC::EncodedJSValue JSC__JSValue__jsNumberFromInt64(int64_t arg0); @@ -269,7 +267,6 @@ CPP_DECL JSC::EncodedJSValue JSC__JSValue__jsNumberFromU16(uint16_t arg0); CPP_DECL void JSC__JSValue__jsonStringify(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1, uint32_t arg2, BunString* arg3); CPP_DECL JSC::EncodedJSValue JSC__JSValue__jsTDZValue(); CPP_DECL unsigned char JSC__JSValue__jsType(JSC::EncodedJSValue JSValue0); -CPP_DECL JSC::EncodedJSValue JSC__JSValue__jsUndefined(); CPP_DECL JSC::EncodedJSValue JSC__JSValue__keys(JSC::JSGlobalObject* arg0, JSC::EncodedJSValue arg1); CPP_DECL JSC::EncodedJSValue JSC__JSValue__values(JSC::JSGlobalObject* arg0, JSC::EncodedJSValue arg1); CPP_DECL JSC::EncodedJSValue JSC__JSValue__parseJSON(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1); diff --git a/src/bun.js/bindings/napi.cpp b/src/bun.js/bindings/napi.cpp index fb56166da8..1c362e3884 100644 --- a/src/bun.js/bindings/napi.cpp +++ b/src/bun.js/bindings/napi.cpp @@ -1554,13 +1554,10 @@ extern "C" napi_status napi_object_freeze(napi_env env, napi_value object_value) NAPI_RETURN_EARLY_IF_FALSE(env, value.isObject(), napi_object_expected); Zig::GlobalObject* globalObject = toJS(env); - JSC::VM& vm = JSC::getVM(globalObject); JSC::JSObject* object = JSC::jsCast(value); - // TODO is this check necessary? 
- if (!hasIndexedProperties(object->indexingType())) { - object->freeze(vm); - } + objectConstructorFreeze(globalObject, object); + NAPI_RETURN_IF_EXCEPTION(env); NAPI_RETURN_SUCCESS(env); } @@ -1572,13 +1569,10 @@ extern "C" napi_status napi_object_seal(napi_env env, napi_value object_value) NAPI_RETURN_EARLY_IF_FALSE(env, value.isObject(), napi_object_expected); Zig::GlobalObject* globalObject = toJS(env); - JSC::VM& vm = JSC::getVM(globalObject); JSC::JSObject* object = JSC::jsCast(value); - // TODO is this check necessary? - if (!hasIndexedProperties(object->indexingType())) { - object->seal(vm); - } + objectConstructorSeal(globalObject, object); + NAPI_RETURN_IF_EXCEPTION(env); NAPI_RETURN_SUCCESS(env); } @@ -1637,8 +1631,8 @@ extern "C" napi_status napi_create_dataview(napi_env env, size_t length, NAPI_RETURN_EARLY_IF_FALSE(env, arraybufferPtr, napi_arraybuffer_expected); if (byte_offset + length > arraybufferPtr->impl()->byteLength()) { - JSC::throwRangeError(globalObject, scope, "byteOffset exceeds source ArrayBuffer byteLength"_s); - RETURN_IF_EXCEPTION(scope, napi_set_last_error(env, napi_pending_exception)); + napi_throw_range_error(env, "ERR_NAPI_INVALID_DATAVIEW_ARGS", "byte_offset + byte_length should be less than or equal to the size in bytes of the array passed in"); + return napi_set_last_error(env, napi_pending_exception); } auto dataView = JSC::DataView::create(arraybufferPtr->impl(), byte_offset, length); @@ -2318,15 +2312,14 @@ extern "C" napi_status napi_create_external(napi_env env, void* data, extern "C" napi_status napi_typeof(napi_env env, napi_value val, napi_valuetype* result) { - NAPI_PREAMBLE(env); NAPI_CHECK_ENV_NOT_IN_GC(env); + NAPI_CHECK_ARG(env, val); NAPI_CHECK_ARG(env, result); JSValue value = toJS(val); if (value.isEmpty()) { - // This can happen - *result = napi_undefined; - NAPI_RETURN_SUCCESS(env); + *result = napi_object; + return napi_clear_last_error(env); } if (value.isCell()) { @@ -2336,44 +2329,44 @@ extern "C" 
napi_status napi_typeof(napi_env env, napi_value val, case JSC::JSFunctionType: case JSC::InternalFunctionType: *result = napi_function; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); case JSC::ObjectType: if (JSC::jsDynamicCast(value)) { *result = napi_external; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); } *result = napi_object; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); case JSC::HeapBigIntType: *result = napi_bigint; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); case JSC::DerivedStringObjectType: case JSC::StringObjectType: case JSC::StringType: *result = napi_string; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); case JSC::SymbolType: *result = napi_symbol; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); case JSC::FinalObjectType: case JSC::ArrayType: case JSC::DerivedArrayType: *result = napi_object; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); default: { if (cell->isCallable() || cell->isConstructor()) { *result = napi_function; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); } if (cell->isObject()) { *result = napi_object; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); } break; @@ -2383,22 +2376,22 @@ extern "C" napi_status napi_typeof(napi_env env, napi_value val, if (value.isNumber()) { *result = napi_number; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); } if (value.isUndefined()) { *result = napi_undefined; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); } if (value.isNull()) { *result = napi_null; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); } if (value.isBoolean()) { *result = napi_boolean; - NAPI_RETURN_SUCCESS(env); + return napi_clear_last_error(env); } // Unexpected type, report an error in debug mode @@ -2735,6 +2728,7 @@ extern "C" napi_status napi_call_function(napi_env env, napi_value recv, } NAPI_PREAMBLE(env); + 
NAPI_CHECK_ARG(env, recv); NAPI_RETURN_EARLY_IF_FALSE(env, argc == 0 || argv, napi_invalid_arg); NAPI_CHECK_ARG(env, func); JSValue funcValue = toJS(func); diff --git a/src/bun.js/bindings/sqlite/JSSQLStatement.cpp b/src/bun.js/bindings/sqlite/JSSQLStatement.cpp index 521a5946c0..706b1a2025 100644 --- a/src/bun.js/bindings/sqlite/JSSQLStatement.cpp +++ b/src/bun.js/bindings/sqlite/JSSQLStatement.cpp @@ -160,7 +160,7 @@ static constexpr int MAX_SQLITE_PREPARE_FLAG = SQLITE_PREPARE_PERSISTENT | SQLIT static inline JSC::JSValue jsNumberFromSQLite(sqlite3_stmt* stmt, unsigned int i) { int64_t num = sqlite3_column_int64(stmt, i); - return num > INT_MAX || num < INT_MIN ? JSC::jsDoubleNumber(static_cast(num)) : JSC::jsNumber(static_cast(num)); + return JSC::jsNumber(num); } static inline JSC::JSValue jsBigIntFromSQLite(JSC::JSGlobalObject* globalObject, sqlite3_stmt* stmt, unsigned int i) @@ -561,7 +561,7 @@ static JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, sqlite3_stmt } } case SQLITE_FLOAT: { - return jsDoubleNumber(sqlite3_column_double(stmt, i)); + return jsNumber(sqlite3_column_double(stmt, i)); } // > Note that the SQLITE_TEXT constant was also used in SQLite version // > 2 for a completely different meaning. Software that links against diff --git a/src/bun.js/bindings/v8/AGENTS.md b/src/bun.js/bindings/v8/AGENTS.md new file mode 120000 index 0000000000..681311eb9c --- /dev/null +++ b/src/bun.js/bindings/v8/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/src/bun.js/bindings/v8/CLAUDE.md b/src/bun.js/bindings/v8/CLAUDE.md new file mode 100644 index 0000000000..307093d6b9 --- /dev/null +++ b/src/bun.js/bindings/v8/CLAUDE.md @@ -0,0 +1,326 @@ +# V8 C++ API Implementation Guide + +This directory contains Bun's implementation of the V8 C++ API on top of JavaScriptCore. This allows native Node.js modules that use V8 APIs to work with Bun. 
+ +## Architecture Overview + +Bun implements V8 APIs by creating a compatibility layer that: + +- Maps V8's `Local` handles to JSC's `JSValue` system +- Uses handle scopes to manage memory lifetimes similar to V8 +- Provides V8-compatible object layouts that inline V8 functions can read +- Manages tagged pointers for efficient value representation + +For detailed background, see the blog series: + +- [Part 1: Introduction and challenges](https://bun.sh/blog/how-bun-supports-v8-apis-without-using-v8-part-1.md) +- [Part 2: Memory layout and object representation](https://bun.sh/blog/how-bun-supports-v8-apis-without-using-v8-part-2.md) +- [Part 3: Garbage collection and primitives](https://bun.sh/blog/how-bun-supports-v8-apis-without-using-v8-part-3.md) + +## Directory Structure + +``` +src/bun.js/bindings/v8/ +├── v8.h # Main header with V8_UNIMPLEMENTED macro +├── v8_*.h # V8 compatibility headers +├── V8*.h # V8 class headers (Number, String, Object, etc.) +├── V8*.cpp # V8 class implementations +├── shim/ # Internal implementation details +│ ├── Handle.h # Handle and ObjectLayout implementation +│ ├── HandleScopeBuffer.h # Handle scope memory management +│ ├── TaggedPointer.h # V8-style tagged pointer implementation +│ ├── Map.h # V8 Map objects for inline function compatibility +│ ├── GlobalInternals.h # V8 global state management +│ ├── InternalFieldObject.h # Objects with internal fields +│ └── Oddball.h # Primitive values (undefined, null, true, false) +├── node.h # Node.js module registration compatibility +└── real_v8.h # Includes real V8 headers when needed +``` + +## Implementing New V8 APIs + +### 1. 
Create Header and Implementation Files + +Create `V8NewClass.h`: + +```cpp +#pragma once + +#include "v8.h" +#include "V8Local.h" +#include "V8Isolate.h" + +namespace v8 { + +class NewClass : public Data { +public: + BUN_EXPORT static Local New(Isolate* isolate, /* parameters */); + BUN_EXPORT /* return_type */ SomeMethod() const; + + // Add other methods as needed +}; + +} // namespace v8 +``` + +Create `V8NewClass.cpp`: + +```cpp +#include "V8NewClass.h" +#include "V8HandleScope.h" +#include "v8_compatibility_assertions.h" + +ASSERT_V8_TYPE_LAYOUT_MATCHES(v8::NewClass) + +namespace v8 { + +Local NewClass::New(Isolate* isolate, /* parameters */) +{ + // Implementation - typically: + // 1. Create JSC value + // 2. Get current handle scope + // 3. Create local handle + return isolate->currentHandleScope()->createLocal(isolate->vm(), /* JSC value */); +} + +/* return_type */ NewClass::SomeMethod() const +{ + // Implementation - typically: + // 1. Convert this Local to JSValue via localToJSValue() + // 2. Perform JSC operations + // 3. Return converted result + auto jsValue = localToJSValue(); + // ... JSC operations ... + return /* result */; +} + +} // namespace v8 +``` + +### 2. Add Symbol Exports + +For each new C++ method, you must add the mangled symbol names to multiple files: + +#### a. Add to `src/napi/napi.zig` + +Find the `V8API` struct (around line 1801) and add entries for both GCC/Clang and MSVC: + +```zig +const V8API = if (!bun.Environment.isWindows) struct { + // ... existing functions ... + pub extern fn _ZN2v88NewClass3NewEPNS_7IsolateE/* parameters */() *anyopaque; + pub extern fn _ZNK2v88NewClass10SomeMethodEv() *anyopaque; +} else struct { + // ... existing functions ... 
+ pub extern fn @"?New@NewClass@v8@@SA?AV?$Local@VNewClass@v8@@@2@PEAVIsolate@2@/* parameters */@Z"() *anyopaque; + pub extern fn @"?SomeMethod@NewClass@v8@@QEBA/* return_type */XZ"() *anyopaque; +}; +``` + +**To get the correct mangled names:** + +For **GCC/Clang** (Unix): + +```bash +# Build your changes first +bun bd --help # This compiles your code + +# Extract symbols +nm build/CMakeFiles/bun-debug.dir/src/bun.js/bindings/v8/V8NewClass.cpp.o | grep "T _ZN2v8" +``` + +For **MSVC** (Windows): + +```powershell +# Use the provided PowerShell script in the comments: +dumpbin .\build\CMakeFiles\bun-debug.dir\src\bun.js\bindings\v8\V8NewClass.cpp.obj /symbols | where-object { $_.Contains(' v8::') } | foreach-object { (($_ -split "\|")[1] -split " ")[1] } | ForEach-Object { "extern fn @`"${_}`"() *anyopaque;" } +``` + +#### b. Add to Symbol Files + +Add to `src/symbols.txt` (without leading underscore): + +``` +_ZN2v88NewClass3NewEPNS_7IsolateE... +_ZNK2v88NewClass10SomeMethodEv +``` + +Add to `src/symbols.dyn` (with leading underscore and semicolons): + +``` +{ + __ZN2v88NewClass3NewEPNS_7IsolateE...; + __ZNK2v88NewClass10SomeMethodEv; +} +``` + +**Note:** `src/symbols.def` is Windows-only and typically doesn't contain V8 symbols. + +### 3. Add Tests + +Create tests in `test/v8/v8-module/main.cpp`: + +```cpp +void test_new_class_feature(const FunctionCallbackInfo &info) { + Isolate* isolate = info.GetIsolate(); + + // Test your new V8 API + Local obj = NewClass::New(isolate, /* parameters */); + auto result = obj->SomeMethod(); + + // Print results for comparison with Node.js + std::cout << "Result: " << result << std::endl; + + info.GetReturnValue().Set(Undefined(isolate)); +} +``` + +Add the test to the registration section: + +```cpp +void Init(Local exports, Local module, Local context) { + // ... existing functions ... 
+ NODE_SET_METHOD(exports, "test_new_class_feature", test_new_class_feature); +} +``` + +Add test case to `test/v8/v8.test.ts`: + +```typescript +describe("NewClass", () => { + it("can use new feature", async () => { + await checkSameOutput("test_new_class_feature", []); + }); +}); +``` + +### 4. Handle Special Cases + +#### Objects with Internal Fields + +If implementing objects that need internal fields, extend `InternalFieldObject`: + +```cpp +// In your .h file +class MyObject : public InternalFieldObject { + // ... implementation +}; +``` + +#### Primitive Values + +For primitive values, ensure they work with the `Oddball` system in `shim/Oddball.h`. + +#### Template Classes + +For `ObjectTemplate` or `FunctionTemplate` implementations, see existing patterns in `V8ObjectTemplate.cpp` and `V8FunctionTemplate.cpp`. + +## Memory Management Guidelines + +### Handle Scopes + +- All V8 values must be created within an active handle scope +- Use `isolate->currentHandleScope()->createLocal()` to create handles +- Handle scopes automatically clean up when destroyed + +### JSC Integration + +- Use `localToJSValue()` to convert V8 handles to JSC values +- Use `JSC::WriteBarrier` for heap-allocated references +- Implement `visitChildren()` for custom heap objects + +### Tagged Pointers + +- Small integers (±2^31) are stored directly as Smis +- Objects use pointer tagging with map pointers +- Doubles are stored in object layouts with special maps + +## Testing Strategy + +### Comprehensive Testing + +The V8 test suite compares output between Node.js and Bun for the same C++ code: + +1. **Install Phase**: Sets up identical module builds for Node.js and Bun +2. **Build Phase**: Compiles native modules using node-gyp +3. 
**Test Phase**: Runs identical C++ functions and compares output + +### Test Categories + +- **Primitives**: undefined, null, booleans, numbers, strings +- **Objects**: creation, property access, internal fields +- **Arrays**: creation, length, iteration, element access +- **Functions**: callbacks, templates, argument handling +- **Memory**: handle scopes, garbage collection, external data +- **Advanced**: templates, inheritance, error handling + +### Adding New Tests + +1. Add C++ test function to `test/v8/v8-module/main.cpp` +2. Register function in the module exports +3. Add test case to `test/v8/v8.test.ts` using `checkSameOutput()` +4. Run with: `bun bd test test/v8/v8.test.ts -t "your test name"` + +## Debugging Tips + +### Build and Test + +```bash +# Build debug version (takes ~5 minutes) +bun bd --help + +# Run V8 tests +bun bd test test/v8/v8.test.ts + +# Run specific test +bun bd test test/v8/v8.test.ts -t "can create small integer" +``` + +### Common Issues + +**Symbol Not Found**: Ensure mangled names are correctly added to `napi.zig` and symbol files. + +**Segmentation Fault**: Usually indicates inline V8 functions are reading incorrect memory layouts. Check `Map` setup and `ObjectLayout` structure. + +**GC Issues**: Objects being freed prematurely. Ensure proper `WriteBarrier` usage and `visitChildren()` implementation. + +**Type Mismatches**: Use `v8_compatibility_assertions.h` macros to verify type layouts match V8 expectations. + +### Debug Logging + +Use `V8_UNIMPLEMENTED()` macro for functions not yet implemented: + +```cpp +void MyClass::NotYetImplemented() { + V8_UNIMPLEMENTED(); +} +``` + +## Advanced Topics + +### Inline Function Compatibility + +Many V8 functions are inline and compiled into native modules. 
The memory layout must exactly match what these functions expect: + +- Objects start with tagged pointer to `Map` +- Maps have instance type at offset 12 +- Handle scopes store tagged pointers +- Primitive values at fixed global offsets + +### Cross-Platform Considerations + +- Symbol mangling differs between GCC/Clang and MSVC +- Handle calling conventions (JSC uses System V on Unix) +- Ensure `BUN_EXPORT` visibility on all public functions +- Test on all target platforms via CI + +## Contributing + +When contributing V8 API implementations: + +1. **Follow existing patterns** in similar classes +2. **Add comprehensive tests** that compare with Node.js +3. **Update all symbol files** with correct mangled names +4. **Document any special behavior** or limitations + +For questions about V8 API implementation, refer to the blog series linked above or examine existing implementations in this directory. diff --git a/src/bun.js/bindings/v8/V8Data.h b/src/bun.js/bindings/v8/V8Data.h index f3ebc81aaf..0449f184aa 100644 --- a/src/bun.js/bindings/v8/V8Data.h +++ b/src/bun.js/bindings/v8/V8Data.h @@ -54,6 +54,7 @@ public: case InstanceType::Oddball: return reinterpret_cast(v8_object)->toJSValue(); case InstanceType::HeapNumber: + // a number that doesn't fit in int32_t, always EncodeAsDouble return JSC::jsDoubleNumber(v8_object->asDouble()); default: return v8_object->asCell(); diff --git a/src/bun.js/bindings/webcore/JSPerformance.cpp b/src/bun.js/bindings/webcore/JSPerformance.cpp index 55e677a097..eb9f5ab174 100644 --- a/src/bun.js/bindings/webcore/JSPerformance.cpp +++ b/src/bun.js/bindings/webcore/JSPerformance.cpp @@ -120,6 +120,7 @@ static inline JSC::EncodedJSValue functionPerformanceNowBody(VM& vm) double result = time / 1000000.0; // https://github.com/oven-sh/bun/issues/5604 + // Must be EncodeAsDouble because the DOMJIT signature has SpecDoubleReal. 
return JSValue::encode(jsDoubleNumber(result)); } @@ -284,7 +285,7 @@ void JSPerformance::finishCreation(VM& vm) this->putDirect( vm, JSC::Identifier::fromString(vm, "timeOrigin"_s), - jsDoubleNumber(Bun__readOriginTimerStart(reinterpret_cast(this->globalObject())->bunVM())), + jsNumber(Bun__readOriginTimerStart(reinterpret_cast(this->globalObject())->bunVM())), PropertyAttribute::ReadOnly | 0); } diff --git a/src/bun.js/bindings/windows/rescle-binding.cpp b/src/bun.js/bindings/windows/rescle-binding.cpp index 0bb1f6e1d4..8458134ae7 100644 --- a/src/bun.js/bindings/windows/rescle-binding.cpp +++ b/src/bun.js/bindings/windows/rescle-binding.cpp @@ -87,6 +87,11 @@ extern "C" int rescle__setWindowsMetadata( } } + // Remove the "Original Filename" field by setting it to empty + // This prevents the compiled executable from showing "bun.exe" as the original filename + if (!updater.SetVersionString(RU_VS_ORIGINAL_FILENAME, L"")) + return -13; + // Commit all changes at once if (!updater.Commit()) return -12; diff --git a/src/bun.js/ipc.zig b/src/bun.js/ipc.zig index 6ce31d65c4..660dbe0136 100644 --- a/src/bun.js/ipc.zig +++ b/src/bun.js/ipc.zig @@ -459,7 +459,7 @@ pub const SendQueue = struct { for (self.queue.items) |*item| item.deinit(); self.queue.deinit(); self.internal_msg_queue.deinit(); - self.incoming.deinitWithAllocator(bun.default_allocator); + self.incoming.deinit(bun.default_allocator); if (self.waiting_for_ack) |*waiting| waiting.deinit(); // if there is a close next tick task, cancel it so it doesn't get called and then UAF @@ -1297,10 +1297,10 @@ pub const IPCHandlers = struct { pub const WindowsNamedPipe = struct { fn onReadAlloc(send_queue: *SendQueue, suggested_size: usize) []u8 { - var available = send_queue.incoming.available(); + var available = send_queue.incoming.unusedCapacitySlice(); if (available.len < suggested_size) { bun.handleOom(send_queue.incoming.ensureUnusedCapacity(bun.default_allocator, suggested_size)); - available = 
send_queue.incoming.available(); + available = send_queue.incoming.unusedCapacitySlice(); } log("NewNamedPipeIPCHandler#onReadAlloc {d}", .{suggested_size}); return available.ptr[0..suggested_size]; diff --git a/src/bun.js/modules/BunJSCModule.h b/src/bun.js/modules/BunJSCModule.h index 5d511fb498..f9804d70bc 100644 --- a/src/bun.js/modules/BunJSCModule.h +++ b/src/bun.js/modules/BunJSCModule.h @@ -343,7 +343,7 @@ JSC_DEFINE_HOST_FUNCTION(functionMemoryUsageStatistics, auto* zoneSizesObject = constructEmptyObject(globalObject); for (auto& it : zoneSizes) { - zoneSizesObject->putDirect(vm, it.first, jsDoubleNumber(it.second)); + zoneSizesObject->putDirect(vm, it.first, jsNumber(it.second)); } object->putDirect(vm, Identifier::fromString(vm, "zones"_s), @@ -882,7 +882,7 @@ JSC_DEFINE_HOST_FUNCTION(functionEstimateDirectMemoryUsageOf, (JSGlobalObject * if (value.isCell()) { auto& vm = JSC::getVM(globalObject); EnsureStillAliveScope alive = value; - return JSValue::encode(jsDoubleNumber(alive.value().asCell()->estimatedSizeInBytes(vm))); + return JSValue::encode(jsNumber(alive.value().asCell()->estimatedSizeInBytes(vm))); } return JSValue::encode(jsNumber(0)); diff --git a/src/bun.js/modules/NodeBufferModule.h b/src/bun.js/modules/NodeBufferModule.h index 8542ac4dcf..bb554a9696 100644 --- a/src/bun.js/modules/NodeBufferModule.h +++ b/src/bun.js/modules/NodeBufferModule.h @@ -140,7 +140,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionNotImplemented, JSC_DEFINE_CUSTOM_GETTER(jsGetter_INSPECT_MAX_BYTES, (JSGlobalObject * lexicalGlobalObject, JSC::EncodedJSValue thisValue, PropertyName propertyName)) { auto globalObject = reinterpret_cast(lexicalGlobalObject); - return JSValue::encode(jsDoubleNumber(globalObject->INSPECT_MAX_BYTES)); + return JSValue::encode(jsNumber(globalObject->INSPECT_MAX_BYTES)); } JSC_DEFINE_CUSTOM_SETTER(jsSetter_INSPECT_MAX_BYTES, (JSGlobalObject * lexicalGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue value, PropertyName propertyName)) @@ 
-210,7 +210,7 @@ DEFINE_NATIVE_MODULE(NodeBuffer) put(JSC::Identifier::fromString(vm, "resolveObjectURL"_s), resolveObjectURL); - put(JSC::Identifier::fromString(vm, "isAscii"_s), JSC::JSFunction::create(vm, globalObject, 1, "isAscii"_s, jsBufferConstructorFunction_isAscii, ImplementationVisibility::Public, NoIntrinsic, jsBufferConstructorFunction_isUtf8)); + put(JSC::Identifier::fromString(vm, "isAscii"_s), JSC::JSFunction::create(vm, globalObject, 1, "isAscii"_s, jsBufferConstructorFunction_isAscii, ImplementationVisibility::Public, NoIntrinsic, jsBufferConstructorFunction_isAscii)); put(JSC::Identifier::fromString(vm, "isUtf8"_s), JSC::JSFunction::create(vm, globalObject, 1, "isUtf8"_s, jsBufferConstructorFunction_isUtf8, ImplementationVisibility::Public, NoIntrinsic, jsBufferConstructorFunction_isUtf8)); } diff --git a/src/bun.js/node/fs_events.zig b/src/bun.js/node/fs_events.zig index 59df4243ff..22315f8d0a 100644 --- a/src/bun.js/node/fs_events.zig +++ b/src/bun.js/node/fs_events.zig @@ -484,7 +484,7 @@ pub const FSEventsLoop = struct { defer this.mutex.unlock(); if (this.watcher_count == this.watchers.len) { this.watcher_count += 1; - this.watchers.push(bun.default_allocator, watcher) catch unreachable; + bun.handleOom(this.watchers.append(bun.default_allocator, watcher)); } else { var watchers = this.watchers.slice(); for (watchers, 0..) 
|w, i| { @@ -544,8 +544,7 @@ pub const FSEventsLoop = struct { } } - this.watchers.deinitWithAllocator(bun.default_allocator); - + this.watchers.deinit(bun.default_allocator); bun.default_allocator.destroy(this); } }; diff --git a/src/bun.js/node/path_watcher.zig b/src/bun.js/node/path_watcher.zig index 3556185f40..16c1f5b462 100644 --- a/src/bun.js/node/path_watcher.zig +++ b/src/bun.js/node/path_watcher.zig @@ -113,7 +113,7 @@ pub const PathWatcherManager = struct { const this = bun.handleOom(bun.default_allocator.create(PathWatcherManager)); errdefer bun.default_allocator.destroy(this); var watchers = bun.handleOom(bun.BabyList(?*PathWatcher).initCapacity(bun.default_allocator, 1)); - errdefer watchers.deinitWithAllocator(bun.default_allocator); + errdefer watchers.deinit(bun.default_allocator); const manager = PathWatcherManager{ .file_paths = bun.StringHashMap(PathInfo).init(bun.default_allocator), @@ -348,7 +348,7 @@ pub const PathWatcherManager = struct { routine = entry.value_ptr.*; if (watcher.refPendingDirectory()) { - routine.watcher_list.push(bun.default_allocator, watcher) catch |err| { + routine.watcher_list.append(bun.default_allocator, watcher) catch |err| { watcher.unrefPendingDirectory(); return err; }; @@ -369,7 +369,7 @@ pub const PathWatcherManager = struct { }; errdefer routine.deinit(); if (watcher.refPendingDirectory()) { - routine.watcher_list.push(bun.default_allocator, watcher) catch |err| { + routine.watcher_list.append(bun.default_allocator, watcher) catch |err| { watcher.unrefPendingDirectory(); return err; }; @@ -448,7 +448,7 @@ pub const PathWatcherManager = struct { { watcher.mutex.lock(); defer watcher.mutex.unlock(); - watcher.file_paths.push(bun.default_allocator, child_path.path) catch |err| { + watcher.file_paths.append(bun.default_allocator, child_path.path) catch |err| { manager._decrementPathRef(entry_path_z); return switch (err) { error.OutOfMemory => .{ .err = .{ @@ -541,7 +541,7 @@ pub const PathWatcherManager = struct { 
if (this.watcher_count == this.watchers.len) { this.watcher_count += 1; - this.watchers.push(bun.default_allocator, watcher) catch |err| { + this.watchers.append(bun.default_allocator, watcher) catch |err| { this.watcher_count -= 1; return err; }; @@ -687,11 +687,8 @@ pub const PathWatcherManager = struct { } this.file_paths.deinit(); - - this.watchers.deinitWithAllocator(bun.default_allocator); - + this.watchers.deinit(bun.default_allocator); this.current_fd_task.deinit(); - bun.default_allocator.destroy(this); } }; @@ -889,11 +886,11 @@ pub const PathWatcher = struct { manager.unregisterWatcher(this); } else { manager.unregisterWatcher(this); - this.file_paths.deinitWithAllocator(bun.default_allocator); + this.file_paths.deinit(bun.default_allocator); } } else { manager.unregisterWatcher(this); - this.file_paths.deinitWithAllocator(bun.default_allocator); + this.file_paths.deinit(bun.default_allocator); } } diff --git a/src/bun.js/webcore/ArrayBufferSink.zig b/src/bun.js/webcore/ArrayBufferSink.zig index d6ba0bd7c1..dd50843d34 100644 --- a/src/bun.js/webcore/ArrayBufferSink.zig +++ b/src/bun.js/webcore/ArrayBufferSink.zig @@ -16,15 +16,13 @@ pub fn connect(this: *ArrayBufferSink, signal: Signal) void { } pub fn start(this: *ArrayBufferSink, stream_start: streams.Start) bun.sys.Maybe(void) { - this.bytes.len = 0; - var list = this.bytes.listManaged(this.allocator); - list.clearRetainingCapacity(); + this.bytes.clearRetainingCapacity(); switch (stream_start) { .ArrayBufferSink => |config| { if (config.chunk_size > 0) { - list.ensureTotalCapacityPrecise(config.chunk_size) catch return .{ .err = Syscall.Error.oom }; - this.bytes.update(list); + this.bytes.ensureTotalCapacityPrecise(this.allocator, config.chunk_size) catch + return .{ .err = Syscall.Error.oom }; } this.as_uint8array = config.as_uint8array; @@ -63,7 +61,7 @@ pub fn finalize(this: *ArrayBufferSink) void { pub fn init(allocator: std.mem.Allocator, next: ?Sink) !*ArrayBufferSink { return 
bun.new(ArrayBufferSink, .{ - .bytes = bun.ByteList.init(&.{}), + .bytes = bun.ByteList.empty, .allocator = allocator, .next = next, }); @@ -121,7 +119,7 @@ pub fn end(this: *ArrayBufferSink, err: ?Syscall.Error) bun.sys.Maybe(void) { return .success; } pub fn destroy(this: *ArrayBufferSink) void { - this.bytes.deinitWithAllocator(this.allocator); + this.bytes.deinit(this.allocator); bun.destroy(this); } pub fn toJS(this: *ArrayBufferSink, globalThis: *JSGlobalObject, as_uint8array: bool) JSValue { @@ -134,10 +132,9 @@ pub fn toJS(this: *ArrayBufferSink, globalThis: *JSGlobalObject, as_uint8array: return value; } - var list = this.bytes.listManaged(this.allocator); - this.bytes = bun.ByteList.init(""); + defer this.bytes = bun.ByteList.empty; return ArrayBuffer.fromBytes( - try list.toOwnedSlice(), + try this.bytes.toOwnedSlice(this.allocator), if (as_uint8array) .Uint8Array else @@ -151,12 +148,11 @@ pub fn endFromJS(this: *ArrayBufferSink, _: *JSGlobalObject) bun.sys.Maybe(Array } bun.assert(this.next == null); - var list = this.bytes.listManaged(this.allocator); - this.bytes = bun.ByteList.init(""); this.done = true; this.signal.close(null); + defer this.bytes = bun.ByteList.empty; return .{ .result = ArrayBuffer.fromBytes( - bun.handleOom(list.toOwnedSlice()), + bun.handleOom(this.bytes.toOwnedSlice(this.allocator)), if (this.as_uint8array) .Uint8Array else diff --git a/src/bun.js/webcore/Body.zig b/src/bun.js/webcore/Body.zig index 9dc2de6f1f..fc47b6570c 100644 --- a/src/bun.js/webcore/Body.zig +++ b/src/bun.js/webcore/Body.zig @@ -1441,8 +1441,8 @@ pub const ValueBufferer = struct { defer { if (stream_needs_deinit) { switch (stream_) { - .owned_and_done => |*owned| owned.listManaged(allocator).deinit(), - .owned => |*owned| owned.listManaged(allocator).deinit(), + .owned_and_done => |*owned| owned.deinit(allocator), + .owned => |*owned| owned.deinit(allocator), else => unreachable, } } @@ -1503,7 +1503,7 @@ pub const ValueBufferer = struct { var globalThis = 
sink.global; buffer_stream.* = ArrayBufferSink.JSSink{ .sink = ArrayBufferSink{ - .bytes = bun.ByteList.init(&.{}), + .bytes = bun.ByteList.empty, .allocator = allocator, .next = null, }, diff --git a/src/bun.js/webcore/ByteBlobLoader.zig b/src/bun.js/webcore/ByteBlobLoader.zig index 350cab17f0..6f6016ca4b 100644 --- a/src/bun.js/webcore/ByteBlobLoader.zig +++ b/src/bun.js/webcore/ByteBlobLoader.zig @@ -166,12 +166,12 @@ pub fn drain(this: *ByteBlobLoader) bun.ByteList { temporary = temporary[this.offset..]; temporary = temporary[0..@min(16384, @min(temporary.len, this.remain))]; - var byte_list = bun.ByteList.init(temporary); - const cloned = bun.handleOom(byte_list.listManaged(bun.default_allocator).clone()); - this.offset +|= @as(Blob.SizeType, @truncate(cloned.items.len)); - this.remain -|= @as(Blob.SizeType, @truncate(cloned.items.len)); + var byte_list = bun.ByteList.fromBorrowedSliceDangerous(temporary); + const cloned = bun.handleOom(byte_list.clone(bun.default_allocator)); + this.offset +|= @as(Blob.SizeType, cloned.len); + this.remain -|= @as(Blob.SizeType, cloned.len); - return bun.ByteList.fromList(cloned); + return cloned; } pub fn toBufferedValue(this: *ByteBlobLoader, globalThis: *JSGlobalObject, action: streams.BufferAction.Tag) bun.JSError!JSValue { diff --git a/src/bun.js/webcore/ByteStream.zig b/src/bun.js/webcore/ByteStream.zig index 25d4d3036a..95a2017230 100644 --- a/src/bun.js/webcore/ByteStream.zig +++ b/src/bun.js/webcore/ByteStream.zig @@ -43,7 +43,8 @@ pub fn onStart(this: *@This()) streams.Start { } if (this.has_received_last_chunk) { - return .{ .owned_and_done = bun.ByteList.fromList(this.buffer.moveToUnmanaged()) }; + var buffer = this.buffer.moveToUnmanaged(); + return .{ .owned_and_done = bun.ByteList.moveFromList(&buffer) }; } if (this.highWaterMark == 0) { @@ -230,11 +231,11 @@ pub fn append( if (this.buffer.capacity == 0) { switch (stream_) { .owned => |*owned| { - this.buffer = owned.listManaged(allocator); + this.buffer = 
owned.moveToListManaged(allocator); this.offset += offset; }, .owned_and_done => |*owned| { - this.buffer = owned.listManaged(allocator); + this.buffer = owned.moveToListManaged(allocator); this.offset += offset; }, .temporary_and_done, .temporary => { @@ -390,16 +391,8 @@ pub fn deinit(this: *@This()) void { pub fn drain(this: *@This()) bun.ByteList { if (this.buffer.items.len > 0) { - const out = bun.ByteList.fromList(this.buffer); - this.buffer = .{ - .allocator = bun.default_allocator, - .items = &.{}, - .capacity = 0, - }; - - return out; + return bun.ByteList.moveFromList(&this.buffer); } - return .{}; } diff --git a/src/bun.js/webcore/FileReader.zig b/src/bun.js/webcore/FileReader.zig index 0807d0bf54..33dcc82036 100644 --- a/src/bun.js/webcore/FileReader.zig +++ b/src/bun.js/webcore/FileReader.zig @@ -264,9 +264,7 @@ pub fn onStart(this: *FileReader) streams.Start { if (this.reader.isDone()) { this.consumeReaderBuffer(); if (this.buffered.items.len > 0) { - const buffered = this.buffered; - this.buffered = .{}; - return .{ .owned_and_done = bun.ByteList.fromList(buffered) }; + return .{ .owned_and_done = bun.ByteList.moveFromList(&this.buffered) }; } } else if (comptime Environment.isPosix) { if (!was_lazy and this.reader.flags.pollable) { @@ -331,6 +329,7 @@ pub fn onReadChunk(this: *@This(), init_buf: []const u8, state: bun.io.ReadState } } + const reader_buffer = this.reader.buffer(); if (this.read_inside_on_pull != .none) { switch (this.read_inside_on_pull) { .js => |in_progress| { @@ -352,35 +351,30 @@ pub fn onReadChunk(this: *@This(), init_buf: []const u8, state: bun.io.ReadState else => @panic("Invalid state"), } } else if (this.pending.state == .pending) { - if (buf.len == 0) { - { - if (this.buffered.items.len == 0) { - if (this.buffered.capacity > 0) { - this.buffered.clearAndFree(bun.default_allocator); - } - - if (this.reader.buffer().items.len != 0) { - this.buffered = this.reader.buffer().moveToUnmanaged(); - } - } - - var buffer = 
&this.buffered; - defer buffer.clearAndFree(bun.default_allocator); - if (buffer.items.len > 0) { - if (this.pending_view.len >= buffer.items.len) { - @memcpy(this.pending_view[0..buffer.items.len], buffer.items); - this.pending.result = .{ .into_array_and_done = .{ .value = this.pending_value.get() orelse .zero, .len = @truncate(buffer.items.len) } }; - } else { - this.pending.result = .{ .owned_and_done = bun.ByteList.fromList(buffer.*) }; - buffer.* = .{}; - } - } else { - this.pending.result = .{ .done = {} }; - } - } + defer { this.pending_value.clearWithoutDeallocation(); this.pending_view = &.{}; this.pending.run(); + } + + if (buf.len == 0) { + if (this.buffered.items.len == 0) { + this.buffered.clearAndFree(bun.default_allocator); + this.buffered = reader_buffer.moveToUnmanaged(); + } + + var buffer = &this.buffered; + defer buffer.clearAndFree(bun.default_allocator); + if (buffer.items.len > 0) { + if (this.pending_view.len >= buffer.items.len) { + @memcpy(this.pending_view[0..buffer.items.len], buffer.items); + this.pending.result = .{ .into_array_and_done = .{ .value = this.pending_value.get() orelse .zero, .len = @truncate(buffer.items.len) } }; + } else { + this.pending.result = .{ .owned_and_done = bun.ByteList.moveFromList(buffer) }; + } + } else { + this.pending.result = .{ .done = {} }; + } return false; } @@ -388,78 +382,63 @@ pub fn onReadChunk(this: *@This(), init_buf: []const u8, state: bun.io.ReadState if (this.pending_view.len >= buf.len) { @memcpy(this.pending_view[0..buf.len], buf); - this.reader.buffer().clearRetainingCapacity(); + reader_buffer.clearRetainingCapacity(); this.buffered.clearRetainingCapacity(); - if (was_done) { - this.pending.result = .{ - .into_array_and_done = .{ - .value = this.pending_value.get() orelse .zero, - .len = @truncate(buf.len), - }, - }; - } else { - this.pending.result = .{ - .into_array = .{ - .value = this.pending_value.get() orelse .zero, - .len = @truncate(buf.len), - }, - }; - } + const into_array: 
streams.Result.IntoArray = .{ + .value = this.pending_value.get() orelse .zero, + .len = @truncate(buf.len), + }; - this.pending_value.clearWithoutDeallocation(); - this.pending_view = &.{}; - this.pending.run(); + this.pending.result = if (was_done) + .{ .into_array_and_done = into_array } + else + .{ .into_array = into_array }; + return !was_done; + } + + if (bun.isSliceInBuffer(buf, reader_buffer.allocatedSlice())) { + if (this.reader.isDone()) { + bun.assert_eql(buf.ptr, reader_buffer.items.ptr); + var buffer = reader_buffer.moveToUnmanaged(); + buffer.shrinkRetainingCapacity(buf.len); + this.pending.result = .{ .owned_and_done = .moveFromList(&buffer) }; + } else { + reader_buffer.clearRetainingCapacity(); + this.pending.result = .{ .temporary = .fromBorrowedSliceDangerous(buf) }; + } return !was_done; } if (!bun.isSliceInBuffer(buf, this.buffered.allocatedSlice())) { - if (this.reader.isDone()) { - if (bun.isSliceInBuffer(buf, this.reader.buffer().allocatedSlice())) { - this.reader.buffer().* = std.ArrayList(u8).init(bun.default_allocator); - } - this.pending.result = .{ - .temporary_and_done = bun.ByteList.init(buf), - }; - } else { - this.pending.result = .{ - .temporary = bun.ByteList.init(buf), - }; - - if (bun.isSliceInBuffer(buf, this.reader.buffer().allocatedSlice())) { - this.reader.buffer().clearRetainingCapacity(); - } - } - - this.pending_value.clearWithoutDeallocation(); - this.pending_view = &.{}; - this.pending.run(); + this.pending.result = if (this.reader.isDone()) + .{ .temporary_and_done = .fromBorrowedSliceDangerous(buf) } + else + .{ .temporary = .fromBorrowedSliceDangerous(buf) }; return !was_done; } - if (this.reader.isDone()) { - this.pending.result = .{ - .owned_and_done = bun.ByteList.init(buf), - }; - } else { - this.pending.result = .{ - .owned = bun.ByteList.init(buf), - }; - } + bun.assert_eql(buf.ptr, this.buffered.items.ptr); + var buffered = this.buffered; this.buffered = .{}; - this.pending_value.clearWithoutDeallocation(); - 
this.pending_view = &.{}; - this.pending.run(); + buffered.shrinkRetainingCapacity(buf.len); + + this.pending.result = if (this.reader.isDone()) + .{ .owned_and_done = .moveFromList(&buffered) } + else + .{ .owned = .moveFromList(&buffered) }; return !was_done; } else if (!bun.isSliceInBuffer(buf, this.buffered.allocatedSlice())) { bun.handleOom(this.buffered.appendSlice(bun.default_allocator, buf)); - if (bun.isSliceInBuffer(buf, this.reader.buffer().allocatedSlice())) { - this.reader.buffer().clearRetainingCapacity(); + if (bun.isSliceInBuffer(buf, reader_buffer.allocatedSlice())) { + reader_buffer.clearRetainingCapacity(); } } // For pipes, we have to keep pulling or the other process will block. - return this.read_inside_on_pull != .temporary and !(this.buffered.items.len + this.reader.buffer().items.len >= this.highwater_mark and !this.reader.flags.pollable); + return this.read_inside_on_pull != .temporary and + !(this.buffered.items.len + reader_buffer.items.len >= this.highwater_mark and + !this.reader.flags.pollable); } fn isPulling(this: *const FileReader) bool { @@ -525,20 +504,17 @@ pub fn onPull(this: *FileReader, buffer: []u8, array: jsc.JSValue) streams.Resul .temporary => |buf| { log("onPull({d}) = {d}", .{ buffer.len, buf.len }); if (this.reader.isDone()) { - return .{ .temporary_and_done = bun.ByteList.init(buf) }; + return .{ .temporary_and_done = bun.ByteList.fromBorrowedSliceDangerous(buf) }; } - return .{ .temporary = bun.ByteList.init(buf) }; + return .{ .temporary = bun.ByteList.fromBorrowedSliceDangerous(buf) }; }, .use_buffered => { - const buffered = this.buffered; - this.buffered = .{}; - log("onPull({d}) = {d}", .{ buffer.len, buffered.items.len }); + log("onPull({d}) = {d}", .{ buffer.len, this.buffered.items.len }); if (this.reader.isDone()) { - return .{ .owned_and_done = bun.ByteList.fromList(buffered) }; + return .{ .owned_and_done = bun.ByteList.moveFromList(&this.buffered) }; } - - return .{ .owned = 
bun.ByteList.fromList(buffered) }; + return .{ .owned = bun.ByteList.moveFromList(&this.buffered) }; }, else => {}, } @@ -560,8 +536,7 @@ pub fn onPull(this: *FileReader, buffer: []u8, array: jsc.JSValue) streams.Resul pub fn drain(this: *FileReader) bun.ByteList { if (this.buffered.items.len > 0) { - const out = bun.ByteList.fromList(this.buffered); - this.buffered = .{}; + const out = bun.ByteList.moveFromList(&this.buffered); if (comptime Environment.allow_assert) { bun.assert(this.reader.buffer().items.ptr != out.ptr); } @@ -572,9 +547,7 @@ pub fn drain(this: *FileReader) bun.ByteList { return .{}; } - const out = this.reader.buffer().*; - this.reader.buffer().* = std.ArrayList(u8).init(bun.default_allocator); - return bun.ByteList.fromList(out); + return bun.ByteList.moveFromList(this.reader.buffer()); } pub fn setRefOrUnref(this: *FileReader, enable: bool) void { @@ -594,7 +567,7 @@ pub fn onReaderDone(this: *FileReader) void { this.consumeReaderBuffer(); if (this.pending.state == .pending) { if (this.buffered.items.len > 0) { - this.pending.result = .{ .owned_and_done = bun.ByteList.fromList(this.buffered) }; + this.pending.result = .{ .owned_and_done = bun.ByteList.moveFromList(&this.buffered) }; } else { this.pending.result = .{ .done = {} }; } diff --git a/src/bun.js/webcore/ResumableSink.zig b/src/bun.js/webcore/ResumableSink.zig index 11df2ea90c..ddbe325c40 100644 --- a/src/bun.js/webcore/ResumableSink.zig +++ b/src/bun.js/webcore/ResumableSink.zig @@ -91,25 +91,23 @@ pub fn ResumableSink( break :brk_err null; }; - var byte_list = byte_stream.drain(); - const bytes = byte_list.listManaged(bun.default_allocator); - defer bytes.deinit(); - log("onWrite {}", .{bytes.items.len}); - _ = onWrite(this.context, bytes.items); + var bytes = byte_stream.drain(); + defer bytes.deinit(bun.default_allocator); + log("onWrite {}", .{bytes.len}); + _ = onWrite(this.context, bytes.slice()); onEnd(this.context, err); this.deref(); return this; } // We can pipe but we also 
wanna to drain as much as possible first - var byte_list = byte_stream.drain(); - const bytes = byte_list.listManaged(bun.default_allocator); - defer bytes.deinit(); + var bytes = byte_stream.drain(); + defer bytes.deinit(bun.default_allocator); // lets write and see if we can still pipe or if we have backpressure - if (bytes.items.len > 0) { - log("onWrite {}", .{bytes.items.len}); + if (bytes.len > 0) { + log("onWrite {}", .{bytes.len}); // we ignore the return value here because we dont want to pause the stream // if we pause will just buffer in the pipe and we can do the buffer in one place - _ = onWrite(this.context, bytes.items); + _ = onWrite(this.context, bytes.slice()); } this.status = .piped; byte_stream.pipe = jsc.WebCore.Pipe.Wrap(@This(), onStreamPipe).init(this); @@ -292,8 +290,8 @@ pub fn ResumableSink( defer { if (stream_needs_deinit) { switch (stream_) { - .owned_and_done => |*owned| owned.listManaged(allocator).deinit(), - .owned => |*owned| owned.listManaged(allocator).deinit(), + .owned_and_done => |*owned| owned.deinit(allocator), + .owned => |*owned| owned.deinit(allocator), else => unreachable, } } diff --git a/src/bun.js/webcore/Sink.zig b/src/bun.js/webcore/Sink.zig index 765b71a919..f26a613feb 100644 --- a/src/bun.js/webcore/Sink.zig +++ b/src/bun.js/webcore/Sink.zig @@ -53,10 +53,10 @@ pub const UTF8Fallback = struct { bun.strings.replaceLatin1WithUTF8(buf[0..str.len]); if (input.isDone()) { - const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.init(buf[0..str.len]) }); + const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.fromBorrowedSliceDangerous(buf[0..str.len]) }); return result; } else { - const result = writeFn(ctx, .{ .temporary = bun.ByteList.init(buf[0..str.len]) }); + const result = writeFn(ctx, .{ .temporary = bun.ByteList.fromBorrowedSliceDangerous(buf[0..str.len]) }); return result; } } @@ -67,9 +67,9 @@ pub const UTF8Fallback = struct { bun.strings.replaceLatin1WithUTF8(slice[0..str.len]); if 
(input.isDone()) { - return writeFn(ctx, .{ .owned_and_done = bun.ByteList.init(slice) }); + return writeFn(ctx, .{ .owned_and_done = bun.ByteList.fromOwnedSlice(slice) }); } else { - return writeFn(ctx, .{ .owned = bun.ByteList.init(slice) }); + return writeFn(ctx, .{ .owned = bun.ByteList.fromOwnedSlice(slice) }); } } } @@ -83,10 +83,10 @@ pub const UTF8Fallback = struct { bun.assert(copied.written <= stack_size); bun.assert(copied.read <= stack_size); if (input.isDone()) { - const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.init(buf[0..copied.written]) }); + const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.fromBorrowedSliceDangerous(buf[0..copied.written]) }); return result; } else { - const result = writeFn(ctx, .{ .temporary = bun.ByteList.init(buf[0..copied.written]) }); + const result = writeFn(ctx, .{ .temporary = bun.ByteList.fromBorrowedSliceDangerous(buf[0..copied.written]) }); return result; } } @@ -94,9 +94,9 @@ pub const UTF8Fallback = struct { { const allocated = bun.strings.toUTF8Alloc(bun.default_allocator, str) catch return .{ .err = Syscall.Error.oom }; if (input.isDone()) { - return writeFn(ctx, .{ .owned_and_done = bun.ByteList.init(allocated) }); + return writeFn(ctx, .{ .owned_and_done = bun.ByteList.fromOwnedSlice(allocated) }); } else { - return writeFn(ctx, .{ .owned = bun.ByteList.init(allocated) }); + return writeFn(ctx, .{ .owned = bun.ByteList.fromOwnedSlice(allocated) }); } } } @@ -394,7 +394,9 @@ pub fn JSSink(comptime SinkType: type, comptime abi_name: []const u8) type { return jsc.JSValue.jsNumber(0); } - return this.sink.writeBytes(.{ .temporary = bun.ByteList.init(slice) }).toJS(globalThis); + return this.sink.writeBytes( + .{ .temporary = bun.ByteList.fromBorrowedSliceDangerous(slice) }, + ).toJS(globalThis); } if (!arg.isString()) { @@ -414,10 +416,14 @@ pub fn JSSink(comptime SinkType: type, comptime abi_name: []const u8) type { defer str.ensureStillAlive(); if (view.is16Bit()) { - return 
this.sink.writeUTF16(.{ .temporary = bun.ByteList.initConst(std.mem.sliceAsBytes(view.utf16SliceAligned())) }).toJS(globalThis); + return this.sink.writeUTF16(.{ .temporary = bun.ByteList.fromBorrowedSliceDangerous( + std.mem.sliceAsBytes(view.utf16SliceAligned()), + ) }).toJS(globalThis); } - return this.sink.writeLatin1(.{ .temporary = bun.ByteList.initConst(view.slice()) }).toJS(globalThis); + return this.sink.writeLatin1( + .{ .temporary = bun.ByteList.fromBorrowedSliceDangerous(view.slice()) }, + ).toJS(globalThis); } pub fn writeUTF8(globalThis: *JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue { diff --git a/src/bun.js/webcore/fetch.zig b/src/bun.js/webcore/fetch.zig index a0c8d7dc90..633f17d16c 100644 --- a/src/bun.js/webcore/fetch.zig +++ b/src/bun.js/webcore/fetch.zig @@ -108,6 +108,7 @@ pub const FetchTasklet = struct { // custom checkServerIdentity check_server_identity: jsc.Strong.Optional = .empty, reject_unauthorized: bool = true, + upgraded_connection: bool = false, // Custom Hostname hostname: ?[]u8 = null, is_waiting_body: bool = false, @@ -406,14 +407,14 @@ pub const FetchTasklet = struct { if (readable.ptr == .Bytes) { readable.ptr.Bytes.size_hint = this.getSizeHint(); // body can be marked as used but we still need to pipe the data - const scheduled_response_buffer = this.scheduled_response_buffer.list; + const scheduled_response_buffer = &this.scheduled_response_buffer.list; const chunk = scheduled_response_buffer.items; if (this.result.has_more) { readable.ptr.Bytes.onData( .{ - .temporary = bun.ByteList.initConst(chunk), + .temporary = bun.ByteList.fromBorrowedSliceDangerous(chunk), }, bun.default_allocator, ); @@ -423,16 +424,9 @@ pub const FetchTasklet = struct { defer prev.deinit(); buffer_reset = false; this.memory_reporter.discard(scheduled_response_buffer.allocatedSlice()); - this.scheduled_response_buffer = .{ - .allocator = bun.default_allocator, - .list = .{ - .items = &.{}, - .capacity = 0, - }, - }; 
readable.ptr.Bytes.onData( .{ - .owned_and_done = bun.ByteList.initConst(chunk), + .owned_and_done = bun.ByteList.moveFromList(scheduled_response_buffer), }, bun.default_allocator, ); @@ -455,7 +449,7 @@ pub const FetchTasklet = struct { if (this.result.has_more) { readable.ptr.Bytes.onData( .{ - .temporary = bun.ByteList.initConst(chunk), + .temporary = bun.ByteList.fromBorrowedSliceDangerous(chunk), }, bun.default_allocator, ); @@ -467,7 +461,7 @@ pub const FetchTasklet = struct { readable.value.ensureStillAlive(); readable.ptr.Bytes.onData( .{ - .temporary_and_done = bun.ByteList.initConst(chunk), + .temporary_and_done = bun.ByteList.fromBorrowedSliceDangerous(chunk), }, bun.default_allocator, ); @@ -1069,6 +1063,7 @@ pub const FetchTasklet = struct { .memory_reporter = fetch_options.memory_reporter, .check_server_identity = fetch_options.check_server_identity, .reject_unauthorized = fetch_options.reject_unauthorized, + .upgraded_connection = fetch_options.upgraded_connection, }; fetch_tasklet.signals = fetch_tasklet.signal_store.to(); @@ -1201,19 +1196,23 @@ pub const FetchTasklet = struct { // dont have backpressure so we will schedule the data to be written // if we have backpressure the onWritable will drain the buffer needs_schedule = stream_buffer.isEmpty(); - //16 is the max size of a hex number size that represents 64 bits + 2 for the \r\n - var formated_size_buffer: [18]u8 = undefined; - const formated_size = std.fmt.bufPrint( - formated_size_buffer[0..], - "{x}\r\n", - .{data.len}, - ) catch |err| switch (err) { - error.NoSpaceLeft => unreachable, - }; - bun.handleOom(stream_buffer.ensureUnusedCapacity(formated_size.len + data.len + 2)); - stream_buffer.writeAssumeCapacity(formated_size); - stream_buffer.writeAssumeCapacity(data); - stream_buffer.writeAssumeCapacity("\r\n"); + if (this.upgraded_connection) { + bun.handleOom(stream_buffer.write(data)); + } else { + //16 is the max size of a hex number size that represents 64 bits + 2 for the \r\n + var 
formated_size_buffer: [18]u8 = undefined; + const formated_size = std.fmt.bufPrint( + formated_size_buffer[0..], + "{x}\r\n", + .{data.len}, + ) catch |err| switch (err) { + error.NoSpaceLeft => unreachable, + }; + bun.handleOom(stream_buffer.ensureUnusedCapacity(formated_size.len + data.len + 2)); + stream_buffer.writeAssumeCapacity(formated_size); + stream_buffer.writeAssumeCapacity(data); + stream_buffer.writeAssumeCapacity("\r\n"); + } // pause the stream if we hit the high water mark return stream_buffer.size() >= highWaterMark; @@ -1271,6 +1270,7 @@ pub const FetchTasklet = struct { check_server_identity: jsc.Strong.Optional = .empty, unix_socket_path: ZigString.Slice, ssl_config: ?*SSLConfig = null, + upgraded_connection: bool = false, }; pub fn queue( @@ -1494,6 +1494,7 @@ pub fn Bun__fetch_( var memory_reporter = bun.handleOom(bun.default_allocator.create(bun.MemoryReportingAllocator)); // used to clean up dynamically allocated memory on error (a poor man's errdefer) var is_error = false; + var upgraded_connection = false; var allocator = memory_reporter.wrap(bun.default_allocator); errdefer bun.default_allocator.destroy(memory_reporter); defer { @@ -2198,6 +2199,15 @@ pub fn Bun__fetch_( } } + if (headers_.fastGet(bun.webcore.FetchHeaders.HTTPHeaderName.Upgrade)) |_upgrade| { + const upgrade = _upgrade.toSlice(bun.default_allocator); + defer upgrade.deinit(); + const slice = upgrade.slice(); + if (!bun.strings.eqlComptime(slice, "h2") and !bun.strings.eqlComptime(slice, "h2c")) { + upgraded_connection = true; + } + } + break :extract_headers Headers.from(headers_, allocator, .{ .body = body.getAnyBlob() }) catch |err| bun.handleOom(err); } @@ -2333,7 +2343,7 @@ pub fn Bun__fetch_( } } - if (!method.hasRequestBody() and body.hasBody()) { + if (!method.hasRequestBody() and body.hasBody() and !upgraded_connection) { const err = globalThis.toTypeError(.INVALID_ARG_VALUE, fetch_error_unexpected_body, .{}); is_error = true; return 
JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); @@ -2651,6 +2661,7 @@ pub fn Bun__fetch_( .ssl_config = ssl_config, .hostname = hostname, .memory_reporter = memory_reporter, + .upgraded_connection = upgraded_connection, .check_server_identity = if (check_server_identity.isEmptyOrUndefinedOrNull()) .empty else .create(check_server_identity, globalThis), .unix_socket_path = unix_socket_path, }, diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index b88ae0708c..ad1bab017c 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -207,8 +207,8 @@ pub const Result = union(Tag) { pub fn deinit(this: *Result) void { switch (this.*) { - .owned => |*owned| owned.deinitWithAllocator(bun.default_allocator), - .owned_and_done => |*owned_and_done| owned_and_done.deinitWithAllocator(bun.default_allocator), + .owned => |*owned| owned.clearAndFree(bun.default_allocator), + .owned_and_done => |*owned_and_done| owned_and_done.clearAndFree(bun.default_allocator), .err => |err| { if (err == .JSValue) { err.JSValue.unprotect(); @@ -910,17 +910,13 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { else => {}, } - var list = this.buffer.listManaged(this.allocator); - list.clearRetainingCapacity(); - list.ensureTotalCapacityPrecise(this.highWaterMark) catch return .{ .err = Syscall.Error.oom }; - this.buffer.update(list); + this.buffer.clearRetainingCapacity(); + this.buffer.ensureTotalCapacityPrecise(this.allocator, this.highWaterMark) catch + return .{ .err = Syscall.Error.oom }; this.done = false; - this.signal.start(); - log("start({d})", .{this.highWaterMark}); - return .success; } @@ -1260,12 +1256,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { pub fn destroy(this: *@This()) void { log("destroy()", .{}); - var bytes = this.buffer.listManaged(this.allocator); - if (bytes.capacity > 0) { - this.buffer = bun.ByteList.init(""); - bytes.deinit(); - } - + 
this.buffer.deinit(this.allocator); this.unregisterAutoFlusher(); this.allocator.destroy(this); } @@ -1298,19 +1289,18 @@ pub fn HTTPServerWritable(comptime ssl: bool) type { if (this.pooled_buffer) |pooled| { this.buffer.len = 0; if (this.buffer.cap > 64 * 1024) { - this.buffer.deinitWithAllocator(bun.default_allocator); - this.buffer = bun.ByteList.init(""); + this.buffer.clearAndFree(bun.default_allocator); } pooled.data = this.buffer; - this.buffer = bun.ByteList.init(""); + this.buffer = bun.ByteList.empty; this.pooled_buffer = null; pooled.release(); } else if (this.buffer.cap == 0) { // } else if (FeatureFlags.http_buffer_pooling and !WebCore.ByteListPool.full()) { const buffer = this.buffer; - this.buffer = bun.ByteList.init(""); + this.buffer = bun.ByteList.empty; WebCore.ByteListPool.push(this.allocator, buffer); } else { // Don't release this buffer until destroy() is called @@ -1621,9 +1611,9 @@ pub const ReadResult = union(enum) { const done = is_done or (close_on_empty and slice.len == 0); break :brk if (owned and done) - Result{ .owned_and_done = bun.ByteList.init(slice) } + Result{ .owned_and_done = bun.ByteList.fromOwnedSlice(slice) } else if (owned) - Result{ .owned = bun.ByteList.init(slice) } + Result{ .owned = bun.ByteList.fromOwnedSlice(slice) } else if (done) Result{ .into_array_and_done = .{ .len = @as(Blob.SizeType, @truncate(slice.len)), .value = view } } else @@ -1633,28 +1623,6 @@ pub const ReadResult = union(enum) { } }; -pub const AutoSizer = struct { - buffer: *bun.ByteList, - allocator: std.mem.Allocator, - max: usize, - - pub fn resize(this: *AutoSizer, size: usize) ![]u8 { - const available = this.buffer.cap - this.buffer.len; - if (available >= size) return this.buffer.ptr[this.buffer.len..this.buffer.cap][0..size]; - const to_grow = size -| available; - if (to_grow + @as(usize, this.buffer.cap) > this.max) - return this.buffer.ptr[this.buffer.len..this.buffer.cap]; - - var list = this.buffer.listManaged(this.allocator); - const 
prev_len = list.items.len; - try list.ensureTotalCapacity(to_grow + @as(usize, this.buffer.cap)); - this.buffer.update(list); - return this.buffer.ptr[prev_len..@as(usize, this.buffer.cap)]; - } -}; - -const string = []const u8; - const std = @import("std"); const bun = @import("bun"); diff --git a/src/bun.zig b/src/bun.zig index a991c7806a..9091ba0f8a 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -414,14 +414,12 @@ pub const StringHashMapUnowned = struct { pub const collections = @import("./collections.zig"); pub const MultiArrayList = bun.collections.MultiArrayList; pub const BabyList = collections.BabyList; -pub const OffsetList = collections.OffsetList; +pub const ByteList = collections.ByteList; // alias of BabyList(u8) +pub const OffsetByteList = collections.OffsetByteList; pub const bit_set = collections.bit_set; pub const HiveArray = collections.HiveArray; pub const BoundedArray = collections.BoundedArray; -pub const ByteList = BabyList(u8); -pub const OffsetByteList = OffsetList(u8); - pub fn DebugOnly(comptime Type: type) type { if (comptime Environment.isDebug) { return Type; @@ -3745,7 +3743,7 @@ pub const S3 = @import("./s3/client.zig"); /// decommits it or the memory allocator reuses it for a new allocation. /// So if we're about to free something sensitive, we should zero it out first. 
pub fn freeSensitive(allocator: std.mem.Allocator, slice: anytype) void { - @memset(@constCast(slice), 0); + std.crypto.secureZero(std.meta.Child(@TypeOf(slice)), @constCast(slice)); allocator.free(slice); } diff --git a/src/bundler/AstBuilder.zig b/src/bundler/AstBuilder.zig index 419285bbe7..b42ac70c55 100644 --- a/src/bundler/AstBuilder.zig +++ b/src/bundler/AstBuilder.zig @@ -101,7 +101,7 @@ pub const AstBuilder = struct { .source_index = p.source_index, .tag = .symbol, }; - try p.current_scope.generated.push(p.allocator, ref); + try p.current_scope.generated.append(p.allocator, ref); try p.declared_symbols.append(p.allocator, .{ .ref = ref, .is_top_level = p.scopes.items.len == 0 or p.current_scope == p.scopes.items[0], @@ -260,16 +260,16 @@ pub const AstBuilder = struct { parts.mut(1).declared_symbols = p.declared_symbols; parts.mut(1).scopes = p.scopes.items; - parts.mut(1).import_record_indices = BabyList(u32).fromList(p.import_records_for_current_part); + parts.mut(1).import_record_indices = BabyList(u32).moveFromList(&p.import_records_for_current_part); return .{ .parts = parts, .module_scope = module_scope.*, - .symbols = js_ast.Symbol.List.fromList(p.symbols), + .symbols = js_ast.Symbol.List.moveFromList(&p.symbols), .exports_ref = Ref.None, .wrapper_ref = Ref.None, .module_ref = p.module_ref, - .import_records = ImportRecord.List.fromList(p.import_records), + .import_records = ImportRecord.List.moveFromList(&p.import_records), .export_star_import_records = &.{}, .approximate_newline_count = 1, .exports_kind = .esm, diff --git a/src/bundler/Chunk.zig b/src/bundler/Chunk.zig index 2abc1e462b..ac17d003f3 100644 --- a/src/bundler/Chunk.zig +++ b/src/bundler/Chunk.zig @@ -528,7 +528,7 @@ pub const Chunk = struct { pub fn deinit(self: *Self, a: std.mem.Allocator) void { // do shallow deinit since `LayerName` has // allocations in arena - self.deinitWithAllocator(a); + self.clearAndFree(a); } }); diff --git a/src/bundler/LinkerContext.zig 
b/src/bundler/LinkerContext.zig index 7b9cd948ed..a8109b3240 100644 --- a/src/bundler/LinkerContext.zig +++ b/src/bundler/LinkerContext.zig @@ -307,7 +307,7 @@ pub const LinkerContext = struct { @panic("Assertion failed: HTML import file not found in pathToSourceIndexMap"); }; - bun.handleOom(html_source_indices.push(this.allocator(), source_index)); + bun.handleOom(html_source_indices.append(this.allocator(), source_index)); // S.LazyExport is a call to __jsonParse. const original_ref = parts[html_import] @@ -454,7 +454,7 @@ pub const LinkerContext = struct { var parts_list = this.allocator().alloc(u32, 1) catch unreachable; parts_list[0] = part_index; - top_level.put(this.allocator(), ref, BabyList(u32).init(parts_list)) catch unreachable; + top_level.put(this.allocator(), ref, BabyList(u32).fromOwnedSlice(parts_list)) catch unreachable; var resolved_exports = &this.graph.meta.items(.resolved_exports)[source_index]; resolved_exports.put(this.allocator(), alias, ExportData{ @@ -2074,7 +2074,7 @@ pub const LinkerContext = struct { .{ .ref = c.graph.ast.items(.wrapper_ref)[source_index], .is_top_level = true }, }, ) catch unreachable, - .dependencies = Dependency.List.init(dependencies), + .dependencies = Dependency.List.fromOwnedSlice(dependencies), }, ) catch unreachable; bun.assert(part_index != js_ast.namespace_export_part_index); @@ -2126,7 +2126,7 @@ pub const LinkerContext = struct { .declared_symbols = js_ast.DeclaredSymbol.List.fromSlice(c.allocator(), &[_]js_ast.DeclaredSymbol{ .{ .ref = wrapper_ref, .is_top_level = true }, }) catch unreachable, - .dependencies = Dependency.List.init(dependencies), + .dependencies = Dependency.List.fromOwnedSlice(dependencies), }, ) catch unreachable; bun.assert(part_index != js_ast.namespace_export_part_index); @@ -2315,7 +2315,7 @@ pub const LinkerContext = struct { c.allocator(), import_ref, .{ - .re_exports = bun.BabyList(js_ast.Dependency).init(re_exports.items), + .re_exports = 
bun.BabyList(js_ast.Dependency).fromOwnedSlice(re_exports.items), .data = .{ .source_index = Index.source(result.source_index), .import_ref = result.ref, @@ -2334,7 +2334,7 @@ pub const LinkerContext = struct { c.allocator(), import_ref, .{ - .re_exports = bun.BabyList(js_ast.Dependency).init(re_exports.items), + .re_exports = bun.BabyList(js_ast.Dependency).fromOwnedSlice(re_exports.items), .data = .{ .source_index = Index.source(result.source_index), .import_ref = result.ref, @@ -2497,7 +2497,7 @@ pub const LinkerContext = struct { try pieces.append(OutputPiece.init(output, OutputPiece.Query.none)); return .{ - .pieces = bun.BabyList(Chunk.OutputPiece).init(pieces.items), + .pieces = bun.BabyList(Chunk.OutputPiece).fromOwnedSlice(pieces.items), }; } }; diff --git a/src/bundler/LinkerGraph.zig b/src/bundler/LinkerGraph.zig index e9a2705848..b29fdaa47f 100644 --- a/src/bundler/LinkerGraph.zig +++ b/src/bundler/LinkerGraph.zig @@ -59,15 +59,16 @@ pub fn generateNewSymbol(this: *LinkerGraph, source_index: u32, kind: Symbol.Kin ref.tag = .symbol; // TODO: will this crash on resize due to using threadlocal mimalloc heap? 
- source_symbols.push( + source_symbols.append( this.allocator, .{ .kind = kind, .original_name = original_name, }, - ) catch unreachable; + ) catch |err| bun.handleOom(err); - this.ast.items(.module_scope)[source_index].generated.push(this.allocator, ref) catch unreachable; + this.ast.items(.module_scope)[source_index].generated.append(this.allocator, ref) catch |err| + bun.handleOom(err); return ref; } @@ -98,7 +99,7 @@ pub fn addPartToFile( ) !u32 { var parts: *Part.List = &graph.ast.items(.parts)[id]; const part_id = @as(u32, @truncate(parts.len)); - try parts.push(graph.allocator, part); + try parts.append(graph.allocator, part); var top_level_symbol_to_parts_overlay: ?*TopLevelSymbolToParts = null; const Iterator = struct { @@ -127,12 +128,12 @@ pub fn addPartToFile( list.appendSliceAssumeCapacity(original_parts.slice()); list.appendAssumeCapacity(self.part_id); - entry.value_ptr.* = .init(list.items); + entry.value_ptr.* = .fromOwnedSlice(list.items); } else { entry.value_ptr.* = BabyList(u32).fromSlice(self.graph.allocator, &.{self.part_id}) catch |err| bun.handleOom(err); } } else { - entry.value_ptr.push(self.graph.allocator, self.part_id) catch unreachable; + bun.handleOom(entry.value_ptr.append(self.graph.allocator, self.part_id)); } } }; @@ -144,7 +145,7 @@ pub fn addPartToFile( .top_level_symbol_to_parts_overlay = &top_level_symbol_to_parts_overlay, }; - js_ast.DeclaredSymbol.forEachTopLevelSymbol(&parts.ptr[part_id].declared_symbols, &ctx, Iterator.next); + js_ast.DeclaredSymbol.forEachTopLevelSymbol(&parts.mut(part_id).declared_symbols, &ctx, Iterator.next); return part_id; } @@ -352,7 +353,9 @@ pub fn load( } { - var input_symbols = js_ast.Symbol.Map.initList(js_ast.Symbol.NestedList.init(this.ast.items(.symbols))); + var input_symbols = js_ast.Symbol.Map.initList( + js_ast.Symbol.NestedList.fromBorrowedSliceDangerous(this.ast.items(.symbols)), + ); var symbols = bun.handleOom(input_symbols.symbols_for_source.clone(this.allocator)); for 
(symbols.slice(), input_symbols.symbols_for_source.slice()) |*dest, src| { dest.* = bun.handleOom(src.clone(this.allocator)); @@ -412,6 +415,26 @@ pub fn load( } } +/// Transfers ownership of the AST to the graph allocator. +/// This is valid only if all allocators are `MimallocArena`s. +pub fn takeAstOwnership(this: *LinkerGraph) void { + const ast = this.ast.slice(); + const heap: bun.allocators.MimallocArena.Borrowed = .downcast(this.allocator); + if (comptime !bun.collections.baby_list.safety_checks) return; + for (ast.items(.import_records)) |*import_records| { + import_records.transferOwnership(heap); + } + for (ast.items(.parts)) |*parts| { + parts.transferOwnership(heap); + for (parts.slice()) |*part| { + part.dependencies.transferOwnership(heap); + } + } + for (ast.items(.symbols)) |*symbols| { + symbols.transferOwnership(heap); + } +} + pub const File = struct { entry_bits: AutoBitSet = undefined, diff --git a/src/bundler/ParseTask.zig b/src/bundler/ParseTask.zig index caa9d18f39..b59ee29ff8 100644 --- a/src/bundler/ParseTask.zig +++ b/src/bundler/ParseTask.zig @@ -419,12 +419,12 @@ fn getAST( }, Logger.Loc{ .start = 0 }), }; require_args[1] = Expr.init(E.Object, E.Object{ - .properties = G.Property.List.init(object_properties), + .properties = G.Property.List.fromOwnedSlice(object_properties), .is_single_line = true, }, Logger.Loc{ .start = 0 }); const require_call = Expr.init(E.Call, E.Call{ .target = require_property, - .args = BabyList(Expr).init(require_args), + .args = BabyList(Expr).fromOwnedSlice(require_args), }, Logger.Loc{ .start = 0 }); const root = Expr.init(E.Dot, E.Dot{ @@ -460,7 +460,7 @@ fn getAST( const root = Expr.init(E.Call, E.Call{ .target = .{ .data = .{ .e_require_call_target = {} }, .loc = .{ .start = 0 } }, - .args = BabyList(Expr).init(require_args), + .args = BabyList(Expr).fromOwnedSlice(require_args), }, Logger.Loc{ .start = 0 }); unique_key_for_additional_file.* = .{ @@ -1075,7 +1075,7 @@ fn runWithSourceCode( var transpiler 
= this.transpilerForTarget(task.known_target); errdefer transpiler.resetStore(); - var resolver: *Resolver = &transpiler.resolver; + const resolver: *Resolver = &transpiler.resolver; const file_path = &task.path; const loader = task.loader orelse file_path.loader(&transpiler.options.loaders) orelse options.Loader.file; @@ -1130,19 +1130,14 @@ fn runWithSourceCode( else .none; - if ( - // separate_ssr_graph makes boundaries switch to client because the server file uses that generated file as input. - // this is not done when there is one server graph because it is easier for plugins to deal with. - (use_directive == .client and + if (use_directive == .client and task.known_target != .bake_server_components_ssr and - this.ctx.framework.?.server_components.?.separate_ssr_graph) or - // set the target to the client when bundling client-side files - ((transpiler.options.server_components or transpiler.options.dev_server != null) and - task.known_target == .browser)) + this.ctx.framework.?.server_components.?.separate_ssr_graph and + task.known_target != .browser) { - transpiler = this.ctx.client_transpiler.?; - resolver = &transpiler.resolver; - bun.assert(transpiler.options.target == .browser); + // separate_ssr_graph makes boundaries switch to client because the server file uses that generated file as input. + // this is not done when there is one server graph because it is easier for plugins to deal with. 
+ transpiler = this.transpilerForTarget(.browser); } const source = &Logger.Source{ @@ -1163,7 +1158,7 @@ fn runWithSourceCode( var opts = js_parser.Parser.Options.init(task.jsx, loader); opts.bundle = true; opts.warn_about_unbundled_modules = false; - opts.macro_context = &this.data.macro_context; + opts.macro_context = &transpiler.macro_context.?; opts.package_version = task.package_version; opts.features.allow_runtime = !source.index.isRuntime(); @@ -1175,6 +1170,7 @@ fn runWithSourceCode( opts.output_format = output_format; opts.features.minify_syntax = transpiler.options.minify_syntax; opts.features.minify_identifiers = transpiler.options.minify_identifiers; + opts.features.minify_whitespace = transpiler.options.minify_whitespace; opts.features.emit_decorator_metadata = transpiler.options.emit_decorator_metadata; opts.features.unwrap_commonjs_packages = transpiler.options.unwrap_commonjs_packages; opts.features.hot_module_reloading = output_format == .internal_bake_dev and !source.index.isRuntime(); diff --git a/src/bundler/ThreadPool.zig b/src/bundler/ThreadPool.zig index 693e1d05ee..fb4a8c1db7 100644 --- a/src/bundler/ThreadPool.zig +++ b/src/bundler/ThreadPool.zig @@ -269,10 +269,8 @@ pub const ThreadPool = struct { pub const WorkerData = struct { log: *Logger.Log, estimated_input_lines_of_code: usize = 0, - macro_context: js_ast.Macro.MacroContext, - transpiler: Transpiler = undefined, - other_transpiler: Transpiler = undefined, - has_loaded_other_transpiler: bool = false, + transpiler: Transpiler, + other_transpiler: ?Transpiler = null, }; pub fn init(worker: *Worker, v2: *BundleV2) void { @@ -294,9 +292,8 @@ pub const ThreadPool = struct { this.ast_memory_allocator.reset(); this.data = WorkerData{ - .log = allocator.create(Logger.Log) catch unreachable, - .estimated_input_lines_of_code = 0, - .macro_context = undefined, + .log = bun.handleOom(allocator.create(Logger.Log)), + .transpiler = undefined, }; this.data.log.* = Logger.Log.init(allocator); 
this.ctx = ctx; @@ -313,20 +310,22 @@ pub const ThreadPool = struct { transpiler.setAllocator(allocator); transpiler.linker.resolver = &transpiler.resolver; transpiler.macro_context = js_ast.Macro.MacroContext.init(transpiler); - this.data.macro_context = transpiler.macro_context.?; const CacheSet = @import("../cache.zig"); transpiler.resolver.caches = CacheSet.Set.init(allocator); } pub fn transpilerForTarget(this: *Worker, target: bun.options.Target) *Transpiler { if (target == .browser and this.data.transpiler.options.target != target) { - if (!this.data.has_loaded_other_transpiler) { - this.data.has_loaded_other_transpiler = true; - this.initializeTranspiler(&this.data.other_transpiler, this.ctx.client_transpiler.?, this.allocator); - } - - bun.debugAssert(this.data.other_transpiler.options.target == target); - return &this.data.other_transpiler; + const other_transpiler = if (this.data.other_transpiler) |*other| + other + else blk: { + this.data.other_transpiler = undefined; + const other = &this.data.other_transpiler.?; + this.initializeTranspiler(other, this.ctx.client_transpiler.?, this.allocator); + break :blk other; + }; + bun.debugAssert(other_transpiler.options.target == target); + return other_transpiler; } return &this.data.transpiler; diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index ad2c2a0b1a..f19971e026 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -758,7 +758,7 @@ pub const BundleV2 = struct { if (!this.enqueueOnLoadPluginIfNeeded(task)) { if (loader.shouldCopyForBundling()) { var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; - additional_files.push(this.allocator(), .{ .source_index = task.source_index.get() }) catch unreachable; + bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = task.source_index.get() })); this.graph.input_files.items(.side_effects)[source_index.get()] = .no_side_effects__pure_data; 
this.graph.estimated_file_loader_count += 1; } @@ -824,7 +824,7 @@ pub const BundleV2 = struct { if (!this.enqueueOnLoadPluginIfNeeded(task)) { if (loader.shouldCopyForBundling()) { var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; - additional_files.push(this.allocator(), .{ .source_index = task.source_index.get() }) catch unreachable; + bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = task.source_index.get() })); this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data; this.graph.estimated_file_loader_count += 1; } @@ -1138,8 +1138,8 @@ pub const BundleV2 = struct { bun.safety.alloc.assertEq(this.allocator(), this.transpiler.allocator); bun.safety.alloc.assertEq(this.allocator(), this.linker.graph.allocator); this.linker.graph.ast = try this.graph.ast.clone(this.allocator()); - var ast = this.linker.graph.ast.slice(); - for (ast.items(.module_scope)) |*module_scope| { + + for (this.linker.graph.ast.items(.module_scope)) |*module_scope| { for (module_scope.children.slice()) |child| { child.parent = module_scope; } @@ -1150,6 +1150,10 @@ pub const BundleV2 = struct { module_scope.generated = try module_scope.generated.clone(this.allocator()); } + + // Some parts of the AST are owned by worker allocators at this point. + // Transfer ownership to the graph heap. + this.linker.graph.takeAstOwnership(); } /// This generates the two asts for 'bun:bake/client' and 'bun:bake/server'. 
Both are generated @@ -1249,7 +1253,7 @@ pub const BundleV2 = struct { try client_manifest_props.append(alloc, .{ .key = client_path, .value = server.newExpr(E.Object{ - .properties = G.Property.List.init(client_manifest_items), + .properties = G.Property.List.fromOwnedSlice(client_manifest_items), }), }); } else { @@ -1264,7 +1268,7 @@ pub const BundleV2 = struct { .ref = try server.newSymbol(.other, "serverManifest"), }, Logger.Loc.Empty), .value = server.newExpr(E.Object{ - .properties = G.Property.List.fromList(server_manifest_props), + .properties = G.Property.List.moveFromList(&server_manifest_props), }), }}), .is_export = true, @@ -1276,7 +1280,7 @@ pub const BundleV2 = struct { .ref = try server.newSymbol(.other, "ssrManifest"), }, Logger.Loc.Empty), .value = server.newExpr(E.Object{ - .properties = G.Property.List.fromList(client_manifest_props), + .properties = G.Property.List.moveFromList(&client_manifest_props), }), }}), .is_export = true, @@ -1316,7 +1320,7 @@ pub const BundleV2 = struct { if (!this.enqueueOnLoadPluginIfNeeded(task)) { if (loader.shouldCopyForBundling()) { var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; - additional_files.push(this.allocator(), .{ .source_index = task.source_index.get() }) catch unreachable; + bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = task.source_index.get() })); this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data; this.graph.estimated_file_loader_count += 1; } @@ -1370,7 +1374,7 @@ pub const BundleV2 = struct { if (!this.enqueueOnLoadPluginIfNeeded(task)) { if (loader.shouldCopyForBundling()) { var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; - additional_files.push(this.allocator(), .{ .source_index = task.source_index.get() }) catch unreachable; + 
bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = task.source_index.get() })); this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data; this.graph.estimated_file_loader_count += 1; } @@ -1679,9 +1683,9 @@ pub const BundleV2 = struct { .entry_point_index = null, .is_executable = false, })) catch unreachable; - additional_files[index].push(this.allocator(), AdditionalFile{ + additional_files[index].append(this.allocator(), AdditionalFile{ .output_file = @as(u32, @truncate(additional_output_files.items.len - 1)), - }) catch unreachable; + }) catch |err| bun.handleOom(err); } } @@ -1834,11 +1838,19 @@ pub const BundleV2 = struct { transpiler.options.chunk_naming = config.names.chunk.data; transpiler.options.asset_naming = config.names.asset.data; - transpiler.options.public_path = config.public_path.list.items; transpiler.options.output_format = config.format; transpiler.options.bytecode = config.bytecode; transpiler.options.compile = config.compile != null; + // For compile mode, set the public_path to the target-specific base path + // This ensures embedded resources like yoga.wasm are correctly found + if (config.compile) |compile_opts| { + const base_public_path = bun.StandaloneModuleGraph.targetBasePublicPath(compile_opts.compile_target.os, "root/"); + transpiler.options.public_path = base_public_path; + } else { + transpiler.options.public_path = config.public_path.list.items; + } + transpiler.options.output_dir = config.outdir.slice(); transpiler.options.root_dir = config.rootdir.slice(); transpiler.options.minify_syntax = config.minify.syntax; @@ -1903,11 +1915,18 @@ pub const BundleV2 = struct { const outbuf = bun.path_buffer_pool.get(); defer bun.path_buffer_pool.put(outbuf); + // Always get an absolute path for the outfile to ensure it works correctly with PE metadata operations var full_outfile_path = if (this.config.outdir.slice().len > 0) brk: { const outdir_slice = 
this.config.outdir.slice(); const top_level_dir = bun.fs.FileSystem.instance.top_level_dir; break :brk bun.path.joinAbsStringBuf(top_level_dir, outbuf, &[_][]const u8{ outdir_slice, compile_options.outfile.slice() }, .auto); - } else compile_options.outfile.slice(); + } else if (std.fs.path.isAbsolute(compile_options.outfile.slice())) + compile_options.outfile.slice() + else brk: { + // For relative paths, ensure we make them absolute relative to the current working directory + const top_level_dir = bun.fs.FileSystem.instance.top_level_dir; + break :brk bun.path.joinAbsStringBuf(top_level_dir, outbuf, &[_][]const u8{compile_options.outfile.slice()}, .auto); + }; // Add .exe extension for Windows targets if not already present if (compile_options.compile_target.os == .windows and !strings.hasSuffixComptime(full_outfile_path, ".exe")) { @@ -1926,19 +1945,32 @@ pub const BundleV2 = struct { } } - if (!(dirname.len == 0 or strings.eqlComptime(dirname, "."))) { + // On Windows, don't change root_dir, just pass the full relative path + // On POSIX, change root_dir to the target directory and pass basename + const outfile_for_executable = if (Environment.isWindows) full_outfile_path else basename; + + if (Environment.isPosix and !(dirname.len == 0 or strings.eqlComptime(dirname, "."))) { + // On POSIX, makeOpenPath and change root_dir root_dir = root_dir.makeOpenPath(dirname, .{}) catch |err| { return bun.StandaloneModuleGraph.CompileResult.fail(bun.handleOom(std.fmt.allocPrint(bun.default_allocator, "Failed to open output directory {s}: {s}", .{ dirname, @errorName(err) }))); }; + } else if (Environment.isWindows and !(dirname.len == 0 or strings.eqlComptime(dirname, "."))) { + // On Windows, ensure directories exist but don't change root_dir + _ = bun.makePath(root_dir, dirname) catch |err| { + return bun.StandaloneModuleGraph.CompileResult.fail(bun.handleOom(std.fmt.allocPrint(bun.default_allocator, "Failed to create output directory {s}: {s}", .{ dirname, 
@errorName(err) }))); + }; } + // Use the target-specific base path for compile mode, not the user-configured public_path + const module_prefix = bun.StandaloneModuleGraph.targetBasePublicPath(compile_options.compile_target.os, "root/"); + const result = bun.StandaloneModuleGraph.toExecutable( &compile_options.compile_target, bun.default_allocator, output_files.items, root_dir, - this.config.public_path.slice(), - basename, + module_prefix, + outfile_for_executable, this.env, this.config.format, .{ @@ -2231,7 +2263,7 @@ pub const BundleV2 = struct { if (should_copy_for_bundling) { const source_index = load.source_index; var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; - additional_files.push(this.allocator(), .{ .source_index = source_index.get() }) catch unreachable; + bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = source_index.get() })); this.graph.input_files.items(.side_effects)[source_index.get()] = .no_side_effects__pure_data; this.graph.estimated_file_loader_count += 1; } @@ -2430,7 +2462,7 @@ pub const BundleV2 = struct { if (!this.enqueueOnLoadPluginIfNeeded(task)) { if (loader.shouldCopyForBundling()) { var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; - additional_files.push(this.allocator(), .{ .source_index = task.source_index.get() }) catch unreachable; + bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = task.source_index.get() })); this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data; this.graph.estimated_file_loader_count += 1; } @@ -2464,7 +2496,7 @@ pub const BundleV2 = struct { if (!entry.found_existing) { entry.value_ptr.* = .{}; } - entry.value_ptr.push( + entry.value_ptr.append( this.allocator(), .{ .to_source_index = source_index, @@ -3501,7 +3533,7 @@ pub const BundleV2 = struct { 
import_record.source_index = fake_input_file.source.index; try this.pathToSourceIndexMap(target).put(this.allocator(), path_text, fake_input_file.source.index.get()); - try graph.html_imports.server_source_indices.push(this.allocator(), fake_input_file.source.index.get()); + try graph.html_imports.server_source_indices.append(this.allocator(), fake_input_file.source.index.get()); this.ensureClientTranspiler(); } @@ -3682,7 +3714,7 @@ pub const BundleV2 = struct { if (loader.shouldCopyForBundling()) { var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()]; - additional_files.push(this.allocator(), .{ .source_index = new_task.source_index.get() }) catch unreachable; + bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = new_task.source_index.get() })); new_input_file.side_effects = _resolver.SideEffects.no_side_effects__pure_data; graph.estimated_file_loader_count += 1; } @@ -3691,7 +3723,7 @@ pub const BundleV2 = struct { } else { if (loader.shouldCopyForBundling()) { var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()]; - additional_files.push(this.allocator(), .{ .source_index = existing.value_ptr.* }) catch unreachable; + bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = existing.value_ptr.* })); graph.estimated_file_loader_count += 1; } @@ -3707,16 +3739,15 @@ pub const BundleV2 = struct { result.loader.isCSS(); if (this.resolve_tasks_waiting_for_import_source_index.fetchSwapRemove(result.source.index.get())) |pending_entry| { - for (pending_entry.value.slice()) |to_assign| { + var value = pending_entry.value; + for (value.slice()) |to_assign| { if (save_import_record_source_index or input_file_loaders[to_assign.to_source_index.get()].isCSS()) { import_records.slice()[to_assign.import_record_index].source_index = to_assign.to_source_index; } } - - var list = 
pending_entry.value.list(); - list.deinit(this.allocator()); + value.deinit(this.allocator()); } if (result.ast.css != null) { diff --git a/src/bundler/linker_context/computeChunks.zig b/src/bundler/linker_context/computeChunks.zig index 7f73e32e9d..266d71c35a 100644 --- a/src/bundler/linker_context/computeChunks.zig +++ b/src/bundler/linker_context/computeChunks.zig @@ -286,7 +286,7 @@ pub noinline fn computeChunks( } // We don't care about the order of the HTML chunks that have no JS chunks. - try sorted_chunks.append(this.allocator(), html_chunks.values()); + try sorted_chunks.appendSlice(this.allocator(), html_chunks.values()); break :sort_chunks sorted_chunks.slice(); }; diff --git a/src/bundler/linker_context/computeCrossChunkDependencies.zig b/src/bundler/linker_context/computeCrossChunkDependencies.zig index 111281f41e..8e4c3ba3ac 100644 --- a/src/bundler/linker_context/computeCrossChunkDependencies.zig +++ b/src/bundler/linker_context/computeCrossChunkDependencies.zig @@ -237,7 +237,7 @@ fn computeCrossChunkDependenciesWithChunkMetas(c: *LinkerContext, chunks: []Chun var entry = try js .imports_from_other_chunks .getOrPutValue(c.allocator(), other_chunk_index, .{}); - try entry.value_ptr.push(c.allocator(), .{ + try entry.value_ptr.append(c.allocator(), .{ .ref = import_ref, }); } @@ -272,12 +272,10 @@ fn computeCrossChunkDependenciesWithChunkMetas(c: *LinkerContext, chunks: []Chun const dynamic_chunk_indices = chunk_meta.dynamic_imports.keys(); std.sort.pdq(Index.Int, dynamic_chunk_indices, {}, std.sort.asc(Index.Int)); - var imports = chunk.cross_chunk_imports.listManaged(c.allocator()); - defer chunk.cross_chunk_imports.update(imports); - imports.ensureUnusedCapacity(dynamic_chunk_indices.len) catch unreachable; - const prev_len = imports.items.len; - imports.items.len += dynamic_chunk_indices.len; - for (dynamic_chunk_indices, imports.items[prev_len..]) |dynamic_chunk_index, *item| { + const new_imports = bun.handleOom( + 
chunk.cross_chunk_imports.writableSlice(c.allocator(), dynamic_chunk_indices.len), + ); + for (dynamic_chunk_indices, new_imports) |dynamic_chunk_index, *item| { item.* = .{ .import_kind = .dynamic, .chunk_index = dynamic_chunk_index, @@ -387,7 +385,7 @@ fn computeCrossChunkDependenciesWithChunkMetas(c: *LinkerContext, chunks: []Chun }); } - cross_chunk_imports.push(c.allocator(), .{ + cross_chunk_imports.append(c.allocator(), .{ .import_kind = .stmt, .chunk_index = cross_chunk_import.chunk_index, }) catch unreachable; @@ -397,7 +395,7 @@ fn computeCrossChunkDependenciesWithChunkMetas(c: *LinkerContext, chunks: []Chun .import_record_index = import_record_index, .namespace_ref = Ref.None, }; - cross_chunk_prefix_stmts.push( + cross_chunk_prefix_stmts.append( c.allocator(), .{ .data = .{ diff --git a/src/bundler/linker_context/convertStmtsForChunk.zig b/src/bundler/linker_context/convertStmtsForChunk.zig index d25ec13780..eb9eb2a3f8 100644 --- a/src/bundler/linker_context/convertStmtsForChunk.zig +++ b/src/bundler/linker_context/convertStmtsForChunk.zig @@ -188,7 +188,7 @@ pub fn convertStmtsForChunk( }, stmt.loc, ), - .args = bun.BabyList(Expr).init(args), + .args = bun.BabyList(Expr).fromOwnedSlice(args), }, stmt.loc, ), @@ -272,7 +272,7 @@ pub fn convertStmtsForChunk( }, stmt.loc, ), - .args = js_ast.ExprNodeList.init(args), + .args = js_ast.ExprNodeList.fromOwnedSlice(args), }, stmt.loc, ), diff --git a/src/bundler/linker_context/convertStmtsForChunkForDevServer.zig b/src/bundler/linker_context/convertStmtsForChunkForDevServer.zig index ca36be7531..144dc62ea2 100644 --- a/src/bundler/linker_context/convertStmtsForChunkForDevServer.zig +++ b/src/bundler/linker_context/convertStmtsForChunkForDevServer.zig @@ -72,7 +72,7 @@ pub fn convertStmtsForChunkForDevServer( .name = if (record.tag == .runtime) "require" else "builtin", .name_loc = stmt.loc, }, stmt.loc), - .args = .init(try allocator.dupe(Expr, &.{Expr.init(E.String, .{ + .args = .fromOwnedSlice(try 
allocator.dupe(Expr, &.{Expr.init(E.String, .{ .data = if (record.tag == .runtime) "bun:wrap" else record.path.pretty, }, record.range.loc)})), }, stmt.loc); @@ -144,7 +144,7 @@ pub fn convertStmtsForChunkForDevServer( .name_loc = .Empty, }, .Empty), .right = Expr.init(E.Array, .{ - .items = .fromList(esm_callbacks), + .items = .moveFromList(&esm_callbacks), .is_single_line = esm_callbacks.items.len <= 2, }, .Empty), }, .Empty), diff --git a/src/bundler/linker_context/doStep5.zig b/src/bundler/linker_context/doStep5.zig index 4b1d2520ac..440c224e1a 100644 --- a/src/bundler/linker_context/doStep5.zig +++ b/src/bundler/linker_context/doStep5.zig @@ -86,6 +86,10 @@ pub fn doStep5(c: *LinkerContext, source_index_: Index, _: usize) void { const our_imports_to_bind = imports_to_bind[id]; outer: for (parts_slice, 0..) |*part, part_index| { + // Previously owned by `c.allocator()`, which is a `MimallocArena` (from + // `BundleV2.graph.heap`). + part.dependencies.transferOwnership(&worker.heap); + // Now that all files have been parsed, determine which property // accesses off of imported symbols are inlined enum values and // which ones aren't @@ -188,7 +192,7 @@ pub fn doStep5(c: *LinkerContext, source_index_: Index, _: usize) void { if (!local.found_existing or local.value_ptr.* != part_index) { local.value_ptr.* = @as(u32, @intCast(part_index)); // note: if we crash on append, it is due to threadlocal heaps in mimalloc - part.dependencies.push( + part.dependencies.append( allocator, .{ .source_index = Index.source(source_index), @@ -200,7 +204,7 @@ pub fn doStep5(c: *LinkerContext, source_index_: Index, _: usize) void { // Also map from imports to parts that use them if (named_imports.getPtr(ref)) |existing| { - existing.local_parts_with_uses.push(allocator, @intCast(part_index)) catch unreachable; + bun.handleOom(existing.local_parts_with_uses.append(allocator, @intCast(part_index))); } } } @@ -360,7 +364,7 @@ pub fn createExportsForFile( allocator, js_ast.S.Local, .{ 
- .decls = G.Decl.List.init(decls), + .decls = G.Decl.List.fromOwnedSlice(decls), }, loc, ); @@ -375,7 +379,12 @@ pub fn createExportsForFile( var args = allocator.alloc(js_ast.Expr, 2) catch unreachable; args[0..2].* = [_]js_ast.Expr{ js_ast.Expr.initIdentifier(exports_ref, loc), - js_ast.Expr.allocate(allocator, js_ast.E.Object, .{ .properties = js_ast.G.Property.List.fromList(properties) }, loc), + js_ast.Expr.allocate( + allocator, + js_ast.E.Object, + .{ .properties = .moveFromList(&properties) }, + loc, + ), }; remaining_stmts[0] = js_ast.Stmt.allocate( allocator, @@ -386,7 +395,7 @@ pub fn createExportsForFile( js_ast.E.Call, .{ .target = js_ast.Expr.initIdentifier(export_ref, loc), - .args = js_ast.ExprNodeList.init(args), + .args = js_ast.ExprNodeList.fromOwnedSlice(args), }, loc, ), @@ -433,7 +442,7 @@ pub fn createExportsForFile( E.Call, E.Call{ .target = Expr.initIdentifier(toCommonJSRef, Loc.Empty), - .args = js_ast.ExprNodeList.init(call_args), + .args = js_ast.ExprNodeList.fromOwnedSlice(call_args), }, Loc.Empty, ), @@ -451,7 +460,7 @@ pub fn createExportsForFile( c.graph.ast.items(.parts)[id].slice()[js_ast.namespace_export_part_index] = .{ .stmts = if (c.options.output_format != .internal_bake_dev) all_export_stmts else &.{}, .symbol_uses = ns_export_symbol_uses, - .dependencies = js_ast.Dependency.List.fromList(ns_export_dependencies), + .dependencies = js_ast.Dependency.List.moveFromList(&ns_export_dependencies), .declared_symbols = declared_symbols, // This can be removed if nothing uses it diff --git a/src/bundler/linker_context/findImportedCSSFilesInJSOrder.zig b/src/bundler/linker_context/findImportedCSSFilesInJSOrder.zig index 5d4e995e80..6a60363634 100644 --- a/src/bundler/linker_context/findImportedCSSFilesInJSOrder.zig +++ b/src/bundler/linker_context/findImportedCSSFilesInJSOrder.zig @@ -68,7 +68,7 @@ pub fn findImportedCSSFilesInJSOrder(this: *LinkerContext, temp_allocator: std.m } if (is_css and source_index.isValid()) { - 
bun.handleOom(o.push(temp, source_index)); + bun.handleOom(o.append(temp, source_index)); } } }.visit; diff --git a/src/bundler/linker_context/findImportedFilesInCSSOrder.zig b/src/bundler/linker_context/findImportedFilesInCSSOrder.zig index 926179da68..2384be6932 100644 --- a/src/bundler/linker_context/findImportedFilesInCSSOrder.zig +++ b/src/bundler/linker_context/findImportedFilesInCSSOrder.zig @@ -63,7 +63,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem } } - visitor.visited.push( + visitor.visited.append( visitor.temp_allocator, source_index, ) catch |err| bun.handleOom(err); @@ -103,7 +103,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem var nested_import_records = bun.handleOom(wrapping_import_records.clone(visitor.allocator)); // Clone these import conditions and append them to the state - bun.handleOom(nested_conditions.push(visitor.allocator, rule.import.conditionsWithImportRecords(visitor.allocator, &nested_import_records))); + bun.handleOom(nested_conditions.append(visitor.allocator, rule.import.conditionsWithImportRecords(visitor.allocator, &nested_import_records))); visitor.visit(record.source_index, &nested_conditions, wrapping_import_records); continue; } @@ -121,8 +121,8 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem // merged. When this happens we need to generate a nested imported // CSS file using a data URL. 
if (rule.import.hasConditions()) { - bun.handleOom(all_conditions.push(visitor.allocator, rule.import.conditionsWithImportRecords(visitor.allocator, &all_import_records))); - visitor.order.push( + bun.handleOom(all_conditions.append(visitor.allocator, rule.import.conditionsWithImportRecords(visitor.allocator, &all_import_records))); + visitor.order.append( visitor.allocator, Chunk.CssImportOrder{ .kind = .{ @@ -133,7 +133,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem }, ) catch |err| bun.handleOom(err); } else { - visitor.order.push( + visitor.order.append( visitor.allocator, Chunk.CssImportOrder{ .kind = .{ @@ -169,7 +169,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem ); } // Accumulate imports in depth-first postorder - visitor.order.push(visitor.allocator, Chunk.CssImportOrder{ + visitor.order.append(visitor.allocator, Chunk.CssImportOrder{ .kind = .{ .source_index = source_index }, .conditions = wrapping_conditions.*, }) catch |err| bun.handleOom(err); @@ -208,7 +208,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem var is_at_layer_prefix = true; for (order.slice()) |*entry| { if ((entry.kind == .layers and is_at_layer_prefix) or entry.kind == .external_path) { - bun.handleOom(wip_order.push(temp_allocator, entry.*)); + bun.handleOom(wip_order.append(temp_allocator, entry.*)); } if (entry.kind != .layers) { is_at_layer_prefix = false; @@ -219,7 +219,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem is_at_layer_prefix = true; for (order.slice()) |*entry| { if ((entry.kind != .layers or !is_at_layer_prefix) and entry.kind != .external_path) { - bun.handleOom(wip_order.push(temp_allocator, entry.*)); + bun.handleOom(wip_order.append(temp_allocator, entry.*)); } if (entry.kind != .layers) { is_at_layer_prefix = false; @@ -261,7 +261,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem 
continue :next_backward; } } - bun.handleOom(gop.value_ptr.push(temp_allocator, i)); + bun.handleOom(gop.value_ptr.append(temp_allocator, i)); }, .external_path => |p| { const gop = bun.handleOom(external_path_duplicates.getOrPut(p.text)); @@ -279,7 +279,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem continue :next_backward; } } - bun.handleOom(gop.value_ptr.push(temp_allocator, i)); + bun.handleOom(gop.value_ptr.append(temp_allocator, i)); }, .layers => {}, } @@ -405,7 +405,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem if (index == layer_duplicates.len) { // This is the first time we've seen this combination of layer names. // Allocate a new set of duplicate indices to track this combination. - layer_duplicates.push(temp_allocator, DuplicateEntry{ + layer_duplicates.append(temp_allocator, DuplicateEntry{ .layers = layers_key, }) catch |err| bun.handleOom(err); } @@ -449,7 +449,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem // Non-layer entries still need to be present because they have // other side effects beside inserting things in the layer order - bun.handleOom(wip_order.push(temp_allocator, entry.*)); + bun.handleOom(wip_order.append(temp_allocator, entry.*)); } // Don't add this to the duplicate list below because it's redundant @@ -457,11 +457,11 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem } } - layer_duplicates.mut(index).indices.push( + layer_duplicates.mut(index).indices.append( temp_allocator, wip_order.len, ) catch |err| bun.handleOom(err); - bun.handleOom(wip_order.push(temp_allocator, entry.*)); + bun.handleOom(wip_order.append(temp_allocator, entry.*)); } debugCssOrder(this, &wip_order, .WHILE_OPTIMIZING_REDUNDANT_LAYER_RULES); @@ -484,7 +484,7 @@ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem did_clone = @intCast(prev_index); } // need to clone the layers here 
as they could be references to css ast - wip_order.mut(prev_index).kind.layers.toOwned(temp_allocator).append( + wip_order.mut(prev_index).kind.layers.toOwned(temp_allocator).appendSlice( temp_allocator, entry.kind.layers.inner().sliceConst(), ) catch |err| bun.handleOom(err); diff --git a/src/bundler/linker_context/generateCodeForFileInChunkJS.zig b/src/bundler/linker_context/generateCodeForFileInChunkJS.zig index a3486a0214..514abc7e0b 100644 --- a/src/bundler/linker_context/generateCodeForFileInChunkJS.zig +++ b/src/bundler/linker_context/generateCodeForFileInChunkJS.zig @@ -365,7 +365,7 @@ pub fn generateCodeForFileInChunkJS( }, Logger.Loc.Empty, ), - .args = bun.BabyList(Expr).init(cjs_args), + .args = bun.BabyList(Expr).fromOwnedSlice(cjs_args), }, Logger.Loc.Empty, ); @@ -388,7 +388,7 @@ pub fn generateCodeForFileInChunkJS( Stmt.alloc( S.Local, S.Local{ - .decls = G.Decl.List.init(decls), + .decls = G.Decl.List.fromOwnedSlice(decls), }, Logger.Loc.Empty, ), @@ -502,7 +502,7 @@ pub fn generateCodeForFileInChunkJS( Stmt.alloc( S.Local, S.Local{ - .decls = G.Decl.List.fromList(hoist.decls), + .decls = G.Decl.List.moveFromList(&hoist.decls), }, Logger.Loc.Empty, ), @@ -529,7 +529,7 @@ pub fn generateCodeForFileInChunkJS( // "var init_foo = __esm(...);" const value = Expr.init(E.Call, .{ .target = Expr.initIdentifier(c.esm_runtime_ref, Logger.Loc.Empty), - .args = bun.BabyList(Expr).init(esm_args), + .args = bun.BabyList(Expr).fromOwnedSlice(esm_args), }, Logger.Loc.Empty); var decls = bun.handleOom(temp_allocator.alloc(G.Decl, 1)); @@ -546,7 +546,7 @@ pub fn generateCodeForFileInChunkJS( stmts.outside_wrapper_prefix.append( Stmt.alloc(S.Local, .{ - .decls = G.Decl.List.init(decls), + .decls = G.Decl.List.fromOwnedSlice(decls), }, Logger.Loc.Empty), ) catch |err| bun.handleOom(err); } else { @@ -642,12 +642,12 @@ fn mergeAdjacentLocalStmts(stmts: *std.ArrayList(Stmt), allocator: std.mem.Alloc if (did_merge_with_previous_local) { // Avoid O(n^2) behavior for 
repeated variable declarations // Appending to this decls list is safe because did_merge_with_previous_local is true - before.decls.append(allocator, after.decls.slice()) catch unreachable; + before.decls.appendSlice(allocator, after.decls.slice()) catch unreachable; } else { // Append the declarations to the previous variable statement did_merge_with_previous_local = true; - var clone = std.ArrayList(G.Decl).initCapacity(allocator, before.decls.len + after.decls.len) catch unreachable; + var clone = bun.BabyList(G.Decl).initCapacity(allocator, before.decls.len + after.decls.len) catch unreachable; clone.appendSliceAssumeCapacity(before.decls.slice()); clone.appendSliceAssumeCapacity(after.decls.slice()); // we must clone instead of overwrite in-place incase the same S.Local is used across threads @@ -656,7 +656,7 @@ fn mergeAdjacentLocalStmts(stmts: *std.ArrayList(Stmt), allocator: std.mem.Alloc allocator, S.Local, S.Local{ - .decls = BabyList(G.Decl).fromList(clone), + .decls = clone, .is_export = before.is_export, .was_commonjs_export = before.was_commonjs_export, .was_ts_import_equals = before.was_ts_import_equals, diff --git a/src/bundler/linker_context/generateCompileResultForCssChunk.zig b/src/bundler/linker_context/generateCompileResultForCssChunk.zig index 86546e0658..5ef7dce172 100644 --- a/src/bundler/linker_context/generateCompileResultForCssChunk.zig +++ b/src/bundler/linker_context/generateCompileResultForCssChunk.zig @@ -68,7 +68,9 @@ fn generateCompileResultForCssChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCo }; }, .external_path => { - var import_records = BabyList(ImportRecord).init(css_import.condition_import_records.sliceConst()); + var import_records = BabyList(ImportRecord).fromBorrowedSliceDangerous( + css_import.condition_import_records.sliceConst(), + ); const printer_options = bun.css.PrinterOptions{ // TODO: make this more configurable .minify = c.options.minify_whitespace, diff --git 
a/src/bundler/linker_context/postProcessJSChunk.zig b/src/bundler/linker_context/postProcessJSChunk.zig index 2328af78f4..ccbc8754d5 100644 --- a/src/bundler/linker_context/postProcessJSChunk.zig +++ b/src/bundler/linker_context/postProcessJSChunk.zig @@ -43,7 +43,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu }; var cross_chunk_import_records = ImportRecord.List.initCapacity(worker.allocator, chunk.cross_chunk_imports.len) catch unreachable; - defer cross_chunk_import_records.deinitWithAllocator(worker.allocator); + defer cross_chunk_import_records.deinit(worker.allocator); for (chunk.cross_chunk_imports.slice()) |import_record| { cross_chunk_import_records.appendAssumeCapacity( .{ diff --git a/src/bundler/linker_context/prepareCssAstsForChunk.zig b/src/bundler/linker_context/prepareCssAstsForChunk.zig index 3c217eb563..c92b3cdc8f 100644 --- a/src/bundler/linker_context/prepareCssAstsForChunk.zig +++ b/src/bundler/linker_context/prepareCssAstsForChunk.zig @@ -50,7 +50,7 @@ fn prepareCssAstsForChunkImpl(c: *LinkerContext, chunk: *Chunk, allocator: std.m var conditions: ?*bun.css.ImportConditions = null; if (entry.conditions.len > 0) { conditions = entry.conditions.mut(0); - entry.condition_import_records.push( + entry.condition_import_records.append( allocator, bun.ImportRecord{ .kind = .at, .path = p.*, .range = Logger.Range{} }, ) catch |err| bun.handleOom(err); @@ -118,7 +118,7 @@ fn prepareCssAstsForChunkImpl(c: *LinkerContext, chunk: *Chunk, allocator: std.m var empty_conditions = bun.css.ImportConditions{}; const actual_conditions = if (conditions) |cc| cc else &empty_conditions; - entry.condition_import_records.push(allocator, bun.ImportRecord{ + entry.condition_import_records.append(allocator, bun.ImportRecord{ .kind = .at, .path = p.*, .range = Logger.Range.none, diff --git a/src/bundler/linker_context/scanImportsAndExports.zig b/src/bundler/linker_context/scanImportsAndExports.zig index 64bd1f6cb4..921f4fd74c 100644 
--- a/src/bundler/linker_context/scanImportsAndExports.zig +++ b/src/bundler/linker_context/scanImportsAndExports.zig @@ -372,6 +372,10 @@ pub fn scanImportsAndExports(this: *LinkerContext) !void { LinkerContext.doStep5, this.graph.reachable_files, ); + + // Some parts of the AST may now be owned by worker allocators. Transfer ownership back + // to the graph allocator. + this.graph.takeAstOwnership(); } if (comptime FeatureFlags.help_catch_memory_issues) { @@ -537,10 +541,7 @@ pub fn scanImportsAndExports(this: *LinkerContext) !void { const total_len = parts_declaring_symbol.len + @as(usize, import.re_exports.len) + @as(usize, part.dependencies.len); if (part.dependencies.cap < total_len) { - var list = std.ArrayList(Dependency).init(this.allocator()); - list.ensureUnusedCapacity(total_len) catch unreachable; - list.appendSliceAssumeCapacity(part.dependencies.slice()); - part.dependencies.update(list); + bun.handleOom(part.dependencies.ensureTotalCapacity(this.allocator(), total_len)); } // Depend on the file containing the imported symbol @@ -618,7 +619,7 @@ pub fn scanImportsAndExports(this: *LinkerContext) !void { const entry_point_part_index = this.graph.addPartToFile( id, .{ - .dependencies = js_ast.Dependency.List.fromList(dependencies), + .dependencies = js_ast.Dependency.List.moveFromList(&dependencies), .can_be_removed_if_unused = false, }, ) catch |err| bun.handleOom(err); @@ -1020,7 +1021,7 @@ const ExportStarContext = struct { }) catch |err| bun.handleOom(err); } else if (gop.value_ptr.data.source_index.get() != other_source_index) { // Two different re-exports colliding makes it potentially ambiguous - gop.value_ptr.potentially_ambiguous_export_star_refs.push(this.allocator, .{ + gop.value_ptr.potentially_ambiguous_export_star_refs.append(this.allocator, .{ .data = .{ .source_index = Index.source(other_source_index), .import_ref = name.ref, diff --git a/src/cli.zig b/src/cli.zig index 2f3312f07b..6de4fe4401 100644 --- a/src/cli.zig +++ b/src/cli.zig 
@@ -217,6 +217,21 @@ pub const HelpCommand = struct { switch (reason) { .explicit => { + if (comptime Environment.isDebug) { + if (bun.argv.len == 1) { + if (bun.Output.isAIAgent()) { + if (bun.getenvZ("npm_lifecycle_event")) |event| { + if (bun.strings.hasPrefixComptime(event, "bd")) { + // claude gets very confused by the help menu + // let's give claude some self confidence. + Output.println("BUN COMPILED SUCCESSFULLY! 🎉", .{}); + Global.exit(0); + } + } + } + } + } + Output.pretty( "Bun is a fast JavaScript runtime, package manager, bundler, and test runner. (" ++ Global.package_json_version_with_revision ++ @@ -380,6 +395,8 @@ pub const Command = struct { runtime_options: RuntimeOptions = .{}, filters: []const []const u8 = &.{}, + workspaces: bool = false, + if_present: bool = false, preloads: []const string = &.{}, has_loaded_global_config: bool = false, @@ -815,7 +832,7 @@ pub const Command = struct { const ctx = try Command.init(allocator, log, .RunCommand); ctx.args.target = .bun; - if (ctx.filters.len > 0) { + if (ctx.filters.len > 0 or ctx.workspaces) { FilterRun.runScriptsWithFilter(ctx) catch |err| { Output.prettyErrorln("error: {s}", .{@errorName(err)}); Global.exit(1); @@ -854,7 +871,7 @@ pub const Command = struct { }; ctx.args.target = .bun; - if (ctx.filters.len > 0) { + if (ctx.filters.len > 0 or ctx.workspaces) { FilterRun.runScriptsWithFilter(ctx) catch |err| { Output.prettyErrorln("error: {s}", .{@errorName(err)}); Global.exit(1); diff --git a/src/cli/Arguments.zig b/src/cli/Arguments.zig index ed5ac1be7c..d1680758ad 100644 --- a/src/cli/Arguments.zig +++ b/src/cli/Arguments.zig @@ -116,6 +116,7 @@ pub const auto_or_run_params = [_]ParamType{ clap.parseParam("-F, --filter ... 
Run a script in all workspace packages matching the pattern") catch unreachable, clap.parseParam("-b, --bun Force a script or package to use Bun's runtime instead of Node.js (via symlinking node)") catch unreachable, clap.parseParam("--shell Control the shell used for package.json scripts. Supports either 'bun' or 'system'") catch unreachable, + clap.parseParam("--workspaces Run a script in all workspace packages (from the \"workspaces\" field in package.json)") catch unreachable, }; pub const auto_only_params = [_]ParamType{ @@ -387,6 +388,8 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C if (cmd == .RunCommand or cmd == .AutoCommand) { ctx.filters = args.options("--filter"); + ctx.workspaces = args.flag("--workspaces"); + ctx.if_present = args.flag("--if-present"); if (args.option("--elide-lines")) |elide_lines| { if (elide_lines.len > 0) { diff --git a/src/cli/audit_command.zig b/src/cli/audit_command.zig index 7cf18672e5..cd7f80d27a 100644 --- a/src/cli/audit_command.zig +++ b/src/cli/audit_command.zig @@ -552,10 +552,23 @@ fn findDependencyPaths( .is_direct = false, }; - var trace = current; + var trace = current.*; + var seen_in_trace = bun.StringHashMap(void).init(allocator); + defer seen_in_trace.deinit(); + while (true) { - try path.path.insert(0, try allocator.dupe(u8, trace.*)); - if (parent_map.get(trace.*)) |*parent| { + // Check for cycle before processing + if (seen_in_trace.contains(trace)) { + // Cycle detected, stop tracing + break; + } + + // Add to path and mark as seen + try path.path.insert(0, try allocator.dupe(u8, trace)); + try seen_in_trace.put(trace, {}); + + // Get parent for next iteration + if (parent_map.get(trace)) |parent| { trace = parent; } else { break; diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index 854d90d78e..2fdca20a9d 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -666,7 +666,7 @@ pub const CreateCommand = struct { break 
:process_package_json; } - const properties_list = std.ArrayList(js_ast.G.Property).fromOwnedSlice(default_allocator, package_json_expr.data.e_object.properties.slice()); + var properties_list = std.ArrayList(js_ast.G.Property).fromOwnedSlice(default_allocator, package_json_expr.data.e_object.properties.slice()); if (ctx.log.errors > 0) { try ctx.log.print(Output.errorWriter()); @@ -744,7 +744,7 @@ pub const CreateCommand = struct { // has_react_scripts = has_react_scripts or property.hasAnyPropertyNamed(&.{"react-scripts"}); // has_relay = has_relay or property.hasAnyPropertyNamed(&.{ "react-relay", "relay-runtime", "babel-plugin-relay" }); - // property.data.e_object.properties = js_ast.G.Property.List.init(Prune.prune(property.data.e_object.properties.slice())); + // property.data.e_object.properties = js_ast.G.Property.List.fromBorrowedSliceDangerous(Prune.prune(property.data.e_object.properties.slice())); if (property.data.e_object.properties.len > 0) { has_dependencies = true; dev_dependencies = q.expr; @@ -765,8 +765,7 @@ pub const CreateCommand = struct { // has_react_scripts = has_react_scripts or property.hasAnyPropertyNamed(&.{"react-scripts"}); // has_relay = has_relay or property.hasAnyPropertyNamed(&.{ "react-relay", "relay-runtime", "babel-plugin-relay" }); - // property.data.e_object.properties = js_ast.G.Property.List.init(Prune.prune(property.data.e_object.properties.slice())); - property.data.e_object.properties = js_ast.G.Property.List.init(property.data.e_object.properties.slice()); + // property.data.e_object.properties = js_ast.G.Property.List.fromBorrowedSliceDangerous(Prune.prune(property.data.e_object.properties.slice())); if (property.data.e_object.properties.len > 0) { has_dependencies = true; @@ -1052,9 +1051,12 @@ pub const CreateCommand = struct { pub const bun_bun_for_nextjs_task: string = "bun bun --use next"; }; - InjectionPrefill.bun_macro_relay_object.properties = 
js_ast.G.Property.List.init(InjectionPrefill.bun_macro_relay_properties[0..]); - InjectionPrefill.bun_macros_relay_object.properties = js_ast.G.Property.List.init(&InjectionPrefill.bun_macros_relay_object_properties); - InjectionPrefill.bun_macros_relay_only_object.properties = js_ast.G.Property.List.init(&InjectionPrefill.bun_macros_relay_only_object_properties); + InjectionPrefill.bun_macro_relay_object.properties = js_ast.G.Property.List + .fromBorrowedSliceDangerous(InjectionPrefill.bun_macro_relay_properties[0..]); + InjectionPrefill.bun_macros_relay_object.properties = js_ast.G.Property.List + .fromBorrowedSliceDangerous(&InjectionPrefill.bun_macros_relay_object_properties); + InjectionPrefill.bun_macros_relay_only_object.properties = js_ast.G.Property.List + .fromBorrowedSliceDangerous(&InjectionPrefill.bun_macros_relay_only_object_properties); // if (needs_to_inject_dev_dependency and dev_dependencies == null) { // var e_object = try ctx.allocator.create(E.Object); @@ -1264,7 +1266,7 @@ pub const CreateCommand = struct { package_json_expr.data.e_object.is_single_line = false; - package_json_expr.data.e_object.properties = js_ast.G.Property.List.fromList(properties_list); + package_json_expr.data.e_object.properties = js_ast.G.Property.List.moveFromList(&properties_list); { var i: usize = 0; var property_i: usize = 0; @@ -1303,7 +1305,9 @@ pub const CreateCommand = struct { script_property_out_i += 1; } - property.value.?.data.e_object.properties = js_ast.G.Property.List.init(scripts_properties[0..script_property_out_i]); + property.value.?.data.e_object.properties = js_ast.G.Property.List.fromBorrowedSliceDangerous( + scripts_properties[0..script_property_out_i], + ); } } @@ -1382,7 +1386,7 @@ pub const CreateCommand = struct { } } } - package_json_expr.data.e_object.properties = js_ast.G.Property.List.init(package_json_expr.data.e_object.properties.ptr[0..property_i]); + package_json_expr.data.e_object.properties.shrinkRetainingCapacity(property_i); } 
const file: bun.FD = .fromStdFile(package_json_file.?); diff --git a/src/cli/filter_run.zig b/src/cli/filter_run.zig index 62843379a4..dbde2c4f6f 100644 --- a/src/cli/filter_run.zig +++ b/src/cli/filter_run.zig @@ -433,7 +433,15 @@ pub fn runScriptsWithFilter(ctx: Command.Context) !noreturn { const fsinstance = try bun.fs.FileSystem.init(null); // these things are leaked because we are going to exit - var filter_instance = try FilterArg.FilterSet.init(ctx.allocator, ctx.filters, fsinstance.top_level_dir); + // When --workspaces is set, we want to match all workspace packages + // Otherwise use the provided filters + var filters_to_use = ctx.filters; + if (ctx.workspaces) { + // Use "*" as filter to match all packages in the workspace + filters_to_use = &.{"*"}; + } + + var filter_instance = try FilterArg.FilterSet.init(ctx.allocator, filters_to_use, fsinstance.top_level_dir); var patterns = std.ArrayList([]u8).init(ctx.allocator); // Find package.json at workspace root @@ -453,6 +461,11 @@ pub fn runScriptsWithFilter(ctx: Command.Context) !noreturn { const dirpath = std.fs.path.dirname(package_json_path) orelse Global.crash(); const path = bun.strings.withoutTrailingSlash(dirpath); + // When using --workspaces, skip the root package to prevent recursion + if (ctx.workspaces and strings.eql(path, resolve_root)) { + continue; + } + const pkgjson = bun.PackageJSON.parse(&this_transpiler.resolver, dirpath, .invalid, null, .include_scripts, .main) orelse { Output.warn("Failed to read package.json\n", .{}); continue; @@ -465,8 +478,15 @@ pub fn runScriptsWithFilter(ctx: Command.Context) !noreturn { const PATH = try RunCommand.configurePathForRunWithPackageJsonDir(ctx, dirpath, &this_transpiler, null, dirpath, ctx.debug.run_in_bun); - for (&[3][]const u8{ pre_script_name, script_name, post_script_name }) |name| { - const original_content = pkgscripts.get(name) orelse continue; + for (&[3][]const u8{ pre_script_name, script_name, post_script_name }, 0..) 
|name, i| { + const original_content = pkgscripts.get(name) orelse { + if (i == 1 and ctx.workspaces and !ctx.if_present) { + Output.errGeneric("Missing '{s}' script at '{s}'", .{ script_name, path }); + Global.exit(1); + } + + continue; + }; var copy_script_capacity: usize = original_content.len; for (ctx.passthrough) |part| copy_script_capacity += 1 + part.len; @@ -500,7 +520,15 @@ pub fn runScriptsWithFilter(ctx: Command.Context) !noreturn { } if (scripts.items.len == 0) { - Output.prettyErrorln("error: No packages matched the filter", .{}); + if (ctx.if_present) { + // Exit silently with success when --if-present is set + Global.exit(0); + } + if (ctx.workspaces) { + Output.errGeneric("No workspace packages have script \"{s}\"", .{script_name}); + } else { + Output.errGeneric("No packages matched the filter", .{}); + } Global.exit(1); } @@ -648,6 +676,7 @@ const bun = @import("bun"); const Environment = bun.Environment; const Global = bun.Global; const Output = bun.Output; +const strings = bun.strings; const transpiler = bun.transpiler; const CLI = bun.cli; diff --git a/src/cli/pm_pkg_command.zig b/src/cli/pm_pkg_command.zig index a694398c0f..cde17580f6 100644 --- a/src/cli/pm_pkg_command.zig +++ b/src/cli/pm_pkg_command.zig @@ -713,7 +713,7 @@ pub const PmPkgCommand = struct { } if (!found) return false; - var new_props: std.ArrayList(js_ast.G.Property) = try .initCapacity(allocator, old_props.len - 1); + var new_props: bun.BabyList(js_ast.G.Property) = try .initCapacity(allocator, old_props.len - 1); for (old_props) |prop| { if (prop.key) |k| { switch (k.data) { @@ -727,8 +727,7 @@ pub const PmPkgCommand = struct { } new_props.appendAssumeCapacity(prop); } - const new_list = js_ast.G.Property.List.fromList(new_props); - obj.data.e_object.properties = new_list; + obj.data.e_object.properties = new_props; return true; } diff --git a/src/cli/pm_view_command.zig b/src/cli/pm_view_command.zig index ec06f94be6..3ab8552edf 100644 --- a/src/cli/pm_view_command.zig 
+++ b/src/cli/pm_view_command.zig @@ -193,7 +193,7 @@ pub fn view(allocator: std.mem.Allocator, manager: *PackageManager, spec_: strin const versions_array = bun.ast.Expr.init( bun.ast.E.Array, bun.ast.E.Array{ - .items = .init(keys), + .items = .fromOwnedSlice(keys), }, .{ .start = -1 }, ); diff --git a/src/cli/publish_command.zig b/src/cli/publish_command.zig index 015b66735a..1c9fcf9f8d 100644 --- a/src/cli/publish_command.zig +++ b/src/cli/publish_command.zig @@ -900,7 +900,7 @@ pub const PublishCommand = struct { try json.set(allocator, "dist", Expr.init( E.Object, - .{ .properties = G.Property.List.init(dist_props) }, + .{ .properties = G.Property.List.fromOwnedSlice(dist_props) }, logger.Loc.Empty, )); @@ -988,7 +988,7 @@ pub const PublishCommand = struct { json.data.e_object.properties.ptr[bin_query.i].value = Expr.init( E.Object, .{ - .properties = G.Property.List.fromList(bin_props), + .properties = G.Property.List.moveFromList(&bin_props), }, logger.Loc.Empty, ); @@ -1064,7 +1064,7 @@ pub const PublishCommand = struct { json.data.e_object.properties.ptr[bin_query.i].value = Expr.init( E.Object, - .{ .properties = G.Property.List.fromList(bin_props) }, + .{ .properties = G.Property.List.moveFromList(&bin_props) }, logger.Loc.Empty, ); }, @@ -1153,7 +1153,11 @@ pub const PublishCommand = struct { } } - try json.set(allocator, "bin", Expr.init(E.Object, .{ .properties = G.Property.List.fromList(bin_props) }, logger.Loc.Empty)); + try json.set(allocator, "bin", Expr.init( + E.Object, + .{ .properties = G.Property.List.moveFromList(&bin_props) }, + logger.Loc.Empty, + )); } } diff --git a/src/collections.zig b/src/collections.zig index be939b0e03..5cfbc74de1 100644 --- a/src/collections.zig +++ b/src/collections.zig @@ -1,6 +1,8 @@ pub const MultiArrayList = @import("./collections/multi_array_list.zig").MultiArrayList; -pub const BabyList = @import("./collections/baby_list.zig").BabyList; -pub const OffsetList = 
@import("./collections/baby_list.zig").OffsetList; +pub const baby_list = @import("./collections/baby_list.zig"); +pub const BabyList = baby_list.BabyList; +pub const ByteList = baby_list.ByteList; // alias of BabyList(u8) +pub const OffsetByteList = baby_list.OffsetByteList; pub const bit_set = @import("./collections/bit_set.zig"); pub const HiveArray = @import("./collections/hive_array.zig").HiveArray; -pub const BoundedArray = @import("./collections/BoundedArray.zig").BoundedArray; +pub const BoundedArray = @import("./collections/bounded_array.zig").BoundedArray; diff --git a/src/collections/baby_list.zig b/src/collections/baby_list.zig index a41a6fd8f8..57feab74a6 100644 --- a/src/collections/baby_list.zig +++ b/src/collections/baby_list.zig @@ -1,62 +1,288 @@ /// This is like ArrayList except it stores the length and capacity as u32 /// In practice, it is very unusual to have lengths above 4 GiB pub fn BabyList(comptime Type: type) type { + const Origin = union(enum) { + owned, + borrowed: struct { + trace: if (traces_enabled) StoredTrace else void, + }, + }; + return struct { const Self = @This(); // NOTE: If you add, remove, or rename any public fields, you need to update // `looksLikeListContainerType` in `meta.zig`. - ptr: [*]Type = &[_]Type{}, + + /// Don't access this field directly, as it's not safety-checked. Use `.slice()`, `.at()`, + /// or `.mut()`. 
+ ptr: [*]Type = &.{}, len: u32 = 0, cap: u32 = 0, + #origin: if (safety_checks) Origin else void = if (safety_checks) .owned, #allocator: bun.safety.CheckedAllocator = .{}, pub const Elem = Type; - pub fn parse(input: *bun.css.Parser) bun.css.Result(Self) { - return switch (input.parseCommaSeparated(Type, bun.css.generic.parseFor(Type))) { - .result => |v| return .{ .result = Self{ - .ptr = v.items.ptr, - .len = @intCast(v.items.len), - .cap = @intCast(v.capacity), - } }, - .err => |e| return .{ .err = e }, + pub const empty: Self = .{}; + + pub fn initCapacity(allocator: std.mem.Allocator, len: usize) OOM!Self { + var this = initWithBuffer(try allocator.alloc(Type, len)); + this.#allocator.set(allocator); + return this; + } + + pub fn initOne(allocator: std.mem.Allocator, value: Type) OOM!Self { + var items = try allocator.alloc(Type, 1); + items[0] = value; + return .{ + .ptr = @as([*]Type, @ptrCast(items.ptr)), + .len = 1, + .cap = 1, + .#allocator = .init(allocator), }; } - pub fn toCss(this: *const Self, comptime W: type, dest: *bun.css.Printer(W)) bun.css.PrintErr!void { - return bun.css.to_css.fromBabyList(Type, this, W, dest); - } + pub fn moveFromList(list_ptr: anytype) Self { + const ListType = std.meta.Child(@TypeOf(list_ptr)); - pub fn eql(lhs: *const Self, rhs: *const Self) bool { - if (lhs.len != rhs.len) return false; - for (lhs.sliceConst(), rhs.sliceConst()) |*a, *b| { - if (!bun.css.generic.eql(Type, a, b)) return false; + if (comptime ListType == Self) { + @compileError("unnecessary call to `moveFromList`"); } - return true; + + const unsupported_arg_msg = "unsupported argument to `moveFromList`: *" ++ + @typeName(ListType); + + const items = if (comptime @hasField(ListType, "items")) + list_ptr.items + else if (comptime std.meta.hasFn(ListType, "slice")) + list_ptr.slice() + else + @compileError(unsupported_arg_msg); + + const capacity = if (comptime @hasField(ListType, "capacity")) + list_ptr.capacity + else if (comptime @hasField(ListType, 
"cap")) + list_ptr.cap + else if (comptime std.meta.hasFn(ListType, "capacity")) + list_ptr.capacity() + else + @compileError(unsupported_arg_msg); + + if (comptime Environment.allow_assert) { + bun.assert(items.len <= capacity); + } + + var this: Self = .{ + .ptr = items.ptr, + .len = @intCast(items.len), + .cap = @intCast(capacity), + }; + + const allocator = if (comptime @hasField(ListType, "allocator")) + list_ptr.allocator + else if (comptime std.meta.hasFn(ListType, "allocator")) + list_ptr.allocator(); + + if (comptime @TypeOf(allocator) == void) { + list_ptr.* = .empty; + } else { + this.#allocator.set(bun.allocators.asStd(allocator)); + list_ptr.* = .init(allocator); + } + return this; } - pub fn set(this: *@This(), slice_: []Type) void { - this.ptr = slice_.ptr; - this.len = @intCast(slice_.len); - this.cap = @intCast(slice_.len); + /// Requirements: + /// + /// * `items` must be owned memory, allocated with some allocator. That same allocator must + /// be passed to methods that expect it, like `append`. + /// + /// * `items` must be the *entire* region of allocated memory. It cannot be a subslice. + /// If you really need an owned subslice, use `shrinkRetainingCapacity` followed by + /// `toOwnedSlice` on an `ArrayList`. + pub fn fromOwnedSlice(items: []Type) Self { + return .{ + .ptr = items.ptr, + .len = @intCast(items.len), + .cap = @intCast(items.len), + }; } - pub fn available(this: *Self) []Type { - return this.ptr[this.len..this.cap]; + /// Same requirements as `fromOwnedSlice`. + pub fn initWithBuffer(buffer: []Type) Self { + return .{ + .ptr = buffer.ptr, + .len = 0, + .cap = @intCast(buffer.len), + }; } - pub fn deinitWithAllocator(this: *Self, allocator: std.mem.Allocator) void { + /// Copies all elements of `items` into new memory. Creates shallow copies. 
+ pub fn fromSlice(allocator: std.mem.Allocator, items: []const Type) OOM!Self { + const allocated = try allocator.alloc(Type, items.len); + bun.copy(Type, allocated, items); + + return Self{ + .ptr = allocated.ptr, + .len = @intCast(allocated.len), + .cap = @intCast(allocated.len), + .#allocator = .init(allocator), + }; + } + + /// This method invalidates the `BabyList`. Use `clearAndFree` if you want to empty the + /// list instead. + pub fn deinit(this: *Self, allocator: std.mem.Allocator) void { + this.assertOwned(); this.listManaged(allocator).deinit(); + this.* = undefined; + } + + pub fn clearAndFree(this: *Self, allocator: std.mem.Allocator) void { + this.deinit(allocator); this.* = .{}; } - pub fn shrinkAndFree(this: *Self, allocator: std.mem.Allocator, size: usize) void { + pub fn clearRetainingCapacity(this: *Self) void { + this.len = 0; + } + + pub fn slice(this: Self) callconv(bun.callconv_inline) []Type { + return this.ptr[0..this.len]; + } + + /// Same as `.slice()`, with an explicit coercion to const. + pub fn sliceConst(this: Self) callconv(bun.callconv_inline) []const Type { + return this.slice(); + } + + pub fn at(this: Self, index: usize) callconv(bun.callconv_inline) *const Type { + bun.assert(index < this.len); + return &this.ptr[index]; + } + + pub fn mut(this: Self, index: usize) callconv(bun.callconv_inline) *Type { + bun.assert(index < this.len); + return &this.ptr[index]; + } + + pub fn first(this: Self) callconv(bun.callconv_inline) ?*Type { + return if (this.len > 0) &this.ptr[0] else null; + } + + pub fn last(this: Self) callconv(bun.callconv_inline) ?*Type { + return if (this.len > 0) &this.ptr[this.len - 1] else null; + } + + /// Empties the `BabyList`. 
+ pub fn toOwnedSlice(this: *Self, allocator: std.mem.Allocator) OOM![]Type { + if ((comptime safety_checks) and this.len != this.cap) this.assertOwned(); var list_ = this.listManaged(allocator); - list_.shrinkAndFree(size); + const result = try list_.toOwnedSlice(); + this.* = .empty; + return result; + } + + pub fn moveToList(this: *Self) std.ArrayListUnmanaged(Type) { + this.assertOwned(); + defer this.* = .empty; + return this.list(); + } + + pub fn moveToListManaged(this: *Self, allocator: std.mem.Allocator) std.ArrayList(Type) { + this.assertOwned(); + defer this.* = .empty; + return this.listManaged(allocator); + } + + pub fn expandToCapacity(this: *Self) void { + this.len = this.cap; + } + + pub fn ensureTotalCapacity( + this: *Self, + allocator: std.mem.Allocator, + new_capacity: usize, + ) !void { + if ((comptime safety_checks) and new_capacity > this.cap) this.assertOwned(); + var list_ = this.listManaged(allocator); + try list_.ensureTotalCapacity(new_capacity); this.update(list_); } + pub fn ensureTotalCapacityPrecise( + this: *Self, + allocator: std.mem.Allocator, + new_capacity: usize, + ) !void { + if ((comptime safety_checks) and new_capacity > this.cap) this.assertOwned(); + var list_ = this.listManaged(allocator); + try list_.ensureTotalCapacityPrecise(new_capacity); + this.update(list_); + } + + pub fn ensureUnusedCapacity( + this: *Self, + allocator: std.mem.Allocator, + count: usize, + ) OOM!void { + if ((comptime safety_checks) and count > this.cap - this.len) this.assertOwned(); + var list_ = this.listManaged(allocator); + try list_.ensureUnusedCapacity(count); + this.update(list_); + } + + pub fn shrinkAndFree(this: *Self, allocator: std.mem.Allocator, new_len: usize) void { + if ((comptime safety_checks) and new_len < this.cap) this.assertOwned(); + var list_ = this.listManaged(allocator); + list_.shrinkAndFree(new_len); + this.update(list_); + } + + pub fn shrinkRetainingCapacity(this: *Self, new_len: usize) void { + bun.assertf( + 
new_len <= this.len, + "shrinkRetainingCapacity: new len ({d}) cannot exceed old ({d})", + .{ new_len, this.len }, + ); + this.len = @intCast(new_len); + } + + pub fn append(this: *Self, allocator: std.mem.Allocator, value: Type) OOM!void { + if ((comptime safety_checks) and this.len == this.cap) this.assertOwned(); + var list_ = this.listManaged(allocator); + try list_.append(value); + this.update(list_); + } + + pub fn appendAssumeCapacity(this: *Self, value: Type) void { + bun.assert(this.cap > this.len); + this.ptr[this.len] = value; + this.len += 1; + } + + pub fn appendSlice(this: *Self, allocator: std.mem.Allocator, vals: []const Type) !void { + if ((comptime safety_checks) and this.cap - this.len < vals.len) this.assertOwned(); + var list_ = this.listManaged(allocator); + try list_.appendSlice(vals); + this.update(list_); + } + + pub fn appendSliceAssumeCapacity(this: *Self, values: []const Type) void { + bun.assert(this.cap >= this.len + @as(u32, @intCast(values.len))); + const tail = this.ptr[this.len .. 
this.len + values.len]; + bun.copy(Type, tail, values); + this.len += @intCast(values.len); + bun.assert(this.cap >= this.len); + } + + pub fn pop(this: *Self) ?Type { + if (this.len == 0) return null; + this.len -= 1; + return this.ptr[this.len]; + } + pub fn orderedRemove(this: *Self, index: usize) Type { var l = this.list(); defer this.update(l); @@ -69,70 +295,23 @@ pub fn BabyList(comptime Type: type) type { return l.swapRemove(index); } - pub fn sortAsc(this: *Self) void { - bun.strings.sortAsc(this.slice()); - } - - pub fn contains(this: Self, item: []const Type) bool { - return this.len > 0 and @intFromPtr(item.ptr) >= @intFromPtr(this.ptr) and @intFromPtr(item.ptr) < @intFromPtr(this.ptr) + this.len; - } - - pub fn initConst(items: []const Type) callconv(bun.callconv_inline) Self { - @setRuntimeSafety(false); - return Self{ - // Remove the const qualifier from the items - .ptr = @constCast(items.ptr), - .len = @intCast(items.len), - .cap = @intCast(items.len), - }; - } - - pub fn ensureUnusedCapacity(this: *Self, allocator: std.mem.Allocator, count: usize) !void { + pub fn insert(this: *Self, allocator: std.mem.Allocator, index: usize, val: Type) OOM!void { + if ((comptime safety_checks) and this.len == this.cap) this.assertOwned(); var list_ = this.listManaged(allocator); - try list_.ensureUnusedCapacity(count); + try list_.insert(index, val); this.update(list_); } - pub fn pop(this: *Self) ?Type { - if (this.len == 0) return null; - this.len -= 1; - return this.ptr[this.len]; - } - - pub fn clone(this: Self, allocator: std.mem.Allocator) !Self { - const copy = try this.list().clone(allocator); - return Self{ - .ptr = copy.items.ptr, - .len = @intCast(copy.items.len), - .cap = @intCast(copy.capacity), - }; - } - - pub fn deepClone(this: Self, allocator: std.mem.Allocator) !Self { - if (!@hasDecl(Type, "deepClone")) { - @compileError("Unsupported type for BabyList.deepClone(): " ++ @typeName(Type)); - } - - var list_ = try initCapacity(allocator, 
this.len); - for (this.slice()) |item| { - const clone_result = item.deepClone(allocator); - const cloned_item = switch (comptime @typeInfo(@TypeOf(clone_result))) { - .error_union => try clone_result, - else => clone_result, - }; - list_.appendAssumeCapacity(cloned_item); - } - return list_; - } - - /// Same as `deepClone` but calls `bun.outOfMemory` instead of returning an error. - /// `Type.deepClone` must not return any error except `error.OutOfMemory`. - pub fn deepCloneInfallible(this: Self, allocator: std.mem.Allocator) Self { - return bun.handleOom(this.deepClone(allocator)); - } - - pub fn clearRetainingCapacity(this: *Self) void { - this.len = 0; + pub fn insertSlice( + this: *Self, + allocator: std.mem.Allocator, + index: usize, + vals: []const Type, + ) OOM!void { + if ((comptime safety_checks) and this.cap - this.len < vals.len) this.assertOwned(); + var list_ = this.listManaged(allocator); + try list_.insertSlice(index, vals); + this.update(list_); } pub fn replaceRange( @@ -141,201 +320,70 @@ pub fn BabyList(comptime Type: type) type { start: usize, len_: usize, new_items: []const Type, - ) !void { + ) OOM!void { var list_ = this.listManaged(allocator); try list_.replaceRange(start, len_, new_items); } - pub fn appendAssumeCapacity(this: *Self, value: Type) void { - bun.assert(this.cap > this.len); - this.ptr[this.len] = value; - this.len += 1; + pub fn clone(this: Self, allocator: std.mem.Allocator) OOM!Self { + var copy = try this.list().clone(allocator); + return .moveFromList(©); } - pub fn writableSlice(this: *Self, allocator: std.mem.Allocator, cap: usize) ![]Type { + pub fn unusedCapacitySlice(this: Self) []Type { + return this.ptr[this.len..this.cap]; + } + + pub fn contains(this: Self, item: []const Type) bool { + return this.len > 0 and + @intFromPtr(item.ptr) >= @intFromPtr(this.ptr) and + @intFromPtr(item.ptr) < @intFromPtr(this.ptr) + this.len; + } + + pub fn sortAsc(this: *Self) void { + bun.strings.sortAsc(this.slice()); + } + + pub fn 
writableSlice( + this: *Self, + allocator: std.mem.Allocator, + additional: usize, + ) OOM![]Type { + if ((comptime safety_checks) and additional > this.cap - this.len) this.assertOwned(); var list_ = this.listManaged(allocator); - try list_.ensureUnusedCapacity(cap); - const writable = list_.items.ptr[this.len .. this.len + @as(u32, @intCast(cap))]; - list_.items.len += cap; + try list_.ensureUnusedCapacity(additional); + const prev_len = list_.items.len; + list_.items.len += additional; + const writable = list_.items[prev_len..]; this.update(list_); return writable; } - pub fn appendSliceAssumeCapacity(this: *Self, values: []const Type) void { - const tail = this.ptr[this.len .. this.len + values.len]; - bun.assert(this.cap >= this.len + @as(u32, @intCast(values.len))); - bun.copy(Type, tail, values); - this.len += @intCast(values.len); - bun.assert(this.cap >= this.len); - } - - pub fn initCapacity(allocator: std.mem.Allocator, len: usize) std.mem.Allocator.Error!Self { - var this = initWithBuffer(try allocator.alloc(Type, len)); - this.#allocator.set(allocator); - return this; - } - - pub fn initWithBuffer(buffer: []Type) Self { - return Self{ - .ptr = buffer.ptr, - .len = 0, - .cap = @intCast(buffer.len), - }; - } - - pub fn init(items: []const Type) Self { - @setRuntimeSafety(false); - return Self{ - .ptr = @constCast(items.ptr), - .len = @intCast(items.len), - .cap = @intCast(items.len), - }; - } - - pub fn fromList(list_: anytype) Self { - if (comptime @TypeOf(list_) == Self) { - return list_; - } - - if (comptime @TypeOf(list_) == []const Type) { - return init(list_); - } - - if (comptime Environment.allow_assert) { - bun.assert(list_.items.len <= list_.capacity); - } - - return Self{ - .ptr = list_.items.ptr, - .len = @intCast(list_.items.len), - .cap = @intCast(list_.capacity), - }; - } - - pub fn fromSlice(allocator: std.mem.Allocator, items: []const Type) !Self { - const allocated = try allocator.alloc(Type, items.len); - bun.copy(Type, allocated, 
items); - - return Self{ - .ptr = allocated.ptr, - .len = @intCast(allocated.len), - .cap = @intCast(allocated.len), - .#allocator = .init(allocator), - }; - } - - pub fn allocatedSlice(this: *const Self) []u8 { - if (this.cap == 0) return &.{}; - + pub fn allocatedSlice(this: Self) []Type { return this.ptr[0..this.cap]; } - pub fn update(this: *Self, list_: anytype) void { - this.* = .{ - .ptr = list_.items.ptr, - .len = @intCast(list_.items.len), - .cap = @intCast(list_.capacity), - }; - - if (comptime Environment.allow_assert) { - bun.assert(this.len <= this.cap); - } + pub fn memoryCost(this: Self) usize { + return this.cap; } - pub fn list(this: Self) std.ArrayListUnmanaged(Type) { - return std.ArrayListUnmanaged(Type){ - .items = this.ptr[0..this.len], - .capacity = this.cap, - }; - } - - pub fn listManaged(this: *Self, allocator: std.mem.Allocator) std.ArrayList(Type) { - this.#allocator.set(allocator); - var list_ = this.list(); - return list_.toManaged(allocator); - } - - pub fn first(this: Self) callconv(bun.callconv_inline) ?*Type { - return if (this.len > 0) this.ptr[0] else @as(?*Type, null); - } - - pub fn last(this: Self) callconv(bun.callconv_inline) ?*Type { - return if (this.len > 0) &this.ptr[this.len - 1] else @as(?*Type, null); - } - - pub fn first_(this: Self) callconv(bun.callconv_inline) Type { - return this.ptr[0]; - } - - pub fn at(this: Self, index: usize) callconv(bun.callconv_inline) *const Type { - bun.assert(index < this.len); - return &this.ptr[index]; - } - - pub fn mut(this: Self, index: usize) callconv(bun.callconv_inline) *Type { - bun.assert(index < this.len); - return &this.ptr[index]; - } - - pub fn one(allocator: std.mem.Allocator, value: Type) !Self { - var items = try allocator.alloc(Type, 1); - items[0] = value; - return Self{ - .ptr = @as([*]Type, @ptrCast(items.ptr)), - .len = 1, - .cap = 1, - .#allocator = .init(allocator), - }; - } - - pub fn @"[0]"(this: Self) callconv(bun.callconv_inline) Type { - return this.ptr[0]; 
- } - const OOM = error{OutOfMemory}; - - pub fn push(this: *Self, allocator: std.mem.Allocator, value: Type) OOM!void { - var list_ = this.listManaged(allocator); - try list_.append(value); - this.update(list_); - } - - pub fn appendFmt(this: *Self, allocator: std.mem.Allocator, comptime fmt: []const u8, args: anytype) !void { + /// This method is available only for `BabyList(u8)`. + pub fn appendFmt( + this: *Self, + allocator: std.mem.Allocator, + comptime fmt: []const u8, + args: anytype, + ) OOM!void { + if ((comptime safety_checks) and this.len == this.cap) this.assertOwned(); var list_ = this.listManaged(allocator); const writer = list_.writer(); try writer.print(fmt, args); - this.update(list_); } - pub fn insert(this: *Self, allocator: std.mem.Allocator, index: usize, val: Type) !void { - var list_ = this.listManaged(allocator); - try list_.insert(index, val); - this.update(list_); - } - - pub fn insertSlice(this: *Self, allocator: std.mem.Allocator, index: usize, vals: []const Type) !void { - var list_ = this.listManaged(allocator); - try list_.insertSlice(index, vals); - this.update(list_); - } - - pub fn append(this: *Self, allocator: std.mem.Allocator, value: []const Type) !void { - var list_ = this.listManaged(allocator); - try list_.appendSlice(value); - this.update(list_); - } - - pub fn slice(this: Self) callconv(bun.callconv_inline) []Type { - @setRuntimeSafety(false); - return this.ptr[0..this.len]; - } - - pub fn sliceConst(this: *const Self) callconv(bun.callconv_inline) []const Type { - @setRuntimeSafety(false); - return this.ptr[0..this.len]; - } - - pub fn write(this: *Self, allocator: std.mem.Allocator, str: []const u8) !u32 { + /// This method is available only for `BabyList(u8)`. 
+ pub fn write(this: *Self, allocator: std.mem.Allocator, str: []const u8) OOM!u32 { + if ((comptime safety_checks) and this.cap - this.len < str.len) this.assertOwned(); if (comptime Type != u8) @compileError("Unsupported for type " ++ @typeName(Type)); const initial = this.len; @@ -345,7 +393,9 @@ pub fn BabyList(comptime Type: type) type { return this.len - initial; } + /// This method is available only for `BabyList(u8)`. pub fn writeLatin1(this: *Self, allocator: std.mem.Allocator, str: []const u8) OOM!u32 { + if ((comptime safety_checks) and str.len > 0) this.assertOwned(); if (comptime Type != u8) @compileError("Unsupported for type " ++ @typeName(Type)); const initial = this.len; @@ -355,7 +405,9 @@ pub fn BabyList(comptime Type: type) type { return this.len - initial; } + /// This method is available only for `BabyList(u8)`. pub fn writeUTF16(this: *Self, allocator: std.mem.Allocator, str: []const u16) OOM!u32 { + if ((comptime safety_checks) and str.len > 0) this.assertOwned(); if (comptime Type != u8) @compileError("Unsupported for type " ++ @typeName(Type)); @@ -407,6 +459,7 @@ pub fn BabyList(comptime Type: type) type { return this.len - initial; } + /// This method is available only for `BabyList(u8)`. 
pub fn writeTypeAsBytesAssumeCapacity(this: *Self, comptime Int: type, int: Int) void { if (comptime Type != u8) @compileError("Unsupported for type " ++ @typeName(Type)); @@ -415,12 +468,95 @@ pub fn BabyList(comptime Type: type) type { this.len += @sizeOf(Int); } - pub fn memoryCost(self: *const Self) usize { - return self.cap; + pub fn parse(input: *bun.css.Parser) bun.css.Result(Self) { + return switch (input.parseCommaSeparated(Type, bun.css.generic.parseFor(Type))) { + .result => |v| return .{ .result = Self{ + .ptr = v.items.ptr, + .len = @intCast(v.items.len), + .cap = @intCast(v.capacity), + } }, + .err => |e| return .{ .err = e }, + }; + } + + pub fn toCss(this: *const Self, comptime W: type, dest: *bun.css.Printer(W)) bun.css.PrintErr!void { + return bun.css.to_css.fromBabyList(Type, this, W, dest); + } + + pub fn eql(lhs: *const Self, rhs: *const Self) bool { + if (lhs.len != rhs.len) return false; + for (lhs.sliceConst(), rhs.sliceConst()) |*a, *b| { + if (!bun.css.generic.eql(Type, a, b)) return false; + } + return true; + } + + pub fn deepClone(this: Self, allocator: std.mem.Allocator) !Self { + if (!@hasDecl(Type, "deepClone")) { + @compileError("Unsupported type for BabyList.deepClone(): " ++ @typeName(Type)); + } + + var list_ = try initCapacity(allocator, this.len); + for (this.slice()) |item| { + const clone_result = item.deepClone(allocator); + const cloned_item = switch (comptime @typeInfo(@TypeOf(clone_result))) { + .error_union => try clone_result, + else => clone_result, + }; + list_.appendAssumeCapacity(cloned_item); + } + return list_; + } + + /// Same as `deepClone` but calls `bun.outOfMemory` instead of returning an error. + /// `Type.deepClone` must not return any error except `error.OutOfMemory`. + pub fn deepCloneInfallible(this: Self, allocator: std.mem.Allocator) Self { + return bun.handleOom(this.deepClone(allocator)); + } + + /// Avoid using this function. 
It creates a `BabyList` that will immediately invoke + /// illegal behavior if you call any method that could allocate or free memory. On top of + /// that, if `items` points to read-only memory, any attempt to modify a list element (which + /// is very easy given how many methods return non-const pointers and slices) will also + /// invoke illegal behavior. + /// + /// To find an alternative: + /// + /// 1. Determine how the resulting `BabyList` is being used. Is it stored in a struct field? + /// Is it passed to a function? + /// + /// 2. Determine whether that struct field or function parameter expects the list to be + /// mutable. Does it potentially call any methods that could allocate or free, like + /// `append` or `deinit`? + /// + /// 3. If the list is expected to be mutable, don't use this function, because the returned + /// list will invoke illegal behavior if mutated. Use `fromSlice` or another allocating + /// function instead. + /// + /// 4. If the list is *not* expected to be mutable, don't use a `BabyList` at all. Change + /// the field or parameter to be a plain slice instead. + /// + /// Requirements: + /// + /// * Methods that could potentially free, remap, or resize `items` cannot be called. + pub fn fromBorrowedSliceDangerous(items: []const Type) Self { + var this: Self = .fromOwnedSlice(@constCast(items)); + if (comptime safety_checks) this.#origin = .{ .borrowed = .{ + .trace = if (traces_enabled) .capture(@returnAddress()), + } }; + return this; + } + + /// Transfers ownership of this `BabyList` to a new allocator. + /// + /// This method is valid only if both the old allocator and new allocator are + /// `MimallocArena`s. See `bun.safety.CheckedAllocator.transferOwnership`. 
+ pub fn transferOwnership(this: *Self, new_allocator: anytype) void { + this.#allocator.transferOwnership(new_allocator); } pub fn format( - self: Self, + this: Self, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype, @@ -429,65 +565,113 @@ pub fn BabyList(comptime Type: type) type { return std.fmt.format( writer, "BabyList({s}){{{any}}}", - .{ @typeName(Type), self.list() }, + .{ @typeName(Type), this.list() }, ); } - }; -} -pub fn OffsetList(comptime Type: type) type { - return struct { - head: u32 = 0, - byte_list: List = .{}, + fn assertOwned(this: *Self) void { + if ((comptime !safety_checks) or this.#origin == .owned) return; + if (comptime traces_enabled) { + bun.Output.note("borrowed BabyList created here:", .{}); + bun.crash_handler.dumpStackTrace( + this.#origin.borrowed.trace.trace(), + .{ .frame_count = 10, .stop_at_jsc_llint = true }, + ); + } + std.debug.panic( + "cannot perform this operation on a BabyList that doesn't own its data", + .{}, + ); + } - const List = BabyList(Type); - const Self = @This(); - - pub fn init(head: u32, byte_list: List) Self { + fn list(this: Self) std.ArrayListUnmanaged(Type) { return .{ - .head = head, - .byte_list = byte_list, + .items = this.slice(), + .capacity = this.cap, }; } - pub fn write(self: *Self, allocator: std.mem.Allocator, bytes: []const u8) !void { - _ = try self.byte_list.write(allocator, bytes); + fn listManaged(this: *Self, allocator: std.mem.Allocator) std.ArrayList(Type) { + this.#allocator.set(allocator); + var list_ = this.list(); + return list_.toManaged(allocator); } - pub fn slice(this: *Self) []u8 { - return this.byte_list.slice()[0..this.head]; - } - - pub fn remaining(this: *Self) []u8 { - return this.byte_list.slice()[this.head..]; - } - - pub fn consume(self: *Self, bytes: u32) void { - self.head +|= bytes; - if (self.head >= self.byte_list.len) { - self.head = 0; - self.byte_list.len = 0; + fn update(this: *Self, list_: anytype) void { + this.ptr = list_.items.ptr; 
+ this.len = @intCast(list_.items.len); + this.cap = @intCast(list_.capacity); + if (comptime Environment.allow_assert) { + bun.assert(this.len <= this.cap); } } - - pub fn len(self: *const Self) u32 { - return self.byte_list.len - self.head; - } - - pub fn clear(self: *Self) void { - self.head = 0; - self.byte_list.len = 0; - } - - pub fn deinit(self: *Self, allocator: std.mem.Allocator) void { - self.byte_list.deinitWithAllocator(allocator); - self.* = .{}; - } }; } +pub const ByteList = BabyList(u8); + +pub const OffsetByteList = struct { + const Self = @This(); + + head: u32 = 0, + byte_list: ByteList = .{}, + + pub fn init(head: u32, byte_list: ByteList) Self { + return .{ + .head = head, + .byte_list = byte_list, + }; + } + + pub fn write(self: *Self, allocator: std.mem.Allocator, bytes: []const u8) !void { + _ = try self.byte_list.write(allocator, bytes); + } + + pub fn slice(self: *const Self) []u8 { + return self.byte_list.slice()[0..self.head]; + } + + pub fn remaining(self: *const Self) []u8 { + return self.byte_list.slice()[self.head..]; + } + + pub fn consume(self: *Self, bytes: u32) void { + self.head +|= bytes; + if (self.head >= self.byte_list.len) { + self.head = 0; + self.byte_list.len = 0; + } + } + + pub fn len(self: *const Self) u32 { + return self.byte_list.len - self.head; + } + + pub fn clear(self: *Self) void { + self.head = 0; + self.byte_list.len = 0; + } + + /// This method invalidates `self`. Use `clearAndFree` to reset to empty instead. 
+ pub fn deinit(self: *Self, allocator: std.mem.Allocator) void { + self.byte_list.deinit(allocator); + self.* = undefined; + } + + pub fn clearAndFree(self: *Self, allocator: std.mem.Allocator) void { + self.deinit(allocator); + self.* = .{}; + } +}; + +pub const safety_checks = Environment.ci_assert; + const std = @import("std"); const bun = @import("bun"); -const Environment = bun.Environment; +const OOM = bun.OOM; const strings = bun.strings; +const StoredTrace = bun.crash_handler.StoredTrace; + +const Environment = bun.Environment; +const traces_enabled = Environment.isDebug; diff --git a/src/collections/BoundedArray.zig b/src/collections/bounded_array.zig similarity index 100% rename from src/collections/BoundedArray.zig rename to src/collections/bounded_array.zig diff --git a/src/crash_handler.zig b/src/crash_handler.zig index 38213bc917..2cea2ce25e 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -1667,9 +1667,10 @@ pub fn dumpStackTrace(trace: std.builtin.StackTrace, limits: WriteStackTraceLimi var sfa = std.heap.stackFallback(16384, arena.allocator()); spawnSymbolizer(program, sfa.get(), &trace) catch |err| switch (err) { // try next program if this one wasn't found - error.FileNotFound => {}, - else => return, + error.FileNotFound => continue, + else => {}, }; + return; } } @@ -1706,7 +1707,7 @@ fn spawnSymbolizer(program: [:0]const u8, alloc: std.mem.Allocator, trace: *cons child.progress_node = std.Progress.Node.none; const stderr = std.io.getStdErr().writer(); - child.spawn() catch |err| { + const result = child.spawnAndWait() catch |err| { stderr.print("Failed to invoke command: {s}\n", .{bun.fmt.fmtSlice(argv.items, " ")}) catch {}; if (bun.Environment.isWindows) { stderr.print("(You can compile pdb-addr2line from https://github.com/oven-sh/bun.report, cd pdb-addr2line && cargo build)\n", .{}) catch {}; @@ -1714,11 +1715,6 @@ fn spawnSymbolizer(program: [:0]const u8, alloc: std.mem.Allocator, trace: *cons return err; }; - const 
result = child.spawnAndWait() catch |err| { - stderr.print("Failed to invoke command: {s}\n", .{bun.fmt.fmtSlice(argv.items, " ")}) catch {}; - return err; - }; - if (result != .Exited or result.Exited != 0) { stderr.print("Failed to invoke command: {s}\n", .{bun.fmt.fmtSlice(argv.items, " ")}) catch {}; } diff --git a/src/css/css_parser.zig b/src/css/css_parser.zig index 6e09de4202..3ec785d33b 100644 --- a/src/css/css_parser.zig +++ b/src/css/css_parser.zig @@ -1416,7 +1416,7 @@ pub const BundlerAtRuleParser = struct { pub fn onImportRule(this: *This, import_rule: *ImportRule, start_position: u32, end_position: u32) void { const import_record_index = this.import_records.len; import_rule.import_record_idx = import_record_index; - this.import_records.push(this.allocator, ImportRecord{ + this.import_records.append(this.allocator, ImportRecord{ .path = bun.fs.Path.init(import_rule.url), .kind = if (import_rule.supports != null) .at_conditional else .at, .range = bun.logger.Range{ @@ -1439,9 +1439,9 @@ pub const BundlerAtRuleParser = struct { cloned.v.ensureTotalCapacity(this.allocator, this.enclosing_layer.v.len() + layer.v.len()); cloned.v.appendSliceAssumeCapacity(this.enclosing_layer.v.slice()); cloned.v.appendSliceAssumeCapacity(layer.v.slice()); - bun.handleOom(this.layer_names.push(this.allocator, cloned)); + bun.handleOom(this.layer_names.append(this.allocator, cloned)); } else { - bun.handleOom(this.layer_names.push(this.allocator, layer.deepClone(this.allocator))); + bun.handleOom(this.layer_names.append(this.allocator, layer.deepClone(this.allocator))); } } } @@ -2688,7 +2688,7 @@ pub fn NestedRuleParser(comptime T: type) type { if (!entry.found_existing) { entry.value_ptr.* = ComposesEntry{}; } - bun.handleOom(entry.value_ptr.*.composes.push(allocator, composes.deepClone(allocator))); + bun.handleOom(entry.value_ptr.*.composes.append(allocator, composes.deepClone(allocator))); } } @@ -3017,7 +3017,7 @@ pub fn fillPropertyBitSet(allocator: Allocator, bitset: 
*PropertyBitset, block: for (block.declarations.items) |*prop| { const tag = switch (prop.*) { .custom => { - bun.handleOom(custom_properties.push(allocator, prop.custom.name.asStr())); + bun.handleOom(custom_properties.append(allocator, prop.custom.name.asStr())); continue; }, .unparsed => |u| @as(PropertyIdTag, u.property_id), @@ -3030,7 +3030,7 @@ pub fn fillPropertyBitSet(allocator: Allocator, bitset: *PropertyBitset, block: for (block.important_declarations.items) |*prop| { const tag = switch (prop.*) { .custom => { - bun.handleOom(custom_properties.push(allocator, prop.custom.name.asStr())); + bun.handleOom(custom_properties.append(allocator, prop.custom.name.asStr())); continue; }, .unparsed => |u| @as(PropertyIdTag, u.property_id), @@ -3426,7 +3426,7 @@ pub fn StyleSheet(comptime AtRule: type) type { out.v.appendAssumeCapacity(rule.*); const import_record_idx = new_import_records.len; import_rule.import_record_idx = import_record_idx; - new_import_records.push(allocator, ImportRecord{ + new_import_records.append(allocator, ImportRecord{ .path = bun.fs.Path.init(import_rule.url), .kind = if (import_rule.supports != null) .at_conditional else .at, .range = bun.logger.Range.None, @@ -3790,7 +3790,7 @@ const ParseUntilErrorBehavior = enum { // return switch (this.*) { // .list => |list| { // const len = list.len; -// bun.handleOom(list.push(allocator, record)); +// bun.handleOom(list.append(allocator, record)); // return len; // }, // // .dummy => |*d| { @@ -3835,7 +3835,7 @@ pub const Parser = struct { }, .loc = loc, }; - extra.symbols.push(this.allocator(), bun.ast.Symbol{ + extra.symbols.append(this.allocator(), bun.ast.Symbol{ .kind = .local_css, .original_name = name, }) catch |err| bun.handleOom(err); @@ -3854,7 +3854,7 @@ pub const Parser = struct { pub fn addImportRecord(this: *Parser, url: []const u8, start_position: usize, kind: ImportKind) Result(u32) { if (this.import_records) |import_records| { const idx = import_records.len; - 
import_records.push(this.allocator(), ImportRecord{ + import_records.append(this.allocator(), ImportRecord{ .path = bun.fs.Path.init(url), .kind = kind, .range = bun.logger.Range{ @@ -6975,7 +6975,7 @@ pub const parse_utility = struct { ) Result(T) { // I hope this is okay var import_records = bun.BabyList(bun.ImportRecord){}; - defer import_records.deinitWithAllocator(allocator); + defer import_records.deinit(allocator); var i = ParserInput.new(allocator, input); var parser = Parser.new(&i, &import_records, .{}, null); const result = switch (parse_one(&parser)) { diff --git a/src/css/generics.zig b/src/css/generics.zig index 43503b3469..1a85c08ab3 100644 --- a/src/css/generics.zig +++ b/src/css/generics.zig @@ -483,7 +483,7 @@ pub inline fn deepClone(comptime T: type, this: *const T, allocator: Allocator) @compileError(@typeName(T) ++ " does not have a deepClone() function"); } - return T.deepClone(this, allocator); + return this.deepClone(allocator); } pub inline fn tryFromAngle(comptime T: type, angle: Angle) ?T { diff --git a/src/css/properties/grid.zig b/src/css/properties/grid.zig index db3c595619..39b0abcc7a 100644 --- a/src/css/properties/grid.zig +++ b/src/css/properties/grid.zig @@ -309,6 +309,7 @@ pub const TrackRepeat = struct { if (i.expectComma().asErr()) |e| return .{ .err = e }; + // TODO: this code will not compile if used var line_names = bun.BabyList(CustomIdentList).init(i.allocator); var track_sizes = bun.BabyList(TrackSize).init(i.allocator); diff --git a/src/css/small_list.zig b/src/css/small_list.zig index 132b1609b7..1696a1fa33 100644 --- a/src/css/small_list.zig +++ b/src/css/small_list.zig @@ -117,12 +117,13 @@ pub fn SmallList(comptime T: type, comptime N: comptime_int) type { .data = .{ .heap = .{ .len = list.len, .ptr = list.ptr } }, }; } - defer list.deinitWithAllocator(allocator); + var list_ = list; + defer list_.deinit(allocator); var this: @This() = .{ - .capacity = list.len, + .capacity = list_.len, .data = .{ .inlined = 
undefined }, }; - @memcpy(this.data.inlined[0..list.len], list.items[0..list.len]); + @memcpy(this.data.inlined[0..list_.len], list_.items[0..list_.len]); return this; } @@ -237,7 +238,7 @@ pub fn SmallList(comptime T: type, comptime N: comptime_int) type { break :images images; }; if (!images.isEmpty()) { - bun.handleOom(res.push(allocator, images)); + bun.handleOom(res.append(allocator, images)); } } @@ -250,7 +251,7 @@ pub fn SmallList(comptime T: type, comptime N: comptime_int) type { const image = in.getImage().getPrefixed(alloc, css.VendorPrefix.fromName(prefix)); out.* = in.withImage(alloc, image); } - bun.handleOom(r.push(alloc, images)); + bun.handleOom(r.append(alloc, images)); } } }.helper; @@ -261,7 +262,7 @@ pub fn SmallList(comptime T: type, comptime N: comptime_int) type { if (prefixes.none) { if (rgb) |r| { - bun.handleOom(res.push(allocator, r)); + bun.handleOom(res.append(allocator, r)); } if (fallbacks.p3) { diff --git a/src/deps/uws/WindowsNamedPipe.zig b/src/deps/uws/WindowsNamedPipe.zig index f45ff568db..bf4238e0c4 100644 --- a/src/deps/uws/WindowsNamedPipe.zig +++ b/src/deps/uws/WindowsNamedPipe.zig @@ -79,10 +79,10 @@ fn onPipeClose(this: *WindowsNamedPipe) void { } fn onReadAlloc(this: *WindowsNamedPipe, suggested_size: usize) []u8 { - var available = this.incoming.available(); + var available = this.incoming.unusedCapacitySlice(); if (available.len < suggested_size) { bun.handleOom(this.incoming.ensureUnusedCapacity(bun.default_allocator, suggested_size)); - available = this.incoming.available(); + available = this.incoming.unusedCapacitySlice(); } return available.ptr[0..suggested_size]; } diff --git a/src/http.zig b/src/http.zig index f4119f2a49..b081db2ecc 100644 --- a/src/http.zig +++ b/src/http.zig @@ -393,6 +393,11 @@ pub const HTTPVerboseLevel = enum { curl, }; +const HTTPUpgradeState = enum(u2) { + none = 0, + pending = 1, + upgraded = 2, +}; pub const Flags = packed struct(u16) { disable_timeout: bool = false, disable_keepalive: 
bool = false, @@ -405,7 +410,8 @@ pub const Flags = packed struct(u16) { is_preconnect_only: bool = false, is_streaming_request_body: bool = false, defer_fail_until_connecting_is_complete: bool = false, - _padding: u5 = 0, + upgrade_state: HTTPUpgradeState = .none, + _padding: u3 = 0, }; // TODO: reduce the size of this struct @@ -592,6 +598,12 @@ pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request { hashHeaderConst("Accept-Encoding") => { override_accept_encoding = true; }, + hashHeaderConst("Upgrade") => { + const value = this.headerStr(header_values[i]); + if (!std.ascii.eqlIgnoreCase(value, "h2") and !std.ascii.eqlIgnoreCase(value, "h2c")) { + this.flags.upgrade_state = .pending; + } + }, hashHeaderConst(chunked_encoded_header.name) => { // We don't want to override chunked encoding header if it was set by the user add_transfer_encoding = false; @@ -651,7 +663,7 @@ pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request { if (body_len > 0 or this.method.hasRequestBody()) { if (this.flags.is_streaming_request_body) { - if (add_transfer_encoding) { + if (add_transfer_encoding and this.flags.upgrade_state == .none) { request_headers_buf[header_count] = chunked_encoded_header; header_count += 1; } @@ -1022,14 +1034,26 @@ fn writeToStreamUsingBuffer(this: *HTTPClient, comptime is_ssl: bool, socket: Ne pub fn writeToStream(this: *HTTPClient, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, data: []const u8) void { log("flushStream", .{}); + if (this.state.original_request_body != .stream) { + return; + } var stream = &this.state.original_request_body.stream; const stream_buffer = stream.buffer orelse return; + if (this.flags.upgrade_state == .pending) { + // cannot drain yet, upgrade is waiting for upgrade + return; + } const buffer = stream_buffer.acquire(); const wasEmpty = buffer.isEmpty() and data.len == 0; if (wasEmpty and stream.ended) { // nothing is buffered and the stream is done so we just release and 
detach stream_buffer.release(); stream.detach(); + if (this.flags.upgrade_state == .upgraded) { + // for upgraded connections we need to shutdown the socket to signal the end of the connection + // otherwise the client will wait forever for the connection to be closed + socket.shutdown(); + } return; } @@ -1051,6 +1075,11 @@ pub fn writeToStream(this: *HTTPClient, comptime is_ssl: bool, socket: NewHTTPCo this.state.request_stage = .done; stream_buffer.release(); stream.detach(); + if (this.flags.upgrade_state == .upgraded) { + // for upgraded connections we need to shutdown the socket to signal the end of the connection + // otherwise the client will wait forever for the connection to be closed + socket.shutdown(); + } } else { // only report drain if we send everything and previous we had something to send if (!wasEmpty) { @@ -1322,9 +1351,8 @@ pub fn handleOnDataHeaders( ctx: *NewHTTPContext(is_ssl), socket: NewHTTPContext(is_ssl).HTTPSocket, ) void { - log("handleOnDataHeaders", .{}); + log("handleOnDataHeader data: {s}", .{incoming_data}); var to_read = incoming_data; - var amount_read: usize = 0; var needs_move = true; if (this.state.response_message_buffer.list.items.len > 0) { // this one probably won't be another chunk, so we use appendSliceExact() to avoid over-allocating @@ -1333,47 +1361,73 @@ pub fn handleOnDataHeaders( needs_move = false; } - // we reset the pending_response each time wich means that on parse error this will be always be empty - this.state.pending_response = picohttp.Response{}; + while (true) { + var amount_read: usize = 0; - // minimal http/1.1 request size is 16 bytes without headers and 26 with Host header - // if is less than 16 will always be a ShortRead - if (to_read.len < 16) { - log("handleShortRead", .{}); - this.handleShortRead(is_ssl, incoming_data, socket, needs_move); - return; - } + // we reset the pending_response each time wich means that on parse error this will be always be empty + this.state.pending_response = 
picohttp.Response{}; - var response = picohttp.Response.parseParts( - to_read, - &shared_response_headers_buf, - &amount_read, - ) catch |err| { - switch (err) { - error.ShortRead => { - this.handleShortRead(is_ssl, incoming_data, socket, needs_move); - }, - else => { - this.closeAndFail(err, is_ssl, socket); - }, + // minimal http/1.1 request size is 16 bytes without headers and 26 with Host header + // if is less than 16 will always be a ShortRead + if (to_read.len < 16) { + log("handleShortRead", .{}); + this.handleShortRead(is_ssl, incoming_data, socket, needs_move); + return; } - return; - }; - // we save the successful parsed response - this.state.pending_response = response; + const response = picohttp.Response.parseParts( + to_read, + &shared_response_headers_buf, + &amount_read, + ) catch |err| { + switch (err) { + error.ShortRead => { + this.handleShortRead(is_ssl, incoming_data, socket, needs_move); + }, + else => { + this.closeAndFail(err, is_ssl, socket); + }, + } + return; + }; - const body_buf = to_read[@min(@as(usize, @intCast(response.bytes_read)), to_read.len)..]; - // handle the case where we have a 100 Continue - if (response.status_code >= 100 and response.status_code < 200) { - log("information headers", .{}); - // we still can have the 200 OK in the same buffer sometimes - if (body_buf.len > 0) { - log("information headers with body", .{}); - this.onData(is_ssl, body_buf, ctx, socket); + // we save the successful parsed response + this.state.pending_response = response; + + to_read = to_read[@min(@as(usize, @intCast(response.bytes_read)), to_read.len)..]; + + if (response.status_code == 101) { + if (this.flags.upgrade_state == .none) { + // we cannot upgrade to websocket because the client did not request it! 
+ this.closeAndFail(error.UnrequestedUpgrade, is_ssl, socket); + return; + } + // special case for websocket upgrade + this.flags.upgrade_state = .upgraded; + if (this.signals.upgraded) |upgraded| { + upgraded.store(true, .monotonic); + } + // start draining the request body + this.flushStream(is_ssl, socket); + break; } - return; + + // handle the case where we have a 100 Continue + if (response.status_code >= 100 and response.status_code < 200) { + log("information headers", .{}); + + this.state.pending_response = null; + if (to_read.len == 0) { + // we only received 1XX responses, we wanna wait for the next status code + return; + } + // the buffer could still contain more 1XX responses or other status codes, so we continue parsing + continue; + } + + break; } + var response = this.state.pending_response.?; const should_continue = this.handleResponseMetadata( &response, ) catch |err| { @@ -1409,14 +1463,14 @@ pub fn handleOnDataHeaders( if (this.flags.proxy_tunneling and this.proxy_tunnel == null) { // we are proxing we dont need to cloneMetadata yet - this.startProxyHandshake(is_ssl, socket, body_buf); + this.startProxyHandshake(is_ssl, socket, to_read); return; } // we have body data incoming so we clone metadata and keep going this.cloneMetadata(); - if (body_buf.len == 0) { + if (to_read.len == 0) { // no body data yet, but we can report the headers if (this.signals.get(.header_progress)) { this.progressUpdate(is_ssl, ctx, socket); @@ -1426,7 +1480,7 @@ pub fn handleOnDataHeaders( if (this.state.response_stage == .body) { { - const report_progress = this.handleResponseBody(body_buf, true) catch |err| { + const report_progress = this.handleResponseBody(to_read, true) catch |err| { this.closeAndFail(err, is_ssl, socket); return; }; @@ -1439,7 +1493,7 @@ pub fn handleOnDataHeaders( } else if (this.state.response_stage == .body_chunk) { this.setTimeout(socket, 5); { - const report_progress = this.handleResponseBodyChunkedEncoding(body_buf) catch |err| { + const 
report_progress = this.handleResponseBodyChunkedEncoding(to_read) catch |err| { this.closeAndFail(err, is_ssl, socket); return; }; @@ -2415,6 +2469,11 @@ pub fn handleResponseMetadata( } else { log("handleResponseMetadata: content_length is null and transfer_encoding {}", .{this.state.transfer_encoding}); } + if (this.flags.upgrade_state == .upgraded) { + this.state.content_length = null; + this.state.flags.allow_keepalive = false; + return ShouldContinue.continue_streaming; + } if (this.method.hasBody() and (content_length == null or content_length.? > 0 or !this.state.flags.allow_keepalive or this.state.transfer_encoding == .chunked or is_server_sent_events)) { return ShouldContinue.continue_streaming; diff --git a/src/http/HTTPThread.zig b/src/http/HTTPThread.zig index 8ed4f2f52a..d6946798e8 100644 --- a/src/http/HTTPThread.zig +++ b/src/http/HTTPThread.zig @@ -323,8 +323,8 @@ fn drainEvents(this: *@This()) void { if (client.state.original_request_body == .stream) { var stream = &client.state.original_request_body.stream; stream.ended = ended; - if (messageType == .endChunked) { - // only send the 0-length chunk if the request body is chunked + if (messageType == .endChunked and client.flags.upgrade_state != .upgraded) { + // only send the 0-length chunk if the request body is chunked and not upgraded client.writeToStream(is_tls, socket, bun.http.end_of_chunked_http1_1_encoding_response_body); } else { client.flushStream(is_tls, socket); diff --git a/src/http/Signals.zig b/src/http/Signals.zig index 78531e7f41..bf8d1d8360 100644 --- a/src/http/Signals.zig +++ b/src/http/Signals.zig @@ -4,8 +4,9 @@ header_progress: ?*std.atomic.Value(bool) = null, body_streaming: ?*std.atomic.Value(bool) = null, aborted: ?*std.atomic.Value(bool) = null, cert_errors: ?*std.atomic.Value(bool) = null, +upgraded: ?*std.atomic.Value(bool) = null, pub fn isEmpty(this: *const Signals) bool { - return this.aborted == null and this.body_streaming == null and this.header_progress == null 
and this.cert_errors == null; + return this.aborted == null and this.body_streaming == null and this.header_progress == null and this.cert_errors == null and this.upgraded == null; } pub const Store = struct { @@ -13,12 +14,14 @@ pub const Store = struct { body_streaming: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), aborted: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), cert_errors: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), + upgraded: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), pub fn to(this: *Store) Signals { return .{ .header_progress = &this.header_progress, .body_streaming = &this.body_streaming, .aborted = &this.aborted, .cert_errors = &this.cert_errors, + .upgraded = &this.upgraded, }; } }; diff --git a/src/install/PackageManager/PackageJSONEditor.zig b/src/install/PackageManager/PackageJSONEditor.zig index 9b157767d8..959bc17871 100644 --- a/src/install/PackageManager/PackageJSONEditor.zig +++ b/src/install/PackageManager/PackageJSONEditor.zig @@ -93,8 +93,8 @@ pub fn editTrustedDependencies(allocator: std.mem.Allocator, package_json: *Expr } const trusted_dependencies_to_add = len; - const new_trusted_deps = brk: { - var deps = try allocator.alloc(Expr, trusted_dependencies.len + trusted_dependencies_to_add); + const new_trusted_deps: JSAst.ExprNodeList = brk: { + const deps = try allocator.alloc(Expr, trusted_dependencies.len + trusted_dependencies_to_add); @memcpy(deps[0..trusted_dependencies.len], trusted_dependencies); @memset(deps[trusted_dependencies.len..], Expr.empty); @@ -127,7 +127,7 @@ pub fn editTrustedDependencies(allocator: std.mem.Allocator, package_json: *Expr for (deps) |dep| bun.assert(dep.data != .e_missing); } - break :brk deps; + break :brk .fromOwnedSlice(deps); }; var needs_new_trusted_dependencies_list = true; @@ -141,20 +141,18 @@ pub fn editTrustedDependencies(allocator: std.mem.Allocator, package_json: *Expr break :brk Expr.init( E.Array, - E.Array{ - .items = 
JSAst.ExprNodeList.init(new_trusted_deps), - }, + E.Array{ .items = new_trusted_deps }, logger.Loc.Empty, ); }; if (trusted_dependencies_to_add > 0 and new_trusted_deps.len > 0) { - trusted_dependencies_array.data.e_array.items = JSAst.ExprNodeList.init(new_trusted_deps); + trusted_dependencies_array.data.e_array.items = new_trusted_deps; trusted_dependencies_array.data.e_array.alphabetizeStrings(); } if (package_json.data != .e_object or package_json.data.e_object.properties.len == 0) { - var root_properties = try allocator.alloc(JSAst.G.Property, 1); + const root_properties = try allocator.alloc(JSAst.G.Property, 1); root_properties[0] = JSAst.G.Property{ .key = Expr.init( E.String, @@ -169,12 +167,12 @@ pub fn editTrustedDependencies(allocator: std.mem.Allocator, package_json: *Expr package_json.* = Expr.init( E.Object, E.Object{ - .properties = JSAst.G.Property.List.init(root_properties), + .properties = JSAst.G.Property.List.fromOwnedSlice(root_properties), }, logger.Loc.Empty, ); } else if (needs_new_trusted_dependencies_list) { - var root_properties = try allocator.alloc(G.Property, package_json.data.e_object.properties.len + 1); + const root_properties = try allocator.alloc(G.Property, package_json.data.e_object.properties.len + 1); @memcpy(root_properties[0..package_json.data.e_object.properties.len], package_json.data.e_object.properties.slice()); root_properties[root_properties.len - 1] = .{ .key = Expr.init( @@ -189,7 +187,7 @@ pub fn editTrustedDependencies(allocator: std.mem.Allocator, package_json: *Expr package_json.* = Expr.init( E.Object, E.Object{ - .properties = JSAst.G.Property.List.init(root_properties), + .properties = JSAst.G.Property.List.fromOwnedSlice(root_properties), }, logger.Loc.Empty, ); @@ -501,9 +499,12 @@ pub fn edit( } } - var new_dependencies = try allocator.alloc(G.Property, dependencies.len + remaining - replacing); - bun.copy(G.Property, new_dependencies, dependencies); - @memset(new_dependencies[dependencies.len..], 
G.Property{}); + var new_dependencies = try std.ArrayListUnmanaged(G.Property) + .initCapacity(allocator, dependencies.len + remaining - replacing); + new_dependencies.expandToCapacity(); + + bun.copy(G.Property, new_dependencies.items, dependencies); + @memset(new_dependencies.items[dependencies.len..], G.Property{}); var trusted_dependencies: []Expr = &[_]Expr{}; if (options.add_trusted_dependencies) { @@ -515,10 +516,10 @@ pub fn edit( } const trusted_dependencies_to_add = manager.trusted_deps_to_add_to_package_json.items.len; - const new_trusted_deps = brk: { - if (!options.add_trusted_dependencies or trusted_dependencies_to_add == 0) break :brk &[_]Expr{}; + const new_trusted_deps: JSAst.ExprNodeList = brk: { + if (!options.add_trusted_dependencies or trusted_dependencies_to_add == 0) break :brk .empty; - var deps = try allocator.alloc(Expr, trusted_dependencies.len + trusted_dependencies_to_add); + const deps = try allocator.alloc(Expr, trusted_dependencies.len + trusted_dependencies_to_add); @memcpy(deps[0..trusted_dependencies.len], trusted_dependencies); @memset(deps[trusted_dependencies.len..], Expr.empty); @@ -547,7 +548,7 @@ pub fn edit( for (deps) |dep| bun.assert(dep.data != .e_missing); } - break :brk deps; + break :brk .fromOwnedSlice(deps); }; for (updates.*) |*request| { @@ -555,31 +556,31 @@ pub fn edit( defer if (comptime Environment.allow_assert) bun.assert(request.e_string != null); var k: usize = 0; - while (k < new_dependencies.len) : (k += 1) { - if (new_dependencies[k].key) |key| { + while (k < new_dependencies.items.len) : (k += 1) { + if (new_dependencies.items[k].key) |key| { const name = request.getName(); if (!key.data.e_string.eql(string, name)) continue; if (request.package_id == invalid_package_id) { // Duplicate dependency (e.g., "react" in both "dependencies" and // "optionalDependencies"). Remove the old dependency. - new_dependencies[k] = .{}; - new_dependencies = new_dependencies[0 .. 
new_dependencies.len - 1]; + new_dependencies.items[k] = .{}; + new_dependencies.items.len -= 1; } } - new_dependencies[k].key = JSAst.Expr.allocate( + new_dependencies.items[k].key = JSAst.Expr.allocate( allocator, JSAst.E.String, .{ .data = try allocator.dupe(u8, request.getResolvedName(manager.lockfile)) }, logger.Loc.Empty, ); - new_dependencies[k].value = JSAst.Expr.allocate(allocator, JSAst.E.String, .{ + new_dependencies.items[k].value = JSAst.Expr.allocate(allocator, JSAst.E.String, .{ // we set it later .data = "", }, logger.Loc.Empty); - request.e_string = new_dependencies[k].value.?.data.e_string; + request.e_string = new_dependencies.items[k].value.?.data.e_string; break; } } @@ -595,12 +596,12 @@ pub fn edit( } break :brk JSAst.Expr.allocate(allocator, JSAst.E.Object, .{ - .properties = JSAst.G.Property.List.init(new_dependencies), + .properties = .empty, }, logger.Loc.Empty); }; - dependencies_object.data.e_object.properties = JSAst.G.Property.List.init(new_dependencies); - if (new_dependencies.len > 1) + dependencies_object.data.e_object.properties = JSAst.G.Property.List.moveFromList(&new_dependencies); + if (dependencies_object.data.e_object.properties.len > 1) dependencies_object.data.e_object.alphabetizeProperties(); var needs_new_trusted_dependencies_list = true; @@ -617,19 +618,19 @@ pub fn edit( } break :brk Expr.allocate(allocator, E.Array, .{ - .items = JSAst.ExprNodeList.init(new_trusted_deps), + .items = new_trusted_deps, }, logger.Loc.Empty); }; if (options.add_trusted_dependencies and trusted_dependencies_to_add > 0) { - trusted_dependencies_array.data.e_array.items = JSAst.ExprNodeList.init(new_trusted_deps); + trusted_dependencies_array.data.e_array.items = new_trusted_deps; if (new_trusted_deps.len > 1) { trusted_dependencies_array.data.e_array.alphabetizeStrings(); } } if (current_package_json.data != .e_object or current_package_json.data.e_object.properties.len == 0) { - var root_properties = try allocator.alloc(JSAst.G.Property, 
if (options.add_trusted_dependencies) 2 else 1); + const root_properties = try allocator.alloc(JSAst.G.Property, if (options.add_trusted_dependencies) 2 else 1); root_properties[0] = JSAst.G.Property{ .key = JSAst.Expr.allocate(allocator, JSAst.E.String, .{ .data = dependency_list, @@ -647,11 +648,11 @@ pub fn edit( } current_package_json.* = JSAst.Expr.allocate(allocator, JSAst.E.Object, .{ - .properties = JSAst.G.Property.List.init(root_properties), + .properties = JSAst.G.Property.List.fromOwnedSlice(root_properties), }, logger.Loc.Empty); } else { if (needs_new_dependency_list and needs_new_trusted_dependencies_list) { - var root_properties = try allocator.alloc(G.Property, current_package_json.data.e_object.properties.len + 2); + const root_properties = try allocator.alloc(G.Property, current_package_json.data.e_object.properties.len + 2); @memcpy(root_properties[0..current_package_json.data.e_object.properties.len], current_package_json.data.e_object.properties.slice()); root_properties[root_properties.len - 2] = .{ .key = Expr.allocate(allocator, E.String, E.String{ @@ -666,10 +667,10 @@ pub fn edit( .value = trusted_dependencies_array, }; current_package_json.* = Expr.allocate(allocator, E.Object, .{ - .properties = G.Property.List.init(root_properties), + .properties = G.Property.List.fromOwnedSlice(root_properties), }, logger.Loc.Empty); } else if (needs_new_dependency_list or needs_new_trusted_dependencies_list) { - var root_properties = try allocator.alloc(JSAst.G.Property, current_package_json.data.e_object.properties.len + 1); + const root_properties = try allocator.alloc(JSAst.G.Property, current_package_json.data.e_object.properties.len + 1); @memcpy(root_properties[0..current_package_json.data.e_object.properties.len], current_package_json.data.e_object.properties.slice()); root_properties[root_properties.len - 1] = .{ .key = JSAst.Expr.allocate(allocator, JSAst.E.String, .{ @@ -678,7 +679,7 @@ pub fn edit( .value = if (needs_new_dependency_list) 
dependencies_object else trusted_dependencies_array, }; current_package_json.* = JSAst.Expr.allocate(allocator, JSAst.E.Object, .{ - .properties = JSAst.G.Property.List.init(root_properties), + .properties = JSAst.G.Property.List.fromOwnedSlice(root_properties), }, logger.Loc.Empty); } } diff --git a/src/install/PackageManager/updatePackageJSONAndInstall.zig b/src/install/PackageManager/updatePackageJSONAndInstall.zig index 9407add0fe..da973a2e26 100644 --- a/src/install/PackageManager/updatePackageJSONAndInstall.zig +++ b/src/install/PackageManager/updatePackageJSONAndInstall.zig @@ -165,9 +165,10 @@ fn updatePackageJSONAndInstallWithManagerWithUpdates( // If the dependencies list is now empty, remove it from the package.json // since we're swapRemove, we have to re-sort it if (query.expr.data.e_object.properties.len == 0) { - var arraylist = current_package_json.root.data.e_object.properties.list(); - _ = arraylist.swapRemove(query.i); - current_package_json.root.data.e_object.properties.update(arraylist); + // TODO: Theoretically we could change these two lines to + // `.orderedRemove(query.i)`, but would that change user-facing + // behavior? 
+ _ = current_package_json.root.data.e_object.properties.swapRemove(query.i); current_package_json.root.data.e_object.packageJSONSort(); } else { var obj = query.expr.data.e_object; diff --git a/src/install/PackageManagerTask.zig b/src/install/PackageManagerTask.zig index a78a7fefa2..c324dc8246 100644 --- a/src/install/PackageManagerTask.zig +++ b/src/install/PackageManagerTask.zig @@ -94,17 +94,15 @@ pub fn callback(task: *ThreadPool.Task) void { .package_manifest => { const allocator = bun.default_allocator; var manifest = &this.request.package_manifest; - const body = manifest.network.response_buffer.move(); - defer { - bun.default_allocator.free(body); - } + const body = &manifest.network.response_buffer; + defer body.deinit(); const package_manifest = Npm.Registry.getPackageMetadata( allocator, manager.scopeForPackageName(manifest.name.slice()), (manifest.network.response.metadata orelse @panic("Assertion failure: Expected metadata to be set")).response, - body, + body.slice(), &this.log, manifest.name.slice(), manifest.network.callback.package_manifest.loaded_manifest, @@ -135,15 +133,12 @@ pub fn callback(task: *ThreadPool.Task) void { } }, .extract => { - const bytes = this.request.extract.network.response_buffer.move(); - - defer { - bun.default_allocator.free(bytes); - } + const buffer = &this.request.extract.network.response_buffer; + defer buffer.deinit(); const result = this.request.extract.tarball.run( &this.log, - bytes, + buffer.slice(), ) catch |err| { bun.handleErrorReturnTrace(err, @errorReturnTrace()); diff --git a/src/interchange/json.zig b/src/interchange/json.zig index 3109d8c600..bec03a2501 100644 --- a/src/interchange/json.zig +++ b/src/interchange/json.zig @@ -194,7 +194,7 @@ fn JSONLikeParser_( } try p.lexer.expect(.t_close_bracket); return newExpr(E.Array{ - .items = ExprNodeList.fromList(exprs), + .items = ExprNodeList.moveFromList(&exprs), .is_single_line = is_single_line, .was_originally_macro = comptime opts.was_originally_macro, }, 
loc); @@ -266,7 +266,7 @@ fn JSONLikeParser_( } try p.lexer.expect(.t_close_brace); return newExpr(E.Object{ - .properties = G.Property.List.fromList(properties), + .properties = G.Property.List.moveFromList(&properties), .is_single_line = is_single_line, .was_originally_macro = comptime opts.was_originally_macro, }, loc); @@ -552,21 +552,20 @@ pub fn toAST( }, .@"struct" => |Struct| { const fields: []const std.builtin.Type.StructField = Struct.fields; - var properties = try allocator.alloc(js_ast.G.Property, fields.len); - var property_i: usize = 0; + var properties = try BabyList(js_ast.G.Property).initCapacity(allocator, fields.len); + inline for (fields) |field| { - properties[property_i] = G.Property{ + properties.appendAssumeCapacity(G.Property{ .key = Expr.init(E.String, E.String{ .data = field.name }, logger.Loc.Empty), .value = try toAST(allocator, field.type, @field(value, field.name)), - }; - property_i += 1; + }); } return Expr.init( js_ast.E.Object, js_ast.E.Object{ - .properties = BabyList(G.Property).init(properties[0..property_i]), - .is_single_line = property_i <= 1, + .properties = properties, + .is_single_line = properties.len <= 1, }, logger.Loc.Empty, ); diff --git a/src/interchange/yaml.zig b/src/interchange/yaml.zig index eeba0420ab..b76a0af3a8 100644 --- a/src/interchange/yaml.zig +++ b/src/interchange/yaml.zig @@ -19,13 +19,13 @@ pub const YAML = struct { // multi-document yaml streams are converted into arrays - var items: std.ArrayList(Expr) = try .initCapacity(allocator, stream.docs.items.len); + var items: bun.BabyList(Expr) = try .initCapacity(allocator, stream.docs.items.len); for (stream.docs.items) |doc| { items.appendAssumeCapacity(doc.root); } - return .init(E.Array, .{ .items = .fromList(items) }, .Empty); + return .init(E.Array, .{ .items = items }, .Empty); }, }; } @@ -756,7 +756,7 @@ pub fn Parser(comptime enc: Encoding) type { try self.scan(.{}); - return .init(E.Array, .{ .items = .fromList(seq) }, sequence_start.loc()); + 
return .init(E.Array, .{ .items = .moveFromList(&seq) }, sequence_start.loc()); } fn parseFlowMapping(self: *@This()) ParseError!Expr { @@ -866,7 +866,7 @@ pub fn Parser(comptime enc: Encoding) type { try self.scan(.{}); - return .init(E.Object, .{ .properties = .fromList(props) }, mapping_start.loc()); + return .init(E.Object, .{ .properties = .moveFromList(&props) }, mapping_start.loc()); } fn parseBlockSequence(self: *@This()) ParseError!Expr { @@ -941,7 +941,7 @@ pub fn Parser(comptime enc: Encoding) type { } } - return .init(E.Array, .{ .items = .fromList(seq) }, sequence_start.loc()); + return .init(E.Array, .{ .items = .moveFromList(&seq) }, sequence_start.loc()); } fn parseBlockMapping( @@ -1022,7 +1022,7 @@ pub fn Parser(comptime enc: Encoding) type { } if (self.context.get() == .flow_in) { - return .init(E.Object, .{ .properties = .fromList(props) }, mapping_start.loc()); + return .init(E.Object, .{ .properties = .moveFromList(&props) }, mapping_start.loc()); } try self.context.set(.block_in); @@ -1126,7 +1126,7 @@ pub fn Parser(comptime enc: Encoding) type { } } - return .init(E.Object, .{ .properties = .fromList(props) }, mapping_start.loc()); + return .init(E.Object, .{ .properties = .moveFromList(&props) }, mapping_start.loc()); } const NodeProperties = struct { diff --git a/src/io/PipeWriter.zig b/src/io/PipeWriter.zig index b172cf133d..30ac28f95e 100644 --- a/src/io/PipeWriter.zig +++ b/src/io/PipeWriter.zig @@ -1127,16 +1127,11 @@ pub const StreamBuffer = struct { } pub fn writeAssumeCapacity(this: *StreamBuffer, buffer: []const u8) void { - var byte_list = bun.ByteList.fromList(this.list); - defer this.list = byte_list.listManaged(this.list.allocator); - byte_list.appendSliceAssumeCapacity(buffer); + this.list.appendSliceAssumeCapacity(buffer); } pub fn ensureUnusedCapacity(this: *StreamBuffer, capacity: usize) OOM!void { - var byte_list = bun.ByteList.fromList(this.list); - defer this.list = byte_list.listManaged(this.list.allocator); - - _ = try 
byte_list.ensureUnusedCapacity(this.list.allocator, capacity); + return this.list.ensureUnusedCapacity(capacity); } pub fn writeTypeAsBytes(this: *StreamBuffer, comptime T: type, data: *const T) OOM!void { @@ -1144,8 +1139,8 @@ pub const StreamBuffer = struct { } pub fn writeTypeAsBytesAssumeCapacity(this: *StreamBuffer, comptime T: type, data: T) void { - var byte_list = bun.ByteList.fromList(this.list); - defer this.list = byte_list.listManaged(this.list.allocator); + var byte_list = bun.ByteList.moveFromList(&this.list); + defer this.list = byte_list.moveToListManaged(this.list.allocator); byte_list.writeTypeAsBytesAssumeCapacity(T, data); } @@ -1156,16 +1151,16 @@ pub const StreamBuffer = struct { } { - var byte_list = bun.ByteList.fromList(this.list); - defer this.list = byte_list.listManaged(this.list.allocator); + var byte_list = bun.ByteList.moveFromList(&this.list); + defer this.list = byte_list.moveToListManaged(this.list.allocator); _ = try byte_list.writeLatin1(this.list.allocator, buffer); } return this.list.items[this.cursor..]; } else if (comptime @TypeOf(writeFn) == @TypeOf(&writeUTF16) and writeFn == &writeUTF16) { { - var byte_list = bun.ByteList.fromList(this.list); - defer this.list = byte_list.listManaged(this.list.allocator); + var byte_list = bun.ByteList.moveFromList(&this.list); + defer this.list = byte_list.moveToListManaged(this.list.allocator); _ = try byte_list.writeUTF16(this.list.allocator, buffer); } @@ -1185,15 +1180,15 @@ pub const StreamBuffer = struct { } } - var byte_list = bun.ByteList.fromList(this.list); - defer this.list = byte_list.listManaged(this.list.allocator); + var byte_list = bun.ByteList.moveFromList(&this.list); + defer this.list = byte_list.moveToListManaged(this.list.allocator); _ = try byte_list.writeLatin1(this.list.allocator, buffer); } pub fn writeUTF16(this: *StreamBuffer, buffer: []const u16) OOM!void { - var byte_list = bun.ByteList.fromList(this.list); - defer this.list = 
byte_list.listManaged(this.list.allocator); + var byte_list = bun.ByteList.moveFromList(&this.list); + defer this.list = byte_list.moveToListManaged(this.list.allocator); _ = try byte_list.writeUTF16(this.list.allocator, buffer); } diff --git a/src/js/AGENTS.md b/src/js/AGENTS.md new file mode 120000 index 0000000000..681311eb9c --- /dev/null +++ b/src/js/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/src/js/CLAUDE.md b/src/js/CLAUDE.md new file mode 100644 index 0000000000..ed175a119a --- /dev/null +++ b/src/js/CLAUDE.md @@ -0,0 +1,104 @@ +# JavaScript Builtins in Bun + +Write JS builtins for Bun's Node.js compatibility and APIs. Run `bun bd` after changes. + +## Directory Structure + +- `builtins/` - Individual functions (`*CodeGenerator(vm)` in C++) +- `node/` - Node.js modules (`node:fs`, `node:path`) +- `bun/` - Bun modules (`bun:ffi`, `bun:sqlite`) +- `thirdparty/` - NPM replacements (`ws`, `node-fetch`) +- `internal/` - Internal modules + +## Writing Modules + +Modules are NOT ES modules: + +```typescript +const EventEmitter = require("node:events"); // String literals only +const { validateFunction } = require("internal/validators"); + +export default { + myFunction() { + if (!$isCallable(callback)) { + throw $ERR_INVALID_ARG_TYPE("cb", "function", callback); + } + }, +}; +``` + +## Writing Builtin Functions + +```typescript +export function initializeReadableStream( + this: ReadableStream, + underlyingSource, + strategy, +) { + if (!$isObject(underlyingSource)) { + throw new TypeError( + "ReadableStream constructor takes an object as first argument", + ); + } + $putByIdDirectPrivate(this, "state", $streamReadable); +} +``` + +C++ access: + +```cpp +object->putDirectBuiltinFunction(vm, globalObject, identifier, + readableStreamInitializeReadableStreamCodeGenerator(vm), 0); +``` + +## $ Globals and Special Syntax + +**CRITICAL**: Use `.$call` and `.$apply`, never `.call` or `.apply`: + +```typescript +// ✗ WRONG - User can tamper 
+callback.call(undefined, arg1); +fn.apply(undefined, args); + +// ✓ CORRECT - Tamper-proof +callback.$call(undefined, arg1); +fn.$apply(undefined, args); + +// $ prefix for private APIs +const arr = $Array.from(...); // Private globals +map.$set(key, value); // Private methods +const newArr = $newArrayWithSize(5); // JSC intrinsics +$debug("Module loaded:", name); // Debug (stripped in release) +$assert(condition, "message"); // Assertions (stripped in release) +``` + +## Validation and Errors + +```typescript +const { validateFunction } = require("internal/validators"); + +function myAPI(callback) { + if (!$isCallable(callback)) { + throw $ERR_INVALID_ARG_TYPE("callback", "function", callback); + } +} +``` + +## Build Process + +`Source TS/JS → Preprocessor → Bundler → C++ Headers` + +1. Assign numeric IDs (A-Z sorted) +2. Replace `$` with `__intrinsic__`, `require("x")` with `$requireId(n)` +3. Bundle, convert `export default` to `return` +4. Replace `__intrinsic__` with `@`, inline into C++ + +ModuleLoader.zig loads modules by numeric ID via `InternalModuleRegistry.cpp`. 
+ +## Key Rules + +- Use `.$call`/`.$apply` not `.call`/`.apply` +- String literal `require()` only +- Export via `export default {}` +- Use JSC intrinsics for performance +- Run `bun bd` after changes diff --git a/src/js/bun/sql.ts b/src/js/bun/sql.ts index 127915395e..db7b0eb871 100644 --- a/src/js/bun/sql.ts +++ b/src/js/bun/sql.ts @@ -32,6 +32,7 @@ function adapterFromOptions(options: Bun.SQL.__internal.DefinedOptions) { case "postgres": return new PostgresAdapter(options); case "mysql": + case "mariadb": return new MySQLAdapter(options); case "sqlite": return new SQLiteAdapter(options); diff --git a/src/js/internal/perf_hooks/monitorEventLoopDelay.ts b/src/js/internal/perf_hooks/monitorEventLoopDelay.ts new file mode 100644 index 0000000000..7466edb75e --- /dev/null +++ b/src/js/internal/perf_hooks/monitorEventLoopDelay.ts @@ -0,0 +1,71 @@ +// Internal module for monitorEventLoopDelay implementation +const { validateObject, validateInteger } = require("internal/validators"); + +// Private C++ bindings for event loop delay monitoring +const cppMonitorEventLoopDelay = $newCppFunction( + "JSNodePerformanceHooksHistogramPrototype.cpp", + "jsFunction_monitorEventLoopDelay", + 1, +) as (resolution: number) => import("node:perf_hooks").RecordableHistogram; + +const cppEnableEventLoopDelay = $newCppFunction( + "JSNodePerformanceHooksHistogramPrototype.cpp", + "jsFunction_enableEventLoopDelay", + 2, +) as (histogram: import("node:perf_hooks").RecordableHistogram, resolution: number) => void; + +const cppDisableEventLoopDelay = $newCppFunction( + "JSNodePerformanceHooksHistogramPrototype.cpp", + "jsFunction_disableEventLoopDelay", + 1, +) as (histogram: import("node:perf_hooks").RecordableHistogram) => void; + +// IntervalHistogram wrapper class for event loop delay monitoring + +let eventLoopDelayHistogram: import("node:perf_hooks").RecordableHistogram | undefined; +let enabled = false; +let resolution = 10; + +function enable() { + if (enabled) { + return false; + } + 
+ enabled = true; + cppEnableEventLoopDelay(eventLoopDelayHistogram!, resolution); + return true; +} + +function disable() { + if (!enabled) { + return false; + } + + enabled = false; + cppDisableEventLoopDelay(eventLoopDelayHistogram!); + return true; +} + +function monitorEventLoopDelay(options?: { resolution?: number }) { + if (options !== undefined) { + validateObject(options, "options"); + } + + resolution = 10; + let resolutionOption = options?.resolution; + if (typeof resolutionOption !== "undefined") { + validateInteger(resolutionOption, "options.resolution", 1); + resolution = resolutionOption; + } + + if (!eventLoopDelayHistogram) { + eventLoopDelayHistogram = cppMonitorEventLoopDelay(resolution); + $putByValDirect(eventLoopDelayHistogram, "enable", enable); + $putByValDirect(eventLoopDelayHistogram, "disable", disable); + $putByValDirect(eventLoopDelayHistogram, Symbol.dispose, disable); + } + + return eventLoopDelayHistogram; +} + +export default monitorEventLoopDelay; diff --git a/src/js/internal/sql/errors.ts b/src/js/internal/sql/errors.ts index 408090085b..a628c87cc1 100644 --- a/src/js/internal/sql/errors.ts +++ b/src/js/internal/sql/errors.ts @@ -7,7 +7,26 @@ class SQLError extends Error implements Bun.SQL.SQLError { export interface PostgresErrorOptions { code: string; + detail?: string | undefined; + hint?: string | undefined; + severity?: string | undefined; + errno?: string | undefined; + position?: string | undefined; + internalPosition?: string | undefined; + internalQuery?: string | undefined; + where?: string | undefined; + schema?: string | undefined; + table?: string | undefined; + column?: string | undefined; + dataType?: string | undefined; + constraint?: string | undefined; + file?: string | undefined; + line?: string | undefined; + routine?: string | undefined; +} +// oxlint-disable-next-line typescript-eslint(no-unsafe-declaration-merging) +interface PostgresError { detail?: string | undefined; hint?: string | undefined; severity?: 
string | undefined; @@ -28,22 +47,6 @@ export interface PostgresErrorOptions { class PostgresError extends SQLError implements Bun.SQL.PostgresError { public readonly code: string; - public readonly detail: string | undefined; - public readonly hint: string | undefined; - public readonly severity: string | undefined; - public readonly errno: string | undefined; - public readonly position: string | undefined; - public readonly internalPosition: string | undefined; - public readonly internalQuery: string | undefined; - public readonly where: string | undefined; - public readonly schema: string | undefined; - public readonly table: string | undefined; - public readonly column: string | undefined; - public readonly dataType: string | undefined; - public readonly constraint: string | undefined; - public readonly file: string | undefined; - public readonly line: string | undefined; - public readonly routine: string | undefined; constructor(message: string, options: PostgresErrorOptions) { super(message); @@ -51,10 +54,10 @@ class PostgresError extends SQLError implements Bun.SQL.PostgresError { this.name = "PostgresError"; this.code = options.code; + if (options.errno !== undefined) this.errno = options.errno; if (options.detail !== undefined) this.detail = options.detail; if (options.hint !== undefined) this.hint = options.hint; if (options.severity !== undefined) this.severity = options.severity; - if (options.errno !== undefined) this.errno = options.errno; if (options.position !== undefined) this.position = options.position; if (options.internalPosition !== undefined) this.internalPosition = options.internalPosition; if (options.internalQuery !== undefined) this.internalQuery = options.internalQuery; @@ -76,15 +79,20 @@ export interface SQLiteErrorOptions { byteOffset?: number | undefined; } +// oxlint-disable-next-line typescript-eslint(no-unsafe-declaration-merging) +interface SQLiteError { + byteOffset?: number | undefined; +} + class SQLiteError extends SQLError 
implements Bun.SQL.SQLiteError { public readonly code: string; public readonly errno: number; - public readonly byteOffset: number | undefined; constructor(message: string, options: SQLiteErrorOptions) { super(message); this.name = "SQLiteError"; + this.code = options.code; this.errno = options.errno; @@ -94,22 +102,28 @@ class SQLiteError extends SQLError implements Bun.SQL.SQLiteError { export interface MySQLErrorOptions { code: string; - errno: number | undefined; - sqlState: string | undefined; + errno?: number | undefined; + sqlState?: string | undefined; +} + +// oxlint-disable-next-line typescript-eslint(no-unsafe-declaration-merging) +interface MySQLError { + errno?: number | undefined; + sqlState?: string | undefined; } class MySQLError extends SQLError implements Bun.SQL.MySQLError { public readonly code: string; - public readonly errno: number | undefined; - public readonly sqlState: string | undefined; constructor(message: string, options: MySQLErrorOptions) { super(message); this.name = "MySQLError"; this.code = options.code; - this.errno = options.errno; - this.sqlState = options.sqlState; + + if (options.errno !== undefined) this.errno = options.errno; + if (options.sqlState !== undefined) this.sqlState = options.sqlState; } } + export default { PostgresError, SQLError, SQLiteError, MySQLError }; diff --git a/src/js/internal/sql/mysql.ts b/src/js/internal/sql/mysql.ts index 704a457022..44a1002e5c 100644 --- a/src/js/internal/sql/mysql.ts +++ b/src/js/internal/sql/mysql.ts @@ -109,7 +109,7 @@ export interface MySQLDotZig { password: string, databae: string, sslmode: SSLMode, - tls: Bun.TLSOptions | boolean | null, // boolean true => empty TLSOptions object `{}`, boolean false or null => nothing + tls: Bun.TLSOptions | boolean | null | Bun.BunFile, // boolean true => empty TLSOptions object `{}`, boolean false or null => nothing query: string, path: string, onConnected: (err: Error | null, connection: $ZigGeneratedClasses.MySQLConnection) => void, @@ 
-126,7 +126,7 @@ export interface MySQLDotZig { columns: string[] | undefined, bigint: boolean, simple: boolean, - ) => $ZigGeneratedClasses.MySQLSQLQuery; + ) => $ZigGeneratedClasses.MySQLQuery; } const enum SQLCommand { @@ -276,10 +276,10 @@ function onQueryFinish(this: PooledMySQLConnection, onClose: (err: Error) => voi class PooledMySQLConnection { private static async createConnection( - options: Bun.SQL.__internal.DefinedMySQLOptions, - onConnected: (err: Error | null, connection: $ZigGeneratedClasses.MySQLSQLConnection) => void, + options: Bun.SQL.__internal.DefinedPostgresOrMySQLOptions, + onConnected: (err: Error | null, connection: $ZigGeneratedClasses.MySQLConnection) => void, onClose: (err: Error | null) => void, - ): Promise<$ZigGeneratedClasses.MySQLSQLConnection | null> { + ): Promise<$ZigGeneratedClasses.MySQLConnection | null> { const { hostname, port, @@ -292,8 +292,6 @@ class PooledMySQLConnection { connectionTimeout = 30 * 1000, maxLifetime = 0, prepare = true, - - // @ts-expect-error path is currently removed from the types path, } = options; @@ -302,10 +300,10 @@ class PooledMySQLConnection { try { if (typeof password === "function") { password = password(); + } - if (password && $isPromise(password)) { - password = await password; - } + if (password && $isPromise(password)) { + password = await password; } return createMySQLConnection( @@ -336,12 +334,12 @@ class PooledMySQLConnection { } adapter: MySQLAdapter; - connection: $ZigGeneratedClasses.MySQLSQLConnection | null = null; + connection: $ZigGeneratedClasses.MySQLConnection | null = null; state: PooledConnectionState = PooledConnectionState.pending; storedError: Error | null = null; queries: Set<(err: Error) => void> = new Set(); onFinish: ((err: Error | null) => void) | null = null; - connectionInfo: Bun.SQL.__internal.DefinedMySQLOptions; + connectionInfo: Bun.SQL.__internal.DefinedPostgresOrMySQLOptions; flags: number = 0; /// queryCount is used to indicate the number of queries using 
the connection, if a connection is reserved or if its a transaction queryCount will be 1 independently of the number of queries queryCount: number = 0; @@ -392,7 +390,7 @@ class PooledMySQLConnection { // remove from ready connections if its there this.adapter.readyConnections.delete(this); const queries = new Set(this.queries); - this.queries.clear(); + this.queries?.clear?.(); this.queryCount = 0; this.flags &= ~PooledConnectionFlags.reserved; @@ -488,7 +486,7 @@ export class MySQLAdapter implements DatabaseAdapter { - public readonly connectionInfo: Bun.SQL.__internal.DefinedMySQLOptions; + public readonly connectionInfo: Bun.SQL.__internal.DefinedPostgresOrMySQLOptions; public readonly connections: PooledMySQLConnection[]; public readonly readyConnections: Set; @@ -501,7 +499,7 @@ export class MySQLAdapter public totalQueries: number = 0; public onAllQueriesFinished: (() => void) | null = null; - constructor(connectionInfo: Bun.SQL.__internal.DefinedMySQLOptions) { + constructor(connectionInfo: Bun.SQL.__internal.DefinedPostgresOrMySQLOptions) { this.connectionInfo = connectionInfo; this.connections = new Array(connectionInfo.max); this.readyConnections = new Set(); @@ -845,7 +843,7 @@ export class MySQLAdapter return; } - const { promise, resolve } = Promise.withResolvers(); + const { promise, resolve } = Promise.withResolvers(); const timer = setTimeout(() => { // timeout is reached, lets close and probably fail some queries this.#close().finally(resolve); @@ -868,7 +866,7 @@ export class MySQLAdapter } // gracefully close the pool - const { promise, resolve } = Promise.withResolvers(); + const { promise, resolve } = Promise.withResolvers(); this.onAllQueriesFinished = () => { // everything is closed, lets close the pool @@ -1179,7 +1177,7 @@ export class MySQLAdapter export default { MySQLAdapter, - SQLCommand, commandToString, detectCommand, + SQLCommand, }; diff --git a/src/js/internal/sql/postgres.ts b/src/js/internal/sql/postgres.ts index 
a13af04c96..75ad2085ef 100644 --- a/src/js/internal/sql/postgres.ts +++ b/src/js/internal/sql/postgres.ts @@ -126,7 +126,7 @@ export interface PostgresDotZig { password: string, databae: string, sslmode: SSLMode, - tls: Bun.TLSOptions | boolean | null, // boolean true => empty TLSOptions object `{}`, boolean false or null => nothing + tls: Bun.TLSOptions | boolean | null | Bun.BunFile, // boolean true => empty TLSOptions object `{}`, boolean false or null => nothing query: string, path: string, onConnected: (err: Error | null, connection: $ZigGeneratedClasses.PostgresSQLConnection) => void, @@ -293,7 +293,7 @@ function onQueryFinish(this: PooledPostgresConnection, onClose: (err: Error) => class PooledPostgresConnection { private static async createConnection( - options: Bun.SQL.__internal.DefinedPostgresOptions, + options: Bun.SQL.__internal.DefinedPostgresOrMySQLOptions, onConnected: (err: Error | null, connection: $ZigGeneratedClasses.PostgresSQLConnection) => void, onClose: (err: Error | null) => void, ): Promise<$ZigGeneratedClasses.PostgresSQLConnection | null> { @@ -309,8 +309,6 @@ class PooledPostgresConnection { connectionTimeout = 30 * 1000, maxLifetime = 0, prepare = true, - - // @ts-expect-error path is currently removed from the types path, } = options; @@ -319,10 +317,10 @@ class PooledPostgresConnection { try { if (typeof password === "function") { password = password(); + } - if (password && $isPromise(password)) { - password = await password; - } + if (password && $isPromise(password)) { + password = await password; } return createPostgresConnection( @@ -358,7 +356,7 @@ class PooledPostgresConnection { storedError: Error | null = null; queries: Set<(err: Error) => void> = new Set(); onFinish: ((err: Error | null) => void) | null = null; - connectionInfo: Bun.SQL.__internal.DefinedPostgresOptions; + connectionInfo: Bun.SQL.__internal.DefinedPostgresOrMySQLOptions; flags: number = 0; /// queryCount is used to indicate the number of queries using the 
connection, if a connection is reserved or if its a transaction queryCount will be 1 independently of the number of queries queryCount: number = 0; @@ -409,7 +407,7 @@ class PooledPostgresConnection { // remove from ready connections if its there this.adapter.readyConnections?.delete(this); const queries = new Set(this.queries); - this.queries.clear(); + this.queries?.clear?.(); this.queryCount = 0; this.flags &= ~PooledConnectionFlags.reserved; @@ -425,7 +423,7 @@ class PooledPostgresConnection { this.adapter.release(this, true); } - constructor(connectionInfo: Bun.SQL.__internal.DefinedPostgresOptions, adapter: PostgresAdapter) { + constructor(connectionInfo: Bun.SQL.__internal.DefinedPostgresOrMySQLOptions, adapter: PostgresAdapter) { this.state = PooledConnectionState.pending; this.adapter = adapter; this.connectionInfo = connectionInfo; @@ -509,7 +507,7 @@ export class PostgresAdapter $ZigGeneratedClasses.PostgresSQLQuery > { - public readonly connectionInfo: Bun.SQL.__internal.DefinedPostgresOptions; + public readonly connectionInfo: Bun.SQL.__internal.DefinedPostgresOrMySQLOptions; public readonly connections: PooledPostgresConnection[]; public readonly readyConnections: Set; @@ -522,7 +520,7 @@ export class PostgresAdapter public totalQueries: number = 0; public onAllQueriesFinished: (() => void) | null = null; - constructor(connectionInfo: Bun.SQL.__internal.DefinedPostgresOptions) { + constructor(connectionInfo: Bun.SQL.__internal.DefinedPostgresOrMySQLOptions) { this.connectionInfo = connectionInfo; this.connections = new Array(connectionInfo.max); this.readyConnections = new Set(); @@ -850,7 +848,7 @@ export class PostgresAdapter return Promise.all(promises); } - async close(options?: { timeout?: number }) { + async close(options?: { timeout?: number }): Promise { if (this.closed) { return; } @@ -869,7 +867,7 @@ export class PostgresAdapter return; } - const { promise, resolve } = Promise.withResolvers(); + const { promise, resolve } = 
Promise.withResolvers(); const timer = setTimeout(() => { // timeout is reached, lets close and probably fail some queries this.#close().finally(resolve); @@ -892,7 +890,7 @@ export class PostgresAdapter } // gracefully close the pool - const { promise, resolve } = Promise.withResolvers(); + const { promise, resolve } = Promise.withResolvers(); this.onAllQueriesFinished = () => { // everything is closed, lets close the pool diff --git a/src/js/internal/sql/shared.ts b/src/js/internal/sql/shared.ts index 874191aa0c..ea16b2d978 100644 --- a/src/js/internal/sql/shared.ts +++ b/src/js/internal/sql/shared.ts @@ -13,6 +13,7 @@ class SQLResultArray extends PublicArray { public command!: string | null; public lastInsertRowid!: number | bigint | null; public affectedRows!: number | bigint | null; + static [Symbol.toStringTag] = "SQLResults"; constructor(values: T[] = []) { @@ -74,7 +75,7 @@ function normalizeSSLMode(value: string): SSLMode { } } - throw $ERR_INVALID_ARG_VALUE("sslmode", value); + throw $ERR_INVALID_ARG_VALUE("sslmode", value, "must be one of: disable, prefer, require, verify-ca, verify-full"); } export type { SQLHelper }; @@ -114,72 +115,110 @@ class SQLHelper { } } +const SQLITE_MEMORY = ":memory:"; +const SQLITE_MEMORY_VARIANTS: string[] = [":memory:", "sqlite://:memory:", "sqlite:memory"]; + +const sqliteProtocols = [ + { prefix: "sqlite://", stripLength: 9 }, + { prefix: "sqlite:", stripLength: 7 }, + { prefix: "file://", stripLength: -1 }, // Special case we can use Bun.fileURLToPath + { prefix: "file:", stripLength: 5 }, +]; + function parseDefinitelySqliteUrl(value: string | URL | null): string | null { if (value === null) return null; const str = value instanceof URL ? 
value.toString() : value; - if (str === ":memory:" || str === "sqlite://:memory:" || str === "sqlite:memory") return ":memory:"; + if (SQLITE_MEMORY_VARIANTS.includes(str)) { + return SQLITE_MEMORY; + } - // For any URL-like string, just extract the path portion - // Strip the protocol and handle query params - let path: string; + for (const { prefix, stripLength } of sqliteProtocols) { + if (!str.startsWith(prefix)) continue; - if (str.startsWith("sqlite://")) { - path = str.slice(9); // "sqlite://".length - } else if (str.startsWith("sqlite:")) { - path = str.slice(7); // "sqlite:".length - } else if (str.startsWith("file://")) { - // For file:// URLs, use Bun's built-in converter for correct platform handling - // This properly handles Windows paths, UNC paths, etc. - try { - return Bun.fileURLToPath(str); - } catch { - // Fallback: just strip the protocol - path = str.slice(7); // "file://".length + if (stripLength === -1) { + try { + return Bun.fileURLToPath(str); + } catch { + // if it cant pass it's probably query string, we can just strip it + // slicing off the file:// at the beginning + return str.slice(7); + } } - } else if (str.startsWith("file:")) { - path = str.slice(5); // "file:".length - } else { - // Not a SQLite URL - return null; + + return str.slice(stripLength); } - // Remove query parameters if present (only looking for ?) - const queryIndex = path.indexOf("?"); - if (queryIndex !== -1) { - path = path.slice(0, queryIndex); - } - - return path; + // couldn't reliably determine this was definitely a sqlite url + // it still *could* be, but not unambigously. 
+ return null; } -function parseSQLiteOptionsWithQueryParams( - sqliteOptions: Bun.SQL.__internal.DefinedSQLiteOptions, - urlString: string | URL | null | undefined, +function parseSQLiteOptions( + filenameOrUrl: string | URL | null | undefined, + options: Bun.SQL.__internal.OptionsWithDefinedAdapter, ): Bun.SQL.__internal.DefinedSQLiteOptions { - if (!urlString) return sqliteOptions; + // Start with base options + const sqliteOptions: Bun.SQL.__internal.DefinedSQLiteOptions = { + ...options, + adapter: "sqlite" as const, + filename: ":memory:", + }; - let params: URLSearchParams | null = null; + let filename = filenameOrUrl || ":memory:"; + let originalUrl = filename; // Keep the original URL for query parsing - if (urlString instanceof URL) { - params = urlString.searchParams; - } else { - const queryIndex = urlString.indexOf("?"); - if (queryIndex === -1) return sqliteOptions; - - const queryString = urlString.slice(queryIndex + 1); - params = new URLSearchParams(queryString); + if (filename instanceof URL) { + originalUrl = filename.toString(); + filename = filename.toString(); } - const mode = params.get("mode"); + let queryString: string | null = null; + // Parse query string from the original URL before processing + if (typeof originalUrl === "string") { + const queryIndex = originalUrl.indexOf("?"); + if (queryIndex !== -1) { + queryString = originalUrl.slice(queryIndex + 1); + // Strip query from filename for processing + if (typeof filename === "string") { + filename = filename.slice(0, queryIndex); + } + } + } - if (mode === "ro") { - sqliteOptions.readonly = true; - } else if (mode === "rw") { - sqliteOptions.readonly = false; - } else if (mode === "rwc") { - sqliteOptions.readonly = false; - sqliteOptions.create = true; + // Now parse the filename (this handles file:// URLs and other protocols) + const parsedFilename = parseDefinitelySqliteUrl(filename); + if (parsedFilename !== null) { + filename = parsedFilename; + } + + // Empty filename defaults to 
:memory: + sqliteOptions.filename = filename || ":memory:"; + + // Parse query parameters if present + if (queryString) { + const params = new URLSearchParams(queryString); + const mode = params.get("mode"); + + if (mode === "ro") { + sqliteOptions.readonly = true; + } else if (mode === "rw") { + sqliteOptions.readonly = false; + } else if (mode === "rwc") { + sqliteOptions.readonly = false; + sqliteOptions.create = true; + } + } + + // Apply other SQLite-specific options + if ("readonly" in options) { + sqliteOptions.readonly = options.readonly; + } + if ("create" in options) { + sqliteOptions.create = options.create; + } + if ("safeIntegers" in options) { + sqliteOptions.safeIntegers = options.safeIntegers; } return sqliteOptions; @@ -201,178 +240,281 @@ function assertIsOptionsOfAdapter( } } -function hasProtocol(url: string) { - if (typeof url !== "string") return false; - const protocols: string[] = [ - "http", - "https", - "ftp", - "postgres", - "postgresql", - "mysql", - "mysql2", - "mariadb", - "file", - "sqlite", - ]; - for (const protocol of protocols) { - if (url.startsWith(protocol + "://")) { - return true; - } +const DEFAULT_PROTOCOL: Bun.SQL.__internal.Adapter = "postgres"; + +const env = Bun.env; + +/** + * Reads environment variables to try and find a connnection string + * @param adapter If an adapter is specified in the options, pass it here and + * this function will only resolve from environment variables that are specific + * to that adapter. Otherwise it will try them all. 
+ */ +function getConnectionDetailsFromEnvironment( + adapter: Bun.SQL.__internal.Adapter | undefined, +): [url: string | null, sslMode: SSLMode | null, adapter: Bun.SQL.__internal.Adapter | null] { + let url: string | null = null; + let sslMode: SSLMode.require | null = null; + + url ||= env.DATABASE_URL || env.DATABASEURL || null; + if (!url) { + url = env.TLS_DATABASE_URL || null; + if (url) sslMode = SSLMode.require; } - return false; + if (url) return [url, sslMode, adapter || null]; + + if (!adapter || adapter === "postgres") { + url ||= env.POSTGRES_URL || env.PGURL || env.PG_URL || env.PGURL || null; + if (!url) { + url = env.TLS_POSTGRES_DATABASE_URL || null; + if (url) sslMode = SSLMode.require; + } + if (url) return [url, sslMode, "postgres"]; + } + + if (!adapter || adapter === "mysql") { + url ||= env.MYSQL_URL || env.MYSQLURL || null; + if (!url) { + url = env.TLS_MYSQL_DATABASE_URL || null; + if (url) sslMode = SSLMode.require; + } + if (url) return [url, sslMode, "mysql"]; + } + + if (!adapter || adapter === "mariadb") { + url ||= env.MARIADB_URL || env.MARIADBURL || null; + if (!url) { + url = env.TLS_MARIADB_DATABASE_URL || null; + if (url) sslMode = SSLMode.require; + } + if (url) return [url, sslMode, "mariadb"]; + } + + if (!adapter || adapter === "sqlite") { + url ||= env.SQLITE_URL || env.SQLITEURL || null; + // No TLS_ check because SQLite has no applicable sslMode + if (url) return [url, sslMode, "sqlite"]; + } + + return [url, sslMode, adapter || null]; } -function defaultToPostgresIfNoProtocol(url: string | URL | null): URL { +function ensureUrlHasProtocol( + url: T | null, + protocol: string, +): (T extends string ? string : T extends URL ? 
URL : never) | null { + if (url === null) return null; if (url instanceof URL) { - return url; + url.protocol = protocol; + return url as never; } - if (hasProtocol(url as string)) { - return new URL(url as string); - } - return new URL("postgres://" + url); + return `${protocol}://${url}` as never; } -function parseOptions( + +function hasProtocol(url: string | URL): boolean { + if (url instanceof URL) { + return true; + } + + return url.includes("://"); +} + +/** + * @returns A tuple containing the parsed adapter (this is always correct) and a + * url string, that you should continue to use for further options. In some + * cases the it will be a parsed URL instance, and in others a string. This is + * to save unnecessary parses in some cases. The third value is the SSL mode The last value is the options object + * resolved from the possible overloads of the Bun.SQL constructor, it may have modifications + */ +function parseConnectionDetailsFromOptionsOrEnvironment( stringOrUrlOrOptions: Bun.SQL.Options | string | URL | undefined, definitelyOptionsButMaybeEmpty: Bun.SQL.Options, -): Bun.SQL.__internal.DefinedOptions { - const env = Bun.env; +): [url: string | URL | null, sslMode: SSLMode | null, options: Bun.SQL.__internal.OptionsWithDefinedAdapter] { + // Step 1: Determine the options object and initial URL + let options: Bun.SQL.Options; + let stringOrUrl: string | URL | null = null; + let sslMode: SSLMode | null = null; + let adapter: Bun.SQL.__internal.Adapter | null = null; - let [ - stringOrUrl = env.POSTGRES_URL || env.DATABASE_URL || env.PGURL || env.PG_URL || env.MYSQL_URL || null, - options, - ]: [string | URL | null, Bun.SQL.Options] = - typeof stringOrUrlOrOptions === "string" || stringOrUrlOrOptions instanceof URL - ? [stringOrUrlOrOptions, definitelyOptionsButMaybeEmpty] - : stringOrUrlOrOptions - ? 
[null, { ...stringOrUrlOrOptions, ...definitelyOptionsButMaybeEmpty }] - : [null, definitelyOptionsButMaybeEmpty]; + if (typeof stringOrUrlOrOptions === "string" || stringOrUrlOrOptions instanceof URL) { + stringOrUrl = stringOrUrlOrOptions; + options = definitelyOptionsButMaybeEmpty; + } else { + options = stringOrUrlOrOptions + ? { ...stringOrUrlOrOptions, ...definitelyOptionsButMaybeEmpty } + : definitelyOptionsButMaybeEmpty; + [stringOrUrl, sslMode, adapter] = getConnectionDetailsFromEnvironment(options.adapter); + } - if (options.adapter === undefined && stringOrUrl !== null) { - const sqliteUrl = parseDefinitelySqliteUrl(stringOrUrl); + // Resolve URL based on adapter type + let resolvedUrl: string | URL | null = stringOrUrl; - if (sqliteUrl !== null) { - const sqliteOptions: Bun.SQL.__internal.DefinedSQLiteOptions = { - ...options, - adapter: "sqlite", - filename: sqliteUrl, - }; - - return parseSQLiteOptionsWithQueryParams(sqliteOptions, stringOrUrl); + if (options.adapter === "sqlite") { + // SQLite adapter - only check filename (not url) + if ("filename" in options && options.filename) { + resolvedUrl = options.filename; + } + } else if (!options.adapter) { + // Unknown adapter - check both, filename first (more specific) + if ("filename" in options && options.filename) { + resolvedUrl = options.filename; + } else if ("url" in options && options.url) { + resolvedUrl = options.url; + } + } else { + // Known non-SQLite adapter - only check url (not filename) + if ("url" in options && options.url) { + resolvedUrl = options.url; } } if (options.adapter === "sqlite") { - let filenameFromOptions = options.filename || stringOrUrl; - - // Parse sqlite:// URLs when adapter is explicitly sqlite - if (typeof filenameFromOptions === "string" || filenameFromOptions instanceof URL) { - const parsed = parseDefinitelySqliteUrl(filenameFromOptions); - if (parsed !== null) { - filenameFromOptions = parsed; - } - } - - const sqliteOptions: 
Bun.SQL.__internal.DefinedSQLiteOptions = { - ...options, - adapter: "sqlite", - filename: filenameFromOptions || ":memory:", - }; - - return parseSQLiteOptionsWithQueryParams(sqliteOptions, stringOrUrl); + return [resolvedUrl, null, options as Bun.SQL.__internal.OptionsWithDefinedAdapter]; } - if (!stringOrUrl) { - const url = options?.url; - if (typeof url === "string") { - stringOrUrl = defaultToPostgresIfNoProtocol(url); - } else if (url instanceof URL) { - stringOrUrl = url; + if (!options.adapter && resolvedUrl !== null) { + const parsedPath = parseDefinitelySqliteUrl(resolvedUrl); + + if (parsedPath !== null) { + // Return the original URL (with query params) for SQLite parsing + return [resolvedUrl, null, { ...options, adapter: "sqlite" }]; } } - let hostname: string | undefined, - port: number | string | undefined, - username: string | null | undefined, - password: string | (() => Bun.MaybePromise) | undefined | null, - database: string | undefined, - tls: Bun.TLSOptions | boolean | undefined, - url: URL | undefined, - query: string, - idleTimeout: number | null | undefined, - connectionTimeout: number | null | undefined, - maxLifetime: number | null | undefined, - onconnect: ((client: Bun.SQL) => void) | undefined, - onclose: ((client: Bun.SQL) => void) | undefined, - max: number | null | undefined, - bigint: boolean | undefined, - path: string, - adapter: Bun.SQL.__internal.Adapter; + // Step 3: Parse protocol and ensure URL format for non-SQLite databases + let protocol: Bun.SQL.__internal.Adapter | (string & {}) = options.adapter || DEFAULT_PROTOCOL; - let prepare = true; - let sslMode: SSLMode = SSLMode.disable; + let urlToProcess = resolvedUrl || stringOrUrl; - if (!stringOrUrl || (typeof stringOrUrl === "string" && stringOrUrl.length === 0)) { - let urlString = env.POSTGRES_URL || env.DATABASE_URL || env.PGURL || env.PG_URL; + if (urlToProcess instanceof URL) { + protocol = urlToProcess.protocol.replace(/:$/, ""); + } else if (urlToProcess !== null) 
{ + if (hasProtocol(urlToProcess)) { + try { + urlToProcess = new URL(urlToProcess); + protocol = urlToProcess.protocol.replace(/:$/, ""); + } catch (e) { + // options.adapter won't be sqlite here, we already did the special case check for it + if (options.adapter && typeof urlToProcess === "string" && urlToProcess.includes("sqlite")) { + throw new Error( + `Invalid URL '${urlToProcess}' for ${options.adapter}. Did you mean to specify \`{ adapter: "sqlite" }\`?`, + { cause: e }, + ); + } - if (!urlString) { - urlString = env.TLS_POSTGRES_DATABASE_URL || env.TLS_DATABASE_URL; - if (urlString) { - sslMode = SSLMode.require; + // unrelated error to do with url parsing, we should re-throw. This is a real user error + throw e; } - } - - if (urlString) { - // Check if it's a SQLite URL before trying to parse as regular URL - const sqliteUrl = parseDefinitelySqliteUrl(urlString); - if (sqliteUrl !== null) { - const sqliteOptions: Bun.SQL.__internal.DefinedSQLiteOptions = { - ...options, - adapter: "sqlite", - filename: sqliteUrl, - }; - return parseSQLiteOptionsWithQueryParams(sqliteOptions, urlString); - } - - url = new URL(urlString); - } - } else if (stringOrUrl && typeof stringOrUrl === "object") { - if (stringOrUrl instanceof URL) { - url = stringOrUrl; - } else if (options?.url) { - const _url = options.url; - if (typeof _url === "string") { - url = defaultToPostgresIfNoProtocol(_url); - } else if (_url && typeof _url === "object" && _url instanceof URL) { - url = _url; - } - } - if (options?.tls) { - sslMode = SSLMode.require; - tls = options.tls; - } - } else if (typeof stringOrUrl === "string") { - try { - url = defaultToPostgresIfNoProtocol(stringOrUrl); - } catch (e) { - throw new Error(`Invalid URL '${stringOrUrl}' for postgres. 
Did you mean to specify \`{ adapter: "sqlite" }\`?`, { - cause: e, - }); + } else { + // Add protocol if missing + urlToProcess = ensureUrlHasProtocol(urlToProcess, protocol); } } - query = ""; - adapter = options.adapter; + + // Step 4: Set adapter from environment if not already set, but ONLY if not + // already set (options object is highest priority) + if (options.adapter === undefined && adapter !== null) { + options.adapter = adapter; + } + + // Step 5: Return early if adapter is explicitly specified + if (options.adapter) { + // Validate that the adapter is supported + const supportedAdapters = ["postgres", "sqlite", "mysql", "mariadb"]; + if (!supportedAdapters.includes(options.adapter)) { + throw new Error( + `Unsupported adapter: ${options.adapter}. Supported adapters: "postgres", "sqlite", "mysql", "mariadb"`, + ); + } + return [urlToProcess, sslMode, options as Bun.SQL.__internal.OptionsWithDefinedAdapter]; + } + + // Step 6: Infer adapter from protocol + const parsedAdapterFromProtocol = parseAdapterFromProtocol(protocol); + + if (!parsedAdapterFromProtocol) { + throw new Error(`Unsupported protocol: ${protocol}. 
Supported adapters: "postgres", "sqlite", "mysql", "mariadb"`); + } + + return [urlToProcess, sslMode, { ...options, adapter: parsedAdapterFromProtocol }]; +} + +function parseAdapterFromProtocol(protocol: string): Bun.SQL.__internal.Adapter | null { + switch (protocol) { + case "http": + case "https": + case "ftp": + case "postgres": + case "postgresql": + return "postgres"; + + case "mysql": + case "mysql2": + return "mysql"; + + case "mariadb": + return "mariadb"; + + case "file": + case "sqlite": + return "sqlite"; + + default: + return null; + } +} + +function parseOptions( + stringOrUrlOrOptions: Bun.SQL.Options | string | URL | undefined, + definitelyOptionsButMaybeEmpty: Bun.SQL.Options, +): Bun.SQL.__internal.DefinedOptions { + const [_url, sslModeFromConnectionDetails, options] = parseConnectionDetailsFromOptionsOrEnvironment( + stringOrUrlOrOptions, + definitelyOptionsButMaybeEmpty, + ); + + const adapter = options.adapter; + + if (adapter === "sqlite") { + return parseSQLiteOptions(_url, options); + } + + // The rest of this function is logic specific to postgres/mysql/mariadb (they have the same options object) + + let sslMode: SSLMode = sslModeFromConnectionDetails || SSLMode.disable; + + let url = _url; + + let hostname: string | undefined; + let port: number | string | undefined; + let username: string | null | undefined; + let password: string | (() => Bun.MaybePromise) | undefined | null; + let database: string | undefined; + let tls: Bun.TLSOptions | boolean | undefined; + let query: string = ""; + let idleTimeout: number | null | undefined; + let connectionTimeout: number | null | undefined; + let maxLifetime: number | null | undefined; + let onconnect: ((error?: Error | undefined) => void) | undefined; + let onclose: ((error?: Error | undefined) => void) | undefined; + let max: number | null | undefined; + let bigint: boolean | undefined; + let path: string; + let prepare: boolean = true; + + if (url !== null) { + url = url instanceof URL ? 
url : new URL(url); + } + if (url) { - ({ hostname, port, username, password, adapter } = options); - // object overrides url - hostname ||= url.hostname; - port ||= url.port; - username ||= decodeIfValid(url.username); - password ||= decodeIfValid(url.password); - adapter ||= url.protocol as Bun.SQL.__internal.Adapter; - if (adapter && adapter[adapter.length - 1] === ":") { - adapter = adapter.slice(0, -1) as Bun.SQL.__internal.Adapter; - } + // TODO(@alii): Move this logic into the switch statements below + // options object is always higher priority + hostname ||= options.host || options.hostname || url.hostname; + port ||= options.port || url.port; + username ||= options.user || options.username || decodeIfValid(url.username); + password ||= options.pass || options.password || decodeIfValid(url.password); + + path ||= options.path || url.pathname; const queryObject = url.searchParams.toJSON(); for (const key in queryObject) { @@ -390,38 +532,38 @@ function parseOptions( } query = query.trim(); } - if (adapter) { - switch (adapter) { - case "http": - case "https": - case "ftp": - case "postgres": - case "postgresql": - adapter = "postgres"; - break; - case "mysql": - case "mysql2": - case "mariadb": - adapter = "mysql"; - break; - case "file": - case "sqlite": - adapter = "sqlite"; - break; - default: - options.adapter satisfies never; // This will type error if we support a new adapter in the future, which will let us know to update this check - throw new Error(`Unsupported adapter: ${options.adapter}. 
Supported adapters: "postgres", "sqlite", "mysql"`); + + switch (adapter) { + case "postgres": { + hostname ||= options.hostname || options.host || env.PG_HOST || env.PGHOST || "localhost"; + break; + } + case "mysql": { + hostname ||= options.hostname || options.host || env.MYSQL_HOST || env.MYSQLHOST || "localhost"; + break; + } + case "mariadb": { + hostname ||= options.hostname || options.host || env.MARIADB_HOST || env.MARIADBHOST || "localhost"; + break; } - } else { - adapter = "postgres"; } - options.adapter = adapter; - assertIsOptionsOfAdapter(options, adapter); - hostname ||= options.hostname || options.host || env.PGHOST || "localhost"; - port ||= Number(options.port || env.PGPORT || (adapter === "mysql" ? 3306 : 5432)); + switch (adapter) { + case "postgres": { + port ||= Number(options.port || env.PG_PORT || env.PGPORT || "5432"); + break; + } + case "mysql": { + port ||= Number(options.port || env.MYSQL_PORT || env.MYSQLPORT || "3306"); + break; + } + case "mariadb": { + port ||= Number(options.port || env.MARIADB_PORT || env.MARIADBPORT || "3306"); + break; + } + } - path ||= (options as { path?: string }).path || ""; + path ||= options.path || ""; if (adapter === "postgres") { // add /.s.PGSQL.${port} if the unix domain socket is listening on that path @@ -437,21 +579,74 @@ function parseOptions( } } - username ||= - options.username || - options.user || - env.PGUSERNAME || - env.PGUSER || - env.USER || - env.USERNAME || - (adapter === "mysql" ? "root" : "postgres"); // default username for mysql is root and for postgres is postgres; - database ||= - options.database || - options.db || - decodeIfValid((url?.pathname ?? "").slice(1)) || - env.PGDATABASE || - (adapter === "mysql" ? 
"mysql" : username); // default database; - password ||= options.password || options.pass || env.PGPASSWORD || ""; + switch (adapter) { + case "mysql": { + username ||= options.username || options.user || env.MYSQL_USER || env.MYSQLUSER || env.USER || "root"; + break; + } + case "mariadb": { + username ||= options.username || options.user || env.MARIADB_USER || env.MARIADBUSER || env.USER || "root"; + break; + } + case "postgres": { + username ||= options.username || options.user || env.PG_USER || env.PGUSER || env.USER || "postgres"; + break; + } + } + + switch (adapter) { + case "mysql": { + password ||= options.password || options.pass || env.MYSQL_PASSWORD || env.MYSQLPASSWORD || env.PASSWORD || ""; + break; + } + + case "mariadb": { + password ||= + options.password || options.pass || env.MARIADB_PASSWORD || env.MARIADBPASSWORD || env.PASSWORD || ""; + break; + } + + case "postgres": { + password ||= options.password || options.pass || env.PG_PASSWORD || env.PGPASSWORD || env.PASSWORD || ""; + break; + } + } + + switch (adapter) { + case "postgres": { + database ||= + options.database || + options.db || + env.PG_DATABASE || + env.PGDATABASE || + decodeIfValid((url?.pathname ?? "").slice(1)) || + username; + break; + } + + case "mysql": { + database ||= + options.database || + options.db || + env.MYSQL_DATABASE || + env.MYSQLDATABASE || + decodeIfValid((url?.pathname ?? "").slice(1)) || + "mysql"; + break; + } + + case "mariadb": { + database ||= + options.database || + options.db || + env.MARIADB_DATABASE || + env.MARIADBDATABASE || + decodeIfValid((url?.pathname ?? 
"").slice(1)) || + "mariadb"; + break; + } + } + const connection = options.connection; if (connection && $isObject(connection)) { for (const key in connection) { @@ -473,6 +668,7 @@ function parseOptions( maxLifetime ??= options.maxLifetime; maxLifetime ??= options.max_lifetime; bigint ??= options.bigint; + // we need to explicitly set prepare to false if it is false if (options.prepare === false) { if (adapter === "mysql") { @@ -483,6 +679,7 @@ function parseOptions( onconnect ??= options.onconnect; onclose ??= options.onclose; + if (onconnect !== undefined) { if (!$isCallable(onconnect)) { throw $ERR_INVALID_ARG_TYPE("onconnect", "function", onconnect); @@ -549,6 +746,7 @@ function parseOptions( if (tls && sslMode === SSLMode.disable) { sslMode = SSLMode.prefer; } + port = Number(port); if (!Number.isSafeInteger(port) || port < 1 || port > 65535) { @@ -617,9 +815,10 @@ export interface DatabaseAdapter { normalizeQuery(strings: string | TemplateStringsArray, values: unknown[]): [sql: string, values: unknown[]]; createQueryHandle(sql: string, values: unknown[], flags: number): QueryHandle; connect(onConnected: OnConnected, reserved?: boolean): void; - release(connection: ConnectionHandle, connectingEvent?: boolean): void; + release(connection: Connection, connectingEvent?: boolean): void; close(options?: { timeout?: number }): Promise; flush(): void; + isConnected(): boolean; get closed(): boolean; @@ -649,7 +848,10 @@ export default { assertIsOptionsOfAdapter, parseOptions, SQLHelper, - SSLMode, normalizeSSLMode, SQLResultArray, + + // @ts-expect-error we're exporting a const enum which works in our builtins + // generator but not in typescript officially + SSLMode, }; diff --git a/src/js/internal/streams/legacy.ts b/src/js/internal/streams/legacy.ts index 5adff57197..005a6f9342 100644 --- a/src/js/internal/streams/legacy.ts +++ b/src/js/internal/streams/legacy.ts @@ -1,6 +1,7 @@ "use strict"; const EE = require("node:events"); +const { isArrayBufferView, 
isUint8Array } = require("node:util/types"); const ReflectOwnKeys = Reflect.ownKeys; const ArrayIsArray = Array.isArray; @@ -113,6 +114,13 @@ function prependListener(emitter, event, fn) { else emitter._events[event] = [fn, emitter._events[event]]; } +// Add helper methods to Stream +Stream._isArrayBufferView = isArrayBufferView; +Stream._isUint8Array = isUint8Array; +Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { + return new $Buffer(chunk.buffer, chunk.byteOffset, chunk.byteLength); +}; + export default { Stream, prependListener } as unknown as { Stream: typeof import("node:stream").Stream; prependListener: typeof prependListener; diff --git a/src/js/node/child_process.ts b/src/js/node/child_process.ts index 510e4aca7b..35354dfdda 100644 --- a/src/js/node/child_process.ts +++ b/src/js/node/child_process.ts @@ -1513,6 +1513,73 @@ class ChildProcess extends EventEmitter { unref() { if (this.#handle) this.#handle.unref(); } + + // Static initializer to make stdio properties enumerable on the prototype + // This fixes libraries like tinyspawn that use Object.assign(promise, childProcess) + static { + Object.defineProperties(this.prototype, { + stdin: { + get: function () { + const value = (this.#stdin ??= this.#getBunSpawnIo(0, this.#encoding, false)); + // Define as own enumerable property on first access + Object.defineProperty(this, "stdin", { + value: value, + enumerable: true, + configurable: true, + writable: true, + }); + return value; + }, + enumerable: true, + configurable: true, + }, + stdout: { + get: function () { + const value = (this.#stdout ??= this.#getBunSpawnIo(1, this.#encoding, false)); + // Define as own enumerable property on first access + Object.defineProperty(this, "stdout", { + value: value, + enumerable: true, + configurable: true, + writable: true, + }); + return value; + }, + enumerable: true, + configurable: true, + }, + stderr: { + get: function () { + const value = (this.#stderr ??= this.#getBunSpawnIo(2, 
this.#encoding, false)); + // Define as own enumerable property on first access + Object.defineProperty(this, "stderr", { + value: value, + enumerable: true, + configurable: true, + writable: true, + }); + return value; + }, + enumerable: true, + configurable: true, + }, + stdio: { + get: function () { + const value = (this.#stdioObject ??= this.#createStdioObject()); + // Define as own enumerable property on first access + Object.defineProperty(this, "stdio", { + value: value, + enumerable: true, + configurable: true, + writable: true, + }); + return value; + }, + enumerable: true, + configurable: true, + }, + }); + } } //------------------------------------------------------------------------------ diff --git a/src/js/node/fs.ts b/src/js/node/fs.ts index 1c56c4f754..d2d8aebad0 100644 --- a/src/js/node/fs.ts +++ b/src/js/node/fs.ts @@ -533,7 +533,7 @@ var access = function access(path, mode, callback) { copyFileSync = fs.copyFileSync.bind(fs), // This behavior - never throwing -- matches Node.js behavior. 
// https://github.com/nodejs/node/blob/c82f3c9e80f0eeec4ae5b7aedd1183127abda4ad/lib/fs.js#L275C1-L295C1 - existsSync = function existsSync() { + existsSync = function existsSync(_path: string) { try { return fs.existsSync.$apply(fs, arguments); } catch { diff --git a/src/js/node/http2.ts b/src/js/node/http2.ts index 551f4c4dd0..055adb333e 100644 --- a/src/js/node/http2.ts +++ b/src/js/node/http2.ts @@ -53,6 +53,7 @@ const Socket = net.Socket; const EventEmitter = require("node:events"); const { Duplex } = Stream; const { SafeArrayIterator, SafeSet } = require("internal/primordials"); +const { promisify } = require("internal/promisify"); const RegExpPrototypeExec = RegExp.prototype.exec; const ObjectAssign = Object.assign; @@ -3923,6 +3924,19 @@ function getDefaultSettings() { return getUnpackedSettings(); } +Object.defineProperty(connect, promisify.custom, { + __proto__: null, + value: function (authority, options) { + const { promise, resolve, reject } = Promise.withResolvers(); + const server = connect(authority, options, () => { + server.removeListener("error", reject); + return resolve(server); + }); + server.once("error", reject); + return promise; + }, +}); + export default { constants, createServer, diff --git a/src/js/node/perf_hooks.ts b/src/js/node/perf_hooks.ts index 64305a693b..f5b05ed542 100644 --- a/src/js/node/perf_hooks.ts +++ b/src/js/node/perf_hooks.ts @@ -1,12 +1,6 @@ // Hardcoded module "node:perf_hooks" const { throwNotImplemented } = require("internal/shared"); -const createFunctionThatMasqueradesAsUndefined = $newCppFunction( - "ZigGlobalObject.cpp", - "jsFunctionCreateFunctionThatMasqueradesAsUndefined", - 2, -); - const cppCreateHistogram = $newCppFunction("JSNodePerformanceHooksHistogram.cpp", "jsFunction_createHistogram", 3) as ( min: number, max: number, @@ -178,8 +172,10 @@ export default { PerformanceObserver, PerformanceObserverEntryList, PerformanceNodeTiming, - // TODO: node:perf_hooks.monitorEventLoopDelay -- 
https://github.com/oven-sh/bun/issues/17650 - monitorEventLoopDelay: createFunctionThatMasqueradesAsUndefined("", 0), + monitorEventLoopDelay: function monitorEventLoopDelay(options?: { resolution?: number }) { + const impl = require("internal/perf_hooks/monitorEventLoopDelay"); + return impl(options); + }, createHistogram: function createHistogram(options?: { lowest?: number | bigint; highest?: number | bigint; diff --git a/src/js/node/v8.ts b/src/js/node/v8.ts index eda4cf01dd..7ed487bfa8 100644 --- a/src/js/node/v8.ts +++ b/src/js/node/v8.ts @@ -76,7 +76,7 @@ function getHeapStatistics() { // -- Copied from Node: does_zap_garbage: 0, - number_of_native_contexts: 1, + number_of_native_contexts: stats.globalObjectCount, number_of_detached_contexts: 0, total_global_handles_size: 8192, used_global_handles_size: 2208, diff --git a/src/js/private.d.ts b/src/js/private.d.ts index d4763f5bec..82f461d12d 100644 --- a/src/js/private.d.ts +++ b/src/js/private.d.ts @@ -12,9 +12,11 @@ declare function $bundleError(...message: any[]): never; declare module "bun" { namespace SQL.__internal { - type Define = T & { - [Key in K | "adapter"]: NonNullable; - } & {}; + type Define = T extends any + ? 
T & { + [Key in K | "adapter"]: NonNullable; + } & {} + : never; type Adapter = NonNullable; @@ -24,16 +26,15 @@ declare module "bun" { type DefinedSQLiteOptions = Define; /** - * Represents the result of the `parseOptions()` function in the postgres path + * Represents the result of the `parseOptions()` function in the postgres, mysql or mariadb path */ - type DefinedPostgresOptions = Define & { + type DefinedPostgresOrMySQLOptions = Define & { sslMode: import("internal/sql/shared").SSLMode; query: string; }; - type DefinedMySQLOptions = DefinedPostgresOptions; - - type DefinedOptions = DefinedSQLiteOptions | DefinedPostgresOptions | DefinedMySQLOptions; + type DefinedOptions = DefinedSQLiteOptions | DefinedPostgresOrMySQLOptions; + type OptionsWithDefinedAdapter = Define; } } diff --git a/src/js/thirdparty/node-fetch.ts b/src/js/thirdparty/node-fetch.ts index cbb0c24210..42a5a7c275 100644 --- a/src/js/thirdparty/node-fetch.ts +++ b/src/js/thirdparty/node-fetch.ts @@ -1,5 +1,3 @@ -import type * as s from "stream"; - // Users may override the global fetch implementation, so we need to ensure these are the originals. const bindings = $cpp("NodeFetch.cpp", "createNodeFetchInternalBinding"); const WebResponse: typeof globalThis.Response = bindings[0]; @@ -147,22 +145,16 @@ class Request extends WebRequest { * like `.json()` or `.text()`, which is faster in Bun's native fetch, vs `node-fetch` going * through `node:http`, a node stream, then processing the data. */ -async function fetch(url: any, init?: RequestInit & { body?: any }) { - // input node stream -> web stream - let body: s.Readable | undefined = init?.body; - if (body) { - const chunks: any = []; - const { Readable } = require("node:stream"); - if (body instanceof Readable) { - // TODO: Bun fetch() doesn't support ReadableStream at all. 
- for await (const chunk of body) { - chunks.push(chunk); - } - init = { ...init, body: new Blob(chunks) }; - } - } +async function fetch( + // eslint-disable-next-line no-unused-vars + url: any, - const response = await nativeFetch(url, init); + // eslint-disable-next-line no-unused-vars + init?: RequestInit & { body?: any }, +) { + // Since `body` accepts async iterables + // We don't need to convert the Readable body into a ReadableStream. + const response = await nativeFetch.$apply(undefined, arguments); Object.setPrototypeOf(response, ResponsePrototype); return response; } diff --git a/src/js_printer.zig b/src/js_printer.zig index a0118ea8b2..b005a6cb12 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -479,6 +479,14 @@ pub const RequireOrImportMeta = struct { }; }; +fn isIdentifierOrNumericConstantOrPropertyAccess(expr: *const Expr) bool { + return switch (expr.data) { + .e_identifier, .e_dot, .e_index => true, + .e_number => |e| std.math.isInf(e.value) or std.math.isNan(e.value), + else => false, + }; +} + pub const PrintResult = union(enum) { result: Success, err: anyerror, @@ -1578,6 +1586,13 @@ fn NewPrinter( return &p.import_records[import_record_index]; } + pub fn isUnboundIdentifier(p: *Printer, expr: *const Expr) bool { + if (expr.data != .e_identifier) return false; + const ref = expr.data.e_identifier.ref; + const symbol = p.symbols().get(p.symbols().follow(ref)) orelse return false; + return symbol.kind == .unbound; + } + pub fn printRequireOrImportExpr( p: *Printer, import_record_index: u32, @@ -2997,13 +3012,26 @@ fn NewPrinter( p.printSpace(); } else { p.printSpaceBeforeOperator(e.op); + if (e.op.isPrefix()) { + p.addSourceMapping(expr.loc); + } p.print(entry.text); p.prev_op = e.op; p.prev_op_end = p.writer.written; } if (e.op.isPrefix()) { - p.printExpr(e.value, Op.Level.sub(.prefix, 1), ExprFlag.None()); + // Never turn "typeof (0, x)" into "typeof x" or "delete (0, x)" into "delete x" + if ((e.op == .un_typeof and 
!e.flags.was_originally_typeof_identifier and p.isUnboundIdentifier(&e.value)) or + (e.op == .un_delete and !e.flags.was_originally_delete_of_identifier_or_property_access and isIdentifierOrNumericConstantOrPropertyAccess(&e.value))) + { + p.print("(0,"); + p.printSpace(); + p.printExpr(e.value, Op.Level.sub(.prefix, 1), ExprFlag.None()); + p.print(")"); + } else { + p.printExpr(e.value, Op.Level.sub(.prefix, 1), ExprFlag.None()); + } } if (wrap) { @@ -5837,8 +5865,8 @@ pub fn printJSON( var stmts = [_]js_ast.Stmt{stmt}; var parts = [_]js_ast.Part{.{ .stmts = &stmts }}; const ast = Ast.initTest(&parts); - const list = js_ast.Symbol.List.init(ast.symbols.slice()); - const nested_list = js_ast.Symbol.NestedList.init(&[_]js_ast.Symbol.List{list}); + const list = js_ast.Symbol.List.fromBorrowedSliceDangerous(ast.symbols.slice()); + const nested_list = js_ast.Symbol.NestedList.fromBorrowedSliceDangerous(&.{list}); var renamer = rename.NoOpRenamer.init(js_ast.Symbol.Map.initList(nested_list), source); var printer = PrinterType.init( diff --git a/src/linker.zig b/src/linker.zig index 13c1316405..a71015523a 100644 --- a/src/linker.zig +++ b/src/linker.zig @@ -112,14 +112,10 @@ pub const Linker = struct { const is_deferred = result.pending_imports.len > 0; - const import_records = result.ast.import_records.listManaged(linker.allocator); - defer { - result.ast.import_records = ImportRecord.List.fromList(import_records); - } // Step 1. Resolve imports & requires switch (result.loader) { .jsx, .js, .ts, .tsx => { - for (import_records.items, 0..) |*import_record, record_i| { + for (result.ast.import_records.slice(), 0..) 
|*import_record, record_i| { if (import_record.is_unused or (is_bun and is_deferred and !result.isPendingImport(@intCast(record_i)))) continue; diff --git a/src/napi/napi.zig b/src/napi/napi.zig index 046164c4d5..d961c52a8f 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -324,9 +324,11 @@ pub export fn napi_create_array_with_length(env_: napi_env, length: usize, resul return env.invalidArg(); }; - // JSC createEmptyArray takes u32 - // Node and V8 convert out-of-bounds array sizes to 0 - const len = std.math.cast(u32, length) orelse 0; + // https://github.com/nodejs/node/blob/14c68e3b536798e25f810ed7ae180a5cde9e47d3/deps/v8/src/api/api.cc#L8163-L8174 + // size_t immediately cast to int as argument to Array::New, then min 0 + const len_i64: i64 = @bitCast(length); + const len_i32: i32 = @truncate(len_i64); + const len: u32 = if (len_i32 > 0) @bitCast(len_i32) else 0; const array = jsc.JSValue.createEmptyArray(env.toJS(), len) catch return env.setLastError(.pending_exception); array.ensureStillAlive(); @@ -542,7 +544,7 @@ pub export fn napi_get_prototype(env_: napi_env, object_: napi_value, result_: ? 
pub extern fn napi_set_element(env_: napi_env, object_: napi_value, index: c_uint, value_: napi_value) napi_status; pub extern fn napi_has_element(env_: napi_env, object_: napi_value, index: c_uint, result_: ?*bool) napi_status; pub extern fn napi_get_element(env: napi_env, object: napi_value, index: u32, result: *napi_value) napi_status; -pub extern fn napi_delete_element(env: napi_env, object: napi_value, index: u32, result: *napi_value) napi_status; +pub extern fn napi_delete_element(env: napi_env, object: napi_value, index: u32, result: *bool) napi_status; pub extern fn napi_define_properties(env: napi_env, object: napi_value, property_count: usize, properties: [*c]const napi_property_descriptor) napi_status; pub export fn napi_is_array(env_: napi_env, value_: napi_value, result_: ?*bool) napi_status { log("napi_is_array", .{}); @@ -583,8 +585,7 @@ pub export fn napi_strict_equals(env_: napi_env, lhs_: napi_value, rhs_: napi_va return env.invalidArg(); }; const lhs, const rhs = .{ lhs_.get(), rhs_.get() }; - // TODO: this needs to be strictEquals not isSameValue (NaN !== NaN and -0 === 0) - result.* = lhs.isSameValue(rhs, env.toJS()) catch return env.setLastError(.pending_exception); + result.* = lhs.isStrictEqual(rhs, env.toJS()) catch return env.setLastError(.pending_exception); return env.ok(); } pub extern fn napi_call_function(env: napi_env, recv: napi_value, func: napi_value, argc: usize, argv: [*c]const napi_value, result: *napi_value) napi_status; diff --git a/src/pool.zig b/src/pool.zig index 8e2b538ae8..85d7b8f043 100644 --- a/src/pool.zig +++ b/src/pool.zig @@ -214,8 +214,7 @@ pub fn ObjectPool( if (comptime max_count > 0) { if (data().count >= max_count) { if (comptime log_allocations) std.io.getStdErr().writeAll(comptime std.fmt.comptimePrint("Free {s} - {d} bytes\n", .{ @typeName(Type), @sizeOf(Type) })) catch {}; - if (std.meta.hasFn(Type, "deinit")) node.data.deinit(); - node.allocator.destroy(node); + destroyNode(node); return; } } @@ -242,10 
+241,20 @@ pub fn ObjectPool( dat.list.first = null; while (next) |node| { next = node.next; - if (std.meta.hasFn(Type, "deinit")) node.data.deinit(); - node.allocator.destroy(node); + destroyNode(node); } } + + fn destroyNode(node: *LinkedList.Node) void { + // TODO: Once a generic-allocator version of `BabyList` is added, change + // `ByteListPool` in `bun.js/webcore.zig` to use a managed default-allocator + // `ByteList` instead, and then get rid of the special-casing for `ByteList` + // here. This will fix a memory leak. + if (comptime Type != bun.ByteList) { + bun.memory.deinit(&node.data); + } + node.allocator.destroy(node); + } }; } diff --git a/src/ptr/owned.zig b/src/ptr/owned.zig index 1af997a3d9..3dd2b36d2c 100644 --- a/src/ptr/owned.zig +++ b/src/ptr/owned.zig @@ -60,6 +60,7 @@ pub fn OwnedIn(comptime Pointer: type, comptime Allocator: type) type { } }, .slice => struct { + /// Note: this creates *shallow* copies of `elem`. pub fn alloc(count: usize, elem: Child) AllocError!Self { return .allocIn(count, elem, bun.memory.initDefault(Allocator)); } @@ -82,6 +83,7 @@ pub fn OwnedIn(comptime Pointer: type, comptime Allocator: type) type { } }, .slice => struct { + /// Note: this creates *shallow* copies of `elem`. pub fn allocIn(count: usize, elem: Child, allocator_: Allocator) AllocError!Self { const data = try bun.allocators.asStd(allocator_).alloc(Child, count); @memset(data, elem); diff --git a/src/ptr/ref_count.zig b/src/ptr/ref_count.zig index c0433d5da4..afa7d6a8b6 100644 --- a/src/ptr/ref_count.zig +++ b/src/ptr/ref_count.zig @@ -175,11 +175,21 @@ pub fn RefCount(T: type, field_name: []const u8, destructor: anytype, options: O /// The count is 0 after the destructor is called. pub fn assertNoRefs(count: *const @This()) void { - if (enable_debug) { + if (comptime bun.Environment.ci_assert) { bun.assert(count.raw_count == 0); } } + /// Sets the ref count to 0 without running the destructor. 
+ /// + /// Only use this if you're about to free the object (e.g., with `bun.destroy`). + /// + /// Don't modify the ref count or create any `RefPtr`s after calling this method. + pub fn clearWithoutDestructor(count: *@This()) void { + count.assertSingleThreaded(); + count.raw_count = 0; + } + fn assertSingleThreaded(count: *@This()) void { count.thread.lockOrAssert(); } @@ -282,11 +292,23 @@ pub fn ThreadSafeRefCount(T: type, field_name: []const u8, destructor: fn (*T) v /// The count is 0 after the destructor is called. pub fn assertNoRefs(count: *const @This()) void { - if (enable_debug) { + if (comptime bun.Environment.ci_assert) { bun.assert(count.raw_count.load(.seq_cst) == 0); } } + /// Sets the ref count to 0 without running the destructor. + /// + /// Only use this if you're about to free the object (e.g., with `bun.destroy`). + /// + /// Don't modify the ref count or create any `RefPtr`s after calling this method. + pub fn clearWithoutDestructor(count: *@This()) void { + // This method should only be used if you're about to free the object. You shouldn't + // be freeing the object if other threads might be using it, and no memory order can + // help with that, so .monotonic is sufficient. 
+ count.raw_count.store(0, .monotonic); + } + fn getRefCount(self: *T) *@This() { return &@field(self, field_name); } diff --git a/src/runtime.zig b/src/runtime.zig index 2d8ab1e1c0..fd6de9fa07 100644 --- a/src/runtime.zig +++ b/src/runtime.zig @@ -168,6 +168,7 @@ pub const Runtime = struct { minify_syntax: bool = false, minify_identifiers: bool = false, + minify_whitespace: bool = false, dead_code_elimination: bool = true, set_breakpoint_on_first_line: bool = false, diff --git a/src/s3/client.zig b/src/s3/client.zig index 8225d211ab..1117409b8a 100644 --- a/src/s3/client.zig +++ b/src/s3/client.zig @@ -104,7 +104,7 @@ pub fn listObjects( ) void { var search_params: bun.ByteList = .{}; - bun.handleOom(search_params.append(bun.default_allocator, "?")); + bun.handleOom(search_params.appendSlice(bun.default_allocator, "?")); if (listOptions.continuation_token) |continuation_token| { var buff: [1024]u8 = undefined; @@ -127,9 +127,9 @@ pub fn listObjects( if (listOptions.encoding_type != null) { if (listOptions.continuation_token != null or listOptions.delimiter != null) { - bun.handleOom(search_params.append(bun.default_allocator, "&encoding-type=url")); + bun.handleOom(search_params.appendSlice(bun.default_allocator, "&encoding-type=url")); } else { - bun.handleOom(search_params.append(bun.default_allocator, "encoding-type=url")); + bun.handleOom(search_params.appendSlice(bun.default_allocator, "encoding-type=url")); } } @@ -142,9 +142,9 @@ pub fn listObjects( } if (listOptions.continuation_token != null or listOptions.delimiter != null or listOptions.encoding_type != null or listOptions.fetch_owner != null) { - bun.handleOom(search_params.append(bun.default_allocator, "&list-type=2")); + bun.handleOom(search_params.appendSlice(bun.default_allocator, "&list-type=2")); } else { - bun.handleOom(search_params.append(bun.default_allocator, "list-type=2")); + bun.handleOom(search_params.appendSlice(bun.default_allocator, "list-type=2")); } if (listOptions.max_keys) 
|max_keys| { @@ -170,7 +170,7 @@ pub fn listObjects( .method = .GET, .search_params = search_params.slice(), }, true, null) catch |sign_err| { - search_params.deinitWithAllocator(bun.default_allocator); + search_params.deinit(bun.default_allocator); const error_code_and_message = Error.getSignErrorCodeAndMessage(sign_err); callback(.{ .failure = .{ .code = error_code_and_message.code, .message = error_code_and_message.message } }, callback_context); @@ -178,7 +178,7 @@ pub fn listObjects( return; }; - search_params.deinitWithAllocator(bun.default_allocator); + search_params.deinit(bun.default_allocator); const headers = bun.handleOom(bun.http.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator)); @@ -631,14 +631,14 @@ pub fn readableStream( } if (has_more) { readable.ptr.Bytes.onData( - .{ .temporary = bun.ByteList.initConst(chunk.list.items) }, + .{ .temporary = bun.ByteList.fromBorrowedSliceDangerous(chunk.list.items) }, bun.default_allocator, ); return; } readable.ptr.Bytes.onData( - .{ .temporary_and_done = bun.ByteList.initConst(chunk.list.items) }, + .{ .temporary_and_done = bun.ByteList.fromBorrowedSliceDangerous(chunk.list.items) }, bun.default_allocator, ); return; diff --git a/src/s3/multipart.zig b/src/s3/multipart.zig index 18fe0a8308..acc303469b 100644 --- a/src/s3/multipart.zig +++ b/src/s3/multipart.zig @@ -284,7 +284,7 @@ pub const MultiPartUpload = struct { if (this.multipart_etags.capacity > 0) this.multipart_etags.deinit(bun.default_allocator); if (this.multipart_upload_list.cap > 0) - this.multipart_upload_list.deinitWithAllocator(bun.default_allocator); + this.multipart_upload_list.deinit(bun.default_allocator); bun.destroy(this); } @@ -438,7 +438,10 @@ pub const MultiPartUpload = struct { // sort the etags std.sort.block(UploadPart.UploadPartResult, this.multipart_etags.items, this, UploadPart.sortEtags); // start the multipart upload list - bun.handleOom(this.multipart_upload_list.append(bun.default_allocator, "")); + 
bun.handleOom(this.multipart_upload_list.appendSlice( + bun.default_allocator, + "", + )); for (this.multipart_etags.items) |tag| { bun.handleOom(this.multipart_upload_list.appendFmt(bun.default_allocator, "{}{s}", .{ tag.number, tag.etag })); @@ -446,7 +449,10 @@ pub const MultiPartUpload = struct { } this.multipart_etags.deinit(bun.default_allocator); this.multipart_etags = .{}; - bun.handleOom(this.multipart_upload_list.append(bun.default_allocator, "")); + bun.handleOom(this.multipart_upload_list.appendSlice( + bun.default_allocator, + "", + )); // will deref and ends after commit this.commitMultiPartRequest(); } else if (this.state == .singlefile_started) { diff --git a/src/safety/CriticalSection.zig b/src/safety/CriticalSection.zig index d308295b5d..39604d9233 100644 --- a/src/safety/CriticalSection.zig +++ b/src/safety/CriticalSection.zig @@ -197,11 +197,11 @@ pub fn end(self: *Self) void { if (comptime enabled) self.internal_state.unlock(); } +pub const enabled = bun.Environment.ci_assert; + const bun = @import("bun"); const invalid_thread_id = @import("./thread_id.zig").invalid; const StoredTrace = bun.crash_handler.StoredTrace; - -const enabled = bun.Environment.ci_assert; const traces_enabled = bun.Environment.isDebug; const std = @import("std"); diff --git a/src/safety/ThreadLock.zig b/src/safety/ThreadLock.zig index 143bd1904a..8d8798a7bb 100644 --- a/src/safety/ThreadLock.zig +++ b/src/safety/ThreadLock.zig @@ -67,11 +67,11 @@ pub fn lockOrAssert(self: *Self) void { } } +pub const enabled = bun.Environment.ci_assert; + const bun = @import("bun"); const invalid_thread_id = @import("./thread_id.zig").invalid; const StoredTrace = bun.crash_handler.StoredTrace; - -const enabled = bun.Environment.ci_assert; const traces_enabled = bun.Environment.isDebug; const std = @import("std"); diff --git a/src/safety/alloc.zig b/src/safety/alloc.zig index 3c544496d8..6a0c6eec48 100644 --- a/src/safety/alloc.zig +++ b/src/safety/alloc.zig @@ -30,7 +30,7 @@ fn 
hasPtr(alloc: Allocator) bool { bun.MaxHeapAllocator.isInstance(alloc) or alloc.vtable == bun.allocators.c_allocator.vtable or alloc.vtable == bun.allocators.z_allocator.vtable or - bun.MimallocArena.isInstance(alloc) or + MimallocArena.isInstance(alloc) or bun.jsc.CachedBytecode.isInstance(alloc) or bun.bundle_v2.allocatorHasPointer(alloc) or ((comptime bun.heap_breakdown.enabled) and bun.heap_breakdown.Zone.isInstance(alloc)) or @@ -93,7 +93,7 @@ pub const CheckedAllocator = struct { #allocator: if (enabled) NullableAllocator else void = if (enabled) .init(null), #trace: if (traces_enabled) StoredTrace else void = if (traces_enabled) StoredTrace.empty, - pub fn init(alloc: Allocator) Self { + pub inline fn init(alloc: Allocator) Self { var self: Self = .{}; self.set(alloc); return self; @@ -136,15 +136,58 @@ pub const CheckedAllocator = struct { // Assertion will always fail. We want the error message. bun.safety.alloc.assertEq(old_alloc, alloc); } + + /// Transfers ownership of the collection to a new allocator. + /// + /// This method is valid only if both the old allocator and new allocator are `MimallocArena`s. + /// This is okay because data allocated by one `MimallocArena` can always be freed by another + /// (this includes `resize` and `remap`). + /// + /// `new_allocator` should be one of the following: + /// + /// * `*MimallocArena` + /// * `*const MimallocArena` + /// * `MimallocArena.Borrowed` + /// + /// If you only have an `std.mem.Allocator`, see `MimallocArena.Borrowed.downcast`. 
+ pub inline fn transferOwnership(self: *Self, new_allocator: anytype) void { + if (comptime !enabled) return; + const ArgType = @TypeOf(new_allocator); + const new_std = switch (comptime ArgType) { + *MimallocArena, + *const MimallocArena, + MimallocArena.Borrowed, + => new_allocator.allocator(), + else => @compileError("unsupported argument: " ++ @typeName(ArgType)), + }; + + defer self.* = .init(new_std); + const old_allocator = self.#allocator.get() orelse return; + if (MimallocArena.isInstance(old_allocator)) return; + + if (comptime traces_enabled) { + bun.Output.errGeneric("collection first used here:", .{}); + var trace = self.#trace; + bun.crash_handler.dumpStackTrace( + trace.trace(), + .{ .frame_count = 10, .stop_at_jsc_llint = true }, + ); + } + std.debug.panic( + "cannot transfer ownership from non-MimallocArena (old vtable is {*})", + .{old_allocator.vtable}, + ); + } }; +pub const enabled = bun.Environment.ci_assert; + const bun = @import("bun"); const std = @import("std"); const Allocator = std.mem.Allocator; const StoredTrace = bun.crash_handler.StoredTrace; - -const enabled = bun.Environment.ci_assert; const traces_enabled = bun.Environment.isDebug; const LinuxMemFdAllocator = bun.allocators.LinuxMemFdAllocator; +const MimallocArena = bun.allocators.MimallocArena; const NullableAllocator = bun.allocators.NullableAllocator; diff --git a/src/shell/Builtin.zig b/src/shell/Builtin.zig index 1fe2761be0..8578485590 100644 --- a/src/shell/Builtin.zig +++ b/src/shell/Builtin.zig @@ -619,11 +619,17 @@ pub fn done(this: *Builtin, exit_code: anytype) Yield { // Aggregate output data if shell state is piped and this cmd is piped if (cmd.io.stdout == .pipe and cmd.io.stdout == .pipe and this.stdout == .buf) { - bun.handleOom(cmd.base.shell.buffered_stdout().append(bun.default_allocator, this.stdout.buf.items[0..])); + bun.handleOom(cmd.base.shell.buffered_stdout().appendSlice( + bun.default_allocator, + this.stdout.buf.items[0..], + )); } // Aggregate output 
data if shell state is piped and this cmd is piped if (cmd.io.stderr == .pipe and cmd.io.stderr == .pipe and this.stderr == .buf) { - bun.handleOom(cmd.base.shell.buffered_stderr().append(bun.default_allocator, this.stderr.buf.items[0..])); + bun.handleOom(cmd.base.shell.buffered_stderr().appendSlice( + bun.default_allocator, + this.stderr.buf.items[0..], + )); } return cmd.parent.childDone(cmd, this.exit_code.?); diff --git a/src/shell/IOWriter.zig b/src/shell/IOWriter.zig index a594687ae7..26d0bb9f9b 100644 --- a/src/shell/IOWriter.zig +++ b/src/shell/IOWriter.zig @@ -323,7 +323,7 @@ pub fn doFileWrite(this: *IOWriter) Yield { }; if (child.bytelist) |bl| { const written_slice = this.buf.items[this.total_bytes_written .. this.total_bytes_written + amt]; - bun.handleOom(bl.append(bun.default_allocator, written_slice)); + bun.handleOom(bl.appendSlice(bun.default_allocator, written_slice)); } child.written += amt; if (!child.wroteEverything()) { @@ -347,7 +347,7 @@ pub fn onWritePollable(this: *IOWriter, amount: usize, status: bun.io.WriteStatu } else { if (child.bytelist) |bl| { const written_slice = this.buf.items[this.total_bytes_written .. 
this.total_bytes_written + amount]; - bun.handleOom(bl.append(bun.default_allocator, written_slice)); + bun.handleOom(bl.appendSlice(bun.default_allocator, written_slice)); } this.total_bytes_written += amount; child.written += amount; diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index e6d1a1db50..b14bcb5b99 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -417,10 +417,10 @@ pub const Interpreter = struct { if (comptime free_buffered_io) { if (this._buffered_stdout == .owned) { - this._buffered_stdout.owned.deinitWithAllocator(bun.default_allocator); + this._buffered_stdout.owned.deinit(bun.default_allocator); } if (this._buffered_stderr == .owned) { - this._buffered_stderr.owned.deinitWithAllocator(bun.default_allocator); + this._buffered_stderr.owned.deinit(bun.default_allocator); } } @@ -1181,10 +1181,10 @@ pub const Interpreter = struct { fn deinitFromFinalizer(this: *ThisInterpreter) void { if (this.root_shell._buffered_stderr == .owned) { - this.root_shell._buffered_stderr.owned.deinitWithAllocator(bun.default_allocator); + this.root_shell._buffered_stderr.owned.deinit(bun.default_allocator); } if (this.root_shell._buffered_stdout == .owned) { - this.root_shell._buffered_stdout.owned.deinitWithAllocator(bun.default_allocator); + this.root_shell._buffered_stdout.owned.deinit(bun.default_allocator); } this.this_jsvalue = .zero; this.allocator.destroy(this); diff --git a/src/shell/shell.zig b/src/shell/shell.zig index 270fe85148..e8ded9f1ae 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -4098,8 +4098,8 @@ pub fn SmolList(comptime T: type, comptime INLINED_MAX: comptime_int) type { pub fn promote(this: *Inlined, n: usize, new: T) bun.BabyList(T) { var list = bun.handleOom(bun.BabyList(T).initCapacity(bun.default_allocator, n)); - bun.handleOom(list.append(bun.default_allocator, this.items[0..INLINED_MAX])); - bun.handleOom(list.push(bun.default_allocator, new)); + 
bun.handleOom(list.appendSlice(bun.default_allocator, this.items[0..INLINED_MAX])); + bun.handleOom(list.append(bun.default_allocator, new)); return list; } @@ -4244,7 +4244,7 @@ pub fn SmolList(comptime T: type, comptime INLINED_MAX: comptime_int) type { this.inlined.len += 1; }, .heap => { - bun.handleOom(this.heap.push(bun.default_allocator, new)); + bun.handleOom(this.heap.append(bun.default_allocator, new)); }, } } diff --git a/src/shell/states/Cmd.zig b/src/shell/states/Cmd.zig index 4bda507b16..8a732f27d6 100644 --- a/src/shell/states/Cmd.zig +++ b/src/shell/states/Cmd.zig @@ -116,12 +116,9 @@ const BufferedIoClosed = struct { } = .open, owned: bool = false, - /// BufferedInput/Output uses jsc vm allocator - pub fn deinit(this: *BufferedIoState, jsc_vm_allocator: Allocator) void { + pub fn deinit(this: *BufferedIoState) void { if (this.state == .closed and this.owned) { - var list = this.state.closed.listManaged(jsc_vm_allocator); - list.deinit(); - this.state.closed = .{}; + this.state.closed.clearAndFree(bun.default_allocator); } } @@ -130,13 +127,13 @@ const BufferedIoClosed = struct { } }; - fn deinit(this: *BufferedIoClosed, jsc_vm_allocator: Allocator) void { + fn deinit(this: *BufferedIoClosed) void { if (this.stdout) |*io| { - io.deinit(jsc_vm_allocator); + io.deinit(); } if (this.stderr) |*io| { - io.deinit(jsc_vm_allocator); + io.deinit(); } } @@ -157,10 +154,11 @@ const BufferedIoClosed = struct { // If the shell state is piped (inside a cmd substitution) aggregate the output of this command if (cmd.io.stdout == .pipe and cmd.io.stdout == .pipe and !cmd.node.redirect.redirectsElsewhere(.stdout)) { const the_slice = readable.pipe.slice(); - bun.handleOom(cmd.base.shell.buffered_stdout().append(bun.default_allocator, the_slice)); + bun.handleOom(cmd.base.shell.buffered_stdout().appendSlice(bun.default_allocator, the_slice)); } - stdout.state = .{ .closed = bun.ByteList.fromList(readable.pipe.takeBuffer()) }; + var buffer = 
readable.pipe.takeBuffer(); + stdout.state = .{ .closed = bun.ByteList.moveFromList(&buffer) }; } }, .stderr => { @@ -170,10 +168,11 @@ const BufferedIoClosed = struct { // If the shell state is piped (inside a cmd substitution) aggregate the output of this command if (cmd.io.stderr == .pipe and cmd.io.stderr == .pipe and !cmd.node.redirect.redirectsElsewhere(.stderr)) { const the_slice = readable.pipe.slice(); - bun.handleOom(cmd.base.shell.buffered_stderr().append(bun.default_allocator, the_slice)); + bun.handleOom(cmd.base.shell.buffered_stderr().appendSlice(bun.default_allocator, the_slice)); } - stderr.state = .{ .closed = bun.ByteList.fromList(readable.pipe.takeBuffer()) }; + var buffer = readable.pipe.takeBuffer(); + stderr.state = .{ .closed = bun.ByteList.moveFromList(&buffer) }; } }, .stdin => { @@ -706,7 +705,7 @@ pub fn deinit(this: *Cmd) void { cmd.deinit(); } - this.exec.subproc.buffered_closed.deinit(this.base.eventLoop().allocator()); + this.exec.subproc.buffered_closed.deinit(); } else { this.exec.bltn.deinit(); } @@ -767,7 +766,7 @@ pub fn bufferedOutputCloseStdout(this: *Cmd, err: ?jsc.SystemError) void { if (this.io.stdout == .fd and this.io.stdout.fd.captured != null and !this.node.redirect.redirectsElsewhere(.stdout)) { var buf = this.io.stdout.fd.captured.?; const the_slice = this.exec.subproc.child.stdout.pipe.slice(); - bun.handleOom(buf.append(bun.default_allocator, the_slice)); + bun.handleOom(buf.appendSlice(bun.default_allocator, the_slice)); } this.exec.subproc.buffered_closed.close(this, .{ .stdout = &this.exec.subproc.child.stdout }); this.exec.subproc.child.closeIO(.stdout); @@ -783,14 +782,13 @@ pub fn bufferedOutputCloseStderr(this: *Cmd, err: ?jsc.SystemError) void { } if (this.io.stderr == .fd and this.io.stderr.fd.captured != null and !this.node.redirect.redirectsElsewhere(.stderr)) { var buf = this.io.stderr.fd.captured.?; - bun.handleOom(buf.append(bun.default_allocator, this.exec.subproc.child.stderr.pipe.slice())); + 
bun.handleOom(buf.appendSlice(bun.default_allocator, this.exec.subproc.child.stderr.pipe.slice())); } this.exec.subproc.buffered_closed.close(this, .{ .stderr = &this.exec.subproc.child.stderr }); this.exec.subproc.child.closeIO(.stderr); } const std = @import("std"); -const Allocator = std.mem.Allocator; const bun = @import("bun"); const assert = bun.assert; diff --git a/src/shell/subproc.zig b/src/shell/subproc.zig index 9a8f838be8..dd308b13e7 100644 --- a/src/shell/subproc.zig +++ b/src/shell/subproc.zig @@ -724,6 +724,9 @@ pub const ShellSubprocess = struct { event_loop: jsc.EventLoopHandle, shellio: *ShellIO, spawn_args_: SpawnArgs, + // We have to use an out pointer because this function may invoke callbacks that expect a + // fully initialized parent object. Writing to this out pointer may be the last step needed + // to initialize the object. out: **@This(), notify_caller_process_already_exited: *bool, ) bun.shell.Result(void) { @@ -732,10 +735,7 @@ pub const ShellSubprocess = struct { var spawn_args = spawn_args_; - _ = switch (spawnMaybeSyncImpl( - .{ - .is_sync = false, - }, + return switch (spawnMaybeSyncImpl( event_loop, arena.allocator(), &spawn_args, @@ -743,25 +743,23 @@ pub const ShellSubprocess = struct { out, notify_caller_process_already_exited, )) { - .result => |subproc| subproc, + .result => .success, .err => |err| return .{ .err = err }, }; - - return .success; } fn spawnMaybeSyncImpl( - comptime config: struct { - is_sync: bool, - }, event_loop: jsc.EventLoopHandle, allocator: Allocator, spawn_args: *SpawnArgs, shellio: *ShellIO, + // We have to use an out pointer because this function may invoke callbacks that expect a + // fully initialized parent object. Writing to this out pointer may be the last step needed + // to initialize the object. 
out_subproc: **@This(), notify_caller_process_already_exited: *bool, - ) bun.shell.Result(*@This()) { - const is_sync = config.is_sync; + ) bun.shell.Result(void) { + const is_sync = false; if (!spawn_args.override_env and spawn_args.env_array.items.len == 0) { // spawn_args.env_array.items = bun.handleOom(jsc_vm.transpiler.env.map.createNullDelimitedEnvMap(allocator)); @@ -873,14 +871,12 @@ pub const ShellSubprocess = struct { subprocess.stdin.pipe.signal = bun.webcore.streams.Signal.init(&subprocess.stdin); } - if (comptime !is_sync) { - switch (subprocess.process.watch()) { - .result => {}, - .err => { - notify_caller_process_already_exited.* = true; - spawn_args.lazy = false; - }, - } + switch (subprocess.process.watch()) { + .result => {}, + .err => { + notify_caller_process_already_exited.* = true; + spawn_args.lazy = false; + }, } if (subprocess.stdin == .buffer) { @@ -889,7 +885,7 @@ pub const ShellSubprocess = struct { if (subprocess.stdout == .pipe) { subprocess.stdout.pipe.start(subprocess, event_loop).assert(); - if ((is_sync or !spawn_args.lazy) and subprocess.stdout == .pipe) { + if (!spawn_args.lazy and subprocess.stdout == .pipe) { subprocess.stdout.pipe.readAll(); } } @@ -897,7 +893,7 @@ pub const ShellSubprocess = struct { if (subprocess.stderr == .pipe) { subprocess.stderr.pipe.start(subprocess, event_loop).assert(); - if ((is_sync or !spawn_args.lazy) and subprocess.stderr == .pipe) { + if (!spawn_args.lazy and subprocess.stderr == .pipe) { subprocess.stderr.pipe.readAll(); } } @@ -906,7 +902,7 @@ pub const ShellSubprocess = struct { log("returning", .{}); - return .{ .result = subprocess }; + return .{ .result = {} }; } pub fn wait(this: *@This(), sync: bool) void { @@ -985,7 +981,7 @@ pub const PipeReader = struct { pub fn append(this: *BufferedOutput, bytes: []const u8) void { switch (this.*) { .bytelist => { - bun.handleOom(this.bytelist.append(bun.default_allocator, bytes)); + bun.handleOom(this.bytelist.appendSlice(bun.default_allocator, 
bytes)); }, .array_buffer => { const array_buf_slice = this.array_buffer.buf.slice(); @@ -1001,7 +997,7 @@ pub const PipeReader = struct { pub fn deinit(this: *BufferedOutput) void { switch (this.*) { .bytelist => { - this.bytelist.deinitWithAllocator(bun.default_allocator); + this.bytelist.deinit(bun.default_allocator); }, .array_buffer => { // FIXME: SHOULD THIS BE HERE? diff --git a/src/sourcemap/CodeCoverage.zig b/src/sourcemap/CodeCoverage.zig index eebaa4a7ea..58923ba0f8 100644 --- a/src/sourcemap/CodeCoverage.zig +++ b/src/sourcemap/CodeCoverage.zig @@ -264,7 +264,7 @@ pub const Report = struct { pub fn deinit(this: *Report, allocator: std.mem.Allocator) void { this.executable_lines.deinit(allocator); this.lines_which_have_executed.deinit(allocator); - this.line_hits.deinitWithAllocator(allocator); + this.line_hits.deinit(allocator); this.functions.deinit(allocator); this.stmts.deinit(allocator); this.functions_which_have_executed.deinit(allocator); @@ -445,7 +445,7 @@ pub const ByteRangeMapping = struct { const line_hits_slice = line_hits.slice(); @memset(line_hits_slice, 0); - errdefer line_hits.deinitWithAllocator(allocator); + errdefer line_hits.deinit(allocator); for (blocks, 0..) |block, i| { if (block.endOffset < 0 or block.startOffset < 0) continue; // does not map to anything @@ -535,7 +535,7 @@ pub const ByteRangeMapping = struct { line_hits.len = line_count; const line_hits_slice = line_hits.slice(); @memset(line_hits_slice, 0); - errdefer line_hits.deinitWithAllocator(allocator); + errdefer line_hits.deinit(allocator); for (blocks, 0..) 
|block, i| { if (block.endOffset < 0 or block.startOffset < 0) continue; // does not map to anything diff --git a/src/sourcemap/LineOffsetTable.zig b/src/sourcemap/LineOffsetTable.zig index 06b4a01db7..59d02ea313 100644 --- a/src/sourcemap/LineOffsetTable.zig +++ b/src/sourcemap/LineOffsetTable.zig @@ -171,7 +171,7 @@ pub fn generate(allocator: std.mem.Allocator, contents: []const u8, approximate_ list.append(allocator, .{ .byte_offset_to_start_of_line = line_byte_offset, .byte_offset_to_first_non_ascii = byte_offset_to_first_non_ascii, - .columns_for_non_ascii = BabyList(i32).init(owned), + .columns_for_non_ascii = BabyList(i32).fromOwnedSlice(owned), }) catch unreachable; column = 0; @@ -213,7 +213,7 @@ pub fn generate(allocator: std.mem.Allocator, contents: []const u8, approximate_ list.append(allocator, .{ .byte_offset_to_start_of_line = line_byte_offset, .byte_offset_to_first_non_ascii = byte_offset_to_first_non_ascii, - .columns_for_non_ascii = BabyList(i32).init(owned), + .columns_for_non_ascii = BabyList(i32).fromOwnedSlice(owned), }) catch unreachable; } diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 4b8378ef7b..7a75934db7 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -203,7 +203,7 @@ pub fn parseJSON( } map_data.mappings.names = names_list.items; - map_data.mappings.names_buffer = .fromList(names_buffer); + map_data.mappings.names_buffer = .moveFromList(&names_buffer); } } } @@ -427,7 +427,7 @@ pub const Mapping = struct { inline else => |*list| list.deinit(allocator), } - self.names_buffer.deinitWithAllocator(allocator); + self.names_buffer.deinit(allocator); allocator.free(self.names); } diff --git a/src/sql/mysql/MySQLConnection.zig b/src/sql/mysql/MySQLConnection.zig index 82bce2824e..6e73f95521 100644 --- a/src/sql/mysql/MySQLConnection.zig +++ b/src/sql/mysql/MySQLConnection.zig @@ -309,7 +309,7 @@ pub fn getConnected(this: *MySQLConnection, _: *jsc.JSGlobalObject) JSValue { pub fn 
doClose(this: *MySQLConnection, globalObject: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!JSValue { _ = globalObject; this.disconnect(); - this.write_buffer.deinit(bun.default_allocator); + this.write_buffer.clearAndFree(bun.default_allocator); return .js_undefined; } @@ -1913,7 +1913,7 @@ pub fn handleResultSet(this: *MySQLConnection, comptime Context: type, reader: N fn close(this: *@This()) void { this.disconnect(); this.unregisterAutoFlusher(); - this.write_buffer.deinit(bun.default_allocator); + this.write_buffer.clearAndFree(bun.default_allocator); } pub fn closeStatement(this: *MySQLConnection, statement: *MySQLStatement) !void { diff --git a/src/sql/postgres/AnyPostgresError.zig b/src/sql/postgres/AnyPostgresError.zig index e76fd4c02c..8be278832b 100644 --- a/src/sql/postgres/AnyPostgresError.zig +++ b/src/sql/postgres/AnyPostgresError.zig @@ -60,7 +60,7 @@ pub fn createPostgresError( message: []const u8, options: PostgresErrorOptions, ) bun.JSError!JSValue { - const opts_obj = JSValue.createEmptyObject(globalObject, 18); + const opts_obj = JSValue.createEmptyObject(globalObject, 0); opts_obj.ensureStillAlive(); opts_obj.put(globalObject, jsc.ZigString.static("code"), try bun.String.createUTF8ForJS(globalObject, options.code)); inline for (std.meta.fields(PostgresErrorOptions)) |field| { diff --git a/src/sql/postgres/PostgresSQLConnection.zig b/src/sql/postgres/PostgresSQLConnection.zig index a7422f532f..d2c2a31b67 100644 --- a/src/sql/postgres/PostgresSQLConnection.zig +++ b/src/sql/postgres/PostgresSQLConnection.zig @@ -326,7 +326,7 @@ pub fn failWithJSValue(this: *PostgresSQLConnection, value: JSValue) void { this.globalObject, this.js_value, &[_]JSValue{ - value, + value.toError() orelse value, this.getQueriesArray(), }, ) catch |e| this.globalObject.reportActiveExceptionAsUnhandled(e); @@ -484,7 +484,7 @@ fn drainInternal(this: *PostgresSQLConnection) void { this.flushData(); - if (!this.flags.has_backpressure) { + if 
(!this.flags.has_backpressure and this.flags.is_ready_for_query) { // no backpressure yet so pipeline more if possible and flush again this.advance(); this.flushData(); @@ -871,7 +871,7 @@ pub fn doFlush(this: *PostgresSQLConnection, _: *jsc.JSGlobalObject, _: *jsc.Cal fn close(this: *@This()) void { this.disconnect(); this.unregisterAutoFlusher(); - this.write_buffer.deinit(bun.default_allocator); + this.write_buffer.clearAndFree(bun.default_allocator); } pub fn doClose(this: *@This(), globalObject: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!JSValue { @@ -929,6 +929,7 @@ fn cleanUpRequests(this: *@This(), js_reason: ?jsc.JSValue) void { .running, .partial_response, => { + this.finishRequest(request); if (!this.vm.isShuttingDown()) { if (js_reason) |reason| { request.onJSError(reason, this.globalObject); @@ -1066,15 +1067,23 @@ pub fn bufferedReader(this: *PostgresSQLConnection) protocol.NewReader(Reader) { }; } -fn cleanupSuccessQuery(this: *PostgresSQLConnection, item: *PostgresSQLQuery) void { - if (item.flags.simple) { - this.nonpipelinable_requests -= 1; - } else if (item.flags.pipelined) { - this.pipelined_requests -= 1; - } else if (this.flags.waiting_to_prepare) { - this.flags.waiting_to_prepare = false; +fn finishRequest(this: *@This(), item: *PostgresSQLQuery) void { + switch (item.status) { + .running, .binding, .partial_response => { + if (item.flags.simple) { + this.nonpipelinable_requests -= 1; + } else if (item.flags.pipelined) { + this.pipelined_requests -= 1; + } + }, + .success, .fail, .pending => {}, } } + +pub fn canPrepareQuery(noalias this: *const @This()) bool { + return this.flags.is_ready_for_query and !this.flags.waiting_to_prepare and this.pipelined_requests == 0; +} + fn advance(this: *PostgresSQLConnection) void { var offset: usize = 0; debug("advance", .{}); @@ -1085,7 +1094,6 @@ fn advance(this: *PostgresSQLConnection) void { // so we do the cleanup her switch (result.status) { .success => { - 
this.cleanupSuccessQuery(result); result.deref(); this.requests.discard(1); continue; @@ -1115,7 +1123,11 @@ fn advance(this: *PostgresSQLConnection) void { defer query_str.deinit(); debug("execute simple query: {s}", .{query_str.slice()}); PostgresRequest.executeQuery(query_str.slice(), PostgresSQLConnection.Writer, this.writer()) catch |err| { - req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + if (this.globalObject.tryTakeException()) |err_| { + req.onJSError(err_, this.globalObject); + } else { + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + } if (offset == 0) { req.deref(); this.requests.discard(1); @@ -1131,39 +1143,12 @@ fn advance(this: *PostgresSQLConnection) void { req.status = .running; return; } else { - const stmt = req.statement orelse { - debug("stmt is not set yet waiting it to RUN before actually doing anything", .{}); - // statement is not set yet waiting it to RUN before actually doing anything - offset += 1; - continue; - }; - - switch (stmt.status) { - .failed => { - debug("stmt failed", .{}); - bun.assert(stmt.error_response != null); - if (req.flags.simple) { - this.nonpipelinable_requests -= 1; - } else if (req.flags.pipelined) { - this.pipelined_requests -= 1; - } else if (this.flags.waiting_to_prepare) { - this.flags.waiting_to_prepare = false; - } - req.onError(stmt.error_response.?, this.globalObject); - if (offset == 0) { - req.deref(); - this.requests.discard(1); - } else { - // deinit later - req.status = .fail; - offset += 1; - } - - continue; - }, - .prepared => { - const thisValue = req.thisValue.tryGet() orelse { - bun.assertf(false, "query value was freed earlier than expected", .{}); + if (req.statement) |statement| { + switch (statement.status) { + .failed => { + debug("stmt failed", .{}); + bun.assert(statement.error_response != null); + req.onError(statement.error_response.?, this.globalObject); if (offset == 0) { req.deref(); this.requests.discard(1); @@ -1172,51 +1157,10 @@ fn 
advance(this: *PostgresSQLConnection) void { req.status = .fail; offset += 1; } - continue; - }; - const binding_value = PostgresSQLQuery.js.bindingGetCached(thisValue) orelse .zero; - const columns_value = PostgresSQLQuery.js.columnsGetCached(thisValue) orelse .zero; - req.flags.binary = stmt.fields.len > 0; - debug("binding and executing stmt", .{}); - PostgresRequest.bindAndExecute(this.globalObject, stmt, binding_value, columns_value, PostgresSQLConnection.Writer, this.writer()) catch |err| { - req.onWriteFail(err, this.globalObject, this.getQueriesArray()); - if (offset == 0) { - req.deref(); - this.requests.discard(1); - } else { - // deinit later - req.status = .fail; - offset += 1; - } - debug("bind and execute failed: {s}", .{@errorName(err)}); - continue; - }; - this.flags.is_ready_for_query = false; - req.status = .binding; - if (this.flags.use_unnamed_prepared_statements or !this.canPipeline()) { - debug("cannot pipeline more stmt", .{}); - return; - } - debug("pipelining more stmt", .{}); - // we can pipeline it - this.pipelined_requests += 1; - req.flags.pipelined = true; - offset += 1; - continue; - }, - .pending => { - if (this.pipelined_requests > 0 or !this.flags.is_ready_for_query) { - debug("need to wait to finish the pipeline before starting a new query preparation", .{}); - // need to wait to finish the pipeline before starting a new query preparation - return; - } - // statement is pending, lets write/parse it - var query_str = req.query.toUTF8(bun.default_allocator); - defer query_str.deinit(); - const has_params = stmt.signature.fields.len > 0; - // If it does not have params, we can write and execute immediately in one go - if (!has_params) { + continue; + }, + .prepared => { const thisValue = req.thisValue.tryGet() orelse { bun.assertf(false, "query value was freed earlier than expected", .{}); if (offset == 0) { @@ -1229,77 +1173,158 @@ fn advance(this: *PostgresSQLConnection) void { } continue; }; - // prepareAndQueryWithSignature will 
write + bind + execute, it will change to running after binding is complete const binding_value = PostgresSQLQuery.js.bindingGetCached(thisValue) orelse .zero; - debug("prepareAndQueryWithSignature", .{}); - PostgresRequest.prepareAndQueryWithSignature(this.globalObject, query_str.slice(), binding_value, PostgresSQLConnection.Writer, this.writer(), &stmt.signature) catch |err| { - stmt.status = .failed; - stmt.error_response = .{ .postgres_error = err }; - req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + const columns_value = PostgresSQLQuery.js.columnsGetCached(thisValue) orelse .zero; + req.flags.binary = statement.fields.len > 0; + debug("binding and executing stmt", .{}); + PostgresRequest.bindAndExecute(this.globalObject, statement, binding_value, columns_value, PostgresSQLConnection.Writer, this.writer()) catch |err| { + if (this.globalObject.tryTakeException()) |err_| { + req.onJSError(err_, this.globalObject); + } else { + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + } if (offset == 0) { req.deref(); this.requests.discard(1); } else { // deinit later req.status = .fail; + offset += 1; } - debug("prepareAndQueryWithSignature failed: {s}", .{@errorName(err)}); - + debug("bind and execute failed: {s}", .{@errorName(err)}); continue; }; - this.flags.waiting_to_prepare = true; + this.flags.is_ready_for_query = false; req.status = .binding; - stmt.status = .parsing; + req.flags.pipelined = true; + this.pipelined_requests += 1; + if (this.flags.use_unnamed_prepared_statements or !this.canPipeline()) { + debug("cannot pipeline more stmt", .{}); + return; + } + + offset += 1; + continue; + }, + .pending => { + if (!this.canPrepareQuery()) { + debug("need to wait to finish the pipeline before starting a new query preparation", .{}); + // need to wait to finish the pipeline before starting a new query preparation + return; + } + // statement is pending, lets write/parse it + var query_str = 
req.query.toUTF8(bun.default_allocator); + defer query_str.deinit(); + const has_params = statement.signature.fields.len > 0; + // If it does not have params, we can write and execute immediately in one go + if (!has_params) { + const thisValue = req.thisValue.tryGet() orelse { + bun.assertf(false, "query value was freed earlier than expected", .{}); + if (offset == 0) { + req.deref(); + this.requests.discard(1); + } else { + // deinit later + req.status = .fail; + offset += 1; + } + continue; + }; + // prepareAndQueryWithSignature will write + bind + execute, it will change to running after binding is complete + const binding_value = PostgresSQLQuery.js.bindingGetCached(thisValue) orelse .zero; + debug("prepareAndQueryWithSignature", .{}); + PostgresRequest.prepareAndQueryWithSignature(this.globalObject, query_str.slice(), binding_value, PostgresSQLConnection.Writer, this.writer(), &statement.signature) catch |err| { + if (this.globalObject.tryTakeException()) |err_| { + req.onJSError(err_, this.globalObject); + } else { + statement.status = .failed; + statement.error_response = .{ .postgres_error = err }; + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + } + if (offset == 0) { + req.deref(); + this.requests.discard(1); + } else { + // deinit later + req.status = .fail; + } + debug("prepareAndQueryWithSignature failed: {s}", .{@errorName(err)}); + + continue; + }; + this.flags.is_ready_for_query = false; + this.flags.waiting_to_prepare = true; + req.status = .binding; + statement.status = .parsing; + this.flushDataAndResetTimeout(); + return; + } + + const connection_writer = this.writer(); + debug("writing query", .{}); + // write query and wait for it to be prepared + PostgresRequest.writeQuery(query_str.slice(), statement.signature.prepared_statement_name, statement.signature.fields, PostgresSQLConnection.Writer, connection_writer) catch |err| { + if (this.globalObject.tryTakeException()) |err_| { + req.onJSError(err_, this.globalObject); + } 
else { + statement.error_response = .{ .postgres_error = err }; + statement.status = .failed; + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + } + bun.assert(offset == 0); + req.deref(); + this.requests.discard(1); + debug("write query failed: {s}", .{@errorName(err)}); + continue; + }; + connection_writer.write(&protocol.Sync) catch |err| { + if (this.globalObject.tryTakeException()) |err_| { + req.onJSError(err_, this.globalObject); + } else { + statement.error_response = .{ .postgres_error = err }; + statement.status = .failed; + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + } + bun.assert(offset == 0); + req.deref(); + this.requests.discard(1); + debug("write query (sync) failed: {s}", .{@errorName(err)}); + continue; + }; + this.flags.is_ready_for_query = false; + this.flags.waiting_to_prepare = true; + statement.status = .parsing; + this.flushDataAndResetTimeout(); return; - } - - const connection_writer = this.writer(); - debug("writing query", .{}); - // write query and wait for it to be prepared - PostgresRequest.writeQuery(query_str.slice(), stmt.signature.prepared_statement_name, stmt.signature.fields, PostgresSQLConnection.Writer, connection_writer) catch |err| { - stmt.error_response = .{ .postgres_error = err }; - stmt.status = .failed; - - req.onWriteFail(err, this.globalObject, this.getQueriesArray()); - bun.assert(offset == 0); - req.deref(); - this.requests.discard(1); - debug("write query failed: {s}", .{@errorName(err)}); + }, + .parsing => { + // we are still parsing, lets wait for it to be prepared or failed + offset += 1; continue; - }; - connection_writer.write(&protocol.Sync) catch |err| { - stmt.error_response = .{ .postgres_error = err }; - stmt.status = .failed; - - req.onWriteFail(err, this.globalObject, this.getQueriesArray()); - bun.assert(offset == 0); - req.deref(); - this.requests.discard(1); - debug("write query (sync) failed: {s}", .{@errorName(err)}); - continue; - }; - 
this.flags.is_ready_for_query = false; - stmt.status = .parsing; - this.flags.waiting_to_prepare = true; - return; - }, - .parsing => { - // we are still parsing, lets wait for it to be prepared or failed - return; - }, + }, + } + } else { + offset += 1; + continue; } } }, .running, .binding, .partial_response => { - // if we are binding it will switch to running immediately - // if we are running, we need to wait for it to be success or fail - return; + if (this.flags.waiting_to_prepare or this.nonpipelinable_requests > 0) { + return; + } + const total_requests_running = this.pipelined_requests; + if (offset < total_requests_running) { + offset += total_requests_running; + } else { + offset += 1; + } + continue; }, .success => { - this.cleanupSuccessQuery(req); if (offset > 0) { // deinit later req.status = .fail; @@ -1427,12 +1452,14 @@ pub fn on(this: *PostgresSQLConnection, comptime MessageType: @Type(.enum_litera try ready_for_query.decodeInternal(Context, reader); this.setStatus(.connected); + this.flags.waiting_to_prepare = false; this.flags.is_ready_for_query = true; this.socket.setTimeout(300); defer this.updateRef(); if (this.current()) |request| { if (request.status == .partial_response) { + this.finishRequest(request); // if is a partial response, just signal that the query is now complete request.onResult("", this.globalObject, this.js_value, true); } @@ -1695,7 +1722,6 @@ pub fn on(this: *PostgresSQLConnection, comptime MessageType: @Type(.enum_litera defer { err.deinit(); } - this.failWithJSValue(err.toJS(this.globalObject)); // it shouldn't enqueue any requests while connecting @@ -1723,8 +1749,9 @@ pub fn on(this: *PostgresSQLConnection, comptime MessageType: @Type(.enum_litera } } } - this.updateRef(); + this.finishRequest(request); + this.updateRef(); request.onError(.{ .protocol = err }, this.globalObject); }, .PortalSuspended => { @@ -1737,11 +1764,7 @@ pub fn on(this: *PostgresSQLConnection, comptime MessageType: @Type(.enum_litera try 
reader.eatMessage(protocol.CloseComplete); var request = this.current() orelse return error.ExpectedRequest; defer this.updateRef(); - if (request.flags.simple) { - request.onResult("CLOSECOMPLETE", this.globalObject, this.js_value, false); - } else { - request.onResult("CLOSECOMPLETE", this.globalObject, this.js_value, true); - } + request.onResult("CLOSECOMPLETE", this.globalObject, this.js_value, false); }, .CopyInResponse => { debug("TODO CopyInResponse", .{}); @@ -1757,11 +1780,7 @@ pub fn on(this: *PostgresSQLConnection, comptime MessageType: @Type(.enum_litera try reader.eatMessage(protocol.EmptyQueryResponse); var request = this.current() orelse return error.ExpectedRequest; defer this.updateRef(); - if (request.flags.simple) { - request.onResult("", this.globalObject, this.js_value, false); - } else { - request.onResult("", this.globalObject, this.js_value, true); - } + request.onResult("", this.globalObject, this.js_value, false); }, .CopyOutResponse => { debug("TODO CopyOutResponse", .{}); diff --git a/src/sql/postgres/PostgresSQLQuery.zig b/src/sql/postgres/PostgresSQLQuery.zig index 35b1af4906..9860b0273a 100644 --- a/src/sql/postgres/PostgresSQLQuery.zig +++ b/src/sql/postgres/PostgresSQLQuery.zig @@ -94,9 +94,10 @@ pub fn onWriteFail( const vm = jsc.VirtualMachine.get(); const function = vm.rareData().postgresql_context.onQueryRejectFn.get().?; const event_loop = vm.eventLoop(); + const js_err = postgresErrorToJS(globalObject, null, err); event_loop.runCallback(function, globalObject, thisValue, &.{ targetValue, - postgresErrorToJS(globalObject, null, err), + js_err.toError() orelse js_err, queries_array, }); } @@ -116,7 +117,7 @@ pub fn onJSError(this: *@This(), err: jsc.JSValue, globalObject: *jsc.JSGlobalOb const event_loop = vm.eventLoop(); event_loop.runCallback(function, globalObject, thisValue, &.{ targetValue, - err, + err.toError() orelse err, }); } pub fn onError(this: *@This(), err: PostgresSQLStatement.Error, globalObject: 
*jsc.JSGlobalObject) void { diff --git a/src/sql/postgres/protocol/NotificationResponse.zig b/src/sql/postgres/protocol/NotificationResponse.zig index 8b319e09cd..17229e596d 100644 --- a/src/sql/postgres/protocol/NotificationResponse.zig +++ b/src/sql/postgres/protocol/NotificationResponse.zig @@ -5,8 +5,8 @@ channel: bun.ByteList = .{}, payload: bun.ByteList = .{}, pub fn deinit(this: *@This()) void { - this.channel.deinitWithAllocator(bun.default_allocator); - this.payload.deinitWithAllocator(bun.default_allocator); + this.channel.clearAndFree(bun.default_allocator); + this.payload.clearAndFree(bun.default_allocator); } pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { diff --git a/src/sql/shared/Data.zig b/src/sql/shared/Data.zig index 964cc11525..f63540b93e 100644 --- a/src/sql/shared/Data.zig +++ b/src/sql/shared/Data.zig @@ -20,21 +20,27 @@ pub const Data = union(enum) { inline_storage.len = @truncate(possibly_inline_bytes.len); return .{ .inline_storage = inline_storage }; } - return .{ .owned = bun.ByteList.init(try allocator.dupe(u8, possibly_inline_bytes)) }; + return .{ + .owned = bun.ByteList.fromOwnedSlice(try allocator.dupe(u8, possibly_inline_bytes)), + }; } pub fn toOwned(this: @This()) !bun.ByteList { return switch (this) { .owned => this.owned, - .temporary => bun.ByteList.init(try bun.default_allocator.dupe(u8, this.temporary)), - .empty => bun.ByteList.init(&.{}), - .inline_storage => bun.ByteList.init(try bun.default_allocator.dupe(u8, this.inline_storage.slice())), + .temporary => bun.ByteList.fromOwnedSlice( + try bun.default_allocator.dupe(u8, this.temporary), + ), + .empty => bun.ByteList.empty, + .inline_storage => bun.ByteList.fromOwnedSlice( + try bun.default_allocator.dupe(u8, this.inline_storage.slice()), + ), }; } pub fn deinit(this: *@This()) void { switch (this.*) { - .owned => this.owned.deinitWithAllocator(bun.default_allocator), + .owned => |*owned| 
owned.clearAndFree(bun.default_allocator), .temporary => {}, .empty => {}, .inline_storage => {}, @@ -45,12 +51,10 @@ pub const Data = union(enum) { /// Generally, for security reasons. pub fn zdeinit(this: *@This()) void { switch (this.*) { - .owned => { - + .owned => |*owned| { // Zero bytes before deinit - @memset(this.owned.slice(), 0); - - this.owned.deinitWithAllocator(bun.default_allocator); + bun.freeSensitive(bun.default_allocator, owned.slice()); + owned.deinit(bun.default_allocator); }, .temporary => {}, .empty => {}, diff --git a/src/string/MutableString.zig b/src/string/MutableString.zig index b4e2da39ea..48a0346f12 100644 --- a/src/string/MutableString.zig +++ b/src/string/MutableString.zig @@ -258,13 +258,6 @@ pub fn slice(self: *MutableString) []u8 { return self.list.items; } -/// Take ownership of the existing value without discarding excess capacity. -pub fn move(self: *MutableString) []u8 { - const out = self.list.items; - self.list = .{}; - return out; -} - /// Appends `0` if needed pub fn sliceWithSentinel(self: *MutableString) [:0]u8 { if (self.list.items.len > 0 and self.list.items[self.list.items.len - 1] != 0) { diff --git a/src/string/SmolStr.zig b/src/string/SmolStr.zig index 55560abd54..77ac00c562 100644 --- a/src/string/SmolStr.zig +++ b/src/string/SmolStr.zig @@ -169,7 +169,7 @@ pub const SmolStr = packed struct(u128) { if (inlined.len() + 1 > Inlined.max_len) { var baby_list = try BabyList(u8).initCapacity(allocator, inlined.len() + 1); baby_list.appendSliceAssumeCapacity(inlined.slice()); - try baby_list.push(allocator, char); + try baby_list.append(allocator, char); this.__len = baby_list.len; this.__ptr = baby_list.ptr; this.cap = baby_list.cap; @@ -188,7 +188,7 @@ pub const SmolStr = packed struct(u128) { .len = this.__len, .cap = this.cap, }; - try baby_list.push(allocator, char); + try baby_list.append(allocator, char); this.__len = baby_list.len; this.__ptr = baby_list.ptr; @@ -217,7 +217,7 @@ pub const SmolStr = packed 
struct(u128) { .len = this.__len, .cap = this.cap, }; - try baby_list.append(allocator, values); + try baby_list.appendSlice(allocator, values); this.* = SmolStr.fromBabyList(baby_list); return; diff --git a/src/sys.zig b/src/sys.zig index d7908f4f7c..1a91073807 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -3810,6 +3810,7 @@ pub fn moveFileZWithHandle(from_handle: bun.FileDescriptor, from_dir: bun.FileDe if (err.getErrno() == .XDEV) { try copyFileZSlowWithHandle(from_handle, to_dir, destination).unwrap(); _ = unlinkat(from_dir, filename); + return; } return bun.errnoToZigErr(err.errno); diff --git a/src/transpiler.zig b/src/transpiler.zig index ecbbd382a5..2f020eeac8 100644 --- a/src/transpiler.zig +++ b/src/transpiler.zig @@ -775,7 +775,7 @@ pub const Transpiler = struct { bun.perf.trace("JSPrinter.print"); defer tracer.end(); - const symbols = js_ast.Symbol.NestedList.init(&[_]js_ast.Symbol.List{ast.symbols}); + const symbols = js_ast.Symbol.NestedList.fromBorrowedSliceDangerous(&.{ast.symbols}); return switch (format) { .cjs => try js_printer.printCommonJS( @@ -1199,13 +1199,18 @@ pub const Transpiler = struct { const properties: []js_ast.G.Property = expr.data.e_object.properties.slice(); if (properties.len > 0) { var stmts = allocator.alloc(js_ast.Stmt, 3) catch return null; - var decls = allocator.alloc(js_ast.G.Decl, properties.len) catch return null; + var decls = std.ArrayListUnmanaged(js_ast.G.Decl).initCapacity( + allocator, + properties.len, + ) catch |err| bun.handleOom(err); + decls.expandToCapacity(); + symbols = allocator.alloc(js_ast.Symbol, properties.len) catch return null; var export_clauses = allocator.alloc(js_ast.ClauseItem, properties.len) catch return null; var duplicate_key_checker = bun.StringHashMap(u32).init(allocator); defer duplicate_key_checker.deinit(); var count: usize = 0; - for (properties, decls, symbols, 0..) |*prop, *decl, *symbol, i| { + for (properties, decls.items, symbols, 0..) 
|*prop, *decl, *symbol, i| { const name = prop.key.?.data.e_string.slice(allocator); // Do not make named exports for "default" exports if (strings.eqlComptime(name, "default")) @@ -1213,7 +1218,7 @@ pub const Transpiler = struct { const visited = duplicate_key_checker.getOrPut(name) catch continue; if (visited.found_existing) { - decls[visited.value_ptr.*].value = prop.value.?; + decls.items[visited.value_ptr.*].value = prop.value.?; continue; } visited.value_ptr.* = @truncate(i); @@ -1241,10 +1246,11 @@ pub const Transpiler = struct { count += 1; } + decls.shrinkRetainingCapacity(count); stmts[0] = js_ast.Stmt.alloc( js_ast.S.Local, js_ast.S.Local{ - .decls = js_ast.G.Decl.List.init(decls[0..count]), + .decls = js_ast.G.Decl.List.moveFromList(&decls), .kind = .k_var, }, logger.Loc{ @@ -1297,7 +1303,7 @@ pub const Transpiler = struct { } }; var ast = js_ast.Ast.fromParts(parts); - ast.symbols = js_ast.Symbol.List.init(symbols); + ast.symbols = js_ast.Symbol.List.fromOwnedSlice(symbols); return ParseResult{ .ast = ast, @@ -1324,7 +1330,7 @@ pub const Transpiler = struct { parts[0] = js_ast.Part{ .stmts = stmts }; return ParseResult{ - .ast = js_ast.Ast.initTest(parts), + .ast = js_ast.Ast.fromParts(parts), .source = source.*, .loader = loader, .input_fd = input_fd, diff --git a/src/valkey/valkey.zig b/src/valkey/valkey.zig index 97bd11c2be..87b9cea495 100644 --- a/src/valkey/valkey.zig +++ b/src/valkey/valkey.zig @@ -431,7 +431,7 @@ pub const ValkeyClient = struct { /// Handle connection closed event pub fn onClose(this: *ValkeyClient) void { this.unregisterAutoFlusher(); - this.write_buffer.deinit(this.allocator); + this.write_buffer.clearAndFree(this.allocator); // If manually closing, don't attempt to reconnect if (this.flags.is_manually_closed) { @@ -794,8 +794,8 @@ pub const ValkeyClient = struct { /// Handle socket open event pub fn onOpen(this: *ValkeyClient, socket: uws.AnySocket) void { this.socket = socket; - this.write_buffer.deinit(this.allocator); - 
this.read_buffer.deinit(this.allocator); + this.write_buffer.clearAndFree(this.allocator); + this.read_buffer.clearAndFree(this.allocator); this.start(); } diff --git a/src/valkey/valkey_protocol.zig b/src/valkey/valkey_protocol.zig index ac59719889..39e27dd2dc 100644 --- a/src/valkey/valkey_protocol.zig +++ b/src/valkey/valkey_protocol.zig @@ -249,8 +249,7 @@ pub const RESPValue = union(RESPType) { fn valkeyStrToJSValue(globalObject: *jsc.JSGlobalObject, str: []const u8, options: *const ToJSOptions) bun.JSError!jsc.JSValue { if (options.return_as_buffer) { // TODO: handle values > 4.7 GB - const buf = try jsc.ArrayBuffer.createBuffer(globalObject, str); - return buf.toJS(globalObject); + return try jsc.ArrayBuffer.createBuffer(globalObject, str); } else { return bun.String.createUTF8ForJS(globalObject, str); } diff --git a/test/bake/bake-harness.ts b/test/bake/bake-harness.ts index 8e91c328c5..f7aa1fd988 100644 --- a/test/bake/bake-harness.ts +++ b/test/bake/bake-harness.ts @@ -1563,7 +1563,11 @@ class OutputLineStream extends EventEmitter { this.lines.push(line); if ( line.includes("============================================================") || - line.includes("Allocation scope leaked") + line.includes("Allocation scope leaked") || + line.includes("collection first used here") || + line.includes("allocator mismatch") || + line.includes("assertion failure") || + line.includes("race condition") ) { // Tell consumers to wait for the process to exit this.panicked = true; diff --git a/test/bundler/bun-build-compile-wasm.test.ts b/test/bundler/bun-build-compile-wasm.test.ts new file mode 100644 index 0000000000..5127f22493 --- /dev/null +++ b/test/bundler/bun-build-compile-wasm.test.ts @@ -0,0 +1,126 @@ +import { describe, expect, test } from "bun:test"; +import { bunEnv, tempDirWithFiles } from "harness"; +import { join } from "path"; + +describe("Bun.build compile with wasm", () => { + test("compile with wasm module imports", async () => { + // This test ensures 
that embedded wasm modules compile and run correctly + // The regression was that the module prefix wasn't being set correctly + + const dir = tempDirWithFiles("build-compile-wasm", { + "app.js": ` + // Import a wasm module and properly instantiate it + import wasmPath from "./test.wasm"; + + async function main() { + try { + // Read the wasm file as ArrayBuffer + const wasmBuffer = await Bun.file(wasmPath).arrayBuffer(); + const { instance } = await WebAssembly.instantiate(wasmBuffer); + + // Call the add function from wasm + const result = instance.exports.add(2, 3); + console.log("WASM result:", result); + + if (result === 5) { + console.log("WASM module loaded successfully"); + process.exit(0); + } else { + console.error("WASM module returned unexpected result:", result); + process.exit(1); + } + } catch (error) { + console.error("Failed to load WASM module:", error.message); + process.exit(1); + } + } + + main(); + `, + // A real WebAssembly module that exports an 'add' function + // (module + // (func $add (param i32 i32) (result i32) + // local.get 0 + // local.get 1 + // i32.add) + // (export "add" (func $add))) + "test.wasm": Buffer.from([ + 0x00, + 0x61, + 0x73, + 0x6d, // WASM magic number + 0x01, + 0x00, + 0x00, + 0x00, // WASM version 1 + // Type section + 0x01, + 0x07, + 0x01, + 0x60, + 0x02, + 0x7f, + 0x7f, + 0x01, + 0x7f, + // Function section + 0x03, + 0x02, + 0x01, + 0x00, + // Export section + 0x07, + 0x07, + 0x01, + 0x03, + 0x61, + 0x64, + 0x64, + 0x00, + 0x00, + // Code section + 0x0a, + 0x09, + 0x01, + 0x07, + 0x00, + 0x20, + 0x00, + 0x20, + 0x01, + 0x6a, + 0x0b, + ]), + }); + + // Test compilation with default target (current platform) + const result = await Bun.build({ + entrypoints: [join(dir, "app.js")], + compile: { + outfile: join(dir, "app-wasm"), + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + // Run the compiled version to verify it works + const proc = Bun.spawn({ + cmd: 
[result.outputs[0].path], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + expect(exitCode).toBe(0); + expect(stdout).toContain("WASM result: 5"); + expect(stdout).toContain("WASM module loaded successfully"); + expect(stderr).toBe(""); + }); +}); diff --git a/test/bundler/bun-build-compile.test.ts b/test/bundler/bun-build-compile.test.ts index e8ef46dd70..555aff4ae9 100644 --- a/test/bundler/bun-build-compile.test.ts +++ b/test/bundler/bun-build-compile.test.ts @@ -59,6 +59,76 @@ describe("Bun.build compile", () => { }), ).toThrowErrorMatchingInlineSnapshot(`"Unsupported compile target: bun-windows-arm64"`); }); + test("compile with relative outfile paths", async () => { + using dir = tempDir("build-compile-relative-paths", { + "app.js": `console.log("Testing relative paths");`, + }); + + // Test 1: Nested forward slash path + const result1 = await Bun.build({ + entrypoints: [join(dir + "", "app.js")], + compile: { + outfile: join(dir + "", "output/nested/app1"), + }, + }); + expect(result1.success).toBe(true); + expect(result1.outputs[0].path).toContain(join("output", "nested", isWindows ? "app1.exe" : "app1")); + + // Test 2: Current directory relative path + const result2 = await Bun.build({ + entrypoints: [join(dir + "", "app.js")], + compile: { + outfile: join(dir + "", "app2"), + }, + }); + expect(result2.success).toBe(true); + expect(result2.outputs[0].path).toEndWith(isWindows ? "app2.exe" : "app2"); + + // Test 3: Deeply nested path + const result3 = await Bun.build({ + entrypoints: [join(dir + "", "app.js")], + compile: { + outfile: join(dir + "", "a/b/c/d/app3"), + }, + }); + expect(result3.success).toBe(true); + expect(result3.outputs[0].path).toContain(join("a", "b", "c", "d", isWindows ? 
"app3.exe" : "app3")); + }); + + test("compile with embedded resources uses correct module prefix", async () => { + using dir = tempDir("build-compile-embedded-resources", { + "app.js": ` + // This test verifies that embedded resources use the correct target-specific base path + // The module prefix should be set to the target's base path + // not the user-configured public_path + import { readFileSync } from 'fs'; + + // Try to read a file that would be embedded in the standalone executable + try { + const embedded = readFileSync('embedded.txt', 'utf8'); + console.log('Embedded file:', embedded); + } catch (e) { + console.log('Reading embedded file'); + } + `, + "embedded.txt": "This is an embedded resource", + }); + + // Test with default target (current platform) + const result = await Bun.build({ + entrypoints: [join(dir + "", "app.js")], + compile: { + outfile: "app-with-resources", + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + expect(result.outputs[0].path).toEndWith(isWindows ? 
"app-with-resources.exe" : "app-with-resources"); + + // The test passes if compilation succeeds - the actual embedded resource + // path handling is verified by the successful compilation + }); }); // file command test works well diff --git a/test/bundler/bundler_defer.test.ts b/test/bundler/bundler_defer.test.ts index 8f16b9918d..44f72a7e9b 100644 --- a/test/bundler/bundler_defer.test.ts +++ b/test/bundler/bundler_defer.test.ts @@ -536,57 +536,79 @@ warn: (msg: string) => console.warn(\`[WARN] \${msg}\`) const outdir = path.join(folder, "dist"); - const result = await Bun.build({ - entrypoints: [entrypoint], - outdir, - plugins: [ - { - name: "xXx123_import_checker_321xXx", - setup(build) { - type Import = { - imported: string[]; - dep: string; - }; - type Export = { - ident: string; - }; - let imports_and_exports: Record; exports: Array }> = {}; - - build.onLoad({ filter: /\.ts/ }, async ({ path }) => { - const contents = await Bun.$`cat ${path}`.quiet().text(); - - const import_regex = /import\s+(?:([\s\S]*?)\s+from\s+)?['"]([^'"]+)['"];/g; - const imports: Array = [...contents.toString().matchAll(import_regex)].map(m => ({ - imported: m - .slice(1, m.length - 1) - .map(match => (match[0] === "{" ? match.slice(2, match.length - 2) : match)), - dep: m[m.length - 1], - })); - - const export_regex = - /export\s+(?:default\s+|const\s+|let\s+|var\s+|function\s+|class\s+|enum\s+|type\s+|interface\s+)?([\w$]+)?(?:\s*=\s*|(?:\s*{[^}]*})?)?[^;]*;/g; - const exports: Array = [...contents.matchAll(export_regex)].map(m => ({ - ident: m[1], - })); - - imports_and_exports[path.replaceAll("\\", "/").split("/").pop()!] 
= { imports, exports }; - return undefined; - }); - - build.onLoad({ filter: /module_data\.json/ }, async ({ defer }) => { - await defer(); - const contents = JSON.stringify(imports_and_exports); - - return { - contents, - loader: "json", - }; - }); - }, - }, - ], + let onFinalizeCallCount = 0; + let onFinalizeCalledThrice = Promise.withResolvers(); + let onFinalizeCallRegistry = new FinalizationRegistry(() => { + onFinalizeCallCount++; + if (onFinalizeCallCount === 3) { + onFinalizeCalledThrice.resolve(); + } }); + const result = await (async function () { + return await Bun.build({ + entrypoints: [entrypoint], + outdir, + plugins: [ + (() => { + const plugin = { + name: "xXx123_import_checker_321xXx", + setup(build) { + type Import = { + imported: string[]; + dep: string; + }; + type Export = { + ident: string; + }; + let imports_and_exports: Record; exports: Array }> = {}; + + const onLoadTS = async ({ path }) => { + const contents = await Bun.$`cat ${path}`.quiet().text(); + + const import_regex = /import\s+(?:([\s\S]*?)\s+from\s+)?['"]([^'"]+)['"];/g; + const imports: Array = [...contents.toString().matchAll(import_regex)].map(m => ({ + imported: m + .slice(1, m.length - 1) + .map(match => (match[0] === "{" ? match.slice(2, match.length - 2) : match)), + dep: m[m.length - 1], + })); + + const export_regex = + /export\s+(?:default\s+|const\s+|let\s+|var\s+|function\s+|class\s+|enum\s+|type\s+|interface\s+)?([\w$]+)?(?:\s*=\s*|(?:\s*{[^}]*})?)?[^;]*;/g; + const exports: Array = [...contents.matchAll(export_regex)].map(m => ({ + ident: m[1], + })); + + imports_and_exports[path.replaceAll("\\", "/").split("/").pop()!] 
= { imports, exports }; + return undefined; + }; + + const onLoadModuleData = async ({ defer }) => { + await defer(); + const contents = JSON.stringify(imports_and_exports); + + return { + contents, + loader: "json", + }; + }; + + build.onLoad({ filter: /\.ts/ }, onLoadTS); + + build.onLoad({ filter: /module_data\.json/ }, onLoadModuleData); + + onFinalizeCallRegistry.register(onLoadTS, undefined); + onFinalizeCallRegistry.register(onLoadModuleData, undefined); + }, + }; + onFinalizeCallRegistry.register(plugin.setup, undefined); + return plugin; + })(), + ], + }); + })(); + expect(result.success).toBeTrue(); await Bun.$`${bunExe()} run ${result.outputs[0].path}`; const output = await Bun.$`cat ${path.join(folder, "dist", "output.json")}`.json(); @@ -619,5 +641,8 @@ warn: (msg: string) => console.warn(\`[WARN] \${msg}\`) "exports": [{ "ident": "logger" }], }, }); + Bun.gc(true); + await onFinalizeCalledThrice.promise; + expect(onFinalizeCallCount).toBe(3); }); }); diff --git a/test/bundler/bundler_minify.test.ts b/test/bundler/bundler_minify.test.ts index 7cd6c18c2b..0317c49f7d 100644 --- a/test/bundler/bundler_minify.test.ts +++ b/test/bundler/bundler_minify.test.ts @@ -1,4 +1,5 @@ import { describe, expect } from "bun:test"; +import { normalizeBunSnapshot } from "harness"; import { itBundled } from "./expectBundled"; describe("bundler", () => { @@ -690,4 +691,379 @@ describe("bundler", () => { stdout: "foo\ntrue\ntrue\ndisabled_for_development", }, }); + + itBundled("minify/ErrorConstructorOptimization", { + files: { + "/entry.js": /* js */ ` + // Test all Error constructors + capture(new Error()); + capture(new Error("message")); + capture(new Error("message", { cause: "cause" })); + + capture(new TypeError()); + capture(new TypeError("type error")); + + capture(new SyntaxError()); + capture(new SyntaxError("syntax error")); + + capture(new RangeError()); + capture(new RangeError("range error")); + + capture(new ReferenceError()); + capture(new 
ReferenceError("ref error")); + + capture(new EvalError()); + capture(new EvalError("eval error")); + + capture(new URIError()); + capture(new URIError("uri error")); + + capture(new AggregateError([], "aggregate error")); + capture(new AggregateError([new Error("e1")], "multiple")); + + // Test with complex arguments + const msg = "dynamic"; + capture(new Error(msg)); + capture(new TypeError(getErrorMessage())); + + // Test that other constructors are not affected + capture(new Date()); + capture(new Map()); + capture(new Set()); + + function getErrorMessage() { return "computed"; } + `, + }, + capture: [ + "Error()", + 'Error("message")', + 'Error("message", { cause: "cause" })', + "TypeError()", + 'TypeError("type error")', + "SyntaxError()", + 'SyntaxError("syntax error")', + "RangeError()", + 'RangeError("range error")', + "ReferenceError()", + 'ReferenceError("ref error")', + "EvalError()", + 'EvalError("eval error")', + "URIError()", + 'URIError("uri error")', + 'AggregateError([], "aggregate error")', + 'AggregateError([Error("e1")], "multiple")', + "Error(msg)", + "TypeError(getErrorMessage())", + "/* @__PURE__ */ new Date", + "/* @__PURE__ */ new Map", + "/* @__PURE__ */ new Set", + ], + minifySyntax: true, + target: "bun", + }); + + itBundled("minify/ErrorConstructorWithVariables", { + files: { + "/entry.js": /* js */ ` + function capture(val) { console.log(val); return val; } + // Test that Error constructors work with variables and expressions + const e1 = new Error("test1"); + const e2 = new TypeError("test2"); + const e3 = new SyntaxError("test3"); + + capture(e1.message); + capture(e2.message); + capture(e3.message); + + // Test that they're still Error instances + capture(e1 instanceof Error); + capture(e2 instanceof TypeError); + capture(e3 instanceof SyntaxError); + + // Test with try-catch + try { + throw new RangeError("out of range"); + } catch (e) { + capture(e.message); + } + `, + }, + capture: [ + "val", + "e1.message", + "e2.message", + 
"e3.message", + "e1 instanceof Error", + "e2 instanceof TypeError", + "e3 instanceof SyntaxError", + "e.message", + ], + minifySyntax: true, + target: "bun", + run: { + stdout: "test1\ntest2\ntest3\ntrue\ntrue\ntrue\nout of range", + }, + }); + + itBundled("minify/ErrorConstructorPreservesSemantics", { + files: { + "/entry.js": /* js */ ` + function capture(val) { console.log(val); return val; } + // Verify that Error() and new Error() have identical behavior + const e1 = new Error("with new"); + const e2 = Error("without new"); + + // Both should be Error instances + capture(e1 instanceof Error); + capture(e2 instanceof Error); + + // Both should have the same message + capture(e1.message === "with new"); + capture(e2.message === "without new"); + + // Both should have stack traces + capture(typeof e1.stack === "string"); + capture(typeof e2.stack === "string"); + + // Test all error types + const errors = [ + [new TypeError("t1"), TypeError("t2")], + [new SyntaxError("s1"), SyntaxError("s2")], + [new RangeError("r1"), RangeError("r2")], + ]; + + for (const [withNew, withoutNew] of errors) { + capture(withNew.constructor === withoutNew.constructor); + } + `, + }, + capture: [ + "val", + "e1 instanceof Error", + "e2 instanceof Error", + 'e1.message === "with new"', + 'e2.message === "without new"', + 'typeof e1.stack === "string"', + 'typeof e2.stack === "string"', + "withNew.constructor === withoutNew.constructor", + ], + minifySyntax: true, + target: "bun", + run: { + stdout: "true\ntrue\ntrue\ntrue\ntrue\ntrue\ntrue\ntrue\ntrue", + }, + }); + + itBundled("minify/AdditionalGlobalConstructorOptimization", { + files: { + "/entry.js": /* js */ ` + // Test Array constructor + capture(new Array()); + capture(new Array(3)); + capture(new Array(1, 2, 3)); + + // Test Array with non-numeric single arguments (should convert to literal) + capture(new Array("string")); + capture(new Array(true)); + capture(new Array(null)); + capture(new Array(undefined)); + capture(new 
Array({})); + + // Test Object constructor + capture(new Object()); + capture(new Object(null)); + capture(new Object({ a: 1 })); + + // Test Function constructor + capture(new Function("return 42")); + capture(new Function("a", "b", "return a + b")); + + // Test RegExp constructor + capture(new RegExp("test")); + capture(new RegExp("test", "gi")); + capture(new RegExp(/abc/)); + + // Test with variables + const pattern = "\\d+"; + capture(new RegExp(pattern)); + + // Test that other constructors are preserved + capture(new Date()); + capture(new Map()); + capture(new Set()); + `, + }, + capture: [ + "[]", // new Array() -> [] + "Array(3)", // new Array(3) stays as Array(3) because it creates sparse array + `[ + 1, + 2, + 3 +]`, // new Array(1, 2, 3) -> [1, 2, 3] + `[ + "string" +]`, // new Array("string") -> ["string"] + `[ + !0 +]`, // new Array(true) -> [true] (minified to !0) + `[ + null +]`, // new Array(null) -> [null] + `[ + void 0 +]`, // new Array(undefined) -> [void 0] + `[ + {} +]`, // new Array({}) -> [{}] + "{}", // new Object() -> {} + "{}", // new Object(null) -> {} + "{ a: 1 }", // new Object({ a: 1 }) -> { a: 1 } + 'Function("return 42")', + 'Function("a", "b", "return a + b")', + 'new RegExp("test")', + 'new RegExp("test", "gi")', + "new RegExp(/abc/)", + "new RegExp(pattern)", + "/* @__PURE__ */ new Date", + "/* @__PURE__ */ new Map", + "/* @__PURE__ */ new Set", + ], + minifySyntax: true, + target: "bun", + }); + + itBundled("minify/ArrayConstructorWithNumberAndMinifyWhitespace", { + files: { + "/entry.js": /* js */ ` + capture(new Array(0)); + capture(new Array(1)); + capture(new Array(2)); + capture(new Array(3)); + capture(new Array(4)); + capture(new Array(5)); + capture(new Array(6)); + capture(new Array(7)); + capture(new Array(8)); + capture(new Array(9)); + capture(new Array(10)); + capture(new Array(11)); + capture(new Array(4.5)); + `, + }, + capture: [ + "[]", // new Array() -> [] + "[,]", // new Array(1) -> [undefined] + "[,,]", // 
new Array(2) -> [undefined, undefined] + "[,,,]", // new Array(3) -> [undefined, undefined, undefined] + "[,,,,]", // new Array(4) -> [undefined, undefined, undefined, undefined] + "[,,,,,]", // new Array(5) -> [undefined x 5] + "[,,,,,,]", // new Array(6) -> [undefined x 6] + "[,,,,,,,]", // new Array(7) -> [undefined x 7] + "[,,,,,,,,]", // new Array(8) -> [undefined x 8] + "[,,,,,,,,,]", // new Array(9) -> [undefined x 9] + "[,,,,,,,,,,]", // new Array(10) -> [undefined x 10] + "Array(11)", // new Array(11) -> Array(11) + "Array(4.5)", // new Array(4.5) is Array(4.5) because it's not an integer + ], + minifySyntax: true, + minifyWhitespace: true, + target: "bun", + }); + + itBundled("minify/GlobalConstructorSemanticsPreserved", { + files: { + "/entry.js": /* js */ ` + function capture(val) { console.log(val); return val; } + + // Test Array semantics + const a1 = new Array(1, 2, 3); + const a2 = Array(1, 2, 3); + capture(JSON.stringify(a1) === JSON.stringify(a2)); + capture(a1.constructor === a2.constructor); + + // Test sparse array semantics - new Array(5) creates sparse array + const sparse = new Array(5); + capture(sparse.length === 5); + capture(0 in sparse === false); // No element at index 0 + capture(JSON.stringify(sparse) === "[null,null,null,null,null]"); + + // Single-arg variable case: must preserve sparse semantics + const n = 3; + const a3 = new Array(n); + const a4 = Array(n); + capture(a3.length === a4.length && a3.length === 3 && a3[0] === undefined); + + // Test Object semantics + const o1 = new Object(); + const o2 = Object(); + capture(typeof o1 === typeof o2); + capture(o1.constructor === o2.constructor); + + // Test Function semantics + const f1 = new Function("return 1"); + const f2 = Function("return 1"); + capture(typeof f1 === typeof f2); + capture(f1() === f2()); + + // Test RegExp semantics + const r1 = new RegExp("test", "g"); + const r2 = RegExp("test", "g"); + capture(r1.source === r2.source); + capture(r1.flags === r2.flags); + `, 
+ }, + capture: [ + "val", + "JSON.stringify(a1) === JSON.stringify(a2)", + "a1.constructor === a2.constructor", + "sparse.length === 5", + "0 in sparse === !1", + 'JSON.stringify(sparse) === "[null,null,null,null,null]"', + "a3.length === a4.length && a3.length === 3 && a3[0] === void 0", + "typeof o1 === typeof o2", + "o1.constructor === o2.constructor", + "typeof f1 === typeof f2", + "f1() === f2()", + "r1.source === r2.source", + "r1.flags === r2.flags", + ], + minifySyntax: true, + target: "bun", + run: { + stdout: "true\ntrue\ntrue\ntrue\ntrue\ntrue\ntrue\ntrue\ntrue\ntrue\ntrue\ntrue", + }, + }); + + itBundled("minify/TypeofUndefinedOptimization", { + files: { + "/entry.js": /* js */ ` + // Test all equality operators with typeof undefined + console.log(typeof x !== 'undefined'); + console.log(typeof x != 'undefined'); + console.log('undefined' !== typeof x); + console.log('undefined' != typeof x); + + console.log(typeof x === 'undefined'); + console.log(typeof x == 'undefined'); + console.log('undefined' === typeof x); + console.log('undefined' == typeof x); + + // These should not be optimized + console.log(typeof x === 'string'); + console.log(x === 'undefined'); + console.log('undefined' === y); + console.log(typeof x === 'undefinedx'); + `, + }, + minifySyntax: true, + minifyWhitespace: true, + minifyIdentifiers: false, + onAfterBundle(api) { + const file = api.readFile("out.js"); + expect(normalizeBunSnapshot(file)).toMatchInlineSnapshot( + `"console.log(typeof x<"u");console.log(typeof x<"u");console.log(typeof x<"u");console.log(typeof x<"u");console.log(typeof x>"u");console.log(typeof x>"u");console.log(typeof x>"u");console.log(typeof x>"u");console.log(typeof x==="string");console.log(x==="undefined");console.log(y==="undefined");console.log(typeof x==="undefinedx");"`, + ); + }, + }); }); diff --git a/test/bundler/bundler_npm.test.ts b/test/bundler/bundler_npm.test.ts index 5a4c2dea47..55fa25fb20 100644 --- a/test/bundler/bundler_npm.test.ts +++ 
b/test/bundler/bundler_npm.test.ts @@ -57,17 +57,17 @@ describe("bundler", () => { "../entry.tsx", ], mappings: [ - ["react.development.js:524:'getContextName'", "1:5436:Y1"], - ["react.development.js:2495:'actScopeDepth'", "23:4092:GJ++"], - ["react.development.js:696:''Component'", '1:7498:\'Component "%s"'], - ["entry.tsx:6:'\"Content-Type\"'", '100:18849:"Content-Type"'], - ["entry.tsx:11:''", "100:19103:void"], - ["entry.tsx:23:'await'", "100:19203:await"], + ["react.development.js:524:'getContextName'", "1:5412:Y1"], + ["react.development.js:2495:'actScopeDepth'", "23:4082:GJ++"], + ["react.development.js:696:''Component'", '1:7474:\'Component "%s"'], + ["entry.tsx:6:'\"Content-Type\"'", '100:18809:"Content-Type"'], + ["entry.tsx:11:''", "100:19063:void"], + ["entry.tsx:23:'await'", "100:19163:await"], ], }, }, expectExactFilesize: { - "out/entry.js": 222174, + "out/entry.js": 221726, }, run: { stdout: "

Hello World

This is an example.

", diff --git a/test/bundler/compile-windows-metadata.test.ts b/test/bundler/compile-windows-metadata.test.ts index 6ba0109811..f01c2c023f 100644 --- a/test/bundler/compile-windows-metadata.test.ts +++ b/test/bundler/compile-windows-metadata.test.ts @@ -1,7 +1,7 @@ import { describe, expect, test } from "bun:test"; import { execSync } from "child_process"; import { promises as fs } from "fs"; -import { bunEnv, bunExe, isWindows, tempDirWithFiles } from "harness"; +import { bunEnv, bunExe, isWindows, tempDir } from "harness"; import { join } from "path"; // Helper to ensure executable cleanup @@ -18,11 +18,11 @@ function cleanup(outfile: string) { describe.skipIf(!isWindows)("Windows compile metadata", () => { describe("CLI flags", () => { test("all metadata flags via CLI", async () => { - const dir = tempDirWithFiles("windows-metadata-cli", { + using dir = tempDir("windows-metadata-cli", { "app.js": `console.log("Test app with metadata");`, }); - const outfile = join(dir, "app-with-metadata.exe"); + const outfile = join(String(dir), "app-with-metadata.exe"); await using _cleanup = cleanup(outfile); await using proc = Bun.spawn({ @@ -30,7 +30,7 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { bunExe(), "build", "--compile", - join(dir, "app.js"), + join(String(dir), "app.js"), "--outfile", outfile, "--windows-title", @@ -78,11 +78,11 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); test("partial metadata flags", async () => { - const dir = tempDirWithFiles("windows-metadata-partial", { + using dir = tempDir("windows-metadata-partial", { "app.js": `console.log("Partial metadata test");`, }); - const outfile = join(dir, "app-partial.exe"); + const outfile = join(String(dir), "app-partial.exe"); await using _cleanup = cleanup(outfile); await using proc = Bun.spawn({ @@ -90,7 +90,7 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { bunExe(), "build", "--compile", - join(dir, "app.js"), + join(String(dir), 
"app.js"), "--outfile", outfile, "--windows-title", @@ -122,12 +122,12 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); test("windows flags without --compile should error", async () => { - const dir = tempDirWithFiles("windows-no-compile", { + using dir = tempDir("windows-no-compile", { "app.js": `console.log("test");`, }); await using proc = Bun.spawn({ - cmd: [bunExe(), "build", join(dir, "app.js"), "--windows-title", "Should Fail"], + cmd: [bunExe(), "build", join(String(dir), "app.js"), "--windows-title", "Should Fail"], env: bunEnv, stdout: "pipe", stderr: "pipe", @@ -140,7 +140,7 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); test("windows flags with non-Windows target should error", async () => { - const dir = tempDirWithFiles("windows-wrong-target", { + using dir = tempDir("windows-wrong-target", { "app.js": `console.log("test");`, }); @@ -151,7 +151,7 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { "--compile", "--target", "bun-linux-x64", - join(dir, "app.js"), + join(String(dir), "app.js"), "--windows-title", "Should Fail", ], @@ -170,13 +170,13 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { describe("Bun.build() API", () => { test("all metadata via Bun.build()", async () => { - const dir = tempDirWithFiles("windows-metadata-api", { + using dir = tempDir("windows-metadata-api", { "app.js": `console.log("API metadata test");`, }); const result = await Bun.build({ - entrypoints: [join(dir, "app.js")], - outdir: dir, + entrypoints: [join(String(dir), "app.js")], + outdir: String(dir), compile: { target: "bun-windows-x64", outfile: "app-api.exe", @@ -217,13 +217,13 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); test("partial metadata via Bun.build()", async () => { - const dir = tempDirWithFiles("windows-metadata-api-partial", { + using dir = tempDir("windows-metadata-api-partial", { "app.js": `console.log("Partial API test");`, }); const result = 
await Bun.build({ - entrypoints: [join(dir, "app.js")], - outdir: dir, + entrypoints: [join(String(dir), "app.js")], + outdir: String(dir), compile: { target: "bun-windows-x64", outfile: "partial-api.exe", @@ -254,12 +254,12 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); test("relative outdir with compile", async () => { - const dir = tempDirWithFiles("windows-relative-outdir", { + using dir = tempDir("windows-relative-outdir", { "app.js": `console.log("Relative outdir test");`, }); const result = await Bun.build({ - entrypoints: [join(dir, "app.js")], + entrypoints: [join(String(dir), "app.js")], outdir: "./out", compile: { target: "bun-windows-x64", @@ -290,14 +290,23 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { ]; test.each(testVersionFormats)("version format: $input", async ({ input, expected }) => { - const dir = tempDirWithFiles(`windows-version-${input.replace(/\./g, "-")}`, { + using dir = tempDir(`windows-version-${input.replace(/\./g, "-")}`, { "app.js": `console.log("Version test");`, }); - const outfile = join(dir, "version-test.exe"); + const outfile = join(String(dir), "version-test.exe"); await using proc = Bun.spawn({ - cmd: [bunExe(), "build", "--compile", join(dir, "app.js"), "--outfile", outfile, "--windows-version", input], + cmd: [ + bunExe(), + "build", + "--compile", + join(String(dir), "app.js"), + "--outfile", + outfile, + "--windows-version", + input, + ], env: bunEnv, stdout: "pipe", stderr: "pipe", @@ -314,7 +323,7 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); test("invalid version format should error gracefully", async () => { - const dir = tempDirWithFiles("windows-invalid-version", { + using dir = tempDir("windows-invalid-version", { "app.js": `console.log("Invalid version test");`, }); @@ -332,9 +341,9 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { bunExe(), "build", "--compile", - join(dir, "app.js"), + join(String(dir), "app.js"), 
"--outfile", - join(dir, "test.exe"), + join(String(dir), "test.exe"), "--windows-version", version, ], @@ -349,21 +358,123 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); }); - describe("Edge cases", () => { - test("long strings in metadata", async () => { - const dir = tempDirWithFiles("windows-long-strings", { - "app.js": `console.log("Long strings test");`, + describe("Original Filename removal", () => { + test("Original Filename field should be empty", async () => { + using dir = tempDir("windows-original-filename", { + "app.js": `console.log("Original filename test");`, }); - const longString = Buffer.alloc(255, "A").toString(); - const outfile = join(dir, "long-strings.exe"); + const outfile = join(String(dir), "test-original.exe"); + await using _cleanup = cleanup(outfile); await using proc = Bun.spawn({ cmd: [ bunExe(), "build", "--compile", - join(dir, "app.js"), + join(String(dir), "app.js"), + "--outfile", + outfile, + "--windows-title", + "Test Application", + ], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + const exitCode = await proc.exited; + expect(exitCode).toBe(0); + + // Check that Original Filename is empty (not "bun.exe") + const getMetadata = (field: string) => { + try { + return execSync(`powershell -Command "(Get-ItemProperty '${outfile}').VersionInfo.${field}"`, { + encoding: "utf8", + }).trim(); + } catch { + return ""; + } + }; + + const originalFilename = getMetadata("OriginalFilename"); + expect(originalFilename).toBe(""); + expect(originalFilename).not.toBe("bun.exe"); + }); + + test("Original Filename should be empty even with all metadata set", async () => { + using dir = tempDir("windows-original-filename-full", { + "app.js": `console.log("Full metadata test");`, + }); + + const outfile = join(String(dir), "full-metadata.exe"); + await using _cleanup = cleanup(outfile); + + await using proc = Bun.spawn({ + cmd: [ + bunExe(), + "build", + "--compile", + join(String(dir), "app.js"), + 
"--outfile", + outfile, + "--windows-title", + "Complete App", + "--windows-publisher", + "Test Publisher", + "--windows-version", + "5.4.3.2", + "--windows-description", + "Application with full metadata", + "--windows-copyright", + "© 2024 Test", + ], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + const exitCode = await proc.exited; + expect(exitCode).toBe(0); + + const getMetadata = (field: string) => { + try { + return execSync(`powershell -Command "(Get-ItemProperty '${outfile}').VersionInfo.${field}"`, { + encoding: "utf8", + }).trim(); + } catch { + return ""; + } + }; + + // Verify all custom metadata is set correctly + expect(getMetadata("ProductName")).toBe("Complete App"); + expect(getMetadata("CompanyName")).toBe("Test Publisher"); + expect(getMetadata("FileDescription")).toBe("Application with full metadata"); + expect(getMetadata("ProductVersion")).toBe("5.4.3.2"); + + // But Original Filename should still be empty + const originalFilename = getMetadata("OriginalFilename"); + expect(originalFilename).toBe(""); + expect(originalFilename).not.toBe("bun.exe"); + }); + }); + + describe("Edge cases", () => { + test("long strings in metadata", async () => { + using dir = tempDir("windows-long-strings", { + "app.js": `console.log("Long strings test");`, + }); + + const longString = Buffer.alloc(255, "A").toString(); + const outfile = join(String(dir), "long-strings.exe"); + + await using proc = Bun.spawn({ + cmd: [ + bunExe(), + "build", + "--compile", + join(String(dir), "app.js"), "--outfile", outfile, "--windows-title", @@ -384,18 +495,18 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); test("special characters in metadata", async () => { - const dir = tempDirWithFiles("windows-special-chars", { + using dir = tempDir("windows-special-chars", { "app.js": `console.log("Special chars test");`, }); - const outfile = join(dir, "special-chars.exe"); + const outfile = join(String(dir), "special-chars.exe"); await using proc = 
Bun.spawn({ cmd: [ bunExe(), "build", "--compile", - join(dir, "app.js"), + join(String(dir), "app.js"), "--outfile", outfile, "--windows-title", @@ -433,18 +544,18 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); test("unicode in metadata", async () => { - const dir = tempDirWithFiles("windows-unicode", { + using dir = tempDir("windows-unicode", { "app.js": `console.log("Unicode test");`, }); - const outfile = join(dir, "unicode.exe"); + const outfile = join(String(dir), "unicode.exe"); await using proc = Bun.spawn({ cmd: [ bunExe(), "build", "--compile", - join(dir, "app.js"), + join(String(dir), "app.js"), "--outfile", outfile, "--windows-title", @@ -469,11 +580,11 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { }); test("empty strings in metadata", async () => { - const dir = tempDirWithFiles("windows-empty-strings", { + using dir = tempDir("windows-empty-strings", { "app.js": `console.log("Empty strings test");`, }); - const outfile = join(dir, "empty.exe"); + const outfile = join(String(dir), "empty.exe"); await using _cleanup = cleanup(outfile); // Empty strings should be treated as not provided @@ -482,7 +593,7 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { bunExe(), "build", "--compile", - join(dir, "app.js"), + join(String(dir), "app.js"), "--outfile", outfile, "--windows-title", @@ -505,18 +616,18 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { describe("Combined with other compile options", () => { test("metadata with --windows-hide-console", async () => { - const dir = tempDirWithFiles("windows-metadata-hide-console", { + using dir = tempDir("windows-metadata-hide-console", { "app.js": `console.log("Hidden console test");`, }); - const outfile = join(dir, "hidden-with-metadata.exe"); + const outfile = join(String(dir), "hidden-with-metadata.exe"); await using proc = Bun.spawn({ cmd: [ bunExe(), "build", "--compile", - join(dir, "app.js"), + join(String(dir), "app.js"), 
"--outfile", outfile, "--windows-hide-console", @@ -577,23 +688,23 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { 0x00, // Offset ]); - const dir = tempDirWithFiles("windows-metadata-icon", { + using dir = tempDir("windows-metadata-icon", { "app.js": `console.log("Icon test");`, "icon.ico": icoHeader, }); - const outfile = join(dir, "icon-with-metadata.exe"); + const outfile = join(String(dir), "icon-with-metadata.exe"); await using proc = Bun.spawn({ cmd: [ bunExe(), "build", "--compile", - join(dir, "app.js"), + join(String(dir), "app.js"), "--outfile", outfile, "--windows-icon", - join(dir, "icon.ico"), + join(String(dir), "icon.ico"), "--windows-title", "App with Icon", "--windows-version", @@ -607,23 +718,21 @@ describe.skipIf(!isWindows)("Windows compile metadata", () => { const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); // Icon might fail but metadata should still work - if (exitCode === 0) { - const exists = await Bun.file(outfile).exists(); - expect(exists).toBe(true); + const exists = await Bun.file(outfile).exists(); + expect(exists).toBe(true); - const getMetadata = (field: string) => { - try { - return execSync(`powershell -Command "(Get-ItemProperty '${outfile}').VersionInfo.${field}"`, { - encoding: "utf8", - }).trim(); - } catch { - return ""; - } - }; + const getMetadata = (field: string) => { + try { + return execSync(`powershell -Command "(Get-ItemProperty '${outfile}').VersionInfo.${field}"`, { + encoding: "utf8", + }).trim(); + } catch { + return ""; + } + }; - expect(getMetadata("ProductName")).toBe("App with Icon"); - expect(getMetadata("ProductVersion")).toBe("2.0.0.0"); - } + expect(getMetadata("ProductName")).toBe("App with Icon"); + expect(getMetadata("ProductVersion")).toBe("2.0.0.0"); }); }); }); diff --git a/test/bundler/esbuild/dce.test.ts b/test/bundler/esbuild/dce.test.ts index 260d709d42..a435d9f8c2 100644 --- a/test/bundler/esbuild/dce.test.ts +++ 
b/test/bundler/esbuild/dce.test.ts @@ -1926,7 +1926,7 @@ describe("bundler", () => { `, }, format: "iife", - todo: true, + minifySyntax: true, dce: true, }); itBundled("dce/RemoveUnusedImports", { diff --git a/test/cli/hot/hot.test.ts b/test/cli/hot/hot.test.ts index d0fb0de6a6..1a4ad85951 100644 --- a/test/cli/hot/hot.test.ts +++ b/test/cli/hot/hot.test.ts @@ -470,7 +470,7 @@ ${" ".repeat(reloadCounter * 2)}throw new Error(${reloadCounter});`, const match = next.match(/\s*at.*?:1003:(\d+)$/); if (!match) throw new Error("invalid string: " + next); const col = match[1]; - expect(Number(col)).toBe(1 + "throw ".length + (reloadCounter - 1) * 2); + expect(Number(col)).toBe(1 + "throw new ".length + (reloadCounter - 1) * 2); any = true; } diff --git a/test/cli/run/workspaces.test.ts b/test/cli/run/workspaces.test.ts new file mode 100644 index 0000000000..1c741aa3cd --- /dev/null +++ b/test/cli/run/workspaces.test.ts @@ -0,0 +1,103 @@ +import { expect, test } from "bun:test"; +import { bunEnv, bunExe, tempDirWithFiles } from "harness"; + +test("bun run --workspaces runs script in all workspace packages", async () => { + const dir = tempDirWithFiles("workspaces-test", { + "package.json": JSON.stringify({ + name: "root", + workspaces: ["packages/*"], + scripts: { + test: "echo root test", + }, + }), + "packages/a/package.json": JSON.stringify({ + name: "a", + scripts: { + test: "echo package a test", + }, + }), + "packages/b/package.json": JSON.stringify({ + name: "b", + scripts: { + test: "echo package b test", + }, + }), + }); + + const proc = Bun.spawn({ + cmd: [bunExe(), "run", "--workspaces", "test"], + env: bunEnv, + cwd: dir, + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); + + expect(exitCode).toBe(0); + expect(stdout).toContain("package a test"); + expect(stdout).toContain("package b test"); + // Root should not be included when using --workspaces + 
expect(stdout).not.toContain("root test"); +}); + +test("bun run --workspaces --if-present succeeds when script is missing", async () => { + const dir = tempDirWithFiles("workspaces-if-present", { + "package.json": JSON.stringify({ + name: "root", + workspaces: ["packages/*"], + }), + "packages/a/package.json": JSON.stringify({ + name: "a", + scripts: { + test: "echo package a test", + }, + }), + "packages/b/package.json": JSON.stringify({ + name: "b", + // No test script + }), + }); + + const proc = Bun.spawn({ + cmd: [bunExe(), "run", "--workspaces", "--if-present", "test"], + env: bunEnv, + cwd: dir, + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); + + expect(exitCode).toBe(0); + expect(stdout).toContain("package a test"); + // Should not fail for package b +}); + +test("bun run --workspaces fails when no packages have the script", async () => { + const dir = tempDirWithFiles("workspaces-no-script", { + "package.json": JSON.stringify({ + name: "root", + workspaces: ["packages/*"], + }), + "packages/a/package.json": JSON.stringify({ + name: "a", + }), + "packages/b/package.json": JSON.stringify({ + name: "b", + }), + }); + + const proc = Bun.spawn({ + cmd: [bunExe(), "run", "--workspaces", "nonexistent"], + env: bunEnv, + cwd: dir, + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); + + expect(exitCode).toBe(1); + expect(stderr).toContain("No workspace packages have script"); +}); diff --git a/test/harness.ts b/test/harness.ts index c341ca7564..4f8ef6eb09 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -8,12 +8,11 @@ import { gc as bunGC, sleepSync, spawnSync, unsafe, which, write } from "bun"; import { heapStats } from "bun:jsc"; import { afterAll, beforeAll, describe, expect, test } from "bun:test"; -import { ChildProcess, fork } from 
"child_process"; +import { ChildProcess, execSync, fork } from "child_process"; import { readdir, readFile, readlink, rm, writeFile } from "fs/promises"; import fs, { closeSync, openSync, rmSync } from "node:fs"; import os from "node:os"; import { dirname, isAbsolute, join } from "path"; -import { execSync } from "child_process"; type Awaitable = T | Promise; @@ -31,7 +30,7 @@ export const libcFamily: "glibc" | "musl" = process.platform !== "linux" ? "glibc" : // process.report.getReport() has incorrect type definitions. - (process.report.getReport() as any).header.glibcVersionRuntime + (process.report.getReport() as { header: { glibcVersionRuntime: boolean } }).header.glibcVersionRuntime ? "glibc" : "musl"; @@ -1301,11 +1300,15 @@ export const expiredTls = Object.freeze({ passphrase: "1234", }); -// ❯ openssl x509 -enddate -noout -in -// notAfter=Sep 5 23:27:34 2025 GMT +// openssl req -x509 -nodes -days 3650 -newkey rsa:2048 \ +// -keyout localhost.key \ +// -out localhost.crt \ +// -subj "/C=US/ST=CA/L=San Francisco/O=Oven/OU=Team Bun/CN=server-bun" \ +// -addext "subjectAltName = DNS:localhost,IP:127.0.0.1,IP:::1" +// notAfter=Sep 4 03:00:49 2035 GMT export const tls = Object.freeze({ - cert: "-----BEGIN 
CERTIFICATE-----\nMIIDrzCCApegAwIBAgIUHaenuNcUAu0tjDZGpc7fK4EX78gwDQYJKoZIhvcNAQEL\nBQAwaTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh\nbmNpc2NvMQ0wCwYDVQQKDARPdmVuMREwDwYDVQQLDAhUZWFtIEJ1bjETMBEGA1UE\nAwwKc2VydmVyLWJ1bjAeFw0yMzA5MDYyMzI3MzRaFw0yNTA5MDUyMzI3MzRaMGkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNj\nbzENMAsGA1UECgwET3ZlbjERMA8GA1UECwwIVGVhbSBCdW4xEzARBgNVBAMMCnNl\ncnZlci1idW4wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC+7odzr3yI\nYewRNRGIubF5hzT7Bym2dDab4yhaKf5drL+rcA0J15BM8QJ9iSmL1ovg7x35Q2MB\nKw3rl/Yyy3aJS8whZTUze522El72iZbdNbS+oH6GxB2gcZB6hmUehPjHIUH4icwP\ndwVUeR6fB7vkfDddLXe0Tb4qsO1EK8H0mr5PiQSXfj39Yc1QHY7/gZ/xeSrt/6yn\n0oH9HbjF2XLSL2j6cQPKEayartHN0SwzwLi0eWSzcziVPSQV7c6Lg9UuIHbKlgOF\nzDpcp1p1lRqv2yrT25im/dS6oy9XX+p7EfZxqeqpXX2fr5WKxgnzxI3sW93PG8FU\nIDHtnUsoHX3RAgMBAAGjTzBNMCwGA1UdEQQlMCOCCWxvY2FsaG9zdIcEfwAAAYcQ\nAAAAAAAAAAAAAAAAAAAAATAdBgNVHQ4EFgQUF3y/su4J/8ScpK+rM2LwTct6EQow\nDQYJKoZIhvcNAQELBQADggEBAGWGWp59Bmrk3Gt0bidFLEbvlOgGPWCT9ZrJUjgc\nhY44E+/t4gIBdoKOSwxo1tjtz7WsC2IYReLTXh1vTsgEitk0Bf4y7P40+pBwwZwK\naeIF9+PC6ZoAkXGFRoyEalaPVQDBg/DPOMRG9OH0lKfen9OGkZxmmjRLJzbyfAhU\noI/hExIjV8vehcvaJXmkfybJDYOYkN4BCNqPQHNf87ZNdFCb9Zgxwp/Ou+47J5k4\n5plQ+K7trfKXG3ABMbOJXNt1b0sH8jnpAsyHY4DLEQqxKYADbXsr3YX/yy6c0eOo\nX2bHGD1+zGsb7lGyNyoZrCZ0233glrEM4UxmvldBcWwOWfk=\n-----END CERTIFICATE-----\n", - key: "-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC+7odzr3yIYewR\nNRGIubF5hzT7Bym2dDab4yhaKf5drL+rcA0J15BM8QJ9iSmL1ovg7x35Q2MBKw3r\nl/Yyy3aJS8whZTUze522El72iZbdNbS+oH6GxB2gcZB6hmUehPjHIUH4icwPdwVU\neR6fB7vkfDddLXe0Tb4qsO1EK8H0mr5PiQSXfj39Yc1QHY7/gZ/xeSrt/6yn0oH9\nHbjF2XLSL2j6cQPKEayartHN0SwzwLi0eWSzcziVPSQV7c6Lg9UuIHbKlgOFzDpc\np1p1lRqv2yrT25im/dS6oy9XX+p7EfZxqeqpXX2fr5WKxgnzxI3sW93PG8FUIDHt\nnUsoHX3RAgMBAAECggEAAckMqkn+ER3c7YMsKRLc5bUE9ELe+ftUwfA6G+oXVorn\nE+uWCXGdNqI+TOZkQpurQBWn9IzTwv19QY+H740cxo0ozZVSPE4v4czIilv9XlVw\n3YCNa2uMxeqp76WMbz1xEhaFEgn6ASTVf3hxYJYKM0ljhPX8Vb8wWwlLONxr4w4X\nOnQAB5QE7i7LVRsQIpWKnGsALePeQjzhzUZDhz0UnTyGU6GfC+V+hN3RkC34A8oK\njR3/Wsjahev0Rpb+9Pbu3SgTrZTtQ+srlRrEsDG0wVqxkIk9ueSMOHlEtQ7zYZsk\nlX59Bb8LHNGQD5o+H1EDaC6OCsgzUAAJtDRZsPiZEQKBgQDs+YtVsc9RDMoC0x2y\nlVnP6IUDXt+2UXndZfJI3YS+wsfxiEkgK7G3AhjgB+C+DKEJzptVxP+212hHnXgr\n1gfW/x4g7OWBu4IxFmZ2J/Ojor+prhHJdCvD0VqnMzauzqLTe92aexiexXQGm+WW\nwRl3YZLmkft3rzs3ZPhc1G2X9QKBgQDOQq3rrxcvxSYaDZAb+6B/H7ZE4natMCiz\nLx/cWT8n+/CrJI2v3kDfdPl9yyXIOGrsqFgR3uhiUJnz+oeZFFHfYpslb8KvimHx\nKI+qcVDcprmYyXj2Lrf3fvj4pKorc+8TgOBDUpXIFhFDyM+0DmHLfq+7UqvjU9Hs\nkjER7baQ7QKBgQDTh508jU/FxWi9RL4Jnw9gaunwrEt9bxUc79dp+3J25V+c1k6Q\nDPDBr3mM4PtYKeXF30sBMKwiBf3rj0CpwI+W9ntqYIwtVbdNIfWsGtV8h9YWHG98\nJ9q5HLOS9EAnogPuS27walj7wL1k+NvjydJ1of+DGWQi3aQ6OkMIegap0QKBgBlR\nzCHLa5A8plG6an9U4z3Xubs5BZJ6//QHC+Uzu3IAFmob4Zy+Lr5/kITlpCyw6EdG\n3xDKiUJQXKW7kluzR92hMCRnVMHRvfYpoYEtydxcRxo/WS73SzQBjTSQmicdYzLE\ntkLtZ1+ZfeMRSpXy0gR198KKAnm0d2eQBqAJy0h9AoGBAM80zkd+LehBKq87Zoh7\ndtREVWslRD1C5HvFcAxYxBybcKzVpL89jIRGKB8SoZkF7edzhqvVzAMP0FFsEgCh\naClYGtO+uo+B91+5v2CCqowRJUGfbFOtCuSPR7+B3LDK8pkjK2SQ0mFPUfRA5z0z\nNVWtC0EYNBTRkqhYtqr3ZpUc\n-----END PRIVATE KEY-----\n", + cert: "-----BEGIN 
CERTIFICATE-----\nMIID4jCCAsqgAwIBAgIUcaRq6J/YF++Bo01Zc+HeQvCbnWMwDQYJKoZIhvcNAQEL\nBQAwaTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh\nbmNpc2NvMQ0wCwYDVQQKDARPdmVuMREwDwYDVQQLDAhUZWFtIEJ1bjETMBEGA1UE\nAwwKc2VydmVyLWJ1bjAeFw0yNTA5MDYwMzAwNDlaFw0zNTA5MDQwMzAwNDlaMGkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNj\nbzENMAsGA1UECgwET3ZlbjERMA8GA1UECwwIVGVhbSBCdW4xEzARBgNVBAMMCnNl\ncnZlci1idW4wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDlYzosgRgX\nHL6vMh1V0ERFhsvlZrtRojSw6tafr3SQBphU793/rGiYZlL/lJ9HIlLkx9JMbuTj\nNm5U2eRwHiTQIeWD4aCIESwPlkdaVYtC+IOj55bJN8xNa7h5GyJwF7PnPetAsKyE\n8DMBn1gKMhaIis7HHOUtk4/K3Y4peU44d04z0yPt6JtY5Sbvi1E7pGX6T/2c9sHs\ndIDeDctWnewpXXs8zkAla0KNWQfpDnpS53wxAfStTA4lSrA9daxC7hZopQlLxFIb\nJk+0BLbEsXtrJ54T5iguHk+2MDVAy4MOqP9XbKV7eGHk73l6+CSwmHyHBxh4ChxR\nQeT5BP0MUTn1AgMBAAGjgYEwfzAdBgNVHQ4EFgQUw7nEnh4uOdZVZUapQzdAUaVa\nAn0wHwYDVR0jBBgwFoAUw7nEnh4uOdZVZUapQzdAUaVaAn0wDwYDVR0TAQH/BAUw\nAwEB/zAsBgNVHREEJTAjgglsb2NhbGhvc3SHBH8AAAGHEAAAAAAAAAAAAAAAAAAA\nAAEwDQYJKoZIhvcNAQELBQADggEBAEA8r1fvDLMSCb8bkAURpFk8chn8pl5MChzT\nYUDaLdCCBjPXJkSXNdyuwS+T/ljAGyZbW5xuDccCNKltawO4CbyEXUEZbYr3w9eq\nj8uqymJPhFf0O1rKOI2han5GBCgHwG13QwKI+4uu7390nD+TlzLOhxFfvOG7OadH\nQNMNLNyldgF4Nb8vWdz0FtQiGUIrO7iq4LFhhd1lCxe0q+FAYSEYcc74WtF/Yo8V\nJQauXuXyoP5FqLzNt/yeNQhceyIXJGKCsjr5/bASBmVlCwgRfsD3jpG37L8YCJs1\nL4WEikcY4Lzb2NF9e94IyZdQsRqd9DFBF5zP013MSUiuhiow32k=\n-----END CERTIFICATE-----\n", + key: "-----BEGIN PRIVATE 
KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDlYzosgRgXHL6v\nMh1V0ERFhsvlZrtRojSw6tafr3SQBphU793/rGiYZlL/lJ9HIlLkx9JMbuTjNm5U\n2eRwHiTQIeWD4aCIESwPlkdaVYtC+IOj55bJN8xNa7h5GyJwF7PnPetAsKyE8DMB\nn1gKMhaIis7HHOUtk4/K3Y4peU44d04z0yPt6JtY5Sbvi1E7pGX6T/2c9sHsdIDe\nDctWnewpXXs8zkAla0KNWQfpDnpS53wxAfStTA4lSrA9daxC7hZopQlLxFIbJk+0\nBLbEsXtrJ54T5iguHk+2MDVAy4MOqP9XbKV7eGHk73l6+CSwmHyHBxh4ChxRQeT5\nBP0MUTn1AgMBAAECggEABtPvC5uVGr0DjQX2GxONsK8cOxoVec7U+C4pUMwBcXcM\nyjxwlHdujpi/IDXtjsm+A2rSPu2vGPdKDfMFanPvPxW/Ne99noc6U0VzHsR8lnP8\nwSB328nyJhzOeyZcXk9KTtgIPF7156gZsJLsZTNL+ej90i3xQWvKxCxXmrLuad5O\nz/TrgZkC6wC3fgj1d3e8bMljQ7tLxbshJMYVI5o6RFTxy84DLI+rlvPkf7XbiMPf\n2lsm4jcJKvfx+164HZJ9QVlx8ncqOHAnGvxb2xHHfqv4JAbz615t7yRvtaw4Paj5\n6kQSf0VWnsVzgxNJWvnUZym/i/Qf5nQafjChCyKOEQKBgQD9f4SkvJrp/mFKWLHd\nkDvRpSIIltfJsa5KShn1IHsQXFwc0YgyP4SKQb3Ckv+/9UFHK9EzM+WlPxZi7ZOS\nhsWhIfkI4c4ORpxUQ+hPi0K2k+HIY7eYyONqDAzw5PGkKBo3mSGMHDXYywSqexhB\nCCMHuHdMhwyHdz4PWYOK3C2VMQKBgQDnpsrHK7lM9aVb8wNhTokbK5IlTSzH/5oJ\nlAVu6G6H3tM5YQeoDXztbZClvrvKU8DU5UzwaC+8AEWQwaram29QIDpAI3nVQQ0k\ndmHHp/pCeADdRG2whaGcl418UJMMv8AUpWTRm+kVLTLqfTHBC0ji4NlCQMHCUCfd\nU8TeUi5QBQKBgQDvJNd7mboDOUmLG7VgMetc0Y4T0EnuKsMjrlhimau/OYJkZX84\n+BcPXwmnf4nqC3Lzs3B9/12L0MJLvZjUSHQ0mJoZOPxtF0vvasjEEbp0B3qe0wOn\nDQ0NRCUJNNKJbJOfE8VEKnDZ/lx+f/XXk9eINwvElDrLqUBQtr+TxjbyYQKBgAxQ\nlZ8Y9/TbajsFJDzcC/XhzxckjyjisbGoqNFIkfevJNN8EQgiD24f0Py+swUChtHK\njtiI8WCxMwGLCiYs9THxRKd8O1HW73fswy32BBvcfU9F//7OW9UTSXY+YlLfLrrq\nP/3UqAN0L6y/kxGMJAfLpEEdaC+IS1Y8yc531/ZxAoGASYiasDpePtmzXklDxk3h\njEw64QAdXK2p/xTMjSeTtcqJ7fvaEbg+Mfpxq0mdTjfbTdR9U/nzAkwS7OoZZ4Du\nueMVls0IVqcNnBtikG8wgdxN27b5JPXS+GzQ0zDSpWFfRPZiIh37BAXr0D1voluJ\nrEHkcals6p7hL98BoxjFIvA=\n-----END PRIVATE KEY-----\n", }); export const invalidTls = Object.freeze({ diff --git a/test/integration/bun-types/bun-types.test.ts b/test/integration/bun-types/bun-types.test.ts index 05443dfe41..0cb9ab9997 100644 --- a/test/integration/bun-types/bun-types.test.ts +++ 
b/test/integration/bun-types/bun-types.test.ts @@ -120,6 +120,7 @@ async function diagnose( // always check lib files for this integration test // (prevent https://github.com/oven-sh/bun/issues/8761 ever happening again) skipLibCheck: false, + skipDefaultLibCheck: false, }; const host: ts.LanguageServiceHost = { @@ -417,173 +418,173 @@ describe("@types/bun integration test", () => { message: "No overload matches this call.", }, { + code: 2353, line: "globals.ts:307:5", message: "Object literal may only specify known properties, and 'headers' does not exist in type 'string[]'.", - code: 2353, }, { + code: 2345, line: "http.ts:43:24", message: "Argument of type '() => AsyncGenerator | \"hey\", void, unknown>' is not assignable to parameter of type 'BodyInit | null | undefined'.", - code: 2345, }, { + code: 2345, line: "http.ts:55:24", message: "Argument of type 'AsyncGenerator | \"it works!\", void, unknown>' is not assignable to parameter of type 'BodyInit | null | undefined'.", - code: 2345, }, { - line: "index.ts:193:14", + code: 2345, + line: "index.ts:196:14", message: "Argument of type 'AsyncGenerator, void, unknown>' is not assignable to parameter of type 'BodyInit | null | undefined'.", - code: 2345, }, { - line: "index.ts:323:29", + code: 2345, + line: "index.ts:326:29", message: "Argument of type '{ headers: { \"x-bun\": string; }; }' is not assignable to parameter of type 'number'.", - code: 2345, }, { + code: 2339, line: "spawn.ts:62:38", message: "Property 'text' does not exist on type 'ReadableStream>'.", - code: 2339, }, { + code: 2339, line: "spawn.ts:107:38", message: "Property 'text' does not exist on type 'ReadableStream>'.", - code: 2339, }, { - line: "streams.ts:18:3", - message: "No overload matches this call.", - code: 2769, + "code": 2769, + "line": "streams.ts:18:3", + "message": "No overload matches this call.", }, { - line: "streams.ts:20:16", - message: "Property 'write' does not exist on type 'ReadableByteStreamController'.", - code: 
2339, + "code": 2339, + "line": "streams.ts:20:16", + "message": "Property 'write' does not exist on type 'ReadableByteStreamController'.", }, { - line: "streams.ts:46:19", - message: "Property 'json' does not exist on type 'ReadableStream>'.", - code: 2339, + "code": 2339, + "line": "streams.ts:46:19", + "message": "Property 'json' does not exist on type 'ReadableStream>'.", }, { - line: "streams.ts:47:19", - message: "Property 'bytes' does not exist on type 'ReadableStream>'.", - code: 2339, + "code": 2339, + "line": "streams.ts:47:19", + "message": "Property 'bytes' does not exist on type 'ReadableStream>'.", }, { - line: "streams.ts:48:19", - message: "Property 'text' does not exist on type 'ReadableStream>'.", - code: 2339, + "code": 2339, + "line": "streams.ts:48:19", + "message": "Property 'text' does not exist on type 'ReadableStream>'.", }, { - line: "streams.ts:49:19", - message: "Property 'blob' does not exist on type 'ReadableStream>'.", - code: 2339, + "code": 2339, + "line": "streams.ts:49:19", + "message": "Property 'blob' does not exist on type 'ReadableStream>'.", }, { + code: 2353, line: "websocket.ts:25:5", message: "Object literal may only specify known properties, and 'protocols' does not exist in type 'string[]'.", - code: 2353, }, { + code: 2353, line: "websocket.ts:30:5", message: "Object literal may only specify known properties, and 'protocol' does not exist in type 'string[]'.", - code: 2353, }, { + code: 2353, line: "websocket.ts:35:5", message: "Object literal may only specify known properties, and 'protocol' does not exist in type 'string[]'.", - code: 2353, }, { + code: 2353, line: "websocket.ts:43:5", message: "Object literal may only specify known properties, and 'headers' does not exist in type 'string[]'.", - code: 2353, }, { + code: 2353, line: "websocket.ts:51:5", message: "Object literal may only specify known properties, and 'protocols' does not exist in type 'string[]'.", - code: 2353, }, { + code: 2554, line: 
"websocket.ts:185:29", message: "Expected 2 arguments, but got 0.", - code: 2554, }, { + code: 2551, line: "websocket.ts:192:17", message: "Property 'URL' does not exist on type 'WebSocket'. Did you mean 'url'?", - code: 2551, }, { + code: 2322, line: "websocket.ts:196:3", message: "Type '\"nodebuffer\"' is not assignable to type 'BinaryType'.", - code: 2322, }, { + code: 2339, line: "websocket.ts:242:6", message: "Property 'ping' does not exist on type 'WebSocket'.", - code: 2339, }, { + code: 2339, line: "websocket.ts:245:6", message: "Property 'ping' does not exist on type 'WebSocket'.", - code: 2339, }, { + code: 2339, line: "websocket.ts:249:6", message: "Property 'ping' does not exist on type 'WebSocket'.", - code: 2339, }, { + code: 2339, line: "websocket.ts:253:6", message: "Property 'ping' does not exist on type 'WebSocket'.", - code: 2339, }, { + code: 2339, line: "websocket.ts:256:6", message: "Property 'pong' does not exist on type 'WebSocket'.", - code: 2339, }, { + code: 2339, line: "websocket.ts:259:6", message: "Property 'pong' does not exist on type 'WebSocket'.", - code: 2339, }, { + code: 2339, line: "websocket.ts:263:6", message: "Property 'pong' does not exist on type 'WebSocket'.", - code: 2339, }, { + code: 2339, line: "websocket.ts:267:6", message: "Property 'pong' does not exist on type 'WebSocket'.", - code: 2339, }, { + code: 2339, line: "websocket.ts:270:6", message: "Property 'terminate' does not exist on type 'WebSocket'.", - code: 2339, }, { + code: 2339, line: "worker.ts:23:11", message: "Property 'ref' does not exist on type 'Worker'.", - code: 2339, }, { + code: 2339, line: "worker.ts:24:11", message: "Property 'unref' does not exist on type 'Worker'.", - code: 2339, }, { + code: 2339, line: "worker.ts:25:11", message: "Property 'threadId' does not exist on type 'Worker'.", - code: 2339, }, ]); }); diff --git a/test/integration/bun-types/fixture/file.json b/test/integration/bun-types/fixture/file.json new file mode 100644 index 
0000000000..9310789e7b --- /dev/null +++ b/test/integration/bun-types/fixture/file.json @@ -0,0 +1 @@ +{ "bun": "is cool", "fact": true } diff --git a/test/integration/bun-types/fixture/index.ts b/test/integration/bun-types/fixture/index.ts index a178c91a4b..dd2075a08d 100644 --- a/test/integration/bun-types/fixture/index.ts +++ b/test/integration/bun-types/fixture/index.ts @@ -1,3 +1,6 @@ +import fact from "./file.json"; +console.log(fact); + import * as test from "bun:test"; test.describe; test.it; @@ -401,7 +404,7 @@ Bun.serve({ return new Response(body, { headers, - status: statuses[Math.floor(Math.random() * statuses.length)], + status: statuses[Math.floor(Math.random() * statuses.length)] ?? 200, }); }, }); @@ -435,7 +438,7 @@ serve({ return new Response(body, { headers, - status: statuses[Math.floor(Math.random() * statuses.length)], + status: statuses[Math.floor(Math.random() * statuses.length)] ?? 200, }); }, }); @@ -455,3 +458,9 @@ Bun.serve({ cert, }, }); + +const signal = AbortSignal.timeout(1000); +expectType(signal).is(); +expectType(signal.aborted).is(); + +expectType(RegExp.escape("foo.bar")).is(); diff --git a/test/integration/bun-types/fixture/sql.ts b/test/integration/bun-types/fixture/sql.ts index ccac825fd6..3d72c748e4 100644 --- a/test/integration/bun-types/fixture/sql.ts +++ b/test/integration/bun-types/fixture/sql.ts @@ -273,3 +273,7 @@ expectType>; expectType; expectType; expectType>; + +declare const aSqlInstance: Bun.SQL; +expectType(aSqlInstance.options.host).is(); // property exists in postgres/mysql/mariadb options +expectType(aSqlInstance.options.safeIntegers).is(); // property exits in sqlite options diff --git a/test/integration/bun-types/fixture/test.ts b/test/integration/bun-types/fixture/test.ts index e396d3d1a1..39b86f59c9 100644 --- a/test/integration/bun-types/fixture/test.ts +++ b/test/integration/bun-types/fixture/test.ts @@ -52,12 +52,20 @@ describe("bun:test", () => { expect(1).toBe(1); expect(1).not.toBe(2); // 
@ts-expect-error - expect({ a: 1 }).toEqual({ a: 1, b: undefined }); + expect({ a: 1 }).toEqual<{ a: number }>({ a: 1, b: undefined }); + + // @ts-expect-error + expect({ a: 1 }).toEqual<{ a: number; b: number }>({ a: 1, b: undefined }); + + // Support passing a type parameter to force exact type matching + expect({ a: 1 }).toEqual<{ a: number; b: number }>({ a: 1, b: 1 }); + expect({ a: 1 }).toStrictEqual({ a: 1 }); expect(new Set()).toHaveProperty("size"); expect(new Uint8Array()).toHaveProperty("byteLength", 0); expect([]).toHaveLength(0); expect(["bun"]).toContain("bun"); + expect("hello").toContain("bun"); expect(true).toBeTruthy(); expect(false).toBeFalsy(); expect(Math.PI).toBeGreaterThan(3.14); @@ -137,6 +145,161 @@ describe.each(dataAsConst)("test.each", (a, b, c) => { expectType<5 | "asdf">(c); }); +expect().pass(); +expect().fail(); + +expectType(expect()).is>(); +expectType(expect()).is>(); +expectType(expect("")).is>(); +expectType(expect("")).is>(); +expectType(expect(undefined, "Fail message")).is>(); +expectType(expect(undefined, "Fail message")).is>(); +expectType(expect("", "Fail message")).is>(); +expectType(expect("", "Fail message")).is>(); + +describe("Matcher Overload Type Tests", () => { + const num = 1; + const str = "hello"; + const numArr = [1, 2, 3]; + const strArr = ["a", "b", "c"]; + const mixedArr = [1, "a", true]; + const obj = { a: 1, b: "world", 10: true }; + const numSet = new Set([10, 20]); + + test("toBe", () => { + expect(num).toBe(1); + expect(str).toBe("hello"); + // @ts-expect-error - Type 'string' is not assignable to type 'number'. + expect(num).toBe("1"); + // @ts-expect-error - Type 'number' is not assignable to type 'string'. + expect(str).toBe(123); + // @ts-expect-error - Type 'boolean' is not assignable to type 'number'. 
+ expect(num).toBe(true); + // @ts-expect-error - Too many arguments for specific overload + expect(num).toBe(1, 2); + // @ts-expect-error - Expecting number, passed function + expect(num).toBe(() => {}); + }); + + test("toEqual", () => { + expect(numArr).toEqual([1, 2, 3]); + expect(obj).toEqual({ a: 1, b: "world", 10: true }); + // @ts-expect-error - Type 'string' is not assignable to type 'number' at index 0. + expect(numArr).toEqual(["1", 2, 3]); + // @ts-expect-error - Property 'c' is missing in type '{ a: number; b: string; 10: boolean; }'. + expect(obj).toEqual({ a: 1, b: "world", c: false }); + // @ts-expect-error - Type 'boolean' is not assignable to type 'number[]'. + expect(numArr).toEqual(true); + // @ts-expect-error - Too many arguments for specific overload + expect(numArr).toEqual([1, 2], [3]); + // @ts-expect-error - Expecting object, passed number + expect(obj).toEqual(123); + }); + + test("toStrictEqual", () => { + expect(numArr).toStrictEqual([1, 2, 3]); + expect(obj).toStrictEqual({ a: 1, b: "world", 10: true }); + // @ts-expect-error - Type 'string' is not assignable to type 'number' at index 0. + expect(numArr).toStrictEqual(["1", 2, 3]); + // @ts-expect-error - Properties are missing + expect(obj).toStrictEqual({ a: 1 }); + // @ts-expect-error - Type 'boolean' is not assignable to type 'number[]'. + expect(numArr).toStrictEqual(true); + // @ts-expect-error - Too many arguments for specific overload + expect(numArr).toStrictEqual([1, 2], [3]); + // @ts-expect-error - Expecting object, passed number + expect(obj).toStrictEqual(123); + }); + + test("toBeOneOf", () => { + expect(num).toBeOneOf([1, 2, 3]); + expect(str).toBeOneOf(strArr); + expect(num).toBeOneOf(numSet); + // @ts-expect-error - Argument of type 'number[]' is not assignable to parameter of type 'Iterable'. + expect(str).toBeOneOf>(numArr); + // @ts-expect-error - Argument of type 'string[]' is not assignable to parameter of type 'Iterable'. 
+ expect(num).toBeOneOf>(strArr); + // @ts-expect-error - Argument of type 'Set' is not assignable to parameter of type 'Iterable'. + expect(str).toBeOneOf>(numSet); + // @ts-expect-error - Argument must be iterable + expect(num).toBeOneOf(1); + // @ts-expect-error - Expecting string iterable, passed number iterable + expect(str).toBeOneOf>([1, 2, 3]); + }); + + test("toContainKey", () => { + expect(obj).toContainKey("a"); + expect(obj).toContainKey(10); // object key is number + // @ts-expect-error - Argument of type '"c"' is not assignable to parameter of type 'number | "a" | "b"'. + expect(obj).toContainKey("c"); + // @ts-expect-error - Argument of type 'boolean' is not assignable to parameter of type 'string | number'. + expect(obj).toContainKey(true); + // @ts-expect-error - Too many arguments for specific overload + expect(obj).toContainKey("a", "b"); + // @ts-expect-error - Argument of type 'symbol' is not assignable to parameter of type 'string | number'. + expect(obj).toContainKey(Symbol("a")); + }); + + test("toContainAllKeys", () => { + expect(obj).toContainAllKeys(["a", "b"]); + expect(obj).toContainAllKeys([10, "a"]); + // @ts-expect-error - Type '"c"' is not assignable to type 'number | "a" | "b"'. + expect(obj).toContainAllKeys<(keyof typeof obj)[]>(["a", "c"]); + // @ts-expect-error - Type 'boolean' is not assignable to type 'string | number'. + expect(obj).toContainAllKeys<(keyof typeof obj)[]>(["a", true]); + // @ts-expect-error - Argument must be an array + expect(obj).toContainAllKeys>("a"); + // @ts-expect-error - Array element type 'symbol' is not assignable to 'string | number'. 
+ expect(obj).toContainAllKeys<(keyof typeof obj)[]>(["a", Symbol("b")]); + }); + + test("toContainAnyKeys", () => { + expect(obj).toContainAnyKeys(["a", "b", 10]); + // @ts-expect-error - 11 is not a key + expect(obj).toContainAnyKeys(["a", "b", 11]); + // @ts-expect-error - c is not a key + expect(obj).toContainAnyKeys(["a", "c"]); // c doesn't exist, but 'a' does + // @ts-expect-error d is not a key + expect(obj).toContainAnyKeys([10, "d"]); + // @ts-expect-error - Type '"c"' is not assignable to type 'number | "a" | "b"'. Type '"d"' is not assignable to type 'number | "a" | "b"'. + expect(obj).toContainAnyKeys<(keyof typeof obj)[]>(["c", "d"]); + // @ts-expect-error - Type 'boolean' is not assignable to type 'string | number'. + expect(obj).toContainAnyKeys<(keyof typeof obj)[]>([true, false]); + // @ts-expect-error - Argument must be an array + expect(obj).toContainAnyKeys>("a"); + // @ts-expect-error - Array element type 'symbol' is not assignable to 'string | number'. + expect(obj).toContainAnyKeys<(keyof typeof obj)[]>([Symbol("a")]); + }); + + test("toContainKeys", () => { + // Alias for toContainAllKeys + expect(obj).toContainKeys(["a", "b"]); + expect(obj).toContainKeys([10, "a"]); + // @ts-expect-error - Type '"c"' is not assignable to type 'number | "a" | "b"'. + expect(obj).toContainKeys<(keyof typeof obj)[]>(["a", "c"]); + // @ts-expect-error - Type 'boolean' is not assignable to type 'string | number'. + expect(obj).toContainKeys<(keyof typeof obj)[]>(["a", true]); + // @ts-expect-error - Argument must be an array + expect(obj).toContainKeys>("a"); + // @ts-expect-error - Array element type 'symbol' is not assignable to 'string | number'. 
+ expect(obj).toContainKeys<(keyof typeof obj)[]>(["a", Symbol("b")]); + }); + + test("toContainEqual", () => { + expect(mixedArr).toContainEqual(1); + expect(mixedArr).toContainEqual("a"); + expect(mixedArr).toContainEqual(true); + // @ts-expect-error - Argument of type 'null' is not assignable to parameter of type 'string | number | boolean'. + expect(mixedArr).toContainEqual(null); + // @ts-expect-error - Argument of type 'number[]' is not assignable to parameter of type 'string | number | boolean'. + expect(mixedArr).toContainEqual(numArr); + // @ts-expect-error - Too many arguments for specific overload + expect(mixedArr).toContainEqual(1, 2); + // @ts-expect-error - Expecting string | number | boolean, got object + expect(mixedArr).toContainEqual({ a: 1 }); + }); +}); + const mySpyOnObjectWithOptionalMethod: { optionalMethod?: (input: { question: string }) => { answer: string }; } = { @@ -173,3 +336,23 @@ test("expectTypeOf basic type checks", () => { }); mock.clearAllMocks(); + +// Advanced use case tests for #18511: + +// 1. => When assignable to, we should pass (e.g. new Set() is assignable to Set). +// But when unassigbale, we should type error (e.g `string` is not assignable to `"bun"`) +// 2. => Expect that exact matches pass +// 3. => Expect that when we opt out of type safety, any value can be passed + +declare const input: "bun" | "baz" | null; +declare const expected: string; + +// @ts-expect-error +/** 1. **/ expect(input).toBe(expected); // Type error - string is not assignable to `'bun' | ...` +/** 2. **/ expect(input).toBe("bun"); // happy! +/** 3. **/ expect(input).toBe(expected); // happy! We opted out of type safety for this expectation + +declare const setOfStrings: Set; +/** 1. **/ expect(setOfStrings).toBe(new Set()); // this is inferrable to Set so this should pass +/** 2. **/ expect(setOfStrings).toBe(new Set()); // exact, so we are happy! +/** 3. **/ expect(setOfStrings).toBe>(new Set()); // happy! 
We opted out of type safety for this expectation diff --git a/test/internal/ban-limits.json b/test/internal/ban-limits.json index af4ce32d94..2766870d4b 100644 --- a/test/internal/ban-limits.json +++ b/test/internal/ban-limits.json @@ -10,7 +10,7 @@ ".stdDir()": 41, ".stdFile()": 18, "// autofix": 168, - ": [^=]+= undefined,$": 260, + ": [^=]+= undefined,$": 258, "== alloc.ptr": 0, "== allocator.ptr": 0, "@import(\"bun\").": 0, diff --git a/test/js/bun/http/bun-server.test.ts b/test/js/bun/http/bun-server.test.ts index 74834a4e32..aa5f9a79cf 100644 --- a/test/js/bun/http/bun-server.test.ts +++ b/test/js/bun/http/bun-server.test.ts @@ -1,6 +1,6 @@ import type { Server, ServerWebSocket, Socket } from "bun"; import { describe, expect, test } from "bun:test"; -import { bunEnv, bunExe, rejectUnauthorizedScope, tempDirWithFiles } from "harness"; +import { bunEnv, bunExe, rejectUnauthorizedScope, tempDirWithFiles, tls } from "harness"; import path from "path"; describe("Server", () => { @@ -405,10 +405,7 @@ describe("Server", () => { test("handshake failures should not impact future connections", async () => { using server = Bun.serve({ - tls: { - cert: "-----BEGIN 
CERTIFICATE-----\nMIIDrzCCApegAwIBAgIUHaenuNcUAu0tjDZGpc7fK4EX78gwDQYJKoZIhvcNAQEL\nBQAwaTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh\nbmNpc2NvMQ0wCwYDVQQKDARPdmVuMREwDwYDVQQLDAhUZWFtIEJ1bjETMBEGA1UE\nAwwKc2VydmVyLWJ1bjAeFw0yMzA5MDYyMzI3MzRaFw0yNTA5MDUyMzI3MzRaMGkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNj\nbzENMAsGA1UECgwET3ZlbjERMA8GA1UECwwIVGVhbSBCdW4xEzARBgNVBAMMCnNl\ncnZlci1idW4wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC+7odzr3yI\nYewRNRGIubF5hzT7Bym2dDab4yhaKf5drL+rcA0J15BM8QJ9iSmL1ovg7x35Q2MB\nKw3rl/Yyy3aJS8whZTUze522El72iZbdNbS+oH6GxB2gcZB6hmUehPjHIUH4icwP\ndwVUeR6fB7vkfDddLXe0Tb4qsO1EK8H0mr5PiQSXfj39Yc1QHY7/gZ/xeSrt/6yn\n0oH9HbjF2XLSL2j6cQPKEayartHN0SwzwLi0eWSzcziVPSQV7c6Lg9UuIHbKlgOF\nzDpcp1p1lRqv2yrT25im/dS6oy9XX+p7EfZxqeqpXX2fr5WKxgnzxI3sW93PG8FU\nIDHtnUsoHX3RAgMBAAGjTzBNMCwGA1UdEQQlMCOCCWxvY2FsaG9zdIcEfwAAAYcQ\nAAAAAAAAAAAAAAAAAAAAATAdBgNVHQ4EFgQUF3y/su4J/8ScpK+rM2LwTct6EQow\nDQYJKoZIhvcNAQELBQADggEBAGWGWp59Bmrk3Gt0bidFLEbvlOgGPWCT9ZrJUjgc\nhY44E+/t4gIBdoKOSwxo1tjtz7WsC2IYReLTXh1vTsgEitk0Bf4y7P40+pBwwZwK\naeIF9+PC6ZoAkXGFRoyEalaPVQDBg/DPOMRG9OH0lKfen9OGkZxmmjRLJzbyfAhU\noI/hExIjV8vehcvaJXmkfybJDYOYkN4BCNqPQHNf87ZNdFCb9Zgxwp/Ou+47J5k4\n5plQ+K7trfKXG3ABMbOJXNt1b0sH8jnpAsyHY4DLEQqxKYADbXsr3YX/yy6c0eOo\nX2bHGD1+zGsb7lGyNyoZrCZ0233glrEM4UxmvldBcWwOWfk=\n-----END CERTIFICATE-----\n", - key: "-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC+7odzr3yIYewR\nNRGIubF5hzT7Bym2dDab4yhaKf5drL+rcA0J15BM8QJ9iSmL1ovg7x35Q2MBKw3r\nl/Yyy3aJS8whZTUze522El72iZbdNbS+oH6GxB2gcZB6hmUehPjHIUH4icwPdwVU\neR6fB7vkfDddLXe0Tb4qsO1EK8H0mr5PiQSXfj39Yc1QHY7/gZ/xeSrt/6yn0oH9\nHbjF2XLSL2j6cQPKEayartHN0SwzwLi0eWSzcziVPSQV7c6Lg9UuIHbKlgOFzDpc\np1p1lRqv2yrT25im/dS6oy9XX+p7EfZxqeqpXX2fr5WKxgnzxI3sW93PG8FUIDHt\nnUsoHX3RAgMBAAECggEAAckMqkn+ER3c7YMsKRLc5bUE9ELe+ftUwfA6G+oXVorn\nE+uWCXGdNqI+TOZkQpurQBWn9IzTwv19QY+H740cxo0ozZVSPE4v4czIilv9XlVw\n3YCNa2uMxeqp76WMbz1xEhaFEgn6ASTVf3hxYJYKM0ljhPX8Vb8wWwlLONxr4w4X\nOnQAB5QE7i7LVRsQIpWKnGsALePeQjzhzUZDhz0UnTyGU6GfC+V+hN3RkC34A8oK\njR3/Wsjahev0Rpb+9Pbu3SgTrZTtQ+srlRrEsDG0wVqxkIk9ueSMOHlEtQ7zYZsk\nlX59Bb8LHNGQD5o+H1EDaC6OCsgzUAAJtDRZsPiZEQKBgQDs+YtVsc9RDMoC0x2y\nlVnP6IUDXt+2UXndZfJI3YS+wsfxiEkgK7G3AhjgB+C+DKEJzptVxP+212hHnXgr\n1gfW/x4g7OWBu4IxFmZ2J/Ojor+prhHJdCvD0VqnMzauzqLTe92aexiexXQGm+WW\nwRl3YZLmkft3rzs3ZPhc1G2X9QKBgQDOQq3rrxcvxSYaDZAb+6B/H7ZE4natMCiz\nLx/cWT8n+/CrJI2v3kDfdPl9yyXIOGrsqFgR3uhiUJnz+oeZFFHfYpslb8KvimHx\nKI+qcVDcprmYyXj2Lrf3fvj4pKorc+8TgOBDUpXIFhFDyM+0DmHLfq+7UqvjU9Hs\nkjER7baQ7QKBgQDTh508jU/FxWi9RL4Jnw9gaunwrEt9bxUc79dp+3J25V+c1k6Q\nDPDBr3mM4PtYKeXF30sBMKwiBf3rj0CpwI+W9ntqYIwtVbdNIfWsGtV8h9YWHG98\nJ9q5HLOS9EAnogPuS27walj7wL1k+NvjydJ1of+DGWQi3aQ6OkMIegap0QKBgBlR\nzCHLa5A8plG6an9U4z3Xubs5BZJ6//QHC+Uzu3IAFmob4Zy+Lr5/kITlpCyw6EdG\n3xDKiUJQXKW7kluzR92hMCRnVMHRvfYpoYEtydxcRxo/WS73SzQBjTSQmicdYzLE\ntkLtZ1+ZfeMRSpXy0gR198KKAnm0d2eQBqAJy0h9AoGBAM80zkd+LehBKq87Zoh7\ndtREVWslRD1C5HvFcAxYxBybcKzVpL89jIRGKB8SoZkF7edzhqvVzAMP0FFsEgCh\naClYGtO+uo+B91+5v2CCqowRJUGfbFOtCuSPR7+B3LDK8pkjK2SQ0mFPUfRA5z0z\nNVWtC0EYNBTRkqhYtqr3ZpUc\n-----END PRIVATE KEY-----\n", - }, + tls, fetch() { return new Response("Hello"); }, diff --git a/test/js/bun/test/stack.test.ts b/test/js/bun/test/stack.test.ts index db623a38ad..782215e123 100644 --- a/test/js/bun/test/stack.test.ts +++ b/test/js/bun/test/stack.test.ts @@ -78,7 +78,7 @@ test("err.line and 
err.column are set", () => { line: 3, column: 17, originalLine: 1, - originalColumn: 22, + originalColumn: 18, }, null, 2, diff --git a/test/js/bun/test/test-error-code-done-callback.test.ts b/test/js/bun/test/test-error-code-done-callback.test.ts index 0d4e9eab0e..4e6fac4adb 100644 --- a/test/js/bun/test/test-error-code-done-callback.test.ts +++ b/test/js/bun/test/test-error-code-done-callback.test.ts @@ -49,7 +49,7 @@ test("verify we print error messages passed to done callbacks", () => { 27 | done(new Error(msg + "(sync)")); ^ error: you should see this(sync) - at (/test-error-done-callback-fixture.ts:27:8) + at (/test-error-done-callback-fixture.ts:27:12) (fail) error done callback (sync) 27 | done(new Error(msg + "(sync)")); 28 | }); @@ -59,7 +59,7 @@ test("verify we print error messages passed to done callbacks", () => { 32 | done(new Error(msg + "(async with await)")); ^ error: you should see this(async with await) - at (/test-error-done-callback-fixture.ts:32:8) + at (/test-error-done-callback-fixture.ts:32:12) (fail) error done callback (async with await) 32 | done(new Error(msg + "(async with await)")); 33 | }); @@ -69,7 +69,7 @@ test("verify we print error messages passed to done callbacks", () => { 37 | done(new Error(msg + "(async with Bun.sleep)")); ^ error: you should see this(async with Bun.sleep) - at (/test-error-done-callback-fixture.ts:37:8) + at (/test-error-done-callback-fixture.ts:37:12) (fail) error done callback (async with Bun.sleep) 37 | done(new Error(msg + "(async with Bun.sleep)")); 38 | }); @@ -79,7 +79,7 @@ test("verify we print error messages passed to done callbacks", () => { 42 | done(new Error(msg + "(async)")); ^ error: you should see this(async) - at (/test-error-done-callback-fixture.ts:42:10) + at (/test-error-done-callback-fixture.ts:42:14) (fail) error done callback (async) 43 | }); 44 | }); @@ -89,7 +89,7 @@ test("verify we print error messages passed to done callbacks", () => { 48 | done(new Error(msg + "(async, 
setTimeout)")); ^ error: you should see this(async, setTimeout) - at (/test-error-done-callback-fixture.ts:48:10) + at (/test-error-done-callback-fixture.ts:48:14) (fail) error done callback (async, setTimeout) 49 | }, 0); 50 | }); @@ -99,7 +99,7 @@ test("verify we print error messages passed to done callbacks", () => { 54 | done(new Error(msg + "(async, setImmediate)")); ^ error: you should see this(async, setImmediate) - at (/test-error-done-callback-fixture.ts:54:10) + at (/test-error-done-callback-fixture.ts:54:14) (fail) error done callback (async, setImmediate) 55 | }); 56 | }); @@ -109,7 +109,7 @@ test("verify we print error messages passed to done callbacks", () => { 60 | done(new Error(msg + "(async, nextTick)")); ^ error: you should see this(async, nextTick) - at (/test-error-done-callback-fixture.ts:60:10) + at (/test-error-done-callback-fixture.ts:60:14) (fail) error done callback (async, nextTick) 62 | }); 63 | @@ -119,7 +119,7 @@ test("verify we print error messages passed to done callbacks", () => { 67 | done(new Error(msg + "(async, setTimeout, Promise.resolve)")); ^ error: you should see this(async, setTimeout, Promise.resolve) - at (/test-error-done-callback-fixture.ts:67:12) + at (/test-error-done-callback-fixture.ts:67:16) (fail) error done callback (async, setTimeout, Promise.resolve) 70 | }); 71 | @@ -129,7 +129,7 @@ test("verify we print error messages passed to done callbacks", () => { 75 | done(new Error(msg + "(async, setImmediate, Promise.resolve)")); ^ error: you should see this(async, setImmediate, Promise.resolve) - at (/test-error-done-callback-fixture.ts:75:12) + at (/test-error-done-callback-fixture.ts:75:16) (fail) error done callback (async, setImmediate, Promise.resolve) 0 pass diff --git a/test/js/bun/test/test-test.test.ts b/test/js/bun/test/test-test.test.ts index 5a29eba3ab..e102a73211 100644 --- a/test/js/bun/test/test-test.test.ts +++ b/test/js/bun/test/test-test.test.ts @@ -733,10 +733,10 @@ test("my-test", () => { const 
stackLines = output.split("\n").filter(line => line.trim().startsWith("at ")); expect(stackLines.length).toBeGreaterThan(0); if (process.platform === "win32") { - expect(stackLines[0]).toContain(`\\my-test.test.js:5:11`.replace("", test_dir)); + expect(stackLines[0]).toContain(`\\my-test.test.js:5:15`.replace("", test_dir)); } if (process.platform !== "win32") { - expect(stackLines[0]).toContain(`/my-test.test.js:5:11`.replace("", test_dir)); + expect(stackLines[0]).toContain(`/my-test.test.js:5:15`.replace("", test_dir)); } if (stage === "beforeEach") { diff --git a/test/js/bun/util/inspect-error.test.js b/test/js/bun/util/inspect-error.test.js index 5b5fe2ac54..2c94698e16 100644 --- a/test/js/bun/util/inspect-error.test.js +++ b/test/js/bun/util/inspect-error.test.js @@ -13,17 +13,17 @@ test("error.cause", () => { 3 | test("error.cause", () => { 4 | const err = new Error("error 1"); 5 | const err2 = new Error("error 2", { cause: err }); - ^ + ^ error: error 2 - at ([dir]/inspect-error.test.js:5:16) + at ([dir]/inspect-error.test.js:5:20) 1 | import { describe, expect, jest, test } from "bun:test"; 2 | 3 | test("error.cause", () => { 4 | const err = new Error("error 1"); - ^ + ^ error: error 1 - at ([dir]/inspect-error.test.js:4:15) + at ([dir]/inspect-error.test.js:4:19) " `); }); @@ -41,9 +41,9 @@ test("Error", () => { 30 | 31 | test("Error", () => { 32 | const err = new Error("my message"); - ^ + ^ error: my message - at ([dir]/inspect-error.test.js:32:15) + at ([dir]/inspect-error.test.js:32:19) " `); }); @@ -118,7 +118,7 @@ test("Error inside minified file (no color) ", () => { 26 | 
exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};expo error: error inside long minified file! 
- at ([dir]/inspect-error-fixture.min.js:26:2846) + at ([dir]/inspect-error-fixture.min.js:26:2850) at ([dir]/inspect-error-fixture.min.js:26:2890) at ([dir]/inspect-error.test.js:101:7)" `); @@ -149,7 +149,7 @@ test("Error inside minified file (color) ", () => { 26 | exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};exports.forwardRef=function(a){return{$$typeof:v,render:a}};expo | ... truncated error: error inside long minified file! 
- at ([dir]/inspect-error-fixture.min.js:26:2846) + at ([dir]/inspect-error-fixture.min.js:26:2850) at ([dir]/inspect-error-fixture.min.js:26:2890) at ([dir]/inspect-error.test.js:129:7)" `); diff --git a/test/js/bun/util/inspect.test.js b/test/js/bun/util/inspect.test.js index b31afbb640..a28e2c6313 100644 --- a/test/js/bun/util/inspect.test.js +++ b/test/js/bun/util/inspect.test.js @@ -661,7 +661,7 @@ it("ErrorEvent", () => { NNN | lineno: 42, NNN | colno: 10, NNN | error: new Error("Test error"), - ^ + ^ error: Test error at (file:NN:NN) , diff --git a/test/js/bun/util/reportError.test.ts b/test/js/bun/util/reportError.test.ts index 7ec0f8d7ac..16e3b939eb 100644 --- a/test/js/bun/util/reportError.test.ts +++ b/test/js/bun/util/reportError.test.ts @@ -21,9 +21,9 @@ test("reportError", () => { expect(output.replaceAll("\\", "/").replaceAll("/reportError.ts", "[file]")).toMatchInlineSnapshot( ` "1 | reportError(new Error("reportError Test!")); - ^ + ^ error: reportError Test! - at [file]:1:13 + at [file]:1:17 at loadAndEvaluateModule (2:1) error: true true diff --git a/test/js/node/buffer.test.js b/test/js/node/buffer.test.js index 8918d46f31..814ba2153c 100644 --- a/test/js/node/buffer.test.js +++ b/test/js/node/buffer.test.js @@ -193,6 +193,7 @@ for (let withOverridenBufferWrite of [false, true]) { expect(isAscii(new Buffer(""))).toBeTrue(); expect(isAscii(new Buffer([32, 32, 128]))).toBeFalse(); expect(isAscii(new Buffer("What did the 🦊 say?"))).toBeFalse(); + expect(new isAscii(new Buffer("What did the 🦊 say?"))).toBeFalse(); expect(isAscii(new Buffer("").buffer)).toBeTrue(); expect(isAscii(new Buffer([32, 32, 128]).buffer)).toBeFalse(); }); diff --git a/test/js/node/http/node-fetch.test.js b/test/js/node/http/node-fetch.test.js index 1e748fda6a..961bd893cd 100644 --- a/test/js/node/http/node-fetch.test.js +++ b/test/js/node/http/node-fetch.test.js @@ -90,3 +90,71 @@ test("node-fetch uses node streams instead of web streams", async () => { 
expect(Buffer.concat(chunks).toString()).toBe("hello world"); } }); + +test("node-fetch request body streams properly", async () => { + let responseResolve; + const responsePromise = new Promise(resolve => { + responseResolve = resolve; + }); + + let receivedChunks = []; + let requestBodyComplete = false; + + using server = Bun.serve({ + port: 0, + async fetch(req, server) { + const reader = req.body.getReader(); + + // Read first chunk + const { value: firstChunk } = await reader.read(); + receivedChunks.push(firstChunk); + + // Signal that response can be sent + responseResolve(); + + // Continue reading remaining chunks + let result; + while (!(result = await reader.read()).done) { + receivedChunks.push(result.value); + } + + requestBodyComplete = true; + return new Response("response sent"); + }, + }); + + const requestBody = new stream.Readable({ + read() { + // Will be controlled manually + }, + }); + + // Start the fetch request + const fetchPromise = fetch2(server.url.href, { + body: requestBody, + method: "POST", + }); + + // Send first chunk + requestBody.push("first chunk"); + + // Wait for response to be available (server has read first chunk) + await responsePromise; + + // Response is available, but request body should still be streaming + expect(requestBodyComplete).toBe(false); + + // Send more data after response is available + requestBody.push("second chunk"); + requestBody.push("third chunk"); + requestBody.push(null); // End the stream + + // Now wait for the fetch to complete + const result = await fetchPromise; + expect(await result.text()).toBe("response sent"); + + // Verify all chunks were received + const allData = Buffer.concat(receivedChunks).toString(); + expect(allData).toBe("first chunksecond chunkthird chunk"); + expect(requestBodyComplete).toBe(true); +}); diff --git a/test/js/node/perf_hooks/perf_hooks.test.ts b/test/js/node/perf_hooks/perf_hooks.test.ts index 3e9a8c43cc..29e9655237 100644 --- 
a/test/js/node/perf_hooks/perf_hooks.test.ts +++ b/test/js/node/perf_hooks/perf_hooks.test.ts @@ -2,8 +2,6 @@ import { expect, test } from "bun:test"; import perf from "perf_hooks"; test("stubs", () => { - expect(!!perf.monitorEventLoopDelay).toBeFalse(); - expect(() => perf.monitorEventLoopDelay()).toThrow(); expect(perf.performance.nodeTiming).toBeObject(); expect(perf.performance.now()).toBeNumber(); diff --git a/test/js/node/test/fixtures/.gitignore b/test/js/node/test/fixtures/.gitignore deleted file mode 100644 index 736e8ae58a..0000000000 --- a/test/js/node/test/fixtures/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!node_modules \ No newline at end of file diff --git a/test/js/node/test/fixtures/.node_repl_history_multiline b/test/js/node/test/fixtures/.node_repl_history_multiline new file mode 100644 index 0000000000..9f1ccf5e9a --- /dev/null +++ b/test/js/node/test/fixtures/.node_repl_history_multiline @@ -0,0 +1,4 @@ +] } ] } b: 4, a: 3, { c: [{ a: 1, b: 2 }, b: 4, a: 3, { }, b: 2, a: 1, { var d = [ +] } b: 2, a: 1, { const c = [ +]` 4, 3, 2, 1, `const b = [ +I can be as long as I want` I am a multiline string a = ` \ No newline at end of file diff --git a/test/js/node/test/fixtures/agent8-cert.pem b/test/js/node/test/fixtures/agent8-cert.pem deleted file mode 100644 index ee976a4528..0000000000 --- a/test/js/node/test/fixtures/agent8-cert.pem +++ /dev/null @@ -1,20 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDUDCCAjgCAQMwDQYJKoZIhvcNAQELBQAwfTELMAkGA1UEBhMCSUwxFjAUBgNV -BAoMDVN0YXJ0Q29tIEx0ZC4xKzApBgNVBAsMIlNlY3VyZSBEaWdpdGFsIENlcnRp -ZmljYXRlIFNpZ25pbmcxKTAnBgNVBAMMIFN0YXJ0Q29tIENlcnRpZmljYXRpb24g -QXV0aG9yaXR5MCAXDTE2MTAyMDIzNTk1OVoYDzIyOTYwNjE3MTQ0NjUyWjBdMQsw -CQYDVQQGEwJVUzELMAkGA1UECAwCQ0ExCzAJBgNVBAcMAlNGMQ8wDQYDVQQKDAZO -T0RFSlMxDzANBgNVBAsMBmFnZW50ODESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA8qCR7vlhx6Fr109bIS6dQUU2Iqwn -4CbYXjxfKMPj4cdCB9l68cRDNystAgNzc7RPUoiz7+gdvY9o8QCL+hiZOArH5xpR 
-lBq57hp9uXIMiZLKuZEZODWr2h1eE0rg8x4aqfWR0/JgPup3d9bOvD47pF7wGmFz -mtWlpptjXA6y7mt0ZamYdNoWkoUabrQIheEV/zspbgTJ1mhFkVeGnch5DE/AfNvs -M+cml5ZzQnm5FLKtp1CcHPaPDGUd5D3jNmNq55iZTEPQtcYErwHX9aLWQxrl8ZSq -4Xo67HP6TjL0zTzzcoJz5H68+FDVoa/gVxwpv/Cka0ief0nNgl17V8aWIQIDAQAB -MA0GCSqGSIb3DQEBCwUAA4IBAQB2z3MF4x/1WXcpzqEcyPyowEzczsCZLkhy0cG4 -eY0mt/+8+JbXdPDgrWNtfqCT2h4KMZu41kquRb63cUYy9DPwFrg8a09picvJWoBp -PMXv0o/CttFLYkQ+o0kXTy5DvGUPw9FLoPVncTkGhhX/lOvHKReplhS6lot/5r0g -nXlRaMAbzCDRxW5AAUK2p0WR4Ih84lI++1M2m6ac0q7efz3TGpyz0lukHYxNJak0 -dh7ToIpvQ54MZkxFgG0ej2HGtNBHVnCpMk9bhupDIJ65fybMtIXy8bhUuj4KX/hm -tALVY3gVezswj90SGBMxeMwcE7z/jDUpkEAIP4FM3Y+yYfmS ------END CERTIFICATE----- diff --git a/test/js/node/test/fixtures/agent8-key.pem b/test/js/node/test/fixtures/agent8-key.pem deleted file mode 100644 index 0f846c1a42..0000000000 --- a/test/js/node/test/fixtures/agent8-key.pem +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpgIBAAKCAQEA8qCR7vlhx6Fr109bIS6dQUU2Iqwn4CbYXjxfKMPj4cdCB9l6 -8cRDNystAgNzc7RPUoiz7+gdvY9o8QCL+hiZOArH5xpRlBq57hp9uXIMiZLKuZEZ -ODWr2h1eE0rg8x4aqfWR0/JgPup3d9bOvD47pF7wGmFzmtWlpptjXA6y7mt0ZamY -dNoWkoUabrQIheEV/zspbgTJ1mhFkVeGnch5DE/AfNvsM+cml5ZzQnm5FLKtp1Cc -HPaPDGUd5D3jNmNq55iZTEPQtcYErwHX9aLWQxrl8ZSq4Xo67HP6TjL0zTzzcoJz -5H68+FDVoa/gVxwpv/Cka0ief0nNgl17V8aWIQIDAQABAoIBAQC4ERcFXE5Q++Zr -bvmsv8dveAls3nxV8kJdo6FxtMMSS2+NsvExr3pqxSedCm8xDU7MR4dy7v55C+5K -P+bxsm2y9YLYkb/oAyqhN5m/8YUPbby8cRbX7OfWTkdLjZgA+Qqze+jJCWz47jn6 -QY2PhAsNVTUEXoAsq/7C2cnUUhZvBr4LfL4rPXrSCIbYsZBcZkR2fSYXLfhAJPND -FtRNteiSmQyQovkTl4RCtCpw9iVK/JLwLVOIhKUODbDC2lIIYf3j6g8Uot1RnWzm -cjyWiqsMz0eGLvdBae8HnJVVoyr3oe32Fm61qM/ONpvVydHZzULJJj16ApZgi1ag -YpzqP2fNAoGBAP4wpoqUVUN6dXlsur73DVVHMRxUf5U1zDZmSUheDidz2qxbuq8Q -kjsD3TZktqKcD5eQDWJxAOxsrOCjJmvhvt6PfYm96eSOMiLf1GksOSncJuA3gkse -EV140os7kSuuzf4Hc6hF1ZTVyo7ecSulrnl7dTylHvUgBL7bhiRA62TTAoGBAPRa -156aestNDqlbr857qiuzGnp7ZWtBy8mtjMFzjP8PhKXu+KVlW89tOloMvjskK1+3 -gFWYXz39Tt4C9tPebZ4yLcw66buGi8UUMXA+vDKTavDErmPHDIgyqx/cQwLcLr5D 
-P9RrOF8/u3hHKEdnWFFDKe42JtvM1zGINCnnJlC7AoGBANsqoX4dNYMQBFgkysO7 -CjD8SDjwFm1VzHUfLpKKHlQgDWzNTqKBfEQMKeErZ1m/i6YX26KEYtJ3RXwO0CL2 -qvcE664nJJMfk9UD/waLzeHs40wyMFKKY1ifw5GvU5VBjHU6gZuWUviYeaVD4HpM -yaoPK9+VU6Lw74aMixWZMB1nAoGBALXyeoEnp+1/iD5E/ihy3qhBaaLwBPmTeYnH -h3p4bvFw/aWMxmppia5vN7bbrD5fVUilW5LgrXJ8DmCztlTWV6sm1AExkN7IdYSe -350jqYDDUirLWMsE6Oj1SYSkvuT/THLxojKqT8RksVQDMBPS+OkxaKRugArEgSvp -rmXRLy+HAoGBAPNJaegjDv4WWd4Q2IXacebHchBlGH1KhQd8pBWJbnRO/Zq0z65f -Au7bMl6AxMfNDnSeh/UGhPNqBzoHvt9l3WgC/0T+tO00AhlhXxpQBw1OG6R9XhzQ -iObkAkHkfUnpkP91/U9d42SvZisnhqZk5K5BIxOmlY5HsejOChu0DT8/ ------END RSA PRIVATE KEY----- diff --git a/test/js/node/test/fixtures/console/stack_overflow.js b/test/js/node/test/fixtures/console/stack_overflow.js index 565692b6d6..14bceef878 100644 --- a/test/js/node/test/fixtures/console/stack_overflow.js +++ b/test/js/node/test/fixtures/console/stack_overflow.js @@ -26,11 +26,15 @@ Error.stackTraceLimit = 0; console.error('before'); +// Invalidate elements protector to force slow-path. +// The fast-path of JSON.stringify is iterative and won't throw. +Array.prototype[2] = 'foo'; + // Trigger stack overflow by stringifying a deeply nested array. 
-let array = []; -for (let i = 0; i < 100000; i++) { - array = [ array ]; -} +// eslint-disable-next-line no-sparse-arrays +let array = [,]; +for (let i = 0; i < 10000; i++) + array = [array]; JSON.stringify(array); diff --git a/test/js/node/test/fixtures/copy/utf/新建文件夹/experimental.json b/test/js/node/test/fixtures/copy/utf/新建文件夹/experimental.json new file mode 100644 index 0000000000..12611d2385 --- /dev/null +++ b/test/js/node/test/fixtures/copy/utf/新建文件夹/experimental.json @@ -0,0 +1,3 @@ +{ + "ofLife": 42 +} diff --git a/test/js/node/test/fixtures/dotenv/lines-with-only-spaces.env b/test/js/node/test/fixtures/dotenv/lines-with-only-spaces.env new file mode 100644 index 0000000000..5eeb5f48f5 --- /dev/null +++ b/test/js/node/test/fixtures/dotenv/lines-with-only-spaces.env @@ -0,0 +1,8 @@ + +EMPTY_LINE='value after an empty line' + +SPACES_LINE='value after a line with just some spaces' + +TABS_LINE='value after a line with just some tabs' + +SPACES_TABS_LINE='value after a line with just some spaces and tabs' diff --git a/test/js/node/test/fixtures/dotenv/node-options-no-tranform.env b/test/js/node/test/fixtures/dotenv/node-options-no-tranform.env new file mode 100644 index 0000000000..88ecfa8352 --- /dev/null +++ b/test/js/node/test/fixtures/dotenv/node-options-no-tranform.env @@ -0,0 +1 @@ +NODE_OPTIONS="--no-experimental-strip-types" diff --git a/test/js/node/test/fixtures/dotenv/valid.env b/test/js/node/test/fixtures/dotenv/valid.env index 120488d579..6df454da65 100644 --- a/test/js/node/test/fixtures/dotenv/valid.env +++ b/test/js/node/test/fixtures/dotenv/valid.env @@ -6,6 +6,8 @@ BASIC=basic # previous line intentionally left blank AFTER_LINE=after_line +A="B=C" +B=C=D EMPTY= EMPTY_SINGLE_QUOTES='' EMPTY_DOUBLE_QUOTES="" diff --git a/test/js/node/test/fixtures/errors/core_line_numbers.snapshot b/test/js/node/test/fixtures/errors/core_line_numbers.snapshot index 54cdb52744..9ef06c33af 100644 --- 
a/test/js/node/test/fixtures/errors/core_line_numbers.snapshot +++ b/test/js/node/test/fixtures/errors/core_line_numbers.snapshot @@ -1,10 +1,10 @@ -node:punycode:49 +node:punycode:54 throw new RangeError(errors[type]); ^ RangeError: Invalid input - at error (node:punycode:49:8) - at Object.decode (node:punycode:242:5) + at error (node:punycode:54:8) + at Object.decode (node:punycode:247:5) at Object. (*core_line_numbers.js:13:10) Node.js * diff --git a/test/js/node/test/fixtures/errors/force_colors.js b/test/js/node/test/fixtures/errors/force_colors.js index 0f3c92c6f8..a19a78f092 100644 --- a/test/js/node/test/fixtures/errors/force_colors.js +++ b/test/js/node/test/fixtures/errors/force_colors.js @@ -1 +1,2 @@ -throw new Error('Should include grayed stack trace') +'use strict'; +throw new Error('Should include grayed stack trace'); diff --git a/test/js/node/test/fixtures/errors/force_colors.snapshot b/test/js/node/test/fixtures/errors/force_colors.snapshot index e5a03ca609..93ac005e83 100644 --- a/test/js/node/test/fixtures/errors/force_colors.snapshot +++ b/test/js/node/test/fixtures/errors/force_colors.snapshot @@ -1,9 +1,9 @@ -*force_colors.js:1 -throw new Error('Should include grayed stack trace') +*force_colors.js:2 +throw new Error('Should include grayed stack trace'); ^ Error: Should include grayed stack trace - at Object. (/test*force_colors.js:1:7) + at Object. 
(/test*force_colors.js:2:7)  at *  at *  at * diff --git a/test/js/node/test/fixtures/errors/throw_in_eval_anonymous.js b/test/js/node/test/fixtures/errors/throw_in_eval_anonymous.js index aa9ab6a058..e325841f4b 100644 --- a/test/js/node/test/fixtures/errors/throw_in_eval_anonymous.js +++ b/test/js/node/test/fixtures/errors/throw_in_eval_anonymous.js @@ -6,4 +6,4 @@ eval(` throw new Error('error in anonymous script'); -`) +`); diff --git a/test/js/node/test/fixtures/errors/throw_in_eval_named.js b/test/js/node/test/fixtures/errors/throw_in_eval_named.js index 0d33fcf4d0..e04d8f7f29 100644 --- a/test/js/node/test/fixtures/errors/throw_in_eval_named.js +++ b/test/js/node/test/fixtures/errors/throw_in_eval_named.js @@ -6,4 +6,4 @@ eval(` throw new Error('error in named script'); -//# sourceURL=evalscript.js`) +//# sourceURL=evalscript.js`); diff --git a/test/js/node/test/fixtures/errors/throw_in_line_with_tabs.js b/test/js/node/test/fixtures/errors/throw_in_line_with_tabs.js index b62d422597..f38ebfbb32 100644 --- a/test/js/node/test/fixtures/errors/throw_in_line_with_tabs.js +++ b/test/js/node/test/fixtures/errors/throw_in_line_with_tabs.js @@ -19,7 +19,7 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
-/* eslint-disable indent, no-tabs */ +/* eslint-disable @stylistic/js/indent, @stylistic/js/no-tabs */ 'use strict'; require('../../common'); diff --git a/test/js/node/test/fixtures/es-modules/dep.wasm b/test/js/node/test/fixtures/es-modules/dep.wasm new file mode 100644 index 0000000000..ad9abfaa66 Binary files /dev/null and b/test/js/node/test/fixtures/es-modules/dep.wasm differ diff --git a/test/js/node/test/fixtures/es-modules/exports-cases.js b/test/js/node/test/fixtures/es-modules/exports-cases.js index 94bbde74d1..a3ee194ae3 100644 --- a/test/js/node/test/fixtures/es-modules/exports-cases.js +++ b/test/js/node/test/fixtures/es-modules/exports-cases.js @@ -7,3 +7,4 @@ exports['\u{D83C}'] = 'no'; exports['\u{D83C}\u{DF10}'] = 'yes'; exports.package = 10; // reserved word Object.defineProperty(exports, 'z', { value: 'yes' }); +exports['module.exports'] = 5; diff --git a/test/js/node/test/fixtures/es-modules/globals.js b/test/js/node/test/fixtures/es-modules/globals.js new file mode 100644 index 0000000000..0b01c0225e --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/globals.js @@ -0,0 +1,18 @@ +// globals.js - Direct global exports for WebAssembly imports + +// Immutable globals (simple values) +const i32_value = 42; +export { i32_value as '🚀i32_value' } +export const i64_value = 9223372036854775807n; // Max i64 value +export const f32_value = 3.14159; +export const f64_value = 3.141592653589793; + +// Mutable globals with WebAssembly.Global wrapper +export const i32_mut_value = new WebAssembly.Global({ value: 'i32', mutable: true }, 100); +export const i64_mut_value = new WebAssembly.Global({ value: 'i64', mutable: true }, 200n); +export const f32_mut_value = new WebAssembly.Global({ value: 'f32', mutable: true }, 2.71828); +export const f64_mut_value = new WebAssembly.Global({ value: 'f64', mutable: true }, 2.718281828459045); + +export const externref_value = { hello: 'world' }; +export const externref_mut_value = new WebAssembly.Global({ value: 
'externref', mutable: true }, { mutable: 'global' }); +export const null_externref_value = null; diff --git a/test/js/node/test/fixtures/es-modules/globals.wasm b/test/js/node/test/fixtures/es-modules/globals.wasm new file mode 100644 index 0000000000..45188ab26e Binary files /dev/null and b/test/js/node/test/fixtures/es-modules/globals.wasm differ diff --git a/test/js/node/test/fixtures/es-modules/import-meta-main.mjs b/test/js/node/test/fixtures/es-modules/import-meta-main.mjs new file mode 100644 index 0000000000..bee2c8e265 --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/import-meta-main.mjs @@ -0,0 +1 @@ +export const isMain = import.meta.main; diff --git a/test/js/node/test/fixtures/es-modules/import-meta-main.ts b/test/js/node/test/fixtures/es-modules/import-meta-main.ts new file mode 100644 index 0000000000..bee2c8e265 --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/import-meta-main.ts @@ -0,0 +1 @@ +export const isMain = import.meta.main; diff --git a/test/js/node/test/fixtures/es-modules/network-import.mjs b/test/js/node/test/fixtures/es-modules/network-import.mjs deleted file mode 100644 index 529d563b4d..0000000000 --- a/test/js/node/test/fixtures/es-modules/network-import.mjs +++ /dev/null @@ -1 +0,0 @@ -import 'http://example.com/foo.js'; diff --git a/test/js/node/test/fixtures/es-modules/require-module-instantiated/a.mjs b/test/js/node/test/fixtures/es-modules/require-module-instantiated/a.mjs new file mode 100644 index 0000000000..2918d41462 --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/require-module-instantiated/a.mjs @@ -0,0 +1,2 @@ +export { default as b } from './b.cjs'; +export { default as c } from './c.mjs'; diff --git a/test/js/node/test/fixtures/es-modules/require-module-instantiated/b.cjs b/test/js/node/test/fixtures/es-modules/require-module-instantiated/b.cjs new file mode 100644 index 0000000000..1e23a5d46d --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/require-module-instantiated/b.cjs @@ 
-0,0 +1 @@ +module.exports = require('./c.mjs'); diff --git a/test/js/node/test/fixtures/es-modules/require-module-instantiated/c.mjs b/test/js/node/test/fixtures/es-modules/require-module-instantiated/c.mjs new file mode 100644 index 0000000000..a5b4faccf9 --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/require-module-instantiated/c.mjs @@ -0,0 +1,3 @@ +const foo = 1; +export default foo; +export { foo as 'module.exports' }; diff --git a/test/js/node/test/fixtures/es-modules/tla/unresolved-with-listener.mjs b/test/js/node/test/fixtures/es-modules/tla/unresolved-with-listener.mjs new file mode 100644 index 0000000000..8bd2c0a080 --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/tla/unresolved-with-listener.mjs @@ -0,0 +1,6 @@ +process.on('exit', (exitCode) => { + console.log(`the exit listener received code: ${exitCode}`); + console.log(`process.exitCode inside the exist listener: ${process.exitCode}`); +}) + +await new Promise(() => {}); diff --git a/test/js/node/test/fixtures/es-modules/tla/unresolved-withexitcode-and-listener.mjs b/test/js/node/test/fixtures/es-modules/tla/unresolved-withexitcode-and-listener.mjs new file mode 100644 index 0000000000..fa18609123 --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/tla/unresolved-withexitcode-and-listener.mjs @@ -0,0 +1,8 @@ +process.on('exit', (exitCode) => { + console.log(`the exit listener received code: ${exitCode}`); + console.log(`process.exitCode inside the exist listener: ${process.exitCode}`); +}); + +process.exitCode = 42; + +await new Promise(() => {}); diff --git a/test/js/node/test/fixtures/es-modules/tla/unresolved-withexitcode.mjs b/test/js/node/test/fixtures/es-modules/tla/unresolved-withexitcode.mjs index 1cb9823110..0316dae1cd 100644 --- a/test/js/node/test/fixtures/es-modules/tla/unresolved-withexitcode.mjs +++ b/test/js/node/test/fixtures/es-modules/tla/unresolved-withexitcode.mjs @@ -1,2 +1,7 @@ +process.on('exit', (exitCode) => { + console.log(`the exit listener 
received code: ${exitCode}`); +}); + process.exitCode = 42; + await new Promise(() => {}); diff --git a/test/js/node/test/fixtures/es-modules/tla/unresolved.mjs b/test/js/node/test/fixtures/es-modules/tla/unresolved.mjs index 231a8cd634..37566bd568 100644 --- a/test/js/node/test/fixtures/es-modules/tla/unresolved.mjs +++ b/test/js/node/test/fixtures/es-modules/tla/unresolved.mjs @@ -1 +1,5 @@ +process.on('exit', (exitCode) => { + console.log(`the exit listener received code: ${exitCode}`); +}) + await new Promise(() => {}); diff --git a/test/js/node/test/fixtures/es-modules/top-level-wasm.wasm b/test/js/node/test/fixtures/es-modules/top-level-wasm.wasm new file mode 100644 index 0000000000..085472e7c3 Binary files /dev/null and b/test/js/node/test/fixtures/es-modules/top-level-wasm.wasm differ diff --git a/test/js/node/test/fixtures/es-modules/unimportable.wasm b/test/js/node/test/fixtures/es-modules/unimportable.wasm new file mode 100644 index 0000000000..74f97158e9 Binary files /dev/null and b/test/js/node/test/fixtures/es-modules/unimportable.wasm differ diff --git a/test/js/node/test/fixtures/es-modules/wasm-function.js b/test/js/node/test/fixtures/es-modules/wasm-function.js new file mode 100644 index 0000000000..b33b08a10e --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/wasm-function.js @@ -0,0 +1,11 @@ +export function call1 (func, thisObj, arg0) { + return func.call(thisObj, arg0); +} + +export function call2 (func, thisObj, arg0, arg1) { + return func.call(thisObj, arg0, arg1); +} + +export function call3 (func, thisObj, arg0, arg1, arg2) { + return func.call(thisObj, arg0, arg1, arg2); +} diff --git a/test/js/node/test/fixtures/es-modules/wasm-object.js b/test/js/node/test/fixtures/es-modules/wasm-object.js new file mode 100644 index 0000000000..70318fea8a --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/wasm-object.js @@ -0,0 +1,3 @@ +export const { get: getProperty, set: setProperty } = Reflect; +export const { create } = Object; 
+export const global = globalThis; diff --git a/test/js/node/test/fixtures/es-modules/wasm-source-phase.js b/test/js/node/test/fixtures/es-modules/wasm-source-phase.js new file mode 100644 index 0000000000..0485caa8c7 --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/wasm-source-phase.js @@ -0,0 +1,7 @@ +import source mod from './simple.wasm'; + +export function dyn (specifier) { + return import.source(specifier); +} + +export { mod }; diff --git a/test/js/node/test/fixtures/es-modules/wasm-string-constants.js b/test/js/node/test/fixtures/es-modules/wasm-string-constants.js new file mode 100644 index 0000000000..89cbd44f34 --- /dev/null +++ b/test/js/node/test/fixtures/es-modules/wasm-string-constants.js @@ -0,0 +1,6 @@ +const console = 'console'; +const hello_world = 'hello world'; +const log = 'log'; +const prop = 'prop'; + +export { console, hello_world as 'hello world', log, prop } diff --git a/test/js/node/test/fixtures/eval/eval_messages.snapshot b/test/js/node/test/fixtures/eval/eval_messages.snapshot index bed1674244..a80c5eee8e 100644 --- a/test/js/node/test/fixtures/eval/eval_messages.snapshot +++ b/test/js/node/test/fixtures/eval/eval_messages.snapshot @@ -2,11 +2,7 @@ [eval]:1 with(this){__filename} ^^^^ - x The 'with' statement is not supported. All symbols in a 'with' block will have type 'any'. - ,---- - 1 | with(this){__filename} - : ^^^^ - `---- +The 'with' statement is not supported. All symbols in a 'with' block will have type 'any'. 
SyntaxError: Strict mode code may not include a with statement @@ -40,7 +36,7 @@ Node.js * var ______________________________________________; throw 10 ^ 10 -(Use `node --trace-uncaught ...` to show where the exception was thrown) +(Use `* --trace-uncaught ...` to show where the exception was thrown) Node.js * @@ -48,7 +44,7 @@ Node.js * var ______________________________________________; throw 10 ^ 10 -(Use `node --trace-uncaught ...` to show where the exception was thrown) +(Use `* --trace-uncaught ...` to show where the exception was thrown) Node.js * done diff --git a/test/js/node/test/fixtures/eval/eval_typescript.js b/test/js/node/test/fixtures/eval/eval_typescript.js index 2c96b66f70..9fd1f3315d 100644 --- a/test/js/node/test/fixtures/eval/eval_typescript.js +++ b/test/js/node/test/fixtures/eval/eval_typescript.js @@ -5,21 +5,21 @@ require('../../common'); const spawnSync = require('child_process').spawnSync; const queue = [ - 'enum Foo{};', - 'throw new SyntaxError("hello")', - 'const foo;', - 'let x: number = 100;x;', - 'const foo: string = 10;', - 'function foo(){};foo(1);', - 'interface Foo{};const foo;', - 'function foo(){ await Promise.resolve(1)};', + 'enum Foo{};', + 'throw new SyntaxError("hello")', + 'const foo;', + 'let x: number = 100;x;', + 'const foo: string = 10;', + 'function foo(){};foo(1);', + 'interface Foo{};const foo;', + 'function foo(){ await Promise.resolve(1)};', ]; for (const cmd of queue) { - const args = ['--disable-warning=ExperimentalWarning', '-p', cmd]; - const result = spawnSync(process.execPath, args, { - stdio: 'pipe' - }); - process.stdout.write(result.stdout); - process.stdout.write(result.stderr); + const args = ['--disable-warning=ExperimentalWarning', '-p', cmd]; + const result = spawnSync(process.execPath, args, { + stdio: 'pipe', + }); + process.stdout.write(result.stdout); + process.stdout.write(result.stderr); } diff --git a/test/js/node/test/fixtures/eval/eval_typescript.snapshot 
b/test/js/node/test/fixtures/eval/eval_typescript.snapshot index 074e966e51..df0c221124 100644 --- a/test/js/node/test/fixtures/eval/eval_typescript.snapshot +++ b/test/js/node/test/fixtures/eval/eval_typescript.snapshot @@ -1,11 +1,7 @@ [eval]:1 enum Foo{}; ^^^^ - x TypeScript enum is not supported in strip-only mode - ,---- - 1 | enum Foo{}; - : ^^^^^^^^^^ - `---- +TypeScript enum is not supported in strip-only mode SyntaxError: Unexpected reserved word @@ -20,6 +16,7 @@ Node.js * [eval]:1 const foo; ^^^ +'const' declarations must be initialized SyntaxError: Missing initializer in const declaration @@ -28,20 +25,17 @@ Node.js * undefined false [eval]:1 - ;const foo; - ^^^ +interface Foo{};const foo; + ^^^ +'const' declarations must be initialized -SyntaxError: Missing initializer in const declaration +SyntaxError: Unexpected identifier 'Foo' Node.js * [eval]:1 function foo(){ await Promise.resolve(1)}; ^^^^^ - x await isn't allowed in non-async function - ,---- - 1 | function foo(){ await Promise.resolve(1)}; - : ^^^^^^^ - `---- +await isn't allowed in non-async function SyntaxError: await is only valid in async functions and the top level bodies of modules diff --git a/test/js/node/test/fixtures/eval/stdin_messages.snapshot b/test/js/node/test/fixtures/eval/stdin_messages.snapshot index 66bd506f75..d7ec8a0d17 100644 --- a/test/js/node/test/fixtures/eval/stdin_messages.snapshot +++ b/test/js/node/test/fixtures/eval/stdin_messages.snapshot @@ -2,11 +2,7 @@ [stdin]:1 with(this){__filename} ^^^^ - x The 'with' statement is not supported. All symbols in a 'with' block will have type 'any'. - ,---- - 1 | with(this){__filename} - : ^^^^ - `---- +The 'with' statement is not supported. All symbols in a 'with' block will have type 'any'. 
SyntaxError: Strict mode code may not include a with statement @@ -40,7 +36,7 @@ Node.js * let ______________________________________________; throw 10 ^ 10 -(Use `node --trace-uncaught ...` to show where the exception was thrown) +(Use `* --trace-uncaught ...` to show where the exception was thrown) Node.js * @@ -48,7 +44,7 @@ Node.js * let ______________________________________________; throw 10 ^ 10 -(Use `node --trace-uncaught ...` to show where the exception was thrown) +(Use `* --trace-uncaught ...` to show where the exception was thrown) Node.js * done diff --git a/test/js/node/test/fixtures/eval/stdin_typescript.js b/test/js/node/test/fixtures/eval/stdin_typescript.js index d47c495f86..e1acaf8a6b 100644 --- a/test/js/node/test/fixtures/eval/stdin_typescript.js +++ b/test/js/node/test/fixtures/eval/stdin_typescript.js @@ -5,34 +5,34 @@ require('../../common'); const spawn = require('child_process').spawn; function run(cmd, strict, cb) { - const args = ['--disable-warning=ExperimentalWarning']; - if (strict) args.push('--use_strict'); - args.push('-p'); - const child = spawn(process.execPath, args); - child.stdout.pipe(process.stdout); - child.stderr.pipe(process.stdout); - child.stdin.end(cmd); - child.on('close', cb); + const args = ['--disable-warning=ExperimentalWarning']; + if (strict) args.push('--use_strict'); + args.push('-p'); + const child = spawn(process.execPath, args); + child.stdout.pipe(process.stdout); + child.stderr.pipe(process.stdout); + child.stdin.end(cmd); + child.on('close', cb); } const queue = [ - 'enum Foo{};', - 'throw new SyntaxError("hello")', - 'const foo;', - 'let x: number = 100;x;', - 'const foo: string = 10;', - 'function foo(){};foo(1);', - 'interface Foo{};const foo;', - 'function foo(){ await Promise.resolve(1)};', + 'enum Foo{};', + 'throw new SyntaxError("hello")', + 'const foo;', + 'let x: number = 100;x;', + 'const foo: string = 10;', + 'function foo(){};foo(1);', + 'interface Foo{};const foo;', + 'function foo(){ 
await Promise.resolve(1)};', ]; function go() { - const c = queue.shift(); - if (!c) return console.log('done'); - run(c, false, function () { - run(c, true, go); - }); + const c = queue.shift(); + if (!c) return console.log('done'); + run(c, false, function() { + run(c, true, go); + }); } go(); diff --git a/test/js/node/test/fixtures/eval/stdin_typescript.snapshot b/test/js/node/test/fixtures/eval/stdin_typescript.snapshot index 3e209e6db2..d693ec34f5 100644 --- a/test/js/node/test/fixtures/eval/stdin_typescript.snapshot +++ b/test/js/node/test/fixtures/eval/stdin_typescript.snapshot @@ -1,11 +1,7 @@ [stdin]:1 enum Foo{}; ^^^^ - x TypeScript enum is not supported in strip-only mode - ,---- - 1 | enum Foo{}; - : ^^^^^^^^^^ - `---- +TypeScript enum is not supported in strip-only mode SyntaxError: Unexpected reserved word @@ -13,11 +9,7 @@ Node.js * [stdin]:1 enum Foo{}; ^^^^ - x TypeScript enum is not supported in strip-only mode - ,---- - 1 | enum Foo{}; - : ^^^^^^^^^^ - `---- +TypeScript enum is not supported in strip-only mode SyntaxError: Unexpected reserved word @@ -39,6 +31,7 @@ Node.js * [stdin]:1 const foo; ^^^ +'const' declarations must be initialized SyntaxError: Missing initializer in const declaration @@ -46,6 +39,7 @@ Node.js * [stdin]:1 const foo; ^^^ +'const' declarations must be initialized SyntaxError: Missing initializer in const declaration @@ -57,27 +51,25 @@ undefined false false [stdin]:1 - ;const foo; - ^^^ +interface Foo{};const foo; + ^^^ +'const' declarations must be initialized -SyntaxError: Missing initializer in const declaration +SyntaxError: Unexpected identifier 'Foo' Node.js * [stdin]:1 - ;const foo; - ^^^ +interface Foo{};const foo; +^^^^^^^^^ +'const' declarations must be initialized -SyntaxError: Missing initializer in const declaration +SyntaxError: Unexpected strict mode reserved word Node.js * [stdin]:1 function foo(){ await Promise.resolve(1)}; ^^^^^ - x await isn't allowed in non-async function - ,---- - 1 | function foo(){ 
await Promise.resolve(1)}; - : ^^^^^^^ - `---- +await isn't allowed in non-async function SyntaxError: await is only valid in async functions and the top level bodies of modules @@ -85,11 +77,7 @@ Node.js * [stdin]:1 function foo(){ await Promise.resolve(1)}; ^^^^^ - x await isn't allowed in non-async function - ,---- - 1 | function foo(){ await Promise.resolve(1)}; - : ^^^^^^^ - `---- +await isn't allowed in non-async function SyntaxError: await is only valid in async functions and the top level bodies of modules diff --git a/test/js/node/test/fixtures/fake-startcom-root-cert.pem b/test/js/node/test/fixtures/fake-startcom-root-cert.pem deleted file mode 100644 index 48e5713ccb..0000000000 --- a/test/js/node/test/fixtures/fake-startcom-root-cert.pem +++ /dev/null @@ -1,22 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDjzCCAnegAwIBAgIJAIIPb0xPNcgKMA0GCSqGSIb3DQEBCwUAMH0xCzAJBgNV -BAYTAklMMRYwFAYDVQQKDA1TdGFydENvbSBMdGQuMSswKQYDVQQLDCJTZWN1cmUg -RGlnaXRhbCBDZXJ0aWZpY2F0ZSBTaWduaW5nMSkwJwYDVQQDDCBTdGFydENvbSBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAgFw0xODExMTYxODQyMjFaGA8yMjkyMDgz -MDE4NDIyMVowfTELMAkGA1UEBhMCSUwxFjAUBgNVBAoMDVN0YXJ0Q29tIEx0ZC4x -KzApBgNVBAsMIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcxKTAn -BgNVBAMMIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkq -hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1mZ/bufFVPGxKagC8W7hpBephIFIZw9K -bX6ska2PXZkyqRToU5UFgTYhdBwkCNJMwaYfTqLpc9y/goRpVlLSAFk/t4W6Z0w1 -b80T149XvmelAUQTBJR49kkYspN+Jw627pf8tmmSkG5qcHykB9gr/nvoTpXtlk2t -um/SL3BQSqXmqffBM/6VpFvGAB2FNWGQUIxj55e/7p9Opjo8yS4s2lnbovV6OSJ/ -CnqEYt6Ur4kdLwVOLKlMKRG3H4q65UXfoVpE+XhFgKADAiMZySSGjBsbjF6ADPnP -/zNklvYwcM0phtQivmkKEcSOvJNsZodszYhoiwie5OknOo7Mqz9jqQIDAQABoxAw -DjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBrsLtF6MEMCWQF6YXP -DLw4friQhYzoB7w1W+fgksOOIyLyRmUEEA9X0FSfNW2a6KLmMtSoNYn3y5cLkmGr -+JE4U3ovvXDU8C3r09dynuHywcib4oFRaG8NKNqldUryO3abk+kbdxMvxQlA/NHb -33ABKPX7UTnTr6CexZ5Qr0ss62w0ELwxC3eVugJrVtDOmFt/yZF75lc0OgifK4Nj 
-dii7g+sQvzymIgdWLAIbbrc3r/NfymFgmTEMPY/M17QEIdr9YS1qAHmqA6vGvmBz -v2fCr+xrOQRzq+HO1atOmz8gOdtYJwDfUl2CWgJ2r8iMRsOTE7QgEl/+zpOM3fe+ -JU1b ------END CERTIFICATE----- diff --git a/test/js/node/test/fixtures/fetch-and-log.mjs b/test/js/node/test/fixtures/fetch-and-log.mjs new file mode 100644 index 0000000000..d019d29aa2 --- /dev/null +++ b/test/js/node/test/fixtures/fetch-and-log.mjs @@ -0,0 +1,3 @@ +const response = await fetch(process.env.FETCH_URL); +const body = await response.text(); +console.log(body); diff --git a/test/js/node/test/fixtures/guess-hash-seed.js b/test/js/node/test/fixtures/guess-hash-seed.js deleted file mode 100644 index c6166450b4..0000000000 --- a/test/js/node/test/fixtures/guess-hash-seed.js +++ /dev/null @@ -1,165 +0,0 @@ -'use strict'; -function run_repeated(n, fn) { - const res = []; - for (let i = 0; i < n; i++) res.push(fn()); - return res; -} - -const INT_MAX = 0x7fffffff; - -// from src/js/collection.js -// key must be a signed 32-bit number! -function ComputeIntegerHash(key/*, seed*/) { - let hash = key; - hash = hash ^ 0/*seed*/; - hash = ~hash + (hash << 15); // hash = (hash << 15) - hash - 1; - hash = hash ^ (hash >>> 12); - hash = hash + (hash << 2); - hash = hash ^ (hash >>> 4); - hash = (hash * 2057) | 0; // hash = (hash + (hash << 3)) + (hash << 11); - hash = hash ^ (hash >>> 16); - return hash & 0x3fffffff; -} - -const kNofHashBitFields = 2; -const kHashShift = kNofHashBitFields; -const kHashBitMask = 0xffffffff >>> kHashShift; -const kZeroHash = 27; - -function string_to_array(str) { - const res = new Array(str.length); - for (let i = 0; i < str.length; i++) { - res[i] = str.charCodeAt(i); - } - return res; -} - -function gen_specialized_hasher(str) { - const str_arr = string_to_array(str); - return Function('seed', ` - var running_hash = seed; - ${str_arr.map((c) => ` - running_hash += ${c}; - running_hash &= 0xffffffff; - running_hash += (running_hash << 10); - running_hash &= 0xffffffff; - running_hash ^= (running_hash 
>>> 6); - running_hash &= 0xffffffff; - `).join('')} - running_hash += (running_hash << 3); - running_hash &= 0xffffffff; - running_hash ^= (running_hash >>> 11); - running_hash &= 0xffffffff; - running_hash += (running_hash << 15); - running_hash &= 0xffffffff; - if ((running_hash & ${kHashBitMask}) == 0) { - return ${kZeroHash}; - } - return running_hash; - `); -} - -// adapted from HashToEntry -function hash_to_bucket(hash, numBuckets) { - return (hash & ((numBuckets) - 1)); -} - -function time_set_lookup(set, value) { - const t1 = process.hrtime(); - for (let i = 0; i < 100; i++) { - set.has(value); - } - const t = process.hrtime(t1); - const secs = t[0]; - const nanos = t[1]; - return secs * 1e9 + nanos; -} - -// Prevent optimization of SetHas(). -%NeverOptimizeFunction(time_set_lookup); - -// Set with 256 buckets; bucket 0 full, others empty -const tester_set_buckets = 256; -const tester_set = new Set(); -let tester_set_treshold; -(function() { - // fill bucket 0 and find extra numbers mapping to bucket 0 and a different - // bucket `capacity == numBuckets * 2` - let needed = Math.floor(tester_set_buckets * 1.5) + 1; - let positive_test_value; - let negative_test_value; - for (let i = 0; true; i++) { - if (i > INT_MAX) throw new Error('i too high'); - if (hash_to_bucket(ComputeIntegerHash(i), tester_set_buckets) !== 0) { - negative_test_value = i; - break; - } - } - for (let i = 0; needed > 0; i++) { - if (i > INT_MAX) throw new Error('i too high'); - if (hash_to_bucket(ComputeIntegerHash(i), tester_set_buckets) === 0) { - needed--; - if (needed == 0) { - positive_test_value = i; - } else { - tester_set.add(i); - } - } - } - - // calibrate Set access times for accessing the full bucket / an empty bucket - const pos_time = - Math.min(...run_repeated(10000, time_set_lookup.bind(null, tester_set, - positive_test_value))); - const neg_time = - Math.min(...run_repeated(10000, time_set_lookup.bind(null, tester_set, - negative_test_value))); - tester_set_treshold = 
(pos_time + neg_time) / 2; - // console.log(`pos_time: ${pos_time}, neg_time: ${neg_time},`, - // `threshold: ${tester_set_treshold}`); -})(); - -// determine hash seed -const slow_str_gen = (function*() { - let strgen_i = 0; - outer: - while (1) { - const str = `#${strgen_i++}`; - for (let i = 0; i < 1000; i++) { - if (time_set_lookup(tester_set, str) < tester_set_treshold) - continue outer; - } - yield str; - } -})(); - -const first_slow_str = slow_str_gen.next().value; -// console.log('first slow string:', first_slow_str); -const first_slow_str_special_hasher = gen_specialized_hasher(first_slow_str); -let seed_candidates = []; -//var t_before_first_seed_brute = performance.now(); -for (let seed_candidate = 0; seed_candidate < 0x100000000; seed_candidate++) { - if (hash_to_bucket(first_slow_str_special_hasher(seed_candidate), - tester_set_buckets) == 0) { - seed_candidates.push(seed_candidate); - } -} -// console.log(`got ${seed_candidates.length} candidates`); -// after ${performance.now()-t_before_first_seed_brute} -while (seed_candidates.length > 1) { - const slow_str = slow_str_gen.next().value; - const special_hasher = gen_specialized_hasher(slow_str); - const new_seed_candidates = []; - for (const seed_candidate of seed_candidates) { - if (hash_to_bucket(special_hasher(seed_candidate), tester_set_buckets) == - 0) { - new_seed_candidates.push(seed_candidate); - } - } - seed_candidates = new_seed_candidates; - // console.log(`reduced to ${seed_candidates.length} candidates`); -} -if (seed_candidates.length != 1) - throw new Error('no candidates remaining'); -const seed = seed_candidates[0]; -console.log(seed); diff --git a/test/js/node/test/fixtures/icu-punycode-toascii.json b/test/js/node/test/fixtures/icu-punycode-toascii.json deleted file mode 100644 index 814f06e794..0000000000 --- a/test/js/node/test/fixtures/icu-punycode-toascii.json +++ /dev/null @@ -1,149 +0,0 @@ -[ - "This resource is focused on highlighting issues with UTS #46 ToASCII", - { - 
"comment": "Label with hyphens in 3rd and 4th position", - "input": "aa--", - "output": "aa--" - }, - { - "input": "a†--", - "output": "xn--a---kp0a" - }, - { - "input": "ab--c", - "output": "ab--c" - }, - { - "comment": "Label with leading hyphen", - "input": "-x", - "output": "-x" - }, - { - "input": "-†", - "output": "xn----xhn" - }, - { - "input": "-x.xn--nxa", - "output": "-x.xn--nxa" - }, - { - "input": "-x.β", - "output": "-x.xn--nxa" - }, - { - "comment": "Label with trailing hyphen", - "input": "x-.xn--nxa", - "output": "x-.xn--nxa" - }, - { - "input": "x-.β", - "output": "x-.xn--nxa" - }, - { - "comment": "Empty labels", - "input": "x..xn--nxa", - "output": "x..xn--nxa" - }, - { - "input": "x..β", - "output": "x..xn--nxa" - }, - { - "comment": "Invalid Punycode", - "input": "xn--a", - "output": null - }, - { - "input": "xn--a.xn--nxa", - "output": null - }, - { - "input": "xn--a.β", - "output": null - }, - { - "comment": "Valid Punycode", - "input": "xn--nxa.xn--nxa", - "output": "xn--nxa.xn--nxa" - }, - { - "comment": "Mixed", - "input": "xn--nxa.β", - "output": "xn--nxa.xn--nxa" - }, - { - "input": "ab--c.xn--nxa", - "output": "ab--c.xn--nxa" - }, - { - "input": "ab--c.β", - "output": "ab--c.xn--nxa" - }, - { - "comment": "CheckJoiners is true", - "input": "\u200D.example", - "output": null - }, - { - "input": "xn--1ug.example", - "output": null - }, - { - "comment": "CheckBidi is true", - "input": "يa", - "output": null - }, - { - "input": "xn--a-yoc", - "output": null - }, - { - "comment": "processing_option is Nontransitional_Processing", - "input": "ශ්‍රී", - "output": "xn--10cl1a0b660p" - }, - { - "input": "نامه‌ای", - "output": "xn--mgba3gch31f060k" - }, - { - "comment": "U+FFFD", - "input": "\uFFFD.com", - "output": null - }, - { - "comment": "U+FFFD character encoded in Punycode", - "input": "xn--zn7c.com", - "output": null - }, - { - "comment": "Label longer than 63 code points", - "input": 
"x01234567890123456789012345678901234567890123456789012345678901x", - "output": "x01234567890123456789012345678901234567890123456789012345678901x" - }, - { - "input": "x01234567890123456789012345678901234567890123456789012345678901†", - "output": "xn--x01234567890123456789012345678901234567890123456789012345678901-6963b" - }, - { - "input": "x01234567890123456789012345678901234567890123456789012345678901x.xn--nxa", - "output": "x01234567890123456789012345678901234567890123456789012345678901x.xn--nxa" - }, - { - "input": "x01234567890123456789012345678901234567890123456789012345678901x.β", - "output": "x01234567890123456789012345678901234567890123456789012345678901x.xn--nxa" - }, - { - "comment": "Domain excluding TLD longer than 253 code points", - "input": "01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.0123456789012345678901234567890123456789012345678.x", - "output": "01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.0123456789012345678901234567890123456789012345678.x" - }, - { - "input": "01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.0123456789012345678901234567890123456789012345678.xn--nxa", - "output": "01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.0123456789012345678901234567890123456789012345678.xn--nxa" - }, - { - "input": 
"01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.0123456789012345678901234567890123456789012345678.β", - "output": "01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.01234567890123456789012345678901234567890123456789.0123456789012345678901234567890123456789012345678.xn--nxa" - } -] diff --git a/test/js/node/test/fixtures/icu/localizationData-v74.2.json b/test/js/node/test/fixtures/icu/localizationData-v74.2.json index 65671ba5ac..1cca79672a 100644 --- a/test/js/node/test/fixtures/icu/localizationData-v74.2.json +++ b/test/js/node/test/fixtures/icu/localizationData-v74.2.json @@ -20,14 +20,14 @@ "dateTimeFormats": { "en": "7/25/1980, 1:35:33 AM", "zh": "1980/7/25 01:35:33", - "hi": "25/7/1980, 1:35:33 am", + "hi": "25/7/1980, पू 1:35:33", "es": "25/7/1980, 1:35:33", "fr": "25/07/1980 01:35:33", - "ar": "٢٥‏/٧‏/١٩٨٠، ١:٣٥:٣٣ ص", + "ar": "25‏/7‏/1980، 1:35:33 ص", "bn": "২৫/৭/১৯৮০, ১:৩৫:৩৩ AM", "ru": "25.07.1980, 01:35:33", "pt": "25/07/1980, 01:35:33", - "ur": "25/7/1980، 1:35:33 AM", + "ur": "25/7/1980، 1:35:33 ق.د.", "id": "25/7/1980, 01.35.33", "de": "25.7.1980, 01:35:33", "ja": "1980/7/25 1:35:33", @@ -41,7 +41,7 @@ "hi": "25/7/1980", "es": "25/7/1980", "fr": "25/07/1980", - "ar": "٢٥‏/٧‏/١٩٨٠", + "ar": "25‏/7‏/1980", "bn": "২৫/৭/১৯৮০", "ru": "25.07.1980", "pt": "25/07/1980", @@ -77,7 +77,7 @@ "hi": "2,75,760.913", "es": "275.760,913", "fr": "275 760,913", - "ar": "٢٧٥٬٧٦٠٫٩١٣", + "ar": "275,760.913", "bn": "২,৭৫,৭৬০.৯১৩", "ru": "275 760,913", "pt": "275.760,913", @@ -113,7 +113,7 @@ "hi": "5,86,920.617 घंटे पहले", "es": "hace 586.920,617 horas", "fr": "il y a 586 920,617 heures", - "ar": "قبل ٥٨٦٬٩٢٠٫٦١٧ ساعة", + "ar": "قبل 586,920.617 ساعة", "bn": "৫,৮৬,৯২০.৬১৭ ঘন্টা আগে", "ru": "586 920,617 часа назад", 
"pt": "há 586.920,617 horas", diff --git a/test/js/node/test/fixtures/import-require-cycle/a.js b/test/js/node/test/fixtures/import-require-cycle/a.js new file mode 100644 index 0000000000..595a5085cf --- /dev/null +++ b/test/js/node/test/fixtures/import-require-cycle/a.js @@ -0,0 +1 @@ +module.exports.b = require('./b.js'); diff --git a/test/js/node/test/fixtures/import-require-cycle/b.js b/test/js/node/test/fixtures/import-require-cycle/b.js new file mode 100644 index 0000000000..869be25731 --- /dev/null +++ b/test/js/node/test/fixtures/import-require-cycle/b.js @@ -0,0 +1 @@ +module.exports.a = require('./a.js'); diff --git a/test/js/node/test/fixtures/import-require-cycle/c.js b/test/js/node/test/fixtures/import-require-cycle/c.js new file mode 100644 index 0000000000..39099ad760 --- /dev/null +++ b/test/js/node/test/fixtures/import-require-cycle/c.js @@ -0,0 +1,3 @@ +const obj = require('./b.js'); + +console.log('cycle equality', obj.a.b === obj); diff --git a/test/js/node/test/fixtures/import-require-cycle/preload.mjs b/test/js/node/test/fixtures/import-require-cycle/preload.mjs new file mode 100644 index 0000000000..81eed70009 --- /dev/null +++ b/test/js/node/test/fixtures/import-require-cycle/preload.mjs @@ -0,0 +1,7 @@ +import * as mod from "module"; + +mod.registerHooks({ + load(url, context, nextLoad) { + return nextLoad(url, context); + }, +}); diff --git a/test/js/node/test/fixtures/inspect-worker/index.js b/test/js/node/test/fixtures/inspect-worker/index.js new file mode 100644 index 0000000000..b0f883ef4b --- /dev/null +++ b/test/js/node/test/fixtures/inspect-worker/index.js @@ -0,0 +1,3 @@ +const { Worker } = require('worker_threads'); + +new Worker(__dirname + '/worker.js', { type: 'module' }); diff --git a/test/js/node/test/fixtures/inspect-worker/worker.js b/test/js/node/test/fixtures/inspect-worker/worker.js new file mode 100644 index 0000000000..9729bd7b41 --- /dev/null +++ b/test/js/node/test/fixtures/inspect-worker/worker.js @@ -0,0 +1,4 @@ 
+console.log("worker thread"); +process.on('exit', () => { + console.log('Worker1: Exiting...'); +}); diff --git a/test/js/node/test/fixtures/keys/Makefile b/test/js/node/test/fixtures/keys/Makefile index 3339f4b912..ffb84ec353 100644 --- a/test/js/node/test/fixtures/keys/Makefile +++ b/test/js/node/test/fixtures/keys/Makefile @@ -40,6 +40,14 @@ all: \ ec-cert.pem \ ec.pfx \ fake-cnnic-root-cert.pem \ + intermediate-ca-cert.pem \ + intermediate-ca-key.pem \ + leaf-from-intermediate-cert.pem \ + leaf-from-intermediate-key.pem \ + non-trusted-intermediate-ca-cert.pem \ + non-trusted-intermediate-ca-key.pem \ + non-trusted-leaf-from-intermediate-cert.pem \ + non-trusted-leaf-from-intermediate-key.pem \ rsa_private.pem \ rsa_private_encrypted.pem \ rsa_private_pkcs8.pem \ @@ -236,6 +244,102 @@ fake-startcom-root-cert.pem: fake-startcom-root.cnf \ echo '01' > fake-startcom-root-serial touch fake-startcom-root-database.txt + +intermediate-ca-key.pem: + openssl genrsa -out intermediate.key 2048 + +intermediate-ca-cert.pem: intermediate-ca-key.pem + openssl req -new \ + -sha256 \ + -nodes \ + -key intermediate.key \ + -subj "/C=US/ST=CA/L=SF/O=NODEJS/CN=NodeJS-Test-Intermediate-CA" \ + -out test-intermediate-ca.csr + + openssl x509 -req \ + -extensions v3_ca \ + -extfile fake-startcom-root.cnf \ + -in test-intermediate-ca.csr \ + -CA fake-startcom-root-cert.pem \ + -CAkey fake-startcom-root-key.pem \ + -CAcreateserial \ + -out intermediate-ca.pem \ + -days 99999 \ + -sha256 + rm -f test-intermediate-ca.csr + +leaf-from-intermediate-key.pem: + openssl genrsa -out leaf-from-intermediate-key.pem 2048 + +leaf-from-intermediate-cert.pem: leaf-from-intermediate-key.pem + openssl genrsa -out leaf-from-intermediate-key.pem 2048 + openssl req -new \ + -sha256 \ + -nodes \ + -key leaf-from-intermediate-key.pem \ + -addext "subjectAltName = DNS:localhost" \ + -subj "/C=US/ST=CA/L=SF/O=NODEJS/CN=localhost" \ + -out leaf-from-intermediate-cert.csr + openssl x509 -req \ + -in 
leaf-from-intermediate-cert.csr \ + -CA intermediate-ca.pem \ + -CAkey intermediate.key \ + -CAcreateserial \ + -out leaf-from-intermediate-cert.pem \ + -days 99999 \ + -copy_extensions copy \ + -sha256 + + rm -f leaf-from-intermediate-cert.csr + +non-trusted-intermediate-ca-key.pem: + openssl genrsa -out non-trusted-intermediate.key 2048 + +non-trusted-intermediate-ca-cert.pem: non-trusted-intermediate-ca-key.pem + openssl req -new \ + -sha256 \ + -nodes \ + -key non-trusted-intermediate.key \ + -subj "/C=US/ST=CA/L=SF/O=NODEJS/CN=NodeJS-Non-Trusted-Test-Intermediate-CA" \ + -out non-trusted-test-intermediate-ca.csr + + openssl x509 -req \ + -extensions v3_ca \ + -extfile fake-startcom-root.cnf \ + -in non-trusted-test-intermediate-ca.csr \ + -passin "pass:password" \ + -CA ca1-cert.pem \ + -CAkey ca1-key.pem \ + -CAcreateserial \ + -out non-trusted-intermediate-ca.pem \ + -days 99999 \ + -sha256 + rm -f non-trusted-test-intermediate-ca.csr + +non-trusted-leaf-from-intermediate-key.pem: + openssl genrsa -out non-trusted-leaf-from-intermediate-key.pem 2048 + +non-trusted-leaf-from-intermediate-cert.pem: non-trusted-leaf-from-intermediate-key.pem + openssl genrsa -out non-trusted-leaf-from-intermediate-key.pem 2048 + openssl req -new \ + -sha256 \ + -nodes \ + -key non-trusted-leaf-from-intermediate-key.pem \ + -addext "subjectAltName = DNS:localhost" \ + -subj "/C=US/ST=CA/L=SF/O=NODEJS/CN=localhost" \ + -out non-trusted-leaf-from-intermediate-cert.csr + openssl x509 -req \ + -in non-trusted-leaf-from-intermediate-cert.csr \ + -CA non-trusted-intermediate-ca.pem \ + -CAkey non-trusted-intermediate.key \ + -CAcreateserial \ + -out non-trusted-leaf-from-intermediate-cert.pem \ + -days 99999 \ + -copy_extensions copy \ + -sha256 + + rm -f non-trusted-leaf-from-intermediate-cert.csr + # # agent1 is signed by ca1. 
# diff --git a/test/js/node/test/fixtures/keys/ca1-cert.srl b/test/js/node/test/fixtures/keys/ca1-cert.srl index 79dbb4bd80..f4de097137 100644 --- a/test/js/node/test/fixtures/keys/ca1-cert.srl +++ b/test/js/node/test/fixtures/keys/ca1-cert.srl @@ -1 +1 @@ -147D36C1C2F74206DE9FAB5F2226D78ADB00A426 +147D36C1C2F74206DE9FAB5F2226D78ADB00A428 diff --git a/test/js/node/test/fixtures/keys/fake-startcom-root-cert.cer b/test/js/node/test/fixtures/keys/fake-startcom-root-cert.cer new file mode 100644 index 0000000000..117acd21b7 Binary files /dev/null and b/test/js/node/test/fixtures/keys/fake-startcom-root-cert.cer differ diff --git a/test/js/node/test/fixtures/keys/intermediate-ca.pem b/test/js/node/test/fixtures/keys/intermediate-ca.pem new file mode 100644 index 0000000000..c4a3807972 --- /dev/null +++ b/test/js/node/test/fixtures/keys/intermediate-ca.pem @@ -0,0 +1,25 @@ +-----BEGIN CERTIFICATE----- +MIIEOTCCAyGgAwIBAgIULe6EHUBNm9nZz+fYRZx1P8uqmGwwDQYJKoZIhvcNAQEL +BQAwfTELMAkGA1UEBhMCSUwxFjAUBgNVBAoMDVN0YXJ0Q29tIEx0ZC4xKzApBgNV +BAsMIlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcxKTAnBgNVBAMM +IFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MCAXDTI1MDIyMTIyMTYx +N1oYDzIyOTgxMjA2MjIxNjE3WjBeMQswCQYDVQQGEwJVUzELMAkGA1UECAwCQ0Ex +CzAJBgNVBAcMAlNGMQ8wDQYDVQQKDAZOT0RFSlMxJDAiBgNVBAMMG05vZGVKUy1U +ZXN0LUludGVybWVkaWF0ZS1DQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAKfGhM1vXISvBuEJv4yapacu1CFnH9hQ6Z7e8p1kjMjaSg+NSvofPeb6byel +Jk7GI9wRN4ZQISpKNxvQAjyc9RqkAwUDPY9KEp38PSQFU4osqvJDP4zf2dn0Hl55 +4DW22JzaWdwGgvq0admVwUBMnly4fVGBuxvy1m/j5wM6DHoSbC0Kgs13P2TpaqRT +jz7jzN5YaT16M3kTDKVcTQGzZOCro0JF+V4xIDiOV9v9Cy4F6FRuksHx/e7gWXSF +qaHqzblr9k/c8/3md5aBwHeUGJHe1+U/hhfE4D8IgG3ZdwNFI9KH5Zc8KfGTgr6s +fgbpnNg7p9d5VJNOOM4So8ybig8CAwEAAaOBzTCByjAMBgNVHRMEBTADAQH/MB0G +A1UdDgQWBBR6olPWoViHQBOxuAyYPRUSGaoEYDCBmgYDVR0jBIGSMIGPoYGBpH8w +fTELMAkGA1UEBhMCSUwxFjAUBgNVBAoMDVN0YXJ0Q29tIEx0ZC4xKzApBgNVBAsM +IlNlY3VyZSBEaWdpdGFsIENlcnRpZmljYXRlIFNpZ25pbmcxKTAnBgNVBAMMIFN0 
+YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggkAgg9vTE81yAowDQYJKoZI +hvcNAQELBQADggEBAC7nBG4JxrSFT/mJlCJxeHfFQj3xqduYePWK5H/h+buuX6OW +pjMA8se2SjQUfVn81GAtNxb1kX8o9HjmaTvkx8bq6iuF9oyJh96N22Hl3kfWXX6H +jy74Ur/pq73gpC90Xx8/DALpAYr9vKOKJM7DHWW9iuksRRvM1yh8kZagO0ewI8xU +I9DLzl6+Zu6ZChosMlIn7yGdXB3Wi5mO+1fN+ryFlOVfTurzeinDbLm4xHb6pLnP +x3VL1kKzQurUcvQvaIT3x3vd/FP+O7B+pWNyUE7HXZ9J4E2maUC+q81cpgAiCFoN +ks7RFmz1z2myhB8opEpgRFYu6lxjCtHsr+meLjo= +-----END CERTIFICATE----- diff --git a/test/js/node/test/fixtures/keys/intermediate.key b/test/js/node/test/fixtures/keys/intermediate.key new file mode 100644 index 0000000000..222d393a9e --- /dev/null +++ b/test/js/node/test/fixtures/keys/intermediate.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCnxoTNb1yErwbh +Cb+MmqWnLtQhZx/YUOme3vKdZIzI2koPjUr6Hz3m+m8npSZOxiPcETeGUCEqSjcb +0AI8nPUapAMFAz2PShKd/D0kBVOKLKryQz+M39nZ9B5eeeA1ttic2lncBoL6tGnZ +lcFATJ5cuH1Rgbsb8tZv4+cDOgx6EmwtCoLNdz9k6WqkU48+48zeWGk9ejN5Ewyl +XE0Bs2Tgq6NCRfleMSA4jlfb/QsuBehUbpLB8f3u4Fl0hamh6s25a/ZP3PP95neW +gcB3lBiR3tflP4YXxOA/CIBt2XcDRSPSh+WXPCnxk4K+rH4G6ZzYO6fXeVSTTjjO +EqPMm4oPAgMBAAECggEAAMP0GSfX6TcPNfmgaRjPhqq9BwX8bDU6S6JCwxsRVV1B +lz6Sx/9affJIjYrAWP2objmZ4j/9Vr8N70+MoxAoQh3bcatpHX0+BoB/Gun3TpsT +kJVj9dWTnd3yQYYW0sfpxxVr8YgKEvC9xuNbBVsUIeIpmDSaUO9TsSD+DdK2+duX +wKPjCe097669ZG994GP9ilG6FdfIlVNWHWPExmFgbx0ydXr97nDuurt72HnqCVRR +95g9SNAbkadUVj7iTSVovuaIQpQY4BMFICsGGRo10mMFGTzpAUwsl6OVZTUZXaST +dg/Wl8ZD98CucVFmk546pJrfPDvk+qLqt0hlkXA5mQKBgQDrqPCNzz/VhsIlTmuO +Dgmf4q9/hglR8JKjMQTuEXLGAhA09ZZrhKsGYSkciXEzmlL5mGZX+83Ss+ns8nI7 +21e6ZYm5hokltVbZ2Of2xGyeZ0SZ22QwIm4Eg2MmEpmyXAMTKAfvuvfQW1dC0UXG +JEiRBYq3Chxv82ExmlkU5gZNIwKBgQC2QaCnPVV/VkwF0912lto8IRpwgZ0Jrj4b +xqKTCc7oFNzd4Ua/I0W9qPqR1ORyVpq0li7cjHDmFWCZZMbCgy7+g5eclaZ3qWZZ +Faj4rpv7y7ODKz2W2cmug9fWrrtsr96ohW1rfVn5racbHKAsT4f+RB+Gi1NK6aWp +tOmh4MRMJQKBgQDLSk5RluJTOc/LTO39emCVG4EXejIaDHUC8Ct3j3e6FleSx/S9 +xZGfjDth0bLkuBEyHWTUK3UveWKns7IVrq7sLeF0OPmgnOFSRgo81s94ik8khpzT 
+5S+RFyJ12n/Z3AQPB25pQJm8lL8e9dbCCdTLvcMfCUrkzEgg+Sw1mgT/jwKBgQCM +7xbB/CW/AAZtgzV/3IsJcDe3xCKhN8IDTIiu1yjOQkPAt9EzQJ1PWfnZBx1YZSvg +dTnrhhZPdTxroYgpJbQTT8LPbNF7Ot1QCfXNx4gLH6vCxI8ttV/FuWIQOrHoC99L +xVGlixsmfWf5CRu66A0rS5ZtPhO8nAxkvOblLJ/emQKBgQCQkhBrZTDwgD4W6yxe +juo/H/y6PMD4vp68zk/GmuV7mzHpYg18+gGAI57dQoxWjjMxxhkB8WKpnEkXXiva +5YHq4ARUhXnPuNckvnOBj9jjy8HMeDKTPfZ6frv+B9i1y0N3ArerhPx44zCFpllH +BlVhzBa52wYAtbjg291+/G1ndw== +-----END PRIVATE KEY----- diff --git a/test/js/node/test/fixtures/keys/leaf-from-intermediate-cert.pem b/test/js/node/test/fixtures/keys/leaf-from-intermediate-cert.pem new file mode 100644 index 0000000000..8c12d33c59 --- /dev/null +++ b/test/js/node/test/fixtures/keys/leaf-from-intermediate-cert.pem @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDkjCCAnqgAwIBAgIUPgpDrWcCOmjk4xOAkLpxa7UTx/4wDQYJKoZIhvcNAQEL +BQAwXjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQswCQYDVQQHDAJTRjEPMA0G +A1UECgwGTk9ERUpTMSQwIgYDVQQDDBtOb2RlSlMtVGVzdC1JbnRlcm1lZGlhdGUt +Q0EwIBcNMjUwMjIxMjIxNjUyWhgPMjI5ODEyMDYyMjE2NTJaMEwxCzAJBgNVBAYT +AlVTMQswCQYDVQQIDAJDQTELMAkGA1UEBwwCU0YxDzANBgNVBAoMBk5PREVKUzES +MBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAnnWYLNbVnE2veKzF28rarJh0En4Rd5+1ZwHp7+iP2gjEVmjBaSGK/F80MV9l +S/wtZskUoZH0aKwiq9ly6Jp9IETte9Tk1Td6jTUeG8Vs9N6zoZcXM2Q359xbA+0X +YzvHwD6TM5LQ6l3RKhJT2BRNz0oOCVQGHGepbcLbX99E3yXW0yXvZKAIcZY0NEk2 +AZ1eDz7QAhdPQ6W8QuYjlqOa+wmxqzVb3RReMg3zrL9jfd4AgCT9IN7HMB0FkQys +y78EUHa12wlJkzHzz9N8+Qjt0537LjDpBuUBgnPn7Ukvz1kzD6q8a/dbB2RIbfVK +7o0I/P9hJuXPhRpZQeDRQmDt+QIDAQABo1gwVjAUBgNVHREEDTALgglsb2NhbGhv +c3QwHQYDVR0OBBYEFJHfQLpEP+M7+PYoxk/bY1vuDv/4MB8GA1UdIwQYMBaAFHqi +U9ahWIdAE7G4DJg9FRIZqgRgMA0GCSqGSIb3DQEBCwUAA4IBAQCXckUku5JZiXSb +qvlFH1JS7/SVeugquYZyI+boIzS2ykrLBkCVCbg6dD75Nu5VlcEGq4UNlY7vdfhk +wG/jHNe6Hm36Lm2vbwH3z21IIGZlkw4cbNzdeT5WQuQNoembtbaZSsE7s1Hs052l +kVJnq0ZJ7YgO54/0C9mE7dqhWHHWm9wPUC4emucqCKYcu1M9/onZgjjmAh39G473 +1qlWuTacywQHHCg8B0w+iZlV1rJ93dTyxJvg+fgmQj2FqBNqOXu6ojhOWHt62D3Y 
+55zXFoUqToY6kgF+e9Rkn2vbZsSQO+cXSKVyRjnfIOCC4zO37yl31q02ouVv1Uct +ubqxlcPA +-----END CERTIFICATE----- diff --git a/test/js/node/test/fixtures/keys/leaf-from-intermediate-key.pem b/test/js/node/test/fixtures/keys/leaf-from-intermediate-key.pem new file mode 100644 index 0000000000..4d074b9e66 --- /dev/null +++ b/test/js/node/test/fixtures/keys/leaf-from-intermediate-key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCedZgs1tWcTa94 +rMXbytqsmHQSfhF3n7VnAenv6I/aCMRWaMFpIYr8XzQxX2VL/C1myRShkfRorCKr +2XLomn0gRO171OTVN3qNNR4bxWz03rOhlxczZDfn3FsD7RdjO8fAPpMzktDqXdEq +ElPYFE3PSg4JVAYcZ6ltwttf30TfJdbTJe9koAhxljQ0STYBnV4PPtACF09DpbxC +5iOWo5r7CbGrNVvdFF4yDfOsv2N93gCAJP0g3scwHQWRDKzLvwRQdrXbCUmTMfPP +03z5CO3TnfsuMOkG5QGCc+ftSS/PWTMPqrxr91sHZEht9UrujQj8/2Em5c+FGllB +4NFCYO35AgMBAAECggEACIfP4A0WPZaEjWhus+cLJ+rCp+qzxcb6KPAWUBkq4lvh +tv2neOGKhgzZhlVqgoFST+PgGZUeDWMD8FCx4hIMDahMSSP0SEK29SJgizHxDEsv +bDHyOKzq4g9vsmnJfij+F0w/GDINj2pqy9sl+p5YNII5+HhWpmGRwlQQw4vlXSZq +hcubO1DyL/3FL0gVMHUZex86QJ9cYXkf++omaFNPaOsiKbZu7Whtg4rxJOBw38FD +/fX4U6SQwSxI6ffxFbmGvSBAQW4333Qvbs0xZnusKrcaKNQ3kCoQ7+cgyDogwSAE +TQN1mqPynGlMmTW4KyyR1/W0jpQEW+pll2DNCqHb8QKBgQDONX8QXu2mp/5qjXJK +Sa1orgqneadbWiUfq+6vWEIwAWbcUYGqgzUNa9OeK8jV5hEsCJOrfPvhKYdyVrfr +cu8mLtQFQLZzTlaEyX4a8Euk2xlHIYG7/giEnBugdHcHu9MV7TLRFzunc5Y4cA4W +3crScf/gl+LDO3TZ5E3ZHu4u8QKBgQDEuIagHlhcuyEfHUPRJk6ZXexlkQ383f3/ +g1aqWQxxPnlZuo/wFyxVl7YP5VNELOsiCQHm2efk+8dx0Fc8jzuafp8iSnSOJnNM +7C9K5JcbkxsJxArx1Z2ZMPfFM40Nw5kFYNCPhsuzZ/w+/eOe2EyFEZMkWdH5lMpw +Y6GvxiS/iQKBgB6WLs/F1OhoeMNjUbWVMiSZ1Di9Qca6G1GUViYqKD8ophI+AMbD +CYaBHPWUNwkLRDbM2uKP+miOmWmrVUKWXMTEI2zYCXgXAZxWqt7iD8ZXPWugm7a/ +2pGY+jwVqmY6RPg2o9gB4zZWXcznSh+4LFKE2Fh/DwK4ef+r7qQrA1dxAoGAdIEI +EfoGwNx+cCvnxw0VzZSndTtj+lcKn3GMORTF3qduPOrVZg6DTimsRFu/ZYfotV56 +RtrUkHNgmhIWKCJ33TaSTj+kKa+x52OVWphouYb0o2L8TF8Dl/89LggqyHUHwfyl +Z+sf5p9172RzktZs8v4Gk6eySEqLXeZTkoMZrmkCgYEAg8QV0rE1GprYoL02DiMT 
+/KlRyOUGawz559Rr5Ufdrm/SA37Yhyp/eADq1jrkpoL0uBd4YsqOFAtkCofxnI9i +BonK/T1JV1+wDnXYCU9Tis/d043/vCR4RVXQGfucmrPxjuObXCu5c8Q0DzpzLG3u +HmotaQ9Z3Wdd9PaX4le87R8= +-----END PRIVATE KEY----- diff --git a/test/js/node/test/fixtures/keys/non-trusted-intermediate-ca.pem b/test/js/node/test/fixtures/keys/non-trusted-intermediate-ca.pem new file mode 100644 index 0000000000..d735bfc177 --- /dev/null +++ b/test/js/node/test/fixtures/keys/non-trusted-intermediate-ca.pem @@ -0,0 +1,25 @@ +-----BEGIN CERTIFICATE----- +MIIESTCCAzGgAwIBAgIUFH02wcL3Qgben6tfIibXitsApCgwDQYJKoZIhvcNAQEL +BQAwejELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQswCQYDVQQHDAJTRjEPMA0G +A1UECgwGSm95ZW50MRAwDgYDVQQLDAdOb2RlLmpzMQwwCgYDVQQDDANjYTExIDAe +BgkqhkiG9w0BCQEWEXJ5QHRpbnljbG91ZHMub3JnMCAXDTI1MDIyNzA4MTczM1oY +DzIyOTgxMjEyMDgxNzMzWjBqMQswCQYDVQQGEwJVUzELMAkGA1UECAwCQ0ExCzAJ +BgNVBAcMAlNGMQ8wDQYDVQQKDAZOT0RFSlMxMDAuBgNVBAMMJ05vZGVKUy1Ob24t +VHJ1c3RlZC1UZXN0LUludGVybWVkaWF0ZS1DQTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAMH8MfKXtkBMn58gJVCwe2w/XOl9rNK0M348KFcYTStC2ta0 +pwaB4ax7NeXs/xCDqtbuweZ0SLcS/nAOP9KQHN+fNSiXQ0gnHh23rZRri9VCvLWE +5mGle2yjBApz7JERLW7gZX1Xtw/X5Qt9CtIYVKf7rGTgkq0kSvJQf6DhJ8e68HwG +EQCp8ZmPQTFhIgzB35wYTgeKTU3uvQAYsAIw9fC5Vta8U9uU0VyN7mFxsoMXm4/u +prk9L4AYSOFIV+njTd8xL+puSfZSKQA8yLcZ1LeRkAZo3RjUcEUPRDdLxB1UAZvh +LYcJggWmx7799MZOsF1u9d2wR9HJ1Nzg3+IJiW0CAwEAAaOB1DCB0TAMBgNVHRME +BTADAQH/MB0GA1UdDgQWBBR9aYwxOpYpUe2jMoN0MAqeG4A8GzCBoQYDVR0jBIGZ +MIGWoX6kfDB6MQswCQYDVQQGEwJVUzELMAkGA1UECAwCQ0ExCzAJBgNVBAcMAlNG +MQ8wDQYDVQQKDAZKb3llbnQxEDAOBgNVBAsMB05vZGUuanMxDDAKBgNVBAMMA2Nh +MTEgMB4GCSqGSIb3DQEJARYRcnlAdGlueWNsb3Vkcy5vcmeCFEqxbI39an0NLfyr +35xLDpLGrQIpMA0GCSqGSIb3DQEBCwUAA4IBAQDADBpifaPV4jRtSefetMnhxxwj +tPlLXRWqEJpJy+nHYJJdwQHxFHVoZSPinGpYpECCV73Gkh/rMKa+cvR4dBBIK6DP +Bl1IQNP4Jr90z9c0T/zzUxVXE4iwcv2/Vg5OvVHU3z5gW4Mk3R4Rb+69UWHB1z8D +41sm9w4u30vKGJrkdQ5ZLtfRLonncwLQexTlj1k/8VRytP4S9uIAmXwQpEPZxsto +pRcMO2aWW0PvDzk7WPU+ZKnf1RC+pQx+PPH1/ZfyXHy7njJKZ04plIdTA/ah9pPw 
+Bl++VCO7LSwDz+FlmuHnxc2LMR2EIRiNV03ooSc5XGGhIOKLl6+nMPQ0dlta +-----END CERTIFICATE----- diff --git a/test/js/node/test/fixtures/keys/non-trusted-intermediate-ca.srl b/test/js/node/test/fixtures/keys/non-trusted-intermediate-ca.srl new file mode 100644 index 0000000000..52098411fb --- /dev/null +++ b/test/js/node/test/fixtures/keys/non-trusted-intermediate-ca.srl @@ -0,0 +1 @@ +78A88418149F0BFCEC38DC14D085BA43D36090F0 diff --git a/test/js/node/test/fixtures/keys/non-trusted-intermediate.key b/test/js/node/test/fixtures/keys/non-trusted-intermediate.key new file mode 100644 index 0000000000..54b73ef5ff --- /dev/null +++ b/test/js/node/test/fixtures/keys/non-trusted-intermediate.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDB/DHyl7ZATJ+f +ICVQsHtsP1zpfazStDN+PChXGE0rQtrWtKcGgeGsezXl7P8Qg6rW7sHmdEi3Ev5w +Dj/SkBzfnzUol0NIJx4dt62Ua4vVQry1hOZhpXtsowQKc+yRES1u4GV9V7cP1+UL +fQrSGFSn+6xk4JKtJEryUH+g4SfHuvB8BhEAqfGZj0ExYSIMwd+cGE4Hik1N7r0A +GLACMPXwuVbWvFPblNFcje5hcbKDF5uP7qa5PS+AGEjhSFfp403fMS/qbkn2UikA +PMi3GdS3kZAGaN0Y1HBFD0Q3S8QdVAGb4S2HCYIFpse+/fTGTrBdbvXdsEfRydTc +4N/iCYltAgMBAAECggEALR4V1OVd1Xss1gMRQsDlV/Itzz20dEZGwrnFrSohCqqQ +QQc/4MbVIPuAN/PFCEeDdN2PuiS6I+B2TsQ1qmjr2kQKhmAWHUJB4ioOJHrWCVou +D27zcWsed5A7uJ2pPD1ZSpRE7p/32ya85kzlNyPDDtX9jPHhk4UhLFY2NQohKTYF +CM2+YL6V8x2Kq9OOjGxPrX3t5H0cgVW7f+mMwhCSevJQAoLWO7cNbbN/fWHEK0jn +ovHkpmK7dWejWN8MYMQOhmIuUV54aLIKoNLEAhnFj70/36I/GMUSQf+rCjjQtLXb +lmNiKF33+3L6ti9HdcznhJujtMjiAXloRkESKcYPoQKBgQDoKO9wj7ZUvapt7ZnD +9ALwkVpxPPCA7rDdu9DmEmXt8Zf3pVCaw1K2qPWM1iAoL6/AZDPs2E1nJGsmDNYv +wMPt09TsZTURKvQYfdFs0uZPcTCJAXg36+dgxXq1OUNirB7Z+F1QPE3EHZT5AaPc +vxRfA4RyJ+DcfMFzUcjePd2MTQKBgQDV57bQKf5wHkWCLVl/ZJB+hy1futN9In6u +n0UeqSK+8m7Go8rPvNFlTeY6/lz/bm58u0mJFKd7vNQs+l7Y1DitC7BLItNwtcuW +OEnhltbhry6i/9lieF607kwq9sNTVpp+iROF1BRmeDh3d3ByBa9Y9HSjfMPUgy6r +Tb6lgMgBoQKBgDmL9BYtuV92CCnpjITzFkt1bDrHhUIgaHz+EkEFbHi3uxiqxLko +E3etl/hKF3x+nY0OCYT69OzNLTYoVmtN2AM6z/us9qODxy/O+DuGZ4pnn0VGtPr/ 
+ocHuEYWcZSSvT5JuKws5d3lWb9ftXSXZw33tzEXTtrxQvE8OhcD5CtK9AoGBAMk0 +kqOwPKOd9egDXGIWaEx8PtQDWpgkcGE1c8Dpe8N9K3Ix874AcD8ITX5EcZnbeJZf +XUZSZVBhSHuebsUqqr0rd4LVmWo1tvDwtZ47UpkrPYUZgJO9gehTFtZ7EzQ7DEvm +CLUjzqSshQDrGpxGeLAGEgkOfO5TDv0XvjLTtk7BAoGBAM9ObVMPg+RhnVUY5oNT +2A+Qq/3sitcbaJ2JKCjJEhttF0fF+0VYXf8c1YNE1AOfA/YnEazfCvPEOVmXGAeq +iKf0FohQ1+dh9ShOK5tcR3jmMzrCwBJFlqjX942m/8FFg6B1za8nrrkSnWNCbJi5 +rmSv7B4llshgzTeEKqgM6GX1 +-----END PRIVATE KEY----- diff --git a/test/js/node/test/fixtures/keys/non-trusted-leaf-from-intermediate-cert.pem b/test/js/node/test/fixtures/keys/non-trusted-leaf-from-intermediate-cert.pem new file mode 100644 index 0000000000..66de118525 --- /dev/null +++ b/test/js/node/test/fixtures/keys/non-trusted-leaf-from-intermediate-cert.pem @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDnjCCAoagAwIBAgIUeKiEGBSfC/zsONwU0IW6Q9NgkPAwDQYJKoZIhvcNAQEL +BQAwajELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQswCQYDVQQHDAJTRjEPMA0G +A1UECgwGTk9ERUpTMTAwLgYDVQQDDCdOb2RlSlMtTm9uLVRydXN0ZWQtVGVzdC1J +bnRlcm1lZGlhdGUtQ0EwIBcNMjUwMjI3MDgxNzUwWhgPMjI5ODEyMTIwODE3NTBa +MEwxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTELMAkGA1UEBwwCU0YxDzANBgNV +BAoMBk5PREVKUzESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAshskMqfwX4J5IA0poqWfm2jF23rBDBFw5FTdZP/dbYrS +UCBOYqg5Jxgq4BxCnGq8ArGAQajOAAiwISK3h/WQ+XqaeEh5PmL4/dW3UZCvcR8I +NN7LCXPnQcvJu1G4VbBDm8WbkkmGJvy6553kA+8SXyeoEs3nXTqQWVINo/8alt6m +bRe2KA8FWgPrEUJgb+Vvl/z7a1V7PQSvWSuL0pBcj04tJQ5WrXAl72GI6eArJrM4 +Yl7Z08ZeGsSKAN+9aFnFyBfRmUeHgDTI9OQjw6FcwArCXZRmaX3CyGZJYgL6DAyf +ukyyRXUT8Ii37W306Vp6d1prqZ4A2fih2sfbcpeLrwIDAQABo1gwVjAUBgNVHREE +DTALgglsb2NhbGhvc3QwHQYDVR0OBBYEFAa6wwZ2tpzJdeCtsG0sUw7MpG39MB8G +A1UdIwQYMBaAFH1pjDE6lilR7aMyg3QwCp4bgDwbMA0GCSqGSIb3DQEBCwUAA4IB +AQBWyVgyhKnRomPa23axktq8/8RC7h6mSJEOW+uTlwam/TqnWQFJspwosStOQFu4 +pg7Ww9MtKJSr9/vxxsyvNaKH5ZNTtgqqlzfYzVLbfwOirNSx4Mp1izQ0G5mfx3Yj ++WEXarNaY8R0benqWMeArTFb9CdDcxvMcSdtkGrMXMuKXFN67zou8NQVkvGzc/tb +imS/Ur9goJYUPlg2xor+P09tiIT+pEG+bpjYZ0U/1D5lIjQYCmZiy9ECL3WBc4df 
+NKsJnlA2GZ4TXh2jFzQw3yZPSLCqNdy+9RdOB058wRYooaFYrOkRiUe9ZV5w1MW5 +mVuwUmrRSI79K26jdTav44PZ +-----END CERTIFICATE----- diff --git a/test/js/node/test/fixtures/keys/non-trusted-leaf-from-intermediate-key.pem b/test/js/node/test/fixtures/keys/non-trusted-leaf-from-intermediate-key.pem new file mode 100644 index 0000000000..cca4657598 --- /dev/null +++ b/test/js/node/test/fixtures/keys/non-trusted-leaf-from-intermediate-key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCyGyQyp/Bfgnkg +DSmipZ+baMXbesEMEXDkVN1k/91titJQIE5iqDknGCrgHEKcarwCsYBBqM4ACLAh +IreH9ZD5epp4SHk+Yvj91bdRkK9xHwg03ssJc+dBy8m7UbhVsEObxZuSSYYm/Lrn +neQD7xJfJ6gSzeddOpBZUg2j/xqW3qZtF7YoDwVaA+sRQmBv5W+X/PtrVXs9BK9Z +K4vSkFyPTi0lDlatcCXvYYjp4CsmszhiXtnTxl4axIoA371oWcXIF9GZR4eANMj0 +5CPDoVzACsJdlGZpfcLIZkliAvoMDJ+6TLJFdRPwiLftbfTpWnp3WmupngDZ+KHa +x9tyl4uvAgMBAAECggEAMKa1VpkFUjGjwJuX2fQAC0Wtdmyruw4wlk6QQ3yZEckv +9e89OjnVktZJL/rIP03wmZO/AzCHRP8ajZKcK6lqtptFAsJZTC9g8IWmk8NACVh+ +t2J8d9KPQyvVqTODdPS3Ix/xhR5MZO34aDh7BpARpqiAgtJ39sF+mMePLlMLAlbO +U7/u1cttplvgiBRWTIiisyl9O+G2OCre1CXacEqkZ8jYWTP7sLofGCXCpgjBVKgl +8q4ktgPlREMVD/QW78CIdrKuOdmzV42zSeFfPoZjUC3nLCdIALquPJyBSSZvDEeA +T+eWSaIm5JcSTBjxG0f9riLQdup2Gz5NjPALHUTxMQKBgQDq2jyr1g0BUMFAJTQR +6LraWcCOz+7l/oH6WuFGm7gUBf5yrdykeWvd8cSfwZRm2tzoxVu44+M05X3ORMHR +wPyckITG9kWndzcOXpEOSiaObfqmEuz5gkpyzaUs5c9AE4pMhzIKNnruavPbD9Hy +4AiLIT3ssjAL14/cjFuZTXl/dQKBgQDCJMxq0jf2rtVfrPrpEh8CyNu4sUQs8O5t +9u4cvjGmHCfFpcdvCjS7gLuAZeFww3qjiv4pM0K5b7rjY3CelB+jlF2TG+4Jxf6h +y/9iPSN98i2FT4Jxc02GYxsPa3mYAxykmqqvIkak+2omaJake2tCyjE49QrfGx0r +TivZnwn+EwKBgQDe0a4MjqqKG/cuB94uO7PEZLE4DfooRl9Fi6H+3tE4VjOC1Ifp +mLYJvk+CDyTgrTg4tL8AXV59GltRL5UAkGxbkxYWuyN87rPSs1BG0X1hVuEfXgdt +9vrxj0Dupx8KOT/WudJ1NBlQSTMSHSFhoMMaVbCt+KVzJtL8OkLR4Vqr3QKBgAy8 +MziSn58r6s1C4JanXKdnG5qq7ijwiQNnnkj+ZO1bjXRWopVzGvBtyl7qz/YArKvL +s05qkWbuoFjILhwI5WZqlhTPUTcM6N4eLpt4HTrmxvumsozUnnJBUAYb67cABUH6 +71VbrzylTVpFpBQYEHoqHz54PIVUFv6/OvskhphHAoGAJukr8k+rvxXIXOjvgE2O 
+9sf2h7YZoW2AKK3tHPlG7XCuIFZJKKhkh+cVRorg/Ws5LLF/5egf234sfeZzdrvP +O2TA/0Hf4mhaJhn53E/PLSLEDVTzORs1L+PfLrFptrP2Eq7iAnbTwaWnjMfAcsy2 +4ukRw65bBMLqv62KLTEZ5uk= +-----END PRIVATE KEY----- diff --git a/test/js/node/test/fixtures/module-mocking/basic.json b/test/js/node/test/fixtures/module-mocking/basic.json new file mode 100644 index 0000000000..2393cd01d4 --- /dev/null +++ b/test/js/node/test/fixtures/module-mocking/basic.json @@ -0,0 +1 @@ +{"foo":"bar"} diff --git a/test/js/node/test/fixtures/module-require/relative/subdir/relative-subdir.js b/test/js/node/test/fixtures/module-require/relative/subdir/relative-subdir.js new file mode 100644 index 0000000000..34eb71b3c6 --- /dev/null +++ b/test/js/node/test/fixtures/module-require/relative/subdir/relative-subdir.js @@ -0,0 +1 @@ +exports.value = 'relative subdir'; diff --git a/test/js/node/test/fixtures/packages/unrecognised-export-keys/index.js b/test/js/node/test/fixtures/packages/unrecognised-export-keys/index.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/js/node/test/fixtures/packages/unrecognised-export-keys/package.json b/test/js/node/test/fixtures/packages/unrecognised-export-keys/package.json new file mode 100644 index 0000000000..e3cef44544 --- /dev/null +++ b/test/js/node/test/fixtures/packages/unrecognised-export-keys/package.json @@ -0,0 +1,10 @@ +{ + "name": "pkg-with-unrecognised-export-keys", + "exports": { + ".": { + "default": "./index.js", + "FtLcAG": "./whatever.ext", + "types": "./index.d.ts" + } + } +} diff --git a/test/js/node/test/fixtures/permission/fs-read-loader.js b/test/js/node/test/fixtures/permission/fs-read-loader.js new file mode 100644 index 0000000000..aaef61e8ce --- /dev/null +++ b/test/js/node/test/fixtures/permission/fs-read-loader.js @@ -0,0 +1,15 @@ +const fs = require('node:fs') +const path = require('node:path') +const assert = require('node:assert'); + +{ + fs.readFileSync(__filename); + console.log('Read its own contents') // Should not throw +} +{ + 
const simpleLoaderPath = path.join(__dirname, 'simple-loader.js'); + fs.readFile(simpleLoaderPath, (err) => { + assert.ok(err.code, 'ERR_ACCESS_DENIED'); + assert.ok(err.permission, 'FileSystemRead'); + }); // Should throw ERR_ACCESS_DENIED +} \ No newline at end of file diff --git a/test/js/node/test/fixtures/permission/fs-read.js b/test/js/node/test/fixtures/permission/fs-read.js index fa4ea1207f..22f4c4184a 100644 --- a/test/js/node/test/fixtures/permission/fs-read.js +++ b/test/js/node/test/fixtures/permission/fs-read.js @@ -14,6 +14,16 @@ const blockedFolder = process.env.BLOCKEDFOLDER; const allowedFolder = process.env.ALLOWEDFOLDER; const regularFile = __filename; +// Guarantee the error message suggest the --allow-fs-read +{ + fs.readFile(blockedFile, common.expectsError({ + message: 'Access to this API has been restricted. Use --allow-fs-read to manage permissions.', + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemRead', + resource: path.toNamespacedPath(blockedFile), + })); +} + // fs.readFile { fs.readFile(blockedFile, common.expectsError({ diff --git a/test/js/node/test/fixtures/permission/fs-write.js b/test/js/node/test/fixtures/permission/fs-write.js index 0c0ec72602..590df0b658 100644 --- a/test/js/node/test/fixtures/permission/fs-write.js +++ b/test/js/node/test/fixtures/permission/fs-write.js @@ -1,7 +1,11 @@ 'use strict'; const common = require('../../common'); -common.skipIfWorker(); +const { isMainThread } = require('worker_threads'); + +if (!isMainThread) { + common.skip('This test only works on a main thread'); +} const assert = require('assert'); const fs = require('fs'); @@ -21,6 +25,15 @@ const relativeProtectedFolder = process.env.RELATIVEBLOCKEDFOLDER; assert.ok(!process.permission.has('fs.write', blockedFile)); } +// Guarantee the error message suggest the --allow-fs-write +{ + fs.writeFile(blockedFile, 'example', common.expectsError({ + message: 'Access to this API has been restricted. 
Use --allow-fs-write to manage permissions.', + code: 'ERR_ACCESS_DENIED', + permission: 'FileSystemWrite', + })); +} + // fs.writeFile { assert.throws(() => { @@ -553,4 +566,4 @@ const relativeProtectedFolder = process.env.RELATIVEBLOCKEDFOLDER; }, { code: 'ERR_ACCESS_DENIED', }); -} \ No newline at end of file +} diff --git a/test/js/node/test/fixtures/permission/hello-world.js b/test/js/node/test/fixtures/permission/hello-world.js new file mode 100644 index 0000000000..f5bda8dadd --- /dev/null +++ b/test/js/node/test/fixtures/permission/hello-world.js @@ -0,0 +1 @@ +console.log('Hello world') \ No newline at end of file diff --git a/test/js/node/test/fixtures/permission/processbinding.js b/test/js/node/test/fixtures/permission/processbinding.js index bdb958fb01..8857a55498 100644 --- a/test/js/node/test/fixtures/permission/processbinding.js +++ b/test/js/node/test/fixtures/permission/processbinding.js @@ -1,5 +1,9 @@ const common = require('../../common'); -common.skipIfWorker(); +const { isMainThread } = require('worker_threads'); + +if (!isMainThread) { + common.skip('This test only works on a main thread'); +} const assert = require('assert'); @@ -11,14 +15,6 @@ const assert = require('assert'); })); } -{ - assert.throws(() => { - process.binding('async_wrap'); - }, common.expectsError({ - code: 'ERR_ACCESS_DENIED', - })); -} - { assert.throws(() => { process.binding('fs'); diff --git a/test/js/node/test/fixtures/permission/simple-loader.js b/test/js/node/test/fixtures/permission/simple-loader.js new file mode 100644 index 0000000000..43e2a9bb77 --- /dev/null +++ b/test/js/node/test/fixtures/permission/simple-loader.js @@ -0,0 +1,3 @@ +// Simulate a regular loading without fs operations +// but with access to Node core modules +require('node:fs') \ No newline at end of file diff --git a/test/js/node/test/fixtures/process-exit-code-cases.js b/test/js/node/test/fixtures/process-exit-code-cases.js deleted file mode 100644 index 05b01afd8f..0000000000 --- 
a/test/js/node/test/fixtures/process-exit-code-cases.js +++ /dev/null @@ -1,136 +0,0 @@ -'use strict'; - -const assert = require('assert'); - -function getTestCases(isWorker = false) { - const cases = []; - function exitsOnExitCodeSet() { - process.exitCode = 42; - process.on('exit', (code) => { - assert.strictEqual(process.exitCode, 42); - assert.strictEqual(code, 42); - }); - } - cases.push({ func: exitsOnExitCodeSet, result: 42 }); - - function changesCodeViaExit() { - process.exitCode = 99; - process.on('exit', (code) => { - assert.strictEqual(process.exitCode, 42); - assert.strictEqual(code, 42); - }); - process.exit(42); - } - cases.push({ func: changesCodeViaExit, result: 42 }); - - function changesCodeZeroExit() { - process.exitCode = 99; - process.on('exit', (code) => { - assert.strictEqual(process.exitCode, 0); - assert.strictEqual(code, 0); - }); - process.exit(0); - } - cases.push({ func: changesCodeZeroExit, result: 0 }); - - function exitWithOneOnUncaught() { - process.exitCode = 99; - process.on('exit', (code) => { - // cannot use assert because it will be uncaughtException -> 1 exit code - // that will render this test useless - if (code !== 1 || process.exitCode !== 1) { - console.log('wrong code! 
expected 1 for uncaughtException'); - process.exit(99); - } - }); - throw new Error('ok'); - } - cases.push({ - func: exitWithOneOnUncaught, - result: 1, - error: /^Error: ok$/, - }); - - function changeCodeInsideExit() { - process.exitCode = 95; - process.on('exit', (code) => { - assert.strictEqual(process.exitCode, 95); - assert.strictEqual(code, 95); - process.exitCode = 99; - }); - } - cases.push({ func: changeCodeInsideExit, result: 99 }); - - function zeroExitWithUncaughtHandler() { - process.on('exit', (code) => { - assert.strictEqual(process.exitCode, 0); - assert.strictEqual(code, 0); - }); - process.on('uncaughtException', () => { }); - throw new Error('ok'); - } - cases.push({ func: zeroExitWithUncaughtHandler, result: 0 }); - - function changeCodeInUncaughtHandler() { - process.on('exit', (code) => { - assert.strictEqual(process.exitCode, 97); - assert.strictEqual(code, 97); - }); - process.on('uncaughtException', () => { - process.exitCode = 97; - }); - throw new Error('ok'); - } - cases.push({ func: changeCodeInUncaughtHandler, result: 97 }); - - function changeCodeInExitWithUncaught() { - process.on('exit', (code) => { - assert.strictEqual(process.exitCode, 1); - assert.strictEqual(code, 1); - process.exitCode = 98; - }); - throw new Error('ok'); - } - cases.push({ - func: changeCodeInExitWithUncaught, - result: 98, - error: /^Error: ok$/, - }); - - function exitWithZeroInExitWithUncaught() { - process.on('exit', (code) => { - assert.strictEqual(process.exitCode, 1); - assert.strictEqual(code, 1); - process.exitCode = 0; - }); - throw new Error('ok'); - } - cases.push({ - func: exitWithZeroInExitWithUncaught, - result: 0, - error: /^Error: ok$/, - }); - - function exitWithThrowInUncaughtHandler() { - process.on('uncaughtException', () => { - throw new Error('ok') - }); - throw new Error('bad'); - } - cases.push({ - func: exitWithThrowInUncaughtHandler, - result: isWorker ? 
1 : 7, - error: /^Error: ok$/, - }); - - function exitWithUndefinedFatalException() { - process._fatalException = undefined; - throw new Error('ok'); - } - cases.push({ - func: exitWithUndefinedFatalException, - result: 6, - }); - return cases; -} -exports.getTestCases = getTestCases; diff --git a/test/js/node/test/fixtures/rc/broken-node-options.json b/test/js/node/test/fixtures/rc/broken-node-options.json new file mode 100644 index 0000000000..beea3f7143 --- /dev/null +++ b/test/js/node/test/fixtures/rc/broken-node-options.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "inspect-port + } +} diff --git a/test/js/node/test/fixtures/rc/broken.json b/test/js/node/test/fixtures/rc/broken.json new file mode 100644 index 0000000000..98232c64fc --- /dev/null +++ b/test/js/node/test/fixtures/rc/broken.json @@ -0,0 +1 @@ +{ diff --git a/test/js/node/test/fixtures/rc/default/node.config.json b/test/js/node/test/fixtures/rc/default/node.config.json new file mode 100644 index 0000000000..54bcbfef04 --- /dev/null +++ b/test/js/node/test/fixtures/rc/default/node.config.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "max-http-header-size": 10 + } +} diff --git a/test/js/node/test/fixtures/rc/default/override.json b/test/js/node/test/fixtures/rc/default/override.json new file mode 100644 index 0000000000..0f6f763cad --- /dev/null +++ b/test/js/node/test/fixtures/rc/default/override.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "max-http-header-size": 20 + } +} diff --git a/test/js/node/test/fixtures/rc/duplicate-namespace-option/node.config.json b/test/js/node/test/fixtures/rc/duplicate-namespace-option/node.config.json new file mode 100644 index 0000000000..4d948fbd33 --- /dev/null +++ b/test/js/node/test/fixtures/rc/duplicate-namespace-option/node.config.json @@ -0,0 +1,6 @@ +{ + "testRunner": { + "test-name-pattern": "first-pattern", + "test-name-pattern": "second-pattern" + } +} diff --git a/test/js/node/test/fixtures/rc/empty-object.json 
b/test/js/node/test/fixtures/rc/empty-object.json new file mode 100644 index 0000000000..0db3279e44 --- /dev/null +++ b/test/js/node/test/fixtures/rc/empty-object.json @@ -0,0 +1,3 @@ +{ + +} diff --git a/test/js/node/test/fixtures/rc/empty-valid-namespace.json b/test/js/node/test/fixtures/rc/empty-valid-namespace.json new file mode 100644 index 0000000000..dbeb33d7aa --- /dev/null +++ b/test/js/node/test/fixtures/rc/empty-valid-namespace.json @@ -0,0 +1,3 @@ +{ + "testRunner": {} +} diff --git a/test/js/node/test/fixtures/rc/empty.json b/test/js/node/test/fixtures/rc/empty.json new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/test/js/node/test/fixtures/rc/empty.json @@ -0,0 +1 @@ + diff --git a/test/js/node/test/fixtures/rc/host-port.json b/test/js/node/test/fixtures/rc/host-port.json new file mode 100644 index 0000000000..48fb16edae --- /dev/null +++ b/test/js/node/test/fixtures/rc/host-port.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "inspect-port": 65535 + } +} diff --git a/test/js/node/test/fixtures/rc/import-as-string.json b/test/js/node/test/fixtures/rc/import-as-string.json new file mode 100644 index 0000000000..b1e1feb96a --- /dev/null +++ b/test/js/node/test/fixtures/rc/import-as-string.json @@ -0,0 +1,5 @@ +{ + "nodeOptions":{ + "import": "./test/fixtures/printA.js" + } +} diff --git a/test/js/node/test/fixtures/rc/import.json b/test/js/node/test/fixtures/rc/import.json new file mode 100644 index 0000000000..c0f74ed62b --- /dev/null +++ b/test/js/node/test/fixtures/rc/import.json @@ -0,0 +1,9 @@ +{ + "nodeOptions": { + "import": [ + "./test/fixtures/printA.js", + "./test/fixtures/printB.js", + "./test/fixtures/printC.js" + ] + } +} diff --git a/test/js/node/test/fixtures/rc/inspect-false.json b/test/js/node/test/fixtures/rc/inspect-false.json new file mode 100644 index 0000000000..32bb5961f2 --- /dev/null +++ b/test/js/node/test/fixtures/rc/inspect-false.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "inspect": false + } +} diff --git 
a/test/js/node/test/fixtures/rc/inspect-true.json b/test/js/node/test/fixtures/rc/inspect-true.json new file mode 100644 index 0000000000..684571a5a6 --- /dev/null +++ b/test/js/node/test/fixtures/rc/inspect-true.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "inspect": true + } +} diff --git a/test/js/node/test/fixtures/rc/invalid-import.json b/test/js/node/test/fixtures/rc/invalid-import.json new file mode 100644 index 0000000000..8d6a1a0777 --- /dev/null +++ b/test/js/node/test/fixtures/rc/invalid-import.json @@ -0,0 +1,7 @@ +{ + "nodeOptions": { + "import": [ + 1 + ] + } +} diff --git a/test/js/node/test/fixtures/rc/namespace-with-array.json b/test/js/node/test/fixtures/rc/namespace-with-array.json new file mode 100644 index 0000000000..056a4291e9 --- /dev/null +++ b/test/js/node/test/fixtures/rc/namespace-with-array.json @@ -0,0 +1,5 @@ +{ + "testRunner": { + "test-coverage-exclude": ["config-pattern1", "config-pattern2"] + } +} diff --git a/test/js/node/test/fixtures/rc/namespace-with-disallowed-envvar.json b/test/js/node/test/fixtures/rc/namespace-with-disallowed-envvar.json new file mode 100644 index 0000000000..6152684e05 --- /dev/null +++ b/test/js/node/test/fixtures/rc/namespace-with-disallowed-envvar.json @@ -0,0 +1,6 @@ +{ + "testRunner": { + "test-concurrency": 1, + "experimental-test-coverage": true + } +} diff --git a/test/js/node/test/fixtures/rc/namespaced/node.config.json b/test/js/node/test/fixtures/rc/namespaced/node.config.json new file mode 100644 index 0000000000..df929d25c1 --- /dev/null +++ b/test/js/node/test/fixtures/rc/namespaced/node.config.json @@ -0,0 +1,5 @@ +{ + "testRunner": { + "test-isolation": "none" + } +} diff --git a/test/js/node/test/fixtures/rc/negative-numeric.json b/test/js/node/test/fixtures/rc/negative-numeric.json new file mode 100644 index 0000000000..f0b6d57369 --- /dev/null +++ b/test/js/node/test/fixtures/rc/negative-numeric.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "max-http-header-size": -1 + } +} diff --git 
a/test/js/node/test/fixtures/rc/no-op.json b/test/js/node/test/fixtures/rc/no-op.json new file mode 100644 index 0000000000..a8e0a191ca --- /dev/null +++ b/test/js/node/test/fixtures/rc/no-op.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "http-parser": true + } +} diff --git a/test/js/node/test/fixtures/rc/non-object-node-options.json b/test/js/node/test/fixtures/rc/non-object-node-options.json new file mode 100644 index 0000000000..5dc596e467 --- /dev/null +++ b/test/js/node/test/fixtures/rc/non-object-node-options.json @@ -0,0 +1,3 @@ +{ + "nodeOptions": "string" +} diff --git a/test/js/node/test/fixtures/rc/non-object-root.json b/test/js/node/test/fixtures/rc/non-object-root.json new file mode 100644 index 0000000000..fe51488c70 --- /dev/null +++ b/test/js/node/test/fixtures/rc/non-object-root.json @@ -0,0 +1 @@ +[] diff --git a/test/js/node/test/fixtures/rc/non-readable/node.config.json b/test/js/node/test/fixtures/rc/non-readable/node.config.json new file mode 100755 index 0000000000..21e2b85fbd --- /dev/null +++ b/test/js/node/test/fixtures/rc/non-readable/node.config.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "max-http-header-size": 10 + } +} diff --git a/test/js/node/test/fixtures/rc/not-node-options-flag.json b/test/js/node/test/fixtures/rc/not-node-options-flag.json new file mode 100644 index 0000000000..c35ff6064e --- /dev/null +++ b/test/js/node/test/fixtures/rc/not-node-options-flag.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "test": true + } +} diff --git a/test/js/node/test/fixtures/rc/numeric.json b/test/js/node/test/fixtures/rc/numeric.json new file mode 100644 index 0000000000..c9d5d6241f --- /dev/null +++ b/test/js/node/test/fixtures/rc/numeric.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "max-http-header-size": 4294967295 + } +} diff --git a/test/js/node/test/fixtures/rc/override-namespace.json b/test/js/node/test/fixtures/rc/override-namespace.json new file mode 100644 index 0000000000..acb37b2eec --- /dev/null +++ 
b/test/js/node/test/fixtures/rc/override-namespace.json @@ -0,0 +1,8 @@ +{ + "testRunner": { + "test-isolation": "process" + }, + "nodeOptions": { + "test-isolation": "none" + } +} diff --git a/test/js/node/test/fixtures/rc/override-node-option-with-namespace.json b/test/js/node/test/fixtures/rc/override-node-option-with-namespace.json new file mode 100644 index 0000000000..2db9e1a47f --- /dev/null +++ b/test/js/node/test/fixtures/rc/override-node-option-with-namespace.json @@ -0,0 +1,8 @@ +{ + "nodeOptions": { + "test-isolation": "none" + }, + "testRunner": { + "test-isolation": "process" + } +} diff --git a/test/js/node/test/fixtures/rc/override-property.json b/test/js/node/test/fixtures/rc/override-property.json new file mode 100644 index 0000000000..9e76f24fcd --- /dev/null +++ b/test/js/node/test/fixtures/rc/override-property.json @@ -0,0 +1,6 @@ +{ + "nodeOptions": { + "experimental-transform-types": true, + "experimental-transform-types": false + } +} diff --git a/test/js/node/test/fixtures/rc/sneaky-flag.json b/test/js/node/test/fixtures/rc/sneaky-flag.json new file mode 100644 index 0000000000..0b2342539e --- /dev/null +++ b/test/js/node/test/fixtures/rc/sneaky-flag.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "import": "./test/fixtures/printA.js --experimental-transform-types" + } +} diff --git a/test/js/node/test/fixtures/rc/string.json b/test/js/node/test/fixtures/rc/string.json new file mode 100644 index 0000000000..54dd0964b3 --- /dev/null +++ b/test/js/node/test/fixtures/rc/string.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "test-reporter": "dot" + } +} diff --git a/test/js/node/test/fixtures/rc/test.js b/test/js/node/test/fixtures/rc/test.js new file mode 100644 index 0000000000..7775b14987 --- /dev/null +++ b/test/js/node/test/fixtures/rc/test.js @@ -0,0 +1,6 @@ +const { test } = require('node:test'); +const { ok } = require('node:assert'); + +test('should pass', () => { + ok(true); +}); diff --git 
a/test/js/node/test/fixtures/rc/transform-types.json b/test/js/node/test/fixtures/rc/transform-types.json new file mode 100644 index 0000000000..ea5a9f9f16 --- /dev/null +++ b/test/js/node/test/fixtures/rc/transform-types.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "experimental-transform-types": true + } +} diff --git a/test/js/node/test/fixtures/rc/unknown-flag-namespace.json b/test/js/node/test/fixtures/rc/unknown-flag-namespace.json new file mode 100644 index 0000000000..b5d87ad8dd --- /dev/null +++ b/test/js/node/test/fixtures/rc/unknown-flag-namespace.json @@ -0,0 +1,5 @@ +{ + "testRunner": { + "unknown-flag": true + } +} diff --git a/test/js/node/test/fixtures/rc/unknown-flag.json b/test/js/node/test/fixtures/rc/unknown-flag.json new file mode 100644 index 0000000000..31087baa00 --- /dev/null +++ b/test/js/node/test/fixtures/rc/unknown-flag.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "some-unknown-flag": true + } +} diff --git a/test/js/node/test/fixtures/rc/unknown-namespace.json b/test/js/node/test/fixtures/rc/unknown-namespace.json new file mode 100644 index 0000000000..14730d83ef --- /dev/null +++ b/test/js/node/test/fixtures/rc/unknown-namespace.json @@ -0,0 +1,5 @@ +{ + "an-invalid-namespace": { + "a-key": "a-value" + } +} diff --git a/test/js/node/test/fixtures/rc/v8-flag.json b/test/js/node/test/fixtures/rc/v8-flag.json new file mode 100644 index 0000000000..5f74095306 --- /dev/null +++ b/test/js/node/test/fixtures/rc/v8-flag.json @@ -0,0 +1,5 @@ +{ + "nodeOptions": { + "abort-on-uncaught-exception": true + } +} diff --git a/test/js/node/test/fixtures/sea.js b/test/js/node/test/fixtures/sea.js index 6dea696099..65bb8d37e0 100644 --- a/test/js/node/test/fixtures/sea.js +++ b/test/js/node/test/fixtures/sea.js @@ -10,9 +10,9 @@ const builtinWarning = To load a module from disk after the single executable application is launched, use require("module").createRequire(). 
Support for bundled module loading or virtual file systems are under discussions in https://github.com/nodejs/single-executable`; -expectWarning('Warning', builtinWarning); // Triggered by require() calls below. // This additionally makes sure that no unexpected warnings are emitted. if (!createdRequire('./sea-config.json').disableExperimentalSEAWarning) { + expectWarning('Warning', builtinWarning); // Triggered by require() calls below. expectWarning('ExperimentalWarning', 'Single executable application is an experimental feature and ' + 'might change at any time'); diff --git a/test/js/node/test/fixtures/typescript/ts/test-get-callsite-explicit.ts b/test/js/node/test/fixtures/typescript/ts/test-get-callsites-explicit.ts similarity index 77% rename from test/js/node/test/fixtures/typescript/ts/test-get-callsite-explicit.ts rename to test/js/node/test/fixtures/typescript/ts/test-get-callsites-explicit.ts index 331495419a..8b37db9f72 100644 --- a/test/js/node/test/fixtures/typescript/ts/test-get-callsite-explicit.ts +++ b/test/js/node/test/fixtures/typescript/ts/test-get-callsites-explicit.ts @@ -7,4 +7,4 @@ interface CallSite { const callSite = getCallSites({ sourceMap: false })[0]; -console.log('mapCallSite: ', callSite); +console.log('mapCallSites: ', callSite); diff --git a/test/js/node/test/fixtures/typescript/ts/test-get-callsite.ts b/test/js/node/test/fixtures/typescript/ts/test-get-callsites.ts similarity index 74% rename from test/js/node/test/fixtures/typescript/ts/test-get-callsite.ts rename to test/js/node/test/fixtures/typescript/ts/test-get-callsites.ts index e3186ec889..06ddf05538 100644 --- a/test/js/node/test/fixtures/typescript/ts/test-get-callsite.ts +++ b/test/js/node/test/fixtures/typescript/ts/test-get-callsites.ts @@ -7,4 +7,4 @@ interface CallSite { const callSite = getCallSites()[0]; -console.log('getCallSite: ', callSite); +console.log('getCallSites: ', callSite); diff --git a/test/js/node/test/fixtures/tz-version.txt 
b/test/js/node/test/fixtures/tz-version.txt index 699e50d4d3..ef468adcec 100644 --- a/test/js/node/test/fixtures/tz-version.txt +++ b/test/js/node/test/fixtures/tz-version.txt @@ -1 +1 @@ -2024b +2025b diff --git a/test/js/node/test/parallel/test-gc-http-client-connaborted.js b/test/js/node/test/parallel/test-gc-http-client-connaborted.js deleted file mode 100644 index e52a555d78..0000000000 --- a/test/js/node/test/parallel/test-gc-http-client-connaborted.js +++ /dev/null @@ -1,65 +0,0 @@ -'use strict'; -// Flags: --expose-gc -// just like test-gc-http-client.js, -// but aborting every connection that comes in. - -const common = require('../common'); -const { onGC } = require('../common/gc'); -const http = require('http'); -const os = require('os'); - -const cpus = os.availableParallelism(); -let createClients = true; -let done = 0; -let count = 0; -let countGC = 0; - -function serverHandler(req, res) { - res.connection.destroy(); -} - -const server = http.createServer(serverHandler); -server.listen(0, common.mustCall(() => { - for (let i = 0; i < cpus; i++) - getAll(); -})); - -function getAll() { - if (!createClients) - return; - - const req = http.get({ - hostname: 'localhost', - pathname: '/', - port: server.address().port - }, cb).on('error', cb); - - count++; - onGC(req, { ongc }); - - setImmediate(getAll); -} - -function cb(res) { - done += 1; -} - -function ongc() { - countGC++; -} - -setImmediate(status); - -function status() { - if (done > 0) { - createClients = false; - globalThis.gc(); - console.log(`done/collected/total: ${done}/${countGC}/${count}`); - if (countGC === count) { - server.close(); - return; - } - } - - setImmediate(status); -} diff --git a/test/js/node/test/parallel/test-http2-client-promisify-connect-error.js b/test/js/node/test/parallel/test-http2-client-promisify-connect-error.js new file mode 100644 index 0000000000..4cd4f48e4c --- /dev/null +++ b/test/js/node/test/parallel/test-http2-client-promisify-connect-error.js @@ -0,0 +1,22 
@@ +'use strict'; + +const common = require('../common'); + +if (!common.hasCrypto) + common.skip('missing crypto'); +const assert = require('assert'); +const http2 = require('http2'); +const util = require('util'); + +const connect = util.promisify(http2.connect); + +const error = new Error('Unable to resolve hostname'); + +function lookup(hostname, options, callback) { + callback(error); +} + +assert.rejects( + connect('http://hostname', { lookup }), + error, +).then(common.mustCall()); diff --git a/test/js/node/test/parallel/test-http2-client-promisify-connect.js b/test/js/node/test/parallel/test-http2-client-promisify-connect.js new file mode 100644 index 0000000000..3e41bee49b --- /dev/null +++ b/test/js/node/test/parallel/test-http2-client-promisify-connect.js @@ -0,0 +1,32 @@ +'use strict'; + +const common = require('../common'); + +if (!common.hasCrypto) + common.skip('missing crypto'); +const assert = require('assert'); +const http2 = require('http2'); +const util = require('util'); + +const server = http2.createServer(); +server.on('stream', common.mustCall((stream) => { + stream.respond(); + stream.end('ok'); +})); +server.listen(0, common.mustCall(() => { + const connect = util.promisify(http2.connect); + + connect(`http://localhost:${server.address().port}`) + .then(common.mustCall((client) => { + assert(client); + const req = client.request(); + let data = ''; + req.setEncoding('utf8'); + req.on('data', (chunk) => data += chunk); + req.on('end', common.mustCall(() => { + assert.strictEqual(data, 'ok'); + client.close(); + server.close(); + })); + })); +})); diff --git a/test/js/node/test/parallel/test-require-resolve.js b/test/js/node/test/parallel/test-require-resolve.js index 6aec57189e..0181880a35 100644 --- a/test/js/node/test/parallel/test-require-resolve.js +++ b/test/js/node/test/parallel/test-require-resolve.js @@ -64,15 +64,14 @@ require(fixtures.path('resolve-paths', 'default', 'verify-paths.js')); // TODO(@jasnell): Remove once node:quic is 
no longer flagged if (mod === 'node:quic') return; - assert.strictEqual(require.resolve.paths(mod), null, `require.resolve.paths(${mod}) should return null`); + assert.strictEqual(require.resolve.paths(mod), null); if (!mod.startsWith('node:')) { try { require.resolve(`node:${mod}`); } catch (e) { return; // skip modules that don't support the node prefix, such as 'bun:ffi' -> 'node:bun:ffi' } - - assert.strictEqual(require.resolve.paths(`node:${mod}`), null, `require.resolve.paths(node:${mod}) should return null`); + assert.strictEqual(require.resolve.paths(`node:${mod}`), null); } }); diff --git a/test/js/node/test/parallel/test-util-parse-env.js b/test/js/node/test/parallel/test-util-parse-env.js index 13d2fda37a..80ab736dd3 100644 --- a/test/js/node/test/parallel/test-util-parse-env.js +++ b/test/js/node/test/parallel/test-util-parse-env.js @@ -11,6 +11,8 @@ const fs = require('node:fs'); const validContent = fs.readFileSync(validEnvFilePath, 'utf8'); assert.deepStrictEqual(util.parseEnv(validContent), { + A: 'B=C', + B: 'C=D', AFTER_LINE: 'after_line', BACKTICKS: 'backticks', BACKTICKS_INSIDE_DOUBLE: '`backticks` work inside double quotes', diff --git a/test/js/node/test/sequential/test-performance-eventloopdelay.js b/test/js/node/test/sequential/test-performance-eventloopdelay.js new file mode 100644 index 0000000000..a0c11dbd35 --- /dev/null +++ b/test/js/node/test/sequential/test-performance-eventloopdelay.js @@ -0,0 +1,110 @@ +// Flags: --expose-gc --expose-internals +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const os = require('os'); +const { + monitorEventLoopDelay +} = require('perf_hooks'); +const sleep = typeof Bun === 'object' ? 
Bun.sleepSync : require('internal/util').sleep; + +{ + const histogram = monitorEventLoopDelay(); + assert(histogram); + assert(histogram.enable()); + assert(!histogram.enable()); + histogram.reset(); + assert(histogram.disable()); + assert(!histogram.disable()); +} + +{ + [null, 'a', 1, false, Infinity].forEach((i) => { + assert.throws( + () => monitorEventLoopDelay(i), + { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE' + } + ); + }); + + [null, 'a', false, {}, []].forEach((i) => { + assert.throws( + () => monitorEventLoopDelay({ resolution: i }), + { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE' + } + ); + }); + + [-1, 0, 2 ** 53, Infinity].forEach((i) => { + assert.throws( + () => monitorEventLoopDelay({ resolution: i }), + { + name: 'RangeError', + code: 'ERR_OUT_OF_RANGE' + } + ); + }); +} + +{ + const s390x = os.arch() === 's390x'; + const histogram = monitorEventLoopDelay({ resolution: 1 }); + histogram.enable(); + let m = 5; + if (s390x) { + m = m * 2; + } + function spinAWhile() { + sleep(1000); + if (--m > 0) { + setTimeout(spinAWhile, common.platformTimeout(500)); + } else { + histogram.disable(); + // The values are non-deterministic, so we just check that a value is + // present, as opposed to a specific value. 
+ assert(histogram.min > 0); + assert(histogram.max > 0); + assert(histogram.stddev > 0); + assert(histogram.mean > 0); + assert(histogram.percentiles.size > 0); + for (let n = 1; n < 100; n = n + 0.1) { + assert(histogram.percentile(n) >= 0); + } + histogram.reset(); + assert.strictEqual(histogram.min, 9223372036854776000); + assert.strictEqual(histogram.max, 0); + assert(Number.isNaN(histogram.stddev)); + assert(Number.isNaN(histogram.mean)); + assert.strictEqual(histogram.percentiles.size, 1); + + ['a', false, {}, []].forEach((i) => { + assert.throws( + () => histogram.percentile(i), + { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE' + } + ); + }); + [-1, 0, 101, NaN].forEach((i) => { + assert.throws( + () => histogram.percentile(i), + { + name: 'RangeError', + code: 'ERR_OUT_OF_RANGE' + } + ); + }); + } + } + spinAWhile(); +} + +// Make sure that the histogram instances can be garbage-collected without +// and not just implicitly destroyed when the Environment is torn down. +process.on('exit', global.gc); diff --git a/test/js/node/tls/node-tls-connect.test.ts b/test/js/node/tls/node-tls-connect.test.ts index 914cd62098..56171db865 100644 --- a/test/js/node/tls/node-tls-connect.test.ts +++ b/test/js/node/tls/node-tls-connect.test.ts @@ -236,23 +236,23 @@ for (const { name, connect } of tests) { }); expect(cert.subjectaltname).toBe("DNS:localhost, IP Address:127.0.0.1, IP Address:0:0:0:0:0:0:0:1"); expect(cert.infoAccess).toBeUndefined(); - expect(cert.ca).toBeFalse(); + expect(cert.ca).toBe(true); expect(cert.bits).toBe(2048); expect(cert.modulus).toBe( - 
"beee8773af7c8861ec11351188b9b1798734fb0729b674369be3285a29fe5dacbfab700d09d7904cf1027d89298bd68be0ef1df94363012b0deb97f632cb76894bcc216535337b9db6125ef68996dd35b4bea07e86c41da071907a86651e84f8c72141f889cc0f770554791e9f07bbe47c375d2d77b44dbe2ab0ed442bc1f49abe4f8904977e3dfd61cd501d8eff819ff1792aedffaca7d281fd1db8c5d972d22f68fa7103ca11ac9aaed1cdd12c33c0b8b47964b37338953d2415edce8b83d52e2076ca960385cc3a5ca75a75951aafdb2ad3db98a6fdd4baa32f575fea7b11f671a9eaa95d7d9faf958ac609f3c48dec5bddcf1bc1542031ed9d4b281d7dd1", + "e5633a2c8118171cbeaf321d55d0444586cbe566bb51a234b0ead69faf7490069854efddffac68986652ff949f472252e4c7d24c6ee4e3366e54d9e4701e24d021e583e1a088112c0f96475a558b42f883a3e796c937cc4d6bb8791b227017b3e73deb40b0ac84f033019f580a3216888acec71ce52d938fcadd8e29794e38774e33d323ede89b58e526ef8b513ba465fa4ffd9cf6c1ec7480de0dcb569dec295d7b3cce40256b428d5907e90e7a52e77c3101f4ad4c0e254ab03d75ac42ee1668a5094bc4521b264fb404b6c4b17b6b279e13e6282e1e4fb6303540cb830ea8ff576ca57b7861e4ef797af824b0987c870718780a1c5141e4f904fd0c5139f5", ); expect(cert.exponent).toBe("0x10001"); expect(cert.pubkey).toBeInstanceOf(Buffer); - expect(cert.valid_from).toBe("Sep 6 23:27:34 2023 GMT"); // yes this space is intentional - expect(cert.valid_to).toBe("Sep 5 23:27:34 2025 GMT"); - expect(cert.fingerprint).toBe("E3:90:9C:A8:AB:80:48:37:8D:CE:11:64:45:3A:EB:AD:C8:3C:B3:5C"); + expect(cert.valid_from).toBe("Sep 6 03:00:49 2025 GMT"); // yes this space is intentional + expect(cert.valid_to).toBe("Sep 4 03:00:49 2035 GMT"); + expect(cert.fingerprint).toBe("D2:5E:B9:AD:8B:48:3B:7A:35:D3:1A:45:BD:32:AC:AD:55:4A:BA:AD"); expect(cert.fingerprint256).toBe( - "53:DD:15:78:60:FD:66:8C:43:9E:19:7E:CF:2C:AF:49:3C:D1:11:EC:61:2D:F5:DC:1D:0A:FA:CD:12:F9:F8:E0", + "85:F4:47:0C:6D:D8:DE:C8:68:77:7C:5E:3F:9B:56:A6:D3:69:C7:C2:1A:E8:B8:F8:1C:16:1D:04:78:A0:E9:91", ); expect(cert.fingerprint512).toBe( - 
"2D:31:CB:D2:A0:CA:E5:D4:B5:59:11:48:4B:BC:65:11:4F:AB:02:24:59:D8:73:43:2F:9A:31:92:BC:AF:26:66:CD:DB:8B:03:74:0C:C1:84:AF:54:2D:7C:FD:EF:07:6E:85:66:98:6B:82:4F:A5:72:97:A2:19:8C:7B:57:D6:15", + "CE:00:17:97:29:5E:1C:7E:59:86:8D:1F:F0:F4:AF:A0:B0:10:F2:2E:0E:79:D1:32:D0:44:F9:B4:3A:DE:D5:83:A9:15:0E:E4:47:24:D4:2A:10:FB:21:BE:3A:38:21:FC:40:20:B3:BC:52:64:F7:38:93:EF:C9:3F:C8:57:89:31", ); - expect(cert.serialNumber).toBe("1da7a7b8d71402ed2d8c3646a5cedf2b8117efc8"); + expect(cert.serialNumber).toBe("71a46ae89fd817ef81a34d5973e1de42f09b9d63"); expect(cert.raw).toBeInstanceOf(Buffer); } finally { socket.end(); diff --git a/test/js/node/tls/node-tls-server.test.ts b/test/js/node/tls/node-tls-server.test.ts index b4e0080f23..9cece7adf2 100644 --- a/test/js/node/tls/node-tls-server.test.ts +++ b/test/js/node/tls/node-tls-server.test.ts @@ -316,24 +316,24 @@ describe("tls.createServer", () => { ST: "CA", }); - expect(cert.ca).toBeFalse(); + expect(cert.ca).toBe(true); expect(cert.bits).toBe(2048); expect(cert.modulus).toBe( - "beee8773af7c8861ec11351188b9b1798734fb0729b674369be3285a29fe5dacbfab700d09d7904cf1027d89298bd68be0ef1df94363012b0deb97f632cb76894bcc216535337b9db6125ef68996dd35b4bea07e86c41da071907a86651e84f8c72141f889cc0f770554791e9f07bbe47c375d2d77b44dbe2ab0ed442bc1f49abe4f8904977e3dfd61cd501d8eff819ff1792aedffaca7d281fd1db8c5d972d22f68fa7103ca11ac9aaed1cdd12c33c0b8b47964b37338953d2415edce8b83d52e2076ca960385cc3a5ca75a75951aafdb2ad3db98a6fdd4baa32f575fea7b11f671a9eaa95d7d9faf958ac609f3c48dec5bddcf1bc1542031ed9d4b281d7dd1", + 
"e5633a2c8118171cbeaf321d55d0444586cbe566bb51a234b0ead69faf7490069854efddffac68986652ff949f472252e4c7d24c6ee4e3366e54d9e4701e24d021e583e1a088112c0f96475a558b42f883a3e796c937cc4d6bb8791b227017b3e73deb40b0ac84f033019f580a3216888acec71ce52d938fcadd8e29794e38774e33d323ede89b58e526ef8b513ba465fa4ffd9cf6c1ec7480de0dcb569dec295d7b3cce40256b428d5907e90e7a52e77c3101f4ad4c0e254ab03d75ac42ee1668a5094bc4521b264fb404b6c4b17b6b279e13e6282e1e4fb6303540cb830ea8ff576ca57b7861e4ef797af824b0987c870718780a1c5141e4f904fd0c5139f5", ); expect(cert.exponent).toBe("0x10001"); expect(cert.pubkey).toBeInstanceOf(Buffer); // yes these spaces are intentional - expect(cert.valid_from).toBe("Sep 6 23:27:34 2023 GMT"); - expect(cert.valid_to).toBe("Sep 5 23:27:34 2025 GMT"); - expect(cert.fingerprint).toBe("E3:90:9C:A8:AB:80:48:37:8D:CE:11:64:45:3A:EB:AD:C8:3C:B3:5C"); + expect(cert.valid_from).toBe("Sep 6 03:00:49 2025 GMT"); + expect(cert.valid_to).toBe("Sep 4 03:00:49 2035 GMT"); + expect(cert.fingerprint).toBe("D2:5E:B9:AD:8B:48:3B:7A:35:D3:1A:45:BD:32:AC:AD:55:4A:BA:AD"); expect(cert.fingerprint256).toBe( - "53:DD:15:78:60:FD:66:8C:43:9E:19:7E:CF:2C:AF:49:3C:D1:11:EC:61:2D:F5:DC:1D:0A:FA:CD:12:F9:F8:E0", + "85:F4:47:0C:6D:D8:DE:C8:68:77:7C:5E:3F:9B:56:A6:D3:69:C7:C2:1A:E8:B8:F8:1C:16:1D:04:78:A0:E9:91", ); expect(cert.fingerprint512).toBe( - "2D:31:CB:D2:A0:CA:E5:D4:B5:59:11:48:4B:BC:65:11:4F:AB:02:24:59:D8:73:43:2F:9A:31:92:BC:AF:26:66:CD:DB:8B:03:74:0C:C1:84:AF:54:2D:7C:FD:EF:07:6E:85:66:98:6B:82:4F:A5:72:97:A2:19:8C:7B:57:D6:15", + "CE:00:17:97:29:5E:1C:7E:59:86:8D:1F:F0:F4:AF:A0:B0:10:F2:2E:0E:79:D1:32:D0:44:F9:B4:3A:DE:D5:83:A9:15:0E:E4:47:24:D4:2A:10:FB:21:BE:3A:38:21:FC:40:20:B3:BC:52:64:F7:38:93:EF:C9:3F:C8:57:89:31", ); - expect(cert.serialNumber).toBe("1da7a7b8d71402ed2d8c3646a5cedf2b8117efc8"); + expect(cert.serialNumber).toBe("71a46ae89fd817ef81a34d5973e1de42f09b9d63"); expect(cert.raw).toBeInstanceOf(Buffer); client?.end(); diff --git a/test/js/node/tls/renegotiation-feature.js 
b/test/js/node/tls/renegotiation-feature.js index c110760191..b26c338d23 100644 --- a/test/js/node/tls/renegotiation-feature.js +++ b/test/js/node/tls/renegotiation-feature.js @@ -1,7 +1,7 @@ const server = require("https").createServer( { - cert: "-----BEGIN CERTIFICATE-----\nMIIDrzCCApegAwIBAgIUHaenuNcUAu0tjDZGpc7fK4EX78gwDQYJKoZIhvcNAQEL\nBQAwaTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh\nbmNpc2NvMQ0wCwYDVQQKDARPdmVuMREwDwYDVQQLDAhUZWFtIEJ1bjETMBEGA1UE\nAwwKc2VydmVyLWJ1bjAeFw0yMzA5MDYyMzI3MzRaFw0yNTA5MDUyMzI3MzRaMGkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNj\nbzENMAsGA1UECgwET3ZlbjERMA8GA1UECwwIVGVhbSBCdW4xEzARBgNVBAMMCnNl\ncnZlci1idW4wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC+7odzr3yI\nYewRNRGIubF5hzT7Bym2dDab4yhaKf5drL+rcA0J15BM8QJ9iSmL1ovg7x35Q2MB\nKw3rl/Yyy3aJS8whZTUze522El72iZbdNbS+oH6GxB2gcZB6hmUehPjHIUH4icwP\ndwVUeR6fB7vkfDddLXe0Tb4qsO1EK8H0mr5PiQSXfj39Yc1QHY7/gZ/xeSrt/6yn\n0oH9HbjF2XLSL2j6cQPKEayartHN0SwzwLi0eWSzcziVPSQV7c6Lg9UuIHbKlgOF\nzDpcp1p1lRqv2yrT25im/dS6oy9XX+p7EfZxqeqpXX2fr5WKxgnzxI3sW93PG8FU\nIDHtnUsoHX3RAgMBAAGjTzBNMCwGA1UdEQQlMCOCCWxvY2FsaG9zdIcEfwAAAYcQ\nAAAAAAAAAAAAAAAAAAAAATAdBgNVHQ4EFgQUF3y/su4J/8ScpK+rM2LwTct6EQow\nDQYJKoZIhvcNAQELBQADggEBAGWGWp59Bmrk3Gt0bidFLEbvlOgGPWCT9ZrJUjgc\nhY44E+/t4gIBdoKOSwxo1tjtz7WsC2IYReLTXh1vTsgEitk0Bf4y7P40+pBwwZwK\naeIF9+PC6ZoAkXGFRoyEalaPVQDBg/DPOMRG9OH0lKfen9OGkZxmmjRLJzbyfAhU\noI/hExIjV8vehcvaJXmkfybJDYOYkN4BCNqPQHNf87ZNdFCb9Zgxwp/Ou+47J5k4\n5plQ+K7trfKXG3ABMbOJXNt1b0sH8jnpAsyHY4DLEQqxKYADbXsr3YX/yy6c0eOo\nX2bHGD1+zGsb7lGyNyoZrCZ0233glrEM4UxmvldBcWwOWfk=\n-----END CERTIFICATE-----\n", - key: "-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC+7odzr3yIYewR\nNRGIubF5hzT7Bym2dDab4yhaKf5drL+rcA0J15BM8QJ9iSmL1ovg7x35Q2MBKw3r\nl/Yyy3aJS8whZTUze522El72iZbdNbS+oH6GxB2gcZB6hmUehPjHIUH4icwPdwVU\neR6fB7vkfDddLXe0Tb4qsO1EK8H0mr5PiQSXfj39Yc1QHY7/gZ/xeSrt/6yn0oH9\nHbjF2XLSL2j6cQPKEayartHN0SwzwLi0eWSzcziVPSQV7c6Lg9UuIHbKlgOFzDpc\np1p1lRqv2yrT25im/dS6oy9XX+p7EfZxqeqpXX2fr5WKxgnzxI3sW93PG8FUIDHt\nnUsoHX3RAgMBAAECggEAAckMqkn+ER3c7YMsKRLc5bUE9ELe+ftUwfA6G+oXVorn\nE+uWCXGdNqI+TOZkQpurQBWn9IzTwv19QY+H740cxo0ozZVSPE4v4czIilv9XlVw\n3YCNa2uMxeqp76WMbz1xEhaFEgn6ASTVf3hxYJYKM0ljhPX8Vb8wWwlLONxr4w4X\nOnQAB5QE7i7LVRsQIpWKnGsALePeQjzhzUZDhz0UnTyGU6GfC+V+hN3RkC34A8oK\njR3/Wsjahev0Rpb+9Pbu3SgTrZTtQ+srlRrEsDG0wVqxkIk9ueSMOHlEtQ7zYZsk\nlX59Bb8LHNGQD5o+H1EDaC6OCsgzUAAJtDRZsPiZEQKBgQDs+YtVsc9RDMoC0x2y\nlVnP6IUDXt+2UXndZfJI3YS+wsfxiEkgK7G3AhjgB+C+DKEJzptVxP+212hHnXgr\n1gfW/x4g7OWBu4IxFmZ2J/Ojor+prhHJdCvD0VqnMzauzqLTe92aexiexXQGm+WW\nwRl3YZLmkft3rzs3ZPhc1G2X9QKBgQDOQq3rrxcvxSYaDZAb+6B/H7ZE4natMCiz\nLx/cWT8n+/CrJI2v3kDfdPl9yyXIOGrsqFgR3uhiUJnz+oeZFFHfYpslb8KvimHx\nKI+qcVDcprmYyXj2Lrf3fvj4pKorc+8TgOBDUpXIFhFDyM+0DmHLfq+7UqvjU9Hs\nkjER7baQ7QKBgQDTh508jU/FxWi9RL4Jnw9gaunwrEt9bxUc79dp+3J25V+c1k6Q\nDPDBr3mM4PtYKeXF30sBMKwiBf3rj0CpwI+W9ntqYIwtVbdNIfWsGtV8h9YWHG98\nJ9q5HLOS9EAnogPuS27walj7wL1k+NvjydJ1of+DGWQi3aQ6OkMIegap0QKBgBlR\nzCHLa5A8plG6an9U4z3Xubs5BZJ6//QHC+Uzu3IAFmob4Zy+Lr5/kITlpCyw6EdG\n3xDKiUJQXKW7kluzR92hMCRnVMHRvfYpoYEtydxcRxo/WS73SzQBjTSQmicdYzLE\ntkLtZ1+ZfeMRSpXy0gR198KKAnm0d2eQBqAJy0h9AoGBAM80zkd+LehBKq87Zoh7\ndtREVWslRD1C5HvFcAxYxBybcKzVpL89jIRGKB8SoZkF7edzhqvVzAMP0FFsEgCh\naClYGtO+uo+B91+5v2CCqowRJUGfbFOtCuSPR7+B3LDK8pkjK2SQ0mFPUfRA5z0z\nNVWtC0EYNBTRkqhYtqr3ZpUc\n-----END PRIVATE KEY-----\n", + cert: process.env.SERVER_CERT, + key: process.env.SERVER_KEY, rejectUnauthorized: false, hostname: "localhost", minVersion: "TLSv1.2", diff --git a/test/js/node/tls/renegotiation.test.ts b/test/js/node/tls/renegotiation.test.ts index f51b807934..bf848794bb 100644 --- 
a/test/js/node/tls/renegotiation.test.ts +++ b/test/js/node/tls/renegotiation.test.ts @@ -1,5 +1,6 @@ import type { Subprocess } from "bun"; import { afterAll, beforeAll, expect, it } from "bun:test"; +import { bunEnv, tls } from "harness"; import type { IncomingMessage } from "http"; import { join } from "path"; let url: URL; @@ -9,6 +10,11 @@ beforeAll(async () => { stdout: "pipe", stderr: "inherit", stdin: "ignore", + env: { + ...bunEnv, + SERVER_CERT: tls.cert, + SERVER_KEY: tls.key, + }, }); const { value } = await process.stdout.getReader().read(); url = new URL(new TextDecoder().decode(value)); diff --git a/test/js/sql/adapter-env-var-precedence.test.ts b/test/js/sql/adapter-env-var-precedence.test.ts new file mode 100644 index 0000000000..4f3fa796d2 --- /dev/null +++ b/test/js/sql/adapter-env-var-precedence.test.ts @@ -0,0 +1,475 @@ +import { SQL } from "bun"; +import { afterAll, beforeEach, describe, expect, test } from "bun:test"; +import { isWindows } from "harness"; +import { unlinkSync } from "js/node/fs/export-star-from"; + +declare module "bun" { + namespace SQL { + export interface PostgresOrMySQLOptions { + sslMode?: number; + } + } +} + +describe("SQL adapter environment variable precedence", () => { + const originalEnv = { ...process.env }; + + // prettier-ignore + const SQL_ENV_VARS = [ + 'DATABASE_URL', 'DATABASEURL', + 'TLS_DATABASE_URL', + 'POSTGRES_URL', 'PGURL', 'PG_URL', + 'TLS_POSTGRES_DATABASE_URL', + 'MYSQL_URL', 'MYSQLURL', + 'TLS_MYSQL_DATABASE_URL', + 'MARIADB_URL', 'MARIADBURL', + 'TLS_MARIADB_DATABASE_URL', + 'SQLITE_URL', 'SQLITEURL', + 'PGHOST', 'PGUSER', 'PGPASSWORD', 'PGDATABASE', 'PGPORT', + 'MYSQL_HOST', 'MYSQL_USER', 'MYSQL_PASSWORD', 'MYSQL_DATABASE', 'MYSQL_PORT' + ]; + + beforeEach(() => { + for (const key of Object.keys(process.env).concat(...Object.keys(Bun.env), ...Object.keys(import.meta.env))) { + delete process.env[key]; + delete Bun.env[key]; + delete import.meta.env[key]; + } + + for (const key in originalEnv) { + 
process.env[key] = originalEnv[key]; + Bun.env[key] = originalEnv[key]; + import.meta.env[key] = originalEnv[key]; + } + + for (const key of SQL_ENV_VARS) { + delete process.env[key]; + delete Bun.env[key]; + delete import.meta.env[key]; + } + }); + + afterAll(() => { + for (const key of Object.keys(process.env).concat(...Object.keys(Bun.env), ...Object.keys(import.meta.env))) { + delete process.env[key]; + delete Bun.env[key]; + delete import.meta.env[key]; + } + + for (const key in originalEnv) { + process.env[key] = originalEnv[key]; + Bun.env[key] = originalEnv[key]; + import.meta.env[key] = originalEnv[key]; + } + + for (const key of SQL_ENV_VARS) { + delete process.env[key]; + delete Bun.env[key]; + delete import.meta.env[key]; + } + }); + + test("should not prioritize DATABASE_URL over explicit options (issue #22147)", () => { + process.env.DATABASE_URL = "foo_url"; + + const options = new SQL({ + hostname: "bar_url", + username: "postgres", + password: "postgres", + port: 5432, + }); + + expect(options.options.adapter).toBe("postgres"); + expect(options.options.hostname).toBe("bar_url"); + expect(options.options.port).toBe(5432); + expect(options.options.username).toBe("postgres"); + }); + + test("should only read PostgreSQL env vars when adapter is postgres", () => { + process.env.PGHOST = "pg-host"; + process.env.PGUSER = "pg-user"; + process.env.PGPASSWORD = "pg-pass"; + process.env.MYSQL_URL = "mysql://mysql-host/db"; + + const options = new SQL({ + adapter: "postgres", + }); + + expect(options.options.hostname).toBe("pg-host"); + expect(options.options.username).toBe("pg-user"); + expect(options.options.password).toBe("pg-pass"); + // Should not use MYSQL_URL + expect(options.options.hostname).not.toBe("mysql-host"); + }); + + test("should only read MySQL env vars when adapter is mysql", () => { + process.env.PGHOST = "pg-host"; + process.env.PGUSER = "pg-user"; + process.env.MYSQL_URL = "mysql://mysql-host/db"; + + const options = new SQL({ + adapter: 
"mysql", + }); + + // Should use MYSQL_URL and not read PostgreSQL env vars + expect(options.options.hostname).toBe("mysql-host"); + expect(options.options.username).not.toBe("pg-user"); + }); + + test("should infer postgres adapter from postgres:// protocol", () => { + const options = new SQL("postgres://user:pass@host:5432/db"); + expect(options.options.adapter).toBe("postgres"); + }); + + test("should infer mysql adapter from mysql:// protocol", () => { + const options = new SQL("mysql://user:pass@host:3306/db"); + expect(options.options.adapter).toBe("mysql"); + }); + + test("should default to postgres when no protocol specified", () => { + const options = new SQL("user:pass@host/db"); + expect(options.options.adapter).toBe("postgres"); + }); + + test("adapter-specific env vars should take precedence over generic ones", () => { + process.env.USER = "generic-user"; + process.env.PGUSER = "postgres-user"; + + const options = new SQL({ + adapter: "postgres", + }); + + expect(options.options.username).toBe("postgres-user"); + }); + + test("should infer mysql adapter from MYSQL_URL env var", () => { + process.env.MYSQL_URL = "mysql://user:pass@host:3306/db"; + + const options = new SQL(); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(3306); + }); + + test("should default to port 3306 for MySQL when no port specified", () => { + process.env.MYSQL_URL = "mysql://user:pass@host/db"; + + const options = new SQL(); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(3306); // Should default to MySQL port + }); + + test("should default to port 3306 for explicit MySQL adapter", () => { + const options = new SQL({ + adapter: "mysql", + hostname: "localhost", + }); + + expect(options.options.adapter).toBe("mysql"); + expect(options.options.port).toBe(3306); // Should default to MySQL port + }); + + 
test("should infer postgres adapter from POSTGRES_URL env var", () => { + process.env.POSTGRES_URL = "postgres://user:pass@host:5432/db"; + + const options = new SQL(); + expect(options.options.adapter).toBe("postgres"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(5432); + }); + + test("POSTGRES_URL should take precedence over MYSQL_URL", () => { + process.env.POSTGRES_URL = "postgres://pg-host:5432/pgdb"; + process.env.MYSQL_URL = "mysql://mysql-host:3306/mysqldb"; + + const options = new SQL(); + expect(options.options.adapter).toBe("postgres"); + expect(options.options.hostname).toBe("pg-host"); + expect(options.options.port).toBe(5432); + }); + + test("should infer mysql from MYSQL_URL even without protocol", () => { + process.env.MYSQL_URL = "root@localhost:3306/test"; + + const options = new SQL(); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("localhost"); + expect(options.options.port).toBe(3306); + expect(options.options.username).toBe("root"); + }); + + test("should infer postgres from POSTGRES_URL even without protocol", () => { + process.env.POSTGRES_URL = "user@localhost:5432/test"; + + const options = new SQL(); + expect(options.options.adapter).toBe("postgres"); + expect(options.options.hostname).toBe("localhost"); + expect(options.options.port).toBe(5432); + expect(options.options.username).toBe("user"); + }); + + test("environment variable name should override protocol (PGURL with mysql protocol should be postgres)", () => { + process.env.PGURL = "mysql://host:3306/db"; + + const options = new SQL(); + expect(options.options.adapter).toBe("postgres"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(3306); + }); + + test("environment variable name should override protocol (MYSQL_URL with postgres protocol should be mysql)", () => { + process.env.MYSQL_URL = "postgres://host:5432/db"; + + const options = new SQL(); + 
expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(5432); + }); + test("should use MySQL-specific environment variables", () => { + process.env.MYSQL_HOST = "mysql-server"; + process.env.MYSQL_PORT = "3307"; + process.env.MYSQL_USER = "admin"; + process.env.MYSQL_PASSWORD = "secret"; + process.env.MYSQL_DATABASE = "production"; + + const options = new SQL({ adapter: "mysql" }); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("mysql-server"); + expect(options.options.port).toBe(3307); + expect(options.options.username).toBe("admin"); + expect(options.options.password).toBe("secret"); + expect(options.options.database).toBe("production"); + }); + + test("MySQL-specific env vars should take precedence over generic ones", () => { + process.env.USER = "generic-user"; + process.env.MYSQL_USER = "mysql-user"; + + const options = new SQL({ adapter: "mysql" }); + expect(options.options.username).toBe("mysql-user"); + }); + + test("should default to database name 'mysql' for MySQL adapter", () => { + const options = new SQL({ adapter: "mysql", hostname: "localhost" }); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.database).toBe("mysql"); + }); + + test("should default to username as database name for PostgreSQL adapter", () => { + const options = new SQL({ adapter: "postgres", hostname: "localhost", username: "testuser" }); + expect(options.options.adapter).toBe("postgres"); + expect(options.options.database).toBe("testuser"); + }); + + test("should infer mysql adapter from TLS_MYSQL_DATABASE_URL", () => { + process.env.TLS_MYSQL_DATABASE_URL = "mysql://user:pass@host:3306/db"; + + const options = new SQL(); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(3306); + expect(options.options.sslMode).toBe(2); // SSLMode.require + }); + + 
test("should infer postgres adapter from TLS_POSTGRES_DATABASE_URL", () => { + process.env.TLS_POSTGRES_DATABASE_URL = "postgres://user:pass@host:5432/db"; + + const options = new SQL(); + expect(options.options.adapter).toBe("postgres"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(5432); + expect(options.options.sslMode).toBe(2); // SSLMode.require + }); + + test("should infer adapter from TLS_DATABASE_URL using protocol", () => { + process.env.TLS_DATABASE_URL = "mysql://user:pass@host:3306/db"; + + const options = new SQL(); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(3306); + expect(options.options.sslMode).toBe(2); // SSLMode.require + }); + + describe("Adapter-Protocol Validation", () => { + test("should work with explicit adapter and URL without protocol", () => { + const options = new SQL("user:pass@host:3306/db", { adapter: "mysql" }); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(3306); + }); + + test("should work with explicit adapter and matching protocol", () => { + const options = new SQL("mysql://user:pass@host:3306/db", { adapter: "mysql" }); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("host"); + expect(options.options.port).toBe(3306); + }); + + test.skipIf(isWindows)("should work with unix:// protocol and explicit adapter", () => { + using sock = Bun.listen({ + unix: "/tmp/thisisacoolmysql.sock", + socket: { + data: console.log, + }, + }); + + const options = new SQL(`unix://${sock.unix}`, { adapter: "mysql" }); + expect(options.options.adapter).toBe("mysql"); + expect(options.options.path).toBe("/tmp/thisisacoolmysql.sock"); + + unlinkSync(sock.unix); + }); + + test("should work with sqlite:// protocol and sqlite adapter", () => { + const options = new SQL("sqlite:///tmp/test.db", { 
adapter: "sqlite" }); + expect(options.options.adapter).toBe("sqlite"); + expect(options.options.filename).toBe("/tmp/test.db"); + }); + + test("should work with sqlite:// protocol without adapter", () => { + const options = new SQL("sqlite:///tmp/test.db"); + expect(options.options.adapter).toBe("sqlite"); + expect(options.options.filename).toBe("/tmp/test.db"); + }); + + describe("Explicit options override URL parameters", () => { + test("explicit hostname should override URL hostname", () => { + const options = new SQL("postgres://urluser:urlpass@urlhost:1234/urldb", { + hostname: "explicithost", + }); + + expect(options.options.hostname).toBe("explicithost"); + expect(options.options.port).toBe(1234); // URL port should remain + expect(options.options.username).toBe("urluser"); // URL username should remain + expect(options.options.database).toBe("urldb"); // URL database should remain + }); + + test("explicit port should override URL port", () => { + const options = new SQL("postgres://urluser:urlpass@urlhost:1234/urldb", { + port: 5432, + }); + + expect(options.options.hostname).toBe("urlhost"); // URL hostname should remain + expect(options.options.port).toBe(5432); + expect(options.options.username).toBe("urluser"); // URL username should remain + expect(options.options.database).toBe("urldb"); // URL database should remain + }); + + test("explicit username should override URL username", () => { + const options = new SQL("postgres://urluser:urlpass@urlhost:1234/urldb", { + username: "explicituser", + }); + + expect(options.options.hostname).toBe("urlhost"); // URL hostname should remain + expect(options.options.port).toBe(1234); // URL port should remain + expect(options.options.username).toBe("explicituser"); + expect(options.options.database).toBe("urldb"); // URL database should remain + }); + + test("explicit password should override URL password", () => { + const options = new SQL("postgres://urluser:urlpass@urlhost:1234/urldb", { + password: 
"explicitpass", + }); + + expect(options.options.hostname).toBe("urlhost"); // URL hostname should remain + expect(options.options.port).toBe(1234); // URL port should remain + expect(options.options.username).toBe("urluser"); // URL username should remain + expect(options.options.password).toBe("explicitpass"); + expect(options.options.database).toBe("urldb"); // URL database should remain + }); + + test("explicit database should override URL database", () => { + const options = new SQL("postgres://urluser:urlpass@urlhost:1234/urldb", { + database: "explicitdb", + }); + + expect(options.options.hostname).toBe("urlhost"); // URL hostname should remain + expect(options.options.port).toBe(1234); // URL port should remain + expect(options.options.username).toBe("urluser"); // URL username should remain + expect(options.options.database).toBe("explicitdb"); + }); + + test("multiple explicit options should override corresponding URL parameters", () => { + const options = new SQL("postgres://urluser:urlpass@urlhost:1234/urldb", { + hostname: "explicithost", + port: 5432, + username: "explicituser", + password: "explicitpass", + database: "explicitdb", + }); + + expect(options.options.hostname).toBe("explicithost"); + expect(options.options.port).toBe(5432); + expect(options.options.username).toBe("explicituser"); + expect(options.options.password).toBe("explicitpass"); + expect(options.options.database).toBe("explicitdb"); + }); + + test("should work with MySQL URLs and explicit options", () => { + const options = new SQL("mysql://urluser:urlpass@urlhost:3306/urldb", { + hostname: "explicithost", + port: 3307, + username: "explicituser", + }); + + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("explicithost"); + expect(options.options.port).toBe(3307); + expect(options.options.username).toBe("explicituser"); + expect(options.options.password).toBe("urlpass"); // URL password should remain + 
expect(options.options.database).toBe("urldb"); // URL database should remain + }); + + test("should work with alternative option names (user, pass, db, host)", () => { + const options = new SQL("postgres://urluser:urlpass@urlhost:1234/urldb", { + host: "explicithost", + user: "explicituser", + pass: "explicitpass", + db: "explicitdb", + }); + + expect(options.options.hostname).toBe("explicithost"); + expect(options.options.username).toBe("explicituser"); + expect(options.options.password).toBe("explicitpass"); + expect(options.options.database).toBe("explicitdb"); + }); + + test("explicit options should override URL even when environment variables are present", () => { + process.env.PGHOST = "envhost"; + process.env.PGPORT = "9999"; + process.env.PGUSER = "envuser"; + + const options = new SQL("postgres://urluser:urlpass@urlhost:1234/urldb", { + hostname: "explicithost", + port: 5432, + username: "explicituser", + }); + + expect(options.options.hostname).toBe("explicithost"); + expect(options.options.port).toBe(5432); + expect(options.options.username).toBe("explicituser"); + expect(options.options.password).toBe("urlpass"); // URL password should remain since no explicit password + expect(options.options.database).toBe("urldb"); // URL database should remain + }); + + test("explicit options should have higher precedence than environment-specific variables", () => { + process.env.MYSQL_HOST = "mysqlhost"; + process.env.MYSQL_USER = "mysqluser"; + process.env.MYSQL_PASSWORD = "mysqlpass"; + + const options = new SQL("mysql://urluser:urlpass@urlhost:3306/urldb", { + hostname: "explicithost", + username: "explicituser", + }); + + expect(options.options.adapter).toBe("mysql"); + expect(options.options.hostname).toBe("explicithost"); + expect(options.options.username).toBe("explicituser"); + expect(options.options.password).toBe("urlpass"); // URL password (not env) + expect(options.options.database).toBe("urldb"); // URL database should remain + }); + }); + }); +}); 
diff --git a/test/js/sql/sql-mysql.test.ts b/test/js/sql/sql-mysql.test.ts index cdce289482..42a5e5635b 100644 --- a/test/js/sql/sql-mysql.test.ts +++ b/test/js/sql/sql-mysql.test.ts @@ -44,7 +44,7 @@ if (docker) { env: image.env, }, (port: number) => { - const options = { + const options: Bun.SQL.Options = { url: `mysql://root:bun@localhost:${port}`, max: 1, tls: @@ -143,11 +143,13 @@ if (docker) { onconnect, onclose, }); - expect(await sql`select 123 as x`).toEqual([{ x: 123 }]); + expect<[{ x: number }]>(await sql`select 123 as x`).toEqual([{ x: 123 }]); expect(onconnect).toHaveBeenCalledTimes(1); expect(onclose).not.toHaveBeenCalled(); const err = await onClosePromise.promise; - expect(err.code).toBe(`ERR_MYSQL_IDLE_TIMEOUT`); + expect(err).toBeInstanceOf(SQL.SQLError); + expect(err).toBeInstanceOf(SQL.MySQLError); + expect((err as SQL.MySQLError).code).toBe(`ERR_MYSQL_IDLE_TIMEOUT`); }); test("Max lifetime works", async () => { @@ -162,8 +164,8 @@ if (docker) { onconnect, onclose, }); - let error: any; - expect(await sql`select 1 as x`).toEqual([{ x: 1 }]); + let error: unknown; + expect<[{ x: number }]>(await sql`select 1 as x`).toEqual([{ x: 1 }]); expect(onconnect).toHaveBeenCalledTimes(1); try { while (true) { @@ -177,7 +179,9 @@ if (docker) { expect(onclose).toHaveBeenCalledTimes(1); - expect(error.code).toBe(`ERR_MYSQL_LIFETIME_TIMEOUT`); + expect(error).toBeInstanceOf(SQL.SQLError); + expect(error).toBeInstanceOf(SQL.MySQLError); + expect((error as SQL.MySQLError).code).toBe(`ERR_MYSQL_LIFETIME_TIMEOUT`); }); // Last one wins. 
diff --git a/test/js/sql/sql.test.ts b/test/js/sql/sql.test.ts index 57740fa7eb..1f063f468a 100644 --- a/test/js/sql/sql.test.ts +++ b/test/js/sql/sql.test.ts @@ -424,6 +424,61 @@ if (isDockerEnabled()) { expect(sql.options.database).toBe("bun@bun"); }); + test("Minimal reproduction of Bun.SQL PostgreSQL hang bug (#22395)", async () => { + for (let i = 0; i < 10; i++) { + await using sql = new SQL({ + ...options, + idleTimeout: 10, + connectionTimeout: 10, + maxLifetime: 10, + }); + + const random_id = randomUUIDv7() + "test_hang"; + // Setup: Create table with exclusion constraint + await sql`DROP TABLE IF EXISTS ${sql(random_id)} CASCADE`; + await sql`CREATE EXTENSION IF NOT EXISTS btree_gist`; + await sql` + CREATE TABLE ${sql(random_id)} ( + id SERIAL PRIMARY KEY, + start_time TIMESTAMPTZ NOT NULL, + end_time TIMESTAMPTZ NOT NULL, + resource_id INT NOT NULL, + EXCLUDE USING gist ( + resource_id WITH =, + tstzrange(start_time, end_time) WITH && + ) + ) + `; + + // Step 1: Insert a row (succeeds) + await sql` + INSERT INTO ${sql(random_id)} (start_time, end_time, resource_id) + VALUES ('2024-01-01 10:00:00', '2024-01-01 12:00:00', 1) + `; + + // Step 2: Try to insert conflicting row (throws expected error) + try { + await sql` + INSERT INTO ${sql(random_id)} (start_time, end_time, resource_id) + VALUES (${"2024-01-01 11:00:00"}, ${"2024-01-01 13:00:00"}, ${1}) + `; + expect.unreachable(); + } catch {} + + // Step 3: Try another query - THIS WILL HANG + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error("TIMEOUT")), 200); + }); + + try { + const result = await Promise.race([sql`SELECT COUNT(*) FROM ${sql(random_id)}`, timeoutPromise]); + expect(result[0].count).toBe("1"); + } catch (err: any) { + expect(err.message).not.toBe("TIMEOUT"); + } + } + }); + test("Connects with no options", async () => { // we need at least the usename and port await using sql = postgres({ max: 1, port: container.port, username: login.username }); 
diff --git a/test/js/sql/sqlite-sql.test.ts b/test/js/sql/sqlite-sql.test.ts index 9d7deee6d2..12d9189dc6 100644 --- a/test/js/sql/sqlite-sql.test.ts +++ b/test/js/sql/sqlite-sql.test.ts @@ -226,46 +226,62 @@ describe("Connection & Initialization", () => { await sql.close(); }); + }); - test("should NOT use PG_URL for SQLite", async () => { - Bun.env.PG_URL = "postgres://user:pass@localhost:5432/mydb"; + describe("options.url overrides first argument", () => { + test("should use options.url for postgres when it overrides first argument", () => { + const sql = new SQL("http://wrong-host/db", { + adapter: "postgres", + url: "postgres://correct-host:5432/mydb", + }); - const sql = new SQL({ adapter: "sqlite", filename: ":memory:" }); - expect(sql.options.adapter).toBe("sqlite"); - expect(sql.options.filename).toBe(":memory:"); - - await sql.close(); - }); - - test("should throw error when POSTGRES_URL is used without adapter specification", () => { - Bun.env.POSTGRES_URL = "postgres://user:pass@localhost:5432/mydb"; - Bun.env.DATABASE_URL = undefined; - - // This should create a postgres connection, not sqlite - const sql = new SQL(); expect(sql.options.adapter).toBe("postgres"); + expect(sql.options.hostname).toBe("correct-host"); + expect(sql.options.port).toBe(5432); + expect(sql.options.database).toBe("mydb"); + sql.close(); }); - test("should handle multiple env vars with precedence", async () => { - // Test precedence: POSTGRES_URL > DATABASE_URL > PGURL > PG_URL - Bun.env.PG_URL = "postgres://pg_url@localhost:5432/pg_db"; - Bun.env.PGURL = "postgres://pgurl@localhost:5432/pgurl_db"; - Bun.env.DATABASE_URL = "sqlite://:memory:"; - Bun.env.POSTGRES_URL = "postgres://postgres@localhost:5432/postgres_db"; + test("should use options.url for mysql when it overrides first argument", () => { + const sql = new SQL("http://wrong-host/wrongdb", { + adapter: "mysql", + url: "mysql://user:pass@mysql-host:3306/correctdb", + }); + + expect(sql.options.adapter).toBe("mysql"); 
+ expect(sql.options.hostname).toBe("mysql-host"); + expect(sql.options.port).toBe(3306); + expect(sql.options.database).toBe("correctdb"); + + sql.close(); + }); + + test("should use options.url for mariadb when it overrides first argument", () => { + const sql = new SQL("http://wrong-host:1234/wrongdb", { + adapter: "mariadb", + url: "mariadb://maria-host:3307/mariadb", + }); + + expect(sql.options.adapter).toBe("mariadb"); + expect(sql.options.hostname).toBe("maria-host"); + expect(sql.options.port).toBe(3307); + expect(sql.options.database).toBe("mariadb"); + + sql.close(); + }); + + test("should use first argument when options.url is not provided", () => { + const sql = new SQL("postgres://first-arg-host:5432/firstdb", { + adapter: "postgres", + }); - const sql = new SQL(); - // POSTGRES_URL takes precedence expect(sql.options.adapter).toBe("postgres"); - await sql.close(); + expect(sql.options.hostname).toBe("first-arg-host"); + expect(sql.options.port).toBe(5432); + expect(sql.options.database).toBe("firstdb"); - // Remove POSTGRES_URL - delete Bun.env.POSTGRES_URL; - const sql2 = new SQL(); - // DATABASE_URL takes next precedence and it's SQLite (detected via :memory:) - expect(sql2.options.adapter).toBe("sqlite"); - expect(sql2.options.filename).toBe(":memory:"); - await sql2.close(); + sql.close(); }); }); @@ -579,14 +595,14 @@ describe("Connection & Initialization", () => { test("should handle sqlite: without path", () => { const sql = new SQL("sqlite:"); expect(sql.options.adapter).toBe("sqlite"); - expect(sql.options.filename).toBe(""); + expect(sql.options.filename).toBe(":memory:"); sql.close(); }); test("should handle sqlite:// without path", () => { const sql = new SQL("sqlite://"); expect(sql.options.adapter).toBe("sqlite"); - expect(sql.options.filename).toBe(""); + expect(sql.options.filename).toBe(":memory:"); sql.close(); }); @@ -671,7 +687,7 @@ describe("Connection & Initialization", () => { describe("Error Cases", () => { test("should throw 
for unsupported adapter", () => { expect(() => new SQL({ adapter: "mssql" as any })).toThrowErrorMatchingInlineSnapshot( - `"Unsupported adapter: mssql. Supported adapters: "postgres", "sqlite", "mysql""`, + `"Unsupported adapter: mssql. Supported adapters: "postgres", "sqlite", "mysql", "mariadb""`, ); }); diff --git a/test/js/sql/sqlite-url-parsing.test.ts b/test/js/sql/sqlite-url-parsing.test.ts index 006bd73d50..2ad42a02a7 100644 --- a/test/js/sql/sqlite-url-parsing.test.ts +++ b/test/js/sql/sqlite-url-parsing.test.ts @@ -22,7 +22,7 @@ describe("SQLite URL Parsing Matrix", () => { { input: "test@symbol.db", expected: "test@symbol.db", name: "@ in filename" }, { input: "test&.db", expected: "test&.db", name: "ampersand in filename" }, { input: "test%20encoded.db", expected: "test%20encoded.db", name: "percent encoding" }, - { input: "", expected: "", name: "empty path" }, + { input: "", expected: ":memory:", name: "empty path" }, ] as const; const testMatrix = protocols @@ -67,6 +67,10 @@ describe("SQLite URL Parsing Matrix", () => { // Not a valid file:// URL, so implementation just strips the prefix expected = testCase.url.slice(7); // "file://".length } + // Empty filename should default to :memory: + if (expected === "") { + expected = ":memory:"; + } expect(filename).toBe(expected); } else { expect(sql.options.filename).toBe(testCase.expected); diff --git a/test/js/sql/tls-sql.test.ts b/test/js/sql/tls-sql.test.ts index 9257b1d3f4..88c0653b4a 100644 --- a/test/js/sql/tls-sql.test.ts +++ b/test/js/sql/tls-sql.test.ts @@ -34,6 +34,11 @@ for (const options of [ transactionPool: false, }, ] satisfies (Bun.SQL.Options & { transactionPool?: boolean })[]) { + if (options.url === undefined) { + console.log("SKIPPING TEST", JSON.stringify(options), "BECAUSE MISSING THE URL SECRET"); + continue; + } + describe(`${options.transactionPool ? "Transaction Pooling" : `Prepared Statements (${options.prepare ? 
"on" : "off"})`}`, () => { test("default sql", async () => { expect(sql.reserve).toBeDefined(); @@ -199,7 +204,7 @@ for (const options of [ expect( await sql .begin(sql => [sql`select wat`, sql`select current_setting('bun_sql.test') as x, ${1} as a`]) - .catch(e => e.errno || e), + .catch(e => e.errno), ).toBe("42703"); }); diff --git a/test/js/third_party/next-auth/next-auth.test.ts b/test/js/third_party/next-auth/next-auth.test.ts index bed77f2275..5b5d63ab78 100644 --- a/test/js/third_party/next-auth/next-auth.test.ts +++ b/test/js/third_party/next-auth/next-auth.test.ts @@ -23,12 +23,16 @@ describe("next-auth", () => { }, }); + console.log("running bun install"); await runBunInstall(bunEnv, testDir, { savesLockfile: false }); - console.log(testDir); + console.log("starting server"); const result = bunRun(join(testDir, "server.js"), { AUTH_SECRET: "I7Jiq12TSMlPlAzyVAT+HxYX7OQb/TTqIbfTTpr1rg8=", }); + + console.log(result.stdout); + console.log(result.stderr); expect(result.stderr).toBe(""); expect(result.stdout).toBeDefined(); const lines = result.stdout?.split("\n") ?? 
[]; diff --git a/test/js/third_party/pg-gateway/pglite.test.ts b/test/js/third_party/pg-gateway/pglite.test.ts index 9c63350053..7b1fcfd976 100644 --- a/test/js/third_party/pg-gateway/pglite.test.ts +++ b/test/js/third_party/pg-gateway/pglite.test.ts @@ -2,25 +2,19 @@ import { PGlite } from "@electric-sql/pglite"; import { SQL, randomUUIDv7 } from "bun"; import { expect, test } from "bun:test"; import { once } from "events"; -import { isCI, isLinux } from "harness"; import net, { AddressInfo } from "node:net"; import { fromNodeSocket } from "pg-gateway/node"; -// TODO(@190n) linux-x64 sometimes fails due to JavaScriptCore bug -// https://github.com/oven-sh/bun/issues/17841#issuecomment-2695792567 -// https://bugs.webkit.org/show_bug.cgi?id=289009 -test.todoIf(isCI && isLinux && process.arch == "x64")( - "pglite should be able to query using pg-gateway and Bun.SQL", - async () => { - const name = "test_" + randomUUIDv7("hex").replaceAll("-", ""); - const dataDir = `memory://${name}`; - const db = new PGlite(dataDir); +test("pglite should be able to query using pg-gateway and Bun.SQL", async () => { + const name = "test_" + randomUUIDv7("hex").replaceAll("-", ""); + const dataDir = `memory://${name}`; + const db = new PGlite(dataDir); - // Wait for the database to initialize - await db.waitReady; + // Wait for the database to initialize + await db.waitReady; - // Create a simple test table - await db.exec(` + // Create a simple test table + await db.exec(` CREATE TABLE IF NOT EXISTS test_table ( id SERIAL PRIMARY KEY, name TEXT NOT NULL @@ -29,70 +23,74 @@ test.todoIf(isCI && isLinux && process.arch == "x64")( INSERT INTO test_table (name) VALUES ('Test 1'), ('Test 2'), ('Test 3'); `); - // Create a simple server using pg-gateway - const server = net.createServer(async socket => { - await fromNodeSocket(socket, { - serverVersion: "16.3", - auth: { - method: "trust", - }, - async onStartup() { - // Wait for PGlite to be ready before further processing - await 
db.waitReady; - }, - async onMessage(data, { isAuthenticated }: { isAuthenticated: boolean }) { - // Only forward messages to PGlite after authentication - if (!isAuthenticated) { - return; - } + // Create a simple server using pg-gateway + const server = net.createServer(async socket => { + await fromNodeSocket(socket, { + serverVersion: "16.3", + auth: { + method: "trust", + }, + async onStartup() { + // Wait for PGlite to be ready before further processing + await db.waitReady; + }, + async onMessage(data, { isAuthenticated }: { isAuthenticated: boolean }) { + // Only forward messages to PGlite after authentication + if (!isAuthenticated) { + return; + } - return await db.execProtocolRaw(data); - }, - }); + return await db.execProtocolRaw(data); + }, }); + }); - // Start listening - await once(server.listen(0), "listening"); + // Start listening + await once(server.listen(0), "listening"); - const port = (server.address() as AddressInfo).port; + const port = (server.address() as AddressInfo).port; - await using sql = new SQL({ - hostname: "localhost", - port: port, - database: name, - max: 1, - }); + await using sql = new SQL({ + hostname: "localhost", + port: port, + database: name, + max: 1, + }); - { - // prepared statement without parameters - const result = await sql`SELECT * FROM test_table WHERE id = 1`; - expect(result).toBeDefined(); - expect(result.length).toBe(1); - expect(result[0]).toEqual({ id: 1, name: "Test 1" }); - } + expect(sql.options.hostname).toBe("localhost"); + expect(sql.options.port).toBe(port); + expect(sql.options.database).toBe(name); + expect(sql.options.max).toBe(1); - { - // using prepared statement - const result = await sql`SELECT * FROM test_table WHERE id = ${1}`; - expect(result).toBeDefined(); - expect(result.length).toBe(1); - expect(result[0]).toEqual({ id: 1, name: "Test 1" }); - } + { + // prepared statement without parameters + const result = await sql`SELECT * FROM test_table WHERE id = 1`; + 
expect(result).toBeDefined(); + expect(result.length).toBe(1); + expect(result[0]).toEqual({ id: 1, name: "Test 1" }); + } - { - // using simple query - const result = await sql`SELECT * FROM test_table WHERE id = 1`.simple(); - expect(result).toBeDefined(); - expect(result.length).toBe(1); - expect(result[0]).toEqual({ id: 1, name: "Test 1" }); - } + { + // using prepared statement + const result = await sql`SELECT * FROM test_table WHERE id = ${1}`; + expect(result).toBeDefined(); + expect(result.length).toBe(1); + expect(result[0]).toEqual({ id: 1, name: "Test 1" }); + } - { - // using unsafe with parameters - const result = await sql.unsafe("SELECT * FROM test_table WHERE id = $1", [1]); - expect(result).toBeDefined(); - expect(result.length).toBe(1); - expect(result[0]).toEqual({ id: 1, name: "Test 1" }); - } - }, -); + { + // using simple query + const result = await sql`SELECT * FROM test_table WHERE id = 1`.simple(); + expect(result).toBeDefined(); + expect(result.length).toBe(1); + expect(result[0]).toEqual({ id: 1, name: "Test 1" }); + } + + { + // using unsafe with parameters + const result = await sql.unsafe("SELECT * FROM test_table WHERE id = $1", [1]); + expect(result).toBeDefined(); + expect(result.length).toBe(1); + expect(result[0]).toEqual({ id: 1, name: "Test 1" }); + } +}); diff --git a/test/js/web/console/console-log.test.ts b/test/js/web/console/console-log.test.ts index 660b3e29ea..d08657381e 100644 --- a/test/js/web/console/console-log.test.ts +++ b/test/js/web/console/console-log.test.ts @@ -130,7 +130,7 @@ Quote"Backslash 55 | console.warn("Warning log"); 56 | console.warn(new Error("console.warn an error")); 57 | console.error(new Error("console.error an error")); - ^ + ^ error: console.error an error at :NN:NN at loadAndEvaluateModule (N:NN) diff --git a/test/js/web/fetch/fetch.upgrade.test.ts b/test/js/web/fetch/fetch.upgrade.test.ts new file mode 100644 index 0000000000..58bc438f7f --- /dev/null +++ 
b/test/js/web/fetch/fetch.upgrade.test.ts @@ -0,0 +1,63 @@ +import { describe, expect, test } from "bun:test"; +import { decodeFrames, encodeCloseFrame, encodeTextFrame, upgradeHeaders } from "./websocket.helpers"; + +describe("fetch upgrade", () => { + test("should upgrade to websocket", async () => { + const serverMessages: string[] = []; + using server = Bun.serve({ + port: 0, + fetch(req) { + if (server.upgrade(req)) return; + return new Response("Hello World"); + }, + websocket: { + open(ws) { + ws.send("Hello World"); + }, + message(ws, message) { + serverMessages.push(message as string); + }, + close(ws) { + serverMessages.push("close"); + }, + }, + }); + const res = await fetch(server.url, { + method: "GET", + headers: upgradeHeaders(), + async *body() { + yield encodeTextFrame("hello"); + yield encodeTextFrame("world"); + yield encodeTextFrame("bye"); + yield encodeCloseFrame(); + }, + }); + expect(res.status).toBe(101); + expect(res.headers.get("upgrade")).toBe("websocket"); + expect(res.headers.get("sec-websocket-accept")).toBeString(); + expect(res.headers.get("connection")).toBe("Upgrade"); + + const clientMessages: string[] = []; + const { promise, resolve } = Promise.withResolvers(); + const reader = res.body!.getReader(); + + while (true) { + const { value, done } = await reader.read(); + if (done) break; + for (const msg of decodeFrames(Buffer.from(value))) { + if (typeof msg === "string") { + clientMessages.push(msg); + } else { + clientMessages.push(msg.type); + } + + if (msg.type === "close") { + resolve(); + } + } + } + await promise; + expect(serverMessages).toEqual(["hello", "world", "bye", "close"]); + expect(clientMessages).toEqual(["Hello World", "close"]); + }); +}); diff --git a/test/js/web/fetch/websocket.helpers.ts b/test/js/web/fetch/websocket.helpers.ts new file mode 100644 index 0000000000..6425735039 --- /dev/null +++ b/test/js/web/fetch/websocket.helpers.ts @@ -0,0 +1,156 @@ +import { createHash, randomBytes } from "node:crypto"; 
+ +// RFC 6455 magic GUID +const WS_GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"; + +function makeKey() { + return randomBytes(16).toString("base64"); +} + +function acceptFor(key) { + return createHash("sha1") + .update(key + WS_GUID) + .digest("base64"); +} + +export function encodeCloseFrame(code = 1000, reason = "") { + const reasonBuf = Buffer.from(reason, "utf8"); + const payloadLen = 2 + reasonBuf.length; // 2 bytes for code + reason + const header = []; + let headerLen = 2; + if (payloadLen < 126) { + // masked bit (0x80) + length + header.push(0x88, 0x80 | payloadLen); + } else if (payloadLen <= 0xffff) { + headerLen += 2; + header.push(0x88, 0x80 | 126, payloadLen >> 8, payloadLen & 0xff); + } else { + throw new Error("Close reason too long"); + } + + const mask = randomBytes(4); + const buf = Buffer.alloc(headerLen + 4 + payloadLen); + Buffer.from(header).copy(buf, 0); + mask.copy(buf, headerLen); + + // write code + reason + const unmasked = Buffer.alloc(payloadLen); + unmasked.writeUInt16BE(code, 0); + reasonBuf.copy(unmasked, 2); + + // apply mask + for (let i = 0; i < payloadLen; i++) { + buf[headerLen + 4 + i] = unmasked[i] ^ mask[i & 3]; + } + + return buf; +} +export function* decodeFrames(buffer) { + let i = 0; + while (i + 2 <= buffer.length) { + const b0 = buffer[i++]; + const b1 = buffer[i++]; + const fin = (b0 & 0x80) !== 0; + const opcode = b0 & 0x0f; + const masked = (b1 & 0x80) !== 0; + let len = b1 & 0x7f; + + if (len === 126) { + if (i + 2 > buffer.length) break; + len = buffer.readUInt16BE(i); + i += 2; + } else if (len === 127) { + if (i + 8 > buffer.length) break; + const big = buffer.readBigUInt64BE(i); + i += 8; + if (big > BigInt(Number.MAX_SAFE_INTEGER)) throw new Error("frame too large"); + len = Number(big); + } + + let mask; + if (masked) { + if (i + 4 > buffer.length) break; + mask = buffer.subarray(i, i + 4); + i += 4; + } + + if (i + len > buffer.length) break; + let payload = buffer.subarray(i, i + len); + i += len; + + if 
(masked && mask) { + const unmasked = Buffer.alloc(len); + for (let j = 0; j < len; j++) unmasked[j] = payload[j] ^ mask[j & 3]; + payload = unmasked; + } + + if (!fin) throw new Error("fragmentation not supported in this demo"); + if (opcode === 0x1) { + // text + yield payload.toString("utf8"); + } else if (opcode === 0x8) { + // CLOSE + yield { type: "close" }; + return; + } else if (opcode === 0x9) { + // PING -> respond with PONG if you implement writes here + yield { type: "ping", data: payload }; + } else if (opcode === 0xa) { + // PONG + yield { type: "pong", data: payload }; + } else { + // ignore other opcodes for brevity + } + } +} + +// Encode a single unfragmented TEXT frame (client -> server must be masked) +export function encodeTextFrame(str) { + const payload = Buffer.from(str, "utf8"); + const len = payload.length; + + let headerLen = 2; + if (len >= 126 && len <= 0xffff) headerLen += 2; + else if (len > 0xffff) headerLen += 8; + const maskKeyLen = 4; + + const buf = Buffer.alloc(headerLen + maskKeyLen + len); + // FIN=1, RSV=0, opcode=0x1 (text) + buf[0] = 0x80 | 0x1; + + // Set masked bit and length field(s) + let offset = 1; + if (len < 126) { + buf[offset++] = 0x80 | len; // mask bit + length + } else if (len <= 0xffff) { + buf[offset++] = 0x80 | 126; + buf.writeUInt16BE(len, offset); + offset += 2; + } else { + buf[offset++] = 0x80 | 127; + buf.writeBigUInt64BE(BigInt(len), offset); + offset += 8; + } + + // Mask key + const mask = randomBytes(4); + mask.copy(buf, offset); + offset += 4; + + // Mask the payload + for (let i = 0; i < len; i++) { + buf[offset + i] = payload[i] ^ mask[i & 3]; + } + + return buf; +} + +export function upgradeHeaders() { + const secWebSocketKey = makeKey(); + return { + "Connection": "Upgrade", + "Upgrade": "websocket", + "Sec-WebSocket-Version": "13", + "Sec-WebSocket-Key": secWebSocketKey, + }; +} diff --git a/test/napi/napi-app/standalone_tests.cpp b/test/napi/napi-app/standalone_tests.cpp index 
c31c0a1c18..c712ef81cd 100644 --- a/test/napi/napi-app/standalone_tests.cpp +++ b/test/napi/napi-app/standalone_tests.cpp @@ -807,6 +807,438 @@ static napi_value test_deferred_exceptions(const Napi::CallbackInfo &info) { return ok(env); } +// Test for napi_create_array_with_length boundary handling +// Bun converts out-of-bounds lengths to 0, Node may handle differently +static napi_value +test_napi_create_array_boundary(const Napi::CallbackInfo &info) { + Napi::Env env = info.Env(); + + // Test with negative length + napi_value array_neg; + napi_status status = napi_create_array_with_length(env, -1, &array_neg); + + if (status == napi_ok) { + uint32_t length; + NODE_API_CALL(env, napi_get_array_length(env, array_neg, &length)); + printf("PASS: napi_create_array_with_length(-1) created array with length " + "%u\n", + length); + } else { + printf("FAIL: napi_create_array_with_length(-1) failed with status %d\n", + status); + } + + // Test with very large length (larger than max u32) + napi_value array_large; + size_t huge_length = (size_t)0xFFFFFFFF + 100; + status = napi_create_array_with_length(env, huge_length, &array_large); + + if (status == napi_ok) { + uint32_t length; + NODE_API_CALL(env, napi_get_array_length(env, array_large, &length)); + printf("PASS: napi_create_array_with_length(0x%zx) created array with " + "length %u\n", + huge_length, length); + } else if (status == napi_invalid_arg || status == napi_generic_failure) { + printf( + "PASS: napi_create_array_with_length(0x%zx) rejected with status %d\n", + huge_length, status); + } else { + printf("FAIL: napi_create_array_with_length(0x%zx) returned unexpected " + "status %d\n", + huge_length, status); + } + + // Test with value that becomes negative when cast to i32 (should become 0) + napi_value array_negative; + size_t negative_when_signed = 0x80000000; // 2^31 - becomes negative in i32 + status = + napi_create_array_with_length(env, negative_when_signed, &array_negative); + + if (status == napi_ok) 
{ + uint32_t length; + NODE_API_CALL(env, napi_get_array_length(env, array_negative, &length)); + if (length == 0) { + printf("PASS: napi_create_array_with_length(0x%zx) created array with " + "length 0 (clamped negative)\n", + negative_when_signed); + } else { + printf("FAIL: napi_create_array_with_length(0x%zx) created array with " + "length %u (expected 0)\n", + negative_when_signed, length); + } + } else { + printf("FAIL: napi_create_array_with_length(0x%zx) failed with status %d\n", + negative_when_signed, status); + } + + // Test with normal length to ensure it still works + napi_value array_normal; + status = napi_create_array_with_length(env, 10, &array_normal); + + if (status == napi_ok) { + uint32_t length; + NODE_API_CALL(env, napi_get_array_length(env, array_normal, &length)); + if (length == 10) { + printf("PASS: napi_create_array_with_length(10) created array with " + "correct length\n"); + } else { + printf("FAIL: napi_create_array_with_length(10) created array with " + "length %u\n", + length); + } + } else { + printf("FAIL: napi_create_array_with_length(10) failed with status %d\n", + status); + } + + return ok(env); +} + +// Test for napi_call_function recv parameter validation +// Node validates recv parameter, Bun might not +static napi_value +test_napi_call_function_recv_null(const Napi::CallbackInfo &info) { + Napi::Env env = info.Env(); + + // Create a simple function + napi_value global, function_val; + NODE_API_CALL(env, napi_get_global(env, &global)); + + // Get Array constructor as our test function + napi_value array_constructor; + NODE_API_CALL( + env, napi_get_named_property(env, global, "Array", &array_constructor)); + + // Try to call with null recv (this) parameter + napi_value result; + napi_status status = + napi_call_function(env, nullptr, array_constructor, 0, nullptr, &result); + + if (status == napi_ok) { + printf("PASS: napi_call_function with null recv succeeded\n"); + } else if (status == napi_invalid_arg) { + printf( + 
"PASS: napi_call_function with null recv returned napi_invalid_arg\n"); + } else { + printf("FAIL: napi_call_function with null recv returned unexpected " + "status: %d\n", + status); + } + + // Also test with a valid recv to ensure normal operation works + status = + napi_call_function(env, global, array_constructor, 0, nullptr, &result); + if (status == napi_ok) { + printf("PASS: napi_call_function with valid recv succeeded\n"); + } else { + printf("FAIL: napi_call_function with valid recv failed with status: %d\n", + status); + } + + return ok(env); +} + +// Test for napi_strict_equals - should match JavaScript === operator behavior +// This tests that NaN !== NaN and -0 === 0 +static napi_value test_napi_strict_equals(const Napi::CallbackInfo &info) { + Napi::Env env = info.Env(); + + // Test NaN !== NaN + napi_value nan1, nan2; + NODE_API_CALL(env, napi_create_double( + env, std::numeric_limits::quiet_NaN(), &nan1)); + NODE_API_CALL(env, napi_create_double( + env, std::numeric_limits::quiet_NaN(), &nan2)); + + bool nan_equals; + NODE_API_CALL(env, napi_strict_equals(env, nan1, nan2, &nan_equals)); + + if (nan_equals) { + printf("FAIL: NaN === NaN returned true, expected false\n"); + } else { + printf("PASS: NaN !== NaN\n"); + } + + // Test -0 === 0 + napi_value neg_zero, pos_zero; + NODE_API_CALL(env, napi_create_double(env, -0.0, &neg_zero)); + NODE_API_CALL(env, napi_create_double(env, 0.0, &pos_zero)); + + bool zero_equals; + NODE_API_CALL(env, napi_strict_equals(env, neg_zero, pos_zero, &zero_equals)); + + if (!zero_equals) { + printf("FAIL: -0 === 0 returned false, expected true\n"); + } else { + printf("PASS: -0 === 0\n"); + } + + // Test normal values work correctly + napi_value val1, val2, val3; + NODE_API_CALL(env, napi_create_double(env, 42.0, &val1)); + NODE_API_CALL(env, napi_create_double(env, 42.0, &val2)); + NODE_API_CALL(env, napi_create_double(env, 43.0, &val3)); + + bool same_equals, diff_equals; + NODE_API_CALL(env, napi_strict_equals(env, 
val1, val2, &same_equals)); + NODE_API_CALL(env, napi_strict_equals(env, val1, val3, &diff_equals)); + + if (!same_equals) { + printf("FAIL: 42 === 42 returned false, expected true\n"); + } else { + printf("PASS: 42 === 42\n"); + } + + if (diff_equals) { + printf("FAIL: 42 === 43 returned true, expected false\n"); + } else { + printf("PASS: 42 !== 43\n"); + } + + return ok(env); +} + +// Test for dataview bounds checking and error messages +static napi_value +test_napi_dataview_bounds_errors(const Napi::CallbackInfo &info) { + Napi::Env env = info.Env(); + + // Create an ArrayBuffer + napi_value arraybuffer; + void *data = nullptr; + NODE_API_CALL(env, napi_create_arraybuffer(env, 100, &data, &arraybuffer)); + + // Test 1: DataView exceeding buffer bounds + napi_value dataview; + napi_status status = napi_create_dataview(env, 50, arraybuffer, 60, + &dataview); // 60 + 50 = 110 > 100 + + if (status == napi_ok) { + printf("FAIL: napi_create_dataview allowed DataView exceeding buffer " + "bounds\n"); + } else { + printf("PASS: napi_create_dataview rejected DataView exceeding buffer " + "bounds\n"); + + // Check if an exception was thrown with the expected error + bool is_exception_pending = false; + NODE_API_CALL(env, napi_is_exception_pending(env, &is_exception_pending)); + + if (is_exception_pending) { + napi_value exception; + NODE_API_CALL(env, napi_get_and_clear_last_exception(env, &exception)); + + // Try to get error message + napi_value message_val; + napi_status msg_status = + napi_get_named_property(env, exception, "message", &message_val); + + if (msg_status == napi_ok) { + char message[256]; + size_t message_len; + napi_get_value_string_utf8(env, message_val, message, sizeof(message), + &message_len); + printf(" Error message: %s\n", message); + } + } + } + + // Test 2: DataView at exact boundary (should work) + napi_value boundary_dataview; + status = napi_create_dataview(env, 40, arraybuffer, 60, + &boundary_dataview); // 60 + 40 = 100 exactly + + if 
(status != napi_ok) { + printf("FAIL: napi_create_dataview rejected valid DataView at exact " + "boundary\n"); + } else { + printf("PASS: napi_create_dataview accepted valid DataView at exact " + "boundary\n"); + } + + // Test 3: DataView with offset beyond buffer + napi_value beyond_dataview; + status = napi_create_dataview(env, 1, arraybuffer, 101, + &beyond_dataview); // offset 101 > 100 + + if (status == napi_ok) { + printf("FAIL: napi_create_dataview allowed DataView with offset beyond " + "buffer\n"); + } else { + printf("PASS: napi_create_dataview rejected DataView with offset beyond " + "buffer\n"); + } + + return ok(env); +} + +// Test for napi_typeof with potentially empty/invalid values +static napi_value test_napi_typeof_empty_value(const Napi::CallbackInfo &info) { + Napi::Env env = info.Env(); + + // Test 1: Create an uninitialized napi_value (simulating empty JSValue) + // This is technically undefined behavior but can reveal differences + napi_value uninit_value; + memset(&uninit_value, 0, sizeof(uninit_value)); + + napi_valuetype type; + napi_status status = napi_typeof(env, uninit_value, &type); + + if (status == napi_ok) { + if (type == napi_undefined) { + printf("PASS: napi_typeof(zero-initialized value) returned " + "napi_undefined (Bun behavior)\n"); + } else { + printf("FAIL: napi_typeof(zero-initialized value) returned %d\n", type); + } + } else { + printf("PASS: napi_typeof(zero-initialized value) returned error status %d " + "(Node behavior)\n", + status); + } + + // Test 2: Try accessing deleted reference (undefined behavior per spec) + // This is actually undefined behavior according to N-API documentation + // Both Node.js and Bun may crash or behave unpredictably + printf("INFO: Accessing deleted reference is undefined behavior - test " + "skipped\n"); + // After napi_delete_reference, the ref is invalid and should not be used + + // Test 3: Check with reinterpret_cast of nullptr + // This is the most likely way to get an empty JSValue 
+ napi_value *null_ptr = nullptr; + napi_value null_value = reinterpret_cast(null_ptr); + + status = napi_typeof(env, null_value, &type); + if (status == napi_ok) { + if (type == napi_undefined) { + printf("WARN: napi_typeof(nullptr) returned napi_undefined - Bun's " + "isEmpty() check\n"); + } else { + printf("INFO: napi_typeof(nullptr) returned type %d\n", type); + } + } else { + printf("INFO: napi_typeof(nullptr) returned error %d (safer behavior)\n", + status); + } + + return ok(env); +} + +// Test for Object.freeze and Object.seal with indexed properties +static napi_value +test_napi_freeze_seal_indexed(const Napi::CallbackInfo &info) { + Napi::Env env = info.Env(); + + // Test 1: Freeze array (has indexed properties) + napi_value array; + NODE_API_CALL(env, napi_create_array_with_length(env, 3, &array)); + + // Set some values + napi_value val; + NODE_API_CALL(env, napi_create_int32(env, 42, &val)); + NODE_API_CALL(env, napi_set_element(env, array, 0, val)); + + // Try to freeze the array + napi_status freeze_status = napi_object_freeze(env, array); + + if (freeze_status == napi_ok) { + // Try to modify after freeze + napi_value new_val; + NODE_API_CALL(env, napi_create_int32(env, 99, &new_val)); + napi_status set_status = napi_set_element(env, array, 1, new_val); + + if (set_status != napi_ok) { + printf("PASS: Array was frozen - cannot modify elements\n"); + } else { + // Check if it actually changed + napi_value get_val; + NODE_API_CALL(env, napi_get_element(env, array, 1, &get_val)); + int32_t num; + NODE_API_CALL(env, napi_get_value_int32(env, get_val, &num)); + + if (num == 99) { + printf("FAIL: Array with indexed properties was NOT actually frozen " + "(Bun behavior?)\n"); + } else { + printf("INFO: Array freeze had partial effect\n"); + } + } + } else { + printf("INFO: napi_object_freeze failed on array with status %d\n", + freeze_status); + } + + // Test 2: Seal array (has indexed properties) + napi_value array2; + NODE_API_CALL(env, 
napi_create_array_with_length(env, 3, &array2)); + NODE_API_CALL(env, napi_set_element(env, array2, 0, val)); + + // Try to seal the array + napi_status seal_status = napi_object_seal(env, array2); + + if (seal_status == napi_ok) { + // Try to add new property after seal + napi_value prop_val; + NODE_API_CALL( + env, napi_create_string_utf8(env, "test", NAPI_AUTO_LENGTH, &prop_val)); + napi_status set_status = + napi_set_named_property(env, array2, "newProp", prop_val); + + if (set_status != napi_ok) { + printf("PASS: Array was sealed - cannot add new properties\n"); + } else { + // Check if it actually was added + napi_value get_prop; + napi_status get_status = + napi_get_named_property(env, array2, "newProp", &get_prop); + + if (get_status == napi_ok) { + printf("FAIL: Array with indexed properties was NOT actually sealed " + "(Bun behavior?)\n"); + } else { + printf("INFO: Array seal had partial effect\n"); + } + } + } else { + printf("INFO: napi_object_seal failed on array with status %d\n", + seal_status); + } + + // Test 3: Freeze regular object (no indexed properties) + napi_value obj; + NODE_API_CALL(env, napi_create_object(env, &obj)); + NODE_API_CALL(env, napi_set_named_property(env, obj, "prop", val)); + + napi_status obj_freeze_status = napi_object_freeze(env, obj); + + if (obj_freeze_status == napi_ok) { + // Try to modify after freeze + napi_value new_val; + NODE_API_CALL(env, napi_create_int32(env, 999, &new_val)); + napi_status set_status = napi_set_named_property(env, obj, "prop", new_val); + + if (set_status != napi_ok) { + printf("PASS: Regular object was frozen correctly\n"); + } else { + // Check if it actually changed + napi_value get_val; + NODE_API_CALL(env, napi_get_named_property(env, obj, "prop", &get_val)); + int32_t num; + NODE_API_CALL(env, napi_get_value_int32(env, get_val, &num)); + + if (num == 999) { + printf("FAIL: Regular object was not frozen\n"); + } else { + printf("PASS: Regular object freeze prevented modification\n"); + } + 
} + } + + return ok(env); +} + void register_standalone_tests(Napi::Env env, Napi::Object exports) { REGISTER_FUNCTION(env, exports, test_issue_7685); REGISTER_FUNCTION(env, exports, test_issue_11949); @@ -829,6 +1261,12 @@ void register_standalone_tests(Napi::Env env, Napi::Object exports) { REGISTER_FUNCTION(env, exports, test_is_buffer); REGISTER_FUNCTION(env, exports, test_is_typedarray); REGISTER_FUNCTION(env, exports, test_deferred_exceptions); + REGISTER_FUNCTION(env, exports, test_napi_strict_equals); + REGISTER_FUNCTION(env, exports, test_napi_call_function_recv_null); + REGISTER_FUNCTION(env, exports, test_napi_create_array_boundary); + REGISTER_FUNCTION(env, exports, test_napi_dataview_bounds_errors); + REGISTER_FUNCTION(env, exports, test_napi_typeof_empty_value); + REGISTER_FUNCTION(env, exports, test_napi_freeze_seal_indexed); } } // namespace napitests diff --git a/test/napi/napi.test.ts b/test/napi/napi.test.ts index 7d55e027b5..66d525262a 100644 --- a/test/napi/napi.test.ts +++ b/test/napi/napi.test.ts @@ -630,6 +630,61 @@ describe("cleanup hooks", () => { }); }); + describe("napi_strict_equals", () => { + it("should match JavaScript === operator behavior", async () => { + const output = await checkSameOutput("test_napi_strict_equals", []); + expect(output).toContain("PASS: NaN !== NaN"); + expect(output).toContain("PASS: -0 === 0"); + expect(output).toContain("PASS: 42 === 42"); + expect(output).toContain("PASS: 42 !== 43"); + expect(output).not.toContain("FAIL"); + }); + }); + + describe("napi_call_function", () => { + it("should handle null recv parameter consistently", async () => { + const output = await checkSameOutput("test_napi_call_function_recv_null", []); + expect(output).toContain("PASS"); + expect(output).toContain("napi_call_function with valid recv succeeded"); + expect(output).not.toContain("FAIL"); + }); + }); + + describe("napi_create_array_with_length", () => { + it("should handle boundary values consistently", async () => { + 
const output = await checkSameOutput("test_napi_create_array_boundary", []); + expect(output).toContain("PASS"); + expect(output).toContain("napi_create_array_with_length(10) created array with correct length"); + expect(output).not.toContain("FAIL"); + }); + }); + + describe("napi_create_dataview", () => { + it("should validate bounds and provide consistent error messages", async () => { + const output = await checkSameOutput("test_napi_dataview_bounds_errors", []); + expect(output).toContain("napi_create_dataview"); + // Check for proper bounds validation + }); + }); + + describe("napi_typeof", () => { + it("should handle empty/invalid values", async () => { + const output = await checkSameOutput("test_napi_typeof_empty_value", []); + // This test explores edge cases with empty/invalid napi_values + // Bun has special handling for isEmpty() that Node doesn't have + expect(output).toContain("napi_typeof"); + }); + }); + + describe("napi_object_freeze and napi_object_seal", () => { + it("should handle arrays with indexed properties", async () => { + const output = await checkSameOutput("test_napi_freeze_seal_indexed", []); + // Bun has a check for indexed properties that Node.js doesn't have + // This might cause different behavior when freezing/sealing arrays + expect(output).toContain("freeze"); + }); + }); + describe("error handling", () => { it("removing non-existent env cleanup hook should not crash", async () => { // Test that removing non-existent hooks doesn't crash the process diff --git a/test/regression/issue/12548.test.ts b/test/regression/issue/12548.test.ts new file mode 100644 index 0000000000..09874cc53a --- /dev/null +++ b/test/regression/issue/12548.test.ts @@ -0,0 +1,76 @@ +import { expect, test } from "bun:test"; +import { bunEnv, bunExe, tempDir } from "harness"; + +test("issue #12548: TypeScript syntax should work with 'ts' loader in BunPlugin", async () => { + using dir = tempDir("issue-12548", { + "index.js": ` + import plugin from 
"./plugin.js"; + + Bun.plugin(plugin); + + // This should work with 'ts' loader + console.log(require('virtual-ts-module')); + `, + "plugin.js": ` + export default { + setup(build) { + build.module('virtual-ts-module', () => ({ + contents: "import { type TSchema } from '@sinclair/typebox'; export const test = 'works';", + loader: 'ts', + })); + }, + }; + `, + }); + + await using proc = Bun.spawn({ + cmd: [bunExe(), "index.js"], + env: bunEnv, + cwd: String(dir), + stderr: "pipe", + stdout: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); + + expect(exitCode).toBe(0); + expect(stderr).toBe(""); + expect(stdout).toContain('test: "works"'); +}); + +test("issue #12548: TypeScript type imports work with 'ts' loader", async () => { + using dir = tempDir("issue-12548-type-imports", { + "index.js": ` + Bun.plugin({ + setup(build) { + build.module('test-module', () => ({ + contents: \` + import { type TSchema } from '@sinclair/typebox'; + type MyType = { a: number }; + export type { MyType }; + export const value = 42; + \`, + loader: 'ts', + })); + }, + }); + + const mod = require('test-module'); + console.log(JSON.stringify(mod)); + `, + }); + + await using proc = Bun.spawn({ + cmd: [bunExe(), "index.js"], + env: bunEnv, + cwd: String(dir), + stderr: "pipe", + stdout: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); + + expect(exitCode).toBe(0); + expect(stderr).toBe(""); + expect(stdout).toContain('{"value":42}'); +}); diff --git a/test/regression/issue/21311.test.ts b/test/regression/issue/21311.test.ts index 214ea70879..7b9cfb81cf 100644 --- a/test/regression/issue/21311.test.ts +++ b/test/regression/issue/21311.test.ts @@ -6,102 +6,90 @@ const databaseUrl = getSecret("TLS_POSTGRES_DATABASE_URL"); describe("postgres batch insert crash fix #21311", () => { test("should handle large batch inserts without crashing", 
async () => { - const sql = postgres(databaseUrl!); - try { - // Create a test table - await sql`DROP TABLE IF EXISTS test_batch_21311`; - await sql`CREATE TABLE test_batch_21311 ( + await using sql = postgres(databaseUrl!); + // Create a test table + await sql`DROP TABLE IF EXISTS test_batch_21311`; + await sql`CREATE TABLE test_batch_21311 ( id serial PRIMARY KEY, data VARCHAR(100) );`; - // Generate a large batch of data to insert - const batchSize = 100; - const values = Array.from({ length: batchSize }, (_, i) => `('batch_data_${i}')`).join(", "); + // Generate a large batch of data to insert + const batchSize = 100; + const values = Array.from({ length: batchSize }, (_, i) => `('batch_data_${i}')`).join(", "); - // This query would previously crash with "index out of bounds: index 0, len 0" - // on Windows when the fields metadata wasn't properly initialized - const insertQuery = `INSERT INTO test_batch_21311 (data) VALUES ${values} RETURNING id, data`; + // This query would previously crash with "index out of bounds: index 0, len 0" + // on Windows when the fields metadata wasn't properly initialized + const insertQuery = `INSERT INTO test_batch_21311 (data) VALUES ${values} RETURNING id, data`; - const results = await sql.unsafe(insertQuery); + const results = await sql.unsafe(insertQuery); - expect(results).toHaveLength(batchSize); - expect(results[0]).toHaveProperty("id"); - expect(results[0]).toHaveProperty("data"); - expect(results[0].data).toBe("batch_data_0"); - expect(results[batchSize - 1].data).toBe(`batch_data_${batchSize - 1}`); + expect(results).toHaveLength(batchSize); + expect(results[0]).toHaveProperty("id"); + expect(results[0]).toHaveProperty("data"); + expect(results[0].data).toBe("batch_data_0"); + expect(results[batchSize - 1].data).toBe(`batch_data_${batchSize - 1}`); - // Cleanup - await sql`DROP TABLE test_batch_21311`; - } finally { - await sql.end(); - } + // Cleanup + await sql`DROP TABLE test_batch_21311`; }); test("should handle 
empty result sets without crashing", async () => { - const sql = postgres(databaseUrl!); - try { - // Create a temporary table that will return no results - await sql`DROP TABLE IF EXISTS test_empty_21311`; - await sql`CREATE TABLE test_empty_21311 ( + await using sql = postgres(databaseUrl!); + // Create a temporary table that will return no results + await sql`DROP TABLE IF EXISTS test_empty_21311`; + await sql`CREATE TABLE test_empty_21311 ( id serial PRIMARY KEY, data VARCHAR(100) );`; - // Query that returns no rows - this tests the empty fields scenario - const results = await sql`SELECT * FROM test_empty_21311 WHERE id = -1`; + // Query that returns no rows - this tests the empty fields scenario + const results = await sql`SELECT * FROM test_empty_21311 WHERE id = -1`; - expect(results).toHaveLength(0); + expect(results).toHaveLength(0); - // Cleanup - await sql`DROP TABLE test_empty_21311`; - } finally { - await sql.end(); - } + // Cleanup + await sql`DROP TABLE test_empty_21311`; }); test("should handle mixed date formats in batch operations", async () => { - const sql = postgres(databaseUrl!); - try { - // Create test table - await sql`DROP TABLE IF EXISTS test_concurrent_21311`; - await sql`CREATE TABLE test_concurrent_21311 ( + await using sql = postgres(databaseUrl!); + // Create test table + await sql`DROP TABLE IF EXISTS test_concurrent_21311`; + await sql`CREATE TABLE test_concurrent_21311 ( id serial PRIMARY KEY, should_be_null INT, date DATE NULL );`; - // Run multiple concurrent batch operations - // This tests potential race conditions in field metadata setup - const concurrentOperations = Array.from({ length: 100 }, async (_, threadId) => { - const batchSize = 20; - const values = Array.from( - { length: batchSize }, - (_, i) => `(${i % 2 === 0 ? 1 : 0}, ${i % 2 === 0 ? 
"'infinity'::date" : "NULL"})`, - ).join(", "); + // Run multiple concurrent batch operations + // This tests potential race conditions in field metadata setup + const concurrentOperations = Array.from({ length: 100 }, async (_, threadId) => { + const batchSize = 20; + const values = Array.from( + { length: batchSize }, + (_, i) => `(${i % 2 === 0 ? 1 : 0}, ${i % 2 === 0 ? "'infinity'::date" : "NULL"})`, + ).join(", "); - const insertQuery = `INSERT INTO test_concurrent_21311 (should_be_null, date) VALUES ${values} RETURNING id, should_be_null, date`; - return sql.unsafe(insertQuery); - }); + const insertQuery = `INSERT INTO test_concurrent_21311 (should_be_null, date) VALUES ${values} RETURNING id, should_be_null, date`; + return sql.unsafe(insertQuery); + }); - await Promise.all(concurrentOperations); + await Promise.all(concurrentOperations); - // Run multiple concurrent queries + // Run multiple concurrent queries - const allQueryResults = await sql`SELECT * FROM test_concurrent_21311`; - allQueryResults.forEach((row, i) => { - expect(row.should_be_null).toBeNumber(); - if (row.should_be_null) { - expect(row.date).toBeDefined(); - expect(row.date?.getTime()).toBeNaN(); - } else { - expect(row.date).toBeNull(); - } - }); - // Cleanup - await sql`DROP TABLE test_concurrent_21311`; - } finally { - await sql.end(); - } + const allQueryResults = await sql`SELECT * FROM test_concurrent_21311`; + allQueryResults.forEach((row, i) => { + expect(row.should_be_null).toBeNumber(); + if (row.should_be_null) { + expect(row.date).toBeDefined(); + expect(row.date?.getTime()).toBeNaN(); + } else { + expect(row.date).toBeNull(); + } + }); + // Cleanup + await sql`DROP TABLE test_concurrent_21311`; }); }); diff --git a/test/regression/issue/22475.test.ts b/test/regression/issue/22475.test.ts new file mode 100644 index 0000000000..8b5c1f1837 --- /dev/null +++ b/test/regression/issue/22475.test.ts @@ -0,0 +1,50 @@ +import { expect, test } from "bun:test"; + +test("issue #22475: 
cookie.isExpired() should return true for Unix epoch (0)", () => { + const cookies = ["a=; Expires=Thu, 01 Jan 1970 00:00:00 GMT", "b=; Expires=Thu, 01 Jan 1970 00:00:01 GMT"]; + + const results = []; + for (const _cookie of cookies) { + const cookie = new Bun.Cookie(_cookie); + results.push({ + name: cookie.name, + expires: cookie.expires, + isExpired: cookie.isExpired(), + }); + } + + // Cookie 'a' with Unix epoch (0) should be expired + expect(results[0].name).toBe("a"); + expect(results[0].expires).toBeDate(); + expect(results[0].expires?.getTime()).toBe(0); + expect(results[0].isExpired).toBe(true); + + // Cookie 'b' with 1 second after Unix epoch should also be expired + expect(results[1].name).toBe("b"); + expect(results[1].expires).toBeDate(); + expect(results[1].expires?.getTime()).toBe(1000); + expect(results[1].isExpired).toBe(true); +}); + +test("cookie.isExpired() for various edge cases", () => { + // Test Unix epoch (0) - should be expired + const epochCookie = new Bun.Cookie("test", "value", { expires: 0 }); + expect(epochCookie.expires).toBeDate(); + expect(epochCookie.expires?.getTime()).toBe(0); + expect(epochCookie.isExpired()).toBe(true); + + // Test negative timestamp - should be expired + const negativeCookie = new Bun.Cookie("test", "value", { expires: -1 }); + expect(negativeCookie.expires).toBeDate(); + expect(negativeCookie.expires?.getTime()).toBe(-1000); + expect(negativeCookie.isExpired()).toBe(true); + + // Test session cookie (no expires) - should not be expired + const sessionCookie = new Bun.Cookie("test", "value"); + expect(sessionCookie.expires).toBeUndefined(); + expect(sessionCookie.isExpired()).toBe(false); + + // Test future date - should not be expired + const futureCookie = new Bun.Cookie("test", "value", { expires: Date.now() + 86400000 }); + expect(futureCookie.isExpired()).toBe(false); +}); diff --git a/test/regression/issue/22481.test.ts b/test/regression/issue/22481.test.ts new file mode 100644 index 
0000000000..d207b84876 --- /dev/null +++ b/test/regression/issue/22481.test.ts @@ -0,0 +1,101 @@ +import { expect, test } from "bun:test"; +import { createConnection, createServer } from "node:net"; + +test("client socket can write Uint8Array (issue #22481)", async () => { + const server = createServer(socket => { + socket.on("data", data => { + // Echo back what we received + socket.write(data); + socket.end(); + }); + }); + + await new Promise(resolve => { + server.listen(0, "127.0.0.1", () => resolve()); + }); + + const port = (server.address() as any).port; + + const testData = "Hello from Uint8Array!"; + const u8 = new Uint8Array(testData.split("").map(x => x.charCodeAt(0))); + + // Test with Uint8Array + { + const received = await new Promise((resolve, reject) => { + const client = createConnection(port, "127.0.0.1", () => { + // Write Uint8Array directly + client.write(u8, err => { + if (err) reject(err); + }); + }); + + let data = ""; + client.on("data", chunk => { + data += chunk.toString(); + }); + + client.on("end", () => { + resolve(data); + }); + + client.on("error", reject); + }); + + expect(received).toBe(testData); + } + + // Test with Buffer.from(Uint8Array) for comparison + { + const received = await new Promise((resolve, reject) => { + const client = createConnection(port, "127.0.0.1", () => { + // Write Buffer created from Uint8Array + client.write(Buffer.from(u8), err => { + if (err) reject(err); + }); + }); + + let data = ""; + client.on("data", chunk => { + data += chunk.toString(); + }); + + client.on("end", () => { + resolve(data); + }); + + client.on("error", reject); + }); + + expect(received).toBe(testData); + } + + // Test with other TypedArrays (Float32Array view) + { + const float32 = new Float32Array([1.5, 2.5]); + const u8view = new Uint8Array(float32.buffer); + + const received = await new Promise((resolve, reject) => { + const client = createConnection(port, "127.0.0.1", () => { + client.write(u8view, err => { + if (err) 
reject(err); + }); + }); + + const chunks: Buffer[] = []; + client.on("data", chunk => { + chunks.push(chunk); + }); + + client.on("end", () => { + resolve(Buffer.concat(chunks)); + }); + + client.on("error", reject); + }); + + // Check that we received the same bytes back + expect(received).toEqual(Buffer.from(u8view)); + } + + server.close(); +}); diff --git a/test/regression/issue/246-child_process_object_assign_compatibility.test.ts b/test/regression/issue/246-child_process_object_assign_compatibility.test.ts new file mode 100644 index 0000000000..256f756e77 --- /dev/null +++ b/test/regression/issue/246-child_process_object_assign_compatibility.test.ts @@ -0,0 +1,63 @@ +// Regression test for https://github.com/microlinkhq/youtube-dl-exec/issues/246 +// Child process stdio properties should be enumerable for Object.assign() compatibility + +import { expect, test } from "bun:test"; +import { spawn } from "child_process"; + +test("child process stdio properties should be enumerable for Object.assign()", () => { + const child = spawn(process.execPath, ["-e", 'console.log("hello")']); + + // The real issue: stdio properties must be enumerable for Object.assign() to work + // This is what libraries like tinyspawn depend on + expect(Object.keys(child)).toContain("stdin"); + expect(Object.keys(child)).toContain("stdout"); + expect(Object.keys(child)).toContain("stderr"); + expect(Object.keys(child)).toContain("stdio"); + + // Property descriptors should show enumerable: true + for (const key of ["stdin", "stdout", "stderr", "stdio"] as const) { + expect(Object.getOwnPropertyDescriptor(child, key)?.enumerable).toBe(true); + } +}); + +test("Object.assign should copy child process stdio properties", () => { + const child = spawn(process.execPath, ["-e", 'console.log("hello")']); + + // This is what tinyspawn does: Object.assign(promise, childProcess) + const merged = {}; + Object.assign(merged, child); + + // The merged object should have the stdio properties + 
expect(merged.stdout).toBeTruthy(); + expect(merged.stderr).toBeTruthy(); + expect(merged.stdin).toBeTruthy(); + expect(merged.stdio).toBeTruthy(); + + // Should maintain stream functionality + expect(typeof merged.stdout.pipe).toBe("function"); + expect(typeof merged.stdout.on).toBe("function"); +}); + +test("tinyspawn-like library usage should work", () => { + // Simulate the exact pattern from tinyspawn library + let childProcess; + const promise = new Promise(resolve => { + childProcess = spawn(process.execPath, ["-e", 'console.log("test")']); + childProcess.on("exit", () => resolve(childProcess)); + }); + + // This is the critical line that was failing in Bun + const subprocess = Object.assign(promise, childProcess); + + // Should have stdio properties immediately after Object.assign + expect(subprocess.stdout).toBeTruthy(); + expect(subprocess.stderr).toBeTruthy(); + expect(subprocess.stdin).toBeTruthy(); + + // Should still be a Promise + expect(subprocess instanceof Promise).toBe(true); + + // Should have stream methods available + expect(typeof subprocess.stdout.pipe).toBe("function"); + expect(typeof subprocess.stdout.on).toBe("function"); +}); diff --git a/test/regression/issue/compile-outfile-subdirs.test.ts b/test/regression/issue/compile-outfile-subdirs.test.ts new file mode 100644 index 0000000000..9aae572378 --- /dev/null +++ b/test/regression/issue/compile-outfile-subdirs.test.ts @@ -0,0 +1,259 @@ +import { describe, expect, test } from "bun:test"; +import { execSync } from "child_process"; +import { existsSync } from "fs"; +import { bunEnv, bunExe, isWindows, tempDir } from "harness"; +import { join } from "path"; + +describe.if(isWindows)("compile --outfile with subdirectories", () => { + test("places executable in subdirectory with forward slash", async () => { + using dir = tempDir("compile-subdir-forward", { + "app.js": `console.log("Hello from subdirectory!");`, + }); + + // Use forward slash in outfile + const outfile = 
"subdir/nested/app.exe"; + + await using proc = Bun.spawn({ + cmd: [bunExe(), "build", "--compile", join(String(dir), "app.js"), "--outfile", outfile], + env: bunEnv, + cwd: String(dir), + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); + + expect(exitCode).toBe(0); + expect(stderr).toBe(""); + + // Check that the file exists in the subdirectory + const expectedPath = join(String(dir), "subdir", "nested", "app.exe"); + expect(existsSync(expectedPath)).toBe(true); + + // Run the executable to verify it works + await using exe = Bun.spawn({ + cmd: [expectedPath], + env: bunEnv, + stdout: "pipe", + }); + + const exeOutput = await exe.stdout.text(); + expect(exeOutput.trim()).toBe("Hello from subdirectory!"); + }); + + test("places executable in subdirectory with backslash", async () => { + using dir = tempDir("compile-subdir-backslash", { + "app.js": `console.log("Hello with backslash!");`, + }); + + // Use backslash in outfile + const outfile = "subdir\\nested\\app.exe"; + + await using proc = Bun.spawn({ + cmd: [bunExe(), "build", "--compile", join(String(dir), "app.js"), "--outfile", outfile], + env: bunEnv, + cwd: String(dir), + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); + + expect(exitCode).toBe(0); + expect(stderr).toBe(""); + + // Check that the file exists in the subdirectory + const expectedPath = join(String(dir), "subdir", "nested", "app.exe"); + expect(existsSync(expectedPath)).toBe(true); + }); + + test("creates parent directories if they don't exist", async () => { + using dir = tempDir("compile-create-dirs", { + "app.js": `console.log("Created directories!");`, + }); + + // Use a deep nested path that doesn't exist yet + const outfile = "a/b/c/d/e/app.exe"; + + await using proc = Bun.spawn({ + cmd: [bunExe(), "build", "--compile", 
join(String(dir), "app.js"), "--outfile", outfile], + env: bunEnv, + cwd: String(dir), + stdout: "pipe", + stderr: "pipe", + }); + + const exitCode = await proc.exited; + expect(exitCode).toBe(0); + + // Check that the file and all directories were created + const expectedPath = join(String(dir), "a", "b", "c", "d", "e", "app.exe"); + expect(existsSync(expectedPath)).toBe(true); + }); + + test.if(isWindows)("Windows metadata works with subdirectories", async () => { + using dir = tempDir("compile-metadata-subdir", { + "app.js": `console.log("App with metadata!");`, + }); + + const outfile = "output/bin/app.exe"; + + await using proc = Bun.spawn({ + cmd: [ + bunExe(), + "build", + "--compile", + join(String(dir), "app.js"), + "--outfile", + outfile, + "--windows-title", + "Subdirectory App", + "--windows-version", + "1.2.3.4", + "--windows-description", + "App in a subdirectory", + ], + env: bunEnv, + cwd: String(dir), + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); + + expect(exitCode).toBe(0); + expect(stderr).toBe(""); + + const expectedPath = join(String(dir), "output", "bin", "app.exe"); + expect(existsSync(expectedPath)).toBe(true); + + // Verify metadata was set correctly + const getMetadata = (field: string) => { + try { + return execSync(`powershell -Command "(Get-ItemProperty '${expectedPath}').VersionInfo.${field}"`, { + encoding: "utf8", + }).trim(); + } catch { + return ""; + } + }; + + expect(getMetadata("ProductName")).toBe("Subdirectory App"); + expect(getMetadata("FileDescription")).toBe("App in a subdirectory"); + expect(getMetadata("ProductVersion")).toBe("1.2.3.4"); + }); + + test("fails gracefully when parent is a file", async () => { + using dir = tempDir("compile-parent-is-file", { + "app.js": `console.log("Won't compile!");`, + "blocked": "This is a file, not a directory", + }); + + // Try to use blocked/app.exe where blocked is a file + 
const outfile = "blocked/app.exe"; + + await using proc = Bun.spawn({ + cmd: [bunExe(), "build", "--compile", join(String(dir), "app.js"), "--outfile", outfile], + env: bunEnv, + cwd: String(dir), + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); + + expect(exitCode).not.toBe(0); + // Should get an error about the path + expect(stderr.toLowerCase()).toContain("notdir"); + }); + + test("works with . and .. in paths", async () => { + using dir = tempDir("compile-relative-paths", { + "src/app.js": `console.log("Relative paths work!");`, + }); + + // Use relative path with . and .. + const outfile = "./output/../output/./app.exe"; + + await using proc = Bun.spawn({ + cmd: [bunExe(), "build", "--compile", join(String(dir), "src", "app.js"), "--outfile", outfile], + env: bunEnv, + cwd: String(dir), + stdout: "pipe", + stderr: "pipe", + }); + + const exitCode = await proc.exited; + expect(exitCode).toBe(0); + + // Should normalize to output/app.exe + const expectedPath = join(String(dir), "output", "app.exe"); + expect(existsSync(expectedPath)).toBe(true); + }); +}); + +describe("Bun.build() compile with subdirectories", () => { + test.if(isWindows)("places executable in subdirectory via API", async () => { + using dir = tempDir("api-compile-subdir", { + "app.js": `console.log("API subdirectory test!");`, + }); + + const result = await Bun.build({ + entrypoints: [join(String(dir), "app.js")], + compile: { + outfile: "dist/bin/app.exe", + }, + outdir: String(dir), + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + // The output path should include the subdirectories + expect(result.outputs[0].path).toContain("dist"); + expect(result.outputs[0].path).toContain("bin"); + + // File should exist at the expected location + const expectedPath = join(String(dir), "dist", "bin", "app.exe"); + expect(existsSync(expectedPath)).toBe(true); + 
}); + + test.if(isWindows)("API with Windows metadata and subdirectories", async () => { + using dir = tempDir("api-metadata-subdir", { + "app.js": `console.log("API with metadata!");`, + }); + + const result = await Bun.build({ + entrypoints: [join(String(dir), "app.js")], + compile: { + outfile: "build/release/app.exe", + windows: { + title: "API Subdirectory App", + version: "2.0.0.0", + publisher: "Test Publisher", + }, + }, + outdir: String(dir), + }); + + expect(result.success).toBe(true); + + const expectedPath = join(String(dir), "build", "release", "app.exe"); + expect(existsSync(expectedPath)).toBe(true); + + // Verify metadata + const getMetadata = (field: string) => { + try { + return execSync(`powershell -Command "(Get-ItemProperty '${expectedPath}').VersionInfo.${field}"`, { + encoding: "utf8", + }).trim(); + } catch { + return ""; + } + }; + + expect(getMetadata("ProductName")).toBe("API Subdirectory App"); + expect(getMetadata("CompanyName")).toBe("Test Publisher"); + expect(getMetadata("ProductVersion")).toBe("2.0.0.0"); + }); +});