diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index a51b691511..971607eaa0 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -128,11 +128,8 @@ const testPlatforms = [ { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" }, { os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" }, { os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", tier: "oldest" }, { os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" }, - { os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", tier: "oldest" }, { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" }, diff --git a/.claude/commands/upgrade-nodejs.md b/.claude/commands/upgrade-nodejs.md new file mode 100644 index 0000000000..6ba1832ce6 --- /dev/null +++ b/.claude/commands/upgrade-nodejs.md @@ -0,0 +1,92 @@ +# Upgrading Bun's Self-Reported Node.js Version + +This guide explains how to upgrade the Node.js version that Bun reports for compatibility with Node.js packages and native addons. + +## Overview + +Bun reports a Node.js version for compatibility with the Node.js ecosystem. This affects: +- `process.version` output +- Node-API (N-API) compatibility +- Native addon ABI compatibility +- V8 API compatibility for addons using V8 directly + +## Files That Always Need Updates + +### 1. Bootstrap Scripts +- `scripts/bootstrap.sh` - Update `NODEJS_VERSION=` +- `scripts/bootstrap.ps1` - Update `$NODEJS_VERSION =` + +### 2. CMake Configuration +- `cmake/Options.cmake` + - `NODEJS_VERSION` - The Node.js version string (e.g., "24.3.0") + - `NODEJS_ABI_VERSION` - The ABI version number (find using command below) + +### 3. Version Strings +- `src/bun.js/bindings/BunProcess.cpp` + - Update `Bun__versions_node` with the Node.js version + - Update `Bun__versions_v8` with the V8 version (find using command below) + +### 4. N-API Version +- `src/napi/js_native_api.h` + - Update `NAPI_VERSION` define (check Node.js release notes) + +## Files That May Need Updates + +Only check these if the build fails or tests crash after updating version numbers: +- V8 compatibility files in `src/bun.js/bindings/v8/` (if V8 API changed) +- Test files (if Node.js requires newer C++ standard) + +## Quick Commands to Find Version Info + +```bash +# Get latest Node.js version info +curl -s https://nodejs.org/dist/index.json | jq '.[0]' + +# Get V8 version for a specific Node.js version (replace v24.3.0) +curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep V8_VERSION + +# Get ABI version for a specific Node.js version +curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep NODE_MODULE_VERSION + +# Or use the ABI registry +curl -s https://raw.githubusercontent.com/nodejs/node/main/doc/abi_version_registry.json | jq '.NODE_MODULE_VERSION.""' +``` + +## Update Process + +1. 
**Gather version info** using the commands above +2. **Update the required files** listed in the sections above +3. **Build and test**: + ```bash + bun bd + bun bd -e "console.log(process.version)" + bun bd -e "console.log(process.versions.v8)" + bun bd test test/v8/v8.test.ts + bun bd test test/napi/napi.test.ts + ``` + +4. **Check for V8 API changes** only if build fails or tests crash: + - Compare v8-function-callback.h between versions + - Check v8-internal.h for Isolate size changes + - Look for new required APIs in build errors + +## If Build Fails or Tests Crash + +The V8 API rarely has breaking changes between minor Node.js versions. If you encounter issues: +1. Check build errors for missing symbols or type mismatches +2. Compare V8 headers between old and new Node.js versions +3. Most issues can be resolved by implementing missing functions or adjusting structures + +## Testing Checklist + +- [ ] `process.version` returns correct version +- [ ] `process.versions.v8` returns correct V8 version +- [ ] `process.config.variables.node_module_version` returns correct ABI +- [ ] V8 tests pass +- [ ] N-API tests pass + +## Notes + +- Most upgrades only require updating version numbers +- Major V8 version changes (rare) may require API updates +- The V8 shim implements only APIs used by common native addons \ No newline at end of file diff --git a/.claude/commands/upgrade-webkit.md b/.claude/commands/upgrade-webkit.md new file mode 100644 index 0000000000..c71308bb7f --- /dev/null +++ b/.claude/commands/upgrade-webkit.md @@ -0,0 +1,23 @@ +Upgrade Bun's WebKit fork to the latest upstream version of WebKit. + +To do that: + +- cd vendor/WebKit +- git fetch upstream +- git merge upstream/main +- Fix the merge conflicts +- cd ../../ (back to bun) +- make jsc-build (this will take about 7 minutes) +- While it compiles, in another task review the JSC commits between the last version of WebKit and the new version. Write up a summary of the WebKit changes in a file called "webkit-changes.md" +- bun run build:local (build Bun with the new WebKit and make sure it compiles) +- After making sure it compiles, run some code to make sure things work.
Something like `./build/debug-local/bun-debug --print '42'` should be all you need +- cd vendor/WebKit +- git commit -am "Upgrade WebKit to the latest version" +- git push +- get the commit SHA of your new commit in the vendor/WebKit directory +- cd ../../ (back to bun) +- Update WEBKIT_VERSION in cmake/tools/SetupWebKit.cmake to the commit SHA of your new commit +- git checkout -b bun/webkit-upgrade- +- commit + push (without adding the webkit-changes.md file) +- create PR titled "Upgrade WebKit to the ", paste your webkit-changes.md into the PR description +- delete the webkit-changes.md file diff --git a/.cursor/environment.json b/.cursor/environment.json new file mode 100644 index 0000000000..92e8fb10c7 --- /dev/null +++ b/.cursor/environment.json @@ -0,0 +1,10 @@ +{ + "snapshot": "snapshot-20250706-71021aff-cc0d-4a7f-a468-d443b16c4bf1", + "install": "bun install", + "terminals": [ + { + "name": "bun build", + "command": "bun run build" + } + ] +} diff --git a/.cursor/rules/building-bun.mdc b/.cursor/rules/building-bun.mdc index 764e7c64ef..0a5fa27f2c 100644 --- a/.cursor/rules/building-bun.mdc +++ b/.cursor/rules/building-bun.mdc @@ -1,13 +1,41 @@ --- -description: How to build Bun -globs: +description: +globs: src/**/*.cpp,src/**/*.zig alwaysApply: false --- -# How to build Bun +### Build Commands -Run: +- **Build debug version**: `bun bd` or `bun run build:debug` + - Creates a debug build at `./build/debug/bun-debug` + - Compilation takes ~2.5 minutes +- **Run tests with your debug build**: `bun bd test <file>` + - **CRITICAL**: Never use `bun test` directly - it won't include your changes +- **Run any command with debug build**: `bun bd <command>` -```bash -bun bd +### Run a file + +To run a file, use: + +```sh +bun bd <file> <...args> ``` + +**CRITICAL**: Never use `bun <file>` directly. It will not have your changes. + +### Logging + +`BUN_DEBUG_$(SCOPE)=1` enables debug logs for a specific debug log scope. + +Debug logs look like this: + +```zig +const log = bun.Output.scoped(.${SCOPE}, false); + +// ...later +log("MY DEBUG LOG", .{}); +``` + +### Code Generation + +Code generation happens automatically as part of the build process. There are no commands to run. diff --git a/.cursor/rules/zig-javascriptcore-classes.mdc b/.cursor/rules/zig-javascriptcore-classes.mdc index 965932a988..88636c9752 100644 --- a/.cursor/rules/zig-javascriptcore-classes.mdc +++ b/.cursor/rules/zig-javascriptcore-classes.mdc @@ -1,8 +1,9 @@ --- description: How Zig works with JavaScriptCore bindings generator -globs: +globs: alwaysApply: false --- + # Bun's JavaScriptCore Class Bindings Generator This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC). @@ -24,7 +25,7 @@ The `.classes.ts` files define the JavaScript API using a declarative approach: ```typescript // Example: encoding.classes.ts define({ - name: "TextDecoder", + name: "TextDecoder", constructor: true, JSType: "object", finalize: true, @@ -40,17 +41,18 @@ define({ }, fatal: { // Read-only property - getter: true, + getter: true, }, ignoreBOM: { // Read-only property getter: true, - } - } + }, + }, }); ``` Each class definition specifies: + - The class name - Whether it has a constructor - JavaScript type (object, function, etc.)
@@ -87,7 +89,7 @@ pub const TextDecoder = struct { // Fields }); } - + // Prototype methods - note return type includes JSError pub fn decode( this: *TextDecoder, @@ -96,23 +98,23 @@ pub const TextDecoder = struct { ) bun.JSError!JSC.JSValue { // Implementation } - + // Getters pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue { return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding); } - + pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue { return JSC.JSValue.jsBoolean(this.fatal); } - + // Cleanup - note standard pattern of using deinit/deref fn deinit(this: *TextDecoder) void { // Release any retained resources // Free the pointer at the end. bun.destroy(this); } - + // Finalize - called by JS garbage collector. This should call deinit, or deref if reference counted. pub fn finalize(this: *TextDecoder) void { this.deinit(); @@ -121,6 +123,7 @@ pub const TextDecoder = struct { ``` Key components in the Zig file: + - The struct containing native state - `pub const js = JSC.Codegen.JSTextDecoder` to include generated code - Constructor and methods using `bun.JSError!JSValue` return type for proper error handling @@ -128,6 +131,7 @@ pub const TextDecoder = struct { - Methods matching the JavaScript interface - Getters/setters for properties - Proper resource cleanup pattern with `deinit()` and `finalize()` +- Update `src/bun.js/bindings/generated_classes_list.zig` to include the new class ## Code Generation System @@ -140,6 +144,7 @@ The binding generator produces C++ code that connects JavaScript and Zig: 5. **Property Caching**: Implements the caching system for properties The generated C++ code includes: + - A JSC wrapper class (`JSTextDecoder`) - A prototype class (`JSTextDecoderPrototype`) - A constructor function (`JSTextDecoderConstructor`) @@ -152,28 +157,29 @@ The `CallFrame` object provides access to JavaScript execution context: ```zig pub fn decode( - this: *TextDecoder, + this: *TextDecoder, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame ) bun.JSError!JSC.JSValue { // Get arguments const input = callFrame.argument(0); const options = callFrame.argument(1); - + // Get this value const thisValue = callFrame.thisValue(); - + // Implementation with error handling if (input.isUndefinedOrNull()) { return globalObject.throw("Input cannot be null or undefined", .{}); } - + // Return value or throw error return JSC.JSValue.jsString(globalObject, "result"); } ``` CallFrame methods include: + - `argument(i)`: Get the i-th argument - `argumentCount()`: Get the number of arguments - `thisValue()`: Get the `this` value @@ -201,17 +207,17 @@ JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) { auto throwScope = DECLARE_THROW_SCOPE(vm); JSTextDecoder* thisObject = jsCast<JSTextDecoder*>(JSValue::decode(encodedThisValue)); JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject); - + // Check for cached value and return if present if (JSValue cachedValue = thisObject->m_encoding.get()) return JSValue::encode(cachedValue); - + // Get value from Zig implementation JSC::JSValue result = JSC::JSValue::decode( TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject) ); RETURN_IF_EXCEPTION(throwScope, {}); - + // Store in cache for future access thisObject->m_encoding.set(vm, thisObject, result); RELEASE_AND_RETURN(throwScope, JSValue::encode(result)); @@ -253,7 +259,7 @@ This system provides several key benefits: 1.
**Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values 2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC 3. **Consistent Access**: Zig code can easily get/set these cached JS values -4. **Performance**: Cached values avoid repeated computation or serialization +4. **Performance**: Cached values avoid repeated computation or serialization ### Use Cases @@ -281,7 +287,7 @@ Bun uses a consistent pattern for resource cleanup: pub fn deinit(this: *TextDecoder) void { // Release resources like strings this._encoding.deref(); // String deref pattern - + // Free any buffers if (this.buffer) |buffer| { bun.default_allocator.free(buffer); @@ -312,7 +318,7 @@ Bun uses `bun.JSError!JSValue` return type for proper error handling: ```zig pub fn decode( - this: *TextDecoder, + this: *TextDecoder, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame ) bun.JSError!JSC.JSValue { @@ -320,13 +326,14 @@ pub fn decode( if (callFrame.argumentCount() < 1) { return globalObject.throw("Missing required argument", .{}); } - + // Or returning a success value return JSC.JSValue.jsString(globalObject, "Success!"); } ``` This pattern allows Zig functions to: + 1. Return JavaScript values on success 2. Throw JavaScript exceptions on error 3. Propagate errors automatically through the call stack @@ -339,7 +346,7 @@ The binding system includes robust error handling: // Example of type checking in generated code JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue()); if (UNLIKELY(!thisObject)) { - scope.throwException(lexicalGlobalObject, + scope.throwException(lexicalGlobalObject, Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s)); return {}; } @@ -351,7 +358,7 @@ The binding system creates proper JavaScript prototype chains: 1. **Constructor**: JSTextDecoderConstructor with standard .prototype property 2. **Prototype**: JSTextDecoderPrototype with methods and properties -3. **Instances**: Each JSTextDecoder instance with __proto__ pointing to prototype +3. **Instances**: Each JSTextDecoder instance with `__proto__` pointing to prototype This ensures JavaScript inheritance works as expected: @@ -360,7 +367,7 @@ This ensures JavaScript inheritance works as expected: void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype) { Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition); - + // Set up the prototype chain putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly); ASSERT(inherits(info())); @@ -372,7 +379,7 @@ void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globa The binding system is optimized for performance: 1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects -2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties +2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties 3. **Memory Management**: JSC garbage collection integrated with Zig memory management 4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions @@ -381,6 +388,7 @@ The binding system is optimized for performance: To create a new class binding in Bun: 1.
**Define the class interface** in a `.classes.ts` file: + ```typescript define({ name: "MyClass", @@ -393,12 +401,13 @@ To create a new class binding in Bun: myProperty: { getter: true, cache: true, - } - } + }, + }, }); ``` 2. **Implement the native functionality** in a `.zig` file: + ```zig pub const MyClass = struct { // Generated bindings @@ -409,9 +418,9 @@ To create a new class binding in Bun: // State value: []const u8, - + pub const new = bun.TrivialNew(@This()); - + // Constructor pub fn constructor( globalObject: *JSGlobalObject, @@ -420,7 +429,7 @@ To create a new class binding in Bun: const arg = callFrame.argument(0); // Implementation } - + // Method pub fn myMethod( this: *MyClass, @@ -429,17 +438,17 @@ To create a new class binding in Bun: ) bun.JSError!JSC.JSValue { // Implementation } - + // Getter pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue { return JSC.JSValue.jsString(globalObject, this.value); } - + // Resource cleanup pub fn deinit(this: *MyClass) void { // Clean up resources } - + pub fn finalize(this: *MyClass) void { this.deinit(); bun.destroy(this); @@ -474,11 +483,13 @@ For each Zig class, the system generates: ### 3. Zig Bindings - **External Function Declarations**: + ```zig extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue; ``` - **Cached Value Accessors**: + ```zig pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... } pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... } diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e7850387a0..cf21586338 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,18 +1,5 @@ # Project /.github/CODEOWNERS @Jarred-Sumner -# Build system -/CMakeLists.txt @Electroid -/cmake/*.cmake @Electroid -/scripts/ @Electroid - -# CI -/.buildkite/ @Electroid -/.github/workflows/ @Electroid - -# Debugger protocol -/packages/bun-inspector-protocol/ @Electroid -/packages/bun-debug-adapter-protocol/ @Electroid - # Tests /test/expectations.txt @Jarred-Sumner diff --git a/.github/ISSUE_TEMPLATE/2-bug-report.yml b/.github/ISSUE_TEMPLATE/2-bug-report.yml index a0d51a2bb8..2767cee616 100644 --- a/.github/ISSUE_TEMPLATE/2-bug-report.yml +++ b/.github/ISSUE_TEMPLATE/2-bug-report.yml @@ -12,7 +12,7 @@ body: If you need help or support using Bun, and are not reporting a bug, please join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum. - Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun. + Make sure you are running the [latest](https://bun.com/docs/installation#upgrading) version of Bun. The bug you are experiencing may already have been fixed. Please try to include as much information as possible. diff --git a/.github/ISSUE_TEMPLATE/3-typescript-bug-report.yml b/.github/ISSUE_TEMPLATE/3-typescript-bug-report.yml index 3913e25272..bb85f8e6be 100644 --- a/.github/ISSUE_TEMPLATE/3-typescript-bug-report.yml +++ b/.github/ISSUE_TEMPLATE/3-typescript-bug-report.yml @@ -2,44 +2,44 @@ name: 🇹 TypeScript Type Bug Report description: Report an issue with TypeScript types labels: [bug, types] body: -- type: markdown - attributes: - value: | - Thank you for submitting a bug report. It helps make Bun better. + - type: markdown + attributes: + value: | + Thank you for submitting a bug report. It helps make Bun better. 
- If you need help or support using Bun, and are not reporting a bug, please - join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum. + If you need help or support using Bun, and are not reporting a bug, please + join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum. - Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun. - The bug you are experiencing may already have been fixed. + Make sure you are running the [latest](https://bun.com/docs/installation#upgrading) version of Bun. + The bug you are experiencing may already have been fixed. - Please try to include as much information as possible. + Please try to include as much information as possible. -- type: input - attributes: - label: What version of Bun is running? - description: Copy the output of `bun --revision` -- type: input - attributes: - label: What platform is your computer? - description: | - For MacOS and Linux: copy the output of `uname -mprs` - For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console -- type: textarea - attributes: - label: What steps can reproduce the bug? - description: Explain the bug and provide a code snippet that can reproduce it. - validations: - required: true -- type: textarea - attributes: - label: What is the expected behavior? - description: If possible, please provide text instead of a screenshot. -- type: textarea - attributes: - label: What do you see instead? - description: If possible, please provide text instead of a screenshot. -- type: textarea - attributes: - label: Additional information - description: Is there anything else you think we should know? + - type: input + attributes: + label: What version of Bun is running? + description: Copy the output of `bun --revision` + - type: input + attributes: + label: What platform is your computer? + description: | + For MacOS and Linux: copy the output of `uname -mprs` + For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console + - type: textarea + attributes: + label: What steps can reproduce the bug? + description: Explain the bug and provide a code snippet that can reproduce it. + validations: + required: true + - type: textarea + attributes: + label: What is the expected behavior? + description: If possible, please provide text instead of a screenshot. + - type: textarea + attributes: + label: What do you see instead? + description: If possible, please provide text instead of a screenshot. + - type: textarea + attributes: + label: Additional information + description: Is there anything else you think we should know? 
diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 753e639934..8eb3e39290 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -46,6 +46,8 @@ jobs: run: | bun scripts/zig-remove-unreferenced-top-level-decls.ts src/ zig fmt src + bun scripts/sortImports src + zig fmt src - name: Commit uses: stefanzweifel/git-auto-commit-action@v5 with: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9a1f110a78..a3db1518f4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -202,7 +202,7 @@ jobs: body: | Update `bun-types` version to ${{ steps.bun-version.outputs.BUN_VERSION }} - https://bun.sh/blog/${{ env.BUN_VERSION }} + https://bun.com/blog/${{ env.BUN_VERSION }} push-to-fork: oven-sh/DefinitelyTyped branch: ${{env.BUN_VERSION}} docker: diff --git a/.vscode/launch.json b/.vscode/launch.json index e3b39ae920..9cc2d04820 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -33,28 +33,6 @@ "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", }, }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [file] --only", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "--only", "${file}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "1", - "BUN_DEBUG_jest": "1", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, { "type": "lldb", "name": "Attach", @@ -69,150 +47,6 @@ "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", }, }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [file] (fast)", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "${file}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "0", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [file] (verbose)", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "${file}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "0", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": 
"launch", - "name": "bun test [file] --watch", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "--watch", "${file}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [file] --hot", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "--hot", "${file}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [file] --inspect", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "${file}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - "BUN_INSPECT": "ws://localhost:0/?wait=1", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [file] --inspect-brk", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "${file}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - "BUN_INSPECT": "ws://localhost:0/?break=1", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, // bun run [file] { "type": "lldb", @@ -236,150 +70,6 @@ "/webkitbuild/.WTF/Headers": 
"${workspaceFolder}/vendor/WebKit/Source/WTF", }, }, - { - "type": "lldb", - "request": "launch", - "name": "bun run [file] (fast)", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["run", "${file}"], - "cwd": "${fileDirname}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "0", - "BUN_DEBUG_IncrementalGraph": "1", - "BUN_DEBUG_Bake": "1", - "BUN_DEBUG_reload_file_list": "1", - "GOMAXPROCS": "1", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun run [file] (verbose)", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["run", "${file}"], - "cwd": "${fileDirname}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "0", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun run [file] --watch", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["run", "--watch", "${file}"], - "cwd": "${fileDirname}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun run [file] --hot", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["run", "--hot", "${file}"], - "cwd": "${fileDirname}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun run [file] --inspect", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["run", "${file}"], - "cwd": "${fileDirname}", - "env": { - "FORCE_COLOR": "0", - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - "BUN_INSPECT": "ws://localhost:0/?wait=1", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - 
"/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun run [file] --inspect-brk", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["run", "${file}"], - "cwd": "${fileDirname}", - "env": { - "FORCE_COLOR": "0", - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - "BUN_INSPECT": "ws://localhost:0/?break=1", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, // bun test [...] { "type": "lldb", @@ -403,150 +93,6 @@ "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", }, }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [...] (fast)", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "${input:testName}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "0", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [...] (verbose)", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "${input:testName}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [...] 
--watch", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "--watch", "${input:testName}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [...] --hot", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "--hot", "${input:testName}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [...] --inspect", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "${input:testName}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - "BUN_INSPECT": "ws://localhost:0/?wait=1", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [...] --inspect-brk", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test", "--timeout=3600000", "${input:testName}"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_DEBUG_jest": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - "BUN_INSPECT": "ws://localhost:0/?break=1", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, // bun exec [...] 
{ "type": "lldb", @@ -591,54 +137,6 @@ "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", }, }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [*] (fast)", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "0", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [*] --inspect", - "program": "${workspaceFolder}/build/debug/bun-debug", - "args": ["test"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - "BUN_INSPECT": "ws://localhost:0/", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, { "type": "lldb", "request": "launch", @@ -660,27 +158,6 @@ "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", }, }, - { - "type": "lldb", - "request": "launch", - "name": "bun test [*] (ci)", - "program": "node", - "args": ["test/runner.node.mjs"], - "cwd": "${workspaceFolder}", - "env": { - "BUN_DEBUG_QUIET_LOGS": "1", - "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - }, - "console": "internalConsole", - "sourceMap": { - // macOS - "/Users/runner/work/_temp/webkit-release/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/Users/runner/work/_temp/webkit-release/WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - // linux - "/webkitbuild/vendor/WebKit": "${workspaceFolder}/vendor/WebKit", - "/webkitbuild/.WTF/Headers": "${workspaceFolder}/vendor/WebKit/Source/WTF", - }, - }, // Windows: bun test [file] { "type": "cppvsdbg", @@ -707,149 +184,6 @@ }, ], }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test --only [file]", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "--only", "${file}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - ], - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [file] (fast)", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "${file}"], - "cwd": "${workspaceFolder}", - 
"environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "0", - }, - ], - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [file] (verbose)", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "${file}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "0", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - ], - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [file] --inspect", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "${file}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - { - "name": "BUN_INSPECT", - "value": "ws://localhost:0/?wait=1", - }, - ], - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [file] --inspect-brk", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "${file}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - { - "name": "BUN_INSPECT", - "value": "ws://localhost:0/?break=1", - }, - ], - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, // Windows: bun run [file] { "type": "cppvsdbg", @@ -897,91 +231,6 @@ }, ], }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun run [file] (verbose)", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["run", "${fileBasename}"], - "cwd": "${fileDirname}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_SYS", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - ], - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun run [file] --inspect", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["run", "${fileBasename}"], - "cwd": "${fileDirname}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - { - "name": "BUN_INSPECT", - "value": "ws://localhost:0/?wait=1", 
- }, - ], - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun run [file] --inspect-brk", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["run", "${fileBasename}"], - "cwd": "${fileDirname}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - { - "name": "BUN_INSPECT", - "value": "ws://localhost:0/?break=1", - }, - ], - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, // Windows: bun test [...] { "type": "cppvsdbg", @@ -1008,174 +257,6 @@ }, ], }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [...] (fast)", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "${input:testName}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "0", - }, - ], - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [...] (verbose)", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "${input:testName}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "0", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - ], - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [...] --watch", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "--watch", "${input:testName}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - ], - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [...] --hot", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "--hot", "${input:testName}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - ], - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [...] 
--inspect", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "${input:testName}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - { - "name": "BUN_INSPECT", - "value": "ws://localhost:0/?wait=1", - }, - ], - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [...] --inspect-brk", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test", "--timeout=3600000", "${input:testName}"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - { - "name": "BUN_INSPECT", - "value": "ws://localhost:0/?break=1", - }, - ], - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, // Windows: bun exec [...] { "type": "cppvsdbg", @@ -1220,92 +301,6 @@ }, ], }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [*] (fast)", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "0", - }, - ], - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [*] --inspect", - "program": "${workspaceFolder}/build/debug/bun-debug.exe", - "args": ["test"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - { - "name": "BUN_INSPECT", - "value": "ws://localhost:0/", - }, - ], - "serverReadyAction": { - "pattern": "https://debug.bun.sh/#localhost:([0-9]+)/", - "uriFormat": "https://debug.bun.sh/#ws://localhost:%s/", - "action": "openExternally", - }, - }, - { - "type": "cppvsdbg", - "sourceFileMap": { - "D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source", - }, - "request": "launch", - "name": "Windows: bun test [*] (ci)", - "program": "node", - "args": ["test/runner.node.mjs"], - "cwd": "${workspaceFolder}", - "environment": [ - { - "name": "BUN_DEBUG_QUIET_LOGS", - "value": "1", - }, - { - "name": "BUN_DEBUG_jest", - "value": "1", - }, - { - "name": "BUN_GARBAGE_COLLECTOR_LEVEL", - "value": "2", - }, - ], - "console": "internalConsole", - // Don't pause when the GC runs while the debugger is open. 
- }, { "type": "bun", "name": "[JS] bun test [file]", diff --git a/AGENTS.md b/AGENTS.md deleted file mode 100644 index 9fe2abe4b7..0000000000 --- a/AGENTS.md +++ /dev/null @@ -1,36 +0,0 @@ -## bun tests - -**IMPORTANT**: use the `bun bd` command instead of the `bun` command. For example: - -✅ Good - -```sh -bun bd test internal/ban-words.test.ts -bun bd ./foo.ts -``` - -The `bun bd` command runs the DEBUG build. If you forget to run the debug build, your changes will not be reflected.. - -### Run a file - -To run a file, you can use the `bun bd ` command. - -```sh -bun bd ./foo.ts -``` - -### Run tests - -To run a single test, you need to use the `bun bd test ` command. - -```sh -bun bd test internal/ban-words.test.ts -``` - -You must ALWAYS make sure to pass a file path to the `bun bd test ` command. DO NOT try to run ALL the tests at once unless you're in a specific subdirectory. - -### Run a Node.js test - -```sh -bun bd --silent node:test test-fs-link -``` diff --git a/AGENTS.md b/AGENTS.md new file mode 120000 index 0000000000..681311eb9c --- /dev/null +++ b/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000000..703c9bfda5 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,245 @@ +This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed for speed, with a bundler, test runner, and Node.js-compatible package manager. It's written primarily in Zig with C++ for JavaScriptCore integration, powered by WebKit's JavaScriptCore engine. + +## Building and Running Bun + +### Build Commands + +- **Build debug version**: `bun bd` or `bun run build:debug` + - Creates a debug build at `./build/debug/bun-debug` + - Compilation takes ~2.5 minutes +- **Run tests with your debug build**: `bun bd test ` + - **CRITICAL**: Never use `bun test` directly - it won't include your changes +- **Run any command with debug build**: `bun bd ` + +### Other Build Variants + +- `bun run build:release` - Release build + +Address sanitizer is enabled by default in debug builds of Bun. + +## Testing + +### Running Tests + +- **Single test file**: `bun bd test test/js/bun/http/serve.test.ts` +- **Fuzzy match test file**: `bun bd test http/serve.test.ts` +- **With filter**: `bun bd test test/js/bun/http/serve.test.ts -t "should handle"` + +### Test Organization + +- `test/js/bun/` - Bun-specific API tests (http, crypto, ffi, shell, etc.) +- `test/js/node/` - Node.js compatibility tests +- `test/js/web/` - Web API tests (fetch, WebSocket, streams, etc.) +- `test/cli/` - CLI command tests (install, run, test, etc.) 
+- `test/regression/issue/` - Regression tests (create one per bug fix) +- `test/bundler/` - Bundler and transpiler tests +- `test/integration/` - End-to-end integration tests +- `test/napi/` - N-API compatibility tests +- `test/v8/` - V8 C++ API compatibility tests + +### Writing Tests + +Tests use Bun's Jest-compatible test runner with proper test fixtures: + +```typescript +import { test, expect } from "bun:test"; +import { bunEnv, bunExe, tempDirWithFiles } from "harness"; + +test("my feature", async () => { + // Create temp directory with test files + const dir = tempDirWithFiles("test-prefix", { + "index.js": `console.log("hello");`, + }); + + // Spawn Bun process + await using proc = Bun.spawn({ + cmd: [bunExe(), "index.js"], + env: bunEnv, + cwd: dir, + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + expect(exitCode).toBe(0); + expect(stdout).toBe("hello\n"); +}); +``` + +## Code Architecture + +### Language Structure + +- **Zig code** (`src/*.zig`): Core runtime, JavaScript bindings, package manager +- **C++ code** (`src/bun.js/bindings/*.cpp`): JavaScriptCore bindings, Web APIs +- **TypeScript** (`src/js/`): Built-in JavaScript modules with special syntax (see JavaScript Modules section) +- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources + +### Core Source Organization + +#### Runtime Core (`src/`) + +- `bun.zig` - Main entry point +- `cli.zig` - CLI command orchestration +- `js_parser.zig`, `js_lexer.zig`, `js_printer.zig` - JavaScript parsing/printing +- `transpiler.zig` - Wrapper around js_parser with sourcemap support +- `resolver/` - Module resolution system +- `allocators/` - Custom memory allocators for performance + +#### JavaScript Runtime (`src/bun.js/`) + +- `bindings/` - C++ JavaScriptCore bindings + - Generated classes from `.classes.ts` files + - Manual bindings for complex APIs +- `api/` - Bun-specific APIs + - `server.zig` - HTTP server implementation + - `FFI.zig` - Foreign Function Interface + - `crypto.zig` - Cryptographic operations + - `glob.zig` - File pattern matching +- `node/` - Node.js compatibility layer + - Module implementations (fs, path, crypto, etc.) + - Process and Buffer APIs +- `webcore/` - Web API implementations + - `fetch.zig` - Fetch API + - `streams.zig` - Web Streams + - `Blob.zig`, `Response.zig`, `Request.zig` +- `event_loop/` - Event loop and task management + +#### Build Tools & Package Manager + +- `src/bundler/` - JavaScript bundler + - Advanced tree-shaking + - CSS processing + - HTML handling +- `src/install/` - Package manager + - `lockfile/` - Lockfile handling + - `npm.zig` - npm registry client + - `lifecycle_script_runner.zig` - Package scripts + +#### Other Key Components + +- `src/shell/` - Cross-platform shell implementation +- `src/css/` - CSS parser and processor +- `src/http/` - HTTP client implementation + - `websocket_client/` - WebSocket client (including deflate support) +- `src/sql/` - SQL database integrations +- `src/bake/` - Server-side rendering framework + +### JavaScript Class Implementation (C++) + +When implementing JavaScript classes in C++: + +1. Create three classes if there's a public constructor: + + - `class Foo : public JSC::JSDestructibleObject` (if has C++ fields) + - `class FooPrototype : public JSC::JSNonFinalObject` + - `class FooConstructor : public JSC::InternalFunction` + +2. Define properties using HashTableValue arrays +3. 
Add IsoSubspaces for classes with C++ fields
+4. Cache structures in ZigGlobalObject

+## Development Workflow

+### Code Formatting

+- `bun run prettier` - Format JS/TS files
+- `bun run zig-format` - Format Zig files
+- `bun run clang-format` - Format C++ files

+### Watching for Changes

+- `bun run watch` - Incremental Zig compilation with error checking
+- `bun run watch-windows` - Windows-specific watch mode

+### Code Generation

+Code generation happens automatically as part of the build process. The main scripts are:

+- `src/codegen/generate-classes.ts` - Generates Zig & C++ bindings from `*.classes.ts` files
+- `src/codegen/generate-jssink.ts` - Generates stream-related classes
+- `src/codegen/bundle-modules.ts` - Bundles built-in modules like `node:fs`
+- `src/codegen/bundle-functions.ts` - Bundles global functions like `ReadableStream`

+In development, bundled modules can be reloaded without rebuilding Zig by running `bun run build`.

+## JavaScript Modules (`src/js/`)

+Built-in JavaScript modules use special syntax and are organized as:

+- `node/` - Node.js compatibility modules (`node:fs`, `node:path`, etc.)
+- `bun/` - Bun-specific modules (`bun:ffi`, `bun:sqlite`, etc.)
+- `thirdparty/` - NPM modules we replace (like `ws`)
+- `internal/` - Internal modules not exposed to users
+- `builtins/` - Core JavaScript builtins (streams, console, etc.)

+### Special Syntax in Built-in Modules

+1. **`$` prefix** - Access to private properties and JSC intrinsics:

+ ```js
+ const arr = $Array.from(...); // Private global
+ map.$set(...); // Private method
+ const arr2 = $newArrayWithSize(5); // JSC intrinsic
+ ```

+2. **`require()`** - Must use string literals, resolved at compile time:

+ ```js
+ const fs = require("fs"); // Directly loads by numeric ID
+ ```

+3. **Debug helpers**:

+ - `$debug()` - Like console.log but stripped in release builds
+ - `$assert()` - Assertions stripped in release builds
+ - `if($debug) {}` - Check if debug env var is set

+4. **Platform detection**: `process.platform` and `process.arch` are inlined and dead-code eliminated

+5. **Export syntax**: Use `export default` which gets converted to a return statement:
+ ```js
+ export default {
+ readFile,
+ writeFile,
+ };
+ ```

+Note: These are NOT ES modules. The preprocessor converts `$` to `@` (JSC's actual syntax) and handles the special functions.

+## CI

+Bun uses BuildKite for CI. To get the status of a PR, you can use the following command:

+```bash
+bun ci
+```

+## Important Development Notes

+1. **Never use `bun test` or `bun <command>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
+2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.)
+3. **Follow existing code style** - check neighboring files for patterns
+4. **Create regression tests** in `test/regression/issue/` when fixing bugs
+5. **Use absolute paths** - Always use absolute paths in file operations
+6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
+7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
+8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
+9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
+10. 
**Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging

+## Key APIs and Features

+### Bun-Specific APIs

+- **Bun.serve()** - High-performance HTTP server
+- **Bun.spawn()** - Process spawning with better performance than Node.js
+- **Bun.file()** - Fast file I/O operations
+- **Bun.write()** - Unified API for writing to files, stdout, etc.
+- **Bun.$ (Shell)** - Cross-platform shell scripting
+- **Bun.SQLite** - Native SQLite integration
+- **Bun.FFI** - Call native libraries from JavaScript
+- **Bun.Glob** - Fast file pattern matching diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index eb9e48ec3d..f9799ba4ea 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,6 @@ Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts. -If you are using Windows, please refer to [this guide](https://bun.sh/docs/project/building-windows) +If you are using Windows, please refer to [this guide](https://bun.com/docs/project/building-windows) ## Install Dependencies @@ -37,7 +37,7 @@ Before starting, you will need to already have a release build of Bun installed, {% codetabs %} ```bash#Native -$ curl -fsSL https://bun.sh/install | bash +$ curl -fsSL https://bun.com/install | bash ``` ```bash#npm @@ -144,6 +144,14 @@ $ bun bd test foo.test.ts $ bun bd ./foo.ts ``` +Bun generally takes about 2.5 minutes to compile a debug build when there are Zig changes. If your development workflow is "change one line, save, rebuild", you will spend too much time waiting for the build to finish. Instead: + +- Batch up your changes +- Ensure zls is running with incremental watching for LSP errors (if you use VSCode, install the Zig extension, and run `bun run build` once to download Zig, this should just work) +- Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code. +- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, false)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<file>.log`. Debug logs are aggressively removed in release builds. +- src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many). + ## Code generation scripts Several code generation scripts are used during Bun's build process. These are run automatically when changes are made to certain files. @@ -179,6 +187,7 @@ To run a release build from a pull request, you can use the `bun-pr` npm package bunx bun-pr <pr-number> bunx bun-pr <branch> bunx bun-pr "https://github.com/oven-sh/bun/pull/1234566" +bunx bun-pr <pr-number> --asan # Linux x64 only ``` This will download the release build from the pull request and add it to `$PATH` as `bun-${pr-number}`. You can then run the build with `bun-${pr-number}`. @@ -189,24 +198,18 @@ bun-1234566 --version This works by downloading the release build from the GitHub Actions artifacts on the linked pull request. You may need the `gh` CLI installed to authenticate with GitHub. -## Valgrind +## AddressSanitizer -On Linux, valgrind can help find memory issues. +[AddressSanitizer](https://en.wikipedia.org/wiki/AddressSanitizer) helps find memory issues, and is enabled by default in debug builds of Bun on Linux and macOS. This includes the Zig code and all dependencies.
It makes the Zig code take about 2x longer to build. If that's stopping you from being productive, you can disable it by setting `-Denable_asan` to `false` in the `cmake/targets/BuildBun.cmake` file, but generally we recommend batching your changes up between builds. -Keep in mind: - -- JavaScriptCore doesn't support valgrind. It will report spurious errors. -- Valgrind is slow -- Mimalloc will sometimes cause spurious errors when debug build is enabled - -You'll need a very recent version of Valgrind due to DWARF 5 debug symbols. You may need to manually compile Valgrind instead of using it from your Linux package manager. - -`--fair-sched=try` is necessary if running multithreaded code in Bun (such as the bundler). Otherwise it will hang. +To build a release build with Address Sanitizer, run: ```bash -$ valgrind --fair-sched=try --track-origins=yes bun-debug +$ bun run build:release:asan ``` +In CI, we run our test suite with at least one target that is built with Address Sanitizer. + ## Building WebKit locally + Debug mode of JSC WebKit is not cloned by default (to save time and disk space). To clone and build WebKit locally, run: diff --git a/LATEST b/LATEST index a96f385f15..5ab1538853 100644 --- a/LATEST +++ b/LATEST @@ -1 +1 @@ -1.2.16 \ No newline at end of file +1.2.18 \ No newline at end of file diff --git a/Makefile b/Makefile index 8826aa4ea7..91bb766afa 100644 --- a/Makefile +++ b/Makefile @@ -980,7 +980,7 @@ release-create-auto-updater: .PHONY: release-create release-create: gh release create --title "bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)" - gh release create --repo=$(BUN_AUTO_UPDATER_REPO) --title "bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)" -n "See https://github.com/oven-sh/bun/releases/tag/$(BUN_BUILD_TAG) for release notes. Using the install script or bun upgrade is the recommended way to install bun. Join bun's Discord to get access https://bun.sh/discord" + gh release create --repo=$(BUN_AUTO_UPDATER_REPO) --title "bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)" -n "See https://github.com/oven-sh/bun/releases/tag/$(BUN_BUILD_TAG) for release notes. Using the install script or bun upgrade is the recommended way to install bun. 
Join bun's Discord to get access https://bun.com/discord" release-bin-entitlements: @@ -1977,7 +1977,7 @@ integration-test-dev: # to run integration tests USE_EXISTING_PROCESS=true TEST_SERVER_URL=http://localhost:3000 node test/scripts/browser.js copy-install: - cp src/cli/install.sh ../bun.sh/docs/install.html + cp src/cli/install.sh ../bun.com/docs/install.html copy-to-bun-release-dir: copy-to-bun-release-dir-bin copy-to-bun-release-dir-dsym @@ -2019,28 +2019,28 @@ vendor-dev: assert-deps submodule npm-install-dev vendor-without-npm .PHONY: bun bun: @echo 'makefile is deprecated - use `cmake` / `bun run build`' - @echo 'See https://bun.sh/docs/project/contributing for more details' + @echo 'See https://bun.com/docs/project/contributing for more details' cpp: @echo 'makefile is deprecated - use `cmake` / `bun run build`' - @echo 'See https://bun.sh/docs/project/contributing for more details' + @echo 'See https://bun.com/docs/project/contributing for more details' zig: @echo 'makefile is deprecated - use `cmake` / `bun run build`' - @echo 'See https://bun.sh/docs/project/contributing for more details' + @echo 'See https://bun.com/docs/project/contributing for more details' dev: @echo 'makefile is deprecated - use `cmake` / `bun run build`' - @echo 'See https://bun.sh/docs/project/contributing for more details' + @echo 'See https://bun.com/docs/project/contributing for more details' setup: @echo 'makefile is deprecated - use `cmake` / `bun run build`' - @echo 'See https://bun.sh/docs/project/contributing for more details' + @echo 'See https://bun.com/docs/project/contributing for more details' bindings: @echo 'makefile is deprecated - use `cmake` / `bun run build`' - @echo 'See https://bun.sh/docs/project/contributing for more details' + @echo 'See https://bun.com/docs/project/contributing for more details' help: @echo 'makefile is deprecated - use `cmake` / `bun run build`' - @echo 'See https://bun.sh/docs/project/contributing for more details' + @echo 'See https://bun.com/docs/project/contributing for more details' diff --git a/README.md b/README.md index d9d3f8e596..61733ac8e8 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,16 @@

[README header images: centered Bun logo, GitHub stars badge, and "Bun speed" benchmark graphic, followed by the Documentation • Discord header links; image and link URLs updated from bun.sh to bun.com] @@ -20,7 +20,7 @@
-### [Read the docs →](https://bun.sh/docs) +### [Read the docs →](https://bun.com/docs) ## What is Bun? @@ -47,14 +47,14 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64). > **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1. -> **x64 users** — if you see "illegal instruction" or similar errors, check our [CPU requirements](https://bun.sh/docs/installation#cpu-requirements-and-baseline-builds) +> **x64 users** — if you see "illegal instruction" or similar errors, check our [CPU requirements](https://bun.com/docs/installation#cpu-requirements-and-baseline-builds) ```sh # with install script (recommended) -curl -fsSL https://bun.sh/install | bash +curl -fsSL https://bun.com/install | bash # on windows -powershell -c "irm bun.sh/install.ps1 | iex" +powershell -c "irm bun.com/install.ps1 | iex" # with npm npm install -g bun @@ -87,351 +87,329 @@ bun upgrade --canary ## Quick links - Intro - - - [What is Bun?](https://bun.sh/docs/index) - - [Installation](https://bun.sh/docs/installation) - - [Quickstart](https://bun.sh/docs/quickstart) - - [TypeScript](https://bun.sh/docs/typescript) + - [What is Bun?](https://bun.com/docs/index) + - [Installation](https://bun.com/docs/installation) + - [Quickstart](https://bun.com/docs/quickstart) + - [TypeScript](https://bun.com/docs/typescript) - Templating - - - [`bun init`](https://bun.sh/docs/cli/init) - - [`bun create`](https://bun.sh/docs/cli/bun-create) + - [`bun init`](https://bun.com/docs/cli/init) + - [`bun create`](https://bun.com/docs/cli/bun-create) - CLI - - - [`bun upgrade`](https://bun.sh/docs/cli/bun-upgrade) + - [`bun upgrade`](https://bun.com/docs/cli/bun-upgrade) - Runtime - - - [`bun run`](https://bun.sh/docs/cli/run) - - [File types (Loaders)](https://bun.sh/docs/runtime/loaders) - - [TypeScript](https://bun.sh/docs/runtime/typescript) - - [JSX](https://bun.sh/docs/runtime/jsx) - - [Environment variables](https://bun.sh/docs/runtime/env) - - [Bun APIs](https://bun.sh/docs/runtime/bun-apis) - - [Web APIs](https://bun.sh/docs/runtime/web-apis) - - [Node.js compatibility](https://bun.sh/docs/runtime/nodejs-apis) - - [Single-file executable](https://bun.sh/docs/bundler/executables) - - [Plugins](https://bun.sh/docs/runtime/plugins) - - [Watch mode / Hot Reloading](https://bun.sh/docs/runtime/hot) - - [Module resolution](https://bun.sh/docs/runtime/modules) - - [Auto-install](https://bun.sh/docs/runtime/autoimport) - - [bunfig.toml](https://bun.sh/docs/runtime/bunfig) - - [Debugger](https://bun.sh/docs/runtime/debugger) - - [$ Shell](https://bun.sh/docs/runtime/shell) + - [`bun run`](https://bun.com/docs/cli/run) + - [File types (Loaders)](https://bun.com/docs/runtime/loaders) + - [TypeScript](https://bun.com/docs/runtime/typescript) + - [JSX](https://bun.com/docs/runtime/jsx) + - [Environment variables](https://bun.com/docs/runtime/env) + - [Bun APIs](https://bun.com/docs/runtime/bun-apis) + - [Web APIs](https://bun.com/docs/runtime/web-apis) + - [Node.js compatibility](https://bun.com/docs/runtime/nodejs-apis) + - [Single-file executable](https://bun.com/docs/bundler/executables) + - [Plugins](https://bun.com/docs/runtime/plugins) + - [Watch mode / Hot Reloading](https://bun.com/docs/runtime/hot) + - [Module resolution](https://bun.com/docs/runtime/modules) + - [Auto-install](https://bun.com/docs/runtime/autoimport) + - [bunfig.toml](https://bun.com/docs/runtime/bunfig) + - [Debugger](https://bun.com/docs/runtime/debugger) + - [$ 
Shell](https://bun.com/docs/runtime/shell) - Package manager - - - [`bun install`](https://bun.sh/docs/cli/install) - - [`bun add`](https://bun.sh/docs/cli/add) - - [`bun remove`](https://bun.sh/docs/cli/remove) - - [`bun update`](https://bun.sh/docs/cli/update) - - [`bun link`](https://bun.sh/docs/cli/link) - - [`bun unlink`](https://bun.sh/docs/cli/unlink) - - [`bun pm`](https://bun.sh/docs/cli/pm) - - [`bun outdated`](https://bun.sh/docs/cli/outdated) - - [`bun publish`](https://bun.sh/docs/cli/publish) - - [`bun patch`](https://bun.sh/docs/install/patch) - - [`bun patch-commit`](https://bun.sh/docs/cli/patch-commit) - - [Global cache](https://bun.sh/docs/install/cache) - - [Workspaces](https://bun.sh/docs/install/workspaces) - - [Lifecycle scripts](https://bun.sh/docs/install/lifecycle) - - [Filter](https://bun.sh/docs/cli/filter) - - [Lockfile](https://bun.sh/docs/install/lockfile) - - [Scopes and registries](https://bun.sh/docs/install/registries) - - [Overrides and resolutions](https://bun.sh/docs/install/overrides) - - [`.npmrc`](https://bun.sh/docs/install/npmrc) + - [`bun install`](https://bun.com/docs/cli/install) + - [`bun add`](https://bun.com/docs/cli/add) + - [`bun remove`](https://bun.com/docs/cli/remove) + - [`bun update`](https://bun.com/docs/cli/update) + - [`bun link`](https://bun.com/docs/cli/link) + - [`bun unlink`](https://bun.com/docs/cli/unlink) + - [`bun pm`](https://bun.com/docs/cli/pm) + - [`bun outdated`](https://bun.com/docs/cli/outdated) + - [`bun publish`](https://bun.com/docs/cli/publish) + - [`bun patch`](https://bun.com/docs/install/patch) + - [`bun patch-commit`](https://bun.com/docs/cli/patch-commit) + - [Global cache](https://bun.com/docs/install/cache) + - [Workspaces](https://bun.com/docs/install/workspaces) + - [Lifecycle scripts](https://bun.com/docs/install/lifecycle) + - [Filter](https://bun.com/docs/cli/filter) + - [Lockfile](https://bun.com/docs/install/lockfile) + - [Scopes and registries](https://bun.com/docs/install/registries) + - [Overrides and resolutions](https://bun.com/docs/install/overrides) + - [`.npmrc`](https://bun.com/docs/install/npmrc) - Bundler - - - [`Bun.build`](https://bun.sh/docs/bundler) - - [Loaders](https://bun.sh/docs/bundler/loaders) - - [Plugins](https://bun.sh/docs/bundler/plugins) - - [Macros](https://bun.sh/docs/bundler/macros) - - [vs esbuild](https://bun.sh/docs/bundler/vs-esbuild) - - [Single-file executable](https://bun.sh/docs/bundler/executables) - - [CSS](https://bun.sh/docs/bundler/css) - - [HTML](https://bun.sh/docs/bundler/html) - - [Hot Module Replacement (HMR)](https://bun.sh/docs/bundler/hmr) - - [Full-stack with HTML imports](https://bun.sh/docs/bundler/fullstack) + - [`Bun.build`](https://bun.com/docs/bundler) + - [Loaders](https://bun.com/docs/bundler/loaders) + - [Plugins](https://bun.com/docs/bundler/plugins) + - [Macros](https://bun.com/docs/bundler/macros) + - [vs esbuild](https://bun.com/docs/bundler/vs-esbuild) + - [Single-file executable](https://bun.com/docs/bundler/executables) + - [CSS](https://bun.com/docs/bundler/css) + - [HTML](https://bun.com/docs/bundler/html) + - [Hot Module Replacement (HMR)](https://bun.com/docs/bundler/hmr) + - [Full-stack with HTML imports](https://bun.com/docs/bundler/fullstack) - Test runner - - - [`bun test`](https://bun.sh/docs/cli/test) - - [Writing tests](https://bun.sh/docs/test/writing) - - [Watch mode](https://bun.sh/docs/test/hot) - - [Lifecycle hooks](https://bun.sh/docs/test/lifecycle) - - [Mocks](https://bun.sh/docs/test/mocks) - - 
[Snapshots](https://bun.sh/docs/test/snapshots) - - [Dates and times](https://bun.sh/docs/test/time) - - [DOM testing](https://bun.sh/docs/test/dom) - - [Code coverage](https://bun.sh/docs/test/coverage) - - [Configuration](https://bun.sh/docs/test/configuration) - - [Discovery](https://bun.sh/docs/test/discovery) - - [Reporters](https://bun.sh/docs/test/reporters) - - [Runtime Behavior](https://bun.sh/docs/test/runtime-behavior) + - [`bun test`](https://bun.com/docs/cli/test) + - [Writing tests](https://bun.com/docs/test/writing) + - [Watch mode](https://bun.com/docs/test/hot) + - [Lifecycle hooks](https://bun.com/docs/test/lifecycle) + - [Mocks](https://bun.com/docs/test/mocks) + - [Snapshots](https://bun.com/docs/test/snapshots) + - [Dates and times](https://bun.com/docs/test/time) + - [DOM testing](https://bun.com/docs/test/dom) + - [Code coverage](https://bun.com/docs/test/coverage) + - [Configuration](https://bun.com/docs/test/configuration) + - [Discovery](https://bun.com/docs/test/discovery) + - [Reporters](https://bun.com/docs/test/reporters) + - [Runtime Behavior](https://bun.com/docs/test/runtime-behavior) - Package runner - - - [`bunx`](https://bun.sh/docs/cli/bunx) + - [`bunx`](https://bun.com/docs/cli/bunx) - API - - - [HTTP server (`Bun.serve`)](https://bun.sh/docs/api/http) - - [WebSockets](https://bun.sh/docs/api/websockets) - - [Workers](https://bun.sh/docs/api/workers) - - [Binary data](https://bun.sh/docs/api/binary-data) - - [Streams](https://bun.sh/docs/api/streams) - - [File I/O (`Bun.file`)](https://bun.sh/docs/api/file-io) - - [import.meta](https://bun.sh/docs/api/import-meta) - - [SQLite (`bun:sqlite`)](https://bun.sh/docs/api/sqlite) - - [PostgreSQL (`Bun.sql`)](https://bun.sh/docs/api/sql) - - [Redis (`Bun.redis`)](https://bun.sh/docs/api/redis) - - [S3 Client (`Bun.s3`)](https://bun.sh/docs/api/s3) - - [FileSystemRouter](https://bun.sh/docs/api/file-system-router) - - [TCP sockets](https://bun.sh/docs/api/tcp) - - [UDP sockets](https://bun.sh/docs/api/udp) - - [Globals](https://bun.sh/docs/api/globals) - - [$ Shell](https://bun.sh/docs/runtime/shell) - - [Child processes (spawn)](https://bun.sh/docs/api/spawn) - - [Transpiler (`Bun.Transpiler`)](https://bun.sh/docs/api/transpiler) - - [Hashing](https://bun.sh/docs/api/hashing) - - [Colors (`Bun.color`)](https://bun.sh/docs/api/color) - - [Console](https://bun.sh/docs/api/console) - - [FFI (`bun:ffi`)](https://bun.sh/docs/api/ffi) - - [C Compiler (`bun:ffi` cc)](https://bun.sh/docs/api/cc) - - [HTMLRewriter](https://bun.sh/docs/api/html-rewriter) - - [Testing (`bun:test`)](https://bun.sh/docs/api/test) - - [Cookies (`Bun.Cookie`)](https://bun.sh/docs/api/cookie) - - [Utils](https://bun.sh/docs/api/utils) - - [Node-API](https://bun.sh/docs/api/node-api) - - [Glob (`Bun.Glob`)](https://bun.sh/docs/api/glob) - - [Semver (`Bun.semver`)](https://bun.sh/docs/api/semver) - - [DNS](https://bun.sh/docs/api/dns) - - [fetch API extensions](https://bun.sh/docs/api/fetch) + - [HTTP server (`Bun.serve`)](https://bun.com/docs/api/http) + - [WebSockets](https://bun.com/docs/api/websockets) + - [Workers](https://bun.com/docs/api/workers) + - [Binary data](https://bun.com/docs/api/binary-data) + - [Streams](https://bun.com/docs/api/streams) + - [File I/O (`Bun.file`)](https://bun.com/docs/api/file-io) + - [import.meta](https://bun.com/docs/api/import-meta) + - [SQLite (`bun:sqlite`)](https://bun.com/docs/api/sqlite) + - [PostgreSQL (`Bun.sql`)](https://bun.com/docs/api/sql) + - [Redis 
(`Bun.redis`)](https://bun.com/docs/api/redis) + - [S3 Client (`Bun.s3`)](https://bun.com/docs/api/s3) + - [FileSystemRouter](https://bun.com/docs/api/file-system-router) + - [TCP sockets](https://bun.com/docs/api/tcp) + - [UDP sockets](https://bun.com/docs/api/udp) + - [Globals](https://bun.com/docs/api/globals) + - [$ Shell](https://bun.com/docs/runtime/shell) + - [Child processes (spawn)](https://bun.com/docs/api/spawn) + - [Transpiler (`Bun.Transpiler`)](https://bun.com/docs/api/transpiler) + - [Hashing](https://bun.com/docs/api/hashing) + - [Colors (`Bun.color`)](https://bun.com/docs/api/color) + - [Console](https://bun.com/docs/api/console) + - [FFI (`bun:ffi`)](https://bun.com/docs/api/ffi) + - [C Compiler (`bun:ffi` cc)](https://bun.com/docs/api/cc) + - [HTMLRewriter](https://bun.com/docs/api/html-rewriter) + - [Testing (`bun:test`)](https://bun.com/docs/api/test) + - [Cookies (`Bun.Cookie`)](https://bun.com/docs/api/cookie) + - [Utils](https://bun.com/docs/api/utils) + - [Node-API](https://bun.com/docs/api/node-api) + - [Glob (`Bun.Glob`)](https://bun.com/docs/api/glob) + - [Semver (`Bun.semver`)](https://bun.com/docs/api/semver) + - [DNS](https://bun.com/docs/api/dns) + - [fetch API extensions](https://bun.com/docs/api/fetch) ## Guides - Binary - - - [Convert a Blob to a string](https://bun.sh/guides/binary/blob-to-string) - - [Convert a Buffer to a blob](https://bun.sh/guides/binary/buffer-to-blob) - - [Convert a Blob to a DataView](https://bun.sh/guides/binary/blob-to-dataview) - - [Convert a Buffer to a string](https://bun.sh/guides/binary/buffer-to-string) - - [Convert a Blob to a ReadableStream](https://bun.sh/guides/binary/blob-to-stream) - - [Convert a Blob to a Uint8Array](https://bun.sh/guides/binary/blob-to-typedarray) - - [Convert a DataView to a string](https://bun.sh/guides/binary/dataview-to-string) - - [Convert a Uint8Array to a Blob](https://bun.sh/guides/binary/typedarray-to-blob) - - [Convert a Blob to an ArrayBuffer](https://bun.sh/guides/binary/blob-to-arraybuffer) - - [Convert an ArrayBuffer to a Blob](https://bun.sh/guides/binary/arraybuffer-to-blob) - - [Convert a Buffer to a Uint8Array](https://bun.sh/guides/binary/buffer-to-typedarray) - - [Convert a Uint8Array to a Buffer](https://bun.sh/guides/binary/typedarray-to-buffer) - - [Convert a Uint8Array to a string](https://bun.sh/guides/binary/typedarray-to-string) - - [Convert a Buffer to an ArrayBuffer](https://bun.sh/guides/binary/buffer-to-arraybuffer) - - [Convert an ArrayBuffer to a Buffer](https://bun.sh/guides/binary/arraybuffer-to-buffer) - - [Convert an ArrayBuffer to a string](https://bun.sh/guides/binary/arraybuffer-to-string) - - [Convert a Uint8Array to a DataView](https://bun.sh/guides/binary/typedarray-to-dataview) - - [Convert a Buffer to a ReadableStream](https://bun.sh/guides/binary/buffer-to-readablestream) - - [Convert a Uint8Array to an ArrayBuffer](https://bun.sh/guides/binary/typedarray-to-arraybuffer) - - [Convert an ArrayBuffer to a Uint8Array](https://bun.sh/guides/binary/arraybuffer-to-typedarray) - - [Convert an ArrayBuffer to an array of numbers](https://bun.sh/guides/binary/arraybuffer-to-array) - - [Convert a Uint8Array to a ReadableStream](https://bun.sh/guides/binary/typedarray-to-readablestream) + - [Convert a Blob to a string](https://bun.com/guides/binary/blob-to-string) + - [Convert a Buffer to a blob](https://bun.com/guides/binary/buffer-to-blob) + - [Convert a Blob to a DataView](https://bun.com/guides/binary/blob-to-dataview) + - [Convert a Buffer to a 
string](https://bun.com/guides/binary/buffer-to-string) + - [Convert a Blob to a ReadableStream](https://bun.com/guides/binary/blob-to-stream) + - [Convert a Blob to a Uint8Array](https://bun.com/guides/binary/blob-to-typedarray) + - [Convert a DataView to a string](https://bun.com/guides/binary/dataview-to-string) + - [Convert a Uint8Array to a Blob](https://bun.com/guides/binary/typedarray-to-blob) + - [Convert a Blob to an ArrayBuffer](https://bun.com/guides/binary/blob-to-arraybuffer) + - [Convert an ArrayBuffer to a Blob](https://bun.com/guides/binary/arraybuffer-to-blob) + - [Convert a Buffer to a Uint8Array](https://bun.com/guides/binary/buffer-to-typedarray) + - [Convert a Uint8Array to a Buffer](https://bun.com/guides/binary/typedarray-to-buffer) + - [Convert a Uint8Array to a string](https://bun.com/guides/binary/typedarray-to-string) + - [Convert a Buffer to an ArrayBuffer](https://bun.com/guides/binary/buffer-to-arraybuffer) + - [Convert an ArrayBuffer to a Buffer](https://bun.com/guides/binary/arraybuffer-to-buffer) + - [Convert an ArrayBuffer to a string](https://bun.com/guides/binary/arraybuffer-to-string) + - [Convert a Uint8Array to a DataView](https://bun.com/guides/binary/typedarray-to-dataview) + - [Convert a Buffer to a ReadableStream](https://bun.com/guides/binary/buffer-to-readablestream) + - [Convert a Uint8Array to an ArrayBuffer](https://bun.com/guides/binary/typedarray-to-arraybuffer) + - [Convert an ArrayBuffer to a Uint8Array](https://bun.com/guides/binary/arraybuffer-to-typedarray) + - [Convert an ArrayBuffer to an array of numbers](https://bun.com/guides/binary/arraybuffer-to-array) + - [Convert a Uint8Array to a ReadableStream](https://bun.com/guides/binary/typedarray-to-readablestream) - Ecosystem - - - [Use React and JSX](https://bun.sh/guides/ecosystem/react) - - [Use EdgeDB with Bun](https://bun.sh/guides/ecosystem/edgedb) - - [Use Prisma with Bun](https://bun.sh/guides/ecosystem/prisma) - - [Add Sentry to a Bun app](https://bun.sh/guides/ecosystem/sentry) - - [Create a Discord bot](https://bun.sh/guides/ecosystem/discordjs) - - [Run Bun as a daemon with PM2](https://bun.sh/guides/ecosystem/pm2) - - [Use Drizzle ORM with Bun](https://bun.sh/guides/ecosystem/drizzle) - - [Build an app with Nuxt and Bun](https://bun.sh/guides/ecosystem/nuxt) - - [Build an app with Qwik and Bun](https://bun.sh/guides/ecosystem/qwik) - - [Build an app with Astro and Bun](https://bun.sh/guides/ecosystem/astro) - - [Build an app with Remix and Bun](https://bun.sh/guides/ecosystem/remix) - - [Build a frontend using Vite and Bun](https://bun.sh/guides/ecosystem/vite) - - [Build an app with Next.js and Bun](https://bun.sh/guides/ecosystem/nextjs) - - [Run Bun as a daemon with systemd](https://bun.sh/guides/ecosystem/systemd) - - [Deploy a Bun application on Render](https://bun.sh/guides/ecosystem/render) - - [Build an HTTP server using Hono and Bun](https://bun.sh/guides/ecosystem/hono) - - [Build an app with SvelteKit and Bun](https://bun.sh/guides/ecosystem/sveltekit) - - [Build an app with SolidStart and Bun](https://bun.sh/guides/ecosystem/solidstart) - - [Build an HTTP server using Elysia and Bun](https://bun.sh/guides/ecosystem/elysia) - - [Build an HTTP server using StricJS and Bun](https://bun.sh/guides/ecosystem/stric) - - [Containerize a Bun application with Docker](https://bun.sh/guides/ecosystem/docker) - - [Build an HTTP server using Express and Bun](https://bun.sh/guides/ecosystem/express) - - [Use Neon Postgres through Drizzle 
ORM](https://bun.sh/guides/ecosystem/neon-drizzle) - - [Server-side render (SSR) a React component](https://bun.sh/guides/ecosystem/ssr-react) - - [Read and write data to MongoDB using Mongoose and Bun](https://bun.sh/guides/ecosystem/mongoose) - - [Use Neon's Serverless Postgres with Bun](https://bun.sh/guides/ecosystem/neon-serverless-postgres) + - [Use React and JSX](https://bun.com/guides/ecosystem/react) + - [Use EdgeDB with Bun](https://bun.com/guides/ecosystem/edgedb) + - [Use Prisma with Bun](https://bun.com/guides/ecosystem/prisma) + - [Add Sentry to a Bun app](https://bun.com/guides/ecosystem/sentry) + - [Create a Discord bot](https://bun.com/guides/ecosystem/discordjs) + - [Run Bun as a daemon with PM2](https://bun.com/guides/ecosystem/pm2) + - [Use Drizzle ORM with Bun](https://bun.com/guides/ecosystem/drizzle) + - [Build an app with Nuxt and Bun](https://bun.com/guides/ecosystem/nuxt) + - [Build an app with Qwik and Bun](https://bun.com/guides/ecosystem/qwik) + - [Build an app with Astro and Bun](https://bun.com/guides/ecosystem/astro) + - [Build an app with Remix and Bun](https://bun.com/guides/ecosystem/remix) + - [Build a frontend using Vite and Bun](https://bun.com/guides/ecosystem/vite) + - [Build an app with Next.js and Bun](https://bun.com/guides/ecosystem/nextjs) + - [Run Bun as a daemon with systemd](https://bun.com/guides/ecosystem/systemd) + - [Deploy a Bun application on Render](https://bun.com/guides/ecosystem/render) + - [Build an HTTP server using Hono and Bun](https://bun.com/guides/ecosystem/hono) + - [Build an app with SvelteKit and Bun](https://bun.com/guides/ecosystem/sveltekit) + - [Build an app with SolidStart and Bun](https://bun.com/guides/ecosystem/solidstart) + - [Build an HTTP server using Elysia and Bun](https://bun.com/guides/ecosystem/elysia) + - [Build an HTTP server using StricJS and Bun](https://bun.com/guides/ecosystem/stric) + - [Containerize a Bun application with Docker](https://bun.com/guides/ecosystem/docker) + - [Build an HTTP server using Express and Bun](https://bun.com/guides/ecosystem/express) + - [Use Neon Postgres through Drizzle ORM](https://bun.com/guides/ecosystem/neon-drizzle) + - [Server-side render (SSR) a React component](https://bun.com/guides/ecosystem/ssr-react) + - [Read and write data to MongoDB using Mongoose and Bun](https://bun.com/guides/ecosystem/mongoose) + - [Use Neon's Serverless Postgres with Bun](https://bun.com/guides/ecosystem/neon-serverless-postgres) - HTMLRewriter - - - [Extract links from a webpage using HTMLRewriter](https://bun.sh/guides/html-rewriter/extract-links) - - [Extract social share images and Open Graph tags](https://bun.sh/guides/html-rewriter/extract-social-meta) + - [Extract links from a webpage using HTMLRewriter](https://bun.com/guides/html-rewriter/extract-links) + - [Extract social share images and Open Graph tags](https://bun.com/guides/html-rewriter/extract-social-meta) - HTTP - - - [Hot reload an HTTP server](https://bun.sh/guides/http/hot) - - [Common HTTP server usage](https://bun.sh/guides/http/server) - - [Write a simple HTTP server](https://bun.sh/guides/http/simple) - - [Configure TLS on an HTTP server](https://bun.sh/guides/http/tls) - - [Send an HTTP request using fetch](https://bun.sh/guides/http/fetch) - - [Proxy HTTP requests using fetch()](https://bun.sh/guides/http/proxy) - - [Start a cluster of HTTP servers](https://bun.sh/guides/http/cluster) - - [Stream a file as an HTTP Response](https://bun.sh/guides/http/stream-file) - - [fetch with unix domain sockets in 
Bun](https://bun.sh/guides/http/fetch-unix) - - [Upload files via HTTP using FormData](https://bun.sh/guides/http/file-uploads) - - [Streaming HTTP Server with Async Iterators](https://bun.sh/guides/http/stream-iterator) - - [Streaming HTTP Server with Node.js Streams](https://bun.sh/guides/http/stream-node-streams-in-bun) + - [Hot reload an HTTP server](https://bun.com/guides/http/hot) + - [Common HTTP server usage](https://bun.com/guides/http/server) + - [Write a simple HTTP server](https://bun.com/guides/http/simple) + - [Configure TLS on an HTTP server](https://bun.com/guides/http/tls) + - [Send an HTTP request using fetch](https://bun.com/guides/http/fetch) + - [Proxy HTTP requests using fetch()](https://bun.com/guides/http/proxy) + - [Start a cluster of HTTP servers](https://bun.com/guides/http/cluster) + - [Stream a file as an HTTP Response](https://bun.com/guides/http/stream-file) + - [fetch with unix domain sockets in Bun](https://bun.com/guides/http/fetch-unix) + - [Upload files via HTTP using FormData](https://bun.com/guides/http/file-uploads) + - [Streaming HTTP Server with Async Iterators](https://bun.com/guides/http/stream-iterator) + - [Streaming HTTP Server with Node.js Streams](https://bun.com/guides/http/stream-node-streams-in-bun) - Install - - - [Add a dependency](https://bun.sh/guides/install/add) - - [Add a Git dependency](https://bun.sh/guides/install/add-git) - - [Add a peer dependency](https://bun.sh/guides/install/add-peer) - - [Add a trusted dependency](https://bun.sh/guides/install/trusted) - - [Add a development dependency](https://bun.sh/guides/install/add-dev) - - [Add a tarball dependency](https://bun.sh/guides/install/add-tarball) - - [Add an optional dependency](https://bun.sh/guides/install/add-optional) - - [Generate a yarn-compatible lockfile](https://bun.sh/guides/install/yarnlock) - - [Configuring a monorepo using workspaces](https://bun.sh/guides/install/workspaces) - - [Install a package under a different name](https://bun.sh/guides/install/npm-alias) - - [Install dependencies with Bun in GitHub Actions](https://bun.sh/guides/install/cicd) - - [Using bun install with Artifactory](https://bun.sh/guides/install/jfrog-artifactory) - - [Configure git to diff Bun's lockb lockfile](https://bun.sh/guides/install/git-diff-bun-lockfile) - - [Override the default npm registry for bun install](https://bun.sh/guides/install/custom-registry) - - [Using bun install with an Azure Artifacts npm registry](https://bun.sh/guides/install/azure-artifacts) - - [Migrate from npm install to bun install](https://bun.sh/guides/install/from-npm-install-to-bun-install) - - [Configure a private registry for an organization scope with bun install](https://bun.sh/guides/install/registry-scope) + - [Add a dependency](https://bun.com/guides/install/add) + - [Add a Git dependency](https://bun.com/guides/install/add-git) + - [Add a peer dependency](https://bun.com/guides/install/add-peer) + - [Add a trusted dependency](https://bun.com/guides/install/trusted) + - [Add a development dependency](https://bun.com/guides/install/add-dev) + - [Add a tarball dependency](https://bun.com/guides/install/add-tarball) + - [Add an optional dependency](https://bun.com/guides/install/add-optional) + - [Generate a yarn-compatible lockfile](https://bun.com/guides/install/yarnlock) + - [Configuring a monorepo using workspaces](https://bun.com/guides/install/workspaces) + - [Install a package under a different name](https://bun.com/guides/install/npm-alias) + - [Install dependencies with Bun in GitHub 
Actions](https://bun.com/guides/install/cicd) + - [Using bun install with Artifactory](https://bun.com/guides/install/jfrog-artifactory) + - [Configure git to diff Bun's lockb lockfile](https://bun.com/guides/install/git-diff-bun-lockfile) + - [Override the default npm registry for bun install](https://bun.com/guides/install/custom-registry) + - [Using bun install with an Azure Artifacts npm registry](https://bun.com/guides/install/azure-artifacts) + - [Migrate from npm install to bun install](https://bun.com/guides/install/from-npm-install-to-bun-install) + - [Configure a private registry for an organization scope with bun install](https://bun.com/guides/install/registry-scope) - Process - - - [Read from stdin](https://bun.sh/guides/process/stdin) - - [Listen for CTRL+C](https://bun.sh/guides/process/ctrl-c) - - [Spawn a child process](https://bun.sh/guides/process/spawn) - - [Listen to OS signals](https://bun.sh/guides/process/os-signals) - - [Parse command-line arguments](https://bun.sh/guides/process/argv) - - [Read stderr from a child process](https://bun.sh/guides/process/spawn-stderr) - - [Read stdout from a child process](https://bun.sh/guides/process/spawn-stdout) - - [Get the process uptime in nanoseconds](https://bun.sh/guides/process/nanoseconds) - - [Spawn a child process and communicate using IPC](https://bun.sh/guides/process/ipc) + - [Read from stdin](https://bun.com/guides/process/stdin) + - [Listen for CTRL+C](https://bun.com/guides/process/ctrl-c) + - [Spawn a child process](https://bun.com/guides/process/spawn) + - [Listen to OS signals](https://bun.com/guides/process/os-signals) + - [Parse command-line arguments](https://bun.com/guides/process/argv) + - [Read stderr from a child process](https://bun.com/guides/process/spawn-stderr) + - [Read stdout from a child process](https://bun.com/guides/process/spawn-stdout) + - [Get the process uptime in nanoseconds](https://bun.com/guides/process/nanoseconds) + - [Spawn a child process and communicate using IPC](https://bun.com/guides/process/ipc) - Read file - - - [Read a JSON file](https://bun.sh/guides/read-file/json) - - [Check if a file exists](https://bun.sh/guides/read-file/exists) - - [Read a file as a string](https://bun.sh/guides/read-file/string) - - [Read a file to a Buffer](https://bun.sh/guides/read-file/buffer) - - [Get the MIME type of a file](https://bun.sh/guides/read-file/mime) - - [Watch a directory for changes](https://bun.sh/guides/read-file/watch) - - [Read a file as a ReadableStream](https://bun.sh/guides/read-file/stream) - - [Read a file to a Uint8Array](https://bun.sh/guides/read-file/uint8array) - - [Read a file to an ArrayBuffer](https://bun.sh/guides/read-file/arraybuffer) + - [Read a JSON file](https://bun.com/guides/read-file/json) + - [Check if a file exists](https://bun.com/guides/read-file/exists) + - [Read a file as a string](https://bun.com/guides/read-file/string) + - [Read a file to a Buffer](https://bun.com/guides/read-file/buffer) + - [Get the MIME type of a file](https://bun.com/guides/read-file/mime) + - [Watch a directory for changes](https://bun.com/guides/read-file/watch) + - [Read a file as a ReadableStream](https://bun.com/guides/read-file/stream) + - [Read a file to a Uint8Array](https://bun.com/guides/read-file/uint8array) + - [Read a file to an ArrayBuffer](https://bun.com/guides/read-file/arraybuffer) - Runtime - - - [Delete files](https://bun.sh/guides/runtime/delete-file) - - [Run a Shell Command](https://bun.sh/guides/runtime/shell) - - [Import a JSON 
file](https://bun.sh/guides/runtime/import-json) - - [Import a TOML file](https://bun.sh/guides/runtime/import-toml) - - [Set a time zone in Bun](https://bun.sh/guides/runtime/timezone) - - [Set environment variables](https://bun.sh/guides/runtime/set-env) - - [Re-map import paths](https://bun.sh/guides/runtime/tsconfig-paths) - - [Delete directories](https://bun.sh/guides/runtime/delete-directory) - - [Read environment variables](https://bun.sh/guides/runtime/read-env) - - [Import a HTML file as text](https://bun.sh/guides/runtime/import-html) - - [Install and run Bun in GitHub Actions](https://bun.sh/guides/runtime/cicd) - - [Debugging Bun with the web debugger](https://bun.sh/guides/runtime/web-debugger) - - [Install TypeScript declarations for Bun](https://bun.sh/guides/runtime/typescript) - - [Debugging Bun with the VS Code extension](https://bun.sh/guides/runtime/vscode-debugger) - - [Inspect memory usage using V8 heap snapshots](https://bun.sh/guides/runtime/heap-snapshot) - - [Define and replace static globals & constants](https://bun.sh/guides/runtime/define-constant) - - [Codesign a single-file JavaScript executable on macOS](https://bun.sh/guides/runtime/codesign-macos-executable) + - [Delete files](https://bun.com/guides/runtime/delete-file) + - [Run a Shell Command](https://bun.com/guides/runtime/shell) + - [Import a JSON file](https://bun.com/guides/runtime/import-json) + - [Import a TOML file](https://bun.com/guides/runtime/import-toml) + - [Set a time zone in Bun](https://bun.com/guides/runtime/timezone) + - [Set environment variables](https://bun.com/guides/runtime/set-env) + - [Re-map import paths](https://bun.com/guides/runtime/tsconfig-paths) + - [Delete directories](https://bun.com/guides/runtime/delete-directory) + - [Read environment variables](https://bun.com/guides/runtime/read-env) + - [Import a HTML file as text](https://bun.com/guides/runtime/import-html) + - [Install and run Bun in GitHub Actions](https://bun.com/guides/runtime/cicd) + - [Debugging Bun with the web debugger](https://bun.com/guides/runtime/web-debugger) + - [Install TypeScript declarations for Bun](https://bun.com/guides/runtime/typescript) + - [Debugging Bun with the VS Code extension](https://bun.com/guides/runtime/vscode-debugger) + - [Inspect memory usage using V8 heap snapshots](https://bun.com/guides/runtime/heap-snapshot) + - [Define and replace static globals & constants](https://bun.com/guides/runtime/define-constant) + - [Codesign a single-file JavaScript executable on macOS](https://bun.com/guides/runtime/codesign-macos-executable) - Streams - - - [Convert a ReadableStream to JSON](https://bun.sh/guides/streams/to-json) - - [Convert a ReadableStream to a Blob](https://bun.sh/guides/streams/to-blob) - - [Convert a ReadableStream to a Buffer](https://bun.sh/guides/streams/to-buffer) - - [Convert a ReadableStream to a string](https://bun.sh/guides/streams/to-string) - - [Convert a ReadableStream to a Uint8Array](https://bun.sh/guides/streams/to-typedarray) - - [Convert a ReadableStream to an array of chunks](https://bun.sh/guides/streams/to-array) - - [Convert a Node.js Readable to JSON](https://bun.sh/guides/streams/node-readable-to-json) - - [Convert a ReadableStream to an ArrayBuffer](https://bun.sh/guides/streams/to-arraybuffer) - - [Convert a Node.js Readable to a Blob](https://bun.sh/guides/streams/node-readable-to-blob) - - [Convert a Node.js Readable to a string](https://bun.sh/guides/streams/node-readable-to-string) - - [Convert a Node.js Readable to an 
Uint8Array](https://bun.sh/guides/streams/node-readable-to-uint8array) - - [Convert a Node.js Readable to an ArrayBuffer](https://bun.sh/guides/streams/node-readable-to-arraybuffer) + - [Convert a ReadableStream to JSON](https://bun.com/guides/streams/to-json) + - [Convert a ReadableStream to a Blob](https://bun.com/guides/streams/to-blob) + - [Convert a ReadableStream to a Buffer](https://bun.com/guides/streams/to-buffer) + - [Convert a ReadableStream to a string](https://bun.com/guides/streams/to-string) + - [Convert a ReadableStream to a Uint8Array](https://bun.com/guides/streams/to-typedarray) + - [Convert a ReadableStream to an array of chunks](https://bun.com/guides/streams/to-array) + - [Convert a Node.js Readable to JSON](https://bun.com/guides/streams/node-readable-to-json) + - [Convert a ReadableStream to an ArrayBuffer](https://bun.com/guides/streams/to-arraybuffer) + - [Convert a Node.js Readable to a Blob](https://bun.com/guides/streams/node-readable-to-blob) + - [Convert a Node.js Readable to a string](https://bun.com/guides/streams/node-readable-to-string) + - [Convert a Node.js Readable to an Uint8Array](https://bun.com/guides/streams/node-readable-to-uint8array) + - [Convert a Node.js Readable to an ArrayBuffer](https://bun.com/guides/streams/node-readable-to-arraybuffer) - Test - - - [Spy on methods in `bun test`](https://bun.sh/guides/test/spy-on) - - [Bail early with the Bun test runner](https://bun.sh/guides/test/bail) - - [Mock functions in `bun test`](https://bun.sh/guides/test/mock-functions) - - [Run tests in watch mode with Bun](https://bun.sh/guides/test/watch-mode) - - [Use snapshot testing in `bun test`](https://bun.sh/guides/test/snapshot) - - [Skip tests with the Bun test runner](https://bun.sh/guides/test/skip-tests) - - [Using Testing Library with Bun](https://bun.sh/guides/test/testing-library) - - [Update snapshots in `bun test`](https://bun.sh/guides/test/update-snapshots) - - [Run your tests with the Bun test runner](https://bun.sh/guides/test/run-tests) - - [Set the system time in Bun's test runner](https://bun.sh/guides/test/mock-clock) - - [Set a per-test timeout with the Bun test runner](https://bun.sh/guides/test/timeout) - - [Migrate from Jest to Bun's test runner](https://bun.sh/guides/test/migrate-from-jest) - - [Write browser DOM tests with Bun and happy-dom](https://bun.sh/guides/test/happy-dom) - - [Mark a test as a "todo" with the Bun test runner](https://bun.sh/guides/test/todo-tests) - - [Re-run tests multiple times with the Bun test runner](https://bun.sh/guides/test/rerun-each) - - [Generate code coverage reports with the Bun test runner](https://bun.sh/guides/test/coverage) - - [import, require, and test Svelte components with bun test](https://bun.sh/guides/test/svelte-test) - - [Set a code coverage threshold with the Bun test runner](https://bun.sh/guides/test/coverage-threshold) + - [Spy on methods in `bun test`](https://bun.com/guides/test/spy-on) + - [Bail early with the Bun test runner](https://bun.com/guides/test/bail) + - [Mock functions in `bun test`](https://bun.com/guides/test/mock-functions) + - [Run tests in watch mode with Bun](https://bun.com/guides/test/watch-mode) + - [Use snapshot testing in `bun test`](https://bun.com/guides/test/snapshot) + - [Skip tests with the Bun test runner](https://bun.com/guides/test/skip-tests) + - [Using Testing Library with Bun](https://bun.com/guides/test/testing-library) + - [Update snapshots in `bun test`](https://bun.com/guides/test/update-snapshots) + - [Run your tests with the Bun 
test runner](https://bun.com/guides/test/run-tests) + - [Set the system time in Bun's test runner](https://bun.com/guides/test/mock-clock) + - [Set a per-test timeout with the Bun test runner](https://bun.com/guides/test/timeout) + - [Migrate from Jest to Bun's test runner](https://bun.com/guides/test/migrate-from-jest) + - [Write browser DOM tests with Bun and happy-dom](https://bun.com/guides/test/happy-dom) + - [Mark a test as a "todo" with the Bun test runner](https://bun.com/guides/test/todo-tests) + - [Re-run tests multiple times with the Bun test runner](https://bun.com/guides/test/rerun-each) + - [Generate code coverage reports with the Bun test runner](https://bun.com/guides/test/coverage) + - [import, require, and test Svelte components with bun test](https://bun.com/guides/test/svelte-test) + - [Set a code coverage threshold with the Bun test runner](https://bun.com/guides/test/coverage-threshold) - Util - - - [Generate a UUID](https://bun.sh/guides/util/javascript-uuid) - - [Hash a password](https://bun.sh/guides/util/hash-a-password) - - [Escape an HTML string](https://bun.sh/guides/util/escape-html) - - [Get the current Bun version](https://bun.sh/guides/util/version) - - [Encode and decode base64 strings](https://bun.sh/guides/util/base64) - - [Compress and decompress data with gzip](https://bun.sh/guides/util/gzip) - - [Sleep for a fixed number of milliseconds](https://bun.sh/guides/util/sleep) - - [Detect when code is executed with Bun](https://bun.sh/guides/util/detect-bun) - - [Check if two objects are deeply equal](https://bun.sh/guides/util/deep-equals) - - [Compress and decompress data with DEFLATE](https://bun.sh/guides/util/deflate) - - [Get the absolute path to the current entrypoint](https://bun.sh/guides/util/main) - - [Get the directory of the current file](https://bun.sh/guides/util/import-meta-dir) - - [Check if the current file is the entrypoint](https://bun.sh/guides/util/entrypoint) - - [Get the file name of the current file](https://bun.sh/guides/util/import-meta-file) - - [Convert a file URL to an absolute path](https://bun.sh/guides/util/file-url-to-path) - - [Convert an absolute path to a file URL](https://bun.sh/guides/util/path-to-file-url) - - [Get the absolute path of the current file](https://bun.sh/guides/util/import-meta-path) - - [Get the path to an executable bin file](https://bun.sh/guides/util/which-path-to-executable-bin) + - [Generate a UUID](https://bun.com/guides/util/javascript-uuid) + - [Hash a password](https://bun.com/guides/util/hash-a-password) + - [Escape an HTML string](https://bun.com/guides/util/escape-html) + - [Get the current Bun version](https://bun.com/guides/util/version) + - [Encode and decode base64 strings](https://bun.com/guides/util/base64) + - [Compress and decompress data with gzip](https://bun.com/guides/util/gzip) + - [Sleep for a fixed number of milliseconds](https://bun.com/guides/util/sleep) + - [Detect when code is executed with Bun](https://bun.com/guides/util/detect-bun) + - [Check if two objects are deeply equal](https://bun.com/guides/util/deep-equals) + - [Compress and decompress data with DEFLATE](https://bun.com/guides/util/deflate) + - [Get the absolute path to the current entrypoint](https://bun.com/guides/util/main) + - [Get the directory of the current file](https://bun.com/guides/util/import-meta-dir) + - [Check if the current file is the entrypoint](https://bun.com/guides/util/entrypoint) + - [Get the file name of the current file](https://bun.com/guides/util/import-meta-file) + - [Convert a file 
URL to an absolute path](https://bun.com/guides/util/file-url-to-path) + - [Convert an absolute path to a file URL](https://bun.com/guides/util/path-to-file-url) + - [Get the absolute path of the current file](https://bun.com/guides/util/import-meta-path) + - [Get the path to an executable bin file](https://bun.com/guides/util/which-path-to-executable-bin) - WebSocket - - - [Build a publish-subscribe WebSocket server](https://bun.sh/guides/websocket/pubsub) - - [Build a simple WebSocket server](https://bun.sh/guides/websocket/simple) - - [Enable compression for WebSocket messages](https://bun.sh/guides/websocket/compression) - - [Set per-socket contextual data on a WebSocket](https://bun.sh/guides/websocket/context) + - [Build a publish-subscribe WebSocket server](https://bun.com/guides/websocket/pubsub) + - [Build a simple WebSocket server](https://bun.com/guides/websocket/simple) + - [Enable compression for WebSocket messages](https://bun.com/guides/websocket/compression) + - [Set per-socket contextual data on a WebSocket](https://bun.com/guides/websocket/context) - Write file - - - [Delete a file](https://bun.sh/guides/write-file/unlink) - - [Write to stdout](https://bun.sh/guides/write-file/stdout) - - [Write a file to stdout](https://bun.sh/guides/write-file/cat) - - [Write a Blob to a file](https://bun.sh/guides/write-file/blob) - - [Write a string to a file](https://bun.sh/guides/write-file/basic) - - [Append content to a file](https://bun.sh/guides/write-file/append) - - [Write a file incrementally](https://bun.sh/guides/write-file/filesink) - - [Write a Response to a file](https://bun.sh/guides/write-file/response) - - [Copy a file to another location](https://bun.sh/guides/write-file/file-cp) - - [Write a ReadableStream to a file](https://bun.sh/guides/write-file/stream) + - [Delete a file](https://bun.com/guides/write-file/unlink) + - [Write to stdout](https://bun.com/guides/write-file/stdout) + - [Write a file to stdout](https://bun.com/guides/write-file/cat) + - [Write a Blob to a file](https://bun.com/guides/write-file/blob) + - [Write a string to a file](https://bun.com/guides/write-file/basic) + - [Append content to a file](https://bun.com/guides/write-file/append) + - [Write a file incrementally](https://bun.com/guides/write-file/filesink) + - [Write a Response to a file](https://bun.com/guides/write-file/response) + - [Copy a file to another location](https://bun.com/guides/write-file/file-cp) + - [Write a ReadableStream to a file](https://bun.com/guides/write-file/stream) ## Contributing -Refer to the [Project > Contributing](https://bun.sh/docs/project/contributing) guide to start contributing to Bun. +Refer to the [Project > Contributing](https://bun.com/docs/project/contributing) guide to start contributing to Bun. ## License -Refer to the [Project > License](https://bun.sh/docs/project/licensing) page for information about Bun's licensing. +Refer to the [Project > License](https://bun.com/docs/project/licensing) page for information about Bun's licensing. diff --git a/SECURITY.md b/SECURITY.md index 5179a1ec70..f9402ed970 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -8,5 +8,4 @@ ## Reporting a Vulnerability -Report any discovered vulnerabilities to the Bun team by emailing `security@bun.sh`. Your report will acknowledged within 5 days, and a team member will be assigned as the primary handler. 
To the greatest extent possible, the security team will endeavor to keep you informed of the progress being made towards a fix and full announcement, and may ask for additional information or guidance surrounding the reported issue. - +Report any discovered vulnerabilities to the Bun team by emailing `security@bun.com`. Your report will be acknowledged within 5 days, and a team member will be assigned as the primary handler. To the greatest extent possible, the security team will endeavor to keep you informed of the progress being made towards a fix and full announcement, and may ask for additional information or guidance surrounding the reported issue. diff --git a/bench/expect-to-equal/README.md b/bench/expect-to-equal/README.md index 3e7e3594b7..7f79d91198 100644 --- a/bench/expect-to-equal/README.md +++ b/bench/expect-to-equal/README.md @@ -40,4 +40,4 @@ vitest (node v18.11.0) > expect().toEqual() x 10000: 401.08ms -This project was created using `bun init` in bun v0.3.0. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime. +This project was created using `bun init` in bun v0.3.0. [Bun](https://bun.com) is a fast all-in-one JavaScript runtime. diff --git a/bench/snippets/dns-prefetch.mjs b/bench/snippets/dns-prefetch.mjs index 885be66100..5ea5e24098 100644 --- a/bench/snippets/dns-prefetch.mjs +++ b/bench/snippets/dns-prefetch.mjs @@ -9,7 +9,7 @@ // To clear your DNS cache on Windows: // ipconfig /flushdns // -const url = new URL(process.argv.length > 2 ? process.argv.at(-1) : "https://bun.sh"); +const url = new URL(process.argv.length > 2 ? process.argv.at(-1) : "https://bun.com"); const hostname = url.hostname; const port = url.port ? parseInt(url.port, 10) : url.protocol === "https:" ? 443 : 80; diff --git a/bench/snippets/source-map.js b/bench/snippets/source-map.js new file mode 100644 index 0000000000..0d41bebd41 --- /dev/null +++ b/bench/snippets/source-map.js @@ -0,0 +1,28 @@ +import { SourceMap } from "node:module"; +import { readFileSync } from "node:fs"; +import { bench, run } from "../runner.mjs"; +const json = JSON.parse(readFileSync(process.argv.at(-1), "utf-8")); + +bench("new SourceMap(json)", () => { + return new SourceMap(json); +}); + +const map = new SourceMap(json); + +const toRotate = []; +for (let j = 0; j < 10000; j++) { + if (map.findEntry(0, j).generatedColumn) { + toRotate.push(j); + if (toRotate.length > 5) break; + } +} +let i = 0; +bench("findEntry (match)", () => { + return map.findEntry(0, toRotate[i++ % toRotate.length]).generatedColumn; +}); + +bench("findEntry (no match)", () => { + return map.findEntry(0, 9999).generatedColumn; +}); + +await run(); diff --git a/bench/websocket-server/README.md b/bench/websocket-server/README.md index c583e54bab..c3a8ef3575 100644 --- a/bench/websocket-server/README.md +++ b/bench/websocket-server/README.md @@ -34,4 +34,4 @@ For example, when the client sends `"foo"`, the server sends back `"John: foo"` The client script waits until it receives all the messages for each client before sending the next batch of messages. -This project was created using `bun init` in bun v0.2.1. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime. +This project was created using `bun init` in bun v0.2.1. [Bun](https://bun.com) is a fast all-in-one JavaScript runtime.
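For readers unfamiliar with the `node:module` SourceMap API that the new `bench/snippets/source-map.js` benchmark above exercises, here is a minimal sketch of constructing a map and querying it with `findEntry`. The inline payload and its single "AAAA" mapping are illustrative assumptions, not taken from the benchmark's input file:

```js
import { SourceMap } from "node:module";

// A minimal, hand-written source map payload (hypothetical; real inputs come
// from a bundler's .map output). The VLQ segment "AAAA" maps generated line 0,
// column 0 to line 0, column 0 of sources[0].
const payload = {
  version: 3,
  sources: ["input.ts"],
  names: [],
  mappings: "AAAA",
};

const map = new SourceMap(payload);

// findEntry takes zero-indexed generated line/column offsets and returns the
// nearest preceding mapping; an empty object means no mapping was found.
const entry = map.findEntry(0, 0);
console.log(entry.originalSource, entry.originalLine, entry.originalColumn);
```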
diff --git a/build.zig b/build.zig index 23459193ab..82a7294249 100644 --- a/build.zig +++ b/build.zig @@ -390,6 +390,12 @@ pub fn build(b: *Build) !void { .{ .os = .windows, .arch = .x86_64 }, }, &.{ .Debug, .ReleaseFast }); } + { + const step = b.step("check-windows-debug", "Check for semantic analysis errors on Windows"); + addMultiCheck(b, step, build_options, &.{ + .{ .os = .windows, .arch = .x86_64 }, + }, &.{.Debug}); + } { const step = b.step("check-macos", "Check for semantic analysis errors on Windows"); addMultiCheck(b, step, build_options, &.{ @@ -397,6 +403,13 @@ .{ .os = .mac, .arch = .aarch64 }, }, &.{ .Debug, .ReleaseFast }); } + { + const step = b.step("check-macos-debug", "Check for semantic analysis errors on macOS"); + addMultiCheck(b, step, build_options, &.{ + .{ .os = .mac, .arch = .x86_64 }, + .{ .os = .mac, .arch = .aarch64 }, + }, &.{.Debug}); + } { const step = b.step("check-linux", "Check for semantic analysis errors on Windows"); addMultiCheck(b, step, build_options, &.{ @@ -404,6 +417,13 @@ .{ .os = .linux, .arch = .aarch64 }, }, &.{ .Debug, .ReleaseFast }); } + { + const step = b.step("check-linux-debug", "Check for semantic analysis errors on Linux"); + addMultiCheck(b, step, build_options, &.{ + .{ .os = .linux, .arch = .x86_64 }, + .{ .os = .linux, .arch = .aarch64 }, + }, &.{.Debug}); + } // zig build translate-c-headers { diff --git a/bun.lock b/bun.lock index f4e82bb81b..345e3a7b9b 100644 --- a/bun.lock +++ b/bun.lock @@ -4,7 +4,6 @@ "": { "name": "bun", "devDependencies": { - "@types/react": "^18.3.3", "esbuild": "^0.21.4", "mitata": "^0.1.11", "peechy": "0.4.34", @@ -29,13 +28,17 @@ "@types/node": "*", }, "devDependencies": { + "@types/react": "^19", "typescript": "^5.0.2", }, + "peerDependencies": { + "@types/react": "^19", + }, }, }, "overrides": { - "bun-types": "workspace:packages/bun-types", "@types/bun": "workspace:packages/@types/bun", + "bun-types": "workspace:packages/bun-types", }, "packages": { "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="], @@ -88,9 +91,7 @@ "@types/node": ["@types/node@22.15.18", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-v1DKRfUdyW+jJhZNEI1PYy29S2YRxMV5AOO/x/SjKmW0acCIOqmbj6Haf9eHAhsPmrhlHSxEhv/1WszcLWV4cg=="], - "@types/prop-types": ["@types/prop-types@15.7.14", "", {}, "sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ=="], - - "@types/react": ["@types/react@18.3.21", "", { "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" } }, "sha512-gXLBtmlcRJeT09/sI4PxVwyrku6SaNUj/6cMubjE6T6XdY1fDmBL7r0nX0jbSZPU/Xr0KuwLLZh6aOYY5d91Xw=="], + "@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="], "bun-types": ["bun-types@workspace:packages/bun-types"], diff --git a/cmake/Options.cmake b/cmake/Options.cmake index 412cb068b3..1b20044471 100644 --- a/cmake/Options.cmake +++ b/cmake/Options.cmake @@ -139,10 +139,10 @@ endif() optionx(REVISION STRING "The git revision of the build" DEFAULT ${DEFAULT_REVISION}) # Used in process.version, process.versions.node, napi, and elsewhere -optionx(NODEJS_VERSION STRING "The version of Node.js to report" DEFAULT "22.6.0") +optionx(NODEJS_VERSION STRING "The version of Node.js to report"
DEFAULT "24.3.0") # Used in process.versions.modules and compared while loading V8 modules -optionx(NODEJS_ABI_VERSION STRING "The ABI version of Node.js to report" DEFAULT "127") +optionx(NODEJS_ABI_VERSION STRING "The ABI version of Node.js to report" DEFAULT "137") if(APPLE) set(DEFAULT_STATIC_SQLITE OFF) diff --git a/cmake/scripts/PrepareNodeHeaders.cmake b/cmake/scripts/PrepareNodeHeaders.cmake new file mode 100644 index 0000000000..40328860cf --- /dev/null +++ b/cmake/scripts/PrepareNodeHeaders.cmake @@ -0,0 +1,31 @@ +# This script prepares Node.js headers for use with Bun +# It removes conflicting OpenSSL and libuv headers since Bun uses BoringSSL and its own libuv + +if(NOT DEFINED NODE_INCLUDE_DIR) + message(FATAL_ERROR "NODE_INCLUDE_DIR not defined") +endif() + +if(NOT EXISTS "${NODE_INCLUDE_DIR}/node") + message(FATAL_ERROR "Node headers not found at ${NODE_INCLUDE_DIR}/node") +endif() + +# Remove OpenSSL headers that conflict with BoringSSL +if(EXISTS "${NODE_INCLUDE_DIR}/node/openssl") + file(REMOVE_RECURSE "${NODE_INCLUDE_DIR}/node/openssl") + message(STATUS "Removed conflicting OpenSSL headers") +endif() + +# Remove libuv headers that might conflict +if(EXISTS "${NODE_INCLUDE_DIR}/node/uv") + file(REMOVE_RECURSE "${NODE_INCLUDE_DIR}/node/uv") + message(STATUS "Removed conflicting libuv headers") +endif() + +if(EXISTS "${NODE_INCLUDE_DIR}/node/uv.h") + file(REMOVE "${NODE_INCLUDE_DIR}/node/uv.h") + message(STATUS "Removed conflicting uv.h header") +endif() + +# Add the node directory to include path for cppgc +# This is needed because cppgc internal headers use relative includes +file(WRITE "${NODE_INCLUDE_DIR}/.node-headers-prepared" "1") diff --git a/cmake/sources/CxxSources.txt b/cmake/sources/CxxSources.txt index c5e07b9814..04e1d2c79d 100644 --- a/cmake/sources/CxxSources.txt +++ b/cmake/sources/CxxSources.txt @@ -28,6 +28,7 @@ src/bun.js/bindings/BunWorkerGlobalScope.cpp src/bun.js/bindings/c-bindings.cpp src/bun.js/bindings/CallSite.cpp src/bun.js/bindings/CallSitePrototype.cpp +src/bun.js/bindings/CatchScopeBinding.cpp src/bun.js/bindings/CodeCoverage.cpp src/bun.js/bindings/ConsoleObject.cpp src/bun.js/bindings/Cookie.cpp @@ -99,6 +100,9 @@ src/bun.js/bindings/napi_finalizer.cpp src/bun.js/bindings/napi_handle_scope.cpp src/bun.js/bindings/napi_type_tag.cpp src/bun.js/bindings/napi.cpp +src/bun.js/bindings/NapiClass.cpp +src/bun.js/bindings/NapiRef.cpp +src/bun.js/bindings/NapiWeakValue.cpp src/bun.js/bindings/ncrpyto_engine.cpp src/bun.js/bindings/ncrypto.cpp src/bun.js/bindings/node/crypto/CryptoDhJob.cpp diff --git a/cmake/sources/ZigGeneratedClassesSources.txt b/cmake/sources/ZigGeneratedClassesSources.txt index 82e6ae569b..116f1cc26d 100644 --- a/cmake/sources/ZigGeneratedClassesSources.txt +++ b/cmake/sources/ZigGeneratedClassesSources.txt @@ -7,12 +7,14 @@ src/bun.js/api/h2.classes.ts src/bun.js/api/html_rewriter.classes.ts src/bun.js/api/JSBundler.classes.ts src/bun.js/api/postgres.classes.ts +src/bun.js/api/ResumableSink.classes.ts src/bun.js/api/S3Client.classes.ts src/bun.js/api/S3Stat.classes.ts src/bun.js/api/server.classes.ts src/bun.js/api/Shell.classes.ts src/bun.js/api/ShellArgs.classes.ts src/bun.js/api/sockets.classes.ts +src/bun.js/api/sourcemap.classes.ts src/bun.js/api/streams.classes.ts src/bun.js/api/valkey.classes.ts src/bun.js/api/zlib.classes.ts diff --git a/cmake/sources/ZigSources.txt b/cmake/sources/ZigSources.txt index 7fc059629f..2261399b49 100644 --- a/cmake/sources/ZigSources.txt +++ b/cmake/sources/ZigSources.txt @@ -10,7 +10,26 @@ 
src/allocators/NullableAllocator.zig src/analytics/analytics_schema.zig src/analytics/analytics_thread.zig src/api/schema.zig +src/ast/Ast.zig +src/ast/ASTMemoryAllocator.zig +src/ast/B.zig src/ast/base.zig +src/ast/Binding.zig +src/ast/BundledAst.zig +src/ast/CharFreq.zig +src/ast/E.zig +src/ast/Expr.zig +src/ast/G.zig +src/ast/Macro.zig +src/ast/NewStore.zig +src/ast/Op.zig +src/ast/S.zig +src/ast/Scope.zig +src/ast/ServerComponentBoundary.zig +src/ast/Stmt.zig +src/ast/Symbol.zig +src/ast/TS.zig +src/ast/UseDirective.zig src/async/posix_event_loop.zig src/async/stub_event_loop.zig src/async/windows_event_loop.zig @@ -73,6 +92,10 @@ src/bun.js/api/server/StaticRoute.zig src/bun.js/api/server/WebSocketServerContext.zig src/bun.js/api/streams.classes.zig src/bun.js/api/Timer.zig +src/bun.js/api/Timer/EventLoopTimer.zig +src/bun.js/api/Timer/ImmediateObject.zig +src/bun.js/api/Timer/TimeoutObject.zig +src/bun.js/api/Timer/TimerObjectInternals.zig src/bun.js/api/TOMLObject.zig src/bun.js/api/UnsafeObject.zig src/bun.js/bindgen_test.zig @@ -81,6 +104,7 @@ src/bun.js/bindings/AnyPromise.zig src/bun.js/bindings/bun-simdutf.zig src/bun.js/bindings/CachedBytecode.zig src/bun.js/bindings/CallFrame.zig +src/bun.js/bindings/CatchScope.zig src/bun.js/bindings/codegen.zig src/bun.js/bindings/CommonAbortReason.zig src/bun.js/bindings/CommonStrings.zig @@ -247,6 +271,7 @@ src/bun.js/webcore/prompt.zig src/bun.js/webcore/ReadableStream.zig src/bun.js/webcore/Request.zig src/bun.js/webcore/Response.zig +src/bun.js/webcore/ResumableSink.zig src/bun.js/webcore/S3Client.zig src/bun.js/webcore/S3File.zig src/bun.js/webcore/S3Stat.zig @@ -319,6 +344,7 @@ src/cli/package_manager_command.zig src/cli/patch_command.zig src/cli/patch_commit_command.zig src/cli/pm_trusted_command.zig +src/cli/pm_version_command.zig src/cli/pm_view_command.zig src/cli/publish_command.zig src/cli/remove_command.zig @@ -498,12 +524,29 @@ src/hive_array.zig src/hmac.zig src/HTMLScanner.zig src/http.zig -src/http/header_builder.zig -src/http/method.zig -src/http/mime_type.zig -src/http/url_path.zig +src/http/AsyncHTTP.zig +src/http/CertificateInfo.zig +src/http/Decompressor.zig +src/http/Encoding.zig +src/http/FetchRedirect.zig +src/http/HeaderBuilder.zig +src/http/Headers.zig +src/http/HTTPCertError.zig +src/http/HTTPContext.zig +src/http/HTTPRequestBody.zig +src/http/HTTPThread.zig +src/http/InitError.zig +src/http/InternalState.zig +src/http/Method.zig +src/http/MimeType.zig +src/http/ProxyTunnel.zig +src/http/SendFile.zig +src/http/Signals.zig +src/http/ThreadSafeStreamBuffer.zig +src/http/URLPath.zig src/http/websocket_client.zig src/http/websocket_client/CppWebSocket.zig +src/http/websocket_client/WebSocketDeflate.zig src/http/websocket_client/WebSocketUpgradeClient.zig src/http/websocket_http_client.zig src/http/websocket.zig @@ -513,11 +556,17 @@ src/import_record.zig src/ini.zig src/install/bin.zig src/install/dependency.zig +src/install/ExternalSlice.zig src/install/extract_tarball.zig src/install/hoisted_install.zig src/install/install_binding.zig src/install/install.zig src/install/integrity.zig +src/install/isolated_install.zig +src/install/isolated_install/Hardlinker.zig +src/install/isolated_install/Installer.zig +src/install/isolated_install/Store.zig +src/install/isolated_install/Symlinker.zig src/install/lifecycle_script_runner.zig src/install/lockfile.zig src/install/lockfile/Buffers.zig @@ -534,12 +583,28 @@ src/install/lockfile/printer/tree_printer.zig src/install/lockfile/printer/Yarn.zig 
src/install/lockfile/Tree.zig src/install/migration.zig +src/install/NetworkTask.zig src/install/npm.zig src/install/PackageInstall.zig src/install/PackageInstaller.zig +src/install/PackageManager.zig src/install/PackageManager/CommandLineArguments.zig +src/install/PackageManager/install_with_manager.zig src/install/PackageManager/PackageJSONEditor.zig +src/install/PackageManager/PackageManagerDirectories.zig +src/install/PackageManager/PackageManagerEnqueue.zig +src/install/PackageManager/PackageManagerLifecycle.zig src/install/PackageManager/PackageManagerOptions.zig +src/install/PackageManager/PackageManagerResolution.zig +src/install/PackageManager/patchPackage.zig +src/install/PackageManager/processDependencyList.zig +src/install/PackageManager/ProgressStrings.zig +src/install/PackageManager/runTasks.zig +src/install/PackageManager/updatePackageJSONAndInstall.zig +src/install/PackageManager/UpdateRequest.zig +src/install/PackageManager/WorkspacePackageJSONCache.zig +src/install/PackageManagerTask.zig +src/install/PackageManifestMap.zig src/install/padding_checker.zig src/install/patch_install.zig src/install/repository.zig @@ -551,6 +616,7 @@ src/install/windows-shim/bun_shim_impl.zig src/io/heap.zig src/io/io.zig src/io/MaxBuf.zig +src/io/openForWriting.zig src/io/PipeReader.zig src/io/pipes.zig src/io/PipeWriter.zig @@ -583,6 +649,10 @@ src/options.zig src/output.zig src/OutputFile.zig src/patch.zig +src/paths.zig +src/paths/EnvPath.zig +src/paths/path_buffer_pool.zig +src/paths/Path.zig src/perf.zig src/pool.zig src/Progress.zig @@ -621,6 +691,7 @@ src/semver/SemverString.zig src/semver/SlicedString.zig src/semver/Version.zig src/sha.zig +src/shell/AllocScope.zig src/shell/braces.zig src/shell/Builtin.zig src/shell/builtin/basename.zig @@ -665,14 +736,83 @@ src/shell/states/Stmt.zig src/shell/states/Subshell.zig src/shell/subproc.zig src/shell/util.zig +src/shell/Yield.zig src/sourcemap/CodeCoverage.zig +src/sourcemap/JSSourceMap.zig src/sourcemap/LineOffsetTable.zig src/sourcemap/sourcemap.zig src/sourcemap/VLQ.zig -src/sql/DataCell.zig src/sql/postgres.zig -src/sql/postgres/postgres_protocol.zig -src/sql/postgres/postgres_types.zig +src/sql/postgres/AnyPostgresError.zig +src/sql/postgres/AuthenticationState.zig +src/sql/postgres/CommandTag.zig +src/sql/postgres/ConnectionFlags.zig +src/sql/postgres/Data.zig +src/sql/postgres/DataCell.zig +src/sql/postgres/DebugSocketMonitorReader.zig +src/sql/postgres/DebugSocketMonitorWriter.zig +src/sql/postgres/ObjectIterator.zig +src/sql/postgres/PostgresCachedStructure.zig +src/sql/postgres/PostgresProtocol.zig +src/sql/postgres/PostgresRequest.zig +src/sql/postgres/PostgresSQLConnection.zig +src/sql/postgres/PostgresSQLContext.zig +src/sql/postgres/PostgresSQLQuery.zig +src/sql/postgres/PostgresSQLQueryResultMode.zig +src/sql/postgres/PostgresSQLStatement.zig +src/sql/postgres/PostgresTypes.zig +src/sql/postgres/protocol/ArrayList.zig +src/sql/postgres/protocol/Authentication.zig +src/sql/postgres/protocol/BackendKeyData.zig +src/sql/postgres/protocol/Close.zig +src/sql/postgres/protocol/ColumnIdentifier.zig +src/sql/postgres/protocol/CommandComplete.zig +src/sql/postgres/protocol/CopyData.zig +src/sql/postgres/protocol/CopyFail.zig +src/sql/postgres/protocol/CopyInResponse.zig +src/sql/postgres/protocol/CopyOutResponse.zig +src/sql/postgres/protocol/DataRow.zig +src/sql/postgres/protocol/DecoderWrap.zig +src/sql/postgres/protocol/Describe.zig +src/sql/postgres/protocol/ErrorResponse.zig +src/sql/postgres/protocol/Execute.zig 
+src/sql/postgres/protocol/FieldDescription.zig +src/sql/postgres/protocol/FieldMessage.zig +src/sql/postgres/protocol/FieldType.zig +src/sql/postgres/protocol/NegotiateProtocolVersion.zig +src/sql/postgres/protocol/NewReader.zig +src/sql/postgres/protocol/NewWriter.zig +src/sql/postgres/protocol/NoticeResponse.zig +src/sql/postgres/protocol/NotificationResponse.zig +src/sql/postgres/protocol/ParameterDescription.zig +src/sql/postgres/protocol/ParameterStatus.zig +src/sql/postgres/protocol/Parse.zig +src/sql/postgres/protocol/PasswordMessage.zig +src/sql/postgres/protocol/PortalOrPreparedStatement.zig +src/sql/postgres/protocol/ReadyForQuery.zig +src/sql/postgres/protocol/RowDescription.zig +src/sql/postgres/protocol/SASLInitialResponse.zig +src/sql/postgres/protocol/SASLResponse.zig +src/sql/postgres/protocol/StackReader.zig +src/sql/postgres/protocol/StartupMessage.zig +src/sql/postgres/protocol/TransactionStatusIndicator.zig +src/sql/postgres/protocol/WriteWrap.zig +src/sql/postgres/protocol/zHelpers.zig +src/sql/postgres/QueryBindingIterator.zig +src/sql/postgres/SASL.zig +src/sql/postgres/Signature.zig +src/sql/postgres/SocketMonitor.zig +src/sql/postgres/SSLMode.zig +src/sql/postgres/Status.zig +src/sql/postgres/TLSStatus.zig +src/sql/postgres/types/bool.zig +src/sql/postgres/types/bytea.zig +src/sql/postgres/types/date.zig +src/sql/postgres/types/int_types.zig +src/sql/postgres/types/json.zig +src/sql/postgres/types/numeric.zig +src/sql/postgres/types/PostgresString.zig +src/sql/postgres/types/Tag.zig src/StandaloneModuleGraph.zig src/StaticHashMap.zig src/string_immutable.zig diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 2c7d1574d0..0b0879d3ec 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -650,8 +650,13 @@ register_command( -DDOWNLOAD_PATH=${NODEJS_HEADERS_PATH} -DDOWNLOAD_URL=https://nodejs.org/dist/v${NODEJS_VERSION}/node-v${NODEJS_VERSION}-headers.tar.gz -P ${CWD}/cmake/scripts/DownloadUrl.cmake + COMMAND + ${CMAKE_COMMAND} + -DNODE_INCLUDE_DIR=${NODEJS_HEADERS_PATH}/include + -P ${CWD}/cmake/scripts/PrepareNodeHeaders.cmake OUTPUTS ${NODEJS_HEADERS_PATH}/include/node/node_version.h + ${NODEJS_HEADERS_PATH}/include/.node-headers-prepared ) list(APPEND BUN_CPP_SOURCES @@ -763,6 +768,7 @@ target_include_directories(${bun} PRIVATE ${VENDOR_PATH} ${VENDOR_PATH}/picohttpparser ${NODEJS_HEADERS_PATH}/include + ${NODEJS_HEADERS_PATH}/include/node ) if(NOT WIN32) diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index 502dbb9b40..c17fc10d50 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION 397dafc9721b8f8046f9448abb6dbc14efe096d3) + set(WEBKIT_VERSION 29bbdff0f94f362891f8e007ae2a73f9bc3e66d3) endif() string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX) diff --git a/completions/bun.zsh b/completions/bun.zsh index a62c6dc67e..3680e71814 100644 --- a/completions/bun.zsh +++ b/completions/bun.zsh @@ -261,6 +261,7 @@ _bun_pm_completion() { 'hash-string\:"print the string used to hash the lockfile" ' 'hash-print\:"print the hash stored in the current lockfile" ' 'cache\:"print the path to the cache folder" ' + 'version\:"bump the version in package.json and create a git tag" ' ) _alternative "args:cmd3:(($sub_commands))" @@ -299,6 +300,40 @@ _bun_pm_completion() { 
$pmargs && ret=0 + ;; + version) + version_args=( + "patch[increment patch version]" + "minor[increment minor version]" + "major[increment major version]" + "prepatch[increment patch version and add pre-release]" + "preminor[increment minor version and add pre-release]" + "premajor[increment major version and add pre-release]" + "prerelease[increment pre-release version]" + "from-git[use version from latest git tag]" + ) + + pmargs=( + "--no-git-tag-version[don't create a git commit and tag]" + "--allow-same-version[allow bumping to the same version]" + "-m[use the given message for the commit]:message" + "--message[use the given message for the commit]:message" + "--preid[identifier to prefix pre-release versions]:preid" + ) + + _arguments -s -C \ + '1: :->cmd' \ + '2: :->cmd2' \ + '3: :->increment' \ + $pmargs && + ret=0 + + case $state in + increment) + _alternative "args:increment:(($version_args))" + ;; + esac + ;; esac diff --git a/docs/api/binary-data.md b/docs/api/binary-data.md index 8803765040..bd9bed578b 100644 --- a/docs/api/binary-data.md +++ b/docs/api/binary-data.md @@ -522,7 +522,7 @@ for await (const chunk of stream) { } ``` -For a more complete discussion of streams in Bun, see [API > Streams](https://bun.sh/docs/api/streams). +For a more complete discussion of streams in Bun, see [API > Streams](https://bun.com/docs/api/streams). ## Conversion diff --git a/docs/api/cc.md b/docs/api/cc.md index 0cdf0b0a75..383cdfaba0 100644 --- a/docs/api/cc.md +++ b/docs/api/cc.md @@ -2,7 +2,7 @@ ## Usage (cc in `bun:ffi`) -See the [introduction blog post](https://bun.sh/blog/compile-and-run-c-in-js) for more information. +See the [introduction blog post](https://bun.com/blog/compile-and-run-c-in-js) for more information. JavaScript: diff --git a/docs/api/dns.md b/docs/api/dns.md index 5cb50ee549..68a90631d2 100644 --- a/docs/api/dns.md +++ b/docs/api/dns.md @@ -3,7 +3,7 @@ Bun implements the `node:dns` module. ```ts import * as dns from "node:dns"; -const addrs = await dns.promises.resolve4("bun.sh", { ttl: true }); +const addrs = await dns.promises.resolve4("bun.com", { ttl: true }); console.log(addrs); // => [{ address: "172.67.161.226", family: 4, ttl: 0 }, ...] ``` @@ -54,10 +54,10 @@ Here's an example: ```ts import { dns } from "bun"; -dns.prefetch("bun.sh", 443); +dns.prefetch("bun.com", 443); // // ... sometime later ... -await fetch("https://bun.sh"); +await fetch("https://bun.com"); ``` ### `dns.getCacheStats()` diff --git a/docs/api/fetch.md b/docs/api/fetch.md index 80aaecc46c..3a69755488 100644 --- a/docs/api/fetch.md +++ b/docs/api/fetch.md @@ -267,7 +267,7 @@ const response = await fetch("s3://my-bucket/path/to/object", { Note: Only PUT and POST methods support request bodies when using S3. For uploads, Bun automatically uses multipart upload for streaming bodies. -You can read more about Bun's S3 support in the [S3](https://bun.sh/docs/api/s3) documentation. +You can read more about Bun's S3 support in the [S3](https://bun.com/docs/api/s3) documentation. #### File URLs - `file://` @@ -376,14 +376,14 @@ To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful ```ts import { dns } from "bun"; -dns.prefetch("bun.sh"); +dns.prefetch("bun.com"); ``` #### DNS caching By default, Bun caches and deduplicates DNS queries in-memory for up to 30 seconds. You can see the cache stats by calling `dns.getCacheStats()`: -To learn more about DNS caching in Bun, see the [DNS caching](https://bun.sh/docs/api/dns) documentation. 
+To learn more about DNS caching in Bun, see the [DNS caching](https://bun.com/docs/api/dns) documentation. ### Preconnect to a host @@ -392,7 +392,7 @@ To preconnect to a host, you can use the `fetch.preconnect` API. This API is use ```ts import { fetch } from "bun"; -fetch.preconnect("https://bun.sh"); +fetch.preconnect("https://bun.com"); ``` Note: calling `fetch` immediately after `fetch.preconnect` will not make your request faster. Preconnecting only helps if you know you'll need to connect to a host soon, but you're not ready to make the request yet. @@ -402,7 +402,7 @@ Note: calling `fetch` immediately after `fetch.preconnect` will not make your re To preconnect to a host at startup, you can pass `--fetch-preconnect`: ```sh -$ bun --fetch-preconnect https://bun.sh ./my-script.ts +$ bun --fetch-preconnect https://bun.com ./my-script.ts ``` This is sort of like `<link rel="preconnect">` in HTML. diff --git a/docs/api/file-io.md b/docs/api/file-io.md index f8e3102783..506ec9b051 100644 --- a/docs/api/file-io.md +++ b/docs/api/file-io.md @@ -1,8 +1,8 @@ {% callout %} - + -**Note** — The `Bun.file` and `Bun.write` APIs documented on this page are heavily optimized and represent the recommended way to perform file-system tasks using Bun. For operations that are not yet available with `Bun.file`, such as `mkdir` or `readdir`, you can use Bun's [nearly complete](https://bun.sh/docs/runtime/nodejs-apis#node-fs) implementation of the [`node:fs`](https://nodejs.org/api/fs.html) module. +**Note** — The `Bun.file` and `Bun.write` APIs documented on this page are heavily optimized and represent the recommended way to perform file-system tasks using Bun. For operations that are not yet available with `Bun.file`, such as `mkdir` or `readdir`, you can use Bun's [nearly complete](https://bun.com/docs/runtime/nodejs-apis#node-fs) implementation of the [`node:fs`](https://nodejs.org/api/fs.html) module. {% /callout %} @@ -208,7 +208,7 @@ await Bun.write(Bun.stdout, input); To write the body of an HTTP response to disk: ```ts -const response = await fetch("https://bun.sh"); +const response = await fetch("https://bun.com"); await Bun.write("index.html", response); ``` diff --git a/docs/api/globals.md b/docs/api/globals.md index 8e5a89651a..1a98bb0899 100644 --- a/docs/api/globals.md +++ b/docs/api/globals.md @@ -34,7 +34,7 @@ Bun implements the following globals. - [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer) - Node.js -- See [Node.js > `Buffer`](https://bun.sh/docs/runtime/nodejs-apis#node-buffer) +- See [Node.js > `Buffer`](https://bun.com/docs/runtime/nodejs-apis#node-buffer) --- @@ -172,7 +172,7 @@ Bun implements the following globals. - [`global`](https://nodejs.org/api/globals.html#global) - Node.js -- See [Node.js > `global`](https://bun.sh/docs/runtime/nodejs-apis#global). +- See [Node.js > `global`](https://bun.com/docs/runtime/nodejs-apis#global). --- @@ -188,7 +188,7 @@ Bun implements the following globals. --- -- [`HTMLRewriter`](https://bun.sh/docs/api/html-rewriter) +- [`HTMLRewriter`](https://bun.com/docs/api/html-rewriter) - Cloudflare -   --- @@ -220,7 +220,7 @@
- [`process`](https://nodejs.org/api/process.html) - Node.js -- See [Node.js > `process`](https://bun.sh/docs/runtime/nodejs-apis#node-process) +- See [Node.js > `process`](https://bun.com/docs/runtime/nodejs-apis#node-process) --- diff --git a/docs/api/http.md b/docs/api/http.md index 4e4fd43e62..e5b052fed9 100644 --- a/docs/api/http.md +++ b/docs/api/http.md @@ -1,7 +1,7 @@ The page primarily documents the Bun-native `Bun.serve` API. Bun also implements [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) and the Node.js [`http`](https://nodejs.org/api/http.html) and [`https`](https://nodejs.org/api/https.html) modules. {% callout %} -These modules have been re-implemented to use Bun's fast internal HTTP infrastructure. Feel free to use these modules directly; frameworks like [Express](https://expressjs.com/) that depend on these modules should work out of the box. For granular compatibility information, see [Runtime > Node.js APIs](https://bun.sh/docs/runtime/nodejs-apis). +These modules have been re-implemented to use Bun's fast internal HTTP infrastructure. Feel free to use these modules directly; frameworks like [Express](https://expressjs.com/) that depend on these modules should work out of the box. For granular compatibility information, see [Runtime > Node.js APIs](https://bun.com/docs/runtime/nodejs-apis). {% /callout %} To start a high-performance HTTP server with a clean API, the recommended approach is [`Bun.serve`](#start-a-server-bun-serve). @@ -149,7 +149,7 @@ Bun.serve({ }), // Redirects - "/blog": Response.redirect("https://bun.sh/blog"), + "/blog": Response.redirect("https://bun.com/blog"), // API responses "/api/config": Response.json({ @@ -326,7 +326,11 @@ Bun.serve({ ### HTML imports -To add a client-side single-page app, you can use an HTML import: +Bun supports importing HTML files directly into your server code, enabling full-stack applications with both server-side and client-side code. HTML imports work in two modes: + +**Development (`bun --hot`):** Assets are bundled on-demand at runtime, enabling hot module replacement (HMR) for a fast, iterative development experience. When you change your frontend code, the browser automatically updates without a full page reload. + +**Production (`bun build`):** When building with `bun build --target=bun`, the `import index from "./index.html"` statement resolves to a pre-built manifest object containing all bundled client assets. `Bun.serve` consumes this manifest to serve optimized assets with zero runtime bundling overhead. This is ideal for deploying to production. ```ts import myReactSinglePageApp from "./index.html"; @@ -338,9 +342,9 @@ Bun.serve({ }); ``` -HTML imports don't just serve HTML. It's a full-featured frontend bundler, transpiler, and toolkit built using Bun's [bundler](https://bun.sh/docs/bundler), JavaScript transpiler and CSS parser. +HTML imports don't just serve HTML — it's a full-featured frontend bundler, transpiler, and toolkit built using Bun's [bundler](https://bun.com/docs/bundler), JavaScript transpiler and CSS parser. You can use this to build full-featured frontends with React, TypeScript, Tailwind CSS, and more. -You can use this to build a full-featured frontend with React, TypeScript, Tailwind CSS, and more. Check out [/docs/bundler/fullstack](https://bun.sh/docs/bundler/fullstack) to learn more. 
+For a complete guide on building full-stack applications with HTML imports, including detailed examples and best practices, see [/docs/bundler/fullstack](https://bun.com/docs/bundler/fullstack). ### Practical example: REST API @@ -601,7 +605,7 @@ Bun.serve({ ``` {% callout %} -[Learn more about debugging in Bun](https://bun.sh/docs/runtime/debugger) +[Learn more about debugging in Bun](https://bun.com/docs/runtime/debugger) {% /callout %} The call to `Bun.serve` returns a `Server` object. To stop the server, call the `.stop()` method. @@ -768,7 +772,7 @@ Instead of passing the server options into `Bun.serve`, `export default` it. Thi $ bun --hot server.ts ``` --> - + ## Streaming files diff --git a/docs/api/s3.md b/docs/api/s3.md index 857e77ab49..68dae77a39 100644 --- a/docs/api/s3.md +++ b/docs/api/s3.md @@ -4,7 +4,7 @@ Production servers often read, upload, and write files to S3-compatible object s ### Bun's S3 API is fast -{% image src="https://bun.sh/bun-s3-node.gif" alt="Bun's S3 API is fast" caption="Left: Bun v1.1.44. Right: Node.js v23.6.0" /%} +{% image src="https://bun.com/bun-s3-node.gif" alt="Bun's S3 API is fast" caption="Left: Bun v1.1.44. Right: Node.js v23.6.0" /%} {% /callout %} @@ -160,7 +160,8 @@ const writer = s3file.writer({ partSize: 5 * 1024 * 1024, }); for (let i = 0; i < 10; i++) { - await writer.write(bigFile); + writer.write(bigFile); + await writer.flush(); } await writer.end(); ``` diff --git a/docs/api/spawn.md b/docs/api/spawn.md index d570dc09f2..ad794bb7cf 100644 --- a/docs/api/spawn.md +++ b/docs/api/spawn.md @@ -34,7 +34,7 @@ const proc = Bun.spawn(["cat"], { ), }); -const text = await new Response(proc.stdout).text(); +const text = await proc.stdout.text(); console.log(text); // "const input = "hello world".repeat(400); ..." ``` @@ -113,14 +113,34 @@ proc.stdin.flush(); proc.stdin.end(); ``` +Passing a `ReadableStream` to `stdin` lets you pipe data from a JavaScript `ReadableStream` directly to the subprocess's input: + +```ts +const stream = new ReadableStream({ + start(controller) { + controller.enqueue("Hello from "); + controller.enqueue("ReadableStream!"); + controller.close(); + }, +}); + +const proc = Bun.spawn(["cat"], { + stdin: stream, + stdout: "pipe", +}); + +const output = await new Response(proc.stdout).text(); +console.log(output); // "Hello from ReadableStream!" +``` + ## Output streams You can read results from the subprocess via the `stdout` and `stderr` properties. By default these are instances of `ReadableStream`. ```ts const proc = Bun.spawn(["bun", "--version"]); -const text = await new Response(proc.stdout).text(); -console.log(text); // => "$BUN_LATEST_VERSION" +const text = await proc.stdout.text(); +console.log(text); // => "$BUN_LATEST_VERSION\n" ``` Configure the output stream by passing one of the following values to `stdout/stderr`: diff --git a/docs/api/utils.md b/docs/api/utils.md index 979e406851..76571fd97d 100644 --- a/docs/api/utils.md +++ b/docs/api/utils.md @@ -234,7 +234,7 @@ const currentFile = import.meta.url; Bun.openInEditor(currentFile); ``` -You can override this via the `debug.editor` setting in your [`bunfig.toml`](https://bun.sh/docs/runtime/bunfig). +You can override this via the `debug.editor` setting in your [`bunfig.toml`](https://bun.com/docs/runtime/bunfig). ```toml-diff#bunfig.toml + [debug] @@ -582,11 +582,11 @@ Compresses a `Uint8Array` using zlib's DEFLATE algorithm. 
const buf = Buffer.from("hello".repeat(100)); const compressed = Bun.deflateSync(buf); -buf; // => Uint8Array(25) -compressed; // => Uint8Array(10) +buf; // => Buffer(500) +compressed; // => Uint8Array(12) ``` -The second argument supports the same set of configuration options as [`Bun.gzipSync`](#bungzipsync). +The second argument supports the same set of configuration options as [`Bun.gzipSync`](#bun-gzipsync). ## `Bun.inflateSync()` @@ -704,7 +704,7 @@ Bun.nanoseconds(); Bun implements a set of convenience functions for asynchronously consuming the body of a `ReadableStream` and converting it to various binary formats. ```ts -const stream = (await fetch("https://bun.sh")).body; +const stream = (await fetch("https://bun.com")).body; stream; // => ReadableStream await Bun.readableStreamToArrayBuffer(stream); @@ -787,7 +787,7 @@ const buffer = Buffer.alloc(1024 * 1024); estimateShallowMemoryUsageOf(buffer); // => 1048624 -const req = new Request("https://bun.sh"); +const req = new Request("https://bun.com"); estimateShallowMemoryUsageOf(req); // => 167 diff --git a/docs/bundler/executables.md b/docs/bundler/executables.md index b30057caa9..76d37954cf 100644 --- a/docs/bundler/executables.md +++ b/docs/bundler/executables.md @@ -126,6 +126,81 @@ The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that erro The `--bytecode` argument enables bytecode compilation. Every time you run JavaScript code in Bun, JavaScriptCore (the engine) will compile your source code into bytecode. We can move this parsing work from runtime to bundle time, saving you startup time. +## Full-stack executables + +{% note %} + +New in Bun v1.2.17 + +{% /note %} + +Bun's `--compile` flag can create standalone executables that contain both server and client code, making it ideal for full-stack applications. When you import an HTML file in your server code, Bun automatically bundles all frontend assets (JavaScript, CSS, etc.) and embeds them into the executable. When Bun sees the HTML import on the server, it kicks off a frontend build process to bundle JavaScript, CSS, and other assets. + +{% codetabs %} + +```ts#server.ts +import { serve } from "bun"; +import index from "./index.html"; + +const server = serve({ + routes: { + "/": index, + "/api/hello": { GET: () => Response.json({ message: "Hello from API" }) }, + }, +}); + +console.log(`Server running at http://localhost:${server.port}`); +``` + +```html#index.html + + + + My App + + + +
+ <div>Hello World</div> + <script type="module" src="./app.js"></script>
+ + + +``` + +```js#app.js +console.log("Hello from the client!"); +``` + +```css#styles.css +body { + background-color: #f0f0f0; +} +``` + +{% /codetabs %} + +To build this into a single executable: + +```sh +bun build --compile ./server.ts --outfile myapp +``` + +This creates a self-contained binary that includes: + +- Your server code +- The Bun runtime +- All frontend assets (HTML, CSS, JavaScript) +- Any npm packages used by your server + +The result is a single file that can be deployed anywhere without needing Node.js, Bun, or any dependencies installed. Just run: + +```sh +./myapp +``` + +Bun automatically handles serving the frontend assets with proper MIME types and cache headers. The HTML import is replaced with a manifest object that `Bun.serve` uses to efficiently serve pre-bundled assets. + +For more details on building full-stack applications with Bun, see the [full-stack guide](/docs/bundler/fullstack). + ## Worker To use workers in a standalone executable, add the worker's entrypoint to the CLI arguments: @@ -174,7 +249,7 @@ $ ./hello Standalone executables support embedding files. -To embed files into an executable with `bun build --compile`, import the file in your code +To embed files into an executable with `bun build --compile`, import the file in your code. ```ts // this becomes an internal file path @@ -353,5 +428,4 @@ Currently, the `--compile` flag can only accept a single entrypoint at a time an - `--splitting` - `--public-path` - `--target=node` or `--target=browser` -- `--format` - always outputs a binary executable. Internally, it's almost esm. - `--no-bundle` - we always bundle everything into the executable. diff --git a/docs/bundler/fullstack.md b/docs/bundler/fullstack.md index e89b29af2e..16ed1d8402 100644 --- a/docs/bundler/fullstack.md +++ b/docs/bundler/fullstack.md @@ -1,5 +1,3 @@ -Using `Bun.serve()`'s `routes` option, you can run your frontend and backend in the same app with no extra steps. - To get started, import HTML files and pass them to the `routes` option in `Bun.serve()`. ```ts @@ -234,7 +232,92 @@ When `console: true` is set, Bun will stream console logs from the browser to th #### Production mode -When serving your app in production, set `development: false` in `Bun.serve()`. +Hot reloading and `development: true` helps you iterate quickly, but in production, your server should be as fast as possible and have as few external dependencies as possible. + +##### Ahead of time bundling (recommended) + +As of Bun v1.2.17, you can use `Bun.build` or `bun build` to bundle your full-stack application ahead of time. + +```sh +$ bun build --target=bun --production --outdir=dist ./src/index.ts +``` + +When Bun's bundler sees an HTML import from server-side code, it will bundle the referenced JavaScript/TypeScript/TSX/JSX and CSS files into a manifest object that Bun.serve() can use to serve the assets. 
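The CLI invocation above also has a programmatic counterpart. Here is a minimal sketch using `Bun.build`; the entrypoint path is taken from the example, and `minify` is an assumption that only approximates what `--production` enables:

```ts
const result = await Bun.build({
  entrypoints: ["./src/index.ts"], // server entry that imports an HTML file
  outdir: "./dist",
  target: "bun",
  minify: true, // rough stand-in for the CLI's --production flag
});

if (!result.success) {
  // Surface bundler diagnostics and fail the build.
  for (const log of result.logs) console.error(log);
  process.exit(1);
}
```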
+ +```ts +import { serve } from "bun"; +import index from "./index.html"; + +serve({ + routes: { "/": index }, +}); +``` + +{% details summary="Internally, the `index` variable is a manifest object that looks something like this" %} + +```json +{ + "index": "./index.html", + "files": [ + { + "input": "index.html", + "path": "./index-f2me3qnf.js", + "loader": "js", + "isEntry": true, + "headers": { + "etag": "eet6gn75", + "content-type": "text/javascript;charset=utf-8" + } + }, + { + "input": "index.html", + "path": "./index.html", + "loader": "html", + "isEntry": true, + "headers": { + "etag": "r9njjakd", + "content-type": "text/html;charset=utf-8" + } + }, + { + "input": "index.html", + "path": "./index-gysa5fmk.css", + "loader": "css", + "isEntry": true, + "headers": { + "etag": "50zb7x61", + "content-type": "text/css;charset=utf-8" + } + }, + { + "input": "logo.svg", + "path": "./logo-kygw735p.svg", + "loader": "file", + "isEntry": false, + "headers": { + "etag": "kygw735p", + "content-type": "application/octet-stream" + } + }, + { + "input": "react.svg", + "path": "./react-ck11dneg.svg", + "loader": "file", + "isEntry": false, + "headers": { + "etag": "ck11dneg", + "content-type": "application/octet-stream" + } + } + ] +} +``` + +{% /details %} + +##### Runtime bundling + +When adding a build step is too complicated, you can set `development: false` in `Bun.serve()`. - Enable in-memory caching of bundled assets. Bun will bundle assets lazily on the first request to an `.html` file, and cache the result in memory until the server restarts. - Enables `Cache-Control` headers and `ETag` headers @@ -242,7 +325,7 @@ When serving your app in production, set `development: false` in `Bun.serve()`. ## Plugins -Bun's [bundler plugins](https://bun.sh/docs/bundler/plugins) are also supported when bundling static routes. +Bun's [bundler plugins](https://bun.com/docs/bundler/plugins) are also supported when bundling static routes. To configure plugins for `Bun.serve`, add a `plugins` array in the `[serve.static]` section of your `bunfig.toml`. @@ -282,7 +365,7 @@ Or in your CSS: ### Custom plugins -Any JS file or module which exports a [valid bundler plugin object](https://bun.sh/docs/bundler/plugins#usage) (essentially an object with a `name` and `setup` field) can be placed inside the `plugins` array: +Any JS file or module which exports a [valid bundler plugin object](https://bun.com/docs/bundler/plugins#usage) (essentially an object with a `name` and `setup` field) can be placed inside the `plugins` array: ```toml#bunfig.toml [serve.static] @@ -298,7 +381,6 @@ Note: this is currently in `bunfig.toml` to make it possible to know statically Bun uses [`HTMLRewriter`](/docs/api/html-rewriter) to scan for `
- Bun + Bun

bun --hot websockets

diff --git a/misctools/http_bench.zig b/misctools/http_bench.zig index 4fea0260fa..63c6d7e172 100644 --- a/misctools/http_bench.zig +++ b/misctools/http_bench.zig @@ -12,7 +12,7 @@ const C = bun.C; const clap = @import("../src/deps/zig-clap/clap.zig"); const URL = @import("../src/url.zig").URL; -const Method = @import("../src/http/method.zig").Method; +const Method = @import("../src/http/Method.zig").Method; const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType; const HeadersTuple = ColonListType(string, noop_resolver); const path_handler = @import("../src/resolver/resolve_path.zig"); diff --git a/misctools/machbench.zig b/misctools/machbench.zig index 874f8a6c43..4e4b1549a8 100644 --- a/misctools/machbench.zig +++ b/misctools/machbench.zig @@ -14,7 +14,7 @@ const clap = @import("../src/deps/zig-clap/clap.zig"); const URL = @import("../src/url.zig").URL; const Headers = bun.http.Headers; -const Method = @import("../src/http/method.zig").Method; +const Method = @import("../src/http/Method.zig").Method; const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType; const HeadersTuple = ColonListType(string, noop_resolver); const path_handler = @import("../src/resolver/resolve_path.zig"); diff --git a/package.json b/package.json index 764e813718..c5bcfc55a8 100644 --- a/package.json +++ b/package.json @@ -1,13 +1,12 @@ { "private": true, "name": "bun", - "version": "1.2.17", + "version": "1.2.19", "workspaces": [ "./packages/bun-types", "./packages/@types/bun" ], "devDependencies": { - "@types/react": "^18.3.3", "esbuild": "^0.21.4", "mitata": "^0.1.11", "peechy": "0.4.34", @@ -24,7 +23,8 @@ }, "scripts": { "build": "bun run build:debug", - "watch": "bun run zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib -Doverride-no-export-cpp-apis=true", + "ci": "bun scripts/buildkite-failures.ts ", + "watch": "bun run zig build check --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib", "watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib", "bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug", "bd": "BUN_DEBUG_QUIET_LOGS=1 bun bd:v", @@ -74,7 +74,7 @@ "clang-tidy:diff": "bun run analysis --target clang-tidy-diff", "zig-format": "bun run analysis:no-llvm --target zig-format", "zig-format:check": "bun run analysis:no-llvm --target zig-format-check", - "prettier": "bunx prettier@latest --plugin=prettier-plugin-organize-imports --config .prettierrc --write scripts packages src docs 'test/**/*.{test,spec}.{ts,tsx,js,jsx,mts,mjs,cjs,cts}' '!test/**/*fixture*.*'", + "prettier": "bunx --bun prettier@latest --plugin=prettier-plugin-organize-imports --config .prettierrc --write scripts packages src docs 'test/**/*.{test,spec}.{ts,tsx,js,jsx,mts,mjs,cjs,cts}' '!test/**/*fixture*.*'", "node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests ", "node:test:cp": "bun ./scripts/fetch-node-test.ts ", "clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true", diff --git a/packages/bun-error/index.tsx b/packages/bun-error/index.tsx index 
30321a0b4a..e81dd401e9 100644 --- a/packages/bun-error/index.tsx +++ b/packages/bun-error/index.tsx @@ -848,7 +848,7 @@ const Summary = ({ errorCount, onClose }: { errorCount: number; onClose: () => v {errorCount} error{errorCount > 1 ? "s" : ""} on this page - + - Logo + Logo

bun-plugin-svelte

-The official [Svelte](https://svelte.dev/) plugin for [Bun](https://bun.sh/). +The official [Svelte](https://svelte.dev/) plugin for [Bun](https://bun.com/). ## Installation @@ -13,10 +13,10 @@ $ bun add -D bun-plugin-svelte ## Dev Server Usage -`bun-plugin-svelte` integrates with Bun's [Fullstack Dev Server](https://bun.sh/docs/bundler/fullstack), giving you +`bun-plugin-svelte` integrates with Bun's [Fullstack Dev Server](https://bun.com/docs/bundler/fullstack), giving you HMR when developing your Svelte app. -Start by registering it in your [bunfig.toml](https://bun.sh/docs/runtime/bunfig): +Start by registering it in your [bunfig.toml](https://bun.com/docs/runtime/bunfig): ```toml [serve.static] @@ -33,7 +33,7 @@ See the [example](https://github.com/oven-sh/bun/tree/main/packages/bun-plugin-s ## Bundler Usage -`bun-plugin-svelte` lets you bundle Svelte components with [`Bun.build`](https://bun.sh/docs/bundler). +`bun-plugin-svelte` lets you bundle Svelte components with [`Bun.build`](https://bun.com/docs/bundler). ```ts // build.ts diff --git a/packages/bun-plugin-svelte/example/App.svelte b/packages/bun-plugin-svelte/example/App.svelte index a73279986b..afda15aa72 100644 --- a/packages/bun-plugin-svelte/example/App.svelte +++ b/packages/bun-plugin-svelte/example/App.svelte @@ -2,7 +2,7 @@ import FeatureCard from "./FeatureCard.svelte"; const links = [ - { text: "Bun Documentation", url: "https://bun.sh/docs" }, + { text: "Bun Documentation", url: "https://bun.com/docs" }, { text: "Svelte Documentation", url: "https://svelte.dev/docs" }, { text: "GitHub", url: "https://github.com/oven-sh/bun/tree/main/packages/bun-plugin-svelte" }, ]; @@ -11,7 +11,7 @@

Bun

+

The official Svelte plugin for Bun

- 🚀 Get Started + 🚀 Get Started 👀 View Examples @@ -39,7 +39,7 @@
bun add -D bun-plugin-svelte
-

2. Add it to your bunfig.toml

+

2. Add it to your bunfig.toml


 [serve.static]
 plugins = ["bun-plugin-svelte"];
@@ -51,11 +51,11 @@ plugins = ["bun-plugin-svelte"];
   

✨ Features

- + Integrates with Bun's Fullstack Dev Server for hot module replacement - - Bundle Svelte components with Bun.build + + Bundle Svelte components with Bun.build
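The example app in this diff is bundled with the same `Bun.build` integration that the plugin's README describes under "Bundler Usage". A minimal `build.ts` sketch follows; the entrypoint and option values are assumptions for illustration, not contents of this diff:

```ts
// build.ts
import { SveltePlugin } from "bun-plugin-svelte";

await Bun.build({
  entrypoints: ["./index.html"], // assumed entrypoint for the example app
  outdir: "./dist",
  plugins: [
    SveltePlugin({
      development: true, // disable for production builds
    }),
  ],
});
```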
@@ -78,8 +78,9 @@ plugins = ["bun-plugin-svelte"]; :global(body) { margin: 0; padding: 0; - font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, Cantarell, "Open Sans", - "Helvetica Neue", sans-serif; + font-family: + -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, Cantarell, "Open Sans", "Helvetica Neue", + sans-serif; background-color: #f9f9f9; color: #333; } diff --git a/packages/bun-plugin-svelte/example/index.html b/packages/bun-plugin-svelte/example/index.html index 7933124600..09a17427fe 100644 --- a/packages/bun-plugin-svelte/example/index.html +++ b/packages/bun-plugin-svelte/example/index.html @@ -2,8 +2,8 @@ - - + + { export function optimizeBun(path: string): void { const installScript = - os === "win32" ? 'powershell -c "irm bun.sh/install.ps1 | iex"' : "curl -fsSL https://bun.sh/install | bash"; + os === "win32" ? 'powershell -c "irm bun.com/install.ps1 | iex"' : "curl -fsSL https://bun.com/install | bash"; try { rename(path, join(__dirname, "bin", "bun.exe")); + link(join(__dirname, "bin", "bun.exe"), join(__dirname, "bin", "bunx.exe")); return; } catch (error) { debug("optimizeBun failed", error); diff --git a/packages/bun-types/README.md b/packages/bun-types/README.md index 45c74a24d1..cf798d2e65 100644 --- a/packages/bun-types/README.md +++ b/packages/bun-types/README.md @@ -1,7 +1,7 @@ # TypeScript types for Bun

- Logo + Logo

These are the type definitions for Bun's JavaScript runtime APIs. diff --git a/packages/bun-types/authoring.md b/packages/bun-types/authoring.md index 6b3dd9f0ab..cb5002ec42 100644 --- a/packages/bun-types/authoring.md +++ b/packages/bun-types/authoring.md @@ -65,13 +65,11 @@ Note: The order of references in `index.d.ts` is important - `bun.ns.d.ts` must ### Best Practices 1. **Type Safety** - - Please use strict types instead of `any` where possible - Leverage TypeScript's type system features (generics, unions, etc.) - Document complex types with JSDoc comments 2. **Compatibility** - - Use `Bun.__internal.UseLibDomIfAvailable` for types that might conflict with lib.dom.d.ts (see [`./fetch.d.ts`](./fetch.d.ts) for a real example) - `@types/node` often expects variables to always be defined (this was the biggest cause of most of the conflicts in the past!), so we use the `UseLibDomIfAvailable` type to make sure we don't overwrite `lib.dom.d.ts` but still provide Bun types while simultaneously declaring the variable exists (for Node to work) in the cases that we can. diff --git a/packages/bun-types/bun.d.ts b/packages/bun-types/bun.d.ts index 9bc2e89162..eec0782497 100644 --- a/packages/bun-types/bun.d.ts +++ b/packages/bun-types/bun.d.ts @@ -45,6 +45,7 @@ declare module "bun" { type DOMHighResTimeStamp = number; type EventListenerOrEventListenerObject = EventListener | EventListenerObject; type BlobOrStringOrBuffer = string | NodeJS.TypedArray | ArrayBufferLike | Blob; + type MaybePromise = T | Promise; namespace __internal { type LibDomIsLoaded = typeof globalThis extends { onabort: any } ? true : false; @@ -852,6 +853,8 @@ declare module "bun" { * * @param stream The stream to consume. * @returns A promise that resolves with the concatenated chunks or the concatenated chunks as a {@link Uint8Array}. + * + * @deprecated Use {@link ReadableStream.bytes} */ function readableStreamToBytes( stream: ReadableStream, @@ -864,6 +867,8 @@ declare module "bun" { * * @param stream The stream to consume. * @returns A promise that resolves with the concatenated chunks as a {@link Blob}. + * + * @deprecated Use {@link ReadableStream.blob} */ function readableStreamToBlob(stream: ReadableStream): Promise; @@ -906,6 +911,8 @@ declare module "bun" { * * @param stream The stream to consume. * @returns A promise that resolves with the concatenated chunks as a {@link String}. + * + * @deprecated Use {@link ReadableStream.text} */ function readableStreamToText(stream: ReadableStream): Promise; @@ -916,6 +923,8 @@ declare module "bun" { * * @param stream The stream to consume. * @returns A promise that resolves with the concatenated chunks as a {@link String}. + * + * @deprecated Use {@link ReadableStream.json} */ function readableStreamToJSON(stream: ReadableStream): Promise; @@ -1242,9 +1251,9 @@ declare module "bun" { */ writer(options?: { highWaterMark?: number }): FileSink; - readonly readable: ReadableStream; - - // TODO: writable: WritableStream; + // TODO + // readonly readable: ReadableStream; + // readonly writable: WritableStream; /** * A UNIX timestamp indicating when the file was last modified. 
@@ -1303,116 +1312,285 @@ declare module "bun" { stat(): Promise; } - /** - * Configuration options for SQL client connection and behavior - * @example - * const config: SQLOptions = { - * host: 'localhost', - * port: 5432, - * user: 'dbuser', - * password: 'secretpass', - * database: 'myapp', - * idleTimeout: 30, - * max: 20, - * onconnect: (client) => { - * console.log('Connected to database'); - * } - * }; - */ + namespace SQL { + type AwaitPromisesArray>> = { + [K in keyof T]: Awaited; + }; - interface SQLOptions { - /** Connection URL (can be string or URL object) */ - url?: URL | string; - /** Database server hostname */ - host?: string; - /** Database server hostname (alias for host) */ - hostname?: string; - /** Database server port number */ - port?: number | string; - /** Database user for authentication */ - username?: string; - /** Database user for authentication (alias for username) */ - user?: string; - /** Database password for authentication */ - password?: string | (() => Promise); - /** Database password for authentication (alias for password) */ - pass?: string | (() => Promise); - /** Name of the database to connect to */ - database?: string; - /** Name of the database to connect to (alias for database) */ - db?: string; - /** Database adapter/driver to use */ - adapter?: string; - /** Maximum time in seconds to wait for connection to become available */ - idleTimeout?: number; - /** Maximum time in seconds to wait for connection to become available (alias for idleTimeout) */ - idle_timeout?: number; - /** Maximum time in seconds to wait when establishing a connection */ - connectionTimeout?: number; - /** Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout) */ - connection_timeout?: number; - /** Maximum lifetime in seconds of a connection */ - maxLifetime?: number; - /** Maximum lifetime in seconds of a connection (alias for maxLifetime) */ - max_lifetime?: number; - /** Whether to use TLS/SSL for the connection */ - tls?: TLSOptions | boolean; - /** Whether to use TLS/SSL for the connection (alias for tls) */ - ssl?: TLSOptions | boolean; - /** Callback function executed when a connection is established */ - onconnect?: (client: SQL) => void; - /** Callback function executed when a connection is closed */ - onclose?: (client: SQL) => void; - /** Maximum number of connections in the pool */ - max?: number; - /** By default values outside i32 range are returned as strings. If this is true, values outside i32 range are returned as BigInts. */ - bigint?: boolean; - /** Automatic creation of prepared statements, defaults to true */ - prepare?: boolean; + type ContextCallbackResult = T extends Array> ? 
AwaitPromisesArray : Awaited; + type ContextCallback = (sql: SQL) => Promise; + + /** + * Configuration options for SQL client connection and behavior + * + * @example + * ```ts + * const config: Bun.SQL.Options = { + * host: 'localhost', + * port: 5432, + * user: 'dbuser', + * password: 'secretpass', + * database: 'myapp', + * idleTimeout: 30, + * max: 20, + * onconnect: (client) => { + * console.log('Connected to database'); + * } + * }; + * ``` + */ + interface Options { + /** + * Connection URL (can be string or URL object) + */ + url?: URL | string | undefined; + + /** + * Database server hostname + * @default "localhost" + */ + host?: string | undefined; + + /** + * Database server hostname (alias for host) + * @deprecated Prefer {@link host} + * @default "localhost" + */ + hostname?: string | undefined; + + /** + * Database server port number + * @default 5432 + */ + port?: number | string | undefined; + + /** + * Database user for authentication + * @default "postgres" + */ + username?: string | undefined; + + /** + * Database user for authentication (alias for username) + * @deprecated Prefer {@link username} + * @default "postgres" + */ + user?: string | undefined; + + /** + * Database password for authentication + * @default "" + */ + password?: string | (() => MaybePromise) | undefined; + + /** + * Database password for authentication (alias for password) + * @deprecated Prefer {@link password} + * @default "" + */ + pass?: string | (() => MaybePromise) | undefined; + + /** + * Name of the database to connect to + * @default The username value + */ + database?: string | undefined; + + /** + * Name of the database to connect to (alias for database) + * @deprecated Prefer {@link database} + * @default The username value + */ + db?: string | undefined; + + /** + * Database adapter/driver to use + * @default "postgres" + */ + adapter?: "postgres" /*| "sqlite" | "mysql"*/ | (string & {}) | undefined; + + /** + * Maximum time in seconds to wait for connection to become available + * @default 0 (no timeout) + */ + idleTimeout?: number | undefined; + + /** + * Maximum time in seconds to wait for connection to become available (alias for idleTimeout) + * @deprecated Prefer {@link idleTimeout} + * @default 0 (no timeout) + */ + idle_timeout?: number | undefined; + + /** + * Maximum time in seconds to wait when establishing a connection + * @default 30 + */ + connectionTimeout?: number | undefined; + + /** + * Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout) + * @deprecated Prefer {@link connectionTimeout} + * @default 30 + */ + connection_timeout?: number | undefined; + + /** + * Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout) + * @deprecated Prefer {@link connectionTimeout} + * @default 30 + */ + connectTimeout?: number | undefined; + + /** + * Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout) + * @deprecated Prefer {@link connectionTimeout} + * @default 30 + */ + connect_timeout?: number | undefined; + + /** + * Maximum lifetime in seconds of a connection + * @default 0 (no maximum lifetime) + */ + maxLifetime?: number | undefined; + + /** + * Maximum lifetime in seconds of a connection (alias for maxLifetime) + * @deprecated Prefer {@link maxLifetime} + * @default 0 (no maximum lifetime) + */ + max_lifetime?: number | undefined; + + /** + * Whether to use TLS/SSL for the connection + * @default false + */ + tls?: TLSOptions | boolean | undefined; + + /** 
+ * Whether to use TLS/SSL for the connection (alias for tls) + * @default false + */ + ssl?: TLSOptions | boolean | undefined; + + // `.path` is currently unsupported in Bun, the implementation is incomplete. + // + // /** + // * Unix domain socket path for connection + // * @default "" + // */ + // path?: string | undefined; + + /** + * Callback function executed when a connection is established + */ + onconnect?: ((client: SQL) => void) | undefined; + + /** + * Callback function executed when a connection is closed + */ + onclose?: ((client: SQL) => void) | undefined; + + /** + * Postgres client runtime configuration options + * + * @see https://www.postgresql.org/docs/current/runtime-config-client.html + */ + connection?: Record | undefined; + + /** + * Maximum number of connections in the pool + * @default 10 + */ + max?: number | undefined; + + /** + * By default values outside i32 range are returned as strings. If this is true, values outside i32 range are returned as BigInts. + * @default false + */ + bigint?: boolean | undefined; + + /** + * Automatic creation of prepared statements + * @default true + */ + prepare?: boolean | undefined; + } + + /** + * Represents a SQL query that can be executed, with additional control methods + * Extends Promise to allow for async/await usage + */ + interface Query extends Promise { + /** + * Indicates if the query is currently executing + */ + active: boolean; + + /** + * Indicates if the query has been cancelled + */ + cancelled: boolean; + + /** + * Cancels the executing query + */ + cancel(): Query; + + /** + * Executes the query as a simple query, no parameters are allowed but can execute multiple commands separated by semicolons + */ + simple(): Query; + + /** + * Executes the query + */ + execute(): Query; + + /** + * Returns the raw query result + */ + raw(): Query; + + /** + * Returns only the values from the query result + */ + values(): Query; + } + + /** + * Callback function type for transaction contexts + * @param sql Function to execute SQL queries within the transaction + */ + type TransactionContextCallback = ContextCallback; + + /** + * Callback function type for savepoint contexts + * @param sql Function to execute SQL queries within the savepoint + */ + type SavepointContextCallback = ContextCallback; + + /** + * SQL.Helper represents a parameter or serializable + * value inside of a query. 
+ + /** + * SQL.Helper represents a parameter or serializable + * value inside of a query. + * + * @example + * ```ts + * const helper = sql(users, 'id'); + * await sql`insert into users ${helper}`; + * ``` + */ + interface Helper<T> { + readonly value: T[]; + readonly columns: (keyof T)[]; + } } - /** - * Represents a SQL query that can be executed, with additional control methods - * Extends Promise to allow for async/await usage - */ - interface SQLQuery extends Promise<any> { - /** Indicates if the query is currently executing */ - active: boolean; - - /** Indicates if the query has been cancelled */ - cancelled: boolean; - - /** Cancels the executing query */ - cancel(): SQLQuery; - - /** Execute as a simple query, no parameters are allowed but can execute multiple commands separated by semicolons */ - simple(): SQLQuery; - - /** Executes the query */ - execute(): SQLQuery; - - /** Returns the raw query result */ - raw(): SQLQuery; - - /** Returns only the values from the query result */ - values(): SQLQuery; - } - - /** - * Callback function type for transaction contexts - * @param sql Function to execute SQL queries within the transaction - */ - type SQLTransactionContextCallback = (sql: TransactionSQL) => Promise<any> | Array<SQLQuery>; - /** - * Callback function type for savepoint contexts - * @param sql Function to execute SQL queries within the savepoint - */ - type SQLSavepointContextCallback = (sql: SavepointSQL) => Promise<any> | Array<SQLQuery>; - /** * Main SQL client interface providing connection and transaction management */ - interface SQL { + interface SQL extends AsyncDisposable { /** * Executes a SQL query using template literals * @example * ```ts * const [user] = await sql`select * from users where id = ${1}`; * ``` */ - (strings: string[] | TemplateStringsArray, ...values: any[]): SQLQuery; + (strings: TemplateStringsArray, ...values: unknown[]): SQL.Query; + + /** + * Execute a SQL query using a string + */ + (string: string): SQL.Query; /** * Helper function for inserting an object into a query * @example * ```ts * // Insert an object - * const result = await sql`insert into users ${sql(users)} RETURNING *`; + * const result = await sql`insert into users ${sql(users)} returning *`; * * // Or pick specific columns - * const result = await sql`insert into users ${sql(users, "id", "name")} RETURNING *`; + * const result = await sql`insert into users ${sql(users, "id", "name")} returning *`; * * // Or a single object - * const result = await sql`insert into users ${sql(user)} RETURNING *`; + * const result = await sql`insert into users ${sql(user)} returning *`; * ``` */ - <T>(obj: T | T[] | readonly T[], ...columns: (keyof T)[]): SQLQuery; + <T, Keys extends keyof T = keyof T>( + obj: T | T[] | readonly T[], + ...columns: readonly Keys[] + ): SQL.Helper<Pick<T, Keys>>; /** * Helper function for inserting any serializable value into a query * @example * ```ts * const result = await sql`SELECT * FROM users WHERE id IN ${sql([1, 2, 3])}`; * ``` */ - (obj: unknown): SQLQuery; + <T>(value: T): SQL.Helper<T>; /** * Commits a distributed transaction, also known as a prepared transaction in PostgreSQL or an XA transaction in MySQL @@ -1519,6 +1705,7 @@ /** * The reserve method pulls out a connection from the pool, and returns a client that wraps the single connection. + * * This can be used for running queries on an isolated connection. * Calling reserve in a reserved Sql will return a new reserved connection, not the same connection (behavior matches postgres package).
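A hedged sketch of the reserved-connection flow described above (assumes a reachable database; `ReservedSQL` is declared `Disposable` later in this diff, which is what makes `using` work):

```ts
import { sql } from "bun";

// reserve() detaches a single pooled connection; release() returns it
const reserved = await sql.reserve();
try {
  await reserved`select 1`;
} finally {
  reserved.release();
}

// With explicit resource management, release happens automatically
{
  using conn = await sql.reserve();
  await conn`select 2`;
}
```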
* @@ -1544,7 +1731,10 @@ * ``` */ reserve(): Promise<ReservedSQL>; - /** Begins a new transaction + + /** + * Begins a new transaction. + * + * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function. * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue. * @example * const [user, account] = await sql.begin(async sql => { * const [user] = await sql` * insert into users ( * name * ) values ( * 'Murray' * ) * returning * * ` * const [account] = await sql` * insert into accounts ( * user_id * ) values ( * ${ user.user_id } * ) * returning * * ` * return [user, account] * }) */ - begin(fn: SQLTransactionContextCallback): Promise<any>; - /** Begins a new transaction with options + begin<T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>; + + /** + * Begins a new transaction with options. + * + * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function. * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue. * @example * const [user, account] = await sql.begin("read write", async sql => { * const [user] = await sql` * insert into users ( * name * ) values ( * 'Murray' * ) * returning * * ` * const [account] = await sql` * insert into accounts ( * user_id * ) values ( * ${ user.user_id } * ) * returning * * ` * return [user, account] * }) */ - begin(options: string, fn: SQLTransactionContextCallback): Promise<any>; - /** Alternative method to begin a transaction + begin<T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>; + + /** + * Alternative method to begin a transaction. + * + * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function. * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue. * @alias begin @@ -1619,11 +1815,15 @@ * return [user, account] * }) */ - transaction(fn: SQLTransactionContextCallback): Promise<any>; - /** Alternative method to begin a transaction with options + transaction<T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>; + + /** + * Alternative method to begin a transaction with options * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function. * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue. - * @alias begin + * + * @alias {@link begin} + * + * @example * const [user, account] = await sql.transaction("read write", async sql => { * const [user] = await sql` * insert into users ( * name * ) values ( * 'Murray' * ) * returning * * ` * const [account] = await sql` * insert into accounts ( * user_id * ) values ( * ${ user.user_id } * ) * returning * * ` * return [user, account] - * }) + * }); */ - transaction(options: string, fn: SQLTransactionContextCallback): Promise<any>; + transaction<T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
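The restored generics above encode one subtlety worth a sketch: when the callback resolves to an array of queries, `ContextCallbackResult` maps each element to its awaited row type via `AwaitPromisesArray`. A hedged example, assuming postgres-package-compatible behavior:

```ts
import { sql } from "bun";

// Returning an array of queries from begin() resolves each element;
// the tuple type is mapped to the awaited row types.
const [ones, twos] = await sql.begin(async tx => [
  tx`select 1 as one`,
  tx`select 2 as two`,
]);
```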
+ + /** + * Begins a distributed transaction. + * Also known as Two-Phase Commit: in a distributed transaction, Phase 1 involves the coordinator preparing nodes by ensuring data is written and ready to commit, while Phase 2 finalizes with nodes committing or rolling back based on the coordinator's decision, ensuring durability and releasing locks. * In PostgreSQL and MySQL distributed transactions persist beyond the original session, allowing privileged users or coordinators to commit/rollback them, ensuring support for distributed transactions, recovery, and administrative tasks. * beginDistributed will automatically roll back if any exceptions are not caught, and you can commit and roll back later if everything goes well. * PostgreSQL natively supports distributed transactions using PREPARE TRANSACTION, while MySQL uses XA Transactions, and MSSQL also supports distributed/XA transactions. However, in MSSQL, distributed transactions are tied to the original session, the DTC coordinator, and the specific connection. * These transactions are automatically committed or rolled back following the same rules as regular transactions, with no option for manual intervention from other sessions; in MSSQL, distributed transactions are used to coordinate transactions using Linked Servers. + * * @example * await sql.beginDistributed("numbers", async sql => { * await sql`create table if not exists numbers (a int)`; @@ -1661,31 +1864,38 @@ * await sql.commitDistributed("numbers"); * // or await sql.rollbackDistributed("numbers"); */ - beginDistributed(name: string, fn: SQLTransactionContextCallback): Promise<any>; + beginDistributed<T>( + name: string, + fn: SQL.TransactionContextCallback<T>, + ): Promise<SQL.ContextCallbackResult<T>>; + /** Alternative method to begin a distributed transaction - * @alias beginDistributed + * @alias {@link beginDistributed} */ - distributed(name: string, fn: SQLTransactionContextCallback): Promise<any>; + distributed<T>(name: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>; + /** If you know what you're doing, you can use unsafe to pass any string you'd like. * Please note that this can lead to SQL injection if you're not careful. * You can also nest sql.unsafe within a safe sql expression. This is useful if only part of your query has unsafe elements. * @example * const result = await sql.unsafe(`select ${danger} from users where id = ${dragons}`) */ - unsafe(string: string, values?: any[]): SQLQuery; + unsafe(string: string, values?: any[]): SQL.Query; + /** * Reads a file and uses the contents as a query.
* Optional parameters can be used if the file includes $1, $2, etc * @example * const result = await sql.file("query.sql", [1, 2, 3]); */ - file(filename: string, values?: any[]): SQLQuery; + file(filename: string, values?: any[]): SQL.Query; - /** Current client options */ - options: SQLOptions; - - [Symbol.asyncDispose](): Promise<void>; + /** + * Current client options + */ + options: SQL.Options; } + const SQL: { /** * Creates a new SQL client instance @@ -1711,7 +1921,7 @@ * const sql = new SQL("postgres://localhost:5432/mydb", { idleTimeout: 1000 }); * ``` */ - new (connectionString: string | URL, options: Omit<SQLOptions, "url">): SQL; + new (connectionString: string | URL, options: Omit<SQL.Options, "url">): SQL; /** * Creates a new SQL client instance with options @@ -1723,17 +1933,18 @@ * const sql = new SQL({ url: "postgres://localhost:5432/mydb", idleTimeout: 1000 }); * ``` */ - new (options?: SQLOptions): SQL; + new (options?: SQL.Options): SQL; }; /** * Represents a reserved connection from the connection pool * Extends SQL with additional release functionality */ - interface ReservedSQL extends SQL { - /** Releases the client back to the connection pool */ + interface ReservedSQL extends SQL, Disposable { + /** + * Releases the client back to the connection pool + */ release(): void; - [Symbol.dispose](): void; } /** @@ -1742,26 +1953,30 @@ */ interface TransactionSQL extends SQL { /** Creates a savepoint within the current transaction */ - savepoint<T>(name: string, fn: SQLSavepointContextCallback): Promise<T>; - savepoint<T>(fn: SQLSavepointContextCallback): Promise<T>; + savepoint<T>(name: string, fn: SQLSavepointContextCallback<T>): Promise<T>; + savepoint<T>(fn: SQLSavepointContextCallback<T>): Promise<T>; } + /** * Represents a savepoint within a transaction */ interface SavepointSQL extends SQL {}
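`savepoint` has no inline example, so here is a hedged sketch of how a savepoint nests inside a transaction (assumes postgres-package-compatible rollback semantics and a `users` table):

```ts
import { sql } from "bun";

await sql.begin(async tx => {
  await tx`insert into users (name) values ('outer')`;
  try {
    await tx.savepoint(async sp => {
      await sp`insert into users (name) values ('inner')`;
      throw new Error("roll back to the savepoint only");
    });
  } catch {
    // Only the savepoint's work is rolled back; the outer transaction continues
  }
  await tx`insert into users (name) values ('still-committed')`;
});
```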
type CSRFAlgorithm = "blake2b256" | "blake2b512" | "sha256" | "sha384" | "sha512" | "sha512-256"; + interface CSRFGenerateOptions { /** * The number of milliseconds until the token expires. 0 means the token never expires. * @default 24 * 60 * 60 * 1000 (24 hours) */ expiresIn?: number; + /** * The encoding of the token. * @default "base64url" */ encoding?: "base64" | "base64url" | "hex"; + /** * The algorithm to use for the token. * @default "sha256" */ @@ -1774,16 +1989,19 @@ * The secret to use for the token. If not provided, a random default secret will be generated in memory and used. */ secret?: string; + /** * The encoding of the token. * @default "base64url" */ encoding?: "base64" | "base64url" | "hex"; + /** * The algorithm to use for the token. * @default "sha256" */ algorithm?: CSRFAlgorithm; + /** * The number of milliseconds until the token expires. 0 means the token never expires. * @default 24 * 60 * 60 * 1000 (24 hours) @@ -1793,15 +2011,11 @@ /** * SQL client - * - * @category Database */ const sql: SQL; /** * SQL client for PostgreSQL - * - * @category Database */ const postgres: SQL; @@ -2120,7 +2334,7 @@ } /** - * @see [Bun.build API docs](https://bun.sh/docs/bundler#api) + * @see [Bun.build API docs](https://bun.com/docs/bundler#api) */ interface BuildConfig { entrypoints: string[]; // list of file path @@ -2305,7 +2519,7 @@ * * These are fast APIs that can run in a worker thread if used asynchronously. * - * @see [Bun.password API docs](https://bun.sh/guides/util/hash-a-password) + * @see [Bun.password API docs](https://bun.com/guides/util/hash-a-password) * * @category Security */ @@ -2341,7 +2555,7 @@ * Password hashing functions are necessarily slow, and this object will * automatically run in a worker thread. * - * @see [Bun.password API docs](https://bun.sh/guides/util/hash-a-password) + * @see [Bun.password API docs](https://bun.com/guides/util/hash-a-password) * * The underlying implementation of these functions is provided by the Zig * Standard Library. Thanks to \@jedisct1 and other Zig contributors for their @@ -3296,7 +3510,7 @@ [K in HTTPMethod]?: RouteHandlerWithWebSocketUpgrade; }; - type RouteValue = Response | false | RouteHandler | RouteHandlerObject; + type RouteValue = Response | false | RouteHandler | RouteHandlerObject | HTMLBundle; type RouteValueWithWebSocketUpgrade = | RouteValue | RouteHandlerWithWebSocketUpgrade @@ -5499,7 +5713,7 @@ */ | "browser"; - /** https://bun.sh/docs/bundler/loaders */ + /** https://bun.com/docs/bundler/loaders */ type Loader = "js" | "jsx" | "ts" | "tsx" | "json" | "toml" | "file" | "napi" | "wasm" | "text" | "css" | "html"; interface PluginConstraints { @@ -5859,7 +6073,7 @@ const isMainThread: boolean; /** - * Used when importing an HTML file at runtime. + * Used when importing an HTML file at runtime or at build time. * * @example * * ```ts * import app from "./index.html"; * ``` * - * Bun.build support for this isn't imlpemented yet. */ + interface HTMLBundle { index: string; + + /** Array of generated output files with metadata. This only exists when built ahead of time with `Bun.build` or `bun build` */ + files?: Array<{ + /** Original source file path. */ + input?: string; + /** Generated output file path (with content hash, if included in naming) */ + path: string; + /** File type/loader used (js, css, html, file, etc.) */ + loader: Loader; + /** Whether this file is an entry point */ + isEntry: boolean; + /** HTTP headers including ETag and Content-Type */ + headers: { + /** ETag for caching */ + etag: string; + /** MIME type with charset */ + "content-type": string; + + /** + * Additional headers may be added in the future. + */ + [key: string]: string; + }; + }>; }
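A hedged sketch tying the `HTMLBundle` shape above to the `RouteValue` change earlier in this diff (assumes an `index.html` next to the server entry):

```ts
import { serve } from "bun";
// Importing an HTML file now types as Bun.HTMLBundle (see extensions.d.ts below)
import app from "./index.html";

serve({
  // HTMLBundle is a valid RouteValue, so the bundle can be served directly
  routes: { "/": app },
  fetch() {
    return new Response("Not found", { status: 404 });
  },
});
```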
/** @@ -6965,8 +7203,6 @@ maxBuffer?: number; } - type ReadableIO = ReadableStream | number | undefined; - type ReadableToIO = X extends "pipe" | undefined ? ReadableStream : X extends BunFile | ArrayBufferView | number @@ -7681,6 +7917,56 @@ timestamp?: number | Date, ): Buffer; + /** + * Generate a UUIDv5, which is a name-based UUID based on the SHA-1 hash of a namespace UUID and a name. + * + * @param name The name to use for the UUID + * @param namespace The namespace to use for the UUID + * @param encoding The encoding to use for the UUID + * + * + * @example + * ```js + * import { randomUUIDv5 } from "bun"; + * const uuid = randomUUIDv5("www.example.com", "dns"); + * console.log(uuid); // "2ed6657d-e927-568b-95e1-2665a8aea6a2" + * ``` + * + * ```js + * import { randomUUIDv5 } from "bun"; + * const uuid = randomUUIDv5("www.example.com", "url"); + * console.log(uuid); // deterministic: the same name and namespace always yield the same UUID + * ``` + */ + function randomUUIDv5( + name: string | BufferSource, + namespace: string | BufferSource | "dns" | "url" | "oid" | "x500", + /** + * @default "hex" + */ + encoding?: "hex" | "base64" | "base64url", + ): string; + + /** + * Generate a UUIDv5 as a Buffer + * + * @param name The name to use for the UUID + * @param namespace The namespace to use for the UUID + * @param encoding The encoding to use for the UUID + * + * @example + * ```js + * import { randomUUIDv5 } from "bun"; + * const uuid = randomUUIDv5("www.example.com", "url", "buffer"); + * console.log(uuid); // <Buffer ...> + * ``` + */ + function randomUUIDv5( + name: string | BufferSource, + namespace: string | BufferSource | "dns" | "url" | "oid" | "x500", + encoding: "buffer", + ): Buffer; + /** * Types for `bun.lock` */ @@ -7689,15 +7975,15 @@ workspaces: { [workspace: string]: BunLockFileWorkspacePackage; }; - /** @see https://bun.sh/docs/install/overrides */ + /** @see https://bun.com/docs/install/overrides */ overrides?: Record<string, string>; - /** @see https://bun.sh/docs/install/patch */ + /** @see https://bun.com/docs/install/patch */ patchedDependencies?: Record<string, string>; - /** @see https://bun.sh/docs/install/lifecycle#trusteddependencies */ + /** @see https://bun.com/docs/install/lifecycle#trusteddependencies */ trustedDependencies?: string[]; - /** @see https://bun.sh/docs/install/catalogs */ + /** @see https://bun.com/docs/install/catalogs */ catalog?: Record<string, string>; - /** @see https://bun.sh/docs/install/catalogs */ + /** @see https://bun.com/docs/install/catalogs */ catalogs?: Record<string, Record<string, string>>; /** diff --git a/packages/bun-types/deprecated.d.ts b/packages/bun-types/deprecated.d.ts index 2fb502e4ad..c1ce3138e4 100644 --- a/packages/bun-types/deprecated.d.ts +++ b/packages/bun-types/deprecated.d.ts @@ -14,10 +14,23 @@ declare module "bun" { ): void; } + /** @deprecated Use {@link SQL.Query Bun.SQL.Query} */ + type SQLQuery<T = any> = SQL.Query<T>; + + /** @deprecated Use {@link SQL.TransactionContextCallback Bun.SQL.TransactionContextCallback} */ + type SQLTransactionContextCallback<T> = SQL.TransactionContextCallback<T>; + + /** @deprecated Use {@link SQL.SavepointContextCallback Bun.SQL.SavepointContextCallback} */ + type SQLSavepointContextCallback<T> = SQL.SavepointContextCallback<T>; + + /** @deprecated Use {@link SQL.Options Bun.SQL.Options} */ + type SQLOptions = SQL.Options; + /** * @deprecated Renamed to `ErrorLike` */ type Errorlike = ErrorLike; + interface TLSOptions { /** * File path to a TLS key * * @deprecated since v0.6.3 - Use `key: Bun.file(path)` instead. */ keyFile?: string; + /** * File path to a TLS certificate * * @deprecated since v0.6.3 - Use `cert: Bun.file(path)` instead.
*/ certFile?: string; + /** * File path to a .pem file for a custom root CA * * @deprecated @@ -42,6 +57,9 @@ */ caFile?: string; } + + /** @deprecated This type is unused in Bun's declarations and may be removed in the future */ + type ReadableIO = ReadableStream | number | undefined; } declare namespace NodeJS { diff --git a/packages/bun-types/devserver.d.ts b/packages/bun-types/devserver.d.ts index 19e91d1852..c26c1c1f05 100644 --- a/packages/bun-types/devserver.d.ts +++ b/packages/bun-types/devserver.d.ts @@ -29,7 +29,7 @@ interface ImportMeta { * However, this check is usually not needed as Bun will dead-code-eliminate * calls to all of the HMR APIs in production builds. * - * https://bun.sh/docs/bundler/hmr + * https://bun.com/docs/bundler/hmr */ hot: { /** @@ -167,7 +167,7 @@ interface ImportMeta { * * For compatibility with Vite, event names are also available via vite:* prefix instead of bun:*. * - * https://bun.sh/docs/bundler/hmr#import-meta-hot-on-and-off + * https://bun.com/docs/bundler/hmr#import-meta-hot-on-and-off * @param event The event to listen to * @param callback The callback to call when the event is emitted */ @@ -178,7 +178,7 @@ interface ImportMeta { * * For compatibility with Vite, event names are also available via vite:* prefix instead of bun:*. * - * https://bun.sh/docs/bundler/hmr#import-meta-hot-on-and-off + * https://bun.com/docs/bundler/hmr#import-meta-hot-on-and-off * @param event The event to stop listening to * @param callback The callback to stop listening to */ diff --git a/packages/bun-types/experimental.d.ts b/packages/bun-types/experimental.d.ts new file mode 100644 index 0000000000..093f00e622 --- /dev/null +++ b/packages/bun-types/experimental.d.ts @@ -0,0 +1,276 @@ +declare module "bun" { + export namespace __experimental { + /** + * Base interface for static site generation route parameters. + * + * Supports both single string values and arrays of strings for dynamic route segments. + * This is typically used for route parameters like `[slug]`, `[...rest]`, or `[id]`. + * + * @warning These APIs are experimental and might be moved/changed in future releases. + * + * @example + * ```tsx + * // Simple slug parameter + * type BlogParams = { slug: string }; + * + * // Multiple parameters + * type ProductParams = { + * category: string; + * id: string; + * }; + * + * // Catch-all routes with string arrays + * type DocsParams = { + * path: string[]; + * }; + * ``` + */ + export interface SSGParamsLike { + [key: string]: string | string[]; + } + + /** + * Configuration object for a single static route to be generated. + * + * Each path object contains the parameters needed to render a specific + * instance of a dynamic route at build time. + * + * @warning These APIs are experimental and might be moved/changed in future releases. + * + * @template Params - The shape of route parameters for this path + * + * @example + * ```tsx + * // Single blog post path + * const blogPath: SSGPath<{ slug: string }> = { + * params: { slug: "my-first-post" } + * }; + * + * // Product page with multiple params + * const productPath: SSGPath<{ category: string; id: string }> = { + * params: { + * category: "electronics", + * id: "laptop-123" + * } + * }; + * + * // Documentation with catch-all route + * const docsPath: SSGPath<{ path: string[] }> = { + * params: { path: ["getting-started", "installation"] } + * }; + * ``` + */ + export interface SSGPath<Params extends SSGParamsLike = SSGParamsLike> { + params: Params; + } + + /** + * Array of static paths to be generated at build time.
+ * + * This type represents the collection of all route configurations + * that should be pre-rendered for a dynamic route. + * + * @warning These APIs are experimental and might be moved/changed in future releases. + * + * @template Params - The shape of route parameters for these paths + * + * @example + * ```tsx + * // Array of blog post paths + * const blogPaths: SSGPaths<{ slug: string }> = [ + * { params: { slug: "introduction-to-bun" } }, + * { params: { slug: "performance-benchmarks" } }, + * { params: { slug: "getting-started-guide" } } + * ]; + * + * // Mixed parameter types + * const productPaths: SSGPaths<{ category: string; id: string }> = [ + * { params: { category: "books", id: "javascript-guide" } }, + * { params: { category: "electronics", id: "smartphone-x" } } + * ]; + * ``` + */ + export type SSGPaths<Params extends SSGParamsLike = SSGParamsLike> = SSGPath<Params>[]; + + /** + * Props interface for SSG page components. + * + * This interface defines the shape of props that will be passed to your + * static page components during the build process. The `params` object + * contains the route parameters extracted from the URL pattern. + * + * @warning These APIs are experimental and might be moved/changed in future releases. + * + * @template Params - The shape of route parameters for this page + * + * @example + * ```tsx + * // Blog post component props + * interface BlogPageProps extends SSGPageProps<{ slug: string }> { + * // params: { slug: string } is automatically included + * } + * + * // Product page component props + * interface ProductPageProps extends SSGPageProps<{ + * category: string; + * id: string; + * }> { + * // params: { category: string; id: string } is automatically included + * } + * + * // Usage in component + * function BlogPost({ params }: BlogPageProps) { + * const { slug } = params; // TypeScript knows slug is a string + * return <div>Blog post: {slug}</div>; + * } + * ``` + */ + export interface SSGPageProps<Params extends SSGParamsLike = SSGParamsLike> { + params: Params; + } + + /** + * React component type for SSG pages that can be statically generated. + * + * This type represents a React component that receives SSG page props + * and can be rendered at build time. The component can be either a regular + * React component or an async React Server Component for advanced use cases + * like data fetching during static generation. + * + * @warning These APIs are experimental and might be moved/changed in future releases. + * + * @template Params - The shape of route parameters for this page component + * + * @example + * ```tsx + * // Regular synchronous SSG page component + * const BlogPost: SSGPage<{ slug: string }> = ({ params }) => { + * return ( + * <div> + * <h1>Blog Post: {params.slug}</h1> + * <p>This content was generated at build time!</p> + * </div> + * ); + * }; + * + * // Async React Server Component for data fetching + * const AsyncBlogPost: SSGPage<{ slug: string }> = async ({ params }) => { + * // Fetch data during static generation + * const post = await fetchBlogPost(params.slug); + * const author = await fetchAuthor(post.authorId); + * + * return ( + * <article> + * <h1>{post.title}</h1> + * <p>By {author.name}</p> + * <div dangerouslySetInnerHTML={{ __html: post.content }} /> + * </article> + * ); + * }; + * + * // Product page with multiple params and async data fetching + * const ProductPage: SSGPage<{ category: string; id: string }> = async ({ params }) => { + * const [product, reviews] = await Promise.all([ + * fetchProduct(params.category, params.id), + * fetchProductReviews(params.id) + * ]); + * + * return ( + * <div> + * <h1>{product.name}</h1> + * <p>Category: {params.category}</p> + * <p>Price: ${product.price}</p> + * <section> + * <h2>Reviews ({reviews.length})</h2> + * {reviews.map(review => ( + * <div key={review.id}>{review.comment}</div> + * ))} + * </section> + * </div>
+ * ); + * }; + * ``` + */ + export type SSGPage<Params extends SSGParamsLike = SSGParamsLike> = React.ComponentType<SSGPageProps<Params>>; + + /** + * getStaticPaths is Bun's implementation of SSG (Static Site Generation) path determination. + * + * This function is called at your app's build time to determine which + * dynamic routes should be pre-rendered as static pages. It returns an + * array of path parameters that will be used to generate static pages for + * dynamic routes (e.g., [slug].tsx, [category]/[id].tsx). + * + * The function can be either synchronous or asynchronous, allowing you to + * fetch data from APIs, databases, or file systems to determine which paths + * should be statically generated. + * + * @warning These APIs are experimental and might be moved/changed in future releases. + * + * @template Params - The shape of route parameters for the dynamic route + * + * @returns An object containing an array of paths to be statically generated + * + * @example + * ```tsx + * // In pages/blog/[slug].tsx + * export const getStaticPaths: GetStaticPaths<{ slug: string }> = async () => { + * // Fetch all blog posts from your CMS or API at build time + * const posts = await fetchBlogPosts(); + * + * return { + * paths: posts.map((post) => ({ + * params: { slug: post.slug } + * })) + * }; + * }; + * + * // In pages/products/[category]/[id].tsx + * export const getStaticPaths: GetStaticPaths<{ + * category: string; + * id: string; + * }> = async () => { + * // Fetch products from database + * const products = await db.products.findMany({ + * select: { id: true, category: { slug: true } } + * }); + * + * return { + * paths: products.map(product => ({ + * params: { + * category: product.category.slug, + * id: product.id + * } + * })) + * }; + * }; + * + * // In pages/docs/[...path].tsx (catch-all route) + * export const getStaticPaths: GetStaticPaths<{ path: string[] }> = async () => { + * // Read documentation structure from file system + * const docPaths = await getDocumentationPaths('./content/docs'); + * + * return { + * paths: docPaths.map(docPath => ({ + * params: { path: docPath.split('/') } + * })) + * }; + * }; + * + * // Synchronous example with static data + * export const getStaticPaths: GetStaticPaths<{ id: string }> = () => { + * const staticIds = ['1', '2', '3', '4', '5']; + * + * return { + * paths: staticIds.map(id => ({ + * params: { id } + * })) + * }; + * }; + * ``` + */ + export type GetStaticPaths<Params extends SSGParamsLike = SSGParamsLike> = () => MaybePromise<{ + paths: SSGPaths<Params>; + }>; + } +} diff --git a/packages/bun-types/extensions.d.ts b/packages/bun-types/extensions.d.ts index ac98d51016..9fb2526baf 100644 --- a/packages/bun-types/extensions.d.ts +++ b/packages/bun-types/extensions.d.ts @@ -19,7 +19,7 @@ declare module "*/bun.lock" { } declare module "*.html" { - // In Bun v1.2, this might change to Bun.HTMLBundle - var contents: any; + var contents: import("bun").HTMLBundle; + export = contents; } diff --git a/packages/bun-types/index.d.ts b/packages/bun-types/index.d.ts index 3eed5f1b6c..c5b488ba22 100644 --- a/packages/bun-types/index.d.ts +++ b/packages/bun-types/index.d.ts @@ -20,6 +20,7 @@ /// /// /// +/// <reference path="./experimental.d.ts" /> /// diff --git a/packages/bun-types/overrides.d.ts b/packages/bun-types/overrides.d.ts index 27e4f9700b..f798e00f80 100644 --- a/packages/bun-types/overrides.d.ts +++ b/packages/bun-types/overrides.d.ts @@ -1,5 +1,26 @@ export {}; +declare module "stream/web" { + interface ReadableStream<R = any> { + /** + * Consume a ReadableStream as text + */ + text(): Promise<string>; + /** + * Consume a ReadableStream as a Uint8Array + */ + bytes(): Promise<Uint8Array>; +
/** + * Consume a ReadableStream as JSON + */ + json(): Promise<any>; + /** + * Consume a ReadableStream as a Blob + */ + blob(): Promise<Blob>; + } +} + declare global { namespace NodeJS { interface ProcessEnv extends Bun.Env, ImportMetaEnv {} diff --git a/packages/bun-types/package.json b/packages/bun-types/package.json index d9bc95f0a6..4ab5abba4a 100644 --- a/packages/bun-types/package.json +++ b/packages/bun-types/package.json @@ -15,11 +15,15 @@ "CLAUDE.md", "README.md" ], - "homepage": "https://bun.sh", + "homepage": "https://bun.com", "dependencies": { "@types/node": "*" }, + "peerDependencies": { + "@types/react": "^19" + }, "devDependencies": { + "@types/react": "^19", "typescript": "^5.0.2" }, "scripts": { diff --git a/packages/bun-types/shell.d.ts b/packages/bun-types/shell.d.ts index 5a91c7f4ab..0659c2ff78 100644 --- a/packages/bun-types/shell.d.ts +++ b/packages/bun-types/shell.d.ts @@ -12,7 +12,7 @@ declare module "bun" { | ReadableStream; /** - * The [Bun shell](https://bun.sh/docs/runtime/shell) is a powerful tool for running shell commands. + * The [Bun shell](https://bun.com/docs/runtime/shell) is a powerful tool for running shell commands. * * @example * ```ts @@ -206,7 +206,7 @@ declare module "bun" { } /** - * ShellError represents an error that occurred while executing a shell command with [the Bun Shell](https://bun.sh/docs/runtime/shell). + * ShellError represents an error that occurred while executing a shell command with [the Bun Shell](https://bun.com/docs/runtime/shell). * * @example * ```ts diff --git a/packages/bun-types/test.d.ts b/packages/bun-types/test.d.ts index 7692df3f77..c02e72f1d7 100644 --- a/packages/bun-types/test.d.ts +++ b/packages/bun-types/test.d.ts @@ -1184,14 +1184,6 @@ declare module "bun:test" { * expect(null).toBeInstanceOf(Array); // fail */ toBeInstanceOf(value: unknown): void; - /** - * Asserts that the expected value is an instance of value - * - * @example - * expect([]).toBeInstanceOf(Array); - * expect(null).toBeInstanceOf(Array); // fail - */ - toBeInstanceOf(value: unknown): void; /** * Asserts that a value is `undefined`. * diff --git a/packages/bun-usockets/misc/manual.md b/packages/bun-usockets/misc/manual.md index 5fab9b3134..275d1f8151 100644 --- a/packages/bun-usockets/misc/manual.md +++ b/packages/bun-usockets/misc/manual.md @@ -95,8 +95,7 @@ WIN32_EXPORT struct us_socket_context_t *us_create_child_socket_context(int ssl, ```c /* Write up to length bytes of data. Returns actual bytes written. Will call the on_writable callback of active socket context on failure to write everything off in one go. - * Set hint msg_more if you have more immediate data to write. */ -WIN32_EXPORT int us_socket_write(int ssl, struct us_socket_t *s, const char *data, int length, int msg_more); +WIN32_EXPORT int us_socket_write(int ssl, struct us_socket_t *s, const char *data, int length); /* Set a low precision, high performance timer on a socket. A socket can only have one single active timer at any given point in time.
Will remove any such pre set timer */ WIN32_EXPORT void us_socket_timeout(int ssl, struct us_socket_t *s, unsigned int seconds); diff --git a/packages/bun-usockets/src/bsd.c b/packages/bun-usockets/src/bsd.c index f4d4bc31c9..6c81929cfb 100644 --- a/packages/bun-usockets/src/bsd.c +++ b/packages/bun-usockets/src/bsd.c @@ -762,9 +762,9 @@ ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_le } #else ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_length, const char *payload, int payload_length) { - ssize_t written = bsd_send(fd, header, header_length, 0); + ssize_t written = bsd_send(fd, header, header_length); if (written == header_length) { - ssize_t second_write = bsd_send(fd, payload, payload_length, 0); + ssize_t second_write = bsd_send(fd, payload, payload_length); if (second_write > 0) { written += second_write; } @@ -773,7 +773,7 @@ ssize_t bsd_write2(LIBUS_SOCKET_DESCRIPTOR fd, const char *header, int header_le } #endif -ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more) { +ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length) { while (1) { // MSG_MORE (Linux), MSG_PARTIAL (Windows), TCP_NOPUSH (BSD) @@ -781,13 +781,8 @@ ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int ms #define MSG_NOSIGNAL 0 #endif - #ifdef MSG_MORE - // for Linux we do not want signals - ssize_t rc = send(fd, buf, length, ((msg_more != 0) * MSG_MORE) | MSG_NOSIGNAL | MSG_DONTWAIT); - #else - // use TCP_NOPUSH - ssize_t rc = send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT); - #endif + // use TCP_NOPUSH + ssize_t rc = send(fd, buf, length, MSG_NOSIGNAL | MSG_DONTWAIT); if (UNLIKELY(IS_EINTR(rc))) { continue; diff --git a/packages/bun-usockets/src/context.c b/packages/bun-usockets/src/context.c index fa01b9864a..048f773858 100644 --- a/packages/bun-usockets/src/context.c +++ b/packages/bun-usockets/src/context.c @@ -23,7 +23,6 @@ #ifndef _WIN32 #include #endif - #define CONCURRENT_CONNECTIONS 4 // clang-format off @@ -43,17 +42,20 @@ int us_raw_root_certs(struct us_cert_string_t**out){ void us_listen_socket_close(int ssl, struct us_listen_socket_t *ls) { /* us_listen_socket_t extends us_socket_t so we close in similar ways */ - if (!us_socket_is_closed(0, &ls->s)) { - us_internal_socket_context_unlink_listen_socket(ssl, ls->s.context, ls); - us_poll_stop((struct us_poll_t *) &ls->s, ls->s.context->loop); - bsd_close_socket(us_poll_fd((struct us_poll_t *) &ls->s)); + struct us_socket_t* s = &ls->s; + if (!us_socket_is_closed(0, s)) { + struct us_socket_context_t* context = s->context; + struct us_loop_t* loop = context->loop; + us_internal_socket_context_unlink_listen_socket(ssl, context, ls); + us_poll_stop((struct us_poll_t *) s, loop); + bsd_close_socket(us_poll_fd((struct us_poll_t *) s)); /* Link this socket to the close-list and let it be deleted after this iteration */ - ls->s.next = ls->s.context->loop->data.closed_head; - ls->s.context->loop->data.closed_head = &ls->s; + s->next = loop->data.closed_head; + loop->data.closed_head = s; /* Any socket with prev = context is marked as closed */ - ls->s.prev = (struct us_socket_t *) ls->s.context; + s->prev = (struct us_socket_t *) context; } /* We cannot immediately free a listen socket as we can be inside an accept loop */ @@ -91,16 +93,18 @@ void us_internal_socket_context_unlink_listen_socket(int ssl, struct us_socket_c context->iterator = ls->s.next; } - if (ls->s.prev == ls->s.next) { + struct us_socket_t* prev = 
ls->s.prev; + struct us_socket_t* next = ls->s.next; + if (prev == next) { context->head_listen_sockets = 0; } else { - if (ls->s.prev) { - ls->s.prev->next = ls->s.next; + if (prev) { + prev->next = next; } else { - context->head_listen_sockets = (struct us_listen_socket_t *) ls->s.next; + context->head_listen_sockets = (struct us_listen_socket_t *) next; } - if (ls->s.next) { - ls->s.next->prev = ls->s.prev; + if (next) { + next->prev = prev; } } us_socket_context_unref(ssl, context); @@ -112,31 +116,35 @@ void us_internal_socket_context_unlink_socket(int ssl, struct us_socket_context_ context->iterator = s->next; } - if (s->prev == s->next) { + struct us_socket_t* prev = s->prev; + struct us_socket_t* next = s->next; + if (prev == next) { context->head_sockets = 0; } else { - if (s->prev) { - s->prev->next = s->next; + if (prev) { + prev->next = next; } else { - context->head_sockets = s->next; + context->head_sockets = next; } - if (s->next) { - s->next->prev = s->prev; + if (next) { + next->prev = prev; } } us_socket_context_unref(ssl, context); } void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_socket_context_t *context, struct us_connecting_socket_t *c) { - if (c->prev_pending == c->next_pending) { + struct us_connecting_socket_t* prev = c->prev_pending; + struct us_connecting_socket_t* next = c->next_pending; + if (prev == next) { context->head_connecting_sockets = 0; } else { - if (c->prev_pending) { - c->prev_pending->next_pending = c->next_pending; + if (prev) { + prev->next_pending = next; } else { - context->head_connecting_sockets = c->next_pending; + context->head_connecting_sockets = next; } - if (c->next_pending) { - c->next_pending->prev_pending = c->prev_pending; + if (next) { + next->prev_pending = prev; } } us_socket_context_unref(ssl, context); @@ -144,11 +152,12 @@ void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_sock /* We always add in the top, so we don't modify any s.next */ void us_internal_socket_context_link_listen_socket(struct us_socket_context_t *context, struct us_listen_socket_t *ls) { - ls->s.context = context; - ls->s.next = (struct us_socket_t *) context->head_listen_sockets; - ls->s.prev = 0; + struct us_socket_t* s = &ls->s; + s->context = context; + s->next = (struct us_socket_t *) context->head_listen_sockets; + s->prev = 0; if (context->head_listen_sockets) { - context->head_listen_sockets->s.prev = &ls->s; + context->head_listen_sockets->s.prev = s; } context->head_listen_sockets = ls; us_socket_context_ref(0, context); @@ -366,15 +375,15 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co us_poll_start(p, context->loop, LIBUS_SOCKET_READABLE); struct us_listen_socket_t *ls = (struct us_listen_socket_t *) p; - - ls->s.context = context; - ls->s.timeout = 255; - ls->s.long_timeout = 255; - ls->s.flags.low_prio_state = 0; - ls->s.flags.is_paused = 0; - ls->s.flags.is_ipc = 0; - ls->s.next = 0; - ls->s.flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN); + struct us_socket_t* s = &ls->s; + s->context = context; + s->timeout = 255; + s->long_timeout = 255; + s->flags.low_prio_state = 0; + s->flags.is_paused = 0; + s->flags.is_ipc = 0; + s->next = 0; + s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN); us_internal_socket_context_link_listen_socket(context, ls); ls->socket_ext_size = socket_ext_size; @@ -400,15 +409,16 @@ struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_sock us_poll_start(p, context->loop, 
LIBUS_SOCKET_READABLE); struct us_listen_socket_t *ls = (struct us_listen_socket_t *) p; - ls->s.connect_state = NULL; - ls->s.context = context; - ls->s.timeout = 255; - ls->s.long_timeout = 255; - ls->s.flags.low_prio_state = 0; - ls->s.flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN); - ls->s.flags.is_paused = 0; - ls->s.flags.is_ipc = 0; - ls->s.next = 0; + struct us_socket_t* s = &ls->s; + s->connect_state = NULL; + s->context = context; + s->timeout = 255; + s->long_timeout = 255; + s->flags.low_prio_state = 0; + s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN); + s->flags.is_paused = 0; + s->flags.is_ipc = 0; + s->next = 0; us_internal_socket_context_link_listen_socket(context, ls); ls->socket_ext_size = socket_ext_size; @@ -515,9 +525,10 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co } // if there is only one result we can immediately connect - if (result->entries && result->entries->info.ai_next == NULL) { + struct addrinfo_result_entry* entries = result->entries; + if (entries && entries->info.ai_next == NULL) { struct sockaddr_storage addr; - init_addr_with_port(&result->entries->info, port, &addr); + init_addr_with_port(&entries->info, port, &addr); *has_dns_resolved = 1; struct us_socket_t *s = us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size); Bun__addrinfo_freeRequest(ai_req, s == NULL); @@ -557,17 +568,19 @@ int start_connections(struct us_connecting_socket_t *c, int count) { } ++opened; bsd_socket_nodelay(connect_socket_fd, 1); - - struct us_socket_t *s = (struct us_socket_t *)us_create_poll(c->context->loop, 0, sizeof(struct us_socket_t) + c->socket_ext_size); - s->context = c->context; + struct us_loop_t* loop = c->context->loop; + struct us_socket_context_t* context = c->context; + struct us_socket_t *s = (struct us_socket_t *)us_create_poll(loop, 0, sizeof(struct us_socket_t) + c->socket_ext_size); + s->context = context; s->timeout = c->timeout; s->long_timeout = c->long_timeout; - s->flags.low_prio_state = 0; - s->flags.allow_half_open = (c->options & LIBUS_SOCKET_ALLOW_HALF_OPEN); - s->flags.is_paused = 0; - s->flags.is_ipc = 0; + struct us_socket_flags* flags = &s->flags; + flags->low_prio_state = 0; + flags->allow_half_open = (c->options & LIBUS_SOCKET_ALLOW_HALF_OPEN); + flags->is_paused = 0; + flags->is_ipc = 0; /* Link it into context so that timeout fires properly */ - us_internal_socket_context_link_socket(s->context, s); + us_internal_socket_context_link_socket(context, s); // TODO check this, specifically how it interacts with the SSL code // does this work when we create multiple sockets at once? will we need multiple SSL contexts? 
@@ -579,10 +592,10 @@ int start_connections(struct us_connecting_socket_t *c, int count) { c->connecting_head = s; s->connect_state = c; - + struct us_poll_t* poll = &s->p; /* Connect sockets are semi-sockets just like listen sockets */ - us_poll_init(&s->p, connect_socket_fd, POLL_TYPE_SEMI_SOCKET); - us_poll_start(&s->p, s->context->loop, LIBUS_SOCKET_WRITABLE); + us_poll_init(poll, connect_socket_fd, POLL_TYPE_SEMI_SOCKET); + us_poll_start(poll, loop, LIBUS_SOCKET_WRITABLE); } return opened; } @@ -774,42 +787,50 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con if (us_socket_is_closed(ssl, s) || us_socket_is_shut_down(ssl, s)) { return s; } - + struct us_socket_context_t *old_context = s->context; + struct us_loop_t *loop = old_context->loop; + /* We need to be sure that we are still holding a reference */ + us_socket_context_ref(ssl, old_context); if (s->flags.low_prio_state != 1) { - /* We need to be sure that we still holding a reference*/ - us_socket_context_ref(ssl, context); /* This properly updates the iterator if in on_timeout */ - us_internal_socket_context_unlink_socket(ssl, s->context, s); + us_internal_socket_context_unlink_socket(ssl, old_context, s); + } else { + /* We manually ref/unref context to handle context life cycle with low-priority queue */ + us_socket_context_unref(ssl, old_context); } - struct us_connecting_socket_t *c = s->connect_state; - + struct us_socket_t *new_s = s; + if (ext_size != -1) { - new_s = (struct us_socket_t *) us_poll_resize(&s->p, s->context->loop, sizeof(struct us_socket_t) + ext_size); + struct us_poll_t *pool_ref = &s->p; + + new_s = (struct us_socket_t *) us_poll_resize(pool_ref, loop, sizeof(struct us_socket_t) + ext_size); if (c) { c->connecting_head = new_s; - struct us_socket_context_t *old_context = s->context; c->context = context; - us_internal_socket_context_link_connecting_socket(ssl, context, c); us_internal_socket_context_unlink_connecting_socket(ssl, old_context, c); + us_internal_socket_context_link_connecting_socket(ssl, context, c); } } + new_s->context = context; new_s->timeout = 255; new_s->long_timeout = 255; if (new_s->flags.low_prio_state == 1) { /* update pointers in low-priority queue */ if (!new_s->prev) loop->data.low_prio_head = new_s; else new_s->prev->next = new_s; if (new_s->next) new_s->next->prev = new_s; + /* We manually ref/unref context to handle context life cycle with low-priority queue */ + us_socket_context_ref(ssl, context); } else { us_internal_socket_context_link_socket(context, new_s); - us_socket_context_unref(ssl, context); } - + /* We can safely unref the old context here; the old context can potentially be freed */ + us_socket_context_unref(ssl, old_context); return new_s; } diff --git a/packages/bun-usockets/src/crypto/openssl.c b/packages/bun-usockets/src/crypto/openssl.c index dff3399fb6..7e8c712555 100644 --- a/packages/bun-usockets/src/crypto/openssl.c +++ b/packages/bun-usockets/src/crypto/openssl.c @@ -52,10 +52,6 @@ struct loop_ssl_data { unsigned int ssl_read_input_offset; struct us_socket_t *ssl_socket; - - int last_write_was_msg_more; - int msg_more; - BIO *shared_rbio; BIO *shared_wbio; BIO_METHOD *shared_biom; @@ -139,10 +135,7 @@ int BIO_s_custom_write(BIO *bio, const char *data, int length) { struct loop_ssl_data *loop_ssl_data = (struct loop_ssl_data *)BIO_get_data(bio); - loop_ssl_data->last_write_was_msg_more = - loop_ssl_data->msg_more || length == 16413; - int written =
us_socket_write(0, loop_ssl_data->ssl_socket, data, length, - loop_ssl_data->last_write_was_msg_more); + int written = us_socket_write(0, loop_ssl_data->ssl_socket, data, length); BIO_clear_retry_flags(bio); if (!written) { @@ -192,7 +185,6 @@ struct loop_ssl_data * us_internal_set_loop_ssl_data(struct us_internal_ssl_sock loop_ssl_data->ssl_read_input_length = 0; loop_ssl_data->ssl_read_input_offset = 0; loop_ssl_data->ssl_socket = &s->s; - loop_ssl_data->msg_more = 0; return loop_ssl_data; } @@ -665,8 +657,6 @@ void us_internal_init_loop_ssl_data(struct us_loop_t *loop) { us_calloc(1, sizeof(struct loop_ssl_data)); loop_ssl_data->ssl_read_input_length = 0; loop_ssl_data->ssl_read_input_offset = 0; - loop_ssl_data->last_write_was_msg_more = 0; - loop_ssl_data->msg_more = 0; loop_ssl_data->ssl_read_output = us_malloc(LIBUS_RECV_BUFFER_LENGTH + LIBUS_RECV_BUFFER_PADDING * 2); @@ -1741,17 +1731,16 @@ us_internal_ssl_socket_get_native_handle(struct us_internal_ssl_socket_t *s) { } int us_internal_ssl_socket_raw_write(struct us_internal_ssl_socket_t *s, - const char *data, int length, - int msg_more) { + const char *data, int length) { if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) { return 0; } - return us_socket_write(0, &s->s, data, length, msg_more); + return us_socket_write(0, &s->s, data, length); } int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s, - const char *data, int length, int msg_more) { + const char *data, int length) { if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) || length == 0) { return 0; @@ -1772,14 +1761,8 @@ int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s, loop_ssl_data->ssl_read_input_length = 0; loop_ssl_data->ssl_socket = &s->s; - loop_ssl_data->msg_more = msg_more; - loop_ssl_data->last_write_was_msg_more = 0; int written = SSL_write(s->ssl, data, length); - loop_ssl_data->msg_more = 0; - if (loop_ssl_data->last_write_was_msg_more && !msg_more) { - us_socket_flush(0, &s->s); - } if (written > 0) { return written; @@ -1836,7 +1819,6 @@ void us_internal_ssl_socket_shutdown(struct us_internal_ssl_socket_t *s) { // on_data and checked in the BIO loop_ssl_data->ssl_socket = &s->s; - loop_ssl_data->msg_more = 0; // sets SSL_SENT_SHUTDOWN and waits for the other side to do the same int ret = SSL_shutdown(s->ssl); diff --git a/packages/bun-usockets/src/eventing/epoll_kqueue.c b/packages/bun-usockets/src/eventing/epoll_kqueue.c index 6a18af88e4..3b6c7e438f 100644 --- a/packages/bun-usockets/src/eventing/epoll_kqueue.c +++ b/packages/bun-usockets/src/eventing/epoll_kqueue.c @@ -19,7 +19,6 @@ #include "internal/internal.h" #include #include - #if defined(LIBUS_USE_EPOLL) || defined(LIBUS_USE_KQUEUE) void Bun__internal_dispatch_ready_poll(void* loop, void* poll); @@ -338,7 +337,7 @@ void us_internal_loop_update_pending_ready_polls(struct us_loop_t *loop, struct // if new events does not contain the ready events of this poll then remove (no we filter that out later on) SET_READY_POLL(loop, i, new_poll); - + num_entries_possibly_remaining--; } } @@ -381,19 +380,18 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop, unsigned int ext_size) { int events = us_poll_events(p); + struct us_poll_t *new_p = us_realloc(p, sizeof(struct us_poll_t) + ext_size); - if (p != new_p && events) { + if (p != new_p) { #ifdef LIBUS_USE_EPOLL /* Hack: forcefully update poll by stripping away 
already set events */ new_p->state.poll_type = us_internal_poll_type(new_p); us_poll_change(new_p, loop, events); #else /* Forcefully update poll by resetting them with new_p as user data */ - kqueue_change(loop->fd, new_p->state.fd, 0, events, new_p); -#endif - - /* This is needed for epoll also (us_change_poll doesn't update the old poll) */ + kqueue_change(loop->fd, new_p->state.fd, 0, LIBUS_SOCKET_WRITABLE | LIBUS_SOCKET_READABLE, new_p); +#endif /* This is needed for epoll also (us_change_poll doesn't update the old poll) */ us_internal_loop_update_pending_ready_polls(loop, p, new_p, events, events); } @@ -447,7 +445,7 @@ void us_poll_change(struct us_poll_t *p, struct us_loop_t *loop, int events) { kqueue_change(loop->fd, p->state.fd, old_events, events, p); #endif /* Set all removed events to null-polls in pending ready poll list */ - //us_internal_loop_update_pending_ready_polls(loop, p, p, old_events, events); + // us_internal_loop_update_pending_ready_polls(loop, p, p, old_events, events); } } diff --git a/packages/bun-usockets/src/internal/internal.h b/packages/bun-usockets/src/internal/internal.h index 5af0d56e2f..1989d5d58d 100644 --- a/packages/bun-usockets/src/internal/internal.h +++ b/packages/bun-usockets/src/internal/internal.h @@ -421,10 +421,9 @@ struct us_socket_t *us_internal_ssl_socket_context_connect_unix( size_t pathlen, int options, int socket_ext_size); int us_internal_ssl_socket_write(us_internal_ssl_socket_r s, - const char *data, int length, int msg_more); + const char *data, int length); int us_internal_ssl_socket_raw_write(us_internal_ssl_socket_r s, - const char *data, int length, - int msg_more); + const char *data, int length); void us_internal_ssl_socket_timeout(us_internal_ssl_socket_r s, unsigned int seconds); diff --git a/packages/bun-usockets/src/internal/networking/bsd.h b/packages/bun-usockets/src/internal/networking/bsd.h index c10b96785e..699aeffa92 100644 --- a/packages/bun-usockets/src/internal/networking/bsd.h +++ b/packages/bun-usockets/src/internal/networking/bsd.h @@ -210,7 +210,7 @@ ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags); #if !defined(_WIN32) ssize_t bsd_recvmsg(LIBUS_SOCKET_DESCRIPTOR fd, struct msghdr *msg, int flags); #endif -ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length, int msg_more); +ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length); #if !defined(_WIN32) ssize_t bsd_sendmsg(LIBUS_SOCKET_DESCRIPTOR fd, const struct msghdr *msg, int flags); #endif diff --git a/packages/bun-usockets/src/libusockets.h b/packages/bun-usockets/src/libusockets.h index 6128d855f1..7bb0cd0b53 100644 --- a/packages/bun-usockets/src/libusockets.h +++ b/packages/bun-usockets/src/libusockets.h @@ -419,9 +419,8 @@ struct us_poll_t *us_poll_resize(us_poll_r p, us_loop_r loop, unsigned int ext_s void *us_socket_get_native_handle(int ssl, us_socket_r s) nonnull_fn_decl; /* Write up to length bytes of data. Returns actual bytes written. - * Will call the on_writable callback of active socket context on failure to write everything off in one go. - * Set hint msg_more if you have more immediate data to write. */ -int us_socket_write(int ssl, us_socket_r s, const char * nonnull_arg data, int length, int msg_more) nonnull_fn_decl; + * Will call the on_writable callback of active socket context on failure to write everything off in one go. */ +int us_socket_write(int ssl, us_socket_r s, const char * nonnull_arg data, int length) nonnull_fn_decl; /* Special path for non-SSL sockets. 
Used to send header and payload in one go. Works like us_socket_write. */ int us_socket_write2(int ssl, us_socket_r s, const char *header, int header_length, const char *payload, int payload_length) nonnull_fn_decl; @@ -440,7 +439,7 @@ void *us_connecting_socket_ext(int ssl, struct us_connecting_socket_t *c) nonnul /* Return the socket context of this socket */ struct us_socket_context_t *us_socket_context(int ssl, us_socket_r s) nonnull_fn_decl __attribute__((returns_nonnull)); -/* Withdraw any msg_more status and flush any pending data */ +/* Flush any pending data */ void us_socket_flush(int ssl, us_socket_r s) nonnull_fn_decl; /* Shuts down the connection by sending FIN and/or close_notify */ @@ -471,7 +470,7 @@ void us_socket_local_address(int ssl, us_socket_r s, char *nonnull_arg buf, int struct us_socket_t *us_socket_pair(struct us_socket_context_t *ctx, int socket_ext_size, LIBUS_SOCKET_DESCRIPTOR* fds); struct us_socket_t *us_socket_from_fd(struct us_socket_context_t *ctx, int socket_ext_size, LIBUS_SOCKET_DESCRIPTOR fd, int ipc); struct us_socket_t *us_socket_wrap_with_tls(int ssl, us_socket_r s, struct us_bun_socket_context_options_t options, struct us_socket_events_t events, int socket_ext_size); -int us_socket_raw_write(int ssl, us_socket_r s, const char *data, int length, int msg_more); +int us_socket_raw_write(int ssl, us_socket_r s, const char *data, int length); struct us_socket_t* us_socket_open(int ssl, struct us_socket_t * s, int is_client, char* ip, int ip_length); int us_raw_root_certs(struct us_cert_string_t**out); unsigned int us_get_remote_address_info(char *buf, us_socket_r s, const char **dest, int *port, int *is_ipv6); diff --git a/packages/bun-usockets/src/loop.c b/packages/bun-usockets/src/loop.c index b499dd3651..7830c5ca47 100644 --- a/packages/bun-usockets/src/loop.c +++ b/packages/bun-usockets/src/loop.c @@ -336,12 +336,13 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in case POLL_TYPE_SOCKET: { /* We should only use s, no p after this point */ struct us_socket_t *s = (struct us_socket_t *) p; - + /* The context can change after calling a callback but the loop is always the same */ + struct us_loop_t* loop = s->context->loop; if (events & LIBUS_SOCKET_WRITABLE && !error) { /* Note: if we failed a write as a socket of one loop then adopted * to another loop, this will be wrong. Absurd case though */ - s->context->loop->data.last_write_failed = 0; - + loop->data.last_write_failed = 0; + s = s->context->on_writable(s); if (!s || us_socket_is_closed(0, s)) { @@ -349,8 +350,8 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in } /* If we have no failed write or if we shut down, then stop polling for more writable */ - if (!s->context->loop->data.last_write_failed || us_socket_is_shut_down(0, s)) { - us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_READABLE); + if (!loop->data.last_write_failed || us_socket_is_shut_down(0, s)) { + us_poll_change(&s->p, loop, us_poll_events(&s->p) & LIBUS_SOCKET_READABLE); } } @@ -358,25 +359,28 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in /* Contexts may prioritize down sockets that are currently readable, e.g. when SSL handshake has to be done. 
* SSL handshakes are CPU intensive, so we limit the number of handshakes per loop iteration, and move the rest * to the low-priority queue */ - if (s->context->is_low_prio(s)) { - if (s->flags.low_prio_state == 2) { - s->flags.low_prio_state = 0; /* Socket has been delayed and now it's time to process incoming data for one iteration */ - } else if (s->context->loop->data.low_prio_budget > 0) { - s->context->loop->data.low_prio_budget--; /* Still having budget for this iteration - do normal processing */ + struct us_socket_context_t *context = s->context; + struct us_socket_flags* flags = &s->flags; + if (context->is_low_prio(s)) { + if (flags->low_prio_state == 2) { + flags->low_prio_state = 0; /* Socket has been delayed and now it's time to process incoming data for one iteration */ + } else if (loop->data.low_prio_budget > 0) { + loop->data.low_prio_budget--; /* Still having budget for this iteration - do normal processing */ } else { - us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE); - us_socket_context_ref(0, s->context); - us_internal_socket_context_unlink_socket(0, s->context, s); + struct us_poll_t* poll = &s->p; + us_poll_change(poll, loop, us_poll_events(poll) & LIBUS_SOCKET_WRITABLE); + us_socket_context_ref(0, context); + us_internal_socket_context_unlink_socket(0, context, s); /* Link this socket to the low-priority queue - we use a LIFO queue, to prioritize newer clients that are * maybe not already timeouted - sounds unfair, but works better in real-life with smaller client-timeouts * under high load */ s->prev = 0; - s->next = s->context->loop->data.low_prio_head; + s->next = loop->data.low_prio_head; if (s->next) s->next->prev = s; - s->context->loop->data.low_prio_head = s; + loop->data.low_prio_head = s; - s->flags.low_prio_state = 1; + flags->low_prio_state = 1; break; } @@ -385,7 +389,6 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in size_t repeat_recv_count = 0; do { - const struct us_loop_t* loop = s->context->loop; #ifdef _WIN32 const int recv_flags = MSG_PUSH_IMMEDIATE; #else @@ -478,7 +481,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in } if(s->flags.allow_half_open) { /* We got a Error but is EOF and we allow half open so stop polling for readable and keep going*/ - us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE); + us_poll_change(&s->p, loop, us_poll_events(&s->p) & LIBUS_SOCKET_WRITABLE); s = s->context->on_end(s); } else { /* We dont allow half open just emit end and close the socket */ diff --git a/packages/bun-usockets/src/socket.c b/packages/bun-usockets/src/socket.c index 1d49d2fe77..8b3a8723e3 100644 --- a/packages/bun-usockets/src/socket.c +++ b/packages/bun-usockets/src/socket.c @@ -357,17 +357,17 @@ void *us_connecting_socket_get_native_handle(int ssl, struct us_connecting_socke return (void *) (uintptr_t) -1; } -int us_socket_write(int ssl, struct us_socket_t *s, const char *data, int length, int msg_more) { +int us_socket_write(int ssl, struct us_socket_t *s, const char *data, int length) { #ifndef LIBUS_NO_SSL if (ssl) { - return us_internal_ssl_socket_write((struct us_internal_ssl_socket_t *) s, data, length, msg_more); + return us_internal_ssl_socket_write((struct us_internal_ssl_socket_t *) s, data, length); } #endif if (us_socket_is_closed(ssl, s) || us_socket_is_shut_down(ssl, s)) { return 0; } - int written = bsd_send(us_poll_fd(&s->p), data, length, msg_more); + int 
written = bsd_send(us_poll_fd(&s->p), data, length); if (written != length) { s->context->loop->data.last_write_failed = 1; us_poll_change(&s->p, s->context->loop, LIBUS_SOCKET_READABLE | LIBUS_SOCKET_WRITABLE); @@ -495,14 +495,14 @@ struct us_socket_t* us_socket_open(int ssl, struct us_socket_t * s, int is_clien } -int us_socket_raw_write(int ssl, struct us_socket_t *s, const char *data, int length, int msg_more) { +int us_socket_raw_write(int ssl, struct us_socket_t *s, const char *data, int length) { #ifndef LIBUS_NO_SSL if (ssl) { - return us_internal_ssl_socket_raw_write((struct us_internal_ssl_socket_t *) s, data, length, msg_more); + return us_internal_ssl_socket_raw_write((struct us_internal_ssl_socket_t *) s, data, length); } #endif // non-TLS is always raw - return us_socket_write(ssl, s, data, length, msg_more); + return us_socket_write(ssl, s, data, length); } unsigned int us_get_remote_address_info(char *buf, struct us_socket_t *s, const char **dest, int *port, int *is_ipv6) diff --git a/packages/bun-uws/src/AsyncSocket.h b/packages/bun-uws/src/AsyncSocket.h index 0782794338..e5bcf5cabb 100644 --- a/packages/bun-uws/src/AsyncSocket.h +++ b/packages/bun-uws/src/AsyncSocket.h @@ -247,7 +247,7 @@ public: int max_flush_len = std::min(buffer_len, (size_t)INT_MAX); /* Attempt to write data to the socket */ - int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len, 0); + int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len); total_written += written; /* Check if we couldn't write the entire buffer */ @@ -297,7 +297,7 @@ public: int max_flush_len = std::min(buffer_len, (size_t)INT_MAX); /* Write off as much as we can */ - int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len, /*nextLength != 0 | */length); + int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), max_flush_len); /* On failure return, otherwise continue down the function */ if ((unsigned int) written < buffer_len) { /* Update buffering (todo: we can do better here if we keep track of what happens to this guy later on) */ @@ -342,7 +342,7 @@ public: } } else { /* We are not corked */ - int written = us_socket_write(SSL, (us_socket_t *) this, src, length, nextLength != 0); + int written = us_socket_write(SSL, (us_socket_t *) this, src, length); /* Did we fail? */ if (written < length) { diff --git a/packages/bun-uws/src/HttpContext.h b/packages/bun-uws/src/HttpContext.h index a5b89123bb..0fc7cf9f56 100644 --- a/packages/bun-uws/src/HttpContext.h +++ b/packages/bun-uws/src/HttpContext.h @@ -383,7 +383,7 @@ private: httpContextData->onClientError(SSL, s, result.parserError, data, length); } /* For errors, we only deliver them "at most once". We don't care if they get halfways delivered or not. */ - us_socket_write(SSL, s, httpErrorResponses[httpErrorStatusCode].data(), (int) httpErrorResponses[httpErrorStatusCode].length(), false); + us_socket_write(SSL, s, httpErrorResponses[httpErrorStatusCode].data(), (int) httpErrorResponses[httpErrorStatusCode].length()); us_socket_shutdown(SSL, s); /* Close any socket on HTTP errors */ us_socket_close(SSL, s, 0, nullptr); diff --git a/packages/bun-vscode/README.md b/packages/bun-vscode/README.md index b15037f245..18a72b9464 100644 --- a/packages/bun-vscode/README.md +++ b/packages/bun-vscode/README.md @@ -4,12 +4,12 @@ -This extension adds support for using [Bun](https://bun.sh/) with Visual Studio Code. 
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. +This extension adds support for using [Bun](https://bun.com/) with Visual Studio Code. Bun is an all-in-one toolkit for JavaScript and TypeScript apps. At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
- Documentation + Documentation   •   Discord   •   diff --git a/packages/bun-vscode/package.json b/packages/bun-vscode/package.json index e18d4c4c83..eab9bcfcef 100644 --- a/packages/bun-vscode/package.json +++ b/packages/bun-vscode/package.json @@ -353,7 +353,7 @@ "color": "#3B3738", "theme": "dark" }, - "homepage": "https://bun.sh/", + "homepage": "https://bun.com/", "icon": "assets/icon.png", "keywords": [ "bun", diff --git a/scripts/bootstrap.ps1 b/scripts/bootstrap.ps1 index 10e7756f32..b50ca4772b 100755 --- a/scripts/bootstrap.ps1 +++ b/scripts/bootstrap.ps1 @@ -1,4 +1,4 @@ -# Version: 8 +# Version: 9 # A script that installs the dependencies needed to build and test Bun. # This should work on Windows 10 or newer with PowerShell. @@ -240,11 +240,11 @@ function Install-Git { } function Install-NodeJs { - Install-Package nodejs -Command node -Version "22.9.0" + Install-Package nodejs -Command node -Version "24.3.0" } function Install-Bun { - Install-Package bun -Version "1.1.30" + Install-Package bun -Version "1.2.17" } function Install-Cygwin { diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 16ea4089d5..99cd1c9ea7 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Version: 11 +# Version: 14 # A script that installs the dependencies needed to build and test Bun. # This should work on macOS and Linux with a POSIX shell. @@ -130,7 +130,7 @@ create_directory() { create_tmp_directory() { mktemp="$(require mktemp)" path="$(execute "$mktemp" -d)" - grant_to_user "$path" + grant_to_user "$path" print "$path" } @@ -191,10 +191,21 @@ download_file() { fetch "$file_url" >"$file_tmp_path" grant_to_user "$file_tmp_path" - + print "$file_tmp_path" } +# path=$(download_and_verify_file URL sha256) +download_and_verify_file() { + file_url="$1" + hash="$2" + + path=$(download_file "$file_url") + execute sh -c 'echo "'"$hash $path"'" | sha256sum -c' >/dev/null 2>&1 + + print "$path" +} + append_to_profile() { content="$1" profiles=".profile .zprofile .bash_profile .bashrc .zshrc" @@ -317,7 +328,7 @@ check_operating_system() { distro="$("$sw_vers" -productName)" release="$("$sw_vers" -productVersion)" fi - + case "$arch" in x64) sysctl="$(which sysctl)" @@ -400,7 +411,7 @@ check_package_manager() { pm="brew" ;; linux) - if [ -f "$(which apt)" ]; then + if [ -f "$(which apt-get)" ]; then pm="apt" elif [ -f "$(which dnf)" ]; then pm="dnf" @@ -470,10 +481,8 @@ check_ulimit() { print "Checking ulimits..." 
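  # Illustrative of the end state (a sketch, not literal file contents): after this function runs,
  # /etc/security/limits.d/99-unlimited.conf holds pam_limits lines such as "* soft nofile unlimited",
  # while /etc/systemd/system.conf holds lines such as "DefaultLimitNOFILE=infinity" - systemd spells
  # the same request "infinity", which is why the loop below translates the value before appending.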
systemd_conf="/etc/systemd/system.conf" - if [ -f "$systemd_conf" ]; then - limits_conf="/etc/security/limits.d/99-unlimited.conf" - create_file "$limits_conf" - fi + limits_conf="/etc/security/limits.d/99-unlimited.conf" + create_file "$limits_conf" limits="core data fsize memlock nofile rss stack cpu nproc as locks sigpending msgqueue" for limit in $limits; do @@ -495,6 +504,10 @@ check_ulimit() { fi if [ -f "$systemd_conf" ]; then + # in systemd's configuration you need to say "infinity" when you mean "unlimited" + if [ "$limit_value" = "unlimited" ]; then + limit_value="infinity" + fi append_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value" fi done @@ -534,7 +547,7 @@ check_ulimit() { append_file "$dpkg_conf" "force-unsafe-io" append_file "$dpkg_conf" "no-debsig" - apt_conf="/etc/apt/apt.conf.d/99-ci-options" + apt_conf="/etc/apt/apt.conf.d/99-ci-options" execute_sudo create_directory "$(dirname "$apt_conf")" append_file "$apt_conf" 'Acquire::Languages "none";' append_file "$apt_conf" 'Acquire::GzipIndexes "true";' @@ -549,7 +562,7 @@ check_ulimit() { package_manager() { case "$pm" in apt) - execute_sudo apt "$@" + execute_sudo apt-get "$@" ;; dnf) case "$distro" in @@ -598,6 +611,7 @@ install_packages() { package_manager install \ --yes \ --no-install-recommends \ + --fix-missing \ "$@" ;; dnf) @@ -673,7 +687,7 @@ install_common_software() { esac case "$distro" in - amzn) + amzn | alpine) install_packages \ tar ;; @@ -711,12 +725,7 @@ install_common_software() { } nodejs_version_exact() { - # https://unofficial-builds.nodejs.org/download/release/ - if ! [ "$abi" = "musl" ] && [ -n "$abi_version" ] && ! [ "$(compare_version "$abi_version" "2.27")" = "1" ]; then - print "16.9.1" - else - print "22.9.0" - fi + print "24.3.0" } nodejs_version() { @@ -746,26 +755,60 @@ install_nodejs() { ;; esac - # Some distros do not install the node headers by default. 
- # These are needed for certain FFI tests, such as: `cc.test.ts` - case "$distro" in - alpine | amzn) - install_nodejs_headers - ;; - esac + # Ensure that Node.js headers are always pre-downloaded so that we don't rely on node-gyp + install_nodejs_headers } install_nodejs_headers() { - nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")" + nodejs_version="$(nodejs_version_exact)" + nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$nodejs_version/node-v$nodejs_version-headers.tar.gz")" nodejs_headers_dir="$(dirname "$nodejs_headers_tar")" execute tar -xzf "$nodejs_headers_tar" -C "$nodejs_headers_dir" - nodejs_headers_include="$nodejs_headers_dir/node-v$(nodejs_version_exact)/include" + nodejs_headers_include="$nodejs_headers_dir/node-v$nodejs_version/include" execute_sudo cp -R "$nodejs_headers_include/" "/usr" + + # Also install to node-gyp cache locations for different node-gyp versions + # This ensures node-gyp finds headers without downloading them + setup_node_gyp_cache "$nodejs_version" "$nodejs_headers_dir/node-v$nodejs_version" +} + +setup_node_gyp_cache() { + nodejs_version="$1" + headers_source="$2" + + cache_dir="$home/.cache/node-gyp/$nodejs_version" + + create_directory "$cache_dir" + + # Copy headers + if [ -d "$headers_source/include" ]; then + cp -R "$headers_source/include" "$cache_dir/" 2>/dev/null || true + fi + + # Create installVersion file (node-gyp expects this) + echo "11" > "$cache_dir/installVersion" 2>/dev/null || true + + # For Linux, we don't need .lib files like Windows + # but create the directory structure node-gyp expects + case "$arch" in + x86_64|amd64) + create_directory "$cache_dir/lib/x64" 2>/dev/null || true + ;; + aarch64|arm64) + create_directory "$cache_dir/lib/arm64" 2>/dev/null || true + ;; + *) + create_directory "$cache_dir/lib" 2>/dev/null || true + ;; + esac + + # Ensure entire path is accessible, not just last component + grant_to_user "$home/.cache" } bun_version_exact() { - print "1.2.0" + print "1.2.17" } install_bun() { @@ -910,7 +953,7 @@ install_llvm() { bash="$(require bash)" llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all - + # Install llvm-symbolizer explicitly to ensure it's available for ASAN install_packages "llvm-$(llvm_version)-tools" ;; @@ -930,7 +973,8 @@ install_llvm() { } install_gcc() { - if ! [ "$os" = "linux" ] || ! [ "$distro" = "ubuntu" ] || [ -z "$gcc_version" ]; then + if ! [ "$os" = "linux" ] || ! 
[ "$distro" = "ubuntu" ] || [ -z "$gcc_version" ] + then return fi @@ -1332,6 +1376,58 @@ install_chromium() { esac } +install_age() { + # we only use this to encrypt core dumps, which we only have on Linux + case "$os" in + linux) + age_tarball="" + case "$arch" in + x64) + age_tarball="$(download_and_verify_file https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-linux-amd64.tar.gz 7df45a6cc87d4da11cc03a539a7470c15b1041ab2b396af088fe9990f7c79d50)" + ;; + aarch64) + age_tarball="$(download_and_verify_file https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-linux-arm64.tar.gz 57fd79a7ece5fe501f351b9dd51a82fbee1ea8db65a8839db17f5c080245e99f)" + ;; + esac + + age_extract_dir="$(create_tmp_directory)" + execute tar -C "$age_extract_dir" -zxf "$age_tarball" age/age + move_to_bin "$age_extract_dir/age/age" + ;; + esac +} + +configure_core_dumps() { + # we only have core dumps on Linux + case "$os" in + linux) + # set up a directory that the test runner will look in after running tests + cores_dir="/var/bun-cores-$distro-$release-$arch" + sysctl_file="/etc/sysctl.d/local.conf" + create_directory "$cores_dir" + # ensure core_pattern will point there + # %e = executable filename + # %p = pid + append_file "$sysctl_file" "kernel.core_pattern = $cores_dir/%e-%p.core" + + # disable apport.service if it exists since it will override the core_pattern + if which systemctl >/dev/null; then + if systemctl list-unit-files apport.service >/dev/null; then + execute_sudo "$systemctl" disable --now apport.service + fi + fi + + # load the new configuration + execute_sudo sysctl -p "$sysctl_file" + + # ensure that a regular user will be able to run sysctl + if [ -d /sbin ]; then + append_to_path /sbin + fi + ;; + esac +} + clean_system() { if ! [ "$ci" = "1" ]; then return @@ -1357,6 +1453,8 @@ main() { install_build_essentials install_chromium install_fuse_python + install_age + configure_core_dumps clean_system } diff --git a/scripts/buildkite-failures.ts b/scripts/buildkite-failures.ts new file mode 100755 index 0000000000..fa506f83f4 --- /dev/null +++ b/scripts/buildkite-failures.ts @@ -0,0 +1,740 @@ +#!/usr/bin/env bun + +import { $ } from "bun"; +import { existsSync } from "fs"; +import { resolve } from "path"; + +// Check if we're in a TTY for color support +const isTTY = process.stdout.isTTY || process.env.FORCE_COLOR === "1"; + +// Get git root directory +let gitRoot = process.cwd(); +try { + gitRoot = (await $`git rev-parse --show-toplevel`.quiet().text()).trim(); +} catch { + // Fall back to current directory if not in a git repo +} + +// Helper to convert file path to file:// URL if it exists +function fileToUrl(filePath) { + try { + // Extract just the file path without line numbers or other info + const match = filePath.match(/^([^\s:]+\.(ts|js|tsx|jsx|zig))/); + if (!match) return filePath; + + const cleanPath = match[1]; + const fullPath = resolve(gitRoot, cleanPath); + + if (existsSync(fullPath)) { + return `file://${fullPath}`; + } + } catch (error) { + // If anything fails, just return the original path + } + + return filePath; +} + +// Color codes - simpler color scheme +const colors = { + reset: isTTY ? "\x1b[0m" : "", + bold: isTTY ? "\x1b[1m" : "", + dim: isTTY ? "\x1b[2m" : "", + red: isTTY ? "\x1b[31m" : "", + green: isTTY ? "\x1b[32m" : "", + bgBlue: isTTY ? "\x1b[44m" : "", + bgRed: isTTY ? "\x1b[41m" : "", + white: isTTY ? 
"\x1b[97m" : "", +}; + +// Parse command line arguments +const args = process.argv.slice(2); +const showWarnings = args.includes("--warnings") || args.includes("-w"); +const showFlaky = args.includes("--flaky") || args.includes("-f"); +const inputArg = args.find(arg => !arg.startsWith("-")); + +// Determine what type of input we have +let buildNumber = null; +let branch = null; + +if (inputArg) { + // BuildKite URL + if (inputArg.includes("buildkite.com")) { + const buildMatch = inputArg.match(/builds\/(\d+)/); + if (buildMatch) { + buildNumber = buildMatch[1]; + } + } + // GitHub PR URL + else if (inputArg.includes("github.com") && inputArg.includes("/pull/")) { + const prMatch = inputArg.match(/pull\/(\d+)/); + if (prMatch) { + // Fetch PR info from GitHub API + const prNumber = prMatch[1]; + const prResponse = await fetch(`https://api.github.com/repos/oven-sh/bun/pulls/${prNumber}`); + if (prResponse.ok) { + const pr = await prResponse.json(); + branch = pr.head.ref; + } + } + } + // Plain number or #number - assume it's a GitHub PR + else if (/^#?\d+$/.test(inputArg)) { + const prNumber = inputArg.replace("#", ""); + const prResponse = await fetch(`https://api.github.com/repos/oven-sh/bun/pulls/${prNumber}`); + if (prResponse.ok) { + const pr = await prResponse.json(); + branch = pr.head.ref; + } else { + // If not a valid PR, maybe it's a BuildKite build number + buildNumber = prNumber; + } + } + // Otherwise assume it's a branch name + else { + branch = inputArg; + } +} else { + // No input, use current branch + branch = (await $`git rev-parse --abbrev-ref HEAD`.text()).trim(); +} + +// If branch specified, find latest build +if (!buildNumber) { + const buildsUrl = `https://buildkite.com/bun/bun/builds?branch=${encodeURIComponent(branch)}`; + const response = await fetch(buildsUrl); + const html = await response.text(); + const match = html.match(/\/bun\/bun\/builds\/(\d+)/); + + if (!match) { + console.log(`No builds found for branch: ${branch}`); + process.exit(0); + } + + buildNumber = match[1]; +} + +// Fetch build JSON +const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`); +const build = await buildResponse.json(); + +// Calculate time ago +const buildTime = new Date(build.started_at); +const now = new Date(); +const diffMs = now.getTime() - buildTime.getTime(); +const diffSecs = Math.floor(diffMs / 1000); +const diffMins = Math.floor(diffSecs / 60); +const diffHours = Math.floor(diffMins / 60); +const diffDays = Math.floor(diffHours / 24); + +let timeAgo; +if (diffDays > 0) { + timeAgo = `${diffDays} day${diffDays !== 1 ? "s" : ""} ago`; +} else if (diffHours > 0) { + timeAgo = `${diffHours} hour${diffHours !== 1 ? "s" : ""} ago`; +} else if (diffMins > 0) { + timeAgo = `${diffMins} minute${diffMins !== 1 ? "s" : ""} ago`; +} else { + timeAgo = `${diffSecs} second${diffSecs !== 1 ? 
"s" : ""} ago`; +} + +console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`); + +// Check if build passed +if (build.state === "passed") { + console.log(`${colors.green}✅ Passed!${colors.reset}`); + process.exit(0); +} + +// Get failed jobs +const failedJobs = + build.jobs?.filter(job => job.exit_status && job.exit_status > 0 && !job.soft_failed && job.type === "script") || []; + +// Platform emoji mapping +const platformMap = { + "darwin": "🍎", + "macos": "🍎", + "ubuntu": "🐧", + "debian": "🐧", + "alpine": "🐧", + "linux": "🐧", + "windows": "🪟", + "win": "🪟", +}; + +// Fetch annotations by scraping the build page +const pageResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}`); +const pageHtml = await pageResponse.text(); + +// Extract script tags using HTMLRewriter +let annotationsData = null; +const scriptContents: string[] = []; + +const scriptRewriter = new HTMLRewriter().on("script", { + text(text) { + scriptContents.push(text.text); + }, +}); + +await new Response(scriptRewriter.transform(new Response(pageHtml))).text(); + +// Find the registerRequest call in script contents +const fullScript = scriptContents.join(""); +let registerRequestIndex = fullScript.indexOf("registerRequest"); + +// Find the AnnotationsListRendererQuery after registerRequest +if (registerRequestIndex !== -1) { + const afterRegisterRequest = fullScript.substring(registerRequestIndex); + const annotationsIndex = afterRegisterRequest.indexOf('"AnnotationsListRendererQuery"'); + if (annotationsIndex === -1 || annotationsIndex > 100) { + // Not the right registerRequest call + registerRequestIndex = -1; + } +} + +if (registerRequestIndex !== -1) { + try { + // Find the start of the JSON object (after the comma and any whitespace) + let jsonStart = registerRequestIndex; + + // Skip to the opening brace, accounting for the function name and first parameter + let commaFound = false; + for (let i = registerRequestIndex; i < fullScript.length; i++) { + if (fullScript[i] === "," && !commaFound) { + commaFound = true; + } else if (commaFound && fullScript[i] === "{") { + jsonStart = i; + break; + } + } + + // Find the matching closing brace, considering strings + let braceCount = 0; + let jsonEnd = jsonStart; + let inString = false; + let escapeNext = false; + + for (let i = jsonStart; i < fullScript.length; i++) { + const char = fullScript[i]; + + if (escapeNext) { + escapeNext = false; + continue; + } + + if (char === "\\") { + escapeNext = true; + continue; + } + + if (char === '"' && !inString) { + inString = true; + } else if (char === '"' && inString) { + inString = false; + } + + if (!inString) { + if (char === "{") braceCount++; + else if (char === "}") { + braceCount--; + if (braceCount === 0) { + jsonEnd = i + 1; + break; + } + } + } + } + + const jsonString = fullScript.substring(jsonStart, jsonEnd); + annotationsData = JSON.parse(jsonString); + const edges = annotationsData?.build?.annotations?.edges || []; + + // Just collect all unique annotations by context + const annotationsByContext = new Map(); + + for (const edge of edges) { + const node = edge.node; + if (!node || !node.context) continue; + + // Skip if we already have this context + if (annotationsByContext.has(node.context)) { + continue; + } + + annotationsByContext.set(node.context, { + context: node.context, + html: node.body?.html || "", + }); + } + + // Collect annotations + const annotations = Array.from(annotationsByContext.values()); + + // Group annotations by test 
file to detect duplicates + const annotationsByFile = new Map(); + const nonFileAnnotations = []; + + for (const annotation of annotations) { + // Check if this is a file-based annotation + const isFileAnnotation = annotation.context.match(/\.(ts|js|tsx|jsx|zig)$/); + + if (isFileAnnotation) { + // Parse the HTML to extract all platform sections + const html = annotation.html || ""; + + // Check if this annotation contains multiple
<details> sections (one per platform) + const detailsSections = html.match(/<details
[\s\S]*?<\/details>/g); + + if (detailsSections && detailsSections.length > 1) { + // Multiple platform failures in one annotation + for (const section of detailsSections) { + const summaryMatch = section.match( + /[\s\S]*?]+>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+]+>([\s\S]+?)<\/a>/, + ); + + if (summaryMatch) { + const filePath = summaryMatch[1]; + const failureInfo = summaryMatch[2]; + const platformHtml = summaryMatch[3]; + const platform = platformHtml.replace(/]+>/g, "").trim(); + + const fileKey = `${filePath}|${failureInfo}`; + if (!annotationsByFile.has(fileKey)) { + annotationsByFile.set(fileKey, { + filePath, + failureInfo, + platforms: [], + htmlParts: [], + originalAnnotations: [], + }); + } + + const entry = annotationsByFile.get(fileKey); + entry.platforms.push(platform); + entry.htmlParts.push(section); + entry.originalAnnotations.push({ + ...annotation, + html: section, + originalHtml: html, + }); + } + } + } else { + // Single platform failure + const summaryMatch = html.match( + /[\s\S]*?]+>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+]+>([\s\S]+?)<\/a>/, + ); + + if (summaryMatch) { + const filePath = summaryMatch[1]; + const failureInfo = summaryMatch[2]; + const platformHtml = summaryMatch[3]; + const platform = platformHtml.replace(/]+>/g, "").trim(); + + const fileKey = `${filePath}|${failureInfo}`; + if (!annotationsByFile.has(fileKey)) { + annotationsByFile.set(fileKey, { + filePath, + failureInfo, + platforms: [], + htmlParts: [], + originalAnnotations: [], + }); + } + + const entry = annotationsByFile.get(fileKey); + entry.platforms.push(platform); + entry.htmlParts.push(html); + entry.originalAnnotations.push(annotation); + } else { + // Couldn't parse, treat as non-file annotation + nonFileAnnotations.push(annotation); + } + } + } else { + // Non-file annotations (like "zig error") + nonFileAnnotations.push(annotation); + } + } + + // Create merged annotations + const mergedAnnotations = []; + + // Add file-based annotations + for (const [key, entry] of annotationsByFile) { + const { filePath, failureInfo, platforms, htmlParts, originalAnnotations } = entry; + + // If we have multiple platforms with the same content, merge them + if (platforms.length > 1) { + // Create context string with all platforms + const uniquePlatforms = [...new Set(platforms)]; + const context = `${filePath} - ${failureInfo} on ${uniquePlatforms.join(", ")}`; + + // Check if all HTML parts are identical + const firstHtml = htmlParts[0]; + const allSame = htmlParts.every(html => html === firstHtml); + + let mergedHtml = ""; + if (allSame) { + // If all the same, just use the first one + mergedHtml = firstHtml; + } else { + // If different, try to find one with the most color spans + let bestHtml = firstHtml; + let maxColorCount = (firstHtml.match(/term-fg/g) || []).length; + + for (const html of htmlParts) { + const colorCount = (html.match(/term-fg/g) || []).length; + if (colorCount > maxColorCount) { + maxColorCount = colorCount; + bestHtml = html; + } + } + mergedHtml = bestHtml; + } + + mergedAnnotations.push({ + context, + html: mergedHtml, + merged: true, + platformCount: uniquePlatforms.length, + }); + } else { + // Single platform, use original + mergedAnnotations.push(originalAnnotations[0]); + } + } + + // Add non-file annotations + mergedAnnotations.push(...nonFileAnnotations); + + // Sort annotations: ones with colors at the bottom + const annotationsWithColorInfo = mergedAnnotations.map(annotation => { + const html = annotation.html || ""; + const hasColors = 
html.includes("term-fg") || html.includes("\\x1b["); + return { annotation, hasColors }; + }); + + // Sort: no colors first, then colors + annotationsWithColorInfo.sort((a, b) => { + if (a.hasColors === b.hasColors) return 0; + return a.hasColors ? 1 : -1; + }); + + const sortedAnnotations = annotationsWithColorInfo.map(item => item.annotation); + + // Count failures - look for actual test counts in the content + let totalFailures = 0; + let totalFlaky = 0; + + // First try to count from annotations + for (const annotation of sortedAnnotations) { + const isFlaky = annotation.context.toLowerCase().includes("flaky"); + const html = annotation.html || ""; + + // Look for patterns like "X tests failed" or "X failing" + const failureMatches = html.match(/(\d+)\s+(tests?\s+failed|failing)/gi); + if (failureMatches) { + for (const match of failureMatches) { + const count = parseInt(match.match(/\d+/)[0]); + if (isFlaky) { + totalFlaky += count; + } else { + totalFailures += count; + } + break; // Only count first match to avoid duplicates + } + } else if (!isFlaky) { + // If no count found, count the annotation itself + totalFailures++; + } + } + + // If no annotations, use job count + if (totalFailures === 0 && failedJobs.length > 0) { + totalFailures = failedJobs.length; + } + + // Display failure count + if (totalFailures > 0 || totalFlaky > 0) { + if (totalFailures > 0) { + console.log(`\n${colors.red}${colors.bold}${totalFailures} test failures${colors.reset}`); + } + if (showFlaky && totalFlaky > 0) { + console.log(`${colors.dim}${totalFlaky} flaky tests${colors.reset}`); + } + console.log(); + } else if (failedJobs.length > 0) { + console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`); + } + + // Display all annotations + console.log(); + for (const annotation of sortedAnnotations) { + // Skip flaky tests unless --flaky flag is set + if (!showFlaky && annotation.context.toLowerCase().includes("flaky")) { + continue; + } + + // Display context header with background color + // For merged annotations, show platform info + if (annotation.merged && annotation.platformCount) { + // Extract filename and failure info from context + const contextParts = annotation.context.match(/^(.+?)\s+-\s+(.+?)\s+on\s+(.+)$/); + if (contextParts) { + const [, filename, failureInfo, platformsStr] = contextParts; + const fileUrl = fileToUrl(filename); + console.log( + `${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} - ${failureInfo} ${colors.reset} ${colors.dim}on ${platformsStr}${colors.reset}`, + ); + } else { + const fileUrl = fileToUrl(annotation.context); + console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} ${colors.reset}`); + } + } else { + // Single annotation - need to extract platform info from HTML + const fileUrl = fileToUrl(annotation.context); + + // Try to extract platform info from the HTML for single platform tests + const html = annotation.html || ""; + const singlePlatformMatch = html.match( + /[\s\S]*?]+>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+]+>([\s\S]+?)<\/a>/, + ); + + if (singlePlatformMatch) { + const failureInfo = singlePlatformMatch[2]; + const platformHtml = singlePlatformMatch[3]; + const platform = platformHtml.replace(/]+>/g, "").trim(); + console.log( + `${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} - ${failureInfo} ${colors.reset} ${colors.dim}on ${platform}${colors.reset}`, + ); + } else { + console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} ${colors.reset}`); + } + } + 
console.log(); + + // Process the annotation HTML to preserve colors + const html = annotation.html || ""; + + // First unescape unicode sequences + let unescapedHtml = html + .replace(/\\u003c/g, "<") + .replace(/\\u003e/g, ">") + .replace(/\\u0026/g, "&") + .replace(/\\"/g, '"') + .replace(/\\'/g, "'") + .replace(/\\u001b/g, "\x1b"); // Unescape ANSI escape sequences + + // Handle newlines more carefully - BuildKite sometimes has actual newlines that shouldn't be there + // Only replace \n if it's actually an escaped newline, not part of the content + unescapedHtml = unescapedHtml.replace(/\\n/g, "\n"); + + // Also handle escaped ANSI sequences that might appear as \\x1b or \033 + unescapedHtml = unescapedHtml.replace(/\\\\x1b/g, "\x1b").replace(/\\033/g, "\x1b"); + + // Convert HTML with ANSI color classes to actual ANSI codes + const termColors = { + // Standard colors (0-7) + "term-fg0": "\x1b[30m", // black + "term-fg1": "\x1b[31m", // red + "term-fg2": "\x1b[32m", // green + "term-fg3": "\x1b[33m", // yellow + "term-fg4": "\x1b[34m", // blue + "term-fg5": "\x1b[35m", // magenta + "term-fg6": "\x1b[36m", // cyan + "term-fg7": "\x1b[37m", // white + // Also support 30-37 format + "term-fg30": "\x1b[30m", // black + "term-fg31": "\x1b[31m", // red + "term-fg32": "\x1b[32m", // green + "term-fg33": "\x1b[33m", // yellow + "term-fg34": "\x1b[34m", // blue + "term-fg35": "\x1b[35m", // magenta + "term-fg36": "\x1b[36m", // cyan + "term-fg37": "\x1b[37m", // white + // Bright colors with 'i' prefix + "term-fgi90": "\x1b[90m", // bright black + "term-fgi91": "\x1b[91m", // bright red + "term-fgi92": "\x1b[92m", // bright green + "term-fgi93": "\x1b[93m", // bright yellow + "term-fgi94": "\x1b[94m", // bright blue + "term-fgi95": "\x1b[95m", // bright magenta + "term-fgi96": "\x1b[96m", // bright cyan + "term-fgi97": "\x1b[97m", // bright white + // Also support without 'i' + "term-fg90": "\x1b[90m", // bright black + "term-fg91": "\x1b[91m", // bright red + "term-fg92": "\x1b[92m", // bright green + "term-fg93": "\x1b[93m", // bright yellow + "term-fg94": "\x1b[94m", // bright blue + "term-fg95": "\x1b[95m", // bright magenta + "term-fg96": "\x1b[96m", // bright cyan + "term-fg97": "\x1b[97m", // bright white + // Background colors + "term-bg40": "\x1b[40m", // black + "term-bg41": "\x1b[41m", // red + "term-bg42": "\x1b[42m", // green + "term-bg43": "\x1b[43m", // yellow + "term-bg44": "\x1b[44m", // blue + "term-bg45": "\x1b[45m", // magenta + "term-bg46": "\x1b[46m", // cyan + "term-bg47": "\x1b[47m", // white + // Text styles + "term-bold": "\x1b[1m", + "term-dim": "\x1b[2m", + "term-italic": "\x1b[3m", + "term-underline": "\x1b[4m", + }; + + let text = unescapedHtml; + + // Convert color spans to ANSI codes if TTY + if (isTTY) { + // Convert spans with color classes to ANSI codes + for (const [className, ansiCode] of Object.entries(termColors)) { + // Match spans that contain the class name (might have multiple classes) + // Need to handle both formats: and + const regex = new RegExp(`]*class="[^"]*\\b${className}\\b[^"]*"[^>]*>([\\s\\S]*?)`, "g"); + text = text.replace(regex, (match, content) => { + // Don't add reset if the content already has ANSI codes + if (content.includes("\x1b[")) { + return `${ansiCode}${content}`; + } + return `${ansiCode}${content}${colors.reset}`; + }); + } + } + + // Check if we already have ANSI codes in the text after processing + const hasExistingAnsi = text.includes("\x1b["); + + // Check for broken color patterns (single characters wrapped in colors) 
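    // (This is a heuristic: BuildKite's rendered spans occasionally nest so that only the bracket
    //  characters of a timing such as "[ 12.3ms]" carry the green class while the text inside is red.)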
+ // If we see patterns like green[, red text, green], it's likely broken + // Also check for patterns like: green[, then reset, then text, then red text, then reset, then green] + const hasBrokenColors = + text.includes("\x1b[32m[") || + text.includes("\x1b[32m]") || + (text.includes("\x1b[32m✓") && text.includes("\x1b[31m") && text.includes("ms]")); + + if (hasBrokenColors) { + // Remove all ANSI codes if the coloring looks broken + text = text.replace(/\x1b\[[0-9;]*m/g, ""); + } + + // Remove all HTML tags, but be careful with existing ANSI codes + text = text + .replace(/]*>]*>([\s\S]*?)<\/code><\/pre>/g, "$1") + .replace(//g, "\n") + .replace(/<\/p>/g, "\n") + .replace(/
/g, "") + .replace(/<[^>]+>/g, "") + .replace(/</g, "<") + .replace(/>/g, ">") + .replace(/&/g, "&") + .replace(/"/g, '"') + .replace(/'/g, "'") + .replace(/ /g, " ") + .replace(/\u00A0/g, " ") // Non-breaking space + .trim(); + + // Remove excessive blank lines - be more aggressive + text = text.replace(/\n\s*\n\s*\n+/g, "\n\n"); // Replace 3+ newlines with 2 + text = text.replace(/\n\s*\n/g, "\n"); // Replace 2 newlines with 1 + + // For zig error annotations, check if there are multiple platform sections + let handled = false; + if (annotation.context.includes("zig error")) { + // Split by platform headers within the content + const platformSections = text.split(/(?=^\s*[^\s\/]+\.zig\s*-\s*zig error\s+on\s+)/m); + + if (platformSections.length > 1) { + // Skip the first empty section if it exists + const sections = platformSections.filter(s => s.trim()); + + if (sections.length > 1) { + // We have multiple platform errors in one annotation + // Extract unique platform names + const platforms = []; + for (const section of sections) { + const platformMatch = section.match(/on\s+(\S+)/); + if (platformMatch) { + platforms.push(platformMatch[1]); + } + } + + // Show combined header with background color + const filename = annotation.context; + const fileUrl = fileToUrl(filename); + const platformText = platforms.join(", "); + console.log( + `${colors.bgRed}${colors.white}${colors.bold} ${fileUrl} ${colors.reset} ${colors.dim}on ${platformText}${colors.reset}`, + ); + console.log(); + + // Show only the first error detail (they're the same) + const firstError = sections[0]; + const errorLines = firstError.split("\n"); + + // Skip the platform-specific header line and remove excessive blank lines + let previousWasBlank = false; + for (let i = 0; i < errorLines.length; i++) { + const line = errorLines[i]; + if (i === 0 && line.match(/\.zig\s*-\s*zig error\s+on\s+/)) { + continue; // Skip platform header + } + + // Skip multiple consecutive blank lines + const isBlank = line.trim() === ""; + if (isBlank && previousWasBlank) { + continue; + } + previousWasBlank = isBlank; + + console.log(line); // No indentation + } + console.log(); + handled = true; + } + } + } + + // Normal processing for other annotations + if (!handled) { + // For merged annotations, skip the duplicate headers within the content + const isMerged = annotation.merged || (annotation.platformCount && annotation.platformCount > 1); + + // Process lines, removing excessive blank lines + let previousWasBlank = false; + text.split("\n").forEach((line, index) => { + // For merged annotations, skip duplicate platform headers + if ( + isMerged && + index > 0 && + line.match(/^[^\s\/]+\.(ts|js|tsx|jsx|zig)\s*-\s*\d+\s+(failing|errors?|warnings?)\s+on\s+/) + ) { + return; // Skip duplicate headers in merged content + } + + // Skip multiple consecutive blank lines + const isBlank = line.trim() === ""; + if (isBlank && previousWasBlank) { + return; + } + previousWasBlank = isBlank; + + console.log(line); // No indentation + }); + console.log(); + } + } + } catch (e) { + console.error("Failed to parse annotations:", e); + console.log("\nView detailed results at:"); + console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`); + } +} else { + console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`); + console.log("View detailed results at:"); + console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`); +} diff --git a/scripts/debug-coredump.ts 
b/scripts/debug-coredump.ts new file mode 100644 index 0000000000..625afb727a --- /dev/null +++ b/scripts/debug-coredump.ts @@ -0,0 +1,63 @@ +import fs from "node:fs"; +import { tmpdir } from "node:os"; +import { basename, join } from "node:path"; +import { parseArgs } from "node:util"; + +// usage: bun debug-coredump.ts +// -p (buildkite should show this) +// -b +// -c +// -d (default: lldb) +const { + values: { pid: stringPid, ["build-url"]: buildUrl, ["cores-url"]: coresUrl, debugger: debuggerPath }, +} = parseArgs({ + options: { + pid: { type: "string", short: "p" }, + ["build-url"]: { type: "string", short: "b" }, + ["cores-url"]: { type: "string", short: "c" }, + debugger: { type: "string", short: "d", default: "lldb" }, + }, +}); + +if (stringPid === undefined) throw new Error("no PID given"); +const pid = parseInt(stringPid); +if (buildUrl === undefined) throw new Error("no build-url given"); +if (coresUrl === undefined) throw new Error("no cores-url given"); +if (!process.env.AGE_CORES_IDENTITY?.startsWith("AGE-SECRET-KEY-")) + throw new Error("no identity given in $AGE_CORES_IDENTITY"); + +const id = Bun.hash(buildUrl + coresUrl).toString(36); +const dir = join(tmpdir(), `debug-coredump-${id}.tmp`); +fs.mkdirSync(dir, { recursive: true }); + +if (!fs.existsSync(join(dir, "bun-profile")) || !fs.existsSync(join(dir, `bun-${pid}.core`))) { + console.log("downloading bun-profile.zip"); + const zip = await (await fetch(buildUrl)).arrayBuffer(); + await Bun.write(join(dir, "bun-profile.zip"), zip); + // -j: junk paths (don't create directories when extracting) + // -o: overwrite without prompting + // -d: extract to this directory instead of cwd + await Bun.$`unzip -j -o ${join(dir, "bun-profile.zip")} -d ${dir}`; + + console.log("downloading cores"); + const cores = await (await fetch(coresUrl)).arrayBuffer(); + await Bun.$`bash -c ${`age -d -i <(echo "$AGE_CORES_IDENTITY")`} < ${cores} | tar -zxvC ${dir}`; + + console.log("moving cores out of nested directory"); + for await (const file of new Bun.Glob("bun-cores-*/bun-*.core").scan(dir)) { + fs.renameSync(join(dir, file), join(dir, basename(file))); + } +} else { + console.log(`already downloaded in ${dir}`); +} + +console.log("launching debugger:"); +console.log(`${debuggerPath} --core ${join(dir, `bun-${pid}.core`)} ${join(dir, "bun-profile")}`); + +const proc = await Bun.spawn([debuggerPath, "--core", join(dir, `bun-${pid}.core`), join(dir, "bun-profile")], { + stdin: "inherit", + stdout: "inherit", + stderr: "inherit", +}); +await proc.exited; +process.exit(proc.exitCode); diff --git a/scripts/gamble.ts b/scripts/gamble.ts index c2b2f7eb23..6da9cab285 100755 --- a/scripts/gamble.ts +++ b/scripts/gamble.ts @@ -11,6 +11,23 @@ let numTimedOut = 0; const signals = new Map(); const codes = new Map(); let numOk = 0; +const width = attempts.toString().length; +const pad = (num: number): string => num.toString().padStart(width, " "); +const green = (text: string) => console.log(`\x1b[32m${text}\x1b[0m`); +const red = (text: string) => console.log(`\x1b[31m${text}\x1b[0m`); +const formatTime = (ms: number): string => { + if (ms < 0) ms = 0; + const totalSeconds = Math.floor(ms / 1000); + const hours = Math.floor(totalSeconds / 3600); + const minutes = Math.floor((totalSeconds % 3600) / 60); + const seconds = totalSeconds % 60; + + const padNumber = (n: number) => n.toString().padStart(2, "0"); + + return `${padNumber(hours)}:${padNumber(minutes)}:${padNumber(seconds)}`; +}; +const start = Date.now(); +let totalTimeEstimate = -1; for (let 
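// (The progress line printed at the end of each attempt estimates time remaining with an
//  exponential moving average, 0.8 * previous + 0.2 * latest, to smooth per-attempt variance.)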
i = 0; i < attempts; i++) { const proc = Bun.spawn({ @@ -24,31 +41,42 @@ for (let i = 0; i < attempts; i++) { const errors = await new Response(proc.stderr).text(); const { signalCode: signal, exitCode } = proc; + let description: string; if (signal === "SIGTERM") { // sent for timeouts numTimedOut += 1; + description = "timeout"; } else if (signal) { const newCount = 1 + (signals.get(signal) ?? 0); signals.set(signal, newCount); + description = signal; } else if (exitCode !== 0) { // if null there should have been a signal assert(exitCode !== null); const newCount = 1 + (codes.get(exitCode) ?? 0); codes.set(exitCode, newCount); + description = `code ${exitCode}`; } else { + description = "ok"; numOk += 1; } - if (exitCode !== 0) console.log(errors); - process.stdout.write(exitCode === 0 ? "." : "!"); + if (exitCode !== 0) { + red(" " + description); + console.log(errors); + } + const now = Date.now(); + const currentTotalTimeEstimate = (now - start) / ((i + 1) / attempts); + if (totalTimeEstimate < 0) { + totalTimeEstimate = currentTotalTimeEstimate; + } else { + totalTimeEstimate = 0.8 * totalTimeEstimate + 0.2 * currentTotalTimeEstimate; + } + const remaining = totalTimeEstimate - (now - start); + process.stdout.write(`\r\x1b[2K${pad(i + 1)}/${attempts} completed, ${formatTime(remaining)} remaining`); } process.stdout.write("\n"); -const width = attempts.toString().length; -const pad = (num: number): string => num.toString().padStart(width, " "); -const green = (text: string) => console.log(`\x1b[32m${text}\x1b[0m`); -const red = (text: string) => console.log(`\x1b[31m${text}\x1b[0m`); - green(`${pad(numOk)}/${attempts} OK`); if (numTimedOut > 0) { red(`${pad(numTimedOut)}/${attempts} timeout`); diff --git a/scripts/longest.js b/scripts/longest.js new file mode 100644 index 0000000000..df78b0e85e --- /dev/null +++ b/scripts/longest.js @@ -0,0 +1,125 @@ +const fs = require("fs"); +const path = require("path"); + +// Regex patterns for different types of top-level declarations +const DECLARATION_PATTERN = + // pub? (export|extern)? 
(const|fn|var) name + /^(pub\s+)?(export\s+|extern\s+)?(const|fn|var)\s+([a-zA-Z_][a-zA-Z0-9_]*)/; + +function findDeclarations(filePath) { + const content = fs.readFileSync(filePath, "utf8"); + const lines = content.split("\n"); + const declarations = []; + + // First pass: collect all declarations with their line numbers + for (let lineNum = 0; lineNum < lines.length; lineNum++) { + const line = lines[lineNum]; + + // Skip empty lines and comments + if (!line || line.trim().startsWith("//") || line.trim().startsWith("///")) { + continue; + } + + // Only process top-level declarations (no indentation) + if (line.startsWith(" ") || line.startsWith("\t")) { + continue; + } + + const trimmedLine = line.trim(); + + // Check each pattern + const match = trimmedLine.match(DECLARATION_PATTERN); + if (match) { + // Extract the name from the match + const name = match[match.length - 1]; // Last capture group is the name + + declarations.push({ + name, + match: match[0], + line: lineNum + 1, + type: getDeclarationType(match[0]), + fullLine: trimmedLine, + startLine: lineNum, + }); + } + } + + // Second pass: calculate sizes based on next declaration's start line + for (let i = 0; i < declarations.length; i++) { + const currentDecl = declarations[i]; + const nextDecl = declarations[i + 1]; + + if (nextDecl) { + // Size is from current declaration start to next declaration start + currentDecl.size = nextDecl.startLine - currentDecl.startLine; + } else { + // Last declaration: size is from current declaration start to end of file + currentDecl.size = lines.length - currentDecl.startLine; + } + } + + return declarations; +} + +function getDeclarationType(matchText) { + if (matchText.includes("const")) return "const"; + if (matchText.includes("fn")) return "fn"; + if (matchText.includes("var")) return "var"; + return "unknown"; +} + +function main() { + const args = process.argv.slice(2); + + if (args.length === 0) { + console.error("Usage: bun longest.js "); + console.error("Example: bun longest.js src/walker_skippable.zig"); + process.exit(1); + } + + const filePath = args[0]; + + if (!fs.existsSync(filePath)) { + console.error(`File not found: ${filePath}`); + process.exit(1); + } + + if (!filePath.endsWith(".zig")) { + console.error("Please provide a .zig file"); + process.exit(1); + } + + try { + const declarations = findDeclarations(filePath); + + if (declarations.length === 0) { + console.log("No top-level declarations found."); + return; + } + + console.log(`Found ${declarations.length} top-level declarations in ${filePath}:\n`); + + // Sort by declaration size (smallest first) + declarations.sort((a, b) => a.size - b.size); + + // Find the longest name for formatting + const maxNameLength = Math.max(...declarations.map(d => d.match.length)); + const maxTypeLength = Math.max(...declarations.map(d => d.type.length)); + + console.log(`${"Name".padEnd(maxNameLength + 2)} ${"Type".padEnd(maxTypeLength + 2)} ${"Num Lines".padEnd(6)}`); + console.log("-".repeat(maxNameLength + maxTypeLength + 15)); + + declarations.forEach(decl => { + console.log( + `${decl.match.padEnd(maxNameLength + 2)} ${decl.type.padEnd(maxTypeLength + 2)} ${decl.size.toString().padEnd(6)}`, + ); + }); + } catch (error) { + console.error("Error reading file:", error.message); + process.exit(1); + } +} + +if (require.main === module) { + main(); +} diff --git a/scripts/nav2readme.ts b/scripts/nav2readme.ts index 58c936b15b..057b5d9c04 100644 --- a/scripts/nav2readme.ts +++ b/scripts/nav2readme.ts @@ -16,7 +16,7 @@ function 
getQuickLinks() { if (item.type === "divider") { md += "\n" + `- ${item.title}` + "\n"; } else { - md += ` - [${item.title}](https://bun.sh/docs/${item.slug})` + "\n"; + md += ` - [${item.title}](https://bun.com/docs/${item.slug})` + "\n"; } } @@ -70,7 +70,7 @@ async function getGuides() { prevDirname = dirname; } md += - ` - [${name}](https://bun.sh/guides/${path.dirname(file)}/${path.basename(file, path.extname(file))})` + "\n"; + ` - [${name}](https://bun.com/guides/${path.dirname(file)}/${path.basename(file, path.extname(file))})` + "\n"; } return md; diff --git a/scripts/runner.node.mjs b/scripts/runner.node.mjs index 70c7cd5def..8a320d5612 100755 --- a/scripts/runner.node.mjs +++ b/scripts/runner.node.mjs @@ -51,6 +51,7 @@ import { isBuildkite, isCI, isGithubAction, + isLinux, isMacOS, isWindows, isX64, @@ -59,6 +60,7 @@ import { startGroup, tmpdir, unzip, + uploadArtifact, } from "./utils.mjs"; let isQuiet = false; const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd(); @@ -146,6 +148,10 @@ const { values: options, positionals: filters } = parseArgs({ type: "boolean", default: isBuildkite, }, + ["coredump-upload"]: { + type: "boolean", + default: isBuildkite && isLinux, + }, }, }); @@ -230,6 +236,27 @@ function getTestExpectations() { return expectations; } +/** + * Returns whether we should validate exception checks running the given test + * @param {string} test + * @returns {boolean} + */ +const shouldValidateExceptions = (() => { + let skipArray; + return test => { + if (!skipArray) { + const path = join(cwd, "test/no-validate-exceptions.txt"); + if (!existsSync(path)) { + skipArray = []; + } + skipArray = readFileSync(path, "utf-8") + .split("\n") + .filter(line => !line.startsWith("#") && line.length > 0); + } + return !(skipArray.includes(test) || skipArray.includes("test/" + test)); + }; +})(); + /** * @param {string} testPath * @returns {string[]} @@ -416,16 +443,20 @@ async function runTests() { const runWithBunTest = title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test"); const subcommand = runWithBunTest ? 
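      // ("needs-test" in the title or an import of bun:test/node:test means the file must run
      //  under the test runner; everything else is executed as a plain script via `bun run`.)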
"test" : "run"; + const env = { + FORCE_COLOR: "0", + NO_COLOR: "1", + BUN_DEBUG_QUIET_LOGS: "1", + }; + if (basename(execPath).includes("asan") && shouldValidateExceptions(testPath)) { + env.BUN_JSC_validateExceptionChecks = "1"; + } await runTest(title, async () => { const { ok, error, stdout } = await spawnBun(execPath, { cwd: cwd, args: [subcommand, "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"), absoluteTestPath], timeout: getNodeParallelTestTimeout(title), - env: { - FORCE_COLOR: "0", - NO_COLOR: "1", - BUN_DEBUG_QUIET_LOGS: "1", - }, + env, stdout: chunk => pipeTestStdout(process.stdout, chunk), stderr: chunk => pipeTestStdout(process.stderr, chunk), }); @@ -580,6 +611,78 @@ async function runTests() { } } + if (options["coredump-upload"]) { + try { + // this sysctl is set in bootstrap.sh to /var/bun-cores-$distro-$release-$arch + const sysctl = await spawnSafe({ command: "sysctl", args: ["-n", "kernel.core_pattern"] }); + let coresDir = sysctl.stdout; + if (sysctl.ok) { + if (coresDir.startsWith("|")) { + throw new Error("cores are being piped not saved"); + } + // change /foo/bar/%e-%p.core to /foo/bar + coresDir = dirname(sysctl.stdout); + } else { + throw new Error(`Failed to check core_pattern: ${sysctl.error}`); + } + + const coresDirBase = dirname(coresDir); + const coresDirName = basename(coresDir); + const coreFileNames = readdirSync(coresDir); + + if (coreFileNames.length > 0) { + console.log(`found ${coreFileNames.length} cores in ${coresDir}`); + let totalBytes = 0; + let totalBlocks = 0; + for (const f of coreFileNames) { + const stat = statSync(join(coresDir, f)); + totalBytes += stat.size; + totalBlocks += stat.blocks; + } + console.log(`total apparent size = ${totalBytes} bytes`); + console.log(`total size on disk = ${512 * totalBlocks} bytes`); + const outdir = mkdtempSync(join(tmpdir(), "cores-upload")); + const outfileName = `${coresDirName}.tar.gz.age`; + const outfileAbs = join(outdir, outfileName); + + // This matches an age identity known by Bun employees. Core dumps from CI have to be kept + // secret since they will contain API keys. + const ageRecipient = "age1eunsrgxwjjpzr48hm0y98cw2vn5zefjagt4r0qj4503jg2nxedqqkmz6fu"; // reject external PRs changing this, see above + + // Run tar in the parent directory of coresDir so that it creates archive entries with + // coresDirName in them. This way when you extract the tarball you get a folder named + // bun-cores-XYZ containing core files, instead of a bunch of core files strewn in your + // current directory + const before = Date.now(); + const zipAndEncrypt = await spawnSafe({ + command: "bash", + args: [ + "-c", + // tar -S: handle sparse files efficiently + `set -euo pipefail && tar -Sc "$0" | gzip -1 | age -e -r ${ageRecipient} -o "$1"`, + // $0 + coresDirName, + // $1 + outfileAbs, + ], + cwd: coresDirBase, + stdout: () => {}, + timeout: 60_000, + }); + const elapsed = Date.now() - before; + if (!zipAndEncrypt.ok) { + throw new Error(zipAndEncrypt.error); + } + console.log(`saved core dumps to ${outfileAbs} (${statSync(outfileAbs).size} bytes) in ${elapsed} ms`); + await uploadArtifact(outfileAbs); + } else { + console.log(`no cores found in ${coresDir}`); + } + } catch (err) { + console.error("Error collecting and uploading core dumps:", err); + } + } + if (!isCI && !isQuiet) { console.table({ "Total Tests": okResults.length + failedResults.length + flakyResults.length, @@ -755,6 +858,7 @@ async function spawnSafe(options) { const [, message] = error || []; error = message ? 
message.split("\n")[0].toLowerCase() : "crash"; error = error.indexOf("\\n") !== -1 ? error.substring(0, error.indexOf("\\n")) : error; + error = `pid ${subprocess.pid} ${error}`; } else if (signalCode) { if (signalCode === "SIGTERM" && duration >= timeout) { error = "timeout"; @@ -846,7 +950,7 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) { }; if (basename(execPath).includes("asan")) { - bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1"; + bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0"; } if (isWindows && bunEnv.Path) { @@ -953,13 +1057,18 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) { testArgs.push(absPath); + const env = { + GITHUB_ACTIONS: "true", // always true so annotations are parsed + }; + if (basename(execPath).includes("asan") && shouldValidateExceptions(relative(cwd, absPath))) { + env.BUN_JSC_validateExceptionChecks = "1"; + } + const { ok, error, stdout } = await spawnBun(execPath, { args: isReallyTest ? testArgs : [...args, absPath], cwd: options["cwd"], timeout: isReallyTest ? timeout : 30_000, - env: { - GITHUB_ACTIONS: "true", // always true so annotations are parsed - }, + env, stdout: chunk => pipeTestStdout(process.stdout, chunk), stderr: chunk => pipeTestStdout(process.stderr, chunk), }); @@ -993,7 +1102,7 @@ function getTestTimeout(testPath) { if (/integration|3rd_party|docker|bun-install-registry|v8/i.test(testPath)) { return integrationTimeout; } - if (/napi/i.test(testPath)) { + if (/napi/i.test(testPath) || /v8/i.test(testPath)) { return napiTimeout; } return testTimeout; diff --git a/scripts/sortImports.ts b/scripts/sortImports.ts new file mode 100644 index 0000000000..4113aff3a1 --- /dev/null +++ b/scripts/sortImports.ts @@ -0,0 +1,394 @@ +import { readdirSync } from "fs"; +import path from "path"; + +// Parse command line arguments +const args = process.argv.slice(2); + +const filePaths = args.filter(arg => !arg.startsWith("-")); +const usage = String.raw` + __ .__ __ + ____________________/ |_ _______|__| _____ ______ ____________/ |_ ______ + \___ / _ \_ __ \ __\ \___ / |/ \\____ \ / _ \_ __ \ __\/ ___/ + / ( <_> ) | \/| | / /| | Y Y \ |_> > <_> ) | \/| | \___ \ + /_____ \____/|__| |__| /_____ \__|__|_| / __/ \____/|__| |__| /____ > + \/ \/ \/|__| \/ + +Usage: bun scripts/sortImports [options] + +Options: + --help Show this help message + --no-include-pub Exclude pub imports from sorting + --no-remove-unused Don't remove unused imports + --include-unsorted Process files even if they don't have @sortImports marker + +Examples: + bun scripts/sortImports src +`.slice(1); +if (args.includes("--help")) { + console.log(usage); + process.exit(0); +} +if (filePaths.length === 0) { + console.error(usage); + process.exit(1); +} + +const config = { + includePub: !args.includes("--no-include-pub"), + removeUnused: !args.includes("--no-remove-unused"), + includeUnsorted: args.includes("--include-unsorted"), +}; + +// Type definitions +type Declaration = { + index: number; + key: string; + value: string; + segments: string[] | null; + whole: string; + last?: string; + wholepath?: string[]; +}; + +// Parse declarations from the file +function parseDeclarations( + lines: string[], + fileContents: string, +): { + declarations: Map; + unusedLineIndices: number[]; +} { + const declarations = new Map(); + const unusedLineIndices: number[] = []; + + // for stability + const sortedLineKeys = [...lines.keys()].sort((a, b) => (lines[a] < lines[b] ? -1 : lines[a] > lines[b] ? 
1 : 0)); + + for (const i of sortedLineKeys) { + const line = lines[i]; + + if (line === "// @sortImports") { + lines[i] = ""; + continue; + } + + const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);$/; + const match = line.match(inlineDeclPattern); + + if (!match) continue; + + const name = match[1]; + const value = match[2]; + + // Skip if the previous line has a doc comment + const prevLine = lines[i - 1] ?? ""; + if (prevLine.startsWith("///")) { + continue; + } + + // Skip unused declarations (non-public declarations that appear only once) + if (config.removeUnused && !line.includes("pub ")) { + const escapedName = name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + const expectedCount = (line.match(new RegExp(`\\b${escapedName}\\b`, "g")) || []).length; + const actualCount = (fileContents.match(new RegExp(`\\b${escapedName}\\b`, "g")) || []).length; + if (expectedCount === actualCount) { + // unused decl + unusedLineIndices.push(i); + continue; + } + } + + if (!config.includePub && line.includes("pub ")) { + continue; + } + + declarations.set(name, { + whole: line, + index: i, + key: name, + value, + segments: parseSegments(value), + }); + } + + return { declarations, unusedLineIndices }; +} + +// Validate if a segment is a valid identifier +function isValidSegment(segment: string): boolean { + if (segment.startsWith("@import(") || segment === "@This()") { + return true; + } + return segment.match(/^[a-zA-Z0-9_]+$/) != null; +} + +// Parse import path segments from a value +function parseSegments(value: string): null | string[] { + if (value.startsWith("@import(")) { + const rightBracketIndex = value.indexOf(")"); + if (rightBracketIndex === -1) return null; + + const importPart = value.slice(0, rightBracketIndex + 1); + const remainingPart = value.slice(rightBracketIndex + 1); + + if (remainingPart.startsWith(".")) { + const segments = remainingPart.slice(1).split("."); + if (!segments.every(segment => isValidSegment(segment))) return null; + return [importPart, ...segments]; + } else if (remainingPart === "") { + return [importPart]; + } else { + return null; + } + } else { + const segments = value.split("."); + if (!segments.every(segment => isValidSegment(segment))) return null; + return segments; + } +} + +// Resolve the first segment of an import path +function resolveFirstSegment(firstSegment: string, declarations: Map): null | string[] { + if (firstSegment.startsWith("@import(") || firstSegment.startsWith("@This()")) { + return [firstSegment]; + } else { + const declaration = declarations.get(firstSegment); + if (!declaration) { + return null; // Unknown declaration + } + + const subFirstSegment = declaration.segments?.[0]; + if (!subFirstSegment) { + return null; // Invalid declaration + } + + const resolvedSubFirst = resolveFirstSegment(subFirstSegment, declarations); + if (!resolvedSubFirst) { + return null; // Unable to resolve + } + + return [...resolvedSubFirst, ...(declaration.segments?.slice(1) ?? 
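      // e.g. with `const bun = @import("bun");` in scope, a line `const Foo = bun.Foo;` resolves to
      // ["@import(\"bun\")", "Foo"], letting it be grouped with the other bun.* imports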
[])]; + } +} + +type Group = { + keySegments: string[]; + declarations: Declaration[]; +}; + +// Group declarations by their import paths +function groupDeclarationsByImportPath(declarations: Map): Map { + const groups = new Map(); + + for (const declaration of declarations.values()) { + if (!declaration.segments || declaration.segments.length < 1) { + continue; + } + + const firstSegment = declaration.segments[0]; + const resolvedFirst = resolveFirstSegment(firstSegment, declarations); + + if (!resolvedFirst) { + continue; + } + + const remainingSegments = declaration.segments.slice(1); + const fullPath = [...resolvedFirst, ...remainingSegments]; + const lastSegment = fullPath.pop(); + + if (!lastSegment) { + continue; + } + + const groupKey = fullPath.join("."); + if (!groups.has(groupKey)) { + groups.set(groupKey, { keySegments: fullPath, declarations: [] }); + } + + groups.get(groupKey)!.declarations.push(declaration); + declaration.last = lastSegment; + declaration.wholepath = [...fullPath, lastSegment]; + } + + return groups; +} + +// Merge single-item groups into their parent groups +function mergeSingleItemGroups(groups: Map): void { + while (true) { + let hasChanges = false; + + for (const [groupKey, group] of groups.entries()) { + if (group.declarations.length === 1) { + const gcsplit = [...group.keySegments]; + while (gcsplit.pop()) { + const parentKey = gcsplit.join("."); + if (groups.has(parentKey)) { + groups.get(parentKey)!.declarations.push(group.declarations[0]); + groups.delete(groupKey); + hasChanges = true; + break; + } + } + } + } + + if (!hasChanges) break; + } +} + +// Move items with child groups to the top of those child groups +function promoteItemsWithChildGroups(groups: Map): void { + for (const [groupKey, group] of groups.entries()) { + for (let i = 0; i < group.declarations.length; ) { + const item = group.declarations[i]; + const childGroupKey = (groupKey ? groupKey + "." : "") + item.last; + + if (groups.has(childGroupKey)) { + groups.get(childGroupKey)!.declarations.unshift(item); + group.declarations.splice(i, 1); + } else { + i++; + } + } + } +} + +// Sort groups and their declarations +function sortGroupsAndDeclarations(groups: Map): string[] { + // Sort declarations within each group + for (const group of groups.values()) { + group.declarations.sort((a, b) => { + if (a.wholepath?.length !== b.wholepath?.length) { + return (a.wholepath?.length ?? 0) - (b.wholepath?.length ?? 0); + } + return a.key < b.key ? -1 : a.key > b.key ? 1 : 0; + }); + } + + // Sort group keys alphabetically + return Array.from(groups.keys()).sort((a, b) => { + return a < b ? -1 : a > b ? 
1 : 0; + }); +} + +// Generate the sorted output +function generateSortedOutput(lines: string[], groups: Map, sortedGroupKeys: string[]): string[] { + const outputLines = [...lines]; + outputLines.push(""); + outputLines.push("// @sortImports"); + + for (const groupKey of sortedGroupKeys) { + const groupDeclarations = groups.get(groupKey)!; + if (!groupDeclarations?.declarations.length) continue; + + // Add spacing between groups + outputLines.push(""); + + // Add declarations to output and mark original lines for removal + for (const declaration of groupDeclarations.declarations) { + outputLines.push(declaration.whole); + outputLines[declaration.index] = ""; + } + } + + return outputLines; +} + +// Main execution function for a single file +async function processFile(filePath: string): Promise { + const originalFileContents = await Bun.file(filePath).text(); + let fileContents = originalFileContents; + + if (!config.includeUnsorted && !originalFileContents.includes("// @sortImports")) { + return; + } + console.log(`Processing: ${filePath}`); + + let needsRecurse = true; + while (needsRecurse) { + needsRecurse = false; + + const lines = fileContents.split("\n"); + + const { declarations, unusedLineIndices } = parseDeclarations(lines, fileContents); + const groups = groupDeclarationsByImportPath(declarations); + + promoteItemsWithChildGroups(groups); + mergeSingleItemGroups(groups); + const sortedGroupKeys = sortGroupsAndDeclarations(groups); + + const sortedLines = generateSortedOutput(lines, groups, sortedGroupKeys); + + // Remove unused declarations + if (config.removeUnused) { + for (const line of unusedLineIndices) { + sortedLines[line] = ""; + needsRecurse = true; + } + } + fileContents = sortedLines.join("\n"); + } + + // Remove any leading newlines + fileContents = fileContents.replace(/^\n+/, ""); + + // Maximum of one empty line + fileContents = fileContents.replace(/\n\n+/g, "\n\n"); + + // Ensure exactly one trailing newline + fileContents = fileContents.replace(/\s*$/, "\n"); + + // If the file is empty, remove the trailing newline + if (fileContents === "\n") fileContents = ""; + + if (fileContents === originalFileContents) { + console.log(`✓ No changes: ${filePath}`); + return; + } + + // Write the sorted file + await Bun.write(filePath, fileContents); + + console.log(`✓ Done: ${filePath}`); +} + +// Process all files +async function main() { + let successCount = 0; + let errorCount = 0; + + for (const filePath of filePaths) { + const stat = await Bun.file(filePath).stat(); + if (stat.isDirectory()) { + const files = readdirSync(filePath, { recursive: true }); + for (const file of files) { + if (typeof file !== "string" || !file.endsWith(".zig")) continue; + try { + await processFile(path.join(filePath, file)); + successCount++; + } catch (error) { + errorCount++; + console.error(`Failed to process ${filePath}`); + } + } + continue; + } + + try { + await processFile(filePath); + successCount++; + } catch (error) { + errorCount++; + console.error(`Failed to process ${filePath}`); + } + } + + console.log(`\nSummary: ${successCount} files processed successfully, ${errorCount} errors`); + + if (errorCount > 0) { + process.exit(1); + } +} + +main(); diff --git a/scripts/utils.mjs b/scripts/utils.mjs index 09e3596922..83bcdc6dbc 100755 --- a/scripts/utils.mjs +++ b/scripts/utils.mjs @@ -16,7 +16,7 @@ import { } from "node:fs"; import { connect } from "node:net"; import { hostname, homedir as nodeHomedir, tmpdir as nodeTmpdir, release, userInfo } from "node:os"; -import { dirname, 
join, relative, resolve } from "node:path"; +import { basename, dirname, join, relative, resolve } from "node:path"; import { normalize as normalizeWindows } from "node:path/win32"; export const isWindows = process.platform === "win32"; @@ -1370,13 +1370,16 @@ export async function getLastSuccessfulBuild() { } /** - * @param {string} filename - * @param {string} [cwd] + * @param {string} filename Absolute path to file to upload */ -export async function uploadArtifact(filename, cwd) { +export async function uploadArtifact(filename) { if (isBuildkite) { - const relativePath = relative(cwd ?? process.cwd(), filename); - await spawnSafe(["buildkite-agent", "artifact", "upload", relativePath], { cwd, stdio: "inherit" }); + await spawnSafe(["buildkite-agent", "artifact", "upload", basename(filename)], { + cwd: dirname(filename), + stdio: "inherit", + }); + } else { + console.warn(`not in buildkite. artifact ${filename} not uploaded.`); } } @@ -2840,6 +2843,20 @@ export function printEnvironment() { spawnSync([shell, "-c", "ulimit -a"], { stdio: "inherit" }); } }); + startGroup("Disk (df)", () => { + const shell = which(["sh", "bash"]); + if (shell) { + spawnSync([shell, "-c", "df"], { stdio: "inherit" }); + } + }); + } + if (isWindows) { + startGroup("Disk (win)", () => { + const shell = which(["pwsh"]); + if (shell) { + spawnSync([shell, "-c", "get-psdrive"], { stdio: "inherit" }); + } + }); } } diff --git a/src/StandaloneModuleGraph.zig b/src/StandaloneModuleGraph.zig index 267d1f83f1..fc5530aabc 100644 --- a/src/StandaloneModuleGraph.zig +++ b/src/StandaloneModuleGraph.zig @@ -97,6 +97,12 @@ pub const StandaloneModuleGraph = struct { encoding: Encoding = .latin1, loader: bun.options.Loader = .file, module_format: ModuleFormat = .none, + side: FileSide = .server, + }; + + pub const FileSide = enum(u8) { + server = 0, + client = 1, }; pub const Encoding = enum(u8) { @@ -141,6 +147,11 @@ pub const StandaloneModuleGraph = struct { wtf_string: bun.String = bun.String.empty, bytecode: []u8 = "", module_format: ModuleFormat = .none, + side: FileSide = .server, + + pub fn appearsInEmbeddedFilesArray(this: *const File) bool { + return this.side == .client or !this.loader.isJavaScriptLike(); + } pub fn stat(this: *const File) bun.Stat { var result = std.mem.zeroes(bun.Stat); @@ -226,6 +237,7 @@ pub const StandaloneModuleGraph = struct { null, std.math.maxInt(i32), std.math.maxInt(i32), + .{}, )) { .success => |x| x, .fail => { @@ -251,7 +263,7 @@ pub const StandaloneModuleGraph = struct { }); stored.external_source_names = file_names; - stored.underlying_provider = .{ .data = @truncate(@intFromPtr(data)), .load_hint = .none }; + stored.underlying_provider = .{ .data = @truncate(@intFromPtr(data)), .load_hint = .none, .kind = .zig }; stored.is_standalone_module_graph = true; const parsed = bun.new(SourceMap.ParsedSourceMap, stored); @@ -300,6 +312,7 @@ pub const StandaloneModuleGraph = struct { .none, .bytecode = if (module.bytecode.length > 0) @constCast(sliceTo(raw_bytes, module.bytecode)) else &.{}, .module_format = module.module_format, + .side = module.side, }, ); } @@ -347,8 +360,10 @@ pub const StandaloneModuleGraph = struct { string_builder.cap += (output_file.value.buffer.bytes.len + 255) / 256 * 256 + 256; } else { if (entry_point_id == null) { - if (output_file.output_kind == .@"entry-point") { - entry_point_id = module_count; + if (output_file.side == null or output_file.side.? 
== .server) { + if (output_file.output_kind == .@"entry-point") { + entry_point_id = module_count; + } } } @@ -421,6 +436,10 @@ pub const StandaloneModuleGraph = struct { else => .none, } else .none, .bytecode = bytecode, + .side = switch (output_file.side orelse .server) { + .server => .server, + .client => .client, + }, }; if (output_file.source_map_index != std.math.maxInt(u32)) { @@ -839,7 +858,7 @@ pub const StandaloneModuleGraph = struct { .fromStdDir(root_dir), bun.sliceTo(&(try std.posix.toPosixPath(std.fs.path.basename(outfile))), 0), ) catch |err| { - if (err == error.IsDir) { + if (err == error.IsDir or err == error.EISDIR) { Output.prettyErrorln("error: {} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.quote(outfile)}); } else { Output.prettyErrorln("error: failed to rename {s} to {s}: {s}", .{ temp_location, outfile, @errorName(err) }); diff --git a/src/Watcher.zig b/src/Watcher.zig index 7d0d88669c..6ebd50faf7 100644 --- a/src/Watcher.zig +++ b/src/Watcher.zig @@ -307,7 +307,7 @@ fn appendFileAssumeCapacity( loader: options.Loader, parent_hash: HashType, package_json: ?*PackageJSON, - comptime copy_file_path: bool, + comptime clone_file_path: bool, ) bun.JSC.Maybe(void) { if (comptime Environment.isWindows) { // on windows we can only watch items that are in the directory tree of the top level dir @@ -320,7 +320,7 @@ fn appendFileAssumeCapacity( const watchlist_id = this.watchlist.len; - const file_path_: string = if (comptime copy_file_path) + const file_path_: string = if (comptime clone_file_path) bun.asByteSlice(this.allocator.dupeZ(u8, file_path) catch bun.outOfMemory()) else file_path; @@ -383,13 +383,12 @@ fn appendFileAssumeCapacity( this.watchlist.appendAssumeCapacity(item); return .{ .result = {} }; } - fn appendDirectoryAssumeCapacity( this: *Watcher, stored_fd: bun.FileDescriptor, file_path: string, hash: HashType, - comptime copy_file_path: bool, + comptime clone_file_path: bool, ) bun.JSC.Maybe(WatchItemIndex) { if (comptime Environment.isWindows) { // on windows we can only watch items that are in the directory tree of the top level dir @@ -408,13 +407,13 @@ fn appendDirectoryAssumeCapacity( }; }; - const parent_hash = getHash(bun.fs.PathName.init(file_path).dirWithTrailingSlash()); - - const file_path_: string = if (comptime copy_file_path) + const file_path_: string = if (comptime clone_file_path) bun.asByteSlice(this.allocator.dupeZ(u8, file_path) catch bun.outOfMemory()) else file_path; + const parent_hash = getHash(bun.fs.PathName.init(file_path_).dirWithTrailingSlash()); + const watchlist_id = this.watchlist.len; var item = WatchItem{ @@ -464,13 +463,21 @@ fn appendDirectoryAssumeCapacity( null, ); } else if (Environment.isLinux) { - const file_path_to_use_ = std.mem.trimRight(u8, file_path_, "/"); - var buf: bun.PathBuffer = undefined; - bun.copy(u8, &buf, file_path_to_use_); - buf[file_path_to_use_.len] = 0; - const slice: [:0]u8 = buf[0..file_path_to_use_.len :0]; - item.eventlist_index = switch (this.platform.watchDir(slice)) { - .err => |err| return .{ .err = err }, + const buf = bun.path_buffer_pool.get(); + defer { + bun.path_buffer_pool.put(buf); + } + const path: [:0]const u8 = if (clone_file_path and file_path_.len > 0 and file_path_[file_path_.len - 1] == 0) + file_path_[0 .. 
file_path_.len - 1 :0] + else brk: { + const trailing_slash = if (file_path_.len > 1) std.mem.trimRight(u8, file_path_, &.{ 0, '/' }) else file_path_; + @memcpy(buf[0..trailing_slash.len], trailing_slash); + buf[trailing_slash.len] = 0; + break :brk buf[0..trailing_slash.len :0]; + }; + + item.eventlist_index = switch (this.platform.watchDir(path)) { + .err => |err| return .{ .err = err.withPath(file_path) }, .result => |r| r, }; } @@ -491,7 +498,7 @@ pub fn appendFileMaybeLock( loader: options.Loader, dir_fd: bun.FileDescriptor, package_json: ?*PackageJSON, - comptime copy_file_path: bool, + comptime clone_file_path: bool, comptime lock: bool, ) bun.JSC.Maybe(void) { if (comptime lock) this.mutex.lock(); @@ -524,8 +531,8 @@ pub fn appendFileMaybeLock( this.watchlist.ensureUnusedCapacity(this.allocator, 1 + @as(usize, @intCast(@intFromBool(parent_watch_item == null)))) catch bun.outOfMemory(); if (autowatch_parent_dir) { - parent_watch_item = parent_watch_item orelse switch (this.appendDirectoryAssumeCapacity(dir_fd, parent_dir, parent_dir_hash, copy_file_path)) { - .err => |err| return .{ .err = err }, + parent_watch_item = parent_watch_item orelse switch (this.appendDirectoryAssumeCapacity(dir_fd, parent_dir, parent_dir_hash, clone_file_path)) { + .err => |err| return .{ .err = err.withPath(parent_dir) }, .result => |r| r, }; } @@ -537,9 +544,9 @@ pub fn appendFileMaybeLock( loader, parent_dir_hash, package_json, - copy_file_path, + clone_file_path, )) { - .err => |err| return .{ .err = err }, + .err => |err| return .{ .err = err.withPath(file_path) }, .result => {}, } @@ -568,9 +575,9 @@ pub fn appendFile( loader: options.Loader, dir_fd: bun.FileDescriptor, package_json: ?*PackageJSON, - comptime copy_file_path: bool, + comptime clone_file_path: bool, ) bun.JSC.Maybe(void) { - return appendFileMaybeLock(this, fd, file_path, hash, loader, dir_fd, package_json, copy_file_path, true); + return appendFileMaybeLock(this, fd, file_path, hash, loader, dir_fd, package_json, clone_file_path, true); } pub fn addDirectory( @@ -578,7 +585,7 @@ pub fn addDirectory( fd: bun.FileDescriptor, file_path: string, hash: HashType, - comptime copy_file_path: bool, + comptime clone_file_path: bool, ) bun.JSC.Maybe(WatchItemIndex) { this.mutex.lock(); defer this.mutex.unlock(); @@ -589,7 +596,7 @@ pub fn addDirectory( this.watchlist.ensureUnusedCapacity(this.allocator, 1) catch bun.outOfMemory(); - return this.appendDirectoryAssumeCapacity(fd, file_path, hash, copy_file_path); + return this.appendDirectoryAssumeCapacity(fd, file_path, hash, clone_file_path); } pub fn addFile( @@ -600,7 +607,7 @@ pub fn addFile( loader: options.Loader, dir_fd: bun.FileDescriptor, package_json: ?*PackageJSON, - comptime copy_file_path: bool, + comptime clone_file_path: bool, ) bun.JSC.Maybe(void) { // This must lock due to concurrent transpiler this.mutex.lock(); @@ -617,7 +624,7 @@ pub fn addFile( return .{ .result = {} }; } - return this.appendFileMaybeLock(fd, file_path, hash, loader, dir_fd, package_json, copy_file_path, false); + return this.appendFileMaybeLock(fd, file_path, hash, loader, dir_fd, package_json, clone_file_path, false); } pub fn indexOf(this: *Watcher, hash: HashType) ?u32 { diff --git a/src/allocators/AllocationScope.zig b/src/allocators/AllocationScope.zig index de5556ba04..bce56a36bf 100644 --- a/src/allocators/AllocationScope.zig +++ b/src/allocators/AllocationScope.zig @@ -2,7 +2,7 @@ //! It also allows measuring how much memory a scope has allocated. 
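AllocationScope wraps a parent allocator so that each allocation is recorded together with the stack trace that produced it, and anything still tracked when the scope is torn down is reported as a leak. As a rough illustration of that bookkeeping, here is a minimal TypeScript sketch of the concept only; all names in it are invented, and the real implementation is the Zig vtable-based allocator that follows:

```ts
// Conceptual sketch of scope-based allocation tracking (invented names).
type Allocation = { size: number; allocatedAt: string };

class TrackingScope {
  private live = new Map<symbol, Allocation>();
  private totalBytes = 0;

  // Record an allocation along with the callsite that made it.
  track(id: symbol, size: number): void {
    this.live.set(id, { size, allocatedAt: new Error().stack ?? "<unknown>" });
    this.totalBytes += size;
  }

  // Returns true if the free was invalid (unknown pointer or double free).
  free(id: symbol): boolean {
    const entry = this.live.get(id);
    if (!entry) return true;
    this.live.delete(id);
    this.totalBytes -= entry.size;
    return false;
  }

  // On teardown, anything still tracked is a leak; report it with its callsite.
  deinit(): void {
    for (const { size, allocatedAt } of this.live.values()) {
      console.warn(`leaked ${size} bytes, allocated at:\n${allocatedAt}`);
    }
  }
}
```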
const AllocationScope = @This(); -pub const enabled = bun.Environment.isDebug; +pub const enabled = bun.Environment.enableAllocScopes; parent: Allocator, state: if (enabled) struct { @@ -36,7 +36,7 @@ pub const Extra = union(enum) { }; pub fn init(parent: Allocator) AllocationScope { - return if (enabled) + return if (comptime enabled) .{ .parent = parent, .state = .{ @@ -52,7 +52,7 @@ pub fn init(parent: Allocator) AllocationScope { } pub fn deinit(scope: *AllocationScope) void { - if (enabled) { + if (comptime enabled) { scope.state.mutex.lock(); defer scope.state.allocations.deinit(scope.parent); const count = scope.state.allocations.count(); @@ -83,7 +83,7 @@ pub fn deinit(scope: *AllocationScope) void { } pub fn allocator(scope: *AllocationScope) Allocator { - return if (enabled) .{ .ptr = scope, .vtable = &vtable } else scope.parent; + return if (comptime enabled) .{ .ptr = scope, .vtable = &vtable } else scope.parent; } const vtable: Allocator.VTable = .{ @@ -176,7 +176,7 @@ fn trackFreeAssumeLocked(scope: *AllocationScope, buf: []const u8, ret_addr: usi } pub fn assertOwned(scope: *AllocationScope, ptr: anytype) void { - if (!enabled) return; + if (comptime !enabled) return; const cast_ptr: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) { .c, .one, .many => ptr, .slice => if (ptr.len > 0) ptr.ptr else return, @@ -188,7 +188,7 @@ pub fn assertOwned(scope: *AllocationScope, ptr: anytype) void { } pub fn assertUnowned(scope: *AllocationScope, ptr: anytype) void { - if (!enabled) return; + if (comptime !enabled) return; const cast_ptr: [*]const u8 = @ptrCast(switch (@typeInfo(@TypeOf(ptr)).pointer.size) { .c, .one, .many => ptr, .slice => if (ptr.len > 0) ptr.ptr else return, @@ -196,7 +196,7 @@ pub fn assertUnowned(scope: *AllocationScope, ptr: anytype) void { scope.state.mutex.lock(); defer scope.state.mutex.unlock(); if (scope.state.allocations.getPtr(cast_ptr)) |owned| { - Output.debugWarn("Pointer allocated here:"); + Output.warn("Owned pointer allocated here:"); bun.crash_handler.dumpStackTrace(owned.allocated_at.trace(), trace_limits, trace_limits); } @panic("this pointer was owned by the allocation scope when it was not supposed to be"); @@ -205,7 +205,7 @@ pub fn assertUnowned(scope: *AllocationScope, ptr: anytype) void { /// Track an arbitrary pointer. Extra data can be stored in the allocation, /// which will be printed when a leak is detected. pub fn trackExternalAllocation(scope: *AllocationScope, ptr: []const u8, ret_addr: ?usize, extra: Extra) void { - if (!enabled) return; + if (comptime !enabled) return; scope.state.mutex.lock(); defer scope.state.mutex.unlock(); scope.state.allocations.ensureUnusedCapacity(scope.parent, 1) catch bun.outOfMemory(); @@ -214,15 +214,29 @@ pub fn trackExternalAllocation(scope: *AllocationScope, ptr: []const u8, ret_add /// Call when the pointer from `trackExternalAllocation` is freed. /// Returns true if the free was invalid. 
-pub fn trackExternalFree(scope: *AllocationScope, ptr: []const u8, ret_addr: ?usize) bool { - if (!enabled) return; +pub fn trackExternalFree(scope: *AllocationScope, slice: anytype, ret_addr: ?usize) bool { + if (comptime !enabled) return false; + const ptr: []const u8 = switch (@typeInfo(@TypeOf(slice))) { + .pointer => |p| switch (p.size) { + .slice => brk: { + if (p.child != u8) @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice))); + if (p.sentinel_ptr == null) break :brk slice; + // Ensure we include the sentinel value + break :brk slice[0 .. slice.len + 1]; + }, + else => @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice))), + }, + else => @compileError("This function only supports []u8 or [:sentinel]u8 types, you passed in: " ++ @typeName(@TypeOf(slice))), + }; + // Empty slice usually means invalid pointer + if (ptr.len == 0) return false; scope.state.mutex.lock(); defer scope.state.mutex.unlock(); return trackFreeAssumeLocked(scope, ptr, ret_addr orelse @returnAddress()); } pub fn setPointerExtra(scope: *AllocationScope, ptr: *anyopaque, extra: Extra) void { - if (!enabled) return; + if (comptime !enabled) return; scope.state.mutex.lock(); defer scope.state.mutex.unlock(); const allocation = scope.state.allocations.getPtr(ptr) orelse diff --git a/src/analytics/analytics_thread.zig b/src/analytics/analytics_thread.zig index c6a4e58b94..2a70bea13c 100644 --- a/src/analytics/analytics_thread.zig +++ b/src/analytics/analytics_thread.zig @@ -93,6 +93,8 @@ pub const Features = struct { pub var loaders: usize = 0; pub var lockfile_migration_from_package_lock: usize = 0; pub var text_lockfile: usize = 0; + pub var isolated_bun_install: usize = 0; + pub var hoisted_bun_install: usize = 0; pub var macros: usize = 0; pub var no_avx2: usize = 0; pub var no_avx: usize = 0; diff --git a/src/ast/ASTMemoryAllocator.zig b/src/ast/ASTMemoryAllocator.zig new file mode 100644 index 0000000000..bfecc27688 --- /dev/null +++ b/src/ast/ASTMemoryAllocator.zig @@ -0,0 +1,96 @@ +const SFA = std.heap.StackFallbackAllocator(@min(8192, std.heap.page_size_min)); + +stack_allocator: SFA = undefined, +bump_allocator: std.mem.Allocator = undefined, +allocator: std.mem.Allocator, +previous: ?*ASTMemoryAllocator = null, + +pub fn enter(this: *ASTMemoryAllocator, allocator: std.mem.Allocator) ASTMemoryAllocator.Scope { + this.allocator = allocator; + this.stack_allocator = SFA{ + .buffer = undefined, + .fallback_allocator = allocator, + .fixed_buffer_allocator = undefined, + }; + this.bump_allocator = this.stack_allocator.get(); + this.previous = null; + var ast_scope = ASTMemoryAllocator.Scope{ + .current = this, + .previous = Stmt.Data.Store.memory_allocator, + }; + ast_scope.enter(); + return ast_scope; +} +pub const Scope = struct { + current: ?*ASTMemoryAllocator = null, + previous: ?*ASTMemoryAllocator = null, + + pub fn enter(this: *@This()) void { + bun.debugAssert(Expr.Data.Store.memory_allocator == Stmt.Data.Store.memory_allocator); + + this.previous = Expr.Data.Store.memory_allocator; + + const current = this.current; + + Expr.Data.Store.memory_allocator = current; + Stmt.Data.Store.memory_allocator = current; + + if (current == null) { + Stmt.Data.Store.begin(); + Expr.Data.Store.begin(); + } + } + + pub fn exit(this: *const @This()) void { + Expr.Data.Store.memory_allocator = this.previous; + Stmt.Data.Store.memory_allocator = this.previous; + } +}; + +pub fn reset(this:
*ASTMemoryAllocator) void { + this.stack_allocator = SFA{ + .buffer = undefined, + .fallback_allocator = this.allocator, + .fixed_buffer_allocator = undefined, + }; + this.bump_allocator = this.stack_allocator.get(); +} + +pub fn push(this: *ASTMemoryAllocator) void { + Stmt.Data.Store.memory_allocator = this; + Expr.Data.Store.memory_allocator = this; +} + +pub fn pop(this: *ASTMemoryAllocator) void { + const prev = this.previous; + bun.assert(prev != this); + Stmt.Data.Store.memory_allocator = prev; + Expr.Data.Store.memory_allocator = prev; + this.previous = null; +} + +pub fn append(this: ASTMemoryAllocator, comptime ValueType: type, value: anytype) *ValueType { + const ptr = this.bump_allocator.create(ValueType) catch unreachable; + ptr.* = value; + return ptr; +} + +/// Initialize ASTMemoryAllocator as `undefined`, and call this. +pub fn initWithoutStack(this: *ASTMemoryAllocator, arena: std.mem.Allocator) void { + this.stack_allocator = SFA{ + .buffer = undefined, + .fallback_allocator = arena, + .fixed_buffer_allocator = .init(&.{}), + }; + this.bump_allocator = this.stack_allocator.get(); +} + +// @sortImports + +const bun = @import("bun"); +const std = @import("std"); + +const js_ast = bun.js_ast; +const ASTMemoryAllocator = js_ast.ASTMemoryAllocator; +const Expr = js_ast.Expr; +const Stmt = js_ast.Stmt; diff --git a/src/ast/Ast.zig b/src/ast/Ast.zig new file mode 100644 index 0000000000..e51f1a2abd --- /dev/null +++ b/src/ast/Ast.zig @@ -0,0 +1,143 @@ +pub const TopLevelSymbolToParts = std.ArrayHashMapUnmanaged(Ref, BabyList(u32), Ref.ArrayHashCtx, false); + +approximate_newline_count: usize = 0, +has_lazy_export: bool = false, +runtime_imports: Runtime.Imports = .{}, + +nested_scope_slot_counts: SlotCounts = SlotCounts{}, + +runtime_import_record_id: ?u32 = null, +needs_runtime: bool = false, +// This is a list of CommonJS features. When a file uses CommonJS features, +// it's not a candidate for "flat bundling" and must be wrapped in its own +// closure. +has_top_level_return: bool = false, +uses_exports_ref: bool = false, +uses_module_ref: bool = false, +uses_require_ref: bool = false, +commonjs_module_exports_assigned_deoptimized: bool = false, + +force_cjs_to_esm: bool = false, +exports_kind: ExportsKind = ExportsKind.none, + +// This is a list of ES6 features. They are ranges instead of booleans so +// that they can be used in log messages. Check to see if "Len > 0". +import_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax or "import()" +export_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax +top_level_await_keyword: logger.Range = logger.Range.None, + +/// These are stored at the AST level instead of on individual AST nodes so +/// they can be manipulated efficiently without a full AST traversal +import_records: ImportRecord.List = .{}, + +hashbang: string = "", +directive: ?string = null, +parts: Part.List = Part.List{}, +// This list may be mutated later, so we should store the capacity +symbols: Symbol.List = Symbol.List{}, +module_scope: Scope = Scope{}, +char_freq: ?CharFreq = null, +exports_ref: Ref = Ref.None, +module_ref: Ref = Ref.None, +/// When using format .bake_internal_dev, this is the HMR variable instead +/// of the wrapper. This is because that format does not store module +/// wrappers in a variable. +wrapper_ref: Ref = Ref.None, +require_ref: Ref = Ref.None, + +// These are used when bundling. 
They are filled in during the parser pass +// since we already have to traverse the AST then anyway and the parser pass +// is conveniently fully parallelized. +named_imports: NamedImports = .{}, +named_exports: NamedExports = .{}, +export_star_import_records: []u32 = &([_]u32{}), + +// allocator: std.mem.Allocator, +top_level_symbols_to_parts: TopLevelSymbolToParts = .{}, + +commonjs_named_exports: CommonJSNamedExports = .{}, + +redirect_import_record_index: ?u32 = null, + +/// Only populated when bundling +target: bun.options.Target = .browser, +// const_values: ConstValuesMap = .{}, +ts_enums: TsEnumsMap = .{}, + +/// Not to be confused with `commonjs_named_exports` +/// This is a list of named exports that may exist in a CommonJS module +/// We use this with `commonjs_at_runtime` to re-export CommonJS +has_commonjs_export_names: bool = false, +import_meta_ref: Ref = Ref.None, + +pub const CommonJSNamedExport = struct { + loc_ref: LocRef, + needs_decl: bool = true, +}; +pub const CommonJSNamedExports = bun.StringArrayHashMapUnmanaged(CommonJSNamedExport); + +pub const NamedImports = std.ArrayHashMapUnmanaged(Ref, NamedImport, RefHashCtx, true); +pub const NamedExports = bun.StringArrayHashMapUnmanaged(NamedExport); +pub const ConstValuesMap = std.ArrayHashMapUnmanaged(Ref, Expr, RefHashCtx, false); +pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged(InlinedEnumValue), RefHashCtx, false); + +pub fn fromParts(parts: []Part) Ast { + return Ast{ + .parts = Part.List.init(parts), + .runtime_imports = .{}, + }; +} + +pub fn initTest(parts: []Part) Ast { + return Ast{ + .parts = Part.List.init(parts), + .runtime_imports = .{}, + }; +} + +pub const empty = Ast{ .parts = Part.List{}, .runtime_imports = .{} }; + +pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void { + const opts = std.json.StringifyOptions{ .whitespace = std.json.StringifyOptions.Whitespace{ + .separator = true, + } }; + try std.json.stringify(self.parts, opts, stream); +} + +/// Do not call this if it wasn't globally allocated! +pub fn deinit(this: *Ast) void { + // TODO: assert mimalloc-owned memory + if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator); + if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator); + if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator); +} + +// @sortImports + +const std = @import("std"); +const Runtime = @import("../runtime.zig").Runtime; + +const bun = @import("bun"); +const BabyList = bun.BabyList; +const ImportRecord = bun.ImportRecord; +const logger = bun.logger; +const string = bun.string; + +const js_ast = bun.js_ast; +const Ast = js_ast.Ast; +const CharFreq = js_ast.CharFreq; +const ExportsKind = js_ast.ExportsKind; +const Expr = js_ast.Expr; +const InlinedEnumValue = js_ast.InlinedEnumValue; +const LocRef = js_ast.LocRef; +const NamedExport = js_ast.NamedExport; +const NamedImport = js_ast.NamedImport; +const Part = js_ast.Part; +const Ref = js_ast.Ref; +const RefHashCtx = js_ast.RefHashCtx; +const Scope = js_ast.Scope; +const SlotCounts = js_ast.SlotCounts; +const Symbol = js_ast.Symbol; + +const G = js_ast.G; +pub const Class = G.Class; diff --git a/src/ast/B.zig b/src/ast/B.zig new file mode 100644 index 0000000000..4c66f7c4dc --- /dev/null +++ b/src/ast/B.zig @@ -0,0 +1,106 @@ +/// B is for Binding! Bindings are on the left side of variable +/// declarations (s_local), which is how destructuring assignments +/// are represented in memory. 
Consider a basic example. +/// +/// let hello = world; +/// ^ ^ +/// | E.Identifier +/// B.Identifier +/// +/// Bindings can be nested +/// +/// B.Array +/// | B.Identifier +/// | | +/// let { foo: [ bar ] } = ... +/// ---------------- +/// B.Object +pub const B = union(Binding.Tag) { + // let x = ... + b_identifier: *B.Identifier, + // let [a, b] = ... + b_array: *B.Array, + // let { a, b: c } = ... + b_object: *B.Object, + // this is used to represent array holes + b_missing: B.Missing, + + pub const Identifier = struct { + ref: Ref, + }; + + pub const Property = struct { + flags: Flags.Property.Set = Flags.Property.None, + key: ExprNodeIndex, + value: Binding, + default_value: ?Expr = null, + }; + + pub const Object = struct { + properties: []B.Property, + is_single_line: bool = false, + + pub const Property = B.Property; + }; + + pub const Array = struct { + items: []ArrayBinding, + has_spread: bool = false, + is_single_line: bool = false, + + pub const Item = ArrayBinding; + }; + + pub const Missing = struct {}; + + /// This hash function is currently only used for React Fast Refresh transform. + /// This doesn't include the `is_single_line` properties, as they only affect whitespace. + pub fn writeToHasher(b: B, hasher: anytype, symbol_table: anytype) void { + switch (b) { + .b_identifier => |id| { + const original_name = id.ref.getSymbol(symbol_table).original_name; + writeAnyToHasher(hasher, .{ std.meta.activeTag(b), original_name.len }); + }, + .b_array => |array| { + writeAnyToHasher(hasher, .{ std.meta.activeTag(b), array.has_spread, array.items.len }); + for (array.items) |item| { + writeAnyToHasher(hasher, .{item.default_value != null}); + if (item.default_value) |default| { + default.data.writeToHasher(hasher, symbol_table); + } + item.binding.data.writeToHasher(hasher, symbol_table); + } + }, + .b_object => |object| { + writeAnyToHasher(hasher, .{ std.meta.activeTag(b), object.properties.len }); + for (object.properties) |property| { + writeAnyToHasher(hasher, .{ property.default_value != null, property.flags }); + if (property.default_value) |default| { + default.data.writeToHasher(hasher, symbol_table); + } + property.key.data.writeToHasher(hasher, symbol_table); + property.value.data.writeToHasher(hasher, symbol_table); + } + }, + .b_missing => {}, + } + } +}; + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const writeAnyToHasher = bun.writeAnyToHasher; + +const js_ast = bun.js_ast; +const ArrayBinding = js_ast.ArrayBinding; +const Binding = js_ast.Binding; +const Expr = js_ast.Expr; +const ExprNodeIndex = js_ast.ExprNodeIndex; +const Flags = js_ast.Flags; +const Ref = js_ast.Ref; + +const G = js_ast.G; +pub const Class = G.Class; diff --git a/src/ast/Binding.zig b/src/ast/Binding.zig new file mode 100644 index 0000000000..85626ba5cb --- /dev/null +++ b/src/ast/Binding.zig @@ -0,0 +1,165 @@ +loc: logger.Loc, +data: B, + +const Serializable = struct { + type: Tag, + object: string, + value: B, + loc: logger.Loc, +}; + +pub fn jsonStringify(self: *const @This(), writer: anytype) !void { + return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "binding", .value = self.data, .loc = self.loc }); +} + +pub fn ToExpr(comptime expr_type: type, comptime func_type: anytype) type { + const ExprType = expr_type; + return struct { + context: *ExprType, + allocator: std.mem.Allocator, + pub const Context = @This(); + + pub fn wrapIdentifier(ctx: *const Context, loc: logger.Loc, ref: Ref) Expr { + return 
func_type(ctx.context, loc, ref); + } + + pub fn init(context: *ExprType) Context { + return Context{ .context = context, .allocator = context.allocator }; + } + }; +} + +pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr { + const loc = binding.loc; + + switch (binding.data) { + .b_missing => { + return Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = loc }; + }, + .b_identifier => |b| { + return wrapper.wrapIdentifier(loc, b.ref); + }, + .b_array => |b| { + var exprs = wrapper.allocator.alloc(Expr, b.items.len) catch unreachable; + var i: usize = 0; + while (i < exprs.len) : (i += 1) { + const item = b.items[i]; + exprs[i] = convert: { + const expr = toExpr(&item.binding, wrapper); + if (b.has_spread and i == exprs.len - 1) { + break :convert Expr.init(E.Spread, E.Spread{ .value = expr }, expr.loc); + } else if (item.default_value) |default| { + break :convert Expr.assign(expr, default); + } else { + break :convert expr; + } + }; + } + + return Expr.init(E.Array, E.Array{ .items = ExprNodeList.init(exprs), .is_single_line = b.is_single_line }, loc); + }, + .b_object => |b| { + const properties = wrapper + .allocator + .alloc(G.Property, b.properties.len) catch unreachable; + for (properties, b.properties) |*property, item| { + property.* = .{ + .flags = item.flags, + .key = item.key, + .kind = if (item.flags.contains(.is_spread)) + .spread + else + .normal, + .value = toExpr(&item.value, wrapper), + .initializer = item.default_value, + }; + } + return Expr.init( + E.Object, + E.Object{ + .properties = G.Property.List.init(properties), + .is_single_line = b.is_single_line, + }, + loc, + ); + }, + } +} + +pub const Tag = enum(u5) { + b_identifier, + b_array, + b_object, + b_missing, + + pub fn jsonStringify(self: @This(), writer: anytype) !void { + return try writer.write(@tagName(self)); + } +}; + +pub var icount: usize = 0; + +pub fn init(t: anytype, loc: logger.Loc) Binding { + icount += 1; + switch (@TypeOf(t)) { + *B.Identifier => { + return Binding{ .loc = loc, .data = B{ .b_identifier = t } }; + }, + *B.Array => { + return Binding{ .loc = loc, .data = B{ .b_array = t } }; + }, + *B.Object => { + return Binding{ .loc = loc, .data = B{ .b_object = t } }; + }, + B.Missing => { + return Binding{ .loc = loc, .data = B{ .b_missing = t } }; + }, + else => { + @compileError("Invalid type passed to Binding.init"); + }, + } +} + +pub fn alloc(allocator: std.mem.Allocator, t: anytype, loc: logger.Loc) Binding { + icount += 1; + switch (@TypeOf(t)) { + B.Identifier => { + const data = allocator.create(B.Identifier) catch unreachable; + data.* = t; + return Binding{ .loc = loc, .data = B{ .b_identifier = data } }; + }, + B.Array => { + const data = allocator.create(B.Array) catch unreachable; + data.* = t; + return Binding{ .loc = loc, .data = B{ .b_array = data } }; + }, + B.Object => { + const data = allocator.create(B.Object) catch unreachable; + data.* = t; + return Binding{ .loc = loc, .data = B{ .b_object = data } }; + }, + B.Missing => { + return Binding{ .loc = loc, .data = B{ .b_missing = .{} } }; + }, + else => { + @compileError("Invalid type passed to Binding.alloc"); + }, + } +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const logger = bun.logger; +const string = bun.string; + +const js_ast = bun.js_ast; +const B = js_ast.B; +const Binding = js_ast.Binding; +const E = js_ast.E; +const Expr = js_ast.Expr; +const ExprNodeList = js_ast.ExprNodeList; +const G = js_ast.G; +const Ref = js_ast.Ref; diff --git a/src/ast/BundledAst.zig 
b/src/ast/BundledAst.zig new file mode 100644 index 0000000000..141909c961 --- /dev/null +++ b/src/ast/BundledAst.zig @@ -0,0 +1,231 @@ +//! Like Ast but slimmer and for bundling only. +//! +//! On Linux, the hottest function in the bundler is: +//! src.multi_array_list.MultiArrayList(src.js_ast.Ast).ensureTotalCapacity +//! https://share.firefox.dev/3NNlRKt +//! +//! So we make a slimmer version of Ast for bundling that doesn't allocate as much memory + +approximate_newline_count: u32 = 0, +nested_scope_slot_counts: SlotCounts = .{}, + +exports_kind: ExportsKind = .none, + +/// These are stored at the AST level instead of on individual AST nodes so +/// they can be manipulated efficiently without a full AST traversal +import_records: ImportRecord.List = .{}, + +hashbang: string = "", +parts: Part.List = .{}, +css: ?*bun.css.BundlerStyleSheet = null, +url_for_css: []const u8 = "", +symbols: Symbol.List = .{}, +module_scope: Scope = .{}, +char_freq: CharFreq = undefined, +exports_ref: Ref = Ref.None, +module_ref: Ref = Ref.None, +wrapper_ref: Ref = Ref.None, +require_ref: Ref = Ref.None, +top_level_await_keyword: logger.Range, +tla_check: TlaCheck = .{}, + +// These are used when bundling. They are filled in during the parser pass +// since we already have to traverse the AST then anyway and the parser pass +// is conveniently fully parallelized. +named_imports: NamedImports = .{}, +named_exports: NamedExports = .{}, +export_star_import_records: []u32 = &.{}, + +top_level_symbols_to_parts: TopLevelSymbolToParts = .{}, + +commonjs_named_exports: CommonJSNamedExports = .{}, + +redirect_import_record_index: u32 = std.math.maxInt(u32), + +/// Only populated when bundling. When --server-components is passed, this +/// will be .browser when it is a client component, and the server's target +/// on the server. +target: bun.options.Target = .browser, + +// const_values: ConstValuesMap = .{}, +ts_enums: Ast.TsEnumsMap = .{}, + +flags: BundledAst.Flags = .{}, + +pub const Flags = packed struct(u8) { + // This is a list of CommonJS features. When a file uses CommonJS features, + // it's not a candidate for "flat bundling" and must be wrapped in its own + // closure. + uses_exports_ref: bool = false, + uses_module_ref: bool = false, + // uses_require_ref: bool = false, + uses_export_keyword: bool = false, + has_char_freq: bool = false, + force_cjs_to_esm: bool = false, + has_lazy_export: bool = false, + commonjs_module_exports_assigned_deoptimized: bool = false, + has_explicit_use_strict_directive: bool = false, +}; + +pub const empty = BundledAst.init(Ast.empty); + +pub fn toAST(this: *const BundledAst) Ast { + return .{ + .approximate_newline_count = this.approximate_newline_count, + .nested_scope_slot_counts = this.nested_scope_slot_counts, + + .exports_kind = this.exports_kind, + + .import_records = this.import_records, + + .hashbang = this.hashbang, + .parts = this.parts, + // This list may be mutated later, so we should store the capacity + .symbols = this.symbols, + .module_scope = this.module_scope, + .char_freq = if (this.flags.has_char_freq) this.char_freq else null, + .exports_ref = this.exports_ref, + .module_ref = this.module_ref, + .wrapper_ref = this.wrapper_ref, + .require_ref = this.require_ref, + .top_level_await_keyword = this.top_level_await_keyword, + + // These are used when bundling. They are filled in during the parser pass + // since we already have to traverse the AST then anyway and the parser pass + // is conveniently fully parallelized. 
+ .named_imports = this.named_imports, + .named_exports = this.named_exports, + .export_star_import_records = this.export_star_import_records, + + .top_level_symbols_to_parts = this.top_level_symbols_to_parts, + + .commonjs_named_exports = this.commonjs_named_exports, + + .redirect_import_record_index = this.redirect_import_record_index, + + .target = this.target, + + // .const_values = this.const_values, + .ts_enums = this.ts_enums, + + .uses_exports_ref = this.flags.uses_exports_ref, + .uses_module_ref = this.flags.uses_module_ref, + // .uses_require_ref = ast.uses_require_ref, + .export_keyword = .{ .len = if (this.flags.uses_export_keyword) 1 else 0, .loc = .{} }, + .force_cjs_to_esm = this.flags.force_cjs_to_esm, + .has_lazy_export = this.flags.has_lazy_export, + .commonjs_module_exports_assigned_deoptimized = this.flags.commonjs_module_exports_assigned_deoptimized, + .directive = if (this.flags.has_explicit_use_strict_directive) "use strict" else null, + }; +} + +pub fn init(ast: Ast) BundledAst { + return .{ + .approximate_newline_count = @as(u32, @truncate(ast.approximate_newline_count)), + .nested_scope_slot_counts = ast.nested_scope_slot_counts, + + .exports_kind = ast.exports_kind, + + .import_records = ast.import_records, + + .hashbang = ast.hashbang, + .parts = ast.parts, + // This list may be mutated later, so we should store the capacity + .symbols = ast.symbols, + .module_scope = ast.module_scope, + .char_freq = ast.char_freq orelse undefined, + .exports_ref = ast.exports_ref, + .module_ref = ast.module_ref, + .wrapper_ref = ast.wrapper_ref, + .require_ref = ast.require_ref, + .top_level_await_keyword = ast.top_level_await_keyword, + // These are used when bundling. They are filled in during the parser pass + // since we already have to traverse the AST then anyway and the parser pass + // is conveniently fully parallelized. + .named_imports = ast.named_imports, + .named_exports = ast.named_exports, + .export_star_import_records = ast.export_star_import_records, + + // .allocator = ast.allocator, + .top_level_symbols_to_parts = ast.top_level_symbols_to_parts, + + .commonjs_named_exports = ast.commonjs_named_exports, + + .redirect_import_record_index = ast.redirect_import_record_index orelse std.math.maxInt(u32), + + .target = ast.target, + + // .const_values = ast.const_values, + .ts_enums = ast.ts_enums, + + .flags = .{ + .uses_exports_ref = ast.uses_exports_ref, + .uses_module_ref = ast.uses_module_ref, + // .uses_require_ref = ast.uses_require_ref, + .uses_export_keyword = ast.export_keyword.len > 0, + .has_char_freq = ast.char_freq != null, + .force_cjs_to_esm = ast.force_cjs_to_esm, + .has_lazy_export = ast.has_lazy_export, + .commonjs_module_exports_assigned_deoptimized = ast.commonjs_module_exports_assigned_deoptimized, + .has_explicit_use_strict_directive = strings.eqlComptime(ast.directive orelse "", "use strict"), + }, + }; +} + +/// TODO: Move this from being done on all parse tasks into the start of the linker. This currently allocates base64 encoding for every small file loaded thing. 
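The addUrlForCss function that follows inlines small CSS-referenced assets as base64 `data:` URLs and skips inlining for large files that have a unique key, which instead go through the copy-to-output path. A minimal TypeScript sketch of that decision, with hypothetical names; only the 128 KiB threshold and the prefix layout mirror the Zig code below:

```ts
// Sketch of the data: URL inlining rule (hypothetical names; the threshold
// mirrors the COPY_THRESHOLD constant in the Zig implementation).
const COPY_THRESHOLD = 128 * 1024; // 128 KiB

function urlForCss(contents: Uint8Array, mimeType: string, uniqueKey?: string): string | undefined {
  // Large assets with a unique key are copied to the output instead of inlined.
  if (contents.byteLength >= COPY_THRESHOLD && uniqueKey !== undefined) {
    return undefined;
  }
  // "data:<mime>;base64," prefix followed by the encoded payload.
  return `data:${mimeType};base64,${Buffer.from(contents).toString("base64")}`;
}
```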
+pub fn addUrlForCss( + this: *BundledAst, + allocator: std.mem.Allocator, + source: *const logger.Source, + mime_type_: ?[]const u8, + unique_key: ?[]const u8, +) void { + { + const mime_type = if (mime_type_) |m| m else MimeType.byExtension(bun.strings.trimLeadingChar(std.fs.path.extension(source.path.text), '.')).value; + const contents = source.contents; + // TODO: make this configurable + const COPY_THRESHOLD = 128 * 1024; // 128kb + const should_copy = contents.len >= COPY_THRESHOLD and unique_key != null; + if (should_copy) return; + this.url_for_css = url_for_css: { + + // Encode as base64 + const encode_len = bun.base64.encodeLen(contents); + const data_url_prefix_len = "data:".len + mime_type.len + ";base64,".len; + const total_buffer_len = data_url_prefix_len + encode_len; + var encoded = allocator.alloc(u8, total_buffer_len) catch bun.outOfMemory(); + _ = std.fmt.bufPrint(encoded[0..data_url_prefix_len], "data:{s};base64,", .{mime_type}) catch unreachable; + const len = bun.base64.encode(encoded[data_url_prefix_len..], contents); + break :url_for_css encoded[0 .. data_url_prefix_len + len]; + }; + } +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const ImportRecord = bun.ImportRecord; +const logger = bun.logger; +const string = bun.string; +const strings = bun.strings; +const MimeType = bun.http.MimeType; + +const js_ast = bun.js_ast; +const BundledAst = js_ast.BundledAst; +const CharFreq = js_ast.CharFreq; +const ExportsKind = js_ast.ExportsKind; +const Part = js_ast.Part; +const Ref = js_ast.Ref; +const Scope = js_ast.Scope; +const SlotCounts = js_ast.SlotCounts; +const Symbol = js_ast.Symbol; +const TlaCheck = js_ast.TlaCheck; + +const Ast = js_ast.Ast; +pub const CommonJSNamedExports = Ast.CommonJSNamedExports; +pub const ConstValuesMap = Ast.ConstValuesMap; +pub const NamedExports = Ast.NamedExports; +pub const NamedImports = Ast.NamedImports; +pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts; diff --git a/src/ast/CharFreq.zig b/src/ast/CharFreq.zig new file mode 100644 index 0000000000..e7d3f2baf5 --- /dev/null +++ b/src/ast/CharFreq.zig @@ -0,0 +1,139 @@ +pub const char_freq_count = 64; +pub const CharAndCount = struct { + char: u8 = 0, + count: i32 = 0, + index: usize = 0, + + pub const Array = [char_freq_count]CharAndCount; + + pub fn lessThan(_: void, a: CharAndCount, b: CharAndCount) bool { + if (a.count != b.count) { + return a.count > b.count; + } + + if (a.index != b.index) { + return a.index < b.index; + } + + return a.char < b.char; + } +}; + +const Vector = @Vector(char_freq_count, i32); +const Buffer = [char_freq_count]i32; + +freqs: Buffer align(1) = undefined, + +const scan_big_chunk_size = 32; +pub fn scan(this: *CharFreq, text: string, delta: i32) void { + if (delta == 0) + return; + + if (text.len < scan_big_chunk_size) { + scanSmall(&this.freqs, text, delta); + } else { + scanBig(&this.freqs, text, delta); + } +} + +fn scanBig(out: *align(1) Buffer, text: string, delta: i32) void { + // https://zig.godbolt.org/z/P5dPojWGK + var freqs = out.*; + defer out.* = freqs; + var deltas: [256]i32 = [_]i32{0} ** 256; + var remain = text; + + bun.assert(remain.len >= scan_big_chunk_size); + + const unrolled = remain.len - (remain.len % scan_big_chunk_size); + const remain_end = remain.ptr + unrolled; + var unrolled_ptr = remain.ptr; + remain = remain[unrolled..]; + + while (unrolled_ptr != remain_end) : (unrolled_ptr += scan_big_chunk_size) { + const chunk = unrolled_ptr[0..scan_big_chunk_size].*; + inline for 
(0..scan_big_chunk_size) |i| { + deltas[@as(usize, chunk[i])] += delta; + } + } + + for (remain) |c| { + deltas[@as(usize, c)] += delta; + } + + freqs[0..26].* = deltas['a' .. 'a' + 26].*; + freqs[26 .. 26 * 2].* = deltas['A' .. 'A' + 26].*; + freqs[26 * 2 .. 62].* = deltas['0' .. '0' + 10].*; + freqs[62] = deltas['_']; + freqs[63] = deltas['$']; +} + +fn scanSmall(out: *align(1) Buffer, text: string, delta: i32) void { + var freqs: [char_freq_count]i32 = out.*; + defer out.* = freqs; + + for (text) |c| { + const i: usize = switch (c) { + 'a'...'z' => @as(usize, @intCast(c)) - 'a', + 'A'...'Z' => @as(usize, @intCast(c)) - ('A' - 26), + '0'...'9' => @as(usize, @intCast(c)) + (52 - '0'), + '_' => 62, + '$' => 63, + else => continue, + }; + freqs[i] += delta; + } +} + +pub fn include(this: *CharFreq, other: CharFreq) void { + // https://zig.godbolt.org/z/Mq8eK6K9s + const left: @Vector(char_freq_count, i32) = this.freqs; + const right: @Vector(char_freq_count, i32) = other.freqs; + + this.freqs = left + right; +} + +pub fn compile(this: *const CharFreq, allocator: std.mem.Allocator) NameMinifier { + const array: CharAndCount.Array = brk: { + var _array: CharAndCount.Array = undefined; + + for (&_array, NameMinifier.default_tail, this.freqs, 0..) |*dest, char, freq, i| { + dest.* = CharAndCount{ + .char = char, + .index = i, + .count = freq, + }; + } + + std.sort.pdq(CharAndCount, &_array, {}, CharAndCount.lessThan); + + break :brk _array; + }; + + var minifier = NameMinifier.init(allocator); + minifier.head.ensureTotalCapacityPrecise(NameMinifier.default_head.len) catch unreachable; + minifier.tail.ensureTotalCapacityPrecise(NameMinifier.default_tail.len) catch unreachable; + // TODO: investigate counting number of < 0 and > 0 and pre-allocating + for (array) |item| { + if (item.char < '0' or item.char > '9') { + minifier.head.append(item.char) catch unreachable; + } + minifier.tail.append(item.char) catch unreachable; + } + + return minifier; +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const string = bun.string; + +const js_ast = bun.js_ast; +const CharFreq = js_ast.CharFreq; +const NameMinifier = js_ast.NameMinifier; + +const G = js_ast.G; +pub const Class = G.Class; diff --git a/src/ast/E.zig b/src/ast/E.zig new file mode 100644 index 0000000000..4b30eaa3ed --- /dev/null +++ b/src/ast/E.zig @@ -0,0 +1,1441 @@ +/// This represents an internal property name that can be mangled. The symbol +/// referenced by this expression should be a "SymbolMangledProp" symbol.
+pub const NameOfSymbol = struct { + ref: Ref = Ref.None, + + /// If true, a preceding comment contains "@__KEY__" + /// + /// Currently not used + has_property_key_comment: bool = false, +}; + +pub const Array = struct { + items: ExprNodeList = ExprNodeList{}, + comma_after_spread: ?logger.Loc = null, + is_single_line: bool = false, + is_parenthesized: bool = false, + was_originally_macro: bool = false, + close_bracket_loc: logger.Loc = logger.Loc.Empty, + + pub fn push(this: *Array, allocator: std.mem.Allocator, item: Expr) !void { + try this.items.push(allocator, item); + } + + pub inline fn slice(this: Array) []Expr { + return this.items.slice(); + } + + pub fn inlineSpreadOfArrayLiterals( + this: *Array, + allocator: std.mem.Allocator, + estimated_count: usize, + ) !ExprNodeList { + var out = try allocator.alloc( + Expr, + // This over-allocates a little but it's fine + estimated_count + @as(usize, this.items.len), + ); + var remain = out; + for (this.items.slice()) |item| { + switch (item.data) { + .e_spread => |val| { + if (val.value.data == .e_array) { + for (val.value.data.e_array.items.slice()) |inner_item| { + if (inner_item.data == .e_missing) { + remain[0] = Expr.init(E.Undefined, .{}, inner_item.loc); + remain = remain[1..]; + } else { + remain[0] = inner_item; + remain = remain[1..]; + } + } + + // skip empty arrays + // don't include the inlined spread. + continue; + } + // non-arrays are kept in + }, + else => {}, + } + + remain[0] = item; + remain = remain[1..]; + } + + return ExprNodeList.init(out[0 .. out.len - remain.len]); + } + + pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue { + const items = this.items.slice(); + var array = try JSC.JSValue.createEmptyArray(globalObject, items.len); + array.protect(); + defer array.unprotect(); + for (items, 0..) |expr, j| { + try array.putIndex(globalObject, @as(u32, @truncate(j)), try expr.data.toJS(allocator, globalObject)); + } + + return array; + } + + /// Assumes each item in the array is a string + pub fn alphabetizeStrings(this: *Array) void { + if (comptime Environment.allow_assert) { + for (this.items.slice()) |item| { + bun.assert(item.data == .e_string); + } + } + std.sort.pdq(Expr, this.items.slice(), {}, Sorter.isLessThan); + } + + const Sorter = struct { + pub fn isLessThan(ctx: void, lhs: Expr, rhs: Expr) bool { + return strings.cmpStringsAsc(ctx, lhs.data.e_string.data, rhs.data.e_string.data); + } + }; +}; + +pub const Unary = struct { + op: Op.Code, + value: ExprNodeIndex, +}; + +pub const Binary = struct { + left: ExprNodeIndex, + right: ExprNodeIndex, + op: Op.Code, +}; + +pub const Boolean = struct { + value: bool, + pub fn toJS(this: @This(), ctx: *JSC.JSGlobalObject) JSC.C.JSValueRef { + return JSC.C.JSValueMakeBoolean(ctx, this.value); + } +}; +pub const Super = struct {}; +pub const Null = struct {}; +pub const This = struct {}; +pub const Undefined = struct {}; +pub const New = struct { + target: ExprNodeIndex, + args: ExprNodeList = ExprNodeList{}, + + // True if there is a comment containing "@__PURE__" or "#__PURE__" preceding + // this call expression. See the comment inside ECall for more details. + can_be_unwrapped_if_unused: bool = false, + + close_parens_loc: logger.Loc, +}; +pub const NewTarget = struct { + range: logger.Range, +}; +pub const ImportMeta = struct {}; +pub const ImportMetaMain = struct { + /// If we want to print `!import.meta.main`, set this flag to true + /// instead of wrapping in a unary not. 
This way, the printer can easily + /// print `require.main != module` instead of `!(require.main == module)` + inverted: bool = false, +}; + +pub const Special = union(enum) { + /// emits `exports` or `module.exports` depending on `commonjs_named_exports_deoptimized` + module_exports, + /// `import.meta.hot` + hot_enabled, + /// Acts as .e_undefined, but allows property accesses to the rest of the HMR API. + hot_disabled, + /// `import.meta.hot.data` when HMR is enabled. Not reachable when it is disabled. + hot_data, + /// `import.meta.hot.accept` when HMR is enabled. Truthy. + hot_accept, + /// Converted from `hot_accept` to this in js_parser.zig when it is + /// passed strings. Printed as `hmr.hot.acceptSpecifiers` + hot_accept_visited, + /// Prints the resolved specifier string for an import record. + resolved_specifier_string: ImportRecord.Index, +}; + +pub const Call = struct { + // Node: + target: ExprNodeIndex, + args: ExprNodeList = ExprNodeList{}, + optional_chain: ?OptionalChain = null, + is_direct_eval: bool = false, + close_paren_loc: logger.Loc = logger.Loc.Empty, + + // True if there is a comment containing "@__PURE__" or "#__PURE__" preceding + // this call expression. This is an annotation used for tree shaking, and + // means that the call can be removed if it's unused. It does not mean the + // call is pure (e.g. it may still return something different if called twice). + // + // Note that the arguments are not considered to be part of the call. If the + // call itself is removed due to this annotation, the arguments must remain + // if they have side effects. + can_be_unwrapped_if_unused: bool = false, + + // Used when printing to generate the source prop on the fly + was_jsx_element: bool = false, + + pub fn hasSameFlagsAs(a: *Call, b: *Call) bool { + return (a.optional_chain == b.optional_chain and + a.is_direct_eval == b.is_direct_eval and + a.can_be_unwrapped_if_unused == b.can_be_unwrapped_if_unused); + } +}; + +pub const Dot = struct { + // target is Node + target: ExprNodeIndex, + name: string, + name_loc: logger.Loc, + optional_chain: ?OptionalChain = null, + + // If true, this property access is known to be free of side-effects. That + // means it can be removed if the resulting value isn't used. + can_be_removed_if_unused: bool = false, + + // If true, this property access is a function that, when called, can be + // unwrapped if the resulting value is unused. Unwrapping means discarding + // the call target but keeping any arguments with side effects. 
+ call_can_be_unwrapped_if_unused: bool = false, + + pub fn hasSameFlagsAs(a: *Dot, b: *Dot) bool { + return (a.optional_chain == b.optional_chain and + a.is_direct_eval == b.is_direct_eval and + a.can_be_removed_if_unused == b.can_be_removed_if_unused and a.call_can_be_unwrapped_if_unused == b.call_can_be_unwrapped_if_unused); + } +}; + +pub const Index = struct { + index: ExprNodeIndex, + target: ExprNodeIndex, + optional_chain: ?OptionalChain = null, + + pub fn hasSameFlagsAs(a: *E.Index, b: *E.Index) bool { + return (a.optional_chain == b.optional_chain); + } +}; + +pub const Arrow = struct { + args: []G.Arg = &[_]G.Arg{}, + body: G.FnBody, + + is_async: bool = false, + has_rest_arg: bool = false, + prefer_expr: bool = false, // Use shorthand if true and "Body" is a single return statement + + pub const noop_return_undefined: Arrow = .{ + .args = &.{}, + .body = .{ + .loc = .Empty, + .stmts = &.{}, + }, + }; +}; + +pub const Function = struct { func: G.Fn }; + +pub const Identifier = struct { + ref: Ref = Ref.None, + + // If we're inside a "with" statement, this identifier may be a property + // access. In that case it would be incorrect to remove this identifier since + // the property access may be a getter or setter with side effects. + must_keep_due_to_with_stmt: bool = false, + + // If true, this identifier is known to not have a side effect (i.e. to not + // throw an exception) when referenced. If false, this identifier may or may + // not have side effects when referenced. This is used to allow the removal + // of known globals such as "Object" if they aren't used. + can_be_removed_if_unused: bool = false, + + // If true, this identifier represents a function that, when called, can be + // unwrapped if the resulting value is unused. Unwrapping means discarding + // the call target but keeping any arguments with side effects. + call_can_be_unwrapped_if_unused: bool = false, + + pub inline fn init(ref: Ref) Identifier { + return Identifier{ + .ref = ref, + .must_keep_due_to_with_stmt = false, + .can_be_removed_if_unused = false, + .call_can_be_unwrapped_if_unused = false, + }; + } +}; + +/// This is similar to an `Identifier` but it represents a reference to an ES6 +/// import item. +/// +/// Depending on how the code is linked, the file containing this EImportIdentifier +/// may or may not be in the same module group as the file it was imported from. +/// +/// If it's the same module group then we can just merge the import item symbol +/// with the corresponding symbol that was imported, effectively renaming them +/// to be the same thing and statically binding them together. +/// +/// But if it's a different module group, then the import must be dynamically +/// evaluated using a property access off the corresponding namespace symbol, +/// which represents the result of a require() call. +/// +/// It's stored as a separate type so it's not easy to confuse with a plain +/// identifier. For example, it'd be bad if code trying to convert "{x: x}" into +/// "{x}" shorthand syntax wasn't aware that the "x" in this case is actually +/// "{x: importedNamespace.x}". This separate type forces code to opt-in to +/// doing this instead of opt-out. +pub const ImportIdentifier = struct { + ref: Ref = Ref.None, + + /// If true, this was originally an identifier expression such as "foo". If + /// false, this could potentially have been a member access expression such +/// as "ns.foo" off of an imported namespace object.
+    was_originally_identifier: bool = false,
+};
+
+/// This is a dot expression on exports, such as `exports.<ref>`. It is given
+/// its own AST node to allow CommonJS unwrapping, in which this can just be
+/// the identifier in the Ref.
+pub const CommonJSExportIdentifier = struct {
+    ref: Ref = Ref.None,
+    base: Base = .exports,
+
+    /// The original variant of the dot expression must be known so that in the case that we
+    /// - fail to convert this to ESM
+    /// - ALSO see an assignment to `module.exports` (commonjs_module_exports_assigned_deoptimized)
+    /// it is known whether `exports` or `module.exports` was written in source
+    /// code, as the distinction will alter behavior. The fixup happens in the printer when
+    /// printing this node.
+    pub const Base = enum {
+        exports,
+        module_dot_exports,
+    };
+};
+
+// This is similar to EIdentifier but it represents class-private fields and
+// methods. It can be used where computed properties can be used, such as
+// EIndex and Property.
+pub const PrivateIdentifier = struct {
+    ref: Ref,
+};
+
+/// In development mode, the new JSX transform has a few special props
+/// - `React.jsxDEV(type, arguments, key, isStaticChildren, source, self)`
+/// - `arguments`:
+///   ```{ ...props, children: children, }```
+/// - `source`: https://github.com/babel/babel/blob/ef87648f3f05ccc393f89dea7d4c7c57abf398ce/packages/babel-plugin-transform-react-jsx-source/src/index.js#L24-L48
+///   ```{
+///     fileName: string | null,
+///     columnNumber: number | null,
+///     lineNumber: number | null,
+///   }```
+/// - `children`:
+///   - static children? the function is React.jsxsDEV, "jsxs" instead of "jsx"
+///   - one child? the function is React.jsxDEV
+///   - no children? the function is React.jsxDEV and children is an empty array
+/// - `isStaticChildren`: https://github.com/facebook/react/blob/4ca62cac45c288878d2532e5056981d177f9fdac/packages/react/src/jsx/ReactJSXElementValidator.js#L369-L384
+///   This flag means children is an array of JSX element literals.
+///   The documentation on this is sparse, but it appears that
+///   React just calls Object.freeze on the children array.
+///   Object.freeze, historically, is quite a bit slower[0] than just not doing that.
+///   Given that, I am choosing to always pass "false" to this.
+///   This also skips extra state that we'd need to track.
+///   If React Fast Refresh ends up using this later, then we can revisit this decision.
+///   [0]: https://github.com/automerge/automerge/issues/177
+pub const JSXElement = struct {
+    /// JSX tag name
+    /// <div> => E.String.init("div")
+    /// <MyComponent> => E.Identifier{ .ref = symbolPointingToMyComponent }
+    /// null represents a fragment
+    tag: ?ExprNodeIndex = null,
+
+    /// JSX props
+    properties: G.Property.List = G.Property.List{},
+
+    /// JSX element children <div>{this_is_a_child_element}</div>
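+    /// (per the `children` rules in the comment above: static/multiple children
+    /// select jsxsDEV, a single child selects jsxDEV, and no children selects
+    /// jsxDEV with an empty array)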
+ children: ExprNodeList = ExprNodeList{}, + + // needed to make sure parse and visit happen in the same order + key_prop_index: i32 = -1, + + flags: Flags.JSXElement.Bitset = Flags.JSXElement.Bitset{}, + + close_tag_loc: logger.Loc = logger.Loc.Empty, + + pub const SpecialProp = enum { + __self, // old react transform used this as a prop + __source, + key, + ref, + any, + + pub const Map = ComptimeStringMap(SpecialProp, .{ + .{ "__self", .__self }, + .{ "__source", .__source }, + .{ "key", .key }, + .{ "ref", .ref }, + }); + }; +}; + +pub const Missing = struct { + pub fn jsonStringify(_: *const @This(), writer: anytype) !void { + return try writer.write(null); + } +}; + +pub const Number = struct { + value: f64, + + const double_digit = [_]string{ "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "60", "61", "62", "63", "64", "65", "66", "67", "68", "69", "70", "71", "72", "73", "74", "75", "76", "77", "78", "79", "80", "81", "82", "83", "84", "85", "86", "87", "88", "89", "90", "91", "92", "93", "94", "95", "96", "97", "98", "99", "100" }; + const neg_double_digit = [_]string{ "-0", "-1", "-2", "-3", "-4", "-5", "-6", "-7", "-8", "-9", "-10", "-11", "-12", "-13", "-14", "-15", "-16", "-17", "-18", "-19", "-20", "-21", "-22", "-23", "-24", "-25", "-26", "-27", "-28", "-29", "-30", "-31", "-32", "-33", "-34", "-35", "-36", "-37", "-38", "-39", "-40", "-41", "-42", "-43", "-44", "-45", "-46", "-47", "-48", "-49", "-50", "-51", "-52", "-53", "-54", "-55", "-56", "-57", "-58", "-59", "-60", "-61", "-62", "-63", "-64", "-65", "-66", "-67", "-68", "-69", "-70", "-71", "-72", "-73", "-74", "-75", "-76", "-77", "-78", "-79", "-80", "-81", "-82", "-83", "-84", "-85", "-86", "-87", "-88", "-89", "-90", "-91", "-92", "-93", "-94", "-95", "-96", "-97", "-98", "-99", "-100" }; + + /// String concatenation with numbers is required by the TypeScript compiler for + /// "constant expression" handling in enums. We can match the behavior of a JS VM + /// by calling out to the APIs in WebKit which are responsible for this operation. + /// + /// This can return `null` in wasm builds to avoid linking JSC + pub fn toString(this: Number, allocator: std.mem.Allocator) ?string { + return toStringFromF64(this.value, allocator); + } + + pub fn toStringFromF64(value: f64, allocator: std.mem.Allocator) ?string { + if (value == @trunc(value) and (value < std.math.maxInt(i32) and value > std.math.minInt(i32))) { + const int_value = @as(i64, @intFromFloat(value)); + const abs = @as(u64, @intCast(@abs(int_value))); + + // do not allocate for a small set of constant numbers: -100 through 100 + if (abs < double_digit.len) { + return if (int_value < 0) + neg_double_digit[abs] + else + double_digit[abs]; + } + + return std.fmt.allocPrint(allocator, "{d}", .{@as(i32, @intCast(int_value))}) catch return null; + } + + if (std.math.isNan(value)) { + return "NaN"; + } + + if (std.math.isNegativeInf(value)) { + return "-Infinity"; + } + + if (std.math.isInf(value)) { + return "Infinity"; + } + + if (Environment.isNative) { + var buf: [124]u8 = undefined; + return allocator.dupe(u8, bun.fmt.FormatDouble.dtoa(&buf, value)) catch bun.outOfMemory(); + } else { + // do not attempt to implement the spec here, it would be error prone. 
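+            // e.g. on native targets 1.5 formats through dtoa above, small integer
+            // constants hit the lookup tables earlier in this function, and wasm
+            // builds fall through to the `return null` below.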
+ } + + return null; + } + + pub inline fn toU64(self: Number) u64 { + return self.to(u64); + } + + pub inline fn toUsize(self: Number) usize { + return self.to(usize); + } + + pub inline fn toU32(self: Number) u32 { + return self.to(u32); + } + + pub inline fn toU16(self: Number) u16 { + return self.to(u16); + } + + pub fn to(self: Number, comptime T: type) T { + return @as(T, @intFromFloat(@min(@max(@trunc(self.value), 0), comptime @min(std.math.floatMax(f64), std.math.maxInt(T))))); + } + + pub fn jsonStringify(self: *const Number, writer: anytype) !void { + return try writer.write(self.value); + } + + pub fn toJS(this: @This()) JSC.JSValue { + return JSC.JSValue.jsNumber(this.value); + } +}; + +pub const BigInt = struct { + value: string, + + pub var empty = BigInt{ .value = "" }; + + pub fn jsonStringify(self: *const @This(), writer: anytype) !void { + return try writer.write(self.value); + } + + pub fn toJS(_: @This()) JSC.JSValue { + // TODO: + return JSC.JSValue.jsNumber(0); + } +}; + +pub const Object = struct { + properties: G.Property.List = G.Property.List{}, + comma_after_spread: ?logger.Loc = null, + is_single_line: bool = false, + is_parenthesized: bool = false, + was_originally_macro: bool = false, + + close_brace_loc: logger.Loc = logger.Loc.Empty, + + // used in TOML parser to merge properties + pub const Rope = struct { + head: Expr, + next: ?*Rope = null, + pub fn append(this: *Rope, expr: Expr, allocator: std.mem.Allocator) OOM!*Rope { + if (this.next) |next| { + return try next.append(expr, allocator); + } + + const rope = try allocator.create(Rope); + rope.* = .{ .head = expr }; + this.next = rope; + return rope; + } + }; + + pub fn get(self: *const Object, key: string) ?Expr { + return if (asProperty(self, key)) |query| query.expr else @as(?Expr, null); + } + + pub fn toJS(this: *Object, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue { + var obj = JSC.JSValue.createEmptyObject(globalObject, this.properties.len); + obj.protect(); + defer obj.unprotect(); + const props: []const G.Property = this.properties.slice(); + for (props) |prop| { + if (prop.kind != .normal or prop.class_static_block != null or prop.key == null or prop.value == null) { + return error.@"Cannot convert argument type to JS"; + } + const key = try prop.key.?.data.toJS(allocator, globalObject); + const value = try prop.value.?.toJS(allocator, globalObject); + try obj.putToPropertyKey(globalObject, key, value); + } + + return obj; + } + + pub fn put(self: *Object, allocator: std.mem.Allocator, key: string, expr: Expr) !void { + if (asProperty(self, key)) |query| { + self.properties.ptr[query.i].value = expr; + } else { + try self.properties.push(allocator, .{ + .key = Expr.init(E.String, E.String.init(key), expr.loc), + .value = expr, + }); + } + } + + pub fn putString(self: *Object, allocator: std.mem.Allocator, key: string, value: string) !void { + return try put(self, allocator, key, Expr.init(E.String, E.String.init(value), logger.Loc.Empty)); + } + + pub const SetError = error{ OutOfMemory, Clobber }; + + pub fn set(self: *const Object, key: Expr, allocator: std.mem.Allocator, value: Expr) SetError!void { + if (self.hasProperty(key.data.e_string.data)) return error.Clobber; + try self.properties.push(allocator, .{ + .key = key, + .value = value, + }); + } + + pub const RopeQuery = struct { + expr: Expr, + rope: *const Rope, + }; + + // this is terribly, shamefully slow + pub fn setRope(self: *Object, rope: *const Rope, allocator: std.mem.Allocator, value: 
Expr) SetError!void { + if (self.get(rope.head.data.e_string.data)) |existing| { + switch (existing.data) { + .e_array => |array| { + if (rope.next == null) { + try array.push(allocator, value); + return; + } + + if (array.items.last()) |last| { + if (last.data != .e_object) { + return error.Clobber; + } + + try last.data.e_object.setRope(rope.next.?, allocator, value); + return; + } + + try array.push(allocator, value); + return; + }, + .e_object => |object| { + if (rope.next != null) { + try object.setRope(rope.next.?, allocator, value); + return; + } + + return error.Clobber; + }, + else => { + return error.Clobber; + }, + } + } + + var value_ = value; + if (rope.next) |next| { + var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc); + try obj.data.e_object.setRope(next, allocator, value); + value_ = obj; + } + + try self.properties.push(allocator, .{ + .key = rope.head, + .value = value_, + }); + } + + pub fn getOrPutObject(self: *Object, rope: *const Rope, allocator: std.mem.Allocator) SetError!Expr { + if (self.get(rope.head.data.e_string.data)) |existing| { + switch (existing.data) { + .e_array => |array| { + if (rope.next == null) { + return error.Clobber; + } + + if (array.items.last()) |last| { + if (last.data != .e_object) { + return error.Clobber; + } + + return try last.data.e_object.getOrPutObject(rope.next.?, allocator); + } + + return error.Clobber; + }, + .e_object => |object| { + if (rope.next != null) { + return try object.getOrPutObject(rope.next.?, allocator); + } + + // success + return existing; + }, + else => { + return error.Clobber; + }, + } + } + + if (rope.next) |next| { + var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc); + const out = try obj.data.e_object.getOrPutObject(next, allocator); + try self.properties.push(allocator, .{ + .key = rope.head, + .value = obj, + }); + return out; + } + + const out = Expr.init(E.Object, E.Object{}, rope.head.loc); + try self.properties.push(allocator, .{ + .key = rope.head, + .value = out, + }); + return out; + } + + pub fn getOrPutArray(self: *Object, rope: *const Rope, allocator: std.mem.Allocator) SetError!Expr { + if (self.get(rope.head.data.e_string.data)) |existing| { + switch (existing.data) { + .e_array => |array| { + if (rope.next == null) { + return existing; + } + + if (array.items.last()) |last| { + if (last.data != .e_object) { + return error.Clobber; + } + + return try last.data.e_object.getOrPutArray(rope.next.?, allocator); + } + + return error.Clobber; + }, + .e_object => |object| { + if (rope.next == null) { + return error.Clobber; + } + + return try object.getOrPutArray(rope.next.?, allocator); + }, + else => { + return error.Clobber; + }, + } + } + + if (rope.next) |next| { + var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc); + const out = try obj.data.e_object.getOrPutArray(next, allocator); + try self.properties.push(allocator, .{ + .key = rope.head, + .value = obj, + }); + return out; + } + + const out = Expr.init(E.Array, E.Array{}, rope.head.loc); + try self.properties.push(allocator, .{ + .key = rope.head, + .value = out, + }); + return out; + } + + pub fn hasProperty(obj: *const Object, name: string) bool { + for (obj.properties.slice()) |prop| { + const key = prop.key orelse continue; + if (std.meta.activeTag(key.data) != .e_string) continue; + if (key.data.e_string.eql(string, name)) return true; + } + return false; + } + + pub fn asProperty(obj: *const Object, name: string) ?Expr.Query { + for (obj.properties.slice(), 
0..) |prop, i| { + const value = prop.value orelse continue; + const key = prop.key orelse continue; + if (std.meta.activeTag(key.data) != .e_string) continue; + const key_str = key.data.e_string; + if (key_str.eql(string, name)) { + return Expr.Query{ + .expr = value, + .loc = key.loc, + .i = @as(u32, @truncate(i)), + }; + } + } + + return null; + } + + /// Assumes each key in the property is a string + pub fn alphabetizeProperties(this: *Object) void { + if (comptime Environment.isDebug) { + for (this.properties.slice()) |prop| { + bun.assert(prop.key.?.data == .e_string); + } + } + std.sort.pdq(G.Property, this.properties.slice(), {}, Sorter.isLessThan); + } + + pub fn packageJSONSort(this: *Object) void { + std.sort.pdq(G.Property, this.properties.slice(), {}, PackageJSONSort.Fields.isLessThan); + } + + const PackageJSONSort = struct { + const Fields = enum(u8) { + name = 0, + version = 1, + author = 2, + repository = 3, + config = 4, + main = 5, + module = 6, + dependencies = 7, + devDependencies = 8, + optionalDependencies = 9, + peerDependencies = 10, + exports = 11, + __fake = 12, + + pub const Map = ComptimeStringMap(Fields, .{ + .{ "name", Fields.name }, + .{ "version", Fields.version }, + .{ "author", Fields.author }, + .{ "repository", Fields.repository }, + .{ "config", Fields.config }, + .{ "main", Fields.main }, + .{ "module", Fields.module }, + .{ "dependencies", Fields.dependencies }, + .{ "devDependencies", Fields.devDependencies }, + .{ "optionalDependencies", Fields.optionalDependencies }, + .{ "peerDependencies", Fields.peerDependencies }, + .{ "exports", Fields.exports }, + }); + + pub fn isLessThan(ctx: void, lhs: G.Property, rhs: G.Property) bool { + var lhs_key_size: u8 = @intFromEnum(Fields.__fake); + var rhs_key_size: u8 = @intFromEnum(Fields.__fake); + + if (lhs.key != null and lhs.key.?.data == .e_string) { + lhs_key_size = @intFromEnum(Map.get(lhs.key.?.data.e_string.data) orelse Fields.__fake); + } + + if (rhs.key != null and rhs.key.?.data == .e_string) { + rhs_key_size = @intFromEnum(Map.get(rhs.key.?.data.e_string.data) orelse Fields.__fake); + } + + return switch (std.math.order(lhs_key_size, rhs_key_size)) { + .lt => true, + .gt => false, + .eq => strings.cmpStringsAsc(ctx, lhs.key.?.data.e_string.data, rhs.key.?.data.e_string.data), + }; + } + }; + }; + + const Sorter = struct { + pub fn isLessThan(ctx: void, lhs: G.Property, rhs: G.Property) bool { + return strings.cmpStringsAsc(ctx, lhs.key.?.data.e_string.data, rhs.key.?.data.e_string.data); + } + }; +}; + +pub const Spread = struct { value: ExprNodeIndex }; + +/// JavaScript string literal type +pub const String = struct { + // A version of this where `utf8` and `value` are stored in a packed union, with len as a single u32 was attempted. + // It did not improve benchmarks. Neither did converting this from a heap-allocated type to a stack-allocated type. 
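+    // When `is_utf16` is set, `data.ptr` actually points at UTF-16 code units and
+    // `data.len` counts code units rather than bytes; see init(), slice8(), and slice16().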
+ // TODO: change this to *const anyopaque and change all uses to either .slice8() or .slice16() + data: []const u8 = "", + prefer_template: bool = false, + + // A very simple rope implementation + // We only use this for string folding, so this is kind of overkill + // We don't need to deal with substrings + next: ?*String = null, + end: ?*String = null, + rope_len: u32 = 0, + is_utf16: bool = false, + + pub fn isIdentifier(this: *String, allocator: std.mem.Allocator) bool { + if (!this.isUTF8()) { + return bun.js_lexer.isIdentifierUTF16(this.slice16()); + } + + return bun.js_lexer.isIdentifier(this.slice(allocator)); + } + + pub const class = E.String{ .data = "class" }; + + pub fn push(this: *String, other: *String) void { + bun.assert(this.isUTF8()); + bun.assert(other.isUTF8()); + + if (other.rope_len == 0) { + other.rope_len = @truncate(other.data.len); + } + + if (this.rope_len == 0) { + this.rope_len = @truncate(this.data.len); + } + + this.rope_len += other.rope_len; + if (this.next == null) { + this.next = other; + this.end = other; + } else { + var end = this.end.?; + while (end.next != null) end = end.end.?; + end.next = other; + this.end = other; + } + } + + /// Cloning the rope string is rarely needed, see `foldStringAddition`'s + /// comments and the 'edgecase/EnumInliningRopeStringPoison' test + pub fn cloneRopeNodes(s: String) String { + var root = s; + + if (root.next != null) { + var current: ?*String = &root; + while (true) { + const node = current.?; + if (node.next) |next| { + node.next = Expr.Data.Store.append(String, next.*); + current = node.next; + } else { + root.end = node; + break; + } + } + } + + return root; + } + + pub fn toUTF8(this: *String, allocator: std.mem.Allocator) !void { + if (!this.is_utf16) return; + this.data = try strings.toUTF8Alloc(allocator, this.slice16()); + this.is_utf16 = false; + } + + pub fn init(value: anytype) String { + const Value = @TypeOf(value); + if (Value == []u16 or Value == []const u16) { + return .{ + .data = @as([*]const u8, @ptrCast(value.ptr))[0..value.len], + .is_utf16 = true, + }; + } + + return .{ .data = value }; + } + + /// E.String containing non-ascii characters may not fully work. + /// https://github.com/oven-sh/bun/issues/11963 + /// More investigation is needed. 
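+    /// e.g. all-ASCII input is stored as-is, while input containing non-ASCII
+    /// bytes is re-encoded to UTF-16 first.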
+ pub fn initReEncodeUTF8(utf8: []const u8, allocator: std.mem.Allocator) String { + return if (bun.strings.isAllASCII(utf8)) + init(utf8) + else + init(bun.strings.toUTF16AllocForReal(allocator, utf8, false, false) catch bun.outOfMemory()); + } + + pub fn slice8(this: *const String) []const u8 { + bun.assert(!this.is_utf16); + return this.data; + } + + pub fn slice16(this: *const String) []const u16 { + bun.assert(this.is_utf16); + return @as([*]const u16, @ptrCast(@alignCast(this.data.ptr)))[0..this.data.len]; + } + + pub fn resolveRopeIfNeeded(this: *String, allocator: std.mem.Allocator) void { + if (this.next == null or !this.isUTF8()) return; + var bytes = std.ArrayList(u8).initCapacity(allocator, this.rope_len) catch bun.outOfMemory(); + + bytes.appendSliceAssumeCapacity(this.data); + var str = this.next; + while (str) |part| { + bytes.appendSlice(part.data) catch bun.outOfMemory(); + str = part.next; + } + this.data = bytes.items; + this.next = null; + } + + pub fn slice(this: *String, allocator: std.mem.Allocator) []const u8 { + this.resolveRopeIfNeeded(allocator); + return this.string(allocator) catch bun.outOfMemory(); + } + + pub var empty = String{}; + pub var @"true" = String{ .data = "true" }; + pub var @"false" = String{ .data = "false" }; + pub var @"null" = String{ .data = "null" }; + pub var @"undefined" = String{ .data = "undefined" }; + + pub fn clone(str: *const String, allocator: std.mem.Allocator) !String { + return String{ + .data = try allocator.dupe(u8, str.data), + .prefer_template = str.prefer_template, + .is_utf16 = !str.isUTF8(), + }; + } + + pub fn cloneSliceIfNecessary(str: *const String, allocator: std.mem.Allocator) !bun.string { + if (str.isUTF8()) { + return allocator.dupe(u8, str.string(allocator) catch unreachable); + } + + return str.string(allocator); + } + + pub fn javascriptLength(s: *const String) ?u32 { + if (s.rope_len > 0) { + // We only support ascii ropes for now + return s.rope_len; + } + + if (s.isUTF8()) { + if (!strings.isAllASCII(s.data)) { + return null; + } + return @truncate(s.data.len); + } + + return @truncate(s.slice16().len); + } + + pub inline fn len(s: *const String) usize { + return if (s.rope_len > 0) s.rope_len else s.data.len; + } + + pub inline fn isUTF8(s: *const String) bool { + return !s.is_utf16; + } + + pub inline fn isBlank(s: *const String) bool { + return s.len() == 0; + } + + pub inline fn isPresent(s: *const String) bool { + return s.len() > 0; + } + + pub fn eql(s: *const String, comptime _t: type, other: anytype) bool { + if (s.isUTF8()) { + switch (_t) { + @This() => { + if (other.isUTF8()) { + return strings.eqlLong(s.data, other.data, true); + } else { + return strings.utf16EqlString(other.slice16(), s.data); + } + }, + bun.string => { + return strings.eqlLong(s.data, other, true); + }, + []u16, []const u16 => { + return strings.utf16EqlString(other, s.data); + }, + else => { + @compileError("Invalid type"); + }, + } + } else { + switch (_t) { + @This() => { + if (other.isUTF8()) { + return strings.utf16EqlString(s.slice16(), other.data); + } else { + return std.mem.eql(u16, other.slice16(), s.slice16()); + } + }, + bun.string => { + return strings.utf16EqlString(s.slice16(), other); + }, + []u16, []const u16 => { + return std.mem.eql(u16, other.slice16(), s.slice16()); + }, + else => { + @compileError("Invalid type"); + }, + } + } + } + + pub fn eqlComptime(s: *const String, comptime value: []const u8) bool { + bun.assert(s.next == null); + return if (s.isUTF8()) + strings.eqlComptime(s.data, value) + else + 
strings.eqlComptimeUTF16(s.slice16(), value); + } + + pub fn hasPrefixComptime(s: *const String, comptime value: anytype) bool { + if (s.data.len < value.len) + return false; + + return if (s.isUTF8()) + strings.eqlComptime(s.data[0..value.len], value) + else + strings.eqlComptimeUTF16(s.slice16()[0..value.len], value); + } + + pub fn string(s: *const String, allocator: std.mem.Allocator) OOM!bun.string { + if (s.isUTF8()) { + return s.data; + } else { + return strings.toUTF8Alloc(allocator, s.slice16()); + } + } + + pub fn stringZ(s: *const String, allocator: std.mem.Allocator) OOM!bun.stringZ { + if (s.isUTF8()) { + return allocator.dupeZ(u8, s.data); + } else { + return strings.toUTF8AllocZ(allocator, s.slice16()); + } + } + + pub fn stringCloned(s: *const String, allocator: std.mem.Allocator) OOM!bun.string { + if (s.isUTF8()) { + return allocator.dupe(u8, s.data); + } else { + return strings.toUTF8Alloc(allocator, s.slice16()); + } + } + + pub fn hash(s: *const String) u64 { + if (s.isBlank()) return 0; + + if (s.isUTF8()) { + // hash utf-8 + return bun.hash(s.data); + } else { + // hash utf-16 + return bun.hash(@as([*]const u8, @ptrCast(s.slice16().ptr))[0 .. s.slice16().len * 2]); + } + } + + pub fn toJS(s: *String, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) !JSC.JSValue { + s.resolveRopeIfNeeded(allocator); + if (!s.isPresent()) { + var emp = bun.String.empty; + return emp.toJS(globalObject); + } + + if (s.isUTF8()) { + if (try strings.toUTF16Alloc(allocator, s.slice8(), false, false)) |utf16| { + var out, const chars = bun.String.createUninitialized(.utf16, utf16.len); + @memcpy(chars, utf16); + return out.transferToJS(globalObject); + } else { + var out, const chars = bun.String.createUninitialized(.latin1, s.slice8().len); + @memcpy(chars, s.slice8()); + return out.transferToJS(globalObject); + } + } else { + var out, const chars = bun.String.createUninitialized(.utf16, s.slice16().len); + @memcpy(chars, s.slice16()); + return out.transferToJS(globalObject); + } + } + + pub fn toZigString(s: *String, allocator: std.mem.Allocator) JSC.ZigString { + if (s.isUTF8()) { + return JSC.ZigString.fromUTF8(s.slice(allocator)); + } else { + return JSC.ZigString.initUTF16(s.slice16()); + } + } + + pub fn format(s: String, comptime fmt: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + comptime bun.assert(fmt.len == 0); + + try writer.writeAll("E.String"); + if (s.next == null) { + try writer.writeAll("("); + if (s.isUTF8()) { + try writer.print("\"{s}\"", .{s.data}); + } else { + try writer.print("\"{}\"", .{bun.fmt.utf16(s.slice16())}); + } + try writer.writeAll(")"); + } else { + try writer.writeAll("(rope: ["); + var it: ?*const String = &s; + while (it) |part| { + if (part.isUTF8()) { + try writer.print("\"{s}\"", .{part.data}); + } else { + try writer.print("\"{}\"", .{bun.fmt.utf16(part.slice16())}); + } + it = part.next; + if (it != null) try writer.writeAll(" "); + } + try writer.writeAll("])"); + } + } + + pub fn jsonStringify(s: *const String, writer: anytype) !void { + var buf = [_]u8{0} ** 4096; + var i: usize = 0; + for (s.slice16()) |char| { + buf[i] = @as(u8, @intCast(char)); + i += 1; + if (i >= 4096) { + break; + } + } + + return try writer.write(buf[0..i]); + } +}; + +// value is in the Node +pub const TemplatePart = struct { + value: ExprNodeIndex, + tail_loc: logger.Loc, + tail: Template.Contents, +}; + +pub const Template = struct { + tag: ?ExprNodeIndex = null, + parts: []TemplatePart = &.{}, + head: Contents, + + pub const Contents 
= union(Tag) { + cooked: E.String, + raw: string, + + const Tag = enum { + cooked, + raw, + }; + + pub fn isUTF8(contents: Contents) bool { + return contents == .cooked and contents.cooked.isUTF8(); + } + }; + + /// "`a${'b'}c`" => "`abc`" + pub fn fold( + this: *Template, + allocator: std.mem.Allocator, + loc: logger.Loc, + ) Expr { + if (this.tag != null or (this.head == .cooked and !this.head.cooked.isUTF8())) { + // we only fold utf-8/ascii for now + return Expr{ + .data = .{ .e_template = this }, + .loc = loc, + }; + } + + bun.assert(this.head == .cooked); + + if (this.parts.len == 0) { + return Expr.init(E.String, this.head.cooked, loc); + } + + var parts = std.ArrayList(TemplatePart).initCapacity(allocator, this.parts.len) catch unreachable; + var head = Expr.init(E.String, this.head.cooked, loc); + for (this.parts) |part_src| { + var part = part_src; + bun.assert(part.tail == .cooked); + + part.value = part.value.unwrapInlined(); + + switch (part.value.data) { + .e_number => { + if (part.value.data.e_number.toString(allocator)) |s| { + part.value = Expr.init(E.String, E.String.init(s), part.value.loc); + } + }, + .e_null => { + part.value = Expr.init(E.String, E.String.init("null"), part.value.loc); + }, + .e_boolean => { + part.value = Expr.init(E.String, E.String.init(if (part.value.data.e_boolean.value) + "true" + else + "false"), part.value.loc); + }, + .e_undefined => { + part.value = Expr.init(E.String, E.String.init("undefined"), part.value.loc); + }, + .e_big_int => |value| { + part.value = Expr.init(E.String, E.String.init(value.value), part.value.loc); + }, + else => {}, + } + + if (part.value.data == .e_string and part.tail.cooked.isUTF8() and part.value.data.e_string.isUTF8()) { + if (parts.items.len == 0) { + if (part.value.data.e_string.len() > 0) { + head.data.e_string.push(Expr.init(E.String, part.value.data.e_string.*, logger.Loc.Empty).data.e_string); + } + + if (part.tail.cooked.len() > 0) { + head.data.e_string.push(Expr.init(E.String, part.tail.cooked, part.tail_loc).data.e_string); + } + + continue; + } else { + var prev_part = &parts.items[parts.items.len - 1]; + bun.assert(prev_part.tail == .cooked); + + if (prev_part.tail.cooked.isUTF8()) { + if (part.value.data.e_string.len() > 0) { + prev_part.tail.cooked.push(Expr.init(E.String, part.value.data.e_string.*, logger.Loc.Empty).data.e_string); + } + + if (part.tail.cooked.len() > 0) { + prev_part.tail.cooked.push(Expr.init(E.String, part.tail.cooked, part.tail_loc).data.e_string); + } + } else { + parts.appendAssumeCapacity(part); + } + } + } else { + parts.appendAssumeCapacity(part); + } + } + + if (parts.items.len == 0) { + parts.deinit(); + head.data.e_string.resolveRopeIfNeeded(allocator); + return head; + } + + return Expr.init(E.Template, .{ + .tag = null, + .parts = parts.items, + .head = .{ .cooked = head.data.e_string.* }, + }, loc); + } +}; + +pub const RegExp = struct { + value: string, + + // This exists for JavaScript bindings + // The RegExp constructor expects flags as a second argument. + // We want to avoid re-lexing the flags, so we store them here. 
+ // This is the index of the first character in a flag, not the "/" + // /foo/gim + // ^ + flags_offset: ?u16 = null, + + pub var empty = RegExp{ .value = "" }; + + pub fn pattern(this: RegExp) string { + + // rewind until we reach the /foo/gim + // ^ + // should only ever be a single character + // but we're being cautious + if (this.flags_offset) |i_| { + var i = i_; + while (i > 0 and this.value[i] != '/') { + i -= 1; + } + + return std.mem.trim(u8, this.value[0..i], "/"); + } + + return std.mem.trim(u8, this.value, "/"); + } + + pub fn flags(this: RegExp) string { + // rewind until we reach the /foo/gim + // ^ + // should only ever be a single character + // but we're being cautious + if (this.flags_offset) |i| { + return this.value[i..]; + } + + return ""; + } + + pub fn jsonStringify(self: *const RegExp, writer: anytype) !void { + return try writer.write(self.value); + } +}; + +pub const Await = struct { + value: ExprNodeIndex, +}; + +pub const Yield = struct { + value: ?ExprNodeIndex = null, + is_star: bool = false, +}; + +pub const If = struct { + test_: ExprNodeIndex, + yes: ExprNodeIndex, + no: ExprNodeIndex, +}; + +pub const RequireString = struct { + import_record_index: u32 = 0, + + unwrapped_id: u32 = std.math.maxInt(u32), +}; + +pub const RequireResolveString = struct { + import_record_index: u32, + + // close_paren_loc: logger.Loc = logger.Loc.Empty, +}; + +pub const InlinedEnum = struct { + value: ExprNodeIndex, + comment: string, +}; + +pub const Import = struct { + expr: ExprNodeIndex, + options: ExprNodeIndex = Expr.empty, + import_record_index: u32, + + /// TODO: + /// Comments inside "import()" expressions have special meaning for Webpack. + /// Preserving comments inside these expressions makes it possible to use + /// esbuild as a TypeScript-to-JavaScript frontend for Webpack to improve + /// performance. We intentionally do not interpret these comments in esbuild + /// because esbuild is not Webpack. But we do preserve them since doing so is + /// harmless, easy to maintain, and useful to people. See the Webpack docs for + /// more info: https://webpack.js.org/api/module-methods/#magic-comments. 
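+    ///
+    /// e.g. Webpack reads hints such as `import(/* webpackChunkName: "chunk" */ "./mod")`;
+    /// preserving the comment verbatim keeps that hint usable by downstream tools.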
+ // leading_interior_comments: []G.Comment = &([_]G.Comment{}), + + pub fn isImportRecordNull(this: *const Import) bool { + return this.import_record_index == std.math.maxInt(u32); + } + + pub fn importRecordLoader(import: *const Import) ?bun.options.Loader { + // This logic is duplicated in js_printer.zig fn parsePath() + const obj = import.options.data.as(.e_object) orelse + return null; + const with = obj.get("with") orelse obj.get("assert") orelse + return null; + const with_obj = with.data.as(.e_object) orelse + return null; + const str = (with_obj.get("type") orelse + return null).data.as(.e_string) orelse + return null; + + if (!str.is_utf16) if (bun.options.Loader.fromString(str.data)) |loader| { + if (loader == .sqlite) { + const embed = with_obj.get("embed") orelse return loader; + const embed_str = embed.data.as(.e_string) orelse return loader; + if (embed_str.eqlComptime("true")) { + return .sqlite_embedded; + } + } + return loader; + }; + + return null; + } +}; + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const ComptimeStringMap = bun.ComptimeStringMap; +const Environment = bun.Environment; +const ImportRecord = bun.ImportRecord; +const JSC = bun.JSC; +const OOM = bun.OOM; +const logger = bun.logger; +const string = bun.string; +const stringZ = bun.stringZ; +const strings = bun.strings; +const Loader = bun.options.Loader; + +const js_ast = bun.js_ast; +const E = js_ast.E; +const Expr = js_ast.Expr; +const ExprNodeIndex = js_ast.ExprNodeIndex; +const ExprNodeList = js_ast.ExprNodeList; +const Flags = js_ast.Flags; +const Op = js_ast.Op; +const OptionalChain = js_ast.OptionalChain; +const Ref = js_ast.Ref; +const ToJSError = js_ast.ToJSError; + +const G = js_ast.G; +pub const Class = G.Class; diff --git a/src/ast/Expr.zig b/src/ast/Expr.zig new file mode 100644 index 0000000000..d3939cd938 --- /dev/null +++ b/src/ast/Expr.zig @@ -0,0 +1,3231 @@ +loc: logger.Loc, +data: Data, + +pub const empty = Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = logger.Loc.Empty }; + +pub fn isAnonymousNamed(expr: Expr) bool { + return switch (expr.data) { + .e_arrow => true, + .e_function => |func| func.func.name == null, + .e_class => |class| class.class_name == null, + else => false, + }; +} + +pub fn clone(this: Expr, allocator: std.mem.Allocator) !Expr { + return .{ + .loc = this.loc, + .data = try this.data.clone(allocator), + }; +} + +pub fn deepClone(this: Expr, allocator: std.mem.Allocator) anyerror!Expr { + return .{ + .loc = this.loc, + .data = try this.data.deepClone(allocator), + }; +} + +pub fn wrapInArrow(this: Expr, allocator: std.mem.Allocator) !Expr { + var stmts = try allocator.alloc(Stmt, 1); + stmts[0] = Stmt.alloc(S.Return, S.Return{ .value = this }, this.loc); + + return Expr.init(E.Arrow, E.Arrow{ + .args = &.{}, + .body = .{ + .loc = this.loc, + .stmts = stmts, + }, + }, this.loc); +} + +pub fn canBeInlinedFromPropertyAccess(this: Expr) bool { + return switch (this.data) { + // if the array has a spread we must keep it + // https://github.com/oven-sh/bun/issues/2594 + .e_spread => false, + + .e_missing => false, + else => true, + }; +} + +pub fn canBeConstValue(this: Expr) bool { + return this.data.canBeConstValue(); +} + +pub fn canBeMoved(expr: Expr) bool { + return expr.data.canBeMoved(); +} + +pub fn unwrapInlined(expr: Expr) Expr { + if (expr.data.as(.e_inlined_enum)) |inlined| return inlined.value; + return expr; +} + +pub fn fromBlob( + blob: *const JSC.WebCore.Blob, + allocator: std.mem.Allocator, + mime_type_: ?MimeType, + 
log: *logger.Log, + loc: logger.Loc, +) !Expr { + const bytes = blob.sharedView(); + + const mime_type = mime_type_ orelse MimeType.init(blob.content_type, null, null); + + if (mime_type.category == .json) { + const source = &logger.Source.initPathString("fetch.json", bytes); + var out_expr = JSONParser.parseForMacro(source, log, allocator) catch { + return error.MacroFailed; + }; + out_expr.loc = loc; + + switch (out_expr.data) { + .e_object => { + out_expr.data.e_object.was_originally_macro = true; + }, + .e_array => { + out_expr.data.e_array.was_originally_macro = true; + }, + else => {}, + } + + return out_expr; + } + + if (mime_type.category.isTextLike()) { + var output = MutableString.initEmpty(allocator); + output = try JSPrinter.quoteForJSON(bytes, output, true); + var list = output.toOwnedSlice(); + // remove the quotes + if (list.len > 0) { + list = list[1 .. list.len - 1]; + } + return Expr.init(E.String, E.String.init(list), loc); + } + + return Expr.init( + E.String, + E.String{ + .data = try JSC.ZigString.init(bytes).toBase64DataURL(allocator), + }, + loc, + ); +} + +pub inline fn initIdentifier(ref: Ref, loc: logger.Loc) Expr { + return Expr{ + .loc = loc, + .data = .{ + .e_identifier = E.Identifier.init(ref), + }, + }; +} + +pub fn toEmpty(expr: Expr) Expr { + return Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = expr.loc }; +} +pub fn isEmpty(expr: Expr) bool { + return expr.data == .e_missing; +} +pub const Query = struct { expr: Expr, loc: logger.Loc, i: u32 = 0 }; + +pub fn hasAnyPropertyNamed(expr: *const Expr, comptime names: []const string) bool { + if (std.meta.activeTag(expr.data) != .e_object) return false; + const obj = expr.data.e_object; + if (obj.properties.len == 0) return false; + + for (obj.properties.slice()) |prop| { + if (prop.value == null) continue; + const key = prop.key orelse continue; + if (std.meta.activeTag(key.data) != .e_string) continue; + const key_str = key.data.e_string; + if (strings.eqlAnyComptime(key_str.data, names)) return true; + } + + return false; +} + +pub fn toJS(this: Expr, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue { + return this.data.toJS(allocator, globalObject); +} + +pub inline fn isArray(this: *const Expr) bool { + return this.data == .e_array; +} + +pub inline fn isObject(this: *const Expr) bool { + return this.data == .e_object; +} + +pub fn get(expr: *const Expr, name: string) ?Expr { + return if (asProperty(expr, name)) |query| query.expr else null; +} + +/// Only use this for pretty-printing JSON. Do not use in transpiler. +/// +/// This does not handle edgecases like `-1` or stringifying arbitrary property lookups. 
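+///
+/// e.g. index 1 of `["a", "b", "c"]` yields the `"b"` expression; for objects it
+/// matches a string key equal to `index_str` or a numeric key equal to `index`.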
+pub fn getByIndex(expr: *const Expr, index: u32, index_str: string, allocator: std.mem.Allocator) ?Expr { + switch (expr.data) { + .e_array => |array| { + if (index >= array.items.len) return null; + return array.items.slice()[index]; + }, + .e_object => |object| { + for (object.properties.sliceConst()) |*prop| { + const key = &(prop.key orelse continue); + switch (key.data) { + .e_string => |str| { + if (str.eql(string, index_str)) { + return prop.value; + } + }, + .e_number => |num| { + if (num.toU32() == index) { + return prop.value; + } + }, + else => {}, + } + } + + return null; + }, + .e_string => |str| { + if (str.len() > index) { + var slice = str.slice(allocator); + // TODO: this is not correct since .length refers to UTF-16 code units and not UTF-8 bytes + // However, since this is only used in the JSON prettifier for `bun pm view`, it's not a blocker for shipping. + if (slice.len > index) { + return Expr.init(E.String, .{ .data = slice[index..][0..1] }, expr.loc); + } + } + }, + else => {}, + } + + return null; +} + +/// This supports lookups like: +/// - `foo` +/// - `foo.bar` +/// - `foo[123]` +/// - `foo[123].bar` +/// - `foo[123].bar[456]` +/// - `foo[123].bar[456].baz` +/// - `foo[123].bar[456].baz.qux` // etc. +/// +/// This is not intended for use by the transpiler, instead by pretty printing JSON. +pub fn getPathMayBeIndex(expr: *const Expr, name: string) ?Expr { + if (name.len == 0) { + return null; + } + + if (strings.indexOfAny(name, "[.")) |idx| { + switch (name[idx]) { + '[' => { + const end_idx = strings.indexOfChar(name, ']') orelse return null; + var base_expr = expr; + if (idx > 0) { + const key = name[0..idx]; + base_expr = &(base_expr.get(key) orelse return null); + } + + const index_str = name[idx + 1 .. end_idx]; + const index = std.fmt.parseInt(u32, index_str, 10) catch return null; + const rest = if (name.len > end_idx) name[end_idx + 1 ..] else ""; + const result = &(base_expr.getByIndex(index, index_str, bun.default_allocator) orelse return null); + if (rest.len > 0) return result.getPathMayBeIndex(rest); + return result.*; + }, + '.' => { + const key = name[0..idx]; + const sub_expr = &(expr.get(key) orelse return null); + const subpath = if (name.len > idx) name[idx + 1 ..] else ""; + if (subpath.len > 0) { + return sub_expr.getPathMayBeIndex(subpath); + } + + return sub_expr.*; + }, + else => unreachable, + } + } + + return expr.get(name); +} + +/// Don't use this if you care about performance. +/// +/// Sets the value of a property, creating it if it doesn't exist. +/// `expr` must be an object. +pub fn set(expr: *Expr, allocator: std.mem.Allocator, name: string, value: Expr) OOM!void { + bun.assertWithLocation(expr.isObject(), @src()); + for (0..expr.data.e_object.properties.len) |i| { + const prop = &expr.data.e_object.properties.ptr[i]; + const key = prop.key orelse continue; + if (std.meta.activeTag(key.data) != .e_string) continue; + if (key.data.e_string.eql(string, name)) { + prop.value = value; + return; + } + } + + var new_props = expr.data.e_object.properties.listManaged(allocator); + try new_props.append(.{ + .key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty), + .value = value, + }); + + expr.data.e_object.properties = BabyList(G.Property).fromList(new_props); +} + +/// Don't use this if you care about performance. +/// +/// Sets the value of a property to a string, creating it if it doesn't exist. +/// `expr` must be an object. 
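+///
+/// e.g. `try expr.setString(allocator, "name", "bun")` replaces an existing "name"
+/// property's value or appends a new string property.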
+pub fn setString(expr: *Expr, allocator: std.mem.Allocator, name: string, value: string) OOM!void { + bun.assertWithLocation(expr.isObject(), @src()); + for (0..expr.data.e_object.properties.len) |i| { + const prop = &expr.data.e_object.properties.ptr[i]; + const key = prop.key orelse continue; + if (std.meta.activeTag(key.data) != .e_string) continue; + if (key.data.e_string.eql(string, name)) { + prop.value = Expr.init(E.String, .{ .data = value }, logger.Loc.Empty); + return; + } + } + + var new_props = expr.data.e_object.properties.listManaged(allocator); + try new_props.append(.{ + .key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty), + .value = Expr.init(E.String, .{ .data = value }, logger.Loc.Empty), + }); + + expr.data.e_object.properties = BabyList(G.Property).fromList(new_props); +} + +pub fn getObject(expr: *const Expr, name: string) ?Expr { + if (expr.asProperty(name)) |query| { + if (query.expr.isObject()) { + return query.expr; + } + } + return null; +} + +pub fn getString(expr: *const Expr, allocator: std.mem.Allocator, name: string) OOM!?struct { string, logger.Loc } { + if (asProperty(expr, name)) |q| { + if (q.expr.asString(allocator)) |str| { + return .{ + str, + q.expr.loc, + }; + } + } + return null; +} + +pub fn getNumber(expr: *const Expr, name: string) ?struct { f64, logger.Loc } { + if (asProperty(expr, name)) |q| { + if (q.expr.asNumber()) |num| { + return .{ + num, + q.expr.loc, + }; + } + } + return null; +} + +pub fn getStringCloned(expr: *const Expr, allocator: std.mem.Allocator, name: string) OOM!?string { + return if (asProperty(expr, name)) |q| q.expr.asStringCloned(allocator) else null; +} + +pub fn getStringClonedZ(expr: *const Expr, allocator: std.mem.Allocator, name: string) OOM!?stringZ { + return if (asProperty(expr, name)) |q| q.expr.asStringZ(allocator) else null; +} + +pub fn getArray(expr: *const Expr, name: string) ?ArrayIterator { + return if (asProperty(expr, name)) |q| q.expr.asArray() else null; +} + +pub fn getRope(self: *const Expr, rope: *const E.Object.Rope) ?E.Object.RopeQuery { + if (self.get(rope.head.data.e_string.data)) |existing| { + switch (existing.data) { + .e_array => |array| { + if (rope.next) |next| { + if (array.items.last()) |end| { + return end.getRope(next); + } + } + + return E.Object.RopeQuery{ + .expr = existing, + .rope = rope, + }; + }, + .e_object => { + if (rope.next) |next| { + if (existing.getRope(next)) |end| { + return end; + } + } + + return E.Object.RopeQuery{ + .expr = existing, + .rope = rope, + }; + }, + else => return E.Object.RopeQuery{ + .expr = existing, + .rope = rope, + }, + } + } + + return null; +} + +// Making this comptime bloats the binary and doesn't seem to impact runtime performance. 
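+// e.g. asProperty(&expr, "scripts") returns a Query carrying the matching value,
+// its key location, and its index, or null when `expr` is not an object or lacks
+// the key.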
+pub fn asProperty(expr: *const Expr, name: string) ?Query { + if (std.meta.activeTag(expr.data) != .e_object) return null; + const obj = expr.data.e_object; + if (obj.properties.len == 0) return null; + + return obj.asProperty(name); +} + +pub fn asPropertyStringMap(expr: *const Expr, name: string, allocator: std.mem.Allocator) ?*bun.StringArrayHashMap(string) { + if (std.meta.activeTag(expr.data) != .e_object) return null; + const obj_ = expr.data.e_object; + if (obj_.properties.len == 0) return null; + const query = obj_.asProperty(name) orelse return null; + if (query.expr.data != .e_object) return null; + + const obj = query.expr.data.e_object; + var count: usize = 0; + for (obj.properties.slice()) |prop| { + const key = prop.key.?.asString(allocator) orelse continue; + const value = prop.value.?.asString(allocator) orelse continue; + count += @as(usize, @intFromBool(key.len > 0 and value.len > 0)); + } + + if (count == 0) return null; + var map = bun.StringArrayHashMap(string).init(allocator); + map.ensureUnusedCapacity(count) catch return null; + + for (obj.properties.slice()) |prop| { + const key = prop.key.?.asString(allocator) orelse continue; + const value = prop.value.?.asString(allocator) orelse continue; + + if (!(key.len > 0 and value.len > 0)) continue; + + map.putAssumeCapacity(key, value); + } + + const ptr = allocator.create(bun.StringArrayHashMap(string)) catch unreachable; + ptr.* = map; + return ptr; +} + +pub const ArrayIterator = struct { + array: *const E.Array, + index: u32, + + pub fn next(this: *ArrayIterator) ?Expr { + if (this.index >= this.array.items.len) { + return null; + } + defer this.index += 1; + return this.array.items.ptr[this.index]; + } +}; + +pub fn asArray(expr: *const Expr) ?ArrayIterator { + if (std.meta.activeTag(expr.data) != .e_array) return null; + const array = expr.data.e_array; + if (array.items.len == 0) return null; + + return ArrayIterator{ .array = array, .index = 0 }; +} + +pub inline fn asUtf8StringLiteral(expr: *const Expr) ?string { + if (expr.data == .e_string) { + bun.debugAssert(expr.data.e_string.next == null); + return expr.data.e_string.data; + } + return null; +} + +pub inline fn asStringLiteral(expr: *const Expr, allocator: std.mem.Allocator) ?string { + if (std.meta.activeTag(expr.data) != .e_string) return null; + return expr.data.e_string.string(allocator) catch null; +} + +pub inline fn isString(expr: *const Expr) bool { + return switch (expr.data) { + .e_string => true, + else => false, + }; +} + +pub inline fn asString(expr: *const Expr, allocator: std.mem.Allocator) ?string { + switch (expr.data) { + .e_string => |str| return str.string(allocator) catch bun.outOfMemory(), + else => return null, + } +} +pub inline fn asStringHash(expr: *const Expr, allocator: std.mem.Allocator, comptime hash_fn: *const fn (buf: []const u8) callconv(.Inline) u64) OOM!?u64 { + switch (expr.data) { + .e_string => |str| { + if (str.isUTF8()) return hash_fn(str.data); + const utf8_str = try str.string(allocator); + defer allocator.free(utf8_str); + return hash_fn(utf8_str); + }, + else => return null, + } +} + +pub inline fn asStringCloned(expr: *const Expr, allocator: std.mem.Allocator) OOM!?string { + switch (expr.data) { + .e_string => |str| return try str.stringCloned(allocator), + else => return null, + } +} + +pub inline fn asStringZ(expr: *const Expr, allocator: std.mem.Allocator) OOM!?stringZ { + switch (expr.data) { + .e_string => |str| return try str.stringZ(allocator), + else => return null, + } +} + +pub fn asBool( + expr: 
*const Expr, +) ?bool { + if (std.meta.activeTag(expr.data) != .e_boolean) return null; + + return expr.data.e_boolean.value; +} + +pub fn asNumber(expr: *const Expr) ?f64 { + if (expr.data != .e_number) return null; + + return expr.data.e_number.value; +} + +pub const EFlags = enum { none, ts_decorator }; + +const Serializable = struct { + type: Tag, + object: string, + value: Data, + loc: logger.Loc, +}; + +pub fn isMissing(a: *const Expr) bool { + return std.meta.activeTag(a.data) == Expr.Tag.e_missing; +} + +// The goal of this function is to "rotate" the AST if it's possible to use the +// left-associative property of the operator to avoid unnecessary parentheses. +// +// When using this, make absolutely sure that the operator is actually +// associative. For example, the "-" operator is not associative for +// floating-point numbers. +pub fn joinWithLeftAssociativeOp( + comptime op: Op.Code, + a: Expr, + b: Expr, + allocator: std.mem.Allocator, +) Expr { + // "(a, b) op c" => "a, b op c" + switch (a.data) { + .e_binary => |comma| { + if (comma.op == .bin_comma) { + comma.right = joinWithLeftAssociativeOp(op, comma.right, b, allocator); + } + }, + else => {}, + } + + // "a op (b op c)" => "(a op b) op c" + // "a op (b op (c op d))" => "((a op b) op c) op d" + switch (b.data) { + .e_binary => |binary| { + if (binary.op == op) { + return joinWithLeftAssociativeOp( + op, + joinWithLeftAssociativeOp(op, a, binary.left, allocator), + binary.right, + allocator, + ); + } + }, + else => {}, + } + + // "a op b" => "a op b" + // "(a op b) op c" => "(a op b) op c" + return Expr.init(E.Binary, E.Binary{ .op = op, .left = a, .right = b }, a.loc); +} + +pub fn joinWithComma(a: Expr, b: Expr, _: std.mem.Allocator) Expr { + if (a.isMissing()) { + return b; + } + + if (b.isMissing()) { + return a; + } + + return Expr.init(E.Binary, E.Binary{ .op = .bin_comma, .left = a, .right = b }, a.loc); +} + +pub fn joinAllWithComma(all: []Expr, allocator: std.mem.Allocator) Expr { + bun.assert(all.len > 0); + switch (all.len) { + 1 => { + return all[0]; + }, + 2 => { + return Expr.joinWithComma(all[0], all[1], allocator); + }, + else => { + var expr = all[0]; + for (1..all.len) |i| { + expr = Expr.joinWithComma(expr, all[i], allocator); + } + return expr; + }, + } +} + +pub fn joinAllWithCommaCallback(all: []Expr, comptime Context: type, ctx: Context, comptime callback: (fn (ctx: anytype, expr: Expr) ?Expr), allocator: std.mem.Allocator) ?Expr { + switch (all.len) { + 0 => return null, + 1 => { + return callback(ctx, all[0]); + }, + 2 => { + return Expr.joinWithComma( + callback(ctx, all[0]) orelse Expr{ + .data = .{ .e_missing = .{} }, + .loc = all[0].loc, + }, + callback(ctx, all[1]) orelse Expr{ + .data = .{ .e_missing = .{} }, + .loc = all[1].loc, + }, + allocator, + ); + }, + else => { + var i: usize = 1; + var expr = callback(ctx, all[0]) orelse Expr{ + .data = .{ .e_missing = .{} }, + .loc = all[0].loc, + }; + + while (i < all.len) : (i += 1) { + expr = Expr.joinWithComma(expr, callback(ctx, all[i]) orelse Expr{ + .data = .{ .e_missing = .{} }, + .loc = all[i].loc, + }, allocator); + } + + return expr; + }, + } +} + +pub fn jsonStringify(self: *const @This(), writer: anytype) !void { + return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "expr", .value = self.data, .loc = self.loc }); +} + +pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 { + return .{ + left.extractNumericValue() orelse return null, + right.extractNumericValue() orelse return null, 
+ }; +} + +pub var icount: usize = 0; + +// We don't need to dynamically allocate booleans +var true_bool = E.Boolean{ .value = true }; +var false_bool = E.Boolean{ .value = false }; +var bool_values = [_]*E.Boolean{ &false_bool, &true_bool }; + +/// When the lifetime of an Expr.Data's pointer must exist longer than reset() is called, use this function. +/// Be careful to free the memory (or use an allocator that does it for you) +/// Also, prefer Expr.init or Expr.alloc when possible. This will be slower. +pub fn allocate(allocator: std.mem.Allocator, comptime Type: type, st: Type, loc: logger.Loc) Expr { + icount += 1; + Data.Store.assert(); + + switch (Type) { + E.Array => { + return Expr{ + .loc = loc, + .data = Data{ + .e_array = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Class => { + return Expr{ + .loc = loc, + .data = Data{ + .e_class = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Unary => { + return Expr{ + .loc = loc, + .data = Data{ + .e_unary = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Binary => { + return Expr{ + .loc = loc, + .data = Data{ + .e_binary = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.This => { + return Expr{ + .loc = loc, + .data = Data{ + .e_this = st, + }, + }; + }, + E.Boolean => { + return Expr{ + .loc = loc, + .data = Data{ + .e_boolean = st, + }, + }; + }, + E.Super => { + return Expr{ + .loc = loc, + .data = Data{ + .e_super = st, + }, + }; + }, + E.Null => { + return Expr{ + .loc = loc, + .data = Data{ + .e_null = st, + }, + }; + }, + E.Undefined => { + return Expr{ + .loc = loc, + .data = Data{ + .e_undefined = st, + }, + }; + }, + E.New => { + return Expr{ + .loc = loc, + .data = Data{ + .e_new = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.NewTarget => { + return Expr{ + .loc = loc, + .data = Data{ + .e_new_target = st, + }, + }; + }, + E.Function => { + return Expr{ + .loc = loc, + .data = Data{ + .e_function = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.ImportMeta => { + return Expr{ + .loc = loc, + .data = Data{ + .e_import_meta = st, + }, + }; + }, + E.Call => { + return Expr{ + .loc = loc, + .data = Data{ + .e_call = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Dot => { + return Expr{ + .loc = loc, + .data = Data{ + .e_dot = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Index => { + return Expr{ + .loc = loc, + .data = Data{ + .e_index = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Arrow => { + return Expr{ + .loc = loc, + .data = Data{ + .e_arrow = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Identifier => { + return Expr{ + .loc = loc, + .data = Data{ + .e_identifier = E.Identifier{ + .ref = st.ref, + .must_keep_due_to_with_stmt = st.must_keep_due_to_with_stmt, + .can_be_removed_if_unused = st.can_be_removed_if_unused, + .call_can_be_unwrapped_if_unused = 
st.call_can_be_unwrapped_if_unused, + }, + }, + }; + }, + E.ImportIdentifier => { + return Expr{ + .loc = loc, + .data = Data{ + .e_import_identifier = .{ + .ref = st.ref, + .was_originally_identifier = st.was_originally_identifier, + }, + }, + }; + }, + E.CommonJSExportIdentifier => { + return Expr{ + .loc = loc, + .data = Data{ + .e_commonjs_export_identifier = .{ + .ref = st.ref, + }, + }, + }; + }, + + E.PrivateIdentifier => { + return Expr{ + .loc = loc, + .data = Data{ + .e_private_identifier = st, + }, + }; + }, + E.JSXElement => { + return Expr{ + .loc = loc, + .data = Data{ + .e_jsx_element = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Missing => { + return Expr{ .loc = loc, .data = Data{ .e_missing = E.Missing{} } }; + }, + E.Number => { + return Expr{ + .loc = loc, + .data = Data{ + .e_number = st, + }, + }; + }, + E.BigInt => { + return Expr{ + .loc = loc, + .data = Data{ + .e_big_int = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Object => { + return Expr{ + .loc = loc, + .data = Data{ + .e_object = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Spread => { + return Expr{ + .loc = loc, + .data = Data{ + .e_spread = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.String => { + if (comptime Environment.isDebug) { + // Sanity check: assert string is not a null ptr + if (st.data.len > 0 and st.isUTF8()) { + bun.assert(@intFromPtr(st.data.ptr) > 0); + } + } + return Expr{ + .loc = loc, + .data = Data{ + .e_string = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + + E.Template => { + return Expr{ + .loc = loc, + .data = Data{ + .e_template = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.RegExp => { + return Expr{ + .loc = loc, + .data = Data{ + .e_reg_exp = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Await => { + return Expr{ + .loc = loc, + .data = Data{ + .e_await = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.Yield => { + return Expr{ + .loc = loc, + .data = Data{ + .e_yield = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.If => { + return Expr{ + .loc = loc, + .data = Data{ + .e_if = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.RequireResolveString => { + return Expr{ + .loc = loc, + .data = Data{ + .e_require_resolve_string = st, + }, + }; + }, + E.Import => { + return Expr{ + .loc = loc, + .data = Data{ + .e_import = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st; + break :brk item; + }, + }, + }; + }, + E.RequireString => { + return Expr{ + .loc = loc, + .data = Data{ + .e_require_string = st, + }, + }; + }, + *E.String => { + return Expr{ + .loc = loc, + .data = Data{ + .e_string = brk: { + const item = allocator.create(Type) catch unreachable; + item.* = st.*; + break :brk item; + }, + }, + }; + }, + + else => { + @compileError("Invalid type passed to Expr.init: " ++ @typeName(Type)); + }, + } +} + 
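+// Usage note (illustrative): unlike `Expr.init`, which appends into Data.Store,
+// `Expr.allocate(some_allocator, E.Object, .{}, loc)` keeps its payload valid
+// after Data.Store.reset(), at the cost of a per-node heap allocation.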
+pub const Disabler = bun.DebugOnlyDisabler(@This()); + +pub fn init(comptime Type: type, st: Type, loc: logger.Loc) Expr { + icount += 1; + Data.Store.assert(); + + switch (Type) { + E.NameOfSymbol => { + return Expr{ + .loc = loc, + .data = Data{ + .e_name_of_symbol = Data.Store.append(E.NameOfSymbol, st), + }, + }; + }, + E.Array => { + return Expr{ + .loc = loc, + .data = Data{ + .e_array = Data.Store.append(Type, st), + }, + }; + }, + E.Class => { + return Expr{ + .loc = loc, + .data = Data{ + .e_class = Data.Store.append(Type, st), + }, + }; + }, + E.Unary => { + return Expr{ + .loc = loc, + .data = Data{ + .e_unary = Data.Store.append(Type, st), + }, + }; + }, + E.Binary => { + return Expr{ + .loc = loc, + .data = Data{ + .e_binary = Data.Store.append(Type, st), + }, + }; + }, + E.This => { + return Expr{ + .loc = loc, + .data = Data{ + .e_this = st, + }, + }; + }, + E.Boolean => { + return Expr{ + .loc = loc, + .data = Data{ + .e_boolean = st, + }, + }; + }, + E.Super => { + return Expr{ + .loc = loc, + .data = Data{ + .e_super = st, + }, + }; + }, + E.Null => { + return Expr{ + .loc = loc, + .data = Data{ + .e_null = st, + }, + }; + }, + E.Undefined => { + return Expr{ + .loc = loc, + .data = Data{ + .e_undefined = st, + }, + }; + }, + E.New => { + return Expr{ + .loc = loc, + .data = Data{ + .e_new = Data.Store.append(Type, st), + }, + }; + }, + E.NewTarget => { + return Expr{ + .loc = loc, + .data = Data{ + .e_new_target = st, + }, + }; + }, + E.Function => { + return Expr{ + .loc = loc, + .data = Data{ + .e_function = Data.Store.append(Type, st), + }, + }; + }, + E.ImportMeta => { + return Expr{ + .loc = loc, + .data = Data{ + .e_import_meta = st, + }, + }; + }, + E.Call => { + return Expr{ + .loc = loc, + .data = Data{ + .e_call = Data.Store.append(Type, st), + }, + }; + }, + E.Dot => { + return Expr{ + .loc = loc, + .data = Data{ + .e_dot = Data.Store.append(Type, st), + }, + }; + }, + E.Index => { + return Expr{ + .loc = loc, + .data = Data{ + .e_index = Data.Store.append(Type, st), + }, + }; + }, + E.Arrow => { + return Expr{ + .loc = loc, + .data = Data{ + .e_arrow = Data.Store.append(Type, st), + }, + }; + }, + E.Identifier => { + return Expr{ + .loc = loc, + .data = Data{ + .e_identifier = E.Identifier{ + .ref = st.ref, + .must_keep_due_to_with_stmt = st.must_keep_due_to_with_stmt, + .can_be_removed_if_unused = st.can_be_removed_if_unused, + .call_can_be_unwrapped_if_unused = st.call_can_be_unwrapped_if_unused, + }, + }, + }; + }, + E.ImportIdentifier => { + return Expr{ + .loc = loc, + .data = Data{ + .e_import_identifier = .{ + .ref = st.ref, + .was_originally_identifier = st.was_originally_identifier, + }, + }, + }; + }, + E.CommonJSExportIdentifier => { + return Expr{ + .loc = loc, + .data = Data{ + .e_commonjs_export_identifier = .{ + .ref = st.ref, + .base = st.base, + }, + }, + }; + }, + E.PrivateIdentifier => { + return Expr{ + .loc = loc, + .data = Data{ + .e_private_identifier = st, + }, + }; + }, + E.JSXElement => { + return Expr{ + .loc = loc, + .data = Data{ + .e_jsx_element = Data.Store.append(Type, st), + }, + }; + }, + E.Missing => { + return Expr{ .loc = loc, .data = Data{ .e_missing = E.Missing{} } }; + }, + E.Number => { + return Expr{ + .loc = loc, + .data = Data{ + .e_number = st, + }, + }; + }, + E.BigInt => { + return Expr{ + .loc = loc, + .data = Data{ + .e_big_int = Data.Store.append(Type, st), + }, + }; + }, + E.Object => { + return Expr{ + .loc = loc, + .data = Data{ + .e_object = Data.Store.append(Type, st), + }, + }; + }, + E.Spread => { + 
return Expr{ + .loc = loc, + .data = Data{ + .e_spread = Data.Store.append(Type, st), + }, + }; + }, + E.String => { + if (comptime Environment.isDebug) { + // Sanity check: assert string is not a null ptr + if (st.data.len > 0 and st.isUTF8()) { + bun.assert(@intFromPtr(st.data.ptr) > 0); + } + } + return Expr{ + .loc = loc, + .data = Data{ + .e_string = Data.Store.append(Type, st), + }, + }; + }, + + E.Template => { + return Expr{ + .loc = loc, + .data = Data{ + .e_template = Data.Store.append(Type, st), + }, + }; + }, + E.RegExp => { + return Expr{ + .loc = loc, + .data = Data{ + .e_reg_exp = Data.Store.append(Type, st), + }, + }; + }, + E.Await => { + return Expr{ + .loc = loc, + .data = Data{ + .e_await = Data.Store.append(Type, st), + }, + }; + }, + E.Yield => { + return Expr{ + .loc = loc, + .data = Data{ + .e_yield = Data.Store.append(Type, st), + }, + }; + }, + E.If => { + return Expr{ + .loc = loc, + .data = Data{ + .e_if = Data.Store.append(Type, st), + }, + }; + }, + E.RequireResolveString => { + return Expr{ + .loc = loc, + .data = Data{ + .e_require_resolve_string = st, + }, + }; + }, + E.Import => { + return Expr{ + .loc = loc, + .data = Data{ + .e_import = Data.Store.append(Type, st), + }, + }; + }, + E.RequireString => { + return Expr{ + .loc = loc, + .data = Data{ + .e_require_string = st, + }, + }; + }, + *E.String => { + return Expr{ + .loc = loc, + .data = Data{ + .e_string = Data.Store.append(@TypeOf(st.*), st.*), + }, + }; + }, + E.InlinedEnum => return .{ .loc = loc, .data = .{ + .e_inlined_enum = Data.Store.append(@TypeOf(st), st), + } }, + + else => { + @compileError("Invalid type passed to Expr.init: " ++ @typeName(Type)); + }, + } +} + +pub fn isPrimitiveLiteral(this: Expr) bool { + return @as(Tag, this.data).isPrimitiveLiteral(); +} + +pub fn isRef(this: Expr, ref: Ref) bool { + return switch (this.data) { + .e_import_identifier => |import_identifier| import_identifier.ref.eql(ref), + .e_identifier => |ident| ident.ref.eql(ref), + else => false, + }; +} + +pub const Tag = enum { + e_array, + e_unary, + e_binary, + e_class, + e_new, + e_function, + e_call, + e_dot, + e_index, + e_arrow, + e_jsx_element, + e_object, + e_spread, + e_template, + e_reg_exp, + e_await, + e_yield, + e_if, + e_import, + e_identifier, + e_import_identifier, + e_private_identifier, + e_commonjs_export_identifier, + e_boolean, + e_number, + e_big_int, + e_string, + e_require_string, + e_require_resolve_string, + e_require_call_target, + e_require_resolve_call_target, + e_missing, + e_this, + e_super, + e_null, + e_undefined, + e_new_target, + e_import_meta, + e_import_meta_main, + e_require_main, + e_special, + e_inlined_enum, + e_name_of_symbol, + + // object, regex and array may have had side effects + pub fn isPrimitiveLiteral(tag: Tag) bool { + return switch (tag) { + .e_null, .e_undefined, .e_string, .e_boolean, .e_number, .e_big_int => true, + else => false, + }; + } + + pub fn typeof(tag: Tag) ?string { + return switch (tag) { + .e_array, .e_object, .e_null, .e_reg_exp => "object", + .e_undefined => "undefined", + .e_boolean => "boolean", + .e_number => "number", + .e_big_int => "bigint", + .e_string => "string", + .e_class, .e_function, .e_arrow => "function", + else => null, + }; + } + + pub fn format(tag: Tag, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + try switch (tag) { + .e_string => writer.writeAll("string"), + .e_array => writer.writeAll("array"), + .e_unary => writer.writeAll("unary"), + .e_binary => writer.writeAll("binary"), + .e_boolean 
=> writer.writeAll("boolean"),
+            .e_super => writer.writeAll("super"),
+            .e_null => writer.writeAll("null"),
+            .e_undefined => writer.writeAll("undefined"),
+            .e_new => writer.writeAll("new"),
+            .e_function => writer.writeAll("function"),
+            .e_new_target => writer.writeAll("new target"),
+            .e_import_meta => writer.writeAll("import.meta"),
+            .e_call => writer.writeAll("call"),
+            .e_dot => writer.writeAll("dot"),
+            .e_index => writer.writeAll("index"),
+            .e_arrow => writer.writeAll("arrow"),
+            .e_identifier => writer.writeAll("identifier"),
+            .e_import_identifier => writer.writeAll("import identifier"),
+            .e_private_identifier => writer.writeAll("#privateIdentifier"),
+            .e_jsx_element => writer.writeAll(""),
+            .e_missing => writer.writeAll(""),
+            .e_number => writer.writeAll("number"),
+            .e_big_int => writer.writeAll("BigInt"),
+            .e_object => writer.writeAll("object"),
+            .e_spread => writer.writeAll("..."),
+            .e_template => writer.writeAll("template"),
+            .e_reg_exp => writer.writeAll("regexp"),
+            .e_await => writer.writeAll("await"),
+            .e_yield => writer.writeAll("yield"),
+            .e_if => writer.writeAll("if"),
+            .e_require_resolve_string => writer.writeAll("require_or_require_resolve"),
+            .e_import => writer.writeAll("import"),
+            .e_this => writer.writeAll("this"),
+            .e_class => writer.writeAll("class"),
+            .e_require_string => writer.writeAll("require"),
+            else => writer.writeAll(@tagName(tag)),
+        };
+    }
+
+    pub fn jsonStringify(self: @This(), writer: anytype) !void {
+        return try writer.write(@tagName(self));
+    }
+
+    pub fn isArray(self: Tag) bool {
+        return self == .e_array;
+    }
+    pub fn isUnary(self: Tag) bool {
+        return self == .e_unary;
+    }
+    pub fn isBinary(self: Tag) bool {
+        return self == .e_binary;
+    }
+    pub fn isThis(self: Tag) bool {
+        return self == .e_this;
+    }
+    pub fn isClass(self: Tag) bool {
+        return self == .e_class;
+    }
+    pub fn isBoolean(self: Tag) bool {
+        return self == .e_boolean;
+    }
+    pub fn isSuper(self: Tag) bool {
+        return self == .e_super;
+    }
+    pub fn isNull(self: Tag) bool {
+        return self == .e_null;
+    }
+    pub fn isUndefined(self: Tag) bool {
+        return self == .e_undefined;
+    }
+    pub fn isNew(self: Tag) bool {
+        return self == .e_new;
+    }
+    pub fn isNewTarget(self: Tag) bool {
+        return self == .e_new_target;
+    }
+    pub fn isFunction(self: Tag) bool {
+        return self == .e_function;
+    }
+    pub fn isImportMeta(self: Tag) bool {
+        return self == .e_import_meta;
+    }
+    pub fn isCall(self: Tag) bool {
+        return self == .e_call;
+    }
+    pub fn isDot(self: Tag) bool {
+        return self == .e_dot;
+    }
+    pub fn isIndex(self: Tag) bool {
+        return self == .e_index;
+    }
+    pub fn isArrow(self: Tag) bool {
+        return self == .e_arrow;
+    }
+    pub fn isIdentifier(self: Tag) bool {
+        return self == .e_identifier;
+    }
+    pub fn isImportIdentifier(self: Tag) bool {
+        return self == .e_import_identifier;
+    }
+    pub fn isPrivateIdentifier(self: Tag) bool {
+        return self == .e_private_identifier;
+    }
+    pub fn isJsxElement(self: Tag) bool {
+        return self == .e_jsx_element;
+    }
+    pub fn isMissing(self: Tag) bool {
+        return self == .e_missing;
+    }
+    pub fn isNumber(self: Tag) bool {
+        return self == .e_number;
+    }
+    pub fn isBigInt(self: Tag) bool {
+        return self == .e_big_int;
+    }
+    pub fn isObject(self: Tag) bool {
+        return self == .e_object;
+    }
+    pub fn isSpread(self: Tag) bool {
+        return self == .e_spread;
+    }
+    pub fn isString(self: Tag) bool {
+        return self == .e_string;
+    }
+    pub fn isTemplate(self: Tag) bool {
+        return self == .e_template;
+    }
+    pub fn isRegExp(self: Tag) bool {
+        return self == .e_reg_exp;
+    }
+    pub fn isAwait(self: Tag) bool {
+        return self == .e_await;
+    }
+    pub fn isYield(self: Tag) bool {
+        return self == .e_yield;
+    }
+    pub fn isIf(self: Tag) bool {
+        return self == .e_if;
+    }
+    pub fn isRequireResolveString(self: Tag) bool {
+        return self == .e_require_resolve_string;
+    }
+    pub fn isImport(self: Tag) bool {
+        return self == .e_import;
+    }
+};
+
+pub fn isBoolean(a: Expr) bool {
+    switch (a.data) {
+        .e_boolean => {
+            return true;
+        },
+
+        .e_if => |ex| {
+            return isBoolean(ex.yes) and isBoolean(ex.no);
+        },
+        .e_unary => |ex| {
+            return ex.op == .un_not or ex.op == .un_delete;
+        },
+        .e_binary => |ex| {
+            switch (ex.op) {
+                .bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => {
+                    return true;
+                },
+                .bin_logical_or, .bin_logical_and => {
+                    return isBoolean(ex.left) and isBoolean(ex.right);
+                },
+                else => {},
+            }
+        },
+        else => {},
+    }
+
+    return false;
+}
+
+pub fn assign(a: Expr, b: Expr) Expr {
+    return init(E.Binary, E.Binary{
+        .op = .bin_assign,
+        .left = a,
+        .right = b,
+    }, a.loc);
+}
+pub inline fn at(expr: Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr {
+    return init(Type, t, expr.loc);
+}
+
+// Wraps the provided expression in the "!" prefix operator. 
The expression
+// will potentially be simplified to avoid generating unnecessary extra "!"
+// operators. For example, calling this with "!!x" will return "!x" instead
+// of returning "!!!x".
+pub fn not(expr: Expr, allocator: std.mem.Allocator) Expr {
+    return maybeSimplifyNot(
+        expr,
+        allocator,
+    ) orelse Expr.init(
+        E.Unary,
+        E.Unary{
+            .op = .un_not,
+            .value = expr,
+        },
+        expr.loc,
+    );
+}
+
+pub fn hasValueForThisInCall(expr: Expr) bool {
+    return switch (expr.data) {
+        .e_dot, .e_index => true,
+        else => false,
+    };
+}
+
+/// The given "expr" argument should be the operand of a "!" prefix operator
+/// (i.e. the "x" in "!x"). This returns a simplified expression for the
+/// whole operator (i.e. the "!x") if it can be simplified, or null if not.
+/// It's separate from "not()" above to avoid allocation on failure in case
+/// that is undesired.
+pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr {
+    switch (expr.data) {
+        .e_null, .e_undefined => {
+            return expr.at(E.Boolean, E.Boolean{ .value = true }, allocator);
+        },
+        .e_boolean => |b| {
+            // "!true" => "false", "!false" => "true"
+            return expr.at(E.Boolean, E.Boolean{ .value = !b.value }, allocator);
+        },
+        .e_number => |n| {
+            return expr.at(E.Boolean, E.Boolean{ .value = (n.value == 0 or std.math.isNan(n.value)) }, allocator);
+        },
+        .e_big_int => |b| {
+            return expr.at(E.Boolean, E.Boolean{ .value = strings.eqlComptime(b.value, "0") }, allocator);
+        },
+        .e_function,
+        .e_arrow,
+        .e_reg_exp,
+        => {
+            return expr.at(E.Boolean, E.Boolean{ .value = false }, allocator);
+        },
+        // "!!!a" => "!a"
+        .e_unary => |un| {
+            if (un.op == Op.Code.un_not and knownPrimitive(un.value) == .boolean) {
+                return un.value;
+            }
+        },
+        .e_binary => |ex| {
+            // TODO: evaluate whether or not it is safe to do this mutation since it's modifying in-place.
+            // Make sure that these transformations are all safe for special values.
+            // For example, "!(a < b)" is not the same as "a >= b" if a and/or b are
+            // NaN (or undefined, or null, or possibly other problem cases too). 
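+            // Concretely, assuming `a = NaN`: "!(a < 1)" evaluates to true while
+            // "a >= 1" evaluates to false, so relational operators are left
+            // untouched; only the (in)equality and comma operators are rewritten.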
+ switch (ex.op) { + Op.Code.bin_loose_eq => { + // "!(a == b)" => "a != b" + ex.op = .bin_loose_ne; + return expr; + }, + Op.Code.bin_loose_ne => { + // "!(a != b)" => "a == b" + ex.op = .bin_loose_eq; + return expr; + }, + Op.Code.bin_strict_eq => { + // "!(a === b)" => "a !== b" + ex.op = .bin_strict_ne; + return expr; + }, + Op.Code.bin_strict_ne => { + // "!(a !== b)" => "a === b" + ex.op = .bin_strict_eq; + return expr; + }, + Op.Code.bin_comma => { + // "!(a, b)" => "a, !b" + ex.right = ex.right.not(allocator); + return expr; + }, + else => {}, + } + }, + .e_inlined_enum => |inlined| { + return maybeSimplifyNot(inlined.value, allocator); + }, + + else => {}, + } + + return null; +} + +pub fn toStringExprWithoutSideEffects(expr: Expr, allocator: std.mem.Allocator) ?Expr { + const unwrapped = expr.unwrapInlined(); + const slice = switch (unwrapped.data) { + .e_null => "null", + .e_string => return expr, + .e_undefined => "undefined", + .e_boolean => |data| if (data.value) "true" else "false", + .e_big_int => |bigint| bigint.value, + .e_number => |num| if (num.toString(allocator)) |str| + str + else + null, + .e_reg_exp => |regexp| regexp.value, + .e_dot => |dot| @as(?[]const u8, brk: { + // This is dumb but some JavaScript obfuscators use this to generate string literals + if (bun.strings.eqlComptime(dot.name, "constructor")) { + break :brk switch (dot.target.data) { + .e_string => "function String() { [native code] }", + .e_reg_exp => "function RegExp() { [native code] }", + else => null, + }; + } + break :brk null; + }), + else => null, + }; + return if (slice) |s| Expr.init(E.String, E.String.init(s), expr.loc) else null; +} + +pub fn isOptionalChain(self: *const @This()) bool { + return switch (self.data) { + .e_dot => self.data.e_dot.optional_chain != null, + .e_index => self.data.e_index.optional_chain != null, + .e_call => self.data.e_call.optional_chain != null, + else => false, + }; +} + +pub inline fn knownPrimitive(self: @This()) PrimitiveType { + return self.data.knownPrimitive(); +} + +pub const PrimitiveType = enum { + unknown, + mixed, + null, + undefined, + boolean, + number, + string, + bigint, + + pub const static = std.enums.EnumSet(PrimitiveType).init(.{ + .mixed = true, + .null = true, + .undefined = true, + .boolean = true, + .number = true, + .string = true, + // for our purposes, bigint is dynamic + // it is technically static though + // .@"bigint" = true, + }); + + pub inline fn isStatic(this: PrimitiveType) bool { + return static.contains(this); + } + + pub fn merge(left_known: PrimitiveType, right_known: PrimitiveType) PrimitiveType { + if (right_known == .unknown or left_known == .unknown) + return .unknown; + + return if (left_known == right_known) + left_known + else + .mixed; + } +}; + +pub const Data = union(Tag) { + e_array: *E.Array, + e_unary: *E.Unary, + e_binary: *E.Binary, + e_class: *E.Class, + + e_new: *E.New, + e_function: *E.Function, + e_call: *E.Call, + e_dot: *E.Dot, + e_index: *E.Index, + e_arrow: *E.Arrow, + + e_jsx_element: *E.JSXElement, + e_object: *E.Object, + e_spread: *E.Spread, + e_template: *E.Template, + e_reg_exp: *E.RegExp, + e_await: *E.Await, + e_yield: *E.Yield, + e_if: *E.If, + e_import: *E.Import, + + e_identifier: E.Identifier, + e_import_identifier: E.ImportIdentifier, + e_private_identifier: E.PrivateIdentifier, + e_commonjs_export_identifier: E.CommonJSExportIdentifier, + + e_boolean: E.Boolean, + e_number: E.Number, + e_big_int: *E.BigInt, + e_string: *E.String, + + e_require_string: E.RequireString, + 
e_require_resolve_string: E.RequireResolveString, + e_require_call_target, + e_require_resolve_call_target, + + e_missing: E.Missing, + e_this: E.This, + e_super: E.Super, + e_null: E.Null, + e_undefined: E.Undefined, + e_new_target: E.NewTarget, + e_import_meta: E.ImportMeta, + + e_import_meta_main: E.ImportMetaMain, + e_require_main, + + /// Covers some exotic AST node types under one namespace, since the + /// places this is found it all follows similar handling. + e_special: E.Special, + + e_inlined_enum: *E.InlinedEnum, + + e_name_of_symbol: *E.NameOfSymbol, + + comptime { + bun.assert_eql(@sizeOf(Data), 24); // Do not increase the size of Expr + } + + pub fn as(data: Data, comptime tag: Tag) ?@FieldType(Data, @tagName(tag)) { + return if (data == tag) @field(data, @tagName(tag)) else null; + } + + pub fn clone(this: Expr.Data, allocator: std.mem.Allocator) !Data { + return switch (this) { + .e_array => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_array))); + item.* = el.*; + return .{ .e_array = item }; + }, + .e_unary => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_unary))); + item.* = el.*; + return .{ .e_unary = item }; + }, + .e_binary => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_binary))); + item.* = el.*; + return .{ .e_binary = item }; + }, + .e_class => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_class))); + item.* = el.*; + return .{ .e_class = item }; + }, + .e_new => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_new))); + item.* = el.*; + return .{ .e_new = item }; + }, + .e_function => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_function))); + item.* = el.*; + return .{ .e_function = item }; + }, + .e_call => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_call))); + item.* = el.*; + return .{ .e_call = item }; + }, + .e_dot => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_dot))); + item.* = el.*; + return .{ .e_dot = item }; + }, + .e_index => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_index))); + item.* = el.*; + return .{ .e_index = item }; + }, + .e_arrow => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_arrow))); + item.* = el.*; + return .{ .e_arrow = item }; + }, + .e_jsx_element => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_jsx_element))); + item.* = el.*; + return .{ .e_jsx_element = item }; + }, + .e_object => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_object))); + item.* = el.*; + return .{ .e_object = item }; + }, + .e_spread => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_spread))); + item.* = el.*; + return .{ .e_spread = item }; + }, + .e_template => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_template))); + item.* = el.*; + return .{ .e_template = item }; + }, + .e_reg_exp => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_reg_exp))); + item.* = el.*; + return .{ .e_reg_exp = item }; + }, + .e_await => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_await))); + item.* = el.*; + return .{ .e_await = item }; + }, + .e_yield => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_yield))); + item.* = el.*; + return .{ .e_yield = item }; + }, + .e_if => |el| { + const item = try 
allocator.create(std.meta.Child(@TypeOf(this.e_if))); + item.* = el.*; + return .{ .e_if = item }; + }, + .e_import => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_import))); + item.* = el.*; + return .{ .e_import = item }; + }, + .e_big_int => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_big_int))); + item.* = el.*; + return .{ .e_big_int = item }; + }, + .e_string => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_string))); + item.* = el.*; + return .{ .e_string = item }; + }, + .e_inlined_enum => |el| { + const item = try allocator.create(std.meta.Child(@TypeOf(this.e_inlined_enum))); + item.* = el.*; + return .{ .e_inlined_enum = item }; + }, + else => this, + }; + } + + pub fn deepClone(this: Expr.Data, allocator: std.mem.Allocator) !Data { + return switch (this) { + .e_array => |el| { + const items = try el.items.deepClone(allocator); + const item = bun.create(allocator, E.Array, .{ + .items = items, + .comma_after_spread = el.comma_after_spread, + .was_originally_macro = el.was_originally_macro, + .is_single_line = el.is_single_line, + .is_parenthesized = el.is_parenthesized, + .close_bracket_loc = el.close_bracket_loc, + }); + return .{ .e_array = item }; + }, + .e_unary => |el| { + const item = bun.create(allocator, E.Unary, .{ + .op = el.op, + .value = try el.value.deepClone(allocator), + }); + return .{ .e_unary = item }; + }, + .e_binary => |el| { + const item = bun.create(allocator, E.Binary, .{ + .op = el.op, + .left = try el.left.deepClone(allocator), + .right = try el.right.deepClone(allocator), + }); + return .{ .e_binary = item }; + }, + .e_class => |el| { + const properties = try allocator.alloc(G.Property, el.properties.len); + for (el.properties, 0..) 
|prop, i| { + properties[i] = try prop.deepClone(allocator); + } + + const item = bun.create(allocator, E.Class, .{ + .class_keyword = el.class_keyword, + .ts_decorators = try el.ts_decorators.deepClone(allocator), + .class_name = el.class_name, + .extends = if (el.extends) |e| try e.deepClone(allocator) else null, + .body_loc = el.body_loc, + .close_brace_loc = el.close_brace_loc, + .properties = properties, + .has_decorators = el.has_decorators, + }); + return .{ .e_class = item }; + }, + .e_new => |el| { + const item = bun.create(allocator, E.New, .{ + .target = try el.target.deepClone(allocator), + .args = try el.args.deepClone(allocator), + .can_be_unwrapped_if_unused = el.can_be_unwrapped_if_unused, + .close_parens_loc = el.close_parens_loc, + }); + + return .{ .e_new = item }; + }, + .e_function => |el| { + const item = bun.create(allocator, E.Function, .{ + .func = try el.func.deepClone(allocator), + }); + return .{ .e_function = item }; + }, + .e_call => |el| { + const item = bun.create(allocator, E.Call, .{ + .target = try el.target.deepClone(allocator), + .args = try el.args.deepClone(allocator), + .optional_chain = el.optional_chain, + .is_direct_eval = el.is_direct_eval, + .close_paren_loc = el.close_paren_loc, + .can_be_unwrapped_if_unused = el.can_be_unwrapped_if_unused, + .was_jsx_element = el.was_jsx_element, + }); + return .{ .e_call = item }; + }, + .e_dot => |el| { + const item = bun.create(allocator, E.Dot, .{ + .target = try el.target.deepClone(allocator), + .name = el.name, + .name_loc = el.name_loc, + .optional_chain = el.optional_chain, + .can_be_removed_if_unused = el.can_be_removed_if_unused, + .call_can_be_unwrapped_if_unused = el.call_can_be_unwrapped_if_unused, + }); + return .{ .e_dot = item }; + }, + .e_index => |el| { + const item = bun.create(allocator, E.Index, .{ + .target = try el.target.deepClone(allocator), + .index = try el.index.deepClone(allocator), + .optional_chain = el.optional_chain, + }); + return .{ .e_index = item }; + }, + .e_arrow => |el| { + const args = try allocator.alloc(G.Arg, el.args.len); + for (0..args.len) |i| { + args[i] = try el.args[i].deepClone(allocator); + } + const item = bun.create(allocator, E.Arrow, .{ + .args = args, + .body = el.body, + .is_async = el.is_async, + .has_rest_arg = el.has_rest_arg, + .prefer_expr = el.prefer_expr, + }); + + return .{ .e_arrow = item }; + }, + .e_jsx_element => |el| { + const item = bun.create(allocator, E.JSXElement, .{ + .tag = if (el.tag) |tag| try tag.deepClone(allocator) else null, + .properties = try el.properties.deepClone(allocator), + .children = try el.children.deepClone(allocator), + .key_prop_index = el.key_prop_index, + .flags = el.flags, + .close_tag_loc = el.close_tag_loc, + }); + return .{ .e_jsx_element = item }; + }, + .e_object => |el| { + const item = bun.create(allocator, E.Object, .{ + .properties = try el.properties.deepClone(allocator), + .comma_after_spread = el.comma_after_spread, + .is_single_line = el.is_single_line, + .is_parenthesized = el.is_parenthesized, + .was_originally_macro = el.was_originally_macro, + .close_brace_loc = el.close_brace_loc, + }); + return .{ .e_object = item }; + }, + .e_spread => |el| { + const item = bun.create(allocator, E.Spread, .{ + .value = try el.value.deepClone(allocator), + }); + return .{ .e_spread = item }; + }, + .e_template => |el| { + const item = bun.create(allocator, E.Template, .{ + .tag = if (el.tag) |tag| try tag.deepClone(allocator) else null, + .parts = el.parts, + .head = el.head, + }); + return .{ .e_template = 
item }; + }, + .e_reg_exp => |el| { + const item = bun.create(allocator, E.RegExp, .{ + .value = el.value, + .flags_offset = el.flags_offset, + }); + return .{ .e_reg_exp = item }; + }, + .e_await => |el| { + const item = bun.create(allocator, E.Await, .{ + .value = try el.value.deepClone(allocator), + }); + return .{ .e_await = item }; + }, + .e_yield => |el| { + const item = bun.create(allocator, E.Yield, .{ + .value = if (el.value) |value| try value.deepClone(allocator) else null, + .is_star = el.is_star, + }); + return .{ .e_yield = item }; + }, + .e_if => |el| { + const item = bun.create(allocator, E.If, .{ + .test_ = try el.test_.deepClone(allocator), + .yes = try el.yes.deepClone(allocator), + .no = try el.no.deepClone(allocator), + }); + return .{ .e_if = item }; + }, + .e_import => |el| { + const item = bun.create(allocator, E.Import, .{ + .expr = try el.expr.deepClone(allocator), + .options = try el.options.deepClone(allocator), + .import_record_index = el.import_record_index, + }); + return .{ .e_import = item }; + }, + .e_big_int => |el| { + const item = bun.create(allocator, E.BigInt, .{ + .value = el.value, + }); + return .{ .e_big_int = item }; + }, + .e_string => |el| { + const item = bun.create(allocator, E.String, .{ + .data = el.data, + .prefer_template = el.prefer_template, + .next = el.next, + .end = el.end, + .rope_len = el.rope_len, + .is_utf16 = el.is_utf16, + }); + return .{ .e_string = item }; + }, + .e_inlined_enum => |el| { + const item = bun.create(allocator, E.InlinedEnum, .{ + .value = el.value, + .comment = el.comment, + }); + return .{ .e_inlined_enum = item }; + }, + else => this, + }; + } + + /// `hasher` should be something with 'pub fn update([]const u8) void'; + /// symbol table is passed to serialize `Ref` as an identifier names instead of a nondeterministic numbers + pub fn writeToHasher(this: Expr.Data, hasher: anytype, symbol_table: anytype) void { + writeAnyToHasher(hasher, std.meta.activeTag(this)); + switch (this) { + .e_name_of_symbol => |e| { + const symbol = e.ref.getSymbol(symbol_table); + hasher.update(symbol.original_name); + }, + .e_array => |e| { + writeAnyToHasher(hasher, .{ + e.is_single_line, + e.is_parenthesized, + e.was_originally_macro, + e.items.len, + }); + for (e.items.slice()) |item| { + item.data.writeToHasher(hasher, symbol_table); + } + }, + .e_unary => |e| { + writeAnyToHasher(hasher, .{e.op}); + e.value.data.writeToHasher(hasher, symbol_table); + }, + .e_binary => |e| { + writeAnyToHasher(hasher, .{e.op}); + e.left.data.writeToHasher(hasher, symbol_table); + e.right.data.writeToHasher(hasher, symbol_table); + }, + .e_class => |e| { + _ = e; // autofix + }, + inline .e_new, .e_call => |e| { + _ = e; // autofix + }, + .e_function => |e| { + _ = e; // autofix + }, + .e_dot => |e| { + writeAnyToHasher(hasher, .{ e.optional_chain, e.name.len }); + e.target.data.writeToHasher(hasher, symbol_table); + hasher.update(e.name); + }, + .e_index => |e| { + writeAnyToHasher(hasher, .{e.optional_chain}); + e.target.data.writeToHasher(hasher, symbol_table); + e.index.data.writeToHasher(hasher, symbol_table); + }, + .e_arrow => |e| { + _ = e; // autofix + }, + .e_jsx_element => |e| { + _ = e; // autofix + }, + .e_object => |e| { + _ = e; // autofix + }, + inline .e_spread, .e_await => |e| { + e.value.data.writeToHasher(hasher, symbol_table); + }, + inline .e_yield => |e| { + writeAnyToHasher(hasher, .{ e.is_star, e.value }); + if (e.value) |value| + value.data.writeToHasher(hasher, symbol_table); + }, + .e_template => |e| { + _ = e; // 
autofix
+            },
+            .e_if => |e| {
+                _ = e; // autofix
+            },
+            .e_import => |e| {
+                _ = e; // autofix
+
+            },
+            inline .e_identifier,
+            .e_import_identifier,
+            .e_private_identifier,
+            .e_commonjs_export_identifier,
+            => |e| {
+                const symbol = e.ref.getSymbol(symbol_table);
+                hasher.update(symbol.original_name);
+            },
+            inline .e_boolean, .e_number => |e| {
+                writeAnyToHasher(hasher, e.value);
+            },
+            inline .e_big_int, .e_reg_exp => |e| {
+                hasher.update(e.value);
+            },
+
+            .e_string => |e| {
+                // Walk the rope and hash every segment, each followed by a NUL separator.
+                var next: ?*E.String = e;
+                while (next) |current| {
+                    if (current.isUTF8()) {
+                        hasher.update(current.data);
+                    } else {
+                        hasher.update(bun.reinterpretSlice(u8, current.slice16()));
+                    }
+                    next = current.next;
+                    hasher.update("\x00");
+                }
+            },
+            inline .e_require_string, .e_require_resolve_string => |e| {
+                writeAnyToHasher(hasher, e.import_record_index); // preferably, I'd like to write the filepath
+            },
+
+            .e_import_meta_main => |e| {
+                writeAnyToHasher(hasher, e.inverted);
+            },
+            .e_inlined_enum => |e| {
+                // pretend there is no comment
+                e.value.data.writeToHasher(hasher, symbol_table);
+            },
+
+            // no data
+            .e_require_call_target,
+            .e_require_resolve_call_target,
+            .e_missing,
+            .e_this,
+            .e_super,
+            .e_null,
+            .e_undefined,
+            .e_new_target,
+            .e_require_main,
+            .e_import_meta,
+            .e_special,
+            => {},
+        }
+    }
+
+    /// "const values" here refers to expressions that can participate in constant
+    /// inlining, as they have no side effects on instantiation, and there would be
+    /// no observable difference if duplicated. This is a subset of canBeMoved()
+    pub fn canBeConstValue(this: Expr.Data) bool {
+        return switch (this) {
+            .e_number,
+            .e_boolean,
+            .e_null,
+            .e_undefined,
+            .e_inlined_enum,
+            => true,
+            .e_string => |str| str.next == null,
+            .e_array => |array| array.was_originally_macro,
+            .e_object => |object| object.was_originally_macro,
+            else => false,
+        };
+    }
+
+    /// Expressions that can be moved are those that do not have side
+    /// effects on their own. This is used to determine what can be moved
+    /// outside of a module wrapper (__esm/__commonJS).
+    pub fn canBeMoved(data: Expr.Data) bool {
+        return switch (data) {
+            // TODO: identifiers can be removed if unused, however code that
+            // moves expressions around sometimes does so incorrectly when
+            // doing destructures. 
test case: https://github.com/oven-sh/bun/issues/14027 + // .e_identifier => |id| id.can_be_removed_if_unused, + + .e_class => |class| class.canBeMoved(), + + .e_arrow, + .e_function, + + .e_number, + .e_boolean, + .e_null, + .e_undefined, + // .e_reg_exp, + .e_big_int, + .e_string, + .e_inlined_enum, + .e_import_meta, + => true, + + .e_template => |template| template.tag == null and template.parts.len == 0, + + .e_array => |array| array.was_originally_macro, + .e_object => |object| object.was_originally_macro, + + // TODO: experiment with allowing some e_binary, e_unary, e_if as movable + + else => false, + }; + } + + pub fn knownPrimitive(data: Expr.Data) PrimitiveType { + return switch (data) { + .e_big_int => .bigint, + .e_boolean => .boolean, + .e_null => .null, + .e_number => .number, + .e_string => .string, + .e_undefined => .undefined, + .e_template => if (data.e_template.tag == null) PrimitiveType.string else PrimitiveType.unknown, + .e_if => mergeKnownPrimitive(data.e_if.yes.data, data.e_if.no.data), + .e_binary => |binary| brk: { + switch (binary.op) { + .bin_strict_eq, + .bin_strict_ne, + .bin_loose_eq, + .bin_loose_ne, + .bin_lt, + .bin_gt, + .bin_le, + .bin_ge, + .bin_instanceof, + .bin_in, + => break :brk PrimitiveType.boolean, + .bin_logical_or, .bin_logical_and => break :brk binary.left.data.mergeKnownPrimitive(binary.right.data), + + .bin_nullish_coalescing => { + const left = binary.left.data.knownPrimitive(); + const right = binary.right.data.knownPrimitive(); + if (left == .null or left == .undefined) + break :brk right; + + if (left != .unknown) { + if (left != .mixed) + break :brk left; // Definitely not null or undefined + + if (right != .unknown) + break :brk PrimitiveType.mixed; // Definitely some kind of primitive + } + }, + + .bin_add => { + const left = binary.left.data.knownPrimitive(); + const right = binary.right.data.knownPrimitive(); + + if (left == .string or right == .string) + break :brk PrimitiveType.string; + + if (left == .bigint or right == .bigint) + break :brk PrimitiveType.bigint; + + if (switch (left) { + .unknown, .mixed, .bigint => false, + else => true, + } and switch (right) { + .unknown, .mixed, .bigint => false, + else => true, + }) + break :brk PrimitiveType.number; + + break :brk PrimitiveType.mixed; // Can be number or bigint or string (or an exception) + }, + + .bin_sub, + .bin_sub_assign, + .bin_mul, + .bin_mul_assign, + .bin_div, + .bin_div_assign, + .bin_rem, + .bin_rem_assign, + .bin_pow, + .bin_pow_assign, + .bin_bitwise_and, + .bin_bitwise_and_assign, + .bin_bitwise_or, + .bin_bitwise_or_assign, + .bin_bitwise_xor, + .bin_bitwise_xor_assign, + .bin_shl, + .bin_shl_assign, + .bin_shr, + .bin_shr_assign, + .bin_u_shr, + .bin_u_shr_assign, + => break :brk PrimitiveType.mixed, // Can be number or bigint (or an exception) + + .bin_assign, + .bin_comma, + => break :brk binary.right.data.knownPrimitive(), + + else => {}, + } + + break :brk PrimitiveType.unknown; + }, + + .e_unary => switch (data.e_unary.op) { + .un_void => PrimitiveType.undefined, + .un_typeof => PrimitiveType.string, + .un_not, .un_delete => PrimitiveType.boolean, + .un_pos => PrimitiveType.number, // Cannot be bigint because that throws an exception + .un_neg, .un_cpl => switch (data.e_unary.value.data.knownPrimitive()) { + .bigint => PrimitiveType.bigint, + .unknown, .mixed => PrimitiveType.mixed, + else => PrimitiveType.number, // Can be number or bigint + }, + .un_pre_dec, .un_pre_inc, .un_post_dec, .un_post_inc => PrimitiveType.mixed, // Can be number or bigint + 
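+                    // e.g. `x++` yields a number when `x` is a number but a
+                    // bigint when `x` is a bigint, hence .mixed above.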
+                else => PrimitiveType.unknown,
+            },
+
+            .e_inlined_enum => |inlined| inlined.value.data.knownPrimitive(),
+
+            else => PrimitiveType.unknown,
+        };
+    }
+
+    pub fn mergeKnownPrimitive(lhs: Expr.Data, rhs: Expr.Data) PrimitiveType {
+        return lhs.knownPrimitive().merge(rhs.knownPrimitive());
+    }
+
+    /// Returns the result of the "typeof" operator on this expression if it is
+    /// statically determined, or null otherwise. Note that a non-null result does
+    /// not mean the expression is side-effect free: array and object literals may
+    /// still run arbitrary code when constructed.
+    pub inline fn toTypeof(data: Expr.Data) ?string {
+        return @as(Expr.Tag, data).typeof();
+    }
+
+    pub fn toNumber(data: Expr.Data) ?f64 {
+        return switch (data) {
+            .e_null => 0,
+            .e_undefined => std.math.nan(f64),
+            .e_string => |str| {
+                if (str.next != null) return null;
+                if (!str.isUTF8()) return null;
+
+                // +'1' => 1
+                return stringToEquivalentNumberValue(str.slice8());
+            },
+            .e_boolean => @as(f64, if (data.e_boolean.value) 1.0 else 0.0),
+            .e_number => data.e_number.value,
+            .e_inlined_enum => |inlined| switch (inlined.value.data) {
+                .e_number => |num| num.value,
+                .e_string => |str| {
+                    if (str.next != null) return null;
+                    if (!str.isUTF8()) return null;
+
+                    // +'1' => 1
+                    return stringToEquivalentNumberValue(str.slice8());
+                },
+                else => null,
+            },
+            else => null,
+        };
+    }
+
+    pub fn toFiniteNumber(data: Expr.Data) ?f64 {
+        return switch (data) {
+            .e_boolean => @as(f64, if (data.e_boolean.value) 1.0 else 0.0),
+            .e_number => if (std.math.isFinite(data.e_number.value))
+                data.e_number.value
+            else
+                null,
+            .e_inlined_enum => |inlined| switch (inlined.value.data) {
+                .e_number => |num| if (std.math.isFinite(num.value))
+                    num.value
+                else
+                    null,
+                else => null,
+            },
+            else => null,
+        };
+    }
+
+    pub fn extractNumericValue(data: Expr.Data) ?f64 {
+        return switch (data) {
+            .e_number => data.e_number.value,
+            .e_inlined_enum => |inlined| switch (inlined.value.data) {
+                .e_number => |num| num.value,
+                else => null,
+            },
+            else => null,
+        };
+    }
+
+    pub const Equality = struct {
+        equal: bool = false,
+        ok: bool = false,
+
+        /// This extra flag is unfortunately required for the case of visiting the expression
+        /// `require.main === module` (and any combination of !==, ==, !=, either ordering)
+        ///
+        /// We want to replace this with the dedicated import_meta_main node, which:
+        /// - Stops this module from having p.require_ref, allowing conversion to ESM
+        /// - Allows us to inline `import.meta.main`'s value, if it is known (bun build --compile)
+        is_require_main_and_module: bool = false,
+
+        pub const @"true" = Equality{ .ok = true, .equal = true };
+        pub const @"false" = Equality{ .ok = true, .equal = false };
+        pub const unknown = Equality{ .ok = false };
+    };
+
+    // Returns "equal, ok". If "ok" is false, then nothing is known about the two
+    // values. If "ok" is true, the equality or inequality of the two values is
+    // stored in "equal". 
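+    // For example, under loose equality the pair (.e_number 1, .e_boolean true)
+    // yields .{ .ok = true, .equal = true } ("1 == true"), while under strict
+    // equality the same pair yields .{ .ok = true, .equal = false }.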
+ pub fn eql( + left: Expr.Data, + right: Expr.Data, + p: anytype, + comptime kind: enum { loose, strict }, + ) Equality { + comptime bun.assert(@typeInfo(@TypeOf(p)).pointer.size == .one); // pass *Parser + + // https://dorey.github.io/JavaScript-Equality-Table/ + switch (left) { + .e_inlined_enum => |inlined| return inlined.value.data.eql(right, p, kind), + + .e_null, .e_undefined => { + const ok = switch (@as(Expr.Tag, right)) { + .e_null, .e_undefined => true, + else => @as(Expr.Tag, right).isPrimitiveLiteral(), + }; + + if (comptime kind == .loose) { + return .{ + .equal = switch (@as(Expr.Tag, right)) { + .e_null, .e_undefined => true, + else => false, + }, + .ok = ok, + }; + } + + return .{ + .equal = @as(Tag, right) == @as(Tag, left), + .ok = ok, + }; + }, + .e_boolean => |l| { + switch (right) { + .e_boolean => { + return .{ + .ok = true, + .equal = l.value == right.e_boolean.value, + }; + }, + .e_number => |num| { + if (comptime kind == .strict) { + // "true === 1" is false + // "false === 0" is false + return Equality.false; + } + + return .{ + .ok = true, + .equal = if (l.value) + num.value == 1 + else + num.value == 0, + }; + }, + .e_null, .e_undefined => { + return Equality.false; + }, + else => {}, + } + }, + .e_number => |l| { + switch (right) { + .e_number => |r| { + return .{ + .ok = true, + .equal = l.value == r.value, + }; + }, + .e_inlined_enum => |r| if (r.value.data == .e_number) { + return .{ + .ok = true, + .equal = l.value == r.value.data.e_number.value, + }; + }, + .e_boolean => |r| { + if (comptime kind == .loose) { + return .{ + .ok = true, + // "1 == true" is true + // "0 == false" is true + .equal = if (r.value) + l.value == 1 + else + l.value == 0, + }; + } + + // "1 === true" is false + // "0 === false" is false + return Equality.false; + }, + .e_null, .e_undefined => { + // "(not null or undefined) == undefined" is false + return Equality.false; + }, + else => {}, + } + }, + .e_big_int => |l| { + if (right == .e_big_int) { + if (strings.eqlLong(l.value, right.e_big_int.value, true)) { + return Equality.true; + } + + // 0x0000n == 0n is true + return .{ .ok = false }; + } else { + return .{ + .ok = switch (right) { + .e_null, .e_undefined => true, + else => false, + }, + .equal = false, + }; + } + }, + .e_string => |l| { + switch (right) { + .e_string => |r| { + r.resolveRopeIfNeeded(p.allocator); + l.resolveRopeIfNeeded(p.allocator); + return .{ + .ok = true, + .equal = r.eql(E.String, l), + }; + }, + .e_inlined_enum => |inlined| { + if (inlined.value.data == .e_string) { + const r = inlined.value.data.e_string; + + r.resolveRopeIfNeeded(p.allocator); + l.resolveRopeIfNeeded(p.allocator); + + return .{ + .ok = true, + .equal = r.eql(E.String, l), + }; + } + }, + .e_null, .e_undefined => { + return Equality.false; + }, + .e_number => |r| { + if (comptime kind == .loose) { + l.resolveRopeIfNeeded(p.allocator); + if (r.value == 0 and (l.isBlank() or l.eqlComptime("0"))) { + return Equality.true; + } + + if (r.value == 1 and l.eqlComptime("1")) { + return Equality.true; + } + + // the string could still equal 0 or 1 but it could be hex, binary, octal, ... + return Equality.unknown; + } else { + return Equality.false; + } + }, + + else => {}, + } + }, + + else => { + // Do not need to check left because e_require_main is + // always re-ordered to the right side. 
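+                // i.e. both "require.main === module" and "module === require.main"
+                // reach this prong with .e_require_main on the right-hand side.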
+ if (right == .e_require_main) { + if (left.as(.e_identifier)) |id| { + if (id.ref.eql(p.module_ref)) return .{ + .ok = true, + .equal = true, + .is_require_main_and_module = true, + }; + } + } + }, + } + + return Equality.unknown; + } + + pub fn toJS(this: Data, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue { + return switch (this) { + .e_array => |e| e.toJS(allocator, globalObject), + .e_object => |e| e.toJS(allocator, globalObject), + .e_string => |e| e.toJS(allocator, globalObject), + .e_null => JSC.JSValue.null, + .e_undefined => .js_undefined, + .e_boolean => |boolean| if (boolean.value) + JSC.JSValue.true + else + JSC.JSValue.false, + .e_number => |e| e.toJS(), + // .e_big_int => |e| e.toJS(ctx, exception), + + .e_inlined_enum => |inlined| inlined.value.data.toJS(allocator, globalObject), + + .e_identifier, + .e_import_identifier, + .e_private_identifier, + .e_commonjs_export_identifier, + => error.@"Cannot convert identifier to JS. Try a statically-known value", + + // brk: { + // // var node = try allocator.create(Macro.JSNode); + // // node.* = Macro.JSNode.initExpr(Expr{ .data = this, .loc = logger.Loc.Empty }); + // // break :brk JSC.JSValue.c(Macro.JSNode.Class.make(globalObject, node)); + // }, + + else => { + return error.@"Cannot convert argument type to JS"; + }, + }; + } + + pub const Store = struct { + const StoreType = NewStore(&.{ + E.NameOfSymbol, + E.Array, + E.Arrow, + E.Await, + E.BigInt, + E.Binary, + E.Call, + E.Class, + E.Dot, + E.Function, + E.If, + E.Import, + E.Index, + E.InlinedEnum, + E.JSXElement, + E.New, + E.Number, + E.Object, + E.PrivateIdentifier, + E.RegExp, + E.Spread, + E.String, + E.Template, + E.TemplatePart, + E.Unary, + E.Yield, + }, 512); + + pub threadlocal var instance: ?*StoreType = null; + pub threadlocal var memory_allocator: ?*ASTMemoryAllocator = null; + pub threadlocal var disable_reset = false; + + pub fn create() void { + if (instance != null or memory_allocator != null) { + return; + } + + instance = StoreType.init(); + } + + pub fn reset() void { + if (disable_reset or memory_allocator != null) return; + instance.?.reset(); + } + + pub fn deinit() void { + if (instance == null or memory_allocator != null) return; + instance.?.deinit(); + instance = null; + } + + pub inline fn assert() void { + if (comptime Environment.isDebug or Environment.enable_asan) { + if (instance == null and memory_allocator == null) + bun.unreachablePanic("Store must be init'd", .{}); + } + } + + /// create || reset + pub fn begin() void { + if (memory_allocator != null) return; + if (instance == null) { + create(); + return; + } + + if (!disable_reset) + instance.?.reset(); + } + + pub fn append(comptime T: type, value: T) *T { + if (memory_allocator) |allocator| { + return allocator.append(T, value); + } + + Disabler.assert(); + return instance.?.append(T, value); + } + }; + + pub inline fn isStringValue(self: Data) bool { + return @as(Expr.Tag, self) == .e_string; + } +}; + +pub fn StoredData(tag: Tag) type { + const T = @FieldType(Data, tag); + return switch (@typeInfo(T)) { + .pointer => |ptr| ptr.child, + else => T, + }; +} + +extern fn JSC__jsToNumber(latin1_ptr: [*]const u8, len: usize) f64; + +fn stringToEquivalentNumberValue(str: []const u8) f64 { + // +"" -> 0 + if (str.len == 0) return 0; + if (!bun.strings.isAllASCII(str)) + return std.math.nan(f64); + return JSC__jsToNumber(str.ptr, str.len); +} + +// @sortImports + +const JSPrinter = @import("../js_printer.zig"); +const std = @import("std"); + +const 
bun = @import("bun"); +const BabyList = bun.BabyList; +const Environment = bun.Environment; +const JSC = bun.JSC; +const JSONParser = bun.JSON; +const MutableString = bun.MutableString; +const OOM = bun.OOM; +const default_allocator = bun.default_allocator; +const logger = bun.logger; +const string = bun.string; +const stringZ = bun.stringZ; +const strings = bun.strings; +const writeAnyToHasher = bun.writeAnyToHasher; +const MimeType = bun.http.MimeType; + +const js_ast = bun.js_ast; +const ASTMemoryAllocator = js_ast.ASTMemoryAllocator; +const E = js_ast.E; +const Expr = js_ast.Expr; +const G = js_ast.G; +const NewStore = js_ast.NewStore; +const Op = js_ast.Op; +const Ref = js_ast.Ref; +const S = js_ast.S; +const Stmt = js_ast.Stmt; +const ToJSError = js_ast.ToJSError; diff --git a/src/ast/G.zig b/src/ast/G.zig new file mode 100644 index 0000000000..8b1a2a2fd9 --- /dev/null +++ b/src/ast/G.zig @@ -0,0 +1,231 @@ +pub const Decl = struct { + binding: BindingNodeIndex, + value: ?ExprNodeIndex = null, + + pub const List = BabyList(Decl); +}; + +pub const NamespaceAlias = struct { + namespace_ref: Ref, + alias: string, + + was_originally_property_access: bool = false, + + import_record_index: u32 = std.math.maxInt(u32), +}; + +pub const ExportStarAlias = struct { + loc: logger.Loc, + + // Although this alias name starts off as being the same as the statement's + // namespace symbol, it may diverge if the namespace symbol name is minified. + // The original alias name is preserved here to avoid this scenario. + original_name: string, +}; + +pub const Class = struct { + class_keyword: logger.Range = logger.Range.None, + ts_decorators: ExprNodeList = ExprNodeList{}, + class_name: ?LocRef = null, + extends: ?ExprNodeIndex = null, + body_loc: logger.Loc = logger.Loc.Empty, + close_brace_loc: logger.Loc = logger.Loc.Empty, + properties: []Property = &([_]Property{}), + has_decorators: bool = false, + + pub fn canBeMoved(this: *const Class) bool { + if (this.extends != null) + return false; + + if (this.has_decorators) { + return false; + } + + for (this.properties) |property| { + if (property.kind == .class_static_block) + return false; + + const flags = property.flags; + if (flags.contains(.is_computed) or flags.contains(.is_spread)) { + return false; + } + + if (property.kind == .normal) { + if (flags.contains(.is_static)) { + for ([2]?Expr{ property.value, property.initializer }) |val_| { + if (val_) |val| { + switch (val.data) { + .e_arrow, .e_function => {}, + else => { + if (!val.canBeMoved()) { + return false; + } + }, + } + } + } + } + } + } + + return true; + } +}; + +// invalid shadowing if left as Comment +pub const Comment = struct { loc: logger.Loc, text: string }; + +pub const ClassStaticBlock = struct { + stmts: BabyList(Stmt) = .{}, + loc: logger.Loc, +}; + +pub const Property = struct { + /// This is used when parsing a pattern that uses default values: + /// + /// [a = 1] = []; + /// ({a = 1} = {}); + /// + /// It's also used for class fields: + /// + /// class Foo { a = 1 } + /// + initializer: ?ExprNodeIndex = null, + kind: Kind = .normal, + flags: Flags.Property.Set = Flags.Property.None, + + class_static_block: ?*ClassStaticBlock = null, + ts_decorators: ExprNodeList = .{}, + // Key is optional for spread + key: ?ExprNodeIndex = null, + + // This is omitted for class fields + value: ?ExprNodeIndex = null, + + ts_metadata: TypeScript.Metadata = .m_none, + + pub const List = BabyList(Property); + + pub fn deepClone(this: *const Property, allocator: std.mem.Allocator) !Property { 
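+        // Duplicates the heap-allocated static block and recursively clones
+        // every nested expression list; plain value fields (kind, flags,
+        // ts_metadata) are copied as-is.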
+ var class_static_block: ?*ClassStaticBlock = null; + if (this.class_static_block != null) { + class_static_block = bun.create(allocator, ClassStaticBlock, .{ + .loc = this.class_static_block.?.loc, + .stmts = try this.class_static_block.?.stmts.clone(allocator), + }); + } + return .{ + .initializer = if (this.initializer) |init| try init.deepClone(allocator) else null, + .kind = this.kind, + .flags = this.flags, + .class_static_block = class_static_block, + .ts_decorators = try this.ts_decorators.deepClone(allocator), + .key = if (this.key) |key| try key.deepClone(allocator) else null, + .value = if (this.value) |value| try value.deepClone(allocator) else null, + .ts_metadata = this.ts_metadata, + }; + } + + pub const Kind = enum(u3) { + normal, + get, + set, + spread, + declare, + abstract, + class_static_block, + + pub fn jsonStringify(self: @This(), writer: anytype) !void { + return try writer.write(@tagName(self)); + } + }; +}; + +pub const FnBody = struct { + loc: logger.Loc, + stmts: StmtNodeList, + + pub fn initReturnExpr(allocator: std.mem.Allocator, expr: Expr) !FnBody { + return .{ + .stmts = try allocator.dupe(Stmt, &.{Stmt.alloc(S.Return, .{ + .value = expr, + }, expr.loc)}), + .loc = expr.loc, + }; + } +}; + +pub const Fn = struct { + name: ?LocRef = null, + open_parens_loc: logger.Loc = logger.Loc.Empty, + args: []Arg = &.{}, + // This was originally nullable, but doing so I believe caused a miscompilation + // Specifically, the body was always null. + body: FnBody = .{ .loc = logger.Loc.Empty, .stmts = &.{} }, + arguments_ref: ?Ref = null, + + flags: Flags.Function.Set = Flags.Function.None, + + return_ts_metadata: TypeScript.Metadata = .m_none, + + pub fn deepClone(this: *const Fn, allocator: std.mem.Allocator) !Fn { + const args = try allocator.alloc(Arg, this.args.len); + for (0..args.len) |i| { + args[i] = try this.args[i].deepClone(allocator); + } + return .{ + .name = this.name, + .open_parens_loc = this.open_parens_loc, + .args = args, + .body = .{ + .loc = this.body.loc, + .stmts = this.body.stmts, + }, + .arguments_ref = this.arguments_ref, + .flags = this.flags, + .return_ts_metadata = this.return_ts_metadata, + }; + } +}; +pub const Arg = struct { + ts_decorators: ExprNodeList = ExprNodeList{}, + binding: BindingNodeIndex, + default: ?ExprNodeIndex = null, + + // "constructor(public x: boolean) {}" + is_typescript_ctor_field: bool = false, + + ts_metadata: TypeScript.Metadata = .m_none, + + pub fn deepClone(this: *const Arg, allocator: std.mem.Allocator) !Arg { + return .{ + .ts_decorators = try this.ts_decorators.deepClone(allocator), + .binding = this.binding, + .default = if (this.default) |d| try d.deepClone(allocator) else null, + .is_typescript_ctor_field = this.is_typescript_ctor_field, + .ts_metadata = this.ts_metadata, + }; + } +}; + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const BabyList = bun.BabyList; +const logger = bun.logger; +const string = bun.string; +const TypeScript = bun.js_parser.TypeScript; + +const js_ast = bun.js_ast; +const BindingNodeIndex = js_ast.BindingNodeIndex; +const Expr = js_ast.Expr; +const ExprNodeIndex = js_ast.ExprNodeIndex; +const ExprNodeList = js_ast.ExprNodeList; +const Flags = js_ast.Flags; +const LocRef = js_ast.LocRef; +const Ref = js_ast.Ref; +const S = js_ast.S; +const Stmt = js_ast.Stmt; +const StmtNodeList = js_ast.StmtNodeList; diff --git a/src/ast/Macro.zig b/src/ast/Macro.zig new file mode 100644 index 0000000000..546af4b768 --- /dev/null +++ b/src/ast/Macro.zig @@ -0,0 
+1,671 @@ +pub const namespace: string = "macro"; +pub const namespaceWithColon: string = namespace ++ ":"; + +pub fn isMacroPath(str: string) bool { + return strings.hasPrefixComptime(str, namespaceWithColon); +} + +pub const MacroContext = struct { + pub const MacroMap = std.AutoArrayHashMap(i32, Macro); + + resolver: *Resolver, + env: *DotEnv.Loader, + macros: MacroMap, + remap: MacroRemap, + javascript_object: JSC.JSValue = JSC.JSValue.zero, + + pub fn getRemap(this: MacroContext, path: string) ?MacroRemapEntry { + if (this.remap.entries.len == 0) return null; + return this.remap.get(path); + } + + pub fn init(transpiler: *Transpiler) MacroContext { + return MacroContext{ + .macros = MacroMap.init(default_allocator), + .resolver = &transpiler.resolver, + .env = transpiler.env, + .remap = transpiler.options.macro_remap, + }; + } + + pub fn call( + this: *MacroContext, + import_record_path: string, + source_dir: string, + log: *logger.Log, + source: *const logger.Source, + import_range: logger.Range, + caller: Expr, + function_name: string, + ) anyerror!Expr { + Expr.Data.Store.disable_reset = true; + Stmt.Data.Store.disable_reset = true; + defer Expr.Data.Store.disable_reset = false; + defer Stmt.Data.Store.disable_reset = false; + // const is_package_path = isPackagePath(specifier); + const import_record_path_without_macro_prefix = if (isMacroPath(import_record_path)) + import_record_path[namespaceWithColon.len..] + else + import_record_path; + + bun.assert(!isMacroPath(import_record_path_without_macro_prefix)); + + const input_specifier = brk: { + if (JSC.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| { + break :brk replacement.path; + } + + const resolve_result = this.resolver.resolve(source_dir, import_record_path_without_macro_prefix, .stmt) catch |err| { + switch (err) { + error.ModuleNotFound => { + log.addResolveError( + source, + import_range, + log.msgs.allocator, + "Macro \"{s}\" not found", + .{import_record_path}, + .stmt, + err, + ) catch unreachable; + return error.MacroNotFound; + }, + else => { + log.addRangeErrorFmt( + source, + import_range, + log.msgs.allocator, + "{s} resolving macro \"{s}\"", + .{ @errorName(err), import_record_path }, + ) catch unreachable; + return err; + }, + } + }; + break :brk resolve_result.path_pair.primary.text; + }; + + var specifier_buf: [64]u8 = undefined; + var specifier_buf_len: u32 = 0; + const hash = MacroEntryPoint.generateID( + input_specifier, + function_name, + &specifier_buf, + &specifier_buf_len, + ); + + const macro_entry = this.macros.getOrPut(hash) catch unreachable; + if (!macro_entry.found_existing) { + macro_entry.value_ptr.* = Macro.init( + default_allocator, + this.resolver, + input_specifier, + log, + this.env, + function_name, + specifier_buf[0..specifier_buf_len], + hash, + ) catch |err| { + macro_entry.value_ptr.* = Macro{ .resolver = undefined, .disabled = true }; + return err; + }; + Output.flush(); + } + defer Output.flush(); + + const macro = macro_entry.value_ptr.*; + if (macro.disabled) { + return caller; + } + macro.vm.enableMacroMode(); + defer macro.vm.disableMacroMode(); + macro.vm.eventLoop().ensureWaker(); + + const Wrapper = struct { + args: std.meta.ArgsTuple(@TypeOf(Macro.Runner.run)), + ret: Runner.MacroError!Expr, + + pub fn call(self: *@This()) void { + self.ret = @call(.auto, Macro.Runner.run, self.args); + } + }; + var wrapper = Wrapper{ + .args = .{ + macro, + log, + default_allocator, + function_name, + caller, + source, + hash, + this.javascript_object, + }, + 
.ret = undefined, + }; + + macro.vm.runWithAPILock(Wrapper, &wrapper, Wrapper.call); + return try wrapper.ret; + // this.macros.getOrPut(key: K) + } +}; + +pub const MacroResult = struct { + import_statements: []S.Import = &[_]S.Import{}, + replacement: Expr, +}; + +resolver: *Resolver, +vm: *JavaScript.VirtualMachine = undefined, + +resolved: ResolveResult = undefined, +disabled: bool = false, + +pub fn init( + _: std.mem.Allocator, + resolver: *Resolver, + input_specifier: []const u8, + log: *logger.Log, + env: *DotEnv.Loader, + function_name: string, + specifier: string, + hash: i32, +) !Macro { + var vm: *JavaScript.VirtualMachine = if (JavaScript.VirtualMachine.isLoaded()) + JavaScript.VirtualMachine.get() + else brk: { + const old_transform_options = resolver.opts.transform_options; + defer resolver.opts.transform_options = old_transform_options; + + // JSC needs to be initialized if building from CLI + JSC.initialize(false); + + var _vm = try JavaScript.VirtualMachine.init(.{ + .allocator = default_allocator, + .args = resolver.opts.transform_options, + .log = log, + .is_main_thread = false, + .env_loader = env, + }); + + _vm.enableMacroMode(); + _vm.eventLoop().ensureWaker(); + + try _vm.transpiler.configureDefines(); + break :brk _vm; + }; + + vm.enableMacroMode(); + vm.eventLoop().ensureWaker(); + + const loaded_result = try vm.loadMacroEntryPoint(input_specifier, function_name, specifier, hash); + + switch (loaded_result.unwrap(vm.jsc, .leave_unhandled)) { + .rejected => |result| { + vm.unhandledRejection(vm.global, result, loaded_result.asValue()); + vm.disableMacroMode(); + return error.MacroLoadError; + }, + else => {}, + } + + return Macro{ + .vm = vm, + .resolver = resolver, + }; +} + +pub const Runner = struct { + const VisitMap = std.AutoHashMapUnmanaged(JSC.JSValue, Expr); + + threadlocal var args_buf: [3]js.JSObjectRef = undefined; + threadlocal var exception_holder: JSC.ZigException.Holder = undefined; + pub const MacroError = error{ MacroFailed, OutOfMemory } || ToJSError || bun.JSError; + + pub const Run = struct { + caller: Expr, + function_name: string, + macro: *const Macro, + global: *JSC.JSGlobalObject, + allocator: std.mem.Allocator, + id: i32, + log: *logger.Log, + source: *const logger.Source, + visited: VisitMap = VisitMap{}, + is_top_level: bool = false, + + pub fn runAsync( + macro: Macro, + log: *logger.Log, + allocator: std.mem.Allocator, + function_name: string, + caller: Expr, + args: []JSC.JSValue, + source: *const logger.Source, + id: i32, + ) MacroError!Expr { + const macro_callback = macro.vm.macros.get(id) orelse return caller; + + const result = js.JSObjectCallAsFunctionReturnValueHoldingAPILock( + macro.vm.global, + macro_callback, + null, + args.len, + @as([*]js.JSObjectRef, @ptrCast(args.ptr)), + ); + + var runner = Run{ + .caller = caller, + .function_name = function_name, + .macro = ¯o, + .allocator = allocator, + .global = macro.vm.global, + .id = id, + .log = log, + .source = source, + .visited = VisitMap{}, + }; + + defer runner.visited.deinit(allocator); + + return try runner.run( + result, + ); + } + + pub fn run( + this: *Run, + value: JSC.JSValue, + ) MacroError!Expr { + return switch ((try JSC.ConsoleObject.Formatter.Tag.get(value, this.global)).tag) { + .Error => this.coerce(value, .Error), + .Undefined => this.coerce(value, .Undefined), + .Null => this.coerce(value, .Null), + .Private => this.coerce(value, .Private), + .Boolean => this.coerce(value, .Boolean), + .Array => this.coerce(value, .Array), + .Object => this.coerce(value, 
.Object), + .toJSON, .JSON => this.coerce(value, .JSON), + .Integer => this.coerce(value, .Integer), + .Double => this.coerce(value, .Double), + .String => this.coerce(value, .String), + .Promise => this.coerce(value, .Promise), + else => brk: { + const name = value.getClassInfoName() orelse "unknown"; + + this.log.addErrorFmt( + this.source, + this.caller.loc, + this.allocator, + "cannot coerce {s} ({s}) to Bun's AST. Please return a simpler type", + .{ name, @tagName(value.jsType()) }, + ) catch unreachable; + break :brk error.MacroFailed; + }, + }; + } + + pub fn coerce( + this: *Run, + value: JSC.JSValue, + comptime tag: JSC.ConsoleObject.Formatter.Tag, + ) MacroError!Expr { + switch (comptime tag) { + .Error => { + _ = this.macro.vm.uncaughtException(this.global, value, false); + return this.caller; + }, + .Undefined => if (this.is_top_level) + return this.caller + else + return Expr.init(E.Undefined, E.Undefined{}, this.caller.loc), + .Null => return Expr.init(E.Null, E.Null{}, this.caller.loc), + .Private => { + this.is_top_level = false; + const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable; + if (_entry.found_existing) { + return _entry.value_ptr.*; + } + + var blob_: ?JSC.WebCore.Blob = null; + const mime_type: ?MimeType = null; + + if (value.jsType() == .DOMWrapper) { + if (value.as(JSC.WebCore.Response)) |resp| { + return this.run(try resp.getBlobWithoutCallFrame(this.global)); + } else if (value.as(JSC.WebCore.Request)) |resp| { + return this.run(try resp.getBlobWithoutCallFrame(this.global)); + } else if (value.as(JSC.WebCore.Blob)) |resp| { + blob_ = resp.*; + blob_.?.allocator = null; + } else if (value.as(bun.api.ResolveMessage) != null or value.as(bun.api.BuildMessage) != null) { + _ = this.macro.vm.uncaughtException(this.global, value, false); + return error.MacroFailed; + } + } + + if (blob_) |*blob| { + const out_expr = Expr.fromBlob( + blob, + this.allocator, + mime_type, + this.log, + this.caller.loc, + ) catch { + blob.deinit(); + return error.MacroFailed; + }; + if (out_expr.data == .e_string) { + blob.deinit(); + } + + return out_expr; + } + + return Expr.init(E.String, E.String.empty, this.caller.loc); + }, + + .Boolean => { + return Expr{ .data = .{ .e_boolean = .{ .value = value.toBoolean() } }, .loc = this.caller.loc }; + }, + JSC.ConsoleObject.Formatter.Tag.Array => { + this.is_top_level = false; + + const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable; + if (_entry.found_existing) { + switch (_entry.value_ptr.*.data) { + .e_object, .e_array => { + this.log.addErrorFmt(this.source, this.caller.loc, this.allocator, "converting circular structure to Bun AST is not implemented yet", .{}) catch unreachable; + return error.MacroFailed; + }, + else => {}, + } + return _entry.value_ptr.*; + } + + var iter = try JSC.JSArrayIterator.init(value, this.global); + if (iter.len == 0) { + const result = Expr.init( + E.Array, + E.Array{ + .items = ExprNodeList.init(&[_]Expr{}), + .was_originally_macro = true, + }, + this.caller.loc, + ); + _entry.value_ptr.* = result; + return result; + } + var array = this.allocator.alloc(Expr, iter.len) catch unreachable; + var out = Expr.init( + E.Array, + E.Array{ + .items = ExprNodeList.init(array[0..0]), + .was_originally_macro = true, + }, + this.caller.loc, + ); + _entry.value_ptr.* = out; + + errdefer this.allocator.free(array); + var i: usize = 0; + while (try iter.next()) |item| { + array[i] = try this.run(item); + if (array[i].isMissing()) + continue; + i += 1; + } + 
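+                    // Every element has now been visited, so patch the
+                    // placeholder stored in the visit map above: attach the
+                    // populated items and re-store the result so circular
+                    // references resolve to the finished array.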
out.data.e_array.items = ExprNodeList.init(array); + _entry.value_ptr.* = out; + return out; + }, + // TODO: optimize this + JSC.ConsoleObject.Formatter.Tag.Object => { + this.is_top_level = false; + const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable; + if (_entry.found_existing) { + switch (_entry.value_ptr.*.data) { + .e_object, .e_array => { + this.log.addErrorFmt(this.source, this.caller.loc, this.allocator, "converting circular structure to Bun AST is not implemented yet", .{}) catch unreachable; + return error.MacroFailed; + }, + else => {}, + } + return _entry.value_ptr.*; + } + // SAFETY: tag ensures `value` is an object. + const obj = value.getObject() orelse unreachable; + var object_iter = try JSC.JSPropertyIterator(.{ + .skip_empty_name = false, + .include_value = true, + }).init(this.global, obj); + defer object_iter.deinit(); + var properties = this.allocator.alloc(G.Property, object_iter.len) catch unreachable; + errdefer this.allocator.free(properties); + var out = Expr.init( + E.Object, + E.Object{ + .properties = BabyList(G.Property).init(properties), + .was_originally_macro = true, + }, + this.caller.loc, + ); + _entry.value_ptr.* = out; + + while (try object_iter.next()) |prop| { + properties[object_iter.i] = G.Property{ + .key = Expr.init(E.String, E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable), this.caller.loc), + .value = try this.run(object_iter.value), + }; + } + out.data.e_object.properties = BabyList(G.Property).init(properties[0..object_iter.i]); + _entry.value_ptr.* = out; + return out; + }, + + .JSON => { + this.is_top_level = false; + // if (console_tag.cell == .JSDate) { + // // in the code for printing dates, it never exceeds this amount + // var iso_string_buf = this.allocator.alloc(u8, 36) catch unreachable; + // var str = JSC.ZigString.init(""); + // value.jsonStringify(this.global, 0, &str); + // var out_buf: []const u8 = std.fmt.bufPrint(iso_string_buf, "{}", .{str}) catch ""; + // if (out_buf.len > 2) { + // // trim the quotes + // out_buf = out_buf[1 .. out_buf.len - 1]; + // } + // return Expr.init(E.New, E.New{.target = Expr.init(E.Dot{.target = E}) }) + // } + }, + + .Integer => { + return Expr.init(E.Number, E.Number{ .value = @as(f64, @floatFromInt(value.toInt32())) }, this.caller.loc); + }, + .Double => { + return Expr.init(E.Number, E.Number{ .value = value.asNumber() }, this.caller.loc); + }, + .String => { + var bun_str = try value.toBunString(this.global); + defer bun_str.deref(); + + // encode into utf16 so the printer escapes the string correctly + var utf16_bytes = this.allocator.alloc(u16, bun_str.length()) catch unreachable; + const out_slice = utf16_bytes[0 .. 
(bun_str.encodeInto(std.mem.sliceAsBytes(utf16_bytes), .utf16le) catch 0) / 2]; + return Expr.init(E.String, E.String.init(out_slice), this.caller.loc); + }, + .Promise => { + const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable; + if (_entry.found_existing) { + return _entry.value_ptr.*; + } + + const promise = value.asAnyPromise() orelse @panic("Unexpected promise type"); + + this.macro.vm.waitForPromise(promise); + + const promise_result = promise.result(this.macro.vm.jsc); + const rejected = promise.status(this.macro.vm.jsc) == .rejected; + + if (promise_result.isUndefined() and this.is_top_level) { + this.is_top_level = false; + return this.caller; + } + + if (rejected or promise_result.isError() or promise_result.isAggregateError(this.global) or promise_result.isException(this.global.vm())) { + this.macro.vm.unhandledRejection(this.global, promise_result, promise.asValue()); + return error.MacroFailed; + } + this.is_top_level = false; + const result = try this.run(promise_result); + + _entry.value_ptr.* = result; + return result; + }, + else => {}, + } + + this.log.addErrorFmt( + this.source, + this.caller.loc, + this.allocator, + "cannot coerce {s} to Bun's AST. Please return a simpler type", + .{@tagName(value.jsType())}, + ) catch unreachable; + return error.MacroFailed; + } + }; + + pub fn run( + macro: Macro, + log: *logger.Log, + allocator: std.mem.Allocator, + function_name: string, + caller: Expr, + source: *const logger.Source, + id: i32, + javascript_object: JSC.JSValue, + ) MacroError!Expr { + if (comptime Environment.isDebug) Output.prettyln("[macro] call {s}", .{function_name}); + + exception_holder = JSC.ZigException.Holder.init(); + var js_args: []JSC.JSValue = &.{}; + var js_processed_args_len: usize = 0; + defer { + for (js_args[0..js_processed_args_len -| @as(usize, @intFromBool(javascript_object != .zero))]) |arg| { + arg.unprotect(); + } + + allocator.free(js_args); + } + + const globalObject = JSC.VirtualMachine.get().global; + + switch (caller.data) { + .e_call => |call| { + const call_args: []Expr = call.args.slice(); + js_args = try allocator.alloc(JSC.JSValue, call_args.len + @as(usize, @intFromBool(javascript_object != .zero))); + js_processed_args_len = js_args.len; + + for (0.., call_args, js_args[0..call_args.len]) |i, in, *out| { + const value = in.toJS( + allocator, + globalObject, + ) catch |e| { + // Keeping a separate variable instead of modifying js_args.len + // due to allocator.free call in defer + js_processed_args_len = i; + return e; + }; + value.protect(); + out.* = value; + } + }, + .e_template => { + @panic("TODO: support template literals in macros"); + }, + else => { + @panic("Unexpected caller type"); + }, + } + + if (javascript_object != .zero) { + if (js_args.len == 0) { + js_args = try allocator.alloc(JSC.JSValue, 1); + } + + js_args[js_args.len - 1] = javascript_object; + } + + const CallFunction = @TypeOf(Run.runAsync); + const CallArgs = std.meta.ArgsTuple(CallFunction); + const CallData = struct { + threadlocal var call_args: CallArgs = undefined; + threadlocal var result: MacroError!Expr = undefined; + pub fn callWrapper(args: CallArgs) MacroError!Expr { + JSC.markBinding(@src()); + call_args = args; + Bun__startMacro(&call, JSC.VirtualMachine.get().global); + return result; + } + + pub fn call() callconv(.C) void { + const call_args_copy = call_args; + const local_result = @call(.auto, Run.runAsync, call_args_copy); + result = local_result; + } + }; + + // TODO: can change back to `return 
CallData.callWrapper(.{` + // when https://github.com/ziglang/zig/issues/16242 is fixed + return CallData.callWrapper(CallArgs{ + macro, + log, + allocator, + function_name, + caller, + js_args, + source, + id, + }); + } + + extern "c" fn Bun__startMacro(function: *const anyopaque, *anyopaque) void; +}; + +// @sortImports + +const DotEnv = @import("../env_loader.zig"); +const std = @import("std"); + +const MacroRemap = @import("../resolver/package_json.zig").MacroMap; +const MacroRemapEntry = @import("../resolver/package_json.zig").MacroImportReplacementMap; + +const ResolveResult = @import("../resolver/resolver.zig").Result; +const Resolver = @import("../resolver/resolver.zig").Resolver; +const isPackagePath = @import("../resolver/resolver.zig").isPackagePath; + +const bun = @import("bun"); +const BabyList = bun.BabyList; +const Environment = bun.Environment; +const Output = bun.Output; +const Transpiler = bun.Transpiler; +const default_allocator = bun.default_allocator; +const logger = bun.logger; +const string = bun.string; +const strings = bun.strings; +const Loader = bun.options.Loader; +const MimeType = bun.http.MimeType; +const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint; + +const JSC = bun.JSC; +const JavaScript = bun.JSC; +const js = bun.JSC.C; + +const js_ast = bun.js_ast; +const E = js_ast.E; +const Expr = js_ast.Expr; +const ExprNodeList = js_ast.ExprNodeList; +const G = js_ast.G; +const Macro = js_ast.Macro; +const S = js_ast.S; +const Stmt = js_ast.Stmt; +const ToJSError = js_ast.ToJSError; diff --git a/src/ast/NewStore.zig b/src/ast/NewStore.zig new file mode 100644 index 0000000000..b24640ec43 --- /dev/null +++ b/src/ast/NewStore.zig @@ -0,0 +1,167 @@ +/// This "Store" is a specialized memory allocation strategy very similar to an +/// arena, used for allocating expression and statement nodes during JavaScript +/// parsing and visiting. Allocations are grouped into large blocks, where each +/// block is treated as a fixed-buffer allocator. When a block runs out of +/// space, a new one is created; all blocks are joined as a linked list. +/// +/// Similarly to an arena, you can call .reset() to reset state, reusing memory +/// across operations. +pub fn NewStore(comptime types: []const type, comptime count: usize) type { + const largest_size, const largest_align = brk: { + var largest_size = 0; + var largest_align = 1; + for (types) |T| { + if (@sizeOf(T) == 0) { + @compileError("NewStore does not support 0 size type: " ++ @typeName(T)); + } + largest_size = @max(@sizeOf(T), largest_size); + largest_align = @max(@alignOf(T), largest_align); + } + break :brk .{ largest_size, largest_align }; + }; + + const backing_allocator = bun.default_allocator; + + const log = Output.scoped(.Store, true); + + return struct { + const Store = @This(); + + current: *Block, + debug_lock: std.debug.SafetyLock = .{}, + + pub const Block = struct { + pub const size = largest_size * count * 2; + pub const Size = std.math.IntFittingRange(0, size + largest_size); + + buffer: [size]u8 align(largest_align) = undefined, + bytes_used: Size = 0, + next: ?*Block = null, + + pub fn tryAlloc(block: *Block, comptime T: type) ?*T { + const start = std.mem.alignForward(usize, block.bytes_used, @alignOf(T)); + if (start + @sizeOf(T) > block.buffer.len) return null; + defer block.bytes_used = @intCast(start + @sizeOf(T)); + + // it's simpler to use @ptrCast, but as a sanity check, we also + // try to compute the slice. 
Zig will report an out of bounds
+            // panic if the null detection logic above is wrong
+            if (Environment.isDebug) {
+                _ = block.buffer[start..][0..@sizeOf(T)];
+            }
+
+            return @alignCast(@ptrCast(&block.buffer[start]));
+        }
+    };
+
+    const PreAlloc = struct {
+        metadata: Store,
+        first_block: Block,
+    };
+
+    pub fn firstBlock(store: *Store) *Block {
+        return &@as(*PreAlloc, @fieldParentPtr("metadata", store)).first_block;
+    }
+
+    pub fn init() *Store {
+        log("init", .{});
+        const prealloc = backing_allocator.create(PreAlloc) catch bun.outOfMemory();
+
+        prealloc.first_block.bytes_used = 0;
+        prealloc.first_block.next = null;
+
+        prealloc.metadata = .{
+            .current = &prealloc.first_block,
+        };
+
+        return &prealloc.metadata;
+    }
+
+    pub fn deinit(store: *Store) void {
+        log("deinit", .{});
+        var it = store.firstBlock().next; // do not free the preallocated first block
+        while (it) |next| {
+            if (Environment.isDebug or Environment.enable_asan)
+                @memset(&next.buffer, undefined);
+            it = next.next;
+            backing_allocator.destroy(next);
+        }
+
+        const prealloc: *PreAlloc = @fieldParentPtr("metadata", store);
+        bun.assert(&prealloc.first_block == store.firstBlock());
+        backing_allocator.destroy(prealloc);
+    }
+
+    pub fn reset(store: *Store) void {
+        log("reset", .{});
+
+        if (Environment.isDebug or Environment.enable_asan) {
+            var it: ?*Block = store.firstBlock();
+            while (it) |next| : (it = next.next) {
+                next.bytes_used = undefined;
+                @memset(&next.buffer, undefined);
+            }
+        }
+
+        store.current = store.firstBlock();
+        store.current.bytes_used = 0;
+    }
+
+    fn allocate(store: *Store, comptime T: type) *T {
+        comptime bun.assert(@sizeOf(T) > 0); // don't allocate!
+        comptime if (!supportsType(T)) {
+            @compileError("Store does not know about type: " ++ @typeName(T));
+        };
+
+        if (store.current.tryAlloc(T)) |ptr|
+            return ptr;
+
+        // a new block is needed
+        const next_block = if (store.current.next) |next| brk: {
+            next.bytes_used = 0;
+            break :brk next;
+        } else brk: {
+            const new_block = backing_allocator.create(Block) catch
+                bun.outOfMemory();
+            new_block.next = null;
+            new_block.bytes_used = 0;
+            store.current.next = new_block;
+            break :brk new_block;
+        };
+
+        store.current = next_block;
+
+        return next_block.tryAlloc(T) orelse
+            unreachable; // newly initialized blocks must have enough space for at least one
+    }
+
+    pub inline fn append(store: *Store, comptime T: type, data: T) *T {
+        const ptr = store.allocate(T);
+        if (Environment.isDebug) {
+            log("append({s}) -> 0x{x}", .{ bun.meta.typeName(T), @intFromPtr(ptr) });
+        }
+        ptr.* = data;
+        return ptr;
+    }
+
+    pub fn lock(store: *Store) void {
+        store.debug_lock.lock();
+    }
+
+    pub fn unlock(store: *Store) void {
+        store.debug_lock.unlock();
+    }
+
+    fn supportsType(T: type) bool {
+        return std.mem.indexOfScalar(type, types, T) != null;
+    }
+};
+}
+
+// @sortImports
+
+const std = @import("std");
+
+const bun = @import("bun");
+const Environment = bun.Environment;
+const Output = bun.Output;
diff --git a/src/ast/Op.zig b/src/ast/Op.zig
new file mode 100644
index 0000000000..13c59b47f8
--- /dev/null
+++ b/src/ast/Op.zig
@@ -0,0 +1,293 @@
+// If you add a new token, remember to add it to "Table" too
+pub const Code = enum {
+    // Prefix
+    un_pos, // +expr
+    un_neg, // -expr
+    un_cpl, // ~expr
+    un_not, // !expr
+    un_void,
+    un_typeof,
+    un_delete,
+
+    // Prefix update
+    un_pre_dec,
+    un_pre_inc,
+
+    // Postfix update
+    un_post_dec,
+    un_post_inc,
+
+    /// Left-associative
+    bin_add,
+    /// Left-associative
+    bin_sub,
+    /// Left-associative
+    bin_mul,
+    /// Left-associative
+    bin_div,
+    /// Left-associative
+    bin_rem,
+    /// Right-associative
+    bin_pow,
+    /// Left-associative
+    bin_lt,
+    /// Left-associative
+    bin_le,
+    /// Left-associative
+    bin_gt,
+    /// Left-associative
+    bin_ge,
+    /// Left-associative
+    bin_in,
+    /// Left-associative
+    bin_instanceof,
+    /// Left-associative
+    bin_shl,
+    /// Left-associative
+    bin_shr,
+    /// Left-associative
+    bin_u_shr,
+    /// Left-associative
+    bin_loose_eq,
+    /// Left-associative
+    bin_loose_ne,
+    /// Left-associative
+    bin_strict_eq,
+    /// Left-associative
+    bin_strict_ne,
+    /// Left-associative
+    bin_nullish_coalescing,
+    /// Left-associative
+    bin_logical_or,
+    /// Left-associative
+    bin_logical_and,
+    /// Left-associative
+    bin_bitwise_or,
+    /// Left-associative
+    bin_bitwise_and,
+    /// Left-associative
+    bin_bitwise_xor,
+
+    /// Non-associative
+    bin_comma,
+
+    /// Right-associative
+    bin_assign,
+    /// Right-associative
+    bin_add_assign,
+    /// Right-associative
+    bin_sub_assign,
+    /// Right-associative
+    bin_mul_assign,
+    /// Right-associative
+    bin_div_assign,
+    /// Right-associative
+    bin_rem_assign,
+    /// Right-associative
+    bin_pow_assign,
+    /// Right-associative
+    bin_shl_assign,
+    /// Right-associative
+    bin_shr_assign,
+    /// Right-associative
+    bin_u_shr_assign,
+    /// Right-associative
+    bin_bitwise_or_assign,
+    /// Right-associative
+    bin_bitwise_and_assign,
+    /// Right-associative
+    bin_bitwise_xor_assign,
+    /// Right-associative
+    bin_nullish_coalescing_assign,
+    /// Right-associative
+    bin_logical_or_assign,
+    /// Right-associative
+    bin_logical_and_assign,
+
+    pub fn jsonStringify(self: @This(), writer: anytype) !void {
+        return try writer.write(@tagName(self));
+    }
+
+    pub fn unaryAssignTarget(code: Op.Code) AssignTarget {
+        if (@intFromEnum(code) >=
+            @intFromEnum(Op.Code.un_pre_dec) and @intFromEnum(code) <=
+            @intFromEnum(Op.Code.un_post_inc))
+        {
+            return AssignTarget.update;
+        }
+
+        return AssignTarget.none;
+    }
+    pub fn isLeftAssociative(code: Op.Code) bool {
+        return @intFromEnum(code) >=
+            @intFromEnum(Op.Code.bin_add) and
+            @intFromEnum(code) < @intFromEnum(Op.Code.bin_comma) and code != .bin_pow;
+    }
+    pub fn isRightAssociative(code: Op.Code) bool {
+        return @intFromEnum(code) >= @intFromEnum(Op.Code.bin_assign) or code == .bin_pow;
+    }
+    pub fn binaryAssignTarget(code: Op.Code) AssignTarget {
+        if (code == .bin_assign) {
+            return AssignTarget.replace;
+        }
+
+        if (@intFromEnum(code) > @intFromEnum(Op.Code.bin_assign)) {
+            return AssignTarget.update;
+        }
+
+        return AssignTarget.none;
+    }
+
+    pub fn isPrefix(code: Op.Code) bool {
+        return @intFromEnum(code) < @intFromEnum(Op.Code.un_post_dec);
+    }
+};
+
+pub const Level = enum(u6) {
+    lowest,
+    comma,
+    spread,
+    yield,
+    assign,
+    conditional,
+    nullish_coalescing,
+    logical_or,
+    logical_and,
+    bitwise_or,
+    bitwise_xor,
+    bitwise_and,
+    equals,
+    compare,
+    shift,
+    add,
+    multiply,
+    exponentiation,
+    prefix,
+    postfix,
+    new,
+    call,
+    member,
+
+    pub inline fn lt(self: Level, b: Level) bool {
+        return @intFromEnum(self) < @intFromEnum(b);
+    }
+    pub inline fn gt(self: Level, b: Level) bool {
+        return @intFromEnum(self) > @intFromEnum(b);
+    }
+    pub inline fn gte(self: Level, b: Level) bool {
+        return @intFromEnum(self) >= @intFromEnum(b);
+    }
+    pub inline fn lte(self: Level, b: Level) bool {
+        return @intFromEnum(self) <= @intFromEnum(b);
+    }
+    pub inline fn eql(self: Level, b: Level) bool {
+        return @intFromEnum(self) == @intFromEnum(b);
+    }
+
+    pub inline fn sub(self: Level, i: anytype) Level {
+        return @as(Level, 
@enumFromInt(@intFromEnum(self) - i)); + } + + pub inline fn addF(self: Level, i: anytype) Level { + return @as(Level, @enumFromInt(@intFromEnum(self) + i)); + } +}; + +text: string, +level: Level, +is_keyword: bool = false, + +pub fn init(triple: anytype) Op { + return Op{ + .text = triple.@"0", + .level = triple.@"1", + .is_keyword = triple.@"2", + }; +} + +pub fn jsonStringify(self: *const @This(), writer: anytype) !void { + return try writer.write(self.text); +} + +pub const TableType: std.EnumArray(Op.Code, Op) = undefined; +pub const Table = brk: { + var table = std.EnumArray(Op.Code, Op).initUndefined(); + + // Prefix + table.set(Op.Code.un_pos, Op.init(.{ "+", Level.prefix, false })); + table.set(Op.Code.un_neg, Op.init(.{ "-", Level.prefix, false })); + table.set(Op.Code.un_cpl, Op.init(.{ "~", Level.prefix, false })); + table.set(Op.Code.un_not, Op.init(.{ "!", Level.prefix, false })); + table.set(Op.Code.un_void, Op.init(.{ "void", Level.prefix, true })); + table.set(Op.Code.un_typeof, Op.init(.{ "typeof", Level.prefix, true })); + table.set(Op.Code.un_delete, Op.init(.{ "delete", Level.prefix, true })); + + // Prefix update + table.set(Op.Code.un_pre_dec, Op.init(.{ "--", Level.prefix, false })); + table.set(Op.Code.un_pre_inc, Op.init(.{ "++", Level.prefix, false })); + + // Postfix update + table.set(Op.Code.un_post_dec, Op.init(.{ "--", Level.postfix, false })); + table.set(Op.Code.un_post_inc, Op.init(.{ "++", Level.postfix, false })); + + // Left-associative + table.set(Op.Code.bin_add, Op.init(.{ "+", Level.add, false })); + table.set(Op.Code.bin_sub, Op.init(.{ "-", Level.add, false })); + table.set(Op.Code.bin_mul, Op.init(.{ "*", Level.multiply, false })); + table.set(Op.Code.bin_div, Op.init(.{ "/", Level.multiply, false })); + table.set(Op.Code.bin_rem, Op.init(.{ "%", Level.multiply, false })); + table.set(Op.Code.bin_pow, Op.init(.{ "**", Level.exponentiation, false })); + table.set(Op.Code.bin_lt, Op.init(.{ "<", Level.compare, false })); + table.set(Op.Code.bin_le, Op.init(.{ "<=", Level.compare, false })); + table.set(Op.Code.bin_gt, Op.init(.{ ">", Level.compare, false })); + table.set(Op.Code.bin_ge, Op.init(.{ ">=", Level.compare, false })); + table.set(Op.Code.bin_in, Op.init(.{ "in", Level.compare, true })); + table.set(Op.Code.bin_instanceof, Op.init(.{ "instanceof", Level.compare, true })); + table.set(Op.Code.bin_shl, Op.init(.{ "<<", Level.shift, false })); + table.set(Op.Code.bin_shr, Op.init(.{ ">>", Level.shift, false })); + table.set(Op.Code.bin_u_shr, Op.init(.{ ">>>", Level.shift, false })); + table.set(Op.Code.bin_loose_eq, Op.init(.{ "==", Level.equals, false })); + table.set(Op.Code.bin_loose_ne, Op.init(.{ "!=", Level.equals, false })); + table.set(Op.Code.bin_strict_eq, Op.init(.{ "===", Level.equals, false })); + table.set(Op.Code.bin_strict_ne, Op.init(.{ "!==", Level.equals, false })); + table.set(Op.Code.bin_nullish_coalescing, Op.init(.{ "??", Level.nullish_coalescing, false })); + table.set(Op.Code.bin_logical_or, Op.init(.{ "||", Level.logical_or, false })); + table.set(Op.Code.bin_logical_and, Op.init(.{ "&&", Level.logical_and, false })); + table.set(Op.Code.bin_bitwise_or, Op.init(.{ "|", Level.bitwise_or, false })); + table.set(Op.Code.bin_bitwise_and, Op.init(.{ "&", Level.bitwise_and, false })); + table.set(Op.Code.bin_bitwise_xor, Op.init(.{ "^", Level.bitwise_xor, false })); + + // Non-associative + table.set(Op.Code.bin_comma, Op.init(.{ ",", Level.comma, false })); + + // Right-associative + table.set(Op.Code.bin_assign, 
Op.init(.{ "=", Level.assign, false })); + table.set(Op.Code.bin_add_assign, Op.init(.{ "+=", Level.assign, false })); + table.set(Op.Code.bin_sub_assign, Op.init(.{ "-=", Level.assign, false })); + table.set(Op.Code.bin_mul_assign, Op.init(.{ "*=", Level.assign, false })); + table.set(Op.Code.bin_div_assign, Op.init(.{ "/=", Level.assign, false })); + table.set(Op.Code.bin_rem_assign, Op.init(.{ "%=", Level.assign, false })); + table.set(Op.Code.bin_pow_assign, Op.init(.{ "**=", Level.assign, false })); + table.set(Op.Code.bin_shl_assign, Op.init(.{ "<<=", Level.assign, false })); + table.set(Op.Code.bin_shr_assign, Op.init(.{ ">>=", Level.assign, false })); + table.set(Op.Code.bin_u_shr_assign, Op.init(.{ ">>>=", Level.assign, false })); + table.set(Op.Code.bin_bitwise_or_assign, Op.init(.{ "|=", Level.assign, false })); + table.set(Op.Code.bin_bitwise_and_assign, Op.init(.{ "&=", Level.assign, false })); + table.set(Op.Code.bin_bitwise_xor_assign, Op.init(.{ "^=", Level.assign, false })); + table.set(Op.Code.bin_nullish_coalescing_assign, Op.init(.{ "??=", Level.assign, false })); + table.set(Op.Code.bin_logical_or_assign, Op.init(.{ "||=", Level.assign, false })); + table.set(Op.Code.bin_logical_and_assign, Op.init(.{ "&&=", Level.assign, false })); + + break :brk table; +}; + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const string = bun.string; + +const js_ast = bun.js_ast; +const AssignTarget = js_ast.AssignTarget; +const Op = js_ast.Op; diff --git a/src/ast/S.zig b/src/ast/S.zig new file mode 100644 index 0000000000..e47c3f223a --- /dev/null +++ b/src/ast/S.zig @@ -0,0 +1,233 @@ +pub const Block = struct { + stmts: StmtNodeList, + close_brace_loc: logger.Loc = logger.Loc.Empty, +}; + +pub const SExpr = struct { + value: ExprNodeIndex, + + // This is set to true for automatically-generated expressions that should + // not affect tree shaking. For example, calling a function from the runtime + // that doesn't have externally-visible side effects. 
+ does_not_affect_tree_shaking: bool = false, +}; + +pub const Comment = struct { text: string }; + +pub const Directive = struct { + value: []const u8, +}; + +pub const ExportClause = struct { + items: []ClauseItem, + is_single_line: bool, +}; + +pub const Empty = struct {}; + +pub const ExportStar = struct { + namespace_ref: Ref, + alias: ?G.ExportStarAlias = null, + import_record_index: u32, +}; + +// This is an "export = value;" statement in TypeScript +pub const ExportEquals = struct { value: ExprNodeIndex }; + +pub const Label = struct { name: LocRef, stmt: StmtNodeIndex }; + +// This is a stand-in for a TypeScript type declaration +pub const TypeScript = struct {}; + +pub const Debugger = struct {}; + +pub const ExportFrom = struct { + items: []ClauseItem, + namespace_ref: Ref, + import_record_index: u32, + is_single_line: bool, +}; + +pub const ExportDefault = struct { + default_name: LocRef, // value may be a SFunction or SClass + value: StmtOrExpr, + + pub fn canBeMoved(self: *const ExportDefault) bool { + return switch (self.value) { + .expr => |e| e.canBeMoved(), + .stmt => |s| switch (s.data) { + .s_class => |class| class.class.canBeMoved(), + .s_function => true, + else => false, + }, + }; + } +}; + +pub const Enum = struct { + name: LocRef, + arg: Ref, + values: []EnumValue, + is_export: bool, +}; + +pub const Namespace = struct { + name: LocRef, + arg: Ref, + stmts: StmtNodeList, + is_export: bool, +}; + +pub const Function = struct { + func: G.Fn, +}; + +pub const Class = struct { class: G.Class, is_export: bool = false }; + +pub const If = struct { + test_: ExprNodeIndex, + yes: StmtNodeIndex, + no: ?StmtNodeIndex, +}; + +pub const For = struct { + // May be a SConst, SLet, SVar, or SExpr + init: ?StmtNodeIndex = null, + test_: ?ExprNodeIndex = null, + update: ?ExprNodeIndex = null, + body: StmtNodeIndex, +}; + +pub const ForIn = struct { + // May be a SConst, SLet, SVar, or SExpr + init: StmtNodeIndex, + value: ExprNodeIndex, + body: StmtNodeIndex, +}; + +pub const ForOf = struct { + is_await: bool = false, + // May be a SConst, SLet, SVar, or SExpr + init: StmtNodeIndex, + value: ExprNodeIndex, + body: StmtNodeIndex, +}; + +pub const DoWhile = struct { body: StmtNodeIndex, test_: ExprNodeIndex }; + +pub const While = struct { + test_: ExprNodeIndex, + body: StmtNodeIndex, +}; + +pub const With = struct { + value: ExprNodeIndex, + body: StmtNodeIndex, + body_loc: logger.Loc = logger.Loc.Empty, +}; + +pub const Try = struct { + body_loc: logger.Loc, + body: StmtNodeList, + + catch_: ?Catch = null, + finally: ?Finally = null, +}; + +pub const Switch = struct { + test_: ExprNodeIndex, + body_loc: logger.Loc, + cases: []Case, +}; + +// This object represents all of these types of import statements: +// +// import 'path' +// import {item1, item2} from 'path' +// import * as ns from 'path' +// import defaultItem, {item1, item2} from 'path' +// import defaultItem, * as ns from 'path' +// +// Many parts are optional and can be combined in different ways. The only +// restriction is that you cannot have both a clause and a star namespace. +pub const Import = struct { + // If this is a star import: This is a Ref for the namespace symbol. The Loc + // for the symbol is StarLoc. + // + // Otherwise: This is an auto-generated Ref for the namespace representing + // the imported file. In this case StarLoc is nil. The NamespaceRef is used + // when converting this module to a CommonJS module. 
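+    // Example: for `import * as ns from "path"`, this is the symbol for `ns`,
+    // and `star_name_loc` is the location of the `*`.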
+ namespace_ref: Ref, + default_name: ?LocRef = null, + items: []ClauseItem = &.{}, + star_name_loc: ?logger.Loc = null, + import_record_index: u32, + is_single_line: bool = false, +}; + +pub const Return = struct { value: ?ExprNodeIndex = null }; +pub const Throw = struct { value: ExprNodeIndex }; + +pub const Local = struct { + kind: Kind = .k_var, + decls: G.Decl.List = .{}, + is_export: bool = false, + // The TypeScript compiler doesn't generate code for "import foo = bar" + // statements where the import is never used. + was_ts_import_equals: bool = false, + + was_commonjs_export: bool = false, + + pub fn canMergeWith(this: *const Local, other: *const Local) bool { + return this.kind == other.kind and this.is_export == other.is_export and + this.was_commonjs_export == other.was_commonjs_export; + } + + pub const Kind = enum { + k_var, + k_let, + k_const, + k_using, + k_await_using, + + pub fn jsonStringify(self: @This(), writer: anytype) !void { + return try writer.write(@tagName(self)); + } + + pub fn isUsing(self: Kind) bool { + return self == .k_using or self == .k_await_using; + } + + pub fn isReassignable(kind: Kind) bool { + return kind == .k_var or kind == .k_let; + } + }; +}; + +pub const Break = struct { + label: ?LocRef = null, +}; + +pub const Continue = struct { + label: ?LocRef = null, +}; + +// @sortImports + +const bun = @import("bun"); +const logger = bun.logger; +const string = bun.string; + +const js_ast = bun.js_ast; +const Case = js_ast.Case; +const Catch = js_ast.Catch; +const ClauseItem = js_ast.ClauseItem; +const EnumValue = js_ast.EnumValue; +const ExprNodeIndex = js_ast.ExprNodeIndex; +const Finally = js_ast.Finally; +const G = js_ast.G; +const LocRef = js_ast.LocRef; +const Ref = js_ast.Ref; +const StmtNodeIndex = js_ast.StmtNodeIndex; +const StmtNodeList = js_ast.StmtNodeList; +const StmtOrExpr = js_ast.StmtOrExpr; diff --git a/src/ast/Scope.zig b/src/ast/Scope.zig new file mode 100644 index 0000000000..ca9334e0d2 --- /dev/null +++ b/src/ast/Scope.zig @@ -0,0 +1,222 @@ +pub const MemberHashMap = bun.StringHashMapUnmanaged(Member); + +id: usize = 0, +kind: Kind = Kind.block, +parent: ?*Scope = null, +children: BabyList(*Scope) = .{}, +members: MemberHashMap = .{}, +generated: BabyList(Ref) = .{}, + +// This is used to store the ref of the label symbol for ScopeLabel scopes. +label_ref: ?Ref = null, +label_stmt_is_loop: bool = false, + +// If a scope contains a direct eval() expression, then none of the symbols +// inside that scope can be renamed. We conservatively assume that the +// evaluated code might reference anything that it has access to. 
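+// Example: `function f() { eval(src) }` means no symbol visible inside `f`
+// can be safely renamed.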
+contains_direct_eval: bool = false, + +// This is to help forbid "arguments" inside class body scopes +forbid_arguments: bool = false, + +strict_mode: StrictModeKind = StrictModeKind.sloppy_mode, + +is_after_const_local_prefix: bool = false, + +// This will be non-null if this is a TypeScript "namespace" or "enum" +ts_namespace: ?*TSNamespaceScope = null, + +pub const NestedScopeMap = std.AutoArrayHashMap(u32, bun.BabyList(*Scope)); + +pub fn getMemberHash(name: []const u8) u64 { + return bun.StringHashMapContext.hash(.{}, name); +} + +pub fn getMemberWithHash(this: *const Scope, name: []const u8, hash_value: u64) ?Member { + const hashed = bun.StringHashMapContext.Prehashed{ + .value = hash_value, + .input = name, + }; + return this.members.getAdapted(name, hashed); +} + +pub fn getOrPutMemberWithHash( + this: *Scope, + allocator: std.mem.Allocator, + name: []const u8, + hash_value: u64, +) !MemberHashMap.GetOrPutResult { + const hashed = bun.StringHashMapContext.Prehashed{ + .value = hash_value, + .input = name, + }; + return this.members.getOrPutContextAdapted(allocator, name, hashed, .{}); +} + +pub fn reset(this: *Scope) void { + this.children.clearRetainingCapacity(); + this.generated.clearRetainingCapacity(); + this.members.clearRetainingCapacity(); + this.parent = null; + this.id = 0; + this.label_ref = null; + this.label_stmt_is_loop = false; + this.contains_direct_eval = false; + this.strict_mode = .sloppy_mode; + this.kind = .block; +} + +// Do not make this a packed struct +// Two hours of debugging time lost to that. +// It causes a crash due to undefined memory +pub const Member = struct { + ref: Ref, + loc: logger.Loc, + + pub fn eql(a: Member, b: Member) bool { + return @call(bun.callmod_inline, Ref.eql, .{ a.ref, b.ref }) and a.loc.start == b.loc.start; + } +}; + +pub const SymbolMergeResult = enum { + forbidden, + replace_with_new, + overwrite_with_new, + keep_existing, + become_private_get_set_pair, + become_private_static_get_set_pair, +}; + +pub fn canMergeSymbols( + scope: *Scope, + existing: Symbol.Kind, + new: Symbol.Kind, + comptime is_typescript_enabled: bool, +) SymbolMergeResult { + if (existing == .unbound) { + return .replace_with_new; + } + + if (comptime is_typescript_enabled) { + // In TypeScript, imports are allowed to silently collide with symbols within + // the module. Presumably this is because the imports may be type-only: + // + // import {Foo} from 'bar' + // class Foo {} + // + if (existing == .import) { + return .replace_with_new; + } + + // "enum Foo {} enum Foo {}" + // "namespace Foo { ... } enum Foo {}" + if (new == .ts_enum and (existing == .ts_enum or existing == .ts_namespace)) { + return .replace_with_new; + } + + // "namespace Foo { ... } namespace Foo { ... }" + // "function Foo() {} namespace Foo { ... }" + // "enum Foo {} namespace Foo { ... 
}" + if (new == .ts_namespace) { + switch (existing) { + .ts_namespace, + .ts_enum, + .hoisted_function, + .generator_or_async_function, + .class, + => return .keep_existing, + else => {}, + } + } + } + + // "var foo; var foo;" + // "var foo; function foo() {}" + // "function foo() {} var foo;" + // "function *foo() {} function *foo() {}" but not "{ function *foo() {} function *foo() {} }" + if (Symbol.isKindHoistedOrFunction(new) and + Symbol.isKindHoistedOrFunction(existing) and + (scope.kind == .entry or scope.kind == .function_body or scope.kind == .function_args or + (new == existing and Symbol.isKindHoisted(existing)))) + { + return .replace_with_new; + } + + // "get #foo() {} set #foo() {}" + // "set #foo() {} get #foo() {}" + if ((existing == .private_get and new == .private_set) or + (existing == .private_set and new == .private_get)) + { + return .become_private_get_set_pair; + } + if ((existing == .private_static_get and new == .private_static_set) or + (existing == .private_static_set and new == .private_static_get)) + { + return .become_private_static_get_set_pair; + } + + // "try {} catch (e) { var e }" + if (existing == .catch_identifier and new == .hoisted) { + return .replace_with_new; + } + + // "function() { var arguments }" + if (existing == .arguments and new == .hoisted) { + return .keep_existing; + } + + // "function() { let arguments }" + if (existing == .arguments and new != .hoisted) { + return .overwrite_with_new; + } + + return .forbidden; +} + +pub const Kind = enum(u8) { + block, + with, + label, + class_name, + class_body, + catch_binding, + + // The scopes below stop hoisted variables from extending into parent scopes + entry, // This is a module, TypeScript enum, or TypeScript namespace + function_args, + function_body, + class_static_init, + + pub fn jsonStringify(self: @This(), writer: anytype) !void { + return try writer.write(@tagName(self)); + } +}; + +pub fn recursiveSetStrictMode(s: *Scope, kind: StrictModeKind) void { + if (s.strict_mode == .sloppy_mode) { + s.strict_mode = kind; + for (s.children.slice()) |child| { + child.recursiveSetStrictMode(kind); + } + } +} + +pub inline fn kindStopsHoisting(s: *const Scope) bool { + return @intFromEnum(s.kind) >= @intFromEnum(Kind.entry); +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const BabyList = bun.BabyList; +const logger = bun.logger; + +const js_ast = bun.js_ast; +const Ref = js_ast.Ref; +const Scope = js_ast.Scope; +const StrictModeKind = js_ast.StrictModeKind; +const Symbol = js_ast.Symbol; +const TSNamespaceScope = js_ast.TSNamespaceScope; +const TypeScript = js_ast.TypeScript; diff --git a/src/ast/ServerComponentBoundary.zig b/src/ast/ServerComponentBoundary.zig new file mode 100644 index 0000000000..b24af72401 --- /dev/null +++ b/src/ast/ServerComponentBoundary.zig @@ -0,0 +1,123 @@ +//! Represents a boundary between client and server code. Every boundary +//! gets bundled twice, once for the desired target, and once to generate +//! a module of "references". Specifically, the generated file takes the +//! canonical Ast as input to derive a wrapper. See `Framework.ServerComponents` +//! for more details about this generated file. +//! +//! This is sometimes abbreviated as SCB +use_directive: UseDirective, + +/// The index of the original file. +source_index: Index.Int, + +/// Index to the file imported on the opposite platform, which is +/// generated by the bundler. For client components, this is the +/// server's code. 
For server actions, this is the client's code. +reference_source_index: Index.Int, + +/// When `bake.Framework.ServerComponents.separate_ssr_graph` is enabled this +/// points to the separated module. When the SSR graph is not separate, this is +/// equal to `reference_source_index` +// +// TODO: Is this used for server actions. +ssr_source_index: Index.Int, + +/// The requirements for this data structure is to have reasonable lookup +/// speed, but also being able to pull a `[]const Index.Int` of all +/// boundaries for iteration. +pub const List = struct { + list: std.MultiArrayList(ServerComponentBoundary) = .{}, + /// Used to facilitate fast lookups into `items` by `.source_index` + map: Map = .{}, + + const Map = std.ArrayHashMapUnmanaged(void, void, struct {}, true); + + /// Can only be called on the bundler thread. + pub fn put( + m: *List, + allocator: std.mem.Allocator, + source_index: Index.Int, + use_directive: UseDirective, + reference_source_index: Index.Int, + ssr_source_index: Index.Int, + ) !void { + try m.list.append(allocator, .{ + .source_index = source_index, + .use_directive = use_directive, + .reference_source_index = reference_source_index, + .ssr_source_index = ssr_source_index, + }); + const gop = try m.map.getOrPutAdapted( + allocator, + source_index, + Adapter{ .list = m.list.slice() }, + ); + bun.assert(!gop.found_existing); + } + + /// Can only be called on the bundler thread. + pub fn getIndex(l: *const List, real_source_index: Index.Int) ?usize { + return l.map.getIndexAdapted( + real_source_index, + Adapter{ .list = l.list.slice() }, + ); + } + + /// Use this to improve speed of accessing fields at the cost of + /// storing more pointers. Invalidated when input is mutated. + pub fn slice(l: List) Slice { + return .{ .list = l.list.slice(), .map = l.map }; + } + + pub const Slice = struct { + list: std.MultiArrayList(ServerComponentBoundary).Slice, + map: Map, + + pub fn getIndex(l: *const Slice, real_source_index: Index.Int) ?usize { + return l.map.getIndexAdapted( + real_source_index, + Adapter{ .list = l.list }, + ) orelse return null; + } + + pub fn getReferenceSourceIndex(l: *const Slice, real_source_index: Index.Int) ?u32 { + const i = l.map.getIndexAdapted( + real_source_index, + Adapter{ .list = l.list }, + ) orelse return null; + bun.unsafeAssert(l.list.capacity > 0); // optimize MultiArrayList.Slice.items + return l.list.items(.reference_source_index)[i]; + } + + pub fn bitSet(scbs: Slice, alloc: std.mem.Allocator, input_file_count: usize) !bun.bit_set.DynamicBitSetUnmanaged { + var scb_bitset = try bun.bit_set.DynamicBitSetUnmanaged.initEmpty(alloc, input_file_count); + for (scbs.list.items(.source_index)) |source_index| { + scb_bitset.set(source_index); + } + return scb_bitset; + } + }; + + pub const Adapter = struct { + list: std.MultiArrayList(ServerComponentBoundary).Slice, + + pub fn hash(_: Adapter, key: Index.Int) u32 { + return std.hash.uint32(key); + } + + pub fn eql(adapt: Adapter, a: Index.Int, _: void, b_index: usize) bool { + bun.unsafeAssert(adapt.list.capacity > 0); // optimize MultiArrayList.Slice.items + return a == adapt.list.items(.source_index)[b_index]; + } + }; +}; + +// @sortImports + +const bun = @import("bun"); +const std = @import("std"); + +const js_ast = bun.js_ast; +const Index = js_ast.Index; +const ServerComponentBoundary = js_ast.ServerComponentBoundary; +const UseDirective = js_ast.UseDirective; diff --git a/src/ast/Stmt.zig b/src/ast/Stmt.zig new file mode 100644 index 0000000000..90cf71d27a --- /dev/null +++ 
b/src/ast/Stmt.zig @@ -0,0 +1,424 @@ +loc: logger.Loc, +data: Data, + +pub const Batcher = NewBatcher(Stmt); + +pub fn assign(a: Expr, b: Expr) Stmt { + return Stmt.alloc( + S.SExpr, + S.SExpr{ + .value = Expr.assign(a, b), + }, + a.loc, + ); +} + +const Serializable = struct { + type: Tag, + object: string, + value: Data, + loc: logger.Loc, +}; + +pub fn jsonStringify(self: *const Stmt, writer: anytype) !void { + return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "stmt", .value = self.data, .loc = self.loc }); +} + +pub fn isTypeScript(self: *Stmt) bool { + return @as(Stmt.Tag, self.data) == .s_type_script; +} + +pub fn isSuperCall(self: Stmt) bool { + return self.data == .s_expr and self.data.s_expr.value.data == .e_call and self.data.s_expr.value.data.e_call.target.data == .e_super; +} + +pub fn isMissingExpr(self: Stmt) bool { + return self.data == .s_expr and self.data.s_expr.value.data == .e_missing; +} + +pub fn empty() Stmt { + return Stmt{ .data = .{ .s_empty = None }, .loc = logger.Loc{} }; +} + +pub fn toEmpty(this: Stmt) Stmt { + return .{ + .data = .{ + .s_empty = None, + }, + .loc = this.loc, + }; +} + +const None = S.Empty{}; + +pub var icount: usize = 0; +pub fn init(comptime StatementType: type, origData: *StatementType, loc: logger.Loc) Stmt { + icount += 1; + + return switch (comptime StatementType) { + S.Empty => Stmt{ .loc = loc, .data = Data{ .s_empty = S.Empty{} } }, + S.Block => Stmt.comptime_init("s_block", S.Block, origData, loc), + S.Break => Stmt.comptime_init("s_break", S.Break, origData, loc), + S.Class => Stmt.comptime_init("s_class", S.Class, origData, loc), + S.Comment => Stmt.comptime_init("s_comment", S.Comment, origData, loc), + S.Continue => Stmt.comptime_init("s_continue", S.Continue, origData, loc), + S.Debugger => Stmt.comptime_init("s_debugger", S.Debugger, origData, loc), + S.Directive => Stmt.comptime_init("s_directive", S.Directive, origData, loc), + S.DoWhile => Stmt.comptime_init("s_do_while", S.DoWhile, origData, loc), + S.Enum => Stmt.comptime_init("s_enum", S.Enum, origData, loc), + S.ExportClause => Stmt.comptime_init("s_export_clause", S.ExportClause, origData, loc), + S.ExportDefault => Stmt.comptime_init("s_export_default", S.ExportDefault, origData, loc), + S.ExportEquals => Stmt.comptime_init("s_export_equals", S.ExportEquals, origData, loc), + S.ExportFrom => Stmt.comptime_init("s_export_from", S.ExportFrom, origData, loc), + S.ExportStar => Stmt.comptime_init("s_export_star", S.ExportStar, origData, loc), + S.SExpr => Stmt.comptime_init("s_expr", S.SExpr, origData, loc), + S.ForIn => Stmt.comptime_init("s_for_in", S.ForIn, origData, loc), + S.ForOf => Stmt.comptime_init("s_for_of", S.ForOf, origData, loc), + S.For => Stmt.comptime_init("s_for", S.For, origData, loc), + S.Function => Stmt.comptime_init("s_function", S.Function, origData, loc), + S.If => Stmt.comptime_init("s_if", S.If, origData, loc), + S.Import => Stmt.comptime_init("s_import", S.Import, origData, loc), + S.Label => Stmt.comptime_init("s_label", S.Label, origData, loc), + S.Local => Stmt.comptime_init("s_local", S.Local, origData, loc), + S.Namespace => Stmt.comptime_init("s_namespace", S.Namespace, origData, loc), + S.Return => Stmt.comptime_init("s_return", S.Return, origData, loc), + S.Switch => Stmt.comptime_init("s_switch", S.Switch, origData, loc), + S.Throw => Stmt.comptime_init("s_throw", S.Throw, origData, loc), + S.Try => Stmt.comptime_init("s_try", S.Try, origData, loc), + S.TypeScript => 
Stmt.comptime_init("s_type_script", S.TypeScript, origData, loc), + S.While => Stmt.comptime_init("s_while", S.While, origData, loc), + S.With => Stmt.comptime_init("s_with", S.With, origData, loc), + else => @compileError("Invalid type in Stmt.init"), + }; +} +inline fn comptime_alloc(comptime tag_name: string, comptime typename: type, origData: anytype, loc: logger.Loc) Stmt { + return Stmt{ + .loc = loc, + .data = @unionInit( + Data, + tag_name, + Data.Store.append( + typename, + origData, + ), + ), + }; +} + +fn allocateData(allocator: std.mem.Allocator, comptime tag_name: string, comptime typename: type, origData: anytype, loc: logger.Loc) Stmt { + const value = allocator.create(@TypeOf(origData)) catch unreachable; + value.* = origData; + + return comptime_init(tag_name, *typename, value, loc); +} + +inline fn comptime_init(comptime tag_name: string, comptime TypeName: type, origData: TypeName, loc: logger.Loc) Stmt { + return Stmt{ .loc = loc, .data = @unionInit(Data, tag_name, origData) }; +} + +pub fn alloc(comptime StatementData: type, origData: StatementData, loc: logger.Loc) Stmt { + Stmt.Data.Store.assert(); + + icount += 1; + return switch (StatementData) { + S.Block => Stmt.comptime_alloc("s_block", S.Block, origData, loc), + S.Break => Stmt.comptime_alloc("s_break", S.Break, origData, loc), + S.Class => Stmt.comptime_alloc("s_class", S.Class, origData, loc), + S.Comment => Stmt.comptime_alloc("s_comment", S.Comment, origData, loc), + S.Continue => Stmt.comptime_alloc("s_continue", S.Continue, origData, loc), + S.Debugger => Stmt{ .loc = loc, .data = .{ .s_debugger = origData } }, + S.Directive => Stmt.comptime_alloc("s_directive", S.Directive, origData, loc), + S.DoWhile => Stmt.comptime_alloc("s_do_while", S.DoWhile, origData, loc), + S.Empty => Stmt{ .loc = loc, .data = Data{ .s_empty = S.Empty{} } }, + S.Enum => Stmt.comptime_alloc("s_enum", S.Enum, origData, loc), + S.ExportClause => Stmt.comptime_alloc("s_export_clause", S.ExportClause, origData, loc), + S.ExportDefault => Stmt.comptime_alloc("s_export_default", S.ExportDefault, origData, loc), + S.ExportEquals => Stmt.comptime_alloc("s_export_equals", S.ExportEquals, origData, loc), + S.ExportFrom => Stmt.comptime_alloc("s_export_from", S.ExportFrom, origData, loc), + S.ExportStar => Stmt.comptime_alloc("s_export_star", S.ExportStar, origData, loc), + S.SExpr => Stmt.comptime_alloc("s_expr", S.SExpr, origData, loc), + S.ForIn => Stmt.comptime_alloc("s_for_in", S.ForIn, origData, loc), + S.ForOf => Stmt.comptime_alloc("s_for_of", S.ForOf, origData, loc), + S.For => Stmt.comptime_alloc("s_for", S.For, origData, loc), + S.Function => Stmt.comptime_alloc("s_function", S.Function, origData, loc), + S.If => Stmt.comptime_alloc("s_if", S.If, origData, loc), + S.Import => Stmt.comptime_alloc("s_import", S.Import, origData, loc), + S.Label => Stmt.comptime_alloc("s_label", S.Label, origData, loc), + S.Local => Stmt.comptime_alloc("s_local", S.Local, origData, loc), + S.Namespace => Stmt.comptime_alloc("s_namespace", S.Namespace, origData, loc), + S.Return => Stmt.comptime_alloc("s_return", S.Return, origData, loc), + S.Switch => Stmt.comptime_alloc("s_switch", S.Switch, origData, loc), + S.Throw => Stmt.comptime_alloc("s_throw", S.Throw, origData, loc), + S.Try => Stmt.comptime_alloc("s_try", S.Try, origData, loc), + S.TypeScript => Stmt{ .loc = loc, .data = Data{ .s_type_script = S.TypeScript{} } }, + S.While => Stmt.comptime_alloc("s_while", S.While, origData, loc), + S.With => Stmt.comptime_alloc("s_with", S.With, origData, 
loc), + else => @compileError("Invalid type in Stmt.init"), + }; +} + +pub const Disabler = bun.DebugOnlyDisabler(@This()); + +/// When the lifetime of an Stmt.Data's pointer must exist longer than reset() is called, use this function. +/// Be careful to free the memory (or use an allocator that does it for you) +/// Also, prefer Stmt.init or Stmt.alloc when possible. This will be slower. +pub fn allocate(allocator: std.mem.Allocator, comptime StatementData: type, origData: StatementData, loc: logger.Loc) Stmt { + Stmt.Data.Store.assert(); + + icount += 1; + return switch (StatementData) { + S.Block => Stmt.allocateData(allocator, "s_block", S.Block, origData, loc), + S.Break => Stmt.allocateData(allocator, "s_break", S.Break, origData, loc), + S.Class => Stmt.allocateData(allocator, "s_class", S.Class, origData, loc), + S.Comment => Stmt.allocateData(allocator, "s_comment", S.Comment, origData, loc), + S.Continue => Stmt.allocateData(allocator, "s_continue", S.Continue, origData, loc), + S.Debugger => Stmt{ .loc = loc, .data = .{ .s_debugger = origData } }, + S.Directive => Stmt.allocateData(allocator, "s_directive", S.Directive, origData, loc), + S.DoWhile => Stmt.allocateData(allocator, "s_do_while", S.DoWhile, origData, loc), + S.Empty => Stmt{ .loc = loc, .data = Data{ .s_empty = S.Empty{} } }, + S.Enum => Stmt.allocateData(allocator, "s_enum", S.Enum, origData, loc), + S.ExportClause => Stmt.allocateData(allocator, "s_export_clause", S.ExportClause, origData, loc), + S.ExportDefault => Stmt.allocateData(allocator, "s_export_default", S.ExportDefault, origData, loc), + S.ExportEquals => Stmt.allocateData(allocator, "s_export_equals", S.ExportEquals, origData, loc), + S.ExportFrom => Stmt.allocateData(allocator, "s_export_from", S.ExportFrom, origData, loc), + S.ExportStar => Stmt.allocateData(allocator, "s_export_star", S.ExportStar, origData, loc), + S.SExpr => Stmt.allocateData(allocator, "s_expr", S.SExpr, origData, loc), + S.ForIn => Stmt.allocateData(allocator, "s_for_in", S.ForIn, origData, loc), + S.ForOf => Stmt.allocateData(allocator, "s_for_of", S.ForOf, origData, loc), + S.For => Stmt.allocateData(allocator, "s_for", S.For, origData, loc), + S.Function => Stmt.allocateData(allocator, "s_function", S.Function, origData, loc), + S.If => Stmt.allocateData(allocator, "s_if", S.If, origData, loc), + S.Import => Stmt.allocateData(allocator, "s_import", S.Import, origData, loc), + S.Label => Stmt.allocateData(allocator, "s_label", S.Label, origData, loc), + S.Local => Stmt.allocateData(allocator, "s_local", S.Local, origData, loc), + S.Namespace => Stmt.allocateData(allocator, "s_namespace", S.Namespace, origData, loc), + S.Return => Stmt.allocateData(allocator, "s_return", S.Return, origData, loc), + S.Switch => Stmt.allocateData(allocator, "s_switch", S.Switch, origData, loc), + S.Throw => Stmt.allocateData(allocator, "s_throw", S.Throw, origData, loc), + S.Try => Stmt.allocateData(allocator, "s_try", S.Try, origData, loc), + S.TypeScript => Stmt{ .loc = loc, .data = Data{ .s_type_script = S.TypeScript{} } }, + S.While => Stmt.allocateData(allocator, "s_while", S.While, origData, loc), + S.With => Stmt.allocateData(allocator, "s_with", S.With, origData, loc), + else => @compileError("Invalid type in Stmt.init"), + }; +} + +pub fn allocateExpr(allocator: std.mem.Allocator, expr: Expr) Stmt { + return Stmt.allocate(allocator, S.SExpr, S.SExpr{ .value = expr }, expr.loc); +} + +pub const Tag = enum { + s_block, + s_break, + s_class, + s_comment, + s_continue, + s_directive, + 
s_do_while,
+    s_enum,
+    s_export_clause,
+    s_export_default,
+    s_export_equals,
+    s_export_from,
+    s_export_star,
+    s_expr,
+    s_for_in,
+    s_for_of,
+    s_for,
+    s_function,
+    s_if,
+    s_import,
+    s_label,
+    s_local,
+    s_namespace,
+    s_return,
+    s_switch,
+    s_throw,
+    s_try,
+    s_while,
+    s_with,
+    s_type_script,
+    s_empty,
+    s_debugger,
+    s_lazy_export,
+
+    pub fn jsonStringify(self: @This(), writer: anytype) !void {
+        return try writer.write(@tagName(self));
+    }
+
+    pub fn isExportLike(tag: Tag) bool {
+        return switch (tag) {
+            .s_export_clause, .s_export_default, .s_export_equals, .s_export_from, .s_export_star, .s_empty => true,
+            else => false,
+        };
+    }
+};
+
+pub const Data = union(Tag) {
+    s_block: *S.Block,
+    s_break: *S.Break,
+    s_class: *S.Class,
+    s_comment: *S.Comment,
+    s_continue: *S.Continue,
+    s_directive: *S.Directive,
+    s_do_while: *S.DoWhile,
+    s_enum: *S.Enum,
+    s_export_clause: *S.ExportClause,
+    s_export_default: *S.ExportDefault,
+    s_export_equals: *S.ExportEquals,
+    s_export_from: *S.ExportFrom,
+    s_export_star: *S.ExportStar,
+    s_expr: *S.SExpr,
+    s_for_in: *S.ForIn,
+    s_for_of: *S.ForOf,
+    s_for: *S.For,
+    s_function: *S.Function,
+    s_if: *S.If,
+    s_import: *S.Import,
+    s_label: *S.Label,
+    s_local: *S.Local,
+    s_namespace: *S.Namespace,
+    s_return: *S.Return,
+    s_switch: *S.Switch,
+    s_throw: *S.Throw,
+    s_try: *S.Try,
+    s_while: *S.While,
+    s_with: *S.With,
+
+    s_type_script: S.TypeScript,
+    s_empty: S.Empty, // special case, it's a zero-value type
+    s_debugger: S.Debugger,
+
+    s_lazy_export: *Expr.Data,
+
+    comptime {
+        if (@sizeOf(Stmt) > 24) {
+            @compileLog("Expected Stmt to be <= 24 bytes, but it is", @sizeOf(Stmt), " bytes");
+        }
+    }
+
+    pub const Store = struct {
+        const StoreType = NewStore(&.{
+            S.Block,
+            S.Break,
+            S.Class,
+            S.Comment,
+            S.Continue,
+            S.Directive,
+            S.DoWhile,
+            S.Enum,
+            S.ExportClause,
+            S.ExportDefault,
+            S.ExportEquals,
+            S.ExportFrom,
+            S.ExportStar,
+            S.SExpr,
+            S.ForIn,
+            S.ForOf,
+            S.For,
+            S.Function,
+            S.If,
+            S.Import,
+            S.Label,
+            S.Local,
+            S.Namespace,
+            S.Return,
+            S.Switch,
+            S.Throw,
+            S.Try,
+            S.While,
+            S.With,
+        }, 128);
+
+        pub threadlocal var instance: ?*StoreType = null;
+        pub threadlocal var memory_allocator: ?*ASTMemoryAllocator = null;
+        pub threadlocal var disable_reset = false;
+
+        pub fn create() void {
+            if (instance != null or memory_allocator != null) {
+                return;
+            }
+
+            instance = StoreType.init();
+        }
+
+        /// create || reset
+        pub fn begin() void {
+            if (memory_allocator != null) return;
+            if (instance == null) {
+                create();
+                return;
+            }
+
+            if (!disable_reset)
+                instance.?.reset();
+        }
+
+        pub fn reset() void {
+            if (disable_reset or memory_allocator != null) return;
+            instance.?.reset();
+        }
+
+        pub fn deinit() void {
+            if (instance == null or memory_allocator != null) return;
+            instance.?.deinit();
+            instance = null;
+        }
+
+        pub inline fn assert() void {
+            if (comptime Environment.allow_assert) {
+                if (instance == null and memory_allocator == null)
+                    bun.unreachablePanic("Store must be init'd", .{});
+            }
+        }
+
+        pub fn append(comptime T: type, value: T) *T {
+            if (memory_allocator) |allocator| {
+                return allocator.append(T, value);
+            }
+
+            Disabler.assert();
+            return instance.?.append(T, value);
+        }
+    };
+};
+
+pub fn StoredData(tag: Tag) type {
+    const T = @FieldType(Data, @tagName(tag));
+    return switch (@typeInfo(T)) {
+        .pointer => |ptr| ptr.child,
+        else => T,
+    };
+}
+
+pub fn caresAboutScope(self: *Stmt) bool {
+    return switch (self.data) {
+        .s_block, .s_empty, .s_debugger, .s_expr, .s_if, .s_for, 
.s_for_in, .s_for_of, .s_do_while, .s_while, .s_with, .s_try, .s_switch, .s_return, .s_throw, .s_break, .s_continue, .s_directive => { + return false; + }, + + .s_local => |local| { + return local.kind != .k_var; + }, + else => { + return true; + }, + }; +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const logger = bun.logger; +const string = bun.string; + +const js_ast = bun.js_ast; +const ASTMemoryAllocator = js_ast.ASTMemoryAllocator; +const Expr = js_ast.Expr; +const NewBatcher = js_ast.NewBatcher; +const NewStore = js_ast.NewStore; +const S = js_ast.S; +const Stmt = js_ast.Stmt; diff --git a/src/ast/Symbol.zig b/src/ast/Symbol.zig new file mode 100644 index 0000000000..e2a1df344d --- /dev/null +++ b/src/ast/Symbol.zig @@ -0,0 +1,489 @@ +/// This is the name that came from the parser. Printed names may be renamed +/// during minification or to avoid name collisions. Do not use the original +/// name during printing. +original_name: []const u8, + +/// This is used for symbols that represent items in the import clause of an +/// ES6 import statement. These should always be referenced by EImportIdentifier +/// instead of an EIdentifier. When this is present, the expression should +/// be printed as a property access off the namespace instead of as a bare +/// identifier. +/// +/// For correctness, this must be stored on the symbol instead of indirectly +/// associated with the Ref for the symbol somehow. In ES6 "flat bundling" +/// mode, re-exported symbols are collapsed using MergeSymbols() and renamed +/// symbols from other files that end up at this symbol must be able to tell +/// if it has a namespace alias. +namespace_alias: ?G.NamespaceAlias = null, + +/// Used by the parser for single pass parsing. +link: Ref = Ref.None, + +/// An estimate of the number of uses of this symbol. This is used to detect +/// whether a symbol is used or not. For example, TypeScript imports that are +/// unused must be removed because they are probably type-only imports. This +/// is an estimate and may not be completely accurate due to oversights in the +/// code. But it should always be non-zero when the symbol is used. +use_count_estimate: u32 = 0, + +/// This is for generating cross-chunk imports and exports for code splitting. +/// +/// Do not use this directly. Use `chunkIndex()` instead. +chunk_index: u32 = invalid_chunk_index, + +/// This is used for minification. Symbols that are declared in sibling scopes +/// can share a name. A good heuristic (from Google Closure Compiler) is to +/// assign names to symbols from sibling scopes in declaration order. That way +/// local variable names are reused in each global function like this, which +/// improves gzip compression: +/// +/// function x(a, b) { ... } +/// function y(a, b, c) { ... } +/// +/// The parser fills this in for symbols inside nested scopes. There are three +/// slot namespaces: regular symbols, label symbols, and private symbols. +/// +/// Do not use this directly. Use `nestedScopeSlot()` instead. +nested_scope_slot: u32 = invalid_nested_scope_slot, + +did_keep_name: bool = true, + +must_start_with_capital_letter_for_jsx: bool = false, + +/// The kind of symbol. This is used to determine how to print the symbol +/// and how to deal with conflicts, renaming, etc. +kind: Kind = Kind.other, + +/// Certain symbols must not be renamed or minified. For example, the +/// "arguments" variable is declared by the runtime for every function. 
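+/// For example, in `function f() { return arguments.length }`, renaming
+/// `arguments` would leave the body referencing a binding that is never
+/// declared in source, so the symbol must keep its original name.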
+/// Renaming can also break any identifier used inside a "with" statement. +must_not_be_renamed: bool = false, + +/// We automatically generate import items for property accesses off of +/// namespace imports. This lets us remove the expensive namespace imports +/// while bundling in many cases, replacing them with a cheap import item +/// instead: +/// +/// import * as ns from 'path' +/// ns.foo() +/// +/// That can often be replaced by this, which avoids needing the namespace: +/// +/// import {foo} from 'path' +/// foo() +/// +/// However, if the import is actually missing then we don't want to report a +/// compile-time error like we do for real import items. This status lets us +/// avoid this. We also need to be able to replace such import items with +/// undefined, which this status is also used for. +import_item_status: ImportItemStatus = ImportItemStatus.none, + +/// --- Not actually used yet ----------------------------------------------- +/// Sometimes we lower private symbols even if they are supported. For example, +/// consider the following TypeScript code: +/// +/// class Foo { +/// #foo = 123 +/// bar = this.#foo +/// } +/// +/// If "useDefineForClassFields: false" is set in "tsconfig.json", then "bar" +/// must use assignment semantics instead of define semantics. We can compile +/// that to this code: +/// +/// class Foo { +/// constructor() { +/// this.#foo = 123; +/// this.bar = this.#foo; +/// } +/// #foo; +/// } +/// +/// However, we can't do the same for static fields: +/// +/// class Foo { +/// static #foo = 123 +/// static bar = this.#foo +/// } +/// +/// Compiling these static fields to something like this would be invalid: +/// +/// class Foo { +/// static #foo; +/// } +/// Foo.#foo = 123; +/// Foo.bar = Foo.#foo; +/// +/// Thus "#foo" must be lowered even though it's supported. Another case is +/// when we're converting top-level class declarations to class expressions +/// to avoid the TDZ and the class shadowing symbol is referenced within the +/// class body: +/// +/// class Foo { +/// static #foo = Foo +/// } +/// +/// This cannot be converted into something like this: +/// +/// var Foo = class { +/// static #foo; +/// }; +/// Foo.#foo = Foo; +/// +/// --- Not actually used yet ----------------------------------------------- +private_symbol_must_be_lowered: bool = false, + +remove_overwritten_function_declaration: bool = false, + +/// Used in HMR to decide when live binding code is needed. +has_been_assigned_to: bool = false, + +comptime { + bun.assert_eql(@sizeOf(Symbol), 88); + bun.assert_eql(@alignOf(Symbol), @alignOf([]const u8)); +} + +const invalid_chunk_index = std.math.maxInt(u32); +pub const invalid_nested_scope_slot = std.math.maxInt(u32); + +pub const SlotNamespace = enum { + must_not_be_renamed, + default, + label, + private_name, + mangled_prop, + + pub const CountsArray = std.EnumArray(SlotNamespace, u32); +}; + +/// This is for generating cross-chunk imports and exports for code splitting. 
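+///
+/// For example (using the declaration below):
+///
+///     if (symbol.chunkIndex()) |idx| {
+///         // `idx` was set earlier through Map.assignChunkIndex
+///     } else {
+///         // still `invalid_chunk_index`; no chunk owns this symbol yet
+///     }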
+pub inline fn chunkIndex(this: *const Symbol) ?u32 { + const i = this.chunk_index; + return if (i == invalid_chunk_index) null else i; +} + +pub inline fn nestedScopeSlot(this: *const Symbol) ?u32 { + const i = this.nested_scope_slot; + return if (i == invalid_nested_scope_slot) null else i; +} + +pub fn slotNamespace(this: *const Symbol) SlotNamespace { + const kind = this.kind; + + if (kind == .unbound or this.must_not_be_renamed) { + return .must_not_be_renamed; + } + + if (kind.isPrivate()) { + return .private_name; + } + + return switch (kind) { + // .mangled_prop => .mangled_prop, + .label => .label, + else => .default, + }; +} + +pub inline fn hasLink(this: *const Symbol) bool { + return this.link.tag != .invalid; +} + +pub const Kind = enum { + /// An unbound symbol is one that isn't declared in the file it's referenced + /// in. For example, using "window" without declaring it will be unbound. + unbound, + + /// This has special merging behavior. You're allowed to re-declare these + /// symbols more than once in the same scope. These symbols are also hoisted + /// out of the scope they are declared in to the closest containing function + /// or module scope. These are the symbols with this kind: + /// + /// - Function arguments + /// - Function statements + /// - Variables declared using "var" + hoisted, + hoisted_function, + + /// There's a weird special case where catch variables declared using a simple + /// identifier (i.e. not a binding pattern) block hoisted variables instead of + /// becoming an error: + /// + /// var e = 0; + /// try { throw 1 } catch (e) { + /// print(e) // 1 + /// var e = 2 + /// print(e) // 2 + /// } + /// print(e) // 0 (since the hoisting stops at the catch block boundary) + /// + /// However, other forms are still a syntax error: + /// + /// try {} catch (e) { let e } + /// try {} catch ({e}) { var e } + /// + /// This symbol is for handling this weird special case. + catch_identifier, + + /// Generator and async functions are not hoisted, but still have special + /// properties such as being able to overwrite previous functions with the + /// same name + generator_or_async_function, + + /// This is the special "arguments" variable inside functions + arguments, + + /// Classes can merge with TypeScript namespaces. + class, + + /// A class-private identifier (i.e. "#foo"). + private_field, + private_method, + private_get, + private_set, + private_get_set_pair, + private_static_field, + private_static_method, + private_static_get, + private_static_set, + private_static_get_set_pair, + + /// Labels are in their own namespace + label, + + /// TypeScript enums can merge with TypeScript namespaces and other TypeScript + /// enums. + ts_enum, + + /// TypeScript namespaces can merge with classes, functions, TypeScript enums, + /// and other TypeScript namespaces. + ts_namespace, + + /// In TypeScript, imports are allowed to silently collide with symbols within + /// the module. Presumably this is because the imports may be type-only. + /// Import statement namespace references should NOT have this set. + import, + + /// Assigning to a "const" symbol will throw a TypeError at runtime + constant, + + // CSS identifiers that are renamed to be unique to the file they are in + local_css, + + /// This annotates all other symbols that don't have special behavior. 
+    other,
+
+    pub fn jsonStringify(self: @This(), writer: anytype) !void {
+        return try writer.write(@tagName(self));
+    }
+
+    pub inline fn isPrivate(kind: Symbol.Kind) bool {
+        return @intFromEnum(kind) >= @intFromEnum(Symbol.Kind.private_field) and @intFromEnum(kind) <= @intFromEnum(Symbol.Kind.private_static_get_set_pair);
+    }
+
+    pub inline fn isHoisted(kind: Symbol.Kind) bool {
+        return switch (kind) {
+            .hoisted, .hoisted_function => true,
+            else => false,
+        };
+    }
+
+    pub inline fn isHoistedOrFunction(kind: Symbol.Kind) bool {
+        return switch (kind) {
+            .hoisted, .hoisted_function, .generator_or_async_function => true,
+            else => false,
+        };
+    }
+
+    pub inline fn isFunction(kind: Symbol.Kind) bool {
+        return switch (kind) {
+            .hoisted_function, .generator_or_async_function => true,
+            else => false,
+        };
+    }
+};
+
+pub const Use = struct {
+    count_estimate: u32 = 0,
+};
+
+pub const List = BabyList(Symbol);
+pub const NestedList = BabyList(List);
+
+pub fn mergeContentsWith(this: *Symbol, old: *Symbol) void {
+    this.use_count_estimate += old.use_count_estimate;
+    if (old.must_not_be_renamed) {
+        this.original_name = old.original_name;
+        this.must_not_be_renamed = true;
+    }
+
+    // TODO: MustStartWithCapitalLetterForJSX
+}
+
+pub const Map = struct {
+    // This could be represented as a "map[Ref]Symbol" but a two-level array was
+    // more efficient in profiles. This appears to be because it doesn't involve
+    // a hash. This representation also makes it trivial to quickly merge symbol
+    // maps from multiple files together. Each file only generates symbols in a
+    // single inner array, so you can join the maps together by just making a
+    // single outer array containing all of the inner arrays. See the comment on
+    // "Ref" for more detail.
+    symbols_for_source: NestedList = .{},
+
+    pub fn dump(this: Map) void {
+        defer Output.flush();
+        for (this.symbols_for_source.slice(), 0..) |symbols, i| {
+            Output.prettyln("\n\n-- Source ID: {d} ({d} symbols) --\n\n", .{ i, symbols.len });
+            for (symbols.slice(), 0..)
|symbol, inner_index| { + Output.prettyln( + " name: {s}\n tag: {s}\n {any}\n", + .{ + symbol.original_name, @tagName(symbol.kind), + if (symbol.hasLink()) symbol.link else Ref{ + .source_index = @truncate(i), + .inner_index = @truncate(inner_index), + .tag = .symbol, + }, + }, + ); + } + } + } + + pub fn assignChunkIndex(this: *Map, decls_: DeclaredSymbol.List, chunk_index: u32) void { + const Iterator = struct { + map: *Map, + chunk_index: u32, + + pub fn next(self: @This(), ref: Ref) void { + var symbol = self.map.get(ref).?; + symbol.chunk_index = self.chunk_index; + } + }; + var decls = decls_; + + DeclaredSymbol.forEachTopLevelSymbol(&decls, Iterator{ .map = this, .chunk_index = chunk_index }, Iterator.next); + } + + pub fn merge(this: *Map, old: Ref, new: Ref) Ref { + if (old.eql(new)) { + return new; + } + + var old_symbol = this.get(old).?; + if (old_symbol.hasLink()) { + const old_link = old_symbol.link; + old_symbol.link = this.merge(old_link, new); + return old_symbol.link; + } + + var new_symbol = this.get(new).?; + + if (new_symbol.hasLink()) { + const new_link = new_symbol.link; + new_symbol.link = this.merge(old, new_link); + return new_symbol.link; + } + + old_symbol.link = new; + new_symbol.mergeContentsWith(old_symbol); + return new; + } + + pub fn get(self: *const Map, ref: Ref) ?*Symbol { + if (Ref.isSourceIndexNull(ref.sourceIndex()) or ref.isSourceContentsSlice()) { + return null; + } + + return self.symbols_for_source.at(ref.sourceIndex()).mut(ref.innerIndex()); + } + + pub fn getConst(self: *const Map, ref: Ref) ?*const Symbol { + if (Ref.isSourceIndexNull(ref.sourceIndex()) or ref.isSourceContentsSlice()) { + return null; + } + + return self.symbols_for_source.at(ref.sourceIndex()).at(ref.innerIndex()); + } + + pub fn init(sourceCount: usize, allocator: std.mem.Allocator) !Map { + const symbols_for_source: NestedList = NestedList.init(try allocator.alloc([]Symbol, sourceCount)); + return Map{ .symbols_for_source = symbols_for_source }; + } + + pub fn initWithOneList(list: List) Map { + const baby_list = BabyList(List).init((&list)[0..1]); + return initList(baby_list); + } + + pub fn initList(list: NestedList) Map { + return Map{ .symbols_for_source = list }; + } + + pub fn getWithLink(symbols: *const Map, ref: Ref) ?*Symbol { + var symbol: *Symbol = symbols.get(ref) orelse return null; + if (symbol.hasLink()) { + return symbols.get(symbol.link) orelse symbol; + } + return symbol; + } + + pub fn getWithLinkConst(symbols: *Map, ref: Ref) ?*const Symbol { + var symbol: *const Symbol = symbols.getConst(ref) orelse return null; + if (symbol.hasLink()) { + return symbols.getConst(symbol.link) orelse symbol; + } + return symbol; + } + + pub fn followAll(symbols: *Map) void { + const trace = bun.perf.trace("Symbols.followAll"); + defer trace.end(); + for (symbols.symbols_for_source.slice()) |list| { + for (list.slice()) |*symbol| { + if (!symbol.hasLink()) continue; + symbol.link = follow(symbols, symbol.link); + } + } + } + + /// Equivalent to followSymbols in esbuild + pub fn follow(symbols: *const Map, ref: Ref) Ref { + var symbol = symbols.get(ref) orelse return ref; + if (!symbol.hasLink()) { + return ref; + } + + const link = follow(symbols, symbol.link); + + if (!symbol.link.eql(link)) { + symbol.link = link; + } + + return link; + } +}; + +pub inline fn isHoisted(self: *const Symbol) bool { + return Symbol.isKindHoisted(self.kind); +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const BabyList = bun.BabyList; +const Output = 
bun.Output; + +const js_ast = bun.js_ast; +const DeclaredSymbol = js_ast.DeclaredSymbol; +const G = js_ast.G; +const ImportItemStatus = js_ast.ImportItemStatus; +const Ref = js_ast.Ref; +const Symbol = js_ast.Symbol; + +pub const isKindFunction = Symbol.Kind.isFunction; +pub const isKindHoisted = Symbol.Kind.isHoisted; +pub const isKindHoistedOrFunction = Symbol.Kind.isHoistedOrFunction; +pub const isKindPrivate = Symbol.Kind.isPrivate; diff --git a/src/ast/TS.zig b/src/ast/TS.zig new file mode 100644 index 0000000000..2e91b2fbc1 --- /dev/null +++ b/src/ast/TS.zig @@ -0,0 +1,141 @@ +/// This is for TypeScript "enum" and "namespace" blocks. Each block can +/// potentially be instantiated multiple times. The exported members of each +/// block are merged into a single namespace while the non-exported code is +/// still scoped to just within that block: +/// +/// let x = 1; +/// namespace Foo { +/// let x = 2; +/// export let y = 3; +/// } +/// namespace Foo { +/// console.log(x); // 1 +/// console.log(y); // 3 +/// } +/// +/// Doing this also works inside an enum: +/// +/// enum Foo { +/// A = 3, +/// B = A + 1, +/// } +/// enum Foo { +/// C = A + 2, +/// } +/// console.log(Foo.B) // 4 +/// console.log(Foo.C) // 5 +/// +/// This is a form of identifier lookup that works differently than the +/// hierarchical scope-based identifier lookup in JavaScript. Lookup now needs +/// to search sibling scopes in addition to parent scopes. This is accomplished +/// by sharing the map of exported members between all matching sibling scopes. +pub const TSNamespaceScope = struct { + /// This is specific to this namespace block. It's the argument of the + /// immediately-invoked function expression that the namespace block is + /// compiled into: + /// + /// var ns; + /// (function (ns2) { + /// ns2.x = 123; + /// })(ns || (ns = {})); + /// + /// This variable is "ns2" in the above example. It's the symbol to use when + /// generating property accesses off of this namespace when it's in scope. + arg_ref: Ref, + + /// This is shared between all sibling namespace blocks + exported_members: *TSNamespaceMemberMap, + + /// This is a lazily-generated map of identifiers that actually represent + /// property accesses to this namespace's properties. For example: + /// + /// namespace x { + /// export let y = 123 + /// } + /// namespace x { + /// export let z = y + /// } + /// + /// This should be compiled into the following code: + /// + /// var x; + /// (function(x2) { + /// x2.y = 123; + /// })(x || (x = {})); + /// (function(x3) { + /// x3.z = x3.y; + /// })(x || (x = {})); + /// + /// When we try to find the symbol "y", we instead return one of these lazily + /// generated proxy symbols that represent the property access "x3.y". This + /// map is unique per namespace block because "x3" is the argument symbol that + /// is specific to that particular namespace block. + property_accesses: bun.StringArrayHashMapUnmanaged(Ref) = .{}, + + /// Even though enums are like namespaces and both enums and namespaces allow + /// implicit references to properties of sibling scopes, they behave like + /// separate, er, namespaces. Implicit references only work namespace-to- + /// namespace and enum-to-enum. They do not work enum-to-namespace. And I'm + /// not sure what's supposed to happen for the namespace-to-enum case because + /// the compiler crashes: https://github.com/microsoft/TypeScript/issues/46891. 
+ /// So basically these both work: + /// + /// enum a { b = 1 } + /// enum a { c = b } + /// + /// namespace x { export let y = 1 } + /// namespace x { export let z = y } + /// + /// This doesn't work: + /// + /// enum a { b = 1 } + /// namespace a { export let c = b } + /// + /// And this crashes the TypeScript compiler: + /// + /// namespace a { export let b = 1 } + /// enum a { c = b } + /// + /// Therefore we only allow enum/enum and namespace/namespace interactions. + is_enum_scope: bool, +}; + +pub const TSNamespaceMemberMap = bun.StringArrayHashMapUnmanaged(TSNamespaceMember); + +pub const TSNamespaceMember = struct { + loc: logger.Loc, + data: Data, + + pub const Data = union(enum) { + /// "namespace ns { export let it }" + property, + /// "namespace ns { export namespace it {} }" + namespace: *TSNamespaceMemberMap, + /// "enum ns { it }" + enum_number: f64, + /// "enum ns { it = 'it' }" + enum_string: *E.String, + /// "enum ns { it = something() }" + enum_property: void, + + pub fn isEnum(data: Data) bool { + return switch (data) { + inline else => |_, tag| comptime std.mem.startsWith(u8, @tagName(tag), "enum_"), + }; + } + }; +}; + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const logger = bun.logger; + +const js_ast = bun.js_ast; +const E = js_ast.E; +const Ref = js_ast.Ref; + +const G = js_ast.G; +pub const Class = G.Class; diff --git a/src/ast/UseDirective.zig b/src/ast/UseDirective.zig new file mode 100644 index 0000000000..03a7cfe527 --- /dev/null +++ b/src/ast/UseDirective.zig @@ -0,0 +1,66 @@ +pub const UseDirective = enum(u2) { + // TODO: Remove this, and provide `UseDirective.Optional` instead + none, + /// "use client" + client, + /// "use server" + server, + + pub const Boundering = enum(u2) { + client = @intFromEnum(UseDirective.client), + server = @intFromEnum(UseDirective.server), + }; + + pub const Flags = struct { + has_any_client: bool = false, + }; + + pub fn isBoundary(this: UseDirective, other: UseDirective) bool { + if (this == other or other == .none) + return false; + + return true; + } + + pub fn boundering(this: UseDirective, other: UseDirective) ?Boundering { + if (this == other or other == .none) + return null; + return @enumFromInt(@intFromEnum(other)); + } + + pub fn parse(contents: []const u8) ?UseDirective { + const truncated = std.mem.trimLeft(u8, contents, " \t\n\r;"); + + if (truncated.len < "'use client';".len) + return .none; + + const directive_string = truncated[0.."'use client';".len].*; + + const first_quote = directive_string[0]; + const last_quote = directive_string[directive_string.len - 2]; + if (first_quote != last_quote or (first_quote != '"' and first_quote != '\'' and first_quote != '`')) + return .none; + + const unquoted = directive_string[1 .. 
directive_string.len - 2]; + + if (strings.eqlComptime(unquoted, "use client")) { + return .client; + } + + if (strings.eqlComptime(unquoted, "use server")) { + return .server; + } + + return null; + } +}; + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const strings = bun.strings; + +const js_ast = bun.js_ast; +const Flags = js_ast.Flags; diff --git a/src/baby_list.zig b/src/baby_list.zig index 4e95e7d9f2..539c548a47 100644 --- a/src/baby_list.zig +++ b/src/baby_list.zig @@ -389,7 +389,18 @@ pub fn BabyList(comptime Type: type) type { const orig_len = list_.items.len; const slice_ = list_.items.ptr[orig_len..list_.capacity]; - const result = strings.copyUTF16IntoUTF8WithBuffer(slice_, []const u16, remain, trimmed, out_len, true); + const result = strings.copyUTF16IntoUTF8WithBufferImpl( + slice_, + []const u16, + remain, + trimmed, + out_len, + // FIXME: Unclear whether or not we should allow + // incomplete UTF-8 sequences. If you are solving a bug + // with invalid UTF-8 sequences, this may be the + // culprit... + true, + ); remain = remain[result.read..]; list_.items.len += @as(usize, result.written); if (result.read == 0 or result.written == 0) break; @@ -406,6 +417,10 @@ pub fn BabyList(comptime Type: type) type { @as([*]align(1) Int, @ptrCast(this.ptr[this.len .. this.len + @sizeOf(Int)]))[0] = int; this.len += @sizeOf(Int); } + + pub fn memoryCost(self: *const @This()) usize { + return self.cap; + } }; } diff --git a/src/bake/BakeGlobalObject.cpp b/src/bake/BakeGlobalObject.cpp index 33f44c737b..f7ce8b326e 100644 --- a/src/bake/BakeGlobalObject.cpp +++ b/src/bake/BakeGlobalObject.cpp @@ -46,6 +46,7 @@ bakeModuleLoaderImportModule(JSC::JSGlobalObject* global, JSC::jsUndefined(), parameters, JSC::jsUndefined()); } + // TODO: make static cast instead of jscast // Use Zig::GlobalObject's function return jsCast(global)->moduleLoaderImportModule(global, moduleLoader, moduleNameValue, parameters, sourceOrigin); } @@ -72,6 +73,18 @@ JSC::Identifier bakeModuleLoaderResolve(JSC::JSGlobalObject* jsGlobal, } } + if (auto string = jsDynamicCast(key)) { + auto keyView = string->getString(global); + RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier); + + if (keyView.startsWith("bake:/"_s)) { + BunString result = BakeProdResolve(global, Bun::toString("bake:/"_s), Bun::toString(keyView.substringSharingImpl("bake:"_s.length()))); + RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier); + + return JSC::Identifier::fromString(vm, result.transferToWTFString()); + } + } + // Use Zig::GlobalObject's function return Zig::GlobalObject::moduleLoaderResolve(jsGlobal, loader, key, referrer, origin); } @@ -113,6 +126,7 @@ JSC::JSInternalPromise* bakeModuleLoaderFetch(JSC::JSGlobalObject* globalObject, if (source.tag != BunStringTag::Dead) { JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(moduleKey)); JSC::SourceCode sourceCode = JSC::SourceCode(Bake::SourceProvider::create( + globalObject, source.toWTFString(), origin, WTFMove(moduleKey), diff --git a/src/bake/BakeSourceProvider.cpp b/src/bake/BakeSourceProvider.cpp index 10e2175887..15d17d28ba 100644 --- a/src/bake/BakeSourceProvider.cpp +++ b/src/bake/BakeSourceProvider.cpp @@ -16,6 +16,12 @@ namespace Bake { + +extern "C" BunString BakeSourceProvider__getSourceSlice(SourceProvider* provider) +{ + return Bun::toStringView(provider->source()); +} + extern "C" JSC::EncodedJSValue BakeLoadInitialServerCode(GlobalObject* global, BunString source, bool separateSSRGraph) { auto& vm = JSC::getVM(global); 
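+    // `scope` (declared below) is what the RETURN_IF_EXCEPTION(scope, ...)
+    // checks in this file consult; returning `{}` hands back an empty
+    // EncodedJSValue when a JS exception is pending.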
auto scope = DECLARE_THROW_SCOPE(vm); @@ -23,6 +29,7 @@ extern "C" JSC::EncodedJSValue BakeLoadInitialServerCode(GlobalObject* global, B String string = "bake://server-runtime.js"_s; JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string)); JSC::SourceCode sourceCode = JSC::SourceCode(SourceProvider::create( + global, source.toWTFString(), origin, WTFMove(string), @@ -31,7 +38,7 @@ extern "C" JSC::EncodedJSValue BakeLoadInitialServerCode(GlobalObject* global, B )); JSC::JSValue fnValue = vm.interpreter.executeProgram(sourceCode, global, global); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); RELEASE_ASSERT(fnValue); @@ -56,6 +63,7 @@ extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(GlobalObject* global, BunS String string = "bake://server.patch.js"_s; JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string)); JSC::SourceCode sourceCode = JSC::SourceCode(SourceProvider::create( + global, source.toWTFString(), origin, WTFMove(string), @@ -64,7 +72,7 @@ extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(GlobalObject* global, BunS )); JSC::JSValue result = vm.interpreter.executeProgram(sourceCode, global, global); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); RELEASE_ASSERT(result); return JSC::JSValue::encode(result); @@ -119,6 +127,7 @@ extern "C" JSC::EncodedJSValue BakeRegisterProductionChunk(JSC::JSGlobalObject* JSC::JSString* key = JSC::jsString(vm, string); JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string)); JSC::SourceCode sourceCode = JSC::SourceCode(SourceProvider::create( + global, source.toWTFString(), origin, WTFMove(string), @@ -127,7 +136,7 @@ extern "C" JSC::EncodedJSValue BakeRegisterProductionChunk(JSC::JSGlobalObject* )); global->moduleLoader()->provideFetch(global, key, sourceCode); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); return JSC::JSValue::encode(key); } diff --git a/src/bake/BakeSourceProvider.h b/src/bake/BakeSourceProvider.h index 3a3706af85..7055fcedbe 100644 --- a/src/bake/BakeSourceProvider.h +++ b/src/bake/BakeSourceProvider.h @@ -6,33 +6,43 @@ namespace Bake { +class SourceProvider; + +extern "C" void Bun__addBakeSourceProviderSourceMap(void* bun_vm, SourceProvider* opaque_source_provider, BunString* specifier); + class SourceProvider final : public JSC::StringSourceProvider { public: static Ref create( - const String& source, - const JSC::SourceOrigin& sourceOrigin, - String&& sourceURL, - const TextPosition& startPosition, - JSC::SourceProviderSourceType sourceType - ) { - return adoptRef(*new SourceProvider(source, sourceOrigin, WTFMove(sourceURL), startPosition, sourceType)); + JSC::JSGlobalObject* globalObject, + const String& source, + const JSC::SourceOrigin& sourceOrigin, + String&& sourceURL, + const TextPosition& startPosition, + JSC::SourceProviderSourceType sourceType) + { + auto provider = adoptRef(*new SourceProvider(source, sourceOrigin, WTFMove(sourceURL), startPosition, sourceType)); + auto* zigGlobalObject = jsCast(globalObject); + auto specifier = Bun::toString(provider->sourceURL()); + Bun__addBakeSourceProviderSourceMap(zigGlobalObject->bunVM(), provider.ptr(), &specifier); + return provider; } private: - SourceProvider( - const String& source, - const JSC::SourceOrigin& sourceOrigin, - String&& sourceURL, - const TextPosition& startPosition, - JSC::SourceProviderSourceType sourceType - ) : StringSourceProvider( - source, - sourceOrigin, - JSC::SourceTaintedOrigin::Untainted, - 
WTFMove(sourceURL), - startPosition, - sourceType - ) {} + SourceProvider( + const String& source, + const JSC::SourceOrigin& sourceOrigin, + String&& sourceURL, + const TextPosition& startPosition, + JSC::SourceProviderSourceType sourceType) + : StringSourceProvider( + source, + sourceOrigin, + JSC::SourceTaintedOrigin::Untainted, + WTFMove(sourceURL), + startPosition, + sourceType) + { + } }; } // namespace Bake diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 78bedb78ba..71acfebac7 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -642,8 +642,8 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { errdefer types.deinit(allocator); for (options.framework.file_system_router_types, 0..) |fsr, i| { - const buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf); + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); const joined_root = bun.path.joinAbsStringBuf(dev.root, buf, &.{fsr.root}, .auto); const entry = dev.server_transpiler.resolver.readDirInfoIgnoreError(joined_root) orelse continue; @@ -1531,8 +1531,61 @@ fn onFrameworkRequestWithBundle( ) bun.JSError!void { const route_bundle = dev.routeBundlePtr(route_bundle_index); assert(route_bundle.data == .framework); + const bundle = &route_bundle.data.framework; + // Extract route params by re-matching the URL + var params: FrameworkRouter.MatchedParams = undefined; + const url_bunstr = switch (req) { + .stack => |r| bun.String{ + .tag = .ZigString, + .value = .{ .ZigString = bun.ZigString.fromUTF8(r.url()) }, + }, + .saved => |data| brk: { + const url = data.request.url; + url.ref(); + break :brk url; + }, + }; + defer url_bunstr.deref(); + const url = url_bunstr.toUTF8(bun.default_allocator); + defer url.deinit(); + + // Extract pathname from URL (remove protocol, host, query, hash) + const pathname = if (std.mem.indexOf(u8, url.byteSlice(), "://")) |proto_end| blk: { + const after_proto = url.byteSlice()[proto_end + 3 ..]; + if (std.mem.indexOfScalar(u8, after_proto, '/')) |path_start| { + const path_with_query = after_proto[path_start..]; + // Remove query string and hash + const end = bun.strings.indexOfAny(path_with_query, "?#") orelse path_with_query.len; + break :blk path_with_query[0..end]; + } + break :blk "/"; + } else url.byteSlice(); + + // Create params JSValue + // TODO: lazy structure caching since we are making these objects a lot + const params_js_value = if (dev.router.matchSlow(pathname, ¶ms)) |_| blk: { + const global = dev.vm.global; + const params_array = params.params.slice(); + + if (params_array.len == 0) { + break :blk JSValue.null; + } + + // Create a JavaScript object with params + const obj = JSValue.createEmptyObject(global, params_array.len); + for (params_array) |param| { + const key_str = bun.String.createUTF8(param.key); + defer key_str.deref(); + const value_str = bun.String.createUTF8(param.value); + defer value_str.deref(); + + obj.put(global, key_str, value_str.toJS(global)); + } + break :blk obj; + } else JSValue.null; + const server_request_callback = dev.server_fetch_function_callback.get() orelse unreachable; // did not initialize server code @@ -1542,7 +1595,7 @@ fn onFrameworkRequestWithBundle( req, resp, server_request_callback, - 4, + 5, .{ // routerTypeMain router_type.server_file_string.get() orelse str: { @@ -1565,14 +1618,14 @@ fn onFrameworkRequestWithBundle( const arr = try JSValue.createEmptyArray(global, n); route = dev.router.routePtr(bundle.route_index); var route_name = 
bun.String.createUTF8(dev.relativePath(keys[fromOpaqueFileId(.server, route.file_page.unwrap().?).get()])); - arr.putIndex(global, 0, route_name.transferToJS(global)); + try arr.putIndex(global, 0, route_name.transferToJS(global)); dev.releaseRelativePathBuf(); n = 1; while (true) { if (route.file_layout.unwrap()) |layout| { var layout_name = bun.String.createUTF8(dev.relativePath(keys[fromOpaqueFileId(.server, layout).get()])); defer dev.releaseRelativePathBuf(); - arr.putIndex(global, @intCast(n), layout_name.transferToJS(global)); + try arr.putIndex(global, @intCast(n), layout_name.transferToJS(global)); n += 1; } route = dev.router.routePtr(route.parent.unwrap() orelse break); @@ -1599,6 +1652,8 @@ fn onFrameworkRequestWithBundle( bundle.cached_css_file_array = .create(js, dev.vm.global); break :arr js; }, + // params + params_js_value, }, ); } @@ -2154,7 +2209,7 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.JSError!J }) catch unreachable; const str = bun.String.createUTF8(path); defer str.deref(); - arr.putIndex(dev.vm.global, @intCast(i), str.toJS(dev.vm.global)); + try arr.putIndex(dev.vm.global, @intCast(i), str.toJS(dev.vm.global)); } return arr; } @@ -2198,7 +2253,7 @@ fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *JSC.JSGlobalObjec const str = bun.String.createUTF8(dev.relativePath(names[item.get()])); defer dev.releaseRelativePathBuf(); defer str.deref(); - arr.putIndex(global, @intCast(i), str.toJS(global)); + try arr.putIndex(global, @intCast(i), str.toJS(global)); } return arr; } @@ -3083,12 +3138,8 @@ fn onRequest(dev: *DevServer, req: *Request, resp: anytype) void { return; } - if (DevServer.AnyResponse != @typeInfo(@TypeOf(resp)).pointer.child) { - unreachable; // mismatch between `is_ssl` with server and response types. optimize these checks out. - } - - if (dev.server.?.config.onRequest != .zero) { - dev.server.?.onRequest(req, resp); + if (dev.server.?.config().onRequest != .zero) { + dev.server.?.onRequest(req, AnyResponse.init(resp)); return; } @@ -5129,8 +5180,8 @@ pub fn IncrementalGraph(side: bake.Side) type { dev.relative_path_buf_lock.lock(); defer dev.relative_path_buf_lock.unlock(); - const buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf); + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); var file_paths = try ArrayListUnmanaged([]const u8).initCapacity(gpa, g.current_chunk_parts.items.len); errdefer file_paths.deinit(gpa); @@ -5413,8 +5464,8 @@ const DirectoryWatchStore = struct { => bun.debugAssert(false), } - const buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf); + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); const joined = bun.path.joinAbsStringBuf(bun.path.dirname(import_source, .auto), buf, &.{specifier}, .auto); const dir = bun.path.dirname(joined, .auto); @@ -5843,8 +5894,8 @@ pub const SerializedFailure = struct { // For debugging, it is helpful to be able to see bundles. 
fn dumpBundle(dump_dir: std.fs.Dir, graph: bake.Graph, rel_path: []const u8, chunk: []const u8, wrap: bool) !void { - const buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf); + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); const name = bun.path.joinAbsStringBuf("/", buf, &.{ @tagName(graph), rel_path, @@ -7597,8 +7648,8 @@ pub const SourceMapStore = struct { dev.relative_path_buf_lock.lock(); defer dev.relative_path_buf_lock.unlock(); - const buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf); + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); for (paths) |native_file_path| { try source_map_strings.appendSlice(","); @@ -7989,6 +8040,7 @@ pub const SourceMapStore = struct { null, @intCast(entry.paths.len), 0, // unused + .{}, )) { .fail => |fail| { Output.debugWarn("Failed to re-parse source map: {s}", .{fail.msg}); @@ -8148,7 +8200,7 @@ const ErrorReportRequest = struct { const result: *const SourceMapStore.GetResult = &(gop.value_ptr.* orelse continue); // When before the first generated line, remap to the HMR runtime - const generated_mappings = result.mappings.items(.generated); + const generated_mappings = result.mappings.generated(); if (frame.position.line.oneBased() < generated_mappings[1].lines) { frame.source_url = .init(runtime_name); // matches value in source map frame.position = .invalid; @@ -8156,8 +8208,7 @@ const ErrorReportRequest = struct { } // Remap the frame - const remapped = SourceMap.Mapping.find( - result.mappings, + const remapped = result.mappings.find( frame.position.line.oneBased(), frame.position.column.zeroBased(), ); diff --git a/src/bake/FrameworkRouter.zig b/src/bake/FrameworkRouter.zig index 4ff15e5f33..98b9074cb8 100644 --- a/src/bake/FrameworkRouter.zig +++ b/src/bake/FrameworkRouter.zig @@ -31,6 +31,25 @@ static_routes: StaticRouteMap, // TODO: no code to sort this data structure dynamic_routes: DynamicRouteMap, +/// Arena allocator for pattern strings. +/// +/// This should be passed into `EncodedPattern.initFromParts` or should be the +/// allocator used to allocate `StaticRoute.route_path`. +/// +/// Q: Why use this and not just free the strings for `EncodedPattern` and +/// `StaticRoute` manually? +/// +/// A: Inside `fr.insert(...)` we iterate over `EncodedPattern/StaticRoute`, +/// turning them into a bunch of `Route.Part`s, and we discard the original +/// `EncodePattern/StaticRoute` structure. +/// +/// In this process it's too easy to lose the original base pointer and +/// length of the entire allocation. So we'll just allocate everything in +/// this arena to ensure that everything gets freed. +/// +/// Thank you to `AllocationScope` for catching this! Hell yeah! 
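+///
+/// A sketch of the intended lifetime (matching the `scanInner` and `deinit`
+/// changes in this patch):
+///
+///     const alloc = fr.pattern_string_arena.allocator();
+///     const pattern = try EncodedPattern.initFromParts(parsed.parts, alloc);
+///     // ...no per-string frees; fr.deinit() drops the whole arena at once.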
+pattern_string_arena: bun.ArenaAllocator, + /// The above structure is optimized for incremental updates, but /// production has a different set of requirements: /// - Trivially serializable to a binary file (no pointers) @@ -128,6 +147,7 @@ pub fn initEmpty(root: []const u8, types: []Type, allocator: Allocator) !Framewo .routes = routes, .dynamic_routes = .{}, .static_routes = .{}, + .pattern_string_arena = bun.ArenaAllocator.init(allocator), }; } @@ -136,6 +156,7 @@ pub fn deinit(fr: *FrameworkRouter, allocator: Allocator) void { fr.static_routes.deinit(allocator); fr.dynamic_routes.deinit(allocator); allocator.free(fr.types); + fr.pattern_string_arena.deinit(); } pub fn memoryCost(fr: *FrameworkRouter) usize { @@ -1029,9 +1050,9 @@ fn scanInner( const result = switch (param_count > 0) { inline else => |has_dynamic_comptime| result: { const pattern = if (has_dynamic_comptime) - try EncodedPattern.initFromParts(parsed.parts, alloc) + try EncodedPattern.initFromParts(parsed.parts, fr.pattern_string_arena.allocator()) else static_route: { - const allocation = try bun.default_allocator.alloc(u8, static_total_len); + const allocation = try fr.pattern_string_arena.allocator().alloc(u8, static_total_len); var s = std.io.fixedBufferStream(allocation); for (parsed.parts) |part| switch (part) { @@ -1146,7 +1167,7 @@ pub const JSFrameworkRouter = struct { if (jsfr.stored_parse_errors.items.len > 0) { const arr = try JSValue.createEmptyArray(global, jsfr.stored_parse_errors.items.len); for (jsfr.stored_parse_errors.items, 0..) |*item, i| { - arr.putIndex( + try arr.putIndex( global, @intCast(i), global.createErrorInstance("Invalid route {}: {s}", .{ @@ -1218,7 +1239,7 @@ pub const JSFrameworkRouter = struct { next = route.first_child.unwrap(); var i: u32 = 0; while (next) |r| : (next = jsfr.router.routePtr(r).next_sibling.unwrap()) { - arr.putIndex(global, i, try routeToJson(jsfr, global, r, allocator)); + try arr.putIndex(global, i, try routeToJson(jsfr, global, r, allocator)); i += 1; } break :brk arr; diff --git a/src/bake/bake.zig b/src/bake/bake.zig index 39a665251b..4e82d397ec 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -104,13 +104,13 @@ pub const SplitBundlerOptions = struct { .ssr = .{}, }; - pub fn parsePluginArray(opts: *SplitBundlerOptions, plugin_array: JSValue, global: *JSC.JSGlobalObject) !void { + pub fn parsePluginArray(opts: *SplitBundlerOptions, plugin_array: JSValue, global: *JSC.JSGlobalObject) bun.JSError!void { const plugin = opts.plugin orelse Plugin.create(global, .bun); opts.plugin = plugin; const empty_object = JSValue.createEmptyObject(global, 0); - var iter = plugin_array.arrayIterator(global); - while (iter.next()) |plugin_config| { + var iter = try plugin_array.arrayIterator(global); + while (try iter.next()) |plugin_config| { if (!plugin_config.isObject()) { return global.throwInvalidArguments("Expected plugin to be an object", .{}); } @@ -290,7 +290,7 @@ pub const Framework = struct { import_source: []const u8 = "react-refresh/runtime", }; - pub const react_install_command = "bun i react@experimental react-dom@experimental react-server-dom-bun"; + pub const react_install_command = "bun i react@experimental react-dom@experimental react-server-dom-bun react-refresh@experimental"; pub fn addReactInstallCommandNote(log: *bun.logger.Log) !void { try log.addMsg(.{ @@ -359,7 +359,7 @@ pub const Framework = struct { refs: *StringRefList, bundler_options: *SplitBundlerOptions, arena: Allocator, - ) !Framework { + ) bun.JSError!Framework { if (opts.isString()) { 
const str = try opts.toBunString(global); defer str.deref(); @@ -446,13 +446,13 @@ pub const Framework = struct { const array = try opts.getArray(global, "builtInModules") orelse break :built_in_modules .{}; - const len = array.getLength(global); + const len = try array.getLength(global); var files: bun.StringArrayHashMapUnmanaged(BuiltInModule) = .{}; try files.ensureTotalCapacity(arena, len); - var it = array.arrayIterator(global); + var it = try array.arrayIterator(global); var i: usize = 0; - while (it.next()) |file| : (i += 1) { + while (try it.next()) |file| : (i += 1) { if (!file.isObject()) { return global.throwInvalidArguments("'builtInModules[{d}]' is not an object", .{i}); } @@ -477,16 +477,16 @@ pub const Framework = struct { const array: JSValue = try opts.getArray(global, "fileSystemRouterTypes") orelse { return global.throwInvalidArguments("Missing 'framework.fileSystemRouterTypes'", .{}); }; - const len = array.getLength(global); + const len = try array.getLength(global); if (len > 256) { return global.throwInvalidArguments("Framework can only define up to 256 file-system router types", .{}); } const file_system_router_types = try arena.alloc(FileSystemRouterType, len); - var it = array.arrayIterator(global); + var it = try array.arrayIterator(global); var i: usize = 0; errdefer for (file_system_router_types[0..i]) |*fsr| fsr.style.deinit(); - while (it.next()) |fsr_opts| : (i += 1) { + while (try it.next()) |fsr_opts| : (i += 1) { const root = try getOptionalString(fsr_opts, global, "root", refs, arena) orelse { return global.throwInvalidArguments("'fileSystemRouterTypes[{d}]' is missing 'root'", .{i}); }; @@ -511,10 +511,10 @@ pub const Framework = struct { break :exts &.{}; } } else if (exts_js.isArray()) { - var it_2 = exts_js.arrayIterator(global); + var it_2 = try exts_js.arrayIterator(global); var i_2: usize = 0; - const extensions = try arena.alloc([]const u8, exts_js.getLength(global)); - while (it_2.next()) |array_item| : (i_2 += 1) { + const extensions = try arena.alloc([]const u8, try exts_js.getLength(global)); + while (try it_2.next()) |array_item| : (i_2 += 1) { const slice = refs.track(try array_item.toSlice(global, arena)); if (bun.strings.eqlComptime(slice, "*")) return global.throwInvalidArguments("'extensions' cannot include \"*\" as an extension. Pass \"*\" instead of the array.", .{}); @@ -536,10 +536,10 @@ pub const Framework = struct { const ignore_dirs: []const []const u8 = if (try fsr_opts.get(global, "ignoreDirs")) |exts_js| exts: { if (exts_js.isArray()) { - var it_2 = array.arrayIterator(global); + var it_2 = try array.arrayIterator(global); var i_2: usize = 0; const dirs = try arena.alloc([]const u8, len); - while (it_2.next()) |array_item| : (i_2 += 1) { + while (try it_2.next()) |array_item| : (i_2 += 1) { dirs[i_2] = refs.track(try array_item.toSlice(global, arena)); } break :exts dirs; @@ -593,6 +593,37 @@ pub const Framework = struct { renderer: Graph, out: *bun.transpiler.Transpiler, bundler_options: *const BuildConfigSubset, + ) !void { + const source_map: bun.options.SourceMapOption = switch (mode) { + // Source maps must always be external, as DevServer special cases + // the linking and part of the generation of these. It also relies + // on source maps always being enabled. 
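+        // Callers that need a different setting (e.g. the production_static
+        // builds in production.zig) use initTranspilerWithSourceMap below
+        // and pass a value explicitly.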
+ .development => .external, + // TODO: follow user configuration + else => .none, + }; + + return initTranspilerWithSourceMap( + framework, + arena, + log, + mode, + renderer, + out, + bundler_options, + source_map, + ); + } + + pub fn initTranspilerWithSourceMap( + framework: *Framework, + arena: std.mem.Allocator, + log: *bun.logger.Log, + mode: Mode, + renderer: Graph, + out: *bun.transpiler.Transpiler, + bundler_options: *const BuildConfigSubset, + source_map: bun.options.SourceMapOption, ) !void { const JSAst = bun.JSAst; @@ -646,6 +677,11 @@ pub const Framework = struct { // Support `esm-env` package using this condition. try out.options.conditions.appendSlice(&.{"development"}); } + // Ensure "node" condition is included for server-side rendering + // This helps with package.json imports field resolution + if (renderer == .server or renderer == .ssr) { + try out.options.conditions.appendSlice(&.{"node"}); + } if (bundler_options.conditions.count() > 0) { try out.options.conditions.appendSlice(bundler_options.conditions.keys()); } @@ -661,14 +697,7 @@ pub const Framework = struct { if (bundler_options.ignoreDCEAnnotations) |ignore| out.options.ignore_dce_annotations = ignore; - out.options.source_map = switch (mode) { - // Source maps must always be external, as DevServer special cases - // the linking and part of the generation of these. It also relies - // on source maps always being enabled. - .development => .external, - // TODO: follow user configuration - else => .none, - }; + out.options.source_map = source_map; if (bundler_options.env != ._none) { out.options.env.behavior = bundler_options.env; out.options.env.prefix = bundler_options.env_prefix orelse ""; @@ -876,7 +905,7 @@ pub fn printWarning() void { bun.Output.warn( \\Be advised that Bun Bake is highly experimental, and its API \\will have breaking changes. 
Join the #bake Discord - \\channel to help us find bugs: https://bun.sh/discord + \\channel to help us find bugs: https://bun.com/discord \\ \\ , .{}); diff --git a/src/bake/hmr-module.ts b/src/bake/hmr-module.ts index 308e9dca75..8c8b666eb3 100644 --- a/src/bake/hmr-module.ts +++ b/src/bake/hmr-module.ts @@ -249,7 +249,9 @@ export class HMRModule { declare builtin: (id: string) => any; } if (side === "server") { - HMRModule.prototype.builtin = import.meta.require; + HMRModule.prototype.builtin = (id: string) => + // @ts-expect-error + import.meta.bakeBuiltin(import.meta.resolve(id)); } // prettier-ignore HMRModule.prototype.indirectHot = new Proxy({}, { diff --git a/src/bake/production.zig b/src/bake/production.zig index b4c1ed10f7..a0eaa75494 100644 --- a/src/bake/production.zig +++ b/src/bake/production.zig @@ -118,7 +118,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa \\ \\The default location for this is `bun.app.ts` \\ - \\TODO: insert a link to `bun.sh/docs` + \\TODO: insert a link to `bun.com/docs` , .{}); bun.Global.crash(); } @@ -174,10 +174,10 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa var client_transpiler: bun.transpiler.Transpiler = undefined; var server_transpiler: bun.transpiler.Transpiler = undefined; var ssr_transpiler: bun.transpiler.Transpiler = undefined; - try framework.initTranspiler(allocator, vm.log, .production_static, .server, &server_transpiler, &options.bundler_options.server); - try framework.initTranspiler(allocator, vm.log, .production_static, .client, &client_transpiler, &options.bundler_options.client); + try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .server, &server_transpiler, &options.bundler_options.server, .@"inline"); + try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .client, &client_transpiler, &options.bundler_options.client, .@"inline"); if (separate_ssr_graph) { - try framework.initTranspiler(allocator, vm.log, .production_static, .ssr, &ssr_transpiler, &options.bundler_options.ssr); + try framework.initTranspilerWithSourceMap(allocator, vm.log, .production_static, .ssr, &ssr_transpiler, &options.bundler_options.ssr, .@"inline"); } if (ctx.bundler_options.bake_debug_disable_minify) { @@ -366,9 +366,9 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa public_path, pt.outputFile(client_file).dest_path, })).toJS(global); - client_entry_urls.putIndex(global, @intCast(i), str); + try client_entry_urls.putIndex(global, @intCast(i), str); } else { - client_entry_urls.putIndex(global, @intCast(i), .null); + try client_entry_urls.putIndex(global, @intCast(i), .null); } const server_entry_point = try pt.loadBundledModule(router_type.server_file); @@ -404,8 +404,8 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa } else JSValue.null; - server_render_funcs.putIndex(global, @intCast(i), server_render_func); - server_param_funcs.putIndex(global, @intCast(i), server_param_func); + try server_render_funcs.putIndex(global, @intCast(i), server_render_func); + try server_param_funcs.putIndex(global, @intCast(i), server_param_func); } var navigatable_routes = std.ArrayList(FrameworkRouter.Route.Index).init(allocator); @@ -482,15 +482,15 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa next = route.parent.unwrap(); file_count = 1; css_file_count = 0; - file_list.putIndex(global, 0, 
pt.preloadBundledModule(main_file_route_index)); + try file_list.putIndex(global, 0, try pt.preloadBundledModule(main_file_route_index)); for (main_file.referenced_css_files) |ref| { - styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]); + try styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]); css_file_count += 1; } if (route.file_layout.unwrap()) |file| { - file_list.putIndex(global, file_count, pt.preloadBundledModule(file)); + try file_list.putIndex(global, file_count, try pt.preloadBundledModule(file)); for (pt.outputFile(file).referenced_css_files) |ref| { - styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]); + try styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]); css_file_count += 1; } file_count += 1; @@ -499,9 +499,9 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa while (next) |parent_index| { const parent = router.routePtr(parent_index); if (parent.file_layout.unwrap()) |file| { - file_list.putIndex(global, file_count, pt.preloadBundledModule(file)); + try file_list.putIndex(global, file_count, try pt.preloadBundledModule(file)); for (pt.outputFile(file).referenced_css_files) |ref| { - styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]); + try styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]); css_file_count += 1; } file_count += 1; @@ -512,26 +512,26 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa // Init the items var pattern_string = bun.String.createUTF8(pattern.slice()); defer pattern_string.deref(); - route_patterns.putIndex(global, @intCast(nav_index), pattern_string.toJS(global)); + try route_patterns.putIndex(global, @intCast(nav_index), pattern_string.toJS(global)); var src_path = bun.String.createUTF8(bun.path.relative(cwd, pt.inputFile(main_file_route_index).absPath())); - route_source_files.putIndex(global, @intCast(nav_index), src_path.transferToJS(global)); + try route_source_files.putIndex(global, @intCast(nav_index), src_path.transferToJS(global)); - route_nested_files.putIndex(global, @intCast(nav_index), file_list); - route_type_and_flags.putIndex(global, @intCast(nav_index), JSValue.jsNumberFromInt32(@bitCast(TypeAndFlags{ + try route_nested_files.putIndex(global, @intCast(nav_index), file_list); + try route_type_and_flags.putIndex(global, @intCast(nav_index), JSValue.jsNumberFromInt32(@bitCast(TypeAndFlags{ .type = route.type.get(), }))); if (params_buf.items.len > 0) { const param_info_array = try JSValue.createEmptyArray(global, params_buf.items.len); for (params_buf.items, 0..) 
|param, i| { - param_info_array.putIndex(global, @intCast(params_buf.items.len - i - 1), bun.String.createUTF8ForJS(global, param)); + try param_info_array.putIndex(global, @intCast(params_buf.items.len - i - 1), bun.String.createUTF8ForJS(global, param)); } - route_param_info.putIndex(global, @intCast(nav_index), param_info_array); + try route_param_info.putIndex(global, @intCast(nav_index), param_info_array); } else { - route_param_info.putIndex(global, @intCast(nav_index), .null); + try route_param_info.putIndex(global, @intCast(nav_index), .null); } - route_style_references.putIndex(global, @intCast(nav_index), styles); + try route_style_references.putIndex(global, @intCast(nav_index), styles); } const render_promise = BakeRenderRoutesForProdStatic( @@ -561,6 +561,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa return vm.global.throwValue(err); }, } + vm.waitForTasks(); } /// unsafe function, must be run outside of the event loop @@ -821,10 +822,10 @@ pub const PerThread = struct { // What could be done here is generating a new index type, which is // specifically for referenced files. This would remove the holes, but make // it harder to pre-allocate. It's probably worth it. - pub fn preloadBundledModule(pt: *PerThread, id: OpaqueFileId) JSValue { + pub fn preloadBundledModule(pt: *PerThread, id: OpaqueFileId) bun.JSError!JSValue { if (!pt.loaded_files.isSet(id.get())) { pt.loaded_files.set(id.get()); - pt.all_server_files.putIndex( + try pt.all_server_files.putIndex( pt.vm.global, @intCast(id.get()), pt.module_keys[id.get()].toJS(pt.vm.global), diff --git a/src/bun.js/BuildMessage.zig b/src/bun.js/BuildMessage.zig index fcc14d0f55..b59e5e5045 100644 --- a/src/bun.js/BuildMessage.zig +++ b/src/bun.js/BuildMessage.zig @@ -29,7 +29,7 @@ pub const BuildMessage = struct { const array = try JSC.JSValue.createEmptyArray(globalThis, notes.len); for (notes, 0..) 
|note, i| { const cloned = try note.clone(bun.default_allocator); - array.putIndex( + try array.putIndex( globalThis, @intCast(i), try BuildMessage.create(globalThis, bun.default_allocator, logger.Msg{ .data = cloned, .kind = .note }), diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig index 70fa6654e7..a7a5357463 100644 --- a/src/bun.js/ConsoleObject.zig +++ b/src/bun.js/ConsoleObject.zig @@ -185,7 +185,7 @@ fn messageWithTypeAndLevel_( var tabular_data = vals[0]; if (tabular_data.isObject()) { const properties: JSValue = if (len >= 2 and vals[1].jsType().isArray()) vals[1] else .js_undefined; - var table_printer = TablePrinter.init( + var table_printer = try TablePrinter.init( global, level, tabular_data, @@ -277,13 +277,13 @@ pub const TablePrinter = struct { level: MessageLevel, tabular_data: JSValue, properties: JSValue, - ) TablePrinter { + ) bun.JSError!TablePrinter { return TablePrinter{ .level = level, .globalObject = globalObject, .tabular_data = tabular_data, .properties = properties, - .is_iterable = tabular_data.isIterable(globalObject), + .is_iterable = try tabular_data.isIterable(globalObject), .jstype = tabular_data.jsType(), .value_formatter = ConsoleObject.Formatter{ .remaining_values = &[_]JSValue{}, @@ -320,11 +320,11 @@ pub const TablePrinter = struct { }; /// Compute how much horizontal space will take a JSValue when printed - fn getWidthForValue(this: *TablePrinter, value: JSValue) u32 { + fn getWidthForValue(this: *TablePrinter, value: JSValue) bun.JSError!u32 { var width: usize = 0; var value_formatter = this.value_formatter; - const tag = ConsoleObject.Formatter.Tag.get(value, this.globalObject); + const tag = try ConsoleObject.Formatter.Tag.get(value, this.globalObject); value_formatter.quote_strings = !(tag.tag == .String or tag.tag == .StringPossiblyFormatted); value_formatter.format( tag, @@ -343,7 +343,7 @@ pub const TablePrinter = struct { } /// Update the sizes of the columns for the values of a given row, and create any additional columns as needed - fn updateColumnsForRow(this: *TablePrinter, columns: *std.ArrayList(Column), row_key: RowKey, row_value: JSValue) !void { + fn updateColumnsForRow(this: *TablePrinter, columns: *std.ArrayList(Column), row_key: RowKey, row_value: JSValue) bun.JSError!void { // update size of "(index)" column const row_key_len: u32 = switch (row_key) { .str => |value| @intCast(value.visibleWidthExcludeANSIColors(false)), @@ -353,10 +353,10 @@ pub const TablePrinter = struct { // special handling for Map: column with idx=1 is "Keys" if (this.jstype.isMap()) { - const entry_key = row_value.getIndex(this.globalObject, 0); - const entry_value = row_value.getIndex(this.globalObject, 1); - columns.items[1].width = @max(columns.items[1].width, this.getWidthForValue(entry_key)); - this.values_col_width = @max(this.values_col_width orelse 0, this.getWidthForValue(entry_value)); + const entry_key = try row_value.getIndex(this.globalObject, 0); + const entry_value = try row_value.getIndex(this.globalObject, 1); + columns.items[1].width = @max(columns.items[1].width, try this.getWidthForValue(entry_key)); + this.values_col_width = @max(this.values_col_width orelse 0, try this.getWidthForValue(entry_value)); return; } @@ -366,8 +366,8 @@ pub const TablePrinter = struct { // - otherwise: iterate the object properties, and create the columns on-demand if (!this.properties.isUndefined()) { for (columns.items[1..]) |*column| { - if (row_value.getOwn(this.globalObject, column.name)) |value| { - column.width = 
@max(column.width, this.getWidthForValue(value)); + if (try row_value.getOwn(this.globalObject, column.name)) |value| { + column.width = @max(column.width, try this.getWidthForValue(value)); } } } else { @@ -399,12 +399,12 @@ pub const TablePrinter = struct { break :brk &columns.items[columns.items.len - 1]; }; - column.width = @max(column.width, this.getWidthForValue(value)); + column.width = @max(column.width, try this.getWidthForValue(value)); } } } else if (this.properties.isUndefined()) { // not object -> the value will go to the special "Values" column - this.values_col_width = @max(this.values_col_width orelse 1, this.getWidthForValue(row_value)); + this.values_col_width = @max(this.values_col_width orelse 1, try this.getWidthForValue(row_value)); } } @@ -452,24 +452,24 @@ pub const TablePrinter = struct { var value = JSValue.zero; if (col_idx == 1 and this.jstype.isMap()) { // is the "Keys" column, when iterating a Map? - value = row_value.getIndex(this.globalObject, 0); + value = try row_value.getIndex(this.globalObject, 0); } else if (col_idx == this.values_col_idx) { // is the "Values" column? if (this.jstype.isMap()) { - value = row_value.getIndex(this.globalObject, 1); + value = try row_value.getIndex(this.globalObject, 1); } else if (!row_value.isObject()) { value = row_value; } } else if (row_value.isObject()) { - value = row_value.getOwn(this.globalObject, col.name) orelse JSValue.zero; + value = try row_value.getOwn(this.globalObject, col.name) orelse JSValue.zero; } if (value == .zero) { try writer.writeByteNTimes(' ', col.width + (PADDING * 2)); } else { - const len: u32 = this.getWidthForValue(value); + const len: u32 = try this.getWidthForValue(value); const needed = col.width -| len; try writer.writeByteNTimes(' ', PADDING); - const tag = ConsoleObject.Formatter.Tag.get(value, this.globalObject); + const tag = try ConsoleObject.Formatter.Tag.get(value, this.globalObject); var value_formatter = this.value_formatter; value_formatter.quote_strings = !(tag.tag == .String or tag.tag == .StringPossiblyFormatted); @@ -532,8 +532,8 @@ pub const TablePrinter = struct { // if the "properties" arg was provided, pre-populate the columns if (!this.properties.isUndefined()) { - var properties_iter = JSC.JSArrayIterator.init(this.properties, globalObject); - while (properties_iter.next()) |value| { + var properties_iter = try JSC.JSArrayIterator.init(this.properties, globalObject); + while (try properties_iter.next()) |value| { try columns.append(.{ .name = try value.toBunString(globalObject), }); @@ -767,7 +767,7 @@ pub const FormatOptions = struct { } formatOptions.max_depth = @as(u16, @truncate(@as(u32, @intCast(@min(arg, std.math.maxInt(u16)))))); } else if (opt.isNumber()) { - const v = opt.coerce(f64, globalThis); + const v = try opt.coerce(f64, globalThis); if (std.math.isInf(v)) { formatOptions.max_depth = std.math.maxInt(u16); } else { @@ -795,7 +795,7 @@ pub const FormatOptions = struct { } formatOptions.max_depth = @as(u16, @truncate(@as(u32, @intCast(@min(arg, std.math.maxInt(u16)))))); } else if (depthArg.isNumber()) { - const v = depthArg.coerce(f64, globalThis); + const v = try depthArg.coerce(f64, globalThis); if (std.math.isInf(v)) { formatOptions.max_depth = std.math.maxInt(u16); } else { @@ -803,10 +803,7 @@ pub const FormatOptions = struct { } } if (arguments.len > 1 and !arguments[1].isEmptyOrUndefinedOrNull()) { - formatOptions.enable_colors = arguments[1].coerce(bool, globalThis); - if (globalThis.hasException()) { - return error.JSError; - } + 
formatOptions.enable_colors = arguments[1].toBoolean(); } } } @@ -838,7 +835,7 @@ pub fn format2( .error_display_level = options.error_display_level, }; defer fmt.deinit(); - const tag = ConsoleObject.Formatter.Tag.get(vals[0], global); + const tag = try ConsoleObject.Formatter.Tag.get(vals[0], global); fmt.writeIndent(Writer, writer) catch return; if (tag.tag == .String) { @@ -937,7 +934,7 @@ pub fn format2( } any = true; - tag = ConsoleObject.Formatter.Tag.get(this_value, global); + tag = try ConsoleObject.Formatter.Tag.get(this_value, global); if (tag.tag == .String and fmt.remaining_values.len > 0) { tag.tag = .{ .StringPossiblyFormatted = {} }; } @@ -959,7 +956,7 @@ pub fn format2( _ = writer.write(" ") catch 0; } any = true; - tag = ConsoleObject.Formatter.Tag.get(this_value, global); + tag = try ConsoleObject.Formatter.Tag.get(this_value, global); if (tag.tag == .String and fmt.remaining_values.len > 0) { tag.tag = .{ .StringPossiblyFormatted = {} }; } @@ -1046,7 +1043,7 @@ pub const Formatter = struct { self.formatter.remaining_values = &[_]JSValue{}; } try self.formatter.format( - Tag.get(self.value, self.formatter.globalThis), + try Tag.get(self.value, self.formatter.globalThis), @TypeOf(writer), writer, self.value, @@ -1181,7 +1178,7 @@ pub const Formatter = struct { cell: JSValue.JSType = JSValue.JSType.Cell, }; - pub fn get(value: JSValue, globalThis: *JSGlobalObject) Result { + pub fn get(value: JSValue, globalThis: *JSGlobalObject) bun.JSError!Result { return getAdvanced(value, globalThis, .{ .hide_global = false }); } @@ -1191,12 +1188,12 @@ pub const Formatter = struct { disable_inspect_custom: bool = false, }; - pub fn getAdvanced(value: JSValue, globalThis: *JSGlobalObject, opts: Options) Result { - switch (@intFromEnum(value)) { - 0, 0xa => return Result{ + pub fn getAdvanced(value: JSValue, globalThis: *JSGlobalObject, opts: Options) bun.JSError!Result { + switch (value) { + .zero, .js_undefined => return Result{ .tag = .{ .Undefined = {} }, }, - 0x2 => return Result{ + .null => return Result{ .tag = .{ .Null = {} }, }, else => {}, @@ -1294,16 +1291,16 @@ pub const Formatter = struct { // Is this a react element? if (js_type.isObject() and js_type != .ProxyObject) { - if (value.getOwnTruthy(globalThis, "$$typeof")) |typeof_symbol| { + if (try value.getOwnTruthy(globalThis, "$$typeof")) |typeof_symbol| { // React 18 and below var react_element_legacy = ZigString.init("react.element"); // For React 19 - https://github.com/oven-sh/bun/issues/17223 var react_element_transitional = ZigString.init("react.transitional.element"); var react_fragment = ZigString.init("react.fragment"); - if (JSValue.isSameValue(typeof_symbol, JSValue.symbolFor(globalThis, &react_element_legacy), globalThis) or - JSValue.isSameValue(typeof_symbol, JSValue.symbolFor(globalThis, &react_element_transitional), globalThis) or - JSValue.isSameValue(typeof_symbol, JSValue.symbolFor(globalThis, &react_fragment), globalThis)) + if (try typeof_symbol.isSameValue(.symbolFor(globalThis, &react_element_legacy), globalThis) or + try typeof_symbol.isSameValue(.symbolFor(globalThis, &react_element_transitional), globalThis) or + try typeof_symbol.isSameValue(.symbolFor(globalThis, &react_fragment), globalThis)) { return .{ .tag = .{ .JSX = {} }, .cell = js_type }; } @@ -1613,7 +1610,7 @@ pub const Formatter = struct { // > implementation-specific, potentially-interactive representation // > of an object judged to be maximally useful and informative. 
} - try this.format(Tag.get(next_value, global), Writer, writer_, next_value, global, enable_ansi_colors); + try this.format(try Tag.get(next_value, global), Writer, writer_, next_value, global, enable_ansi_colors); }, .c => { @@ -1766,8 +1763,8 @@ pub const Formatter = struct { this.writer.writeAll(" ") catch unreachable; } if (!is_iterator) { - const key = nextValue.getIndex(globalObject, 0); - const value = nextValue.getIndex(globalObject, 1); + const key = nextValue.getIndex(globalObject, 0) catch return; + const value = nextValue.getIndex(globalObject, 1) catch return; if (!single_line) { this.formatter.writeIndent(Writer, this.writer) catch unreachable; @@ -1775,7 +1772,7 @@ pub const Formatter = struct { const key_tag = Tag.getAdvanced(key, globalObject, .{ .hide_global = true, .disable_inspect_custom = this.formatter.disable_inspect_custom, - }); + }) catch return; this.formatter.format( key_tag, @@ -1789,7 +1786,7 @@ pub const Formatter = struct { const value_tag = Tag.getAdvanced(value, globalObject, .{ .hide_global = true, .disable_inspect_custom = this.formatter.disable_inspect_custom, - }); + }) catch return; this.formatter.format( value_tag, Writer, @@ -1806,7 +1803,7 @@ pub const Formatter = struct { const tag = Tag.getAdvanced(nextValue, globalObject, .{ .hide_global = true, .disable_inspect_custom = this.formatter.disable_inspect_custom, - }); + }) catch return; this.formatter.format( tag, Writer, @@ -1847,7 +1844,7 @@ pub const Formatter = struct { const key_tag = Tag.getAdvanced(nextValue, globalObject, .{ .hide_global = true, .disable_inspect_custom = this.formatter.disable_inspect_custom, - }); + }) catch return; this.formatter.format( key_tag, Writer, @@ -1924,7 +1921,7 @@ pub const Formatter = struct { const tag = Tag.getAdvanced(value, globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, - }); + }) catch return; if (tag.cell.isHidden()) return; if (ctx.i == 0) { @@ -2044,7 +2041,6 @@ pub const Formatter = struct { depth: u32, max_depth: u32, colors: bool, - is_exception: *bool, ) JSValue; pub fn printAs( @@ -2176,7 +2172,7 @@ pub const Formatter = struct { } }, .Integer => { - const int = value.coerce(i64, this.globalThis); + const int = try value.coerce(i64, this.globalThis); if (int < std.math.maxInt(u32)) { var i = int; const is_negative = i < 0; @@ -2251,26 +2247,21 @@ pub const Formatter = struct { writer.print(comptime Output.prettyFmt("null", enable_ansi_colors), .{}); }, .CustomFormattedObject => { - var is_exception = false; // Call custom inspect function. Will return the error if there is one // we'll need to pass the callback through to the "this" value in here - const result = JSC__JSValue__callCustomInspectFunction( + const result = try bun.jsc.fromJSHostCall(this.globalThis, @src(), JSC__JSValue__callCustomInspectFunction, .{ this.globalThis, this.custom_formatted_object.function, this.custom_formatted_object.this, this.max_depth -| this.depth, this.max_depth, enable_ansi_colors, - &is_exception, - ); - if (is_exception) { - return error.JSError; - } + }); // Strings are printed directly, otherwise we recurse. It is possible to end up in an infinite loop. 
if (result.isString()) { writer.print("{}", .{result.fmtString(this.globalThis)}); } else { - try this.format(ConsoleObject.Formatter.Tag.get(result, this.globalThis), Writer, writer_, result, this.globalThis, enable_ansi_colors); + try this.format(try ConsoleObject.Formatter.Tag.get(result, this.globalThis), Writer, writer_, result, this.globalThis, enable_ansi_colors); } }, .Symbol => { @@ -2369,7 +2360,7 @@ pub const Formatter = struct { } }, .Array => { - const len = value.getLength(this.globalThis); + const len = try value.getLength(this.globalThis); // TODO: DerivedArray does not get passed along in JSType, and it's not clear why. // if (jsType == .DerivedArray) { @@ -2404,7 +2395,7 @@ pub const Formatter = struct { first: { const element = value.getDirectIndex(this.globalThis, 0); - const tag = Tag.getAdvanced(element, this.globalThis, .{ + const tag = try Tag.getAdvanced(element, this.globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, }); @@ -2487,7 +2478,7 @@ pub const Formatter = struct { writer.space(); } - const tag = Tag.getAdvanced(element, this.globalThis, .{ + const tag = try Tag.getAdvanced(element, this.globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, }); @@ -2534,10 +2525,7 @@ pub const Formatter = struct { .parent = value, .i = i, }; - value.forEachPropertyNonIndexed(this.globalThis, &iter, Iterator.forEach); - if (this.globalThis.hasException()) { - return error.JSError; - } + try value.forEachPropertyNonIndexed(this.globalThis, &iter, Iterator.forEach); if (this.failed) return; } } @@ -2571,7 +2559,7 @@ pub const Formatter = struct { s3client.writeFormat(ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {}; return; } else if (value.as(bun.webcore.FetchHeaders) != null) { - if (value.get_unsafe(this.globalThis, "toJSON")) |toJSONFunction| { + if (try value.get(this.globalThis, "toJSON")) |toJSONFunction| { this.addForNewLine("Headers ".len); writer.writeAll(comptime Output.prettyFmt("Headers ", enable_ansi_colors)); const prev_quote_keys = this.quote_keys; @@ -2589,7 +2577,7 @@ pub const Formatter = struct { ); } } else if (value.as(JSC.DOMFormData) != null) { - if (value.get_unsafe(this.globalThis, "toJSON")) |toJSONFunction| { + if (try value.get(this.globalThis, "toJSON")) |toJSONFunction| { const prev_quote_keys = this.quote_keys; this.quote_keys = true; defer this.quote_keys = prev_quote_keys; @@ -2705,7 +2693,7 @@ pub const Formatter = struct { writer.writeAll(comptime Output.prettyFmt("" ++ fmt ++ "", enable_ansi_colors)); }, .Map => { - const length_value = value.get_unsafe(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0); + const length_value = try value.get(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0); const length = length_value.toInt32(); const prev_quote_strings = this.quote_strings; @@ -2812,7 +2800,7 @@ pub const Formatter = struct { writer.writeAll("}"); }, .Set => { - const length_value = value.get_unsafe(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0); + const length_value = try value.get(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0); const length = length_value.toInt32(); const prev_quote_strings = this.quote_strings; @@ -2855,7 +2843,7 @@ pub const Formatter = struct { writer.writeAll("}"); }, .toJSON => { - if (value.get_unsafe(this.globalThis, "toJSON")) |func| brk: { + if (try value.get(this.globalThis, "toJSON")) |func| brk: { const result = func.call(this.globalThis, value, 
&.{}) catch { this.globalThis.clearException(); break :brk; @@ -2863,7 +2851,7 @@ pub const Formatter = struct { const prev_quote_keys = this.quote_keys; this.quote_keys = true; defer this.quote_keys = prev_quote_keys; - const tag = ConsoleObject.Formatter.Tag.get(result, this.globalThis); + const tag = try ConsoleObject.Formatter.Tag.get(result, this.globalThis); try this.format(tag, Writer, writer_, result, this.globalThis, enable_ansi_colors); return; } @@ -2874,7 +2862,7 @@ pub const Formatter = struct { var str = bun.String.empty; defer str.deref(); - value.jsonStringify(this.globalThis, this.indent, &str); + try value.jsonStringify(this.globalThis, this.indent, &str); this.addForNewLine(str.length()); if (jsType == JSValue.JSType.JSDate) { // in the code for printing dates, it never exceeds this amount @@ -2896,7 +2884,7 @@ pub const Formatter = struct { }, .Event => { const event_type_value: JSValue = brk: { - const value_ = value.get_unsafe(this.globalThis, "type") orelse break :brk .js_undefined; + const value_ = try value.get(this.globalThis, "type") orelse break :brk .js_undefined; if (value_.isString()) { break :brk value_; } @@ -2949,7 +2937,7 @@ pub const Formatter = struct { .{}, ); - const tag = Tag.getAdvanced(message_value, this.globalThis, .{ + const tag = try Tag.getAdvanced(message_value, this.globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, }); @@ -2973,7 +2961,7 @@ pub const Formatter = struct { .{}, ); const data: JSValue = (try value.fastGet(this.globalThis, .data)) orelse .js_undefined; - const tag = Tag.getAdvanced(data, this.globalThis, .{ + const tag = try Tag.getAdvanced(data, this.globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, }); @@ -2995,7 +2983,7 @@ pub const Formatter = struct { .{}, ); - const tag = Tag.getAdvanced(error_value, this.globalThis, .{ + const tag = try Tag.getAdvanced(error_value, this.globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, }); @@ -3029,8 +3017,8 @@ pub const Formatter = struct { defer if (tag_name_slice.isAllocated()) tag_name_slice.deinit(); - if (value.get_unsafe(this.globalThis, "type")) |type_value| { - const _tag = Tag.getAdvanced(type_value, this.globalThis, .{ + if (try value.get(this.globalThis, "type")) |type_value| { + const _tag = try Tag.getAdvanced(type_value, this.globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, }); @@ -3062,7 +3050,7 @@ pub const Formatter = struct { writer.writeAll(tag_name_slice.slice()); if (enable_ansi_colors) writer.writeAll(comptime Output.prettyFmt("", enable_ansi_colors)); - if (value.get_unsafe(this.globalThis, "key")) |key_value| { + if (try value.get(this.globalThis, "key")) |key_value| { if (!key_value.isUndefinedOrNull()) { if (needs_space) writer.writeAll(" key=") @@ -3073,7 +3061,7 @@ pub const Formatter = struct { this.quote_strings = true; defer this.quote_strings = old_quote_strings; - try this.format(Tag.getAdvanced(key_value, this.globalThis, .{ + try this.format(try Tag.getAdvanced(key_value, this.globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, }), Writer, writer_, key_value, this.globalThis, enable_ansi_colors); @@ -3082,7 +3070,7 @@ pub const Formatter = struct { } } - if (value.get_unsafe(this.globalThis, "props")) |props| { + if (try value.get(this.globalThis, "props")) |props| { const prev_quote_strings = this.quote_strings; defer this.quote_strings = 
prev_quote_strings; this.quote_strings = true; @@ -3095,7 +3083,7 @@ pub const Formatter = struct { }).init(this.globalThis, props_obj); defer props_iter.deinit(); - const children_prop = props.get_unsafe(this.globalThis, "children"); + const children_prop = try props.get(this.globalThis, "children"); if (props_iter.len > 0) { { this.indent += 1; @@ -3107,7 +3095,7 @@ pub const Formatter = struct { continue; const property_value = props_iter.value; - const tag = Tag.getAdvanced(property_value, this.globalThis, .{ + const tag = try Tag.getAdvanced(property_value, this.globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, }); @@ -3156,7 +3144,7 @@ pub const Formatter = struct { } if (children_prop) |children| { - const tag = Tag.get(children, this.globalThis); + const tag = try Tag.get(children, this.globalThis); const print_children = switch (tag.tag) { .String, .JSX, .Array => true, @@ -3191,14 +3179,14 @@ pub const Formatter = struct { this.indent += 1; this.writeIndent(Writer, writer_) catch unreachable; defer this.indent -|= 1; - try this.format(Tag.get(children, this.globalThis), Writer, writer_, children, this.globalThis, enable_ansi_colors); + try this.format(try Tag.get(children, this.globalThis), Writer, writer_, children, this.globalThis, enable_ansi_colors); } writer.writeAll("\n"); this.writeIndent(Writer, writer_) catch unreachable; }, .Array => { - const length = children.getLength(this.globalThis); + const length = try children.getLength(this.globalThis); if (length == 0) break :print_children; writer.writeAll(">\n"); @@ -3213,8 +3201,8 @@ pub const Formatter = struct { var j: usize = 0; while (j < length) : (j += 1) { - const child = children.getIndex(this.globalThis, @as(u32, @intCast(j))); - try this.format(Tag.getAdvanced(child, this.globalThis, .{ + const child = try children.getIndex(this.globalThis, @as(u32, @intCast(j))); + try this.format(try Tag.getAdvanced(child, this.globalThis, .{ .hide_global = true, .disable_inspect_custom = this.disable_inspect_custom, }), Writer, writer_, child, this.globalThis, enable_ansi_colors); @@ -3304,14 +3292,11 @@ pub const Formatter = struct { }); return; } else if (this.ordered_properties) { - value.forEachPropertyOrdered(this.globalThis, &iter, Iterator.forEach); + try value.forEachPropertyOrdered(this.globalThis, &iter, Iterator.forEach); } else { - value.forEachProperty(this.globalThis, &iter, Iterator.forEach); + try value.forEachProperty(this.globalThis, &iter, Iterator.forEach); } - if (this.globalThis.hasException()) { - return error.JSError; - } if (this.failed) return; if (iter.i == 0) { @@ -3464,7 +3449,7 @@ pub const Formatter = struct { } // TODO: if (options.showProxy), print like `Proxy { target: ..., handlers: ... }` // this is default off so it is not used. 
- try this.format(ConsoleObject.Formatter.Tag.get(target, this.globalThis), Writer, writer_, target, this.globalThis, enable_ansi_colors); + try this.format(try ConsoleObject.Formatter.Tag.get(target, this.globalThis), Writer, writer_, target, this.globalThis, enable_ansi_colors); }, } } @@ -3654,7 +3639,7 @@ pub fn timeLog( var writer = console.error_writer.writer(); const Writer = @TypeOf(writer); for (args[0..args_len]) |arg| { - const tag = ConsoleObject.Formatter.Tag.get(arg, global); + const tag = ConsoleObject.Formatter.Tag.get(arg, global) catch return; _ = writer.write(" ") catch 0; if (Output.enable_ansi_colors_stderr) { fmt.format(tag, Writer, writer, arg, global, true) catch {}; // TODO: diff --git a/src/bun.js/ModuleLoader.zig b/src/bun.js/ModuleLoader.zig index d190e053ea..40e3783866 100644 --- a/src/bun.js/ModuleLoader.zig +++ b/src/bun.js/ModuleLoader.zig @@ -429,7 +429,7 @@ pub const AsyncModule = struct { errorable = JSC.ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| { switch (err) { error.JSError => { - errorable = .err(error.JSError, this.globalThis.takeError(error.JSError).asVoid()); + errorable = .err(error.JSError, this.globalThis.takeError(error.JSError)); break :outer; }, else => { @@ -468,13 +468,16 @@ pub const AsyncModule = struct { specifier_: bun.String, referrer_: bun.String, log: *logger.Log, - ) void { + ) bun.JSExecutionTerminated!void { JSC.markBinding(@src()); var specifier = specifier_; var referrer = referrer_; + var scope: JSC.CatchScope = undefined; + scope.init(globalThis, @src()); defer { specifier.deref(); referrer.deref(); + scope.deinit(); } var errorable: JSC.ErrorableResolvedSource = undefined; @@ -487,7 +490,7 @@ pub const AsyncModule = struct { } if (e == error.JSError) { - errorable = JSC.ErrorableResolvedSource.err(error.JSError, globalThis.takeError(error.JSError).asVoid()); + errorable = JSC.ErrorableResolvedSource.err(error.JSError, globalThis.takeError(error.JSError)); } else { VirtualMachine.processFetchLog( globalThis, @@ -512,6 +515,7 @@ pub const AsyncModule = struct { &specifier, &referrer, ); + try scope.assertNoExceptionExceptTermination(); } pub fn resolveError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageResolveError) !void { @@ -1354,7 +1358,7 @@ pub fn transpileSourceCode( globalValue.put( globalThis, ZigString.static("wasmSourceBytes"), - JSC.ArrayBuffer.create(globalThis, source.contents, .Uint8Array), + try JSC.ArrayBuffer.create(globalThis, source.contents, .Uint8Array), ); } } @@ -1616,7 +1620,7 @@ pub export fn Bun__transpileFile( var virtual_source_to_use: ?logger.Source = null; var blob_to_deinit: ?JSC.WebCore.Blob = null; var lr = options.getLoaderAndVirtualSource(_specifier.slice(), jsc_vm, &virtual_source_to_use, &blob_to_deinit, type_attribute_str) catch { - ret.* = JSC.ErrorableResolvedSource.err(error.JSErrorObject, globalObject.ERR(.MODULE_NOT_FOUND, "Blob not found", .{}).toJS().asVoid()); + ret.* = JSC.ErrorableResolvedSource.err(error.JSErrorObject, globalObject.ERR(.MODULE_NOT_FOUND, "Blob not found", .{}).toJS()); return null; }; defer if (blob_to_deinit) |*blob| blob.deinit(); @@ -1815,7 +1819,7 @@ pub export fn Bun__transpileFile( }, error.PluginError => return null, error.JSError => { - ret.* = JSC.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError).asVoid()); + ret.* = JSC.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError)); return null; }, else => { @@ -1991,7 +1995,7 @@ export fn 
Bun__transpileVirtualModule( switch (err) { error.PluginError => return true, error.JSError => { - ret.* = JSC.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError).asVoid()); + ret.* = JSC.ErrorableResolvedSource.err(error.JSError, globalObject.takeError(error.JSError)); return true; }, else => { @@ -2140,12 +2144,12 @@ pub const RuntimeTranspilerStore = struct { } // This is run at the top of the event loop on the JS thread. - pub fn drain(this: *RuntimeTranspilerStore) void { + pub fn drain(this: *RuntimeTranspilerStore) bun.JSExecutionTerminated!void { var batch = this.queue.popBatch(); var iter = batch.iterator(); if (iter.next()) |job| { // we run just one job first to see if there are more - job.runFromJSThread(); + try job.runFromJSThread(); } else { return; } @@ -2155,8 +2159,8 @@ pub const RuntimeTranspilerStore = struct { const jsc_vm = vm.jsc; while (iter.next()) |job| { // if there are more, we need to drain the microtasks from the previous run - event_loop.drainMicrotasksWithGlobal(global, jsc_vm); - job.runFromJSThread(); + try event_loop.drainMicrotasksWithGlobal(global, jsc_vm); + try job.runFromJSThread(); } // immediately after this is called, the microtasks will be drained again. @@ -2263,7 +2267,7 @@ pub const RuntimeTranspilerStore = struct { this.vm.eventLoop().enqueueTaskConcurrent(JSC.ConcurrentTask.createFrom(&this.vm.transpiler_store)); } - pub fn runFromJSThread(this: *TranspilerJob) void { + pub fn runFromJSThread(this: *TranspilerJob) bun.JSExecutionTerminated!void { var vm = this.vm; const promise = this.promise.swap(); const globalThis = this.globalThis; @@ -2296,7 +2300,7 @@ pub const RuntimeTranspilerStore = struct { _ = vm.transpiler_store.store.put(this); - ModuleLoader.AsyncModule.fulfill(globalThis, promise, &resolved_source, parse_error, specifier, referrer, &log); + try ModuleLoader.AsyncModule.fulfill(globalThis, promise, &resolved_source, parse_error, specifier, referrer, &log); } pub fn schedule(this: *TranspilerJob) void { diff --git a/src/bun.js/STREAMS.md b/src/bun.js/STREAMS.md new file mode 100644 index 0000000000..8199e29a87 --- /dev/null +++ b/src/bun.js/STREAMS.md @@ -0,0 +1,396 @@ +# **Bun Streams Architecture: High-Performance I/O in JavaScript** + +### **Table of Contents** + +1. [**Overview & Core Philosophy**](#1-overview--core-philosophy) +2. [**Foundational Concepts**](#2-foundational-concepts) + - 2.1. The Stream Tagging System: Enabling Optimization + - 2.2. The `Body` Mixin: An Intelligent Gateway +3. [**Deep Dive: The Major Performance Optimizations**](#3-deep-dive-the-major-performance-optimizations) + - 3.1. Optimization 1: Synchronous Coercion - Eliminating Streams Entirely + - 3.2. Optimization 2: The Direct Path - Zero-Copy Native Piping + - 3.3. Optimization 3: `readMany()` - Efficient Async Iteration +4. [**Low-Level Implementation Details**](#4-low-level-implementation-details) + - 4.1. The Native Language: `streams.zig` Primitives + - 4.2. Native Sink In-Depth: `HTTPSResponseSink` and Buffering + - 4.3. The Native Collector: `Body.ValueBufferer` + - 4.4. Memory and String Optimizations +5. [**The Unified System: A Complete Data Flow Example**](#5-the-unified-system-a-complete-data-flow-example) +6. [**Conclusion**](#6-conclusion) + +--- + +## 1. Overview & Core Philosophy + +Streams in Bun makes I/O performance in JavaScript competitive with lower-level languages like Go, Rust, and C, while presenting a fully WHATWG-compliant API. + +The core philosophy is **"native-first, JS-fallback"**. 
Bun assumes that for many high-performance use cases, the JavaScript layer should act as a high-level controller for system-level I/O operations. We try to execute I/O operations with minimal abstraction cost, bypassing the JavaScript virtual machine entirely for performance-critical paths.
+
+This document details the specific architectural patterns, from the JS/native boundary down to the I/O layer, that enable this level of performance.
+
+## 2. Foundational Concepts
+
+Two foundational concepts underpin Bun's stream optimizations: the tagging system and the `Body` mixin's role as a state machine.
+
+### 2.1. The Stream Tagging System: Enabling Optimization
+
+Identifying the _source_ of a `ReadableStream` at the native level unlocks many optimization opportunities. This is achieved by "tagging" the stream object internally.
+
+- **Mechanism:** Every `ReadableStream` in Bun holds a private field, `bunNativePtr`, which can point to a native Zig struct representing the stream's underlying source.
+- **Identification:** A C++ binding, `ReadableStreamTag__tagged` (from `ReadableStream.zig`), is the primary entry point for this identification. When native code needs to consume a stream (e.g., when sending a `Response` body), it calls this function on the JS `ReadableStream` object to determine its origin.
+
+```zig
+// src/bun.js/webcore/ReadableStream.zig
+pub const Tag = enum(i32) {
+    JavaScript = 0, // A generic, user-defined stream. This is the "slow path".
+    Blob = 1, // An in-memory blob. Fast path available.
+    File = 2, // Backed by a native file reader. Fast path available.
+    Bytes = 4, // Backed by a native network byte stream. Fast path available.
+    Direct = 3, // Internal native-to-native stream.
+    Invalid = -1,
+};
+```
+
+This tag is the key that unlocks all subsequent optimizations. It allows the runtime to dispatch to the correct, most efficient implementation path.
+
+### 2.2. The `Body` Mixin: An Intelligent Gateway
+
+The `Body` mixin (used by `Request` and `Response`) is not merely a stream container; it's a sophisticated state machine and the primary API gateway to Bun's optimization paths. A `Body`'s content is represented by the `Body.Value` union in Zig, which can be a static buffer (`.InternalBlob`, `.WTFStringImpl`) or a live stream (`.Locked`).
+
+Methods like `.text()`, `.json()`, and `.arrayBuffer()` are not simple stream consumers. They are entry points to a decision tree that aggressively seeks the fastest possible way to fulfill the request.
+
+```mermaid
+stateDiagram-v2
+    direction TB
+    [*] --> StaticBuffer : new Response("hello")
+
+    state StaticBuffer {
+        [*] --> Ready
+        Ready : Data in memory
+        Ready : .WTFStringImpl | .InternalBlob
+    }
+
+    StaticBuffer --> Locked : Access .body
+    StaticBuffer --> Used : .text() ⚡
+
+    state Locked {
+        [*] --> Streaming
+        Streaming : ReadableStream created
+        Streaming : Tagged (File/Bytes/etc)
+    }
+
+    Locked --> Used : consume stream
+    Used --> [*] : Complete
+
+    note right of StaticBuffer
+        Fast Path
+        Skip streams entirely!
+    end note
+
+    note right of Locked
+        Slow Path
+        Full streaming
+    end note
+
+    classDef buffer fill:#fbbf24,stroke:#92400e,stroke-width:3px,color:#451a03
+    classDef stream fill:#60a5fa,stroke:#1e40af,stroke-width:3px,color:#172554
+    classDef final fill:#34d399,stroke:#14532d,stroke-width:3px,color:#052e16
+
+    class StaticBuffer buffer
+    class Locked stream
+    class Used final
+```
+
+**Diagram 1: `Body.Value` State Transitions**
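+
+These state transitions are observable from plain JavaScript. The following is a small illustration using only public APIs; the `.WTFStringImpl`/`.InternalBlob`/`.Locked` names are the internal Zig states described above, not anything exposed to user code:
+
+```ts
+const r1 = new Response("hello");
+await r1.text();          // StaticBuffer -> Used: fast path, no stream ever created
+console.log(r1.bodyUsed); // true
+
+const r2 = new Response("hello");
+const stream = r2.body;   // StaticBuffer -> Locked: a tagged ReadableStream is created
+await r2.arrayBuffer();   // Locked -> Used: must consume through the stream
+```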
+
+## 3. Deep Dive: The Major Performance Optimizations
+
+### 3.1. Optimization 1: Synchronous Coercion - Eliminating Streams Entirely
+
+This is the most impactful optimization for common API and data-processing tasks.
+
+**The Conventional Problem:** In other JavaScript runtimes, consuming a response body with `.text()` is an inherently asynchronous, multi-step process involving the creation of multiple streams, readers, and promises, which incurs significant overhead.
+
+**Bun's fast path:** For many real-world workloads (e.g., small JSON API responses), the entire response body is already available in a single, contiguous memory buffer by the time the consuming method is called. Bun detects this case, **bypasses the entire stream processing model**, and returns the buffer directly.
+
+**Implementation Architecture & Data Flow:**
+
+```mermaid
+flowchart TB
+    A["response.text()"] --> B{Check Body Type}
+
+    B -->|"✅ Already Buffered
(InternalBlob, etc.)"|C[⚡ FAST PATH] + B -->|"❌ Is Stream
(.Locked)"|D[🐌 SLOW PATH] + + subgraph fast[" "] + C --> C1[Get buffer pointer] + C1 --> C2[Decode to string] + C2 --> C3[Return resolved Promise] + end + + subgraph slow[" "] + D --> D1[Create pending Promise] + D1 --> D2[Setup native buffering] + D2 --> D3[Collect all chunks] + D3 --> D4[Decode & resolve Promise] + end + + C3 --> E["✨ Result available immediately
(0 async operations)"] + D4 --> F["⏳ Result after I/O completes
(multiple async operations)"]
+
+    style fast fill:#dcfce7,stroke:#166534,stroke-width:3px
+    style slow fill:#fee2e2,stroke:#991b1b,stroke-width:3px
+    style C fill:#22c55e,stroke:#166534,stroke-width:3px,color:#14532d
+    style C1 fill:#86efac,stroke:#166534,stroke-width:2px,color:#14532d
+    style C2 fill:#86efac,stroke:#166534,stroke-width:2px,color:#14532d
+    style C3 fill:#86efac,stroke:#166534,stroke-width:2px,color:#14532d
+    style D fill:#ef4444,stroke:#991b1b,stroke-width:3px,color:#ffffff
+    style D1 fill:#fca5a5,stroke:#991b1b,stroke-width:2px,color:#450a0a
+    style D2 fill:#fca5a5,stroke:#991b1b,stroke-width:2px,color:#450a0a
+    style D3 fill:#fca5a5,stroke:#991b1b,stroke-width:2px,color:#450a0a
+    style D4 fill:#fca5a5,stroke:#991b1b,stroke-width:2px,color:#450a0a
+    style E fill:#166534,stroke:#14532d,stroke-width:4px,color:#ffffff
+    style F fill:#dc2626,stroke:#991b1b,stroke-width:3px,color:#ffffff
+```
+
+**Diagram 2: Synchronous Coercion Logic Flow**
+
+1. **Entry Point:** A JS call to `response.text()` triggers `readableStreamToText` (`ReadableStream.ts`), which immediately calls `tryUseReadableStreamBufferedFastPath`.
+2. **Native Check:** `tryUseReadableStreamBufferedFastPath` calls the native binding `jsFunctionGetCompleteRequestOrResponseBodyValueAsArrayBuffer` (`Response.zig`).
+3. **State Inspection:** This native function inspects the `Body.Value` tag. If the tag is `.InternalBlob`, `.Blob` (and not a disk-backed file), or `.WTFStringImpl`, the complete data is already in memory.
+4. **Synchronous Data Transfer:** The function **synchronously** returns the underlying buffer as a native `ArrayBuffer` handle to JavaScript. The `Body` state is immediately transitioned to `.Used`. The buffer's ownership is often transferred (`.transfer` lifetime), avoiding a data copy.
+5. **JS Resolution:** The JS layer receives a promise that is **already fulfilled** with the complete `ArrayBuffer`. It then performs the final conversion (e.g., `TextDecoder.decode()`) in a single step.
+
+**Architectural Impact:** This optimization transforms a complex, multi-tick asynchronous operation into a single, synchronous native call followed by a single conversion step. The performance gain is an order of magnitude or more, as it eliminates the allocation and processing overhead of the entire stream and promise chain.
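+
+For illustration, this is the user-visible shape of the fast path, using only public APIs. The synchronous behavior is internal; from JavaScript you simply observe a promise that resolves without any stream machinery (a sketch, not Bun internals):
+
+```ts
+// The body below is a static in-memory buffer (.InternalBlob / .WTFStringImpl),
+// so .text() takes the synchronous coercion path described above: no stream,
+// reader, or per-chunk promise is ever created.
+const response = new Response(JSON.stringify({ ok: true }));
+const text = await response.text(); // promise is already fulfilled when returned
+```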
FileReader Source"] + B["🔌 Socket Buffer
HTTPSResponseSink"] + A -."🚀 Zero-Copy View
streams.Result.temporary".-> B + B -."🔙 Backpressure Signal".-> A + end + end + B ==>|"📡 Send"|D["🌐 Network"] + C ==>|"Direct Native
Connection"|A + + style js fill:#fef3c7,stroke:#92400e,stroke-width:3px,color:#451a03 + style native fill:#dbeafe,stroke:#1e40af,stroke-width:3px,color:#172554 + style A fill:#60a5fa,stroke:#1e40af,stroke-width:2px,color:#172554 + style B fill:#60a5fa,stroke:#1e40af,stroke-width:2px,color:#172554 + style C fill:#fbbf24,stroke:#92400e,stroke-width:2px,color:#451a03 + style D fill:#22c55e,stroke:#166534,stroke-width:2px,color:#ffffff + + classDef jsClass fill:#fef3c7,stroke:#f59e0b,stroke-width:2px + classDef nativeClass fill:#dbeafe,stroke:#3b82f6,stroke-width:2px + classDef networkClass fill:#d1fae5,stroke:#10b981,stroke-width:2px +``` + +**Diagram 3: Direct Path for File Serving** + +1. **Scenario:** A server handler returns `new Response(Bun.file("video.mp4").stream())`. +2. **Tagging:** The stream is created with a `File` tag, and its `bunNativePtr` points to a native `webcore.FileReader` struct. The HTTP server's response sink is a native `HTTPSResponseSink`. +3. **Connection via `assignToStream`:** The server's internal logic triggers `assignToStream` (`ReadableStreamInternals.ts`). This function detects the native source via its tag and dispatches to `readDirectStream`. +4. **Native Handoff:** `readDirectStream` calls the C++ binding `$startDirectStream`, which passes pointers to the native `FileReader` (source) and `HTTPSResponseSink` (sink) to the Zig engine. +5. **Zero-Copy Native Data Flow:** The Zig layer takes over. The `FileReader` reads a chunk from the disk. It yields a `streams.Result.temporary` variant, which is a **zero-copy view** into a shared read buffer. This view is passed directly to the `HTTPSResponseSink.write()` method, which appends it to its internal socket write buffer. When possible, Bun will skip the FileReader and use the `sendfile` system call for even less system call interactions. + +**Architectural Impact:** + +- **No Per-Chunk JS Execution:** The JavaScript event loop is not involved in the chunk-by-chunk transfer. +- **Zero Intermediate Copies:** Data moves from the kernel's page cache directly to the network socket's send buffer. +- **Hardware-Limited Throughput:** This architecture removes the runtime as a bottleneck, allowing I/O performance to be limited primarily by hardware speed. + +### 3.3. Optimization 3: `readMany()` - Efficient Async Iteration + +Bun optimizes the standard `for-await-of` loop syntax for streams. + +**The Conventional Problem:** A naive `[Symbol.asyncIterator]` implementation calls `await reader.read()` for every chunk, which is inefficient if many small chunks arrive in quick succession. + +**Bun's Solution:** Bun provides a custom, non-standard `reader.readMany()` method that synchronously drains the stream's entire internal buffer into a JavaScript array. + +**Implementation Architecture & Data Flow:** + +```mermaid +flowchart TB + subgraph trad["Traditional for-await-of"] + direction TB + T1["🔄 for await (chunk of stream)"] + T2["await read() → chunk1"] + T3["Process chunk1"] + T4["await read() → chunk2"] + T5["Process chunk2"] + T6["await read() → chunk3"] + T7["..."] + T1 --> T2 --> T3 --> T4 --> T5 --> T6 --> T7 + end + + subgraph bun["Bun's readMany() Optimization"] + direction TB + B1["🚀 for await (chunks of stream)"] + B2["readMany()"] + B3{"Buffer
Status?"} + B4["⚡ Return [c1, c2, c3]
SYNCHRONOUS"] + B5["Process ALL chunks
in one go"] + B6["await (only if empty)"] + + B1 --> B2 + B2 --> B3 + B3 -->|"Has Data"|B4 + B3 -->|"Empty"|B6 + B4 --> B5 + B5 --> B2 + B6 --> B2 + end + + trad --> P1["❌ Performance Impact
• Promise per chunk
• await per chunk
• High overhead"] + bun --> P2["✅ Performance Win
• Batch processing
• Minimal promises
• Low overhead"] + + style trad fill:#fee2e2,stroke:#7f1d1d,stroke-width:3px + style bun fill:#dcfce7,stroke:#14532d,stroke-width:3px + style T2 fill:#ef4444,stroke:#7f1d1d,color:#ffffff + style T4 fill:#ef4444,stroke:#7f1d1d,color:#ffffff + style T6 fill:#ef4444,stroke:#7f1d1d,color:#ffffff + style B4 fill:#22c55e,stroke:#14532d,stroke-width:3px,color:#ffffff + style B5 fill:#22c55e,stroke:#14532d,stroke-width:3px,color:#ffffff + style P1 fill:#dc2626,stroke:#7f1d1d,stroke-width:3px,color:#ffffff + style P2 fill:#16a34a,stroke:#14532d,stroke-width:3px,color:#ffffff +``` + +**Diagram 4: `readMany()` Async Iterator Flow** + +**Architectural Impact:** This pattern coalesces multiple chunks into a single macro-task. It drastically reduces the number of promise allocations and `await` suspensions required to process a stream, leading to significantly lower CPU usage and higher throughput for chunked data processing. + +### **4. Low-Level Implementation Details** + +The high-level optimizations are made possible by a robust and carefully designed native foundation in Zig. + +#### **4.1. The Native Language: `streams.zig` Primitives** + +The entire native architecture is built upon a set of generic, powerful Zig primitives that define the contracts for data flow. + +- **`streams.Result` Union:** This is the universal data-carrying type for all native stream reads. Its variants are not just data containers; they are crucial signals from the source to the sink. + - `owned: bun.ByteList`: Represents a heap-allocated buffer. The receiver is now responsible for freeing this memory. This is used when data must outlive the current scope. + - `temporary: bun.ByteList`: A borrowed, read-only view into a source's internal buffer. This is the key to **zero-copy reads**, as the sink can process the data without taking ownership or performing a copy. It is only valid for the duration of the function call. + - `owned_and_done` / `temporary_and_done`: These variants bundle the final data chunk with the end-of-stream signal. This is a critical latency optimization, as it collapses two distinct events (data and close) into one, saving an I/O round trip. + - `into_array`: Used for BYOB (Bring-Your-Own-Buffer) readers. It contains a handle to the JS-provided `ArrayBufferView` (`value: JSValue`) and the number of bytes written (`len`). This confirms a zero-copy write directly into JS-managed memory. + - `pending: *Pending`: A handle to a future/promise, used to signal that the result is not yet available and the operation should be suspended. + +- **`streams.Signal` V-Table:** This struct provides a generic, type-erased interface (`start`, `ready`, `close`) for a sink to communicate backpressure and state changes to a source. + - **`start()`**: Tells the source to begin producing data. + - **`ready()`**: The sink calls this to signal it has processed data and is ready for more, effectively managing backpressure. + - **`close()`**: The sink calls this to tell the source to stop, either due to completion or an error. + This v-table decouples native components, allowing any native source to be connected to any native sink without direct knowledge of each other's concrete types, which is essential for the Direct Path optimization. + +#### **4.2. Native Sink In-Depth: `HTTPSResponseSink` and Buffering** + +The `HTTPServerWritable` struct (instantiated as `HTTPSResponseSink` in `streams.zig`) is part of what makes Bun's HTTP server fast. 
+
+- **Intelligent Write Buffering:** The `write` method (`writeBytes`, `writeLatin1`, etc.) does not immediately issue a `write` syscall. It appends the incoming `streams.Result` slice to its internal `buffer: bun.ByteList`. This coalesces multiple small, high-frequency writes (common in streaming LLM responses or SSE) into a single, larger, more efficient syscall.
+
+- **Backpressure Logic (`send` method):** The `send` method attempts to write the buffer to the underlying `uWebSockets` socket.
+  - It uses the optimized `res.tryEnd()` for the final chunk.
+  - If `res.write()` or `res.tryEnd()` returns a "backpressure" signal, the sink immediately sets `this.has_backpressure = true` and registers an `onWritable` callback.
+  - The `onWritable` callback is triggered by the OS/`uWebSockets` when the socket can accept more data. It clears the backpressure flag, attempts to send the rest of the buffered data, and then signals `ready()` back to the source stream via its `streams.Signal`. This creates a tight, efficient, native backpressure loop.
+
+- **The Auto-Flusher (`onAutoFlush`):** This mechanism balances throughput against latency.
+  - **Mechanism:** When `write` is called but the `highWaterMark` is not reached, `registerAutoFlusher` queues a task that runs after all JavaScript microtasks have completed.
+  - **Execution:** The `onAutoFlush` method runs at the very end of the current event-loop tick. It checks `!this.hasBackpressure()` and, if the buffer is not empty, calls `sendWithoutAutoFlusher` to flush the buffered data.
+  - **Architectural Impact:** This allows multiple `writer.write()` calls within a single synchronous block of JS code to be batched into one syscall, but guarantees that the data is sent immediately after the current JS task completes, ensuring low, predictable latency for real-time applications.
+
+#### **4.3. The Native Collector: `Body.ValueBufferer`**
+
+When a consuming method like `.text()` is called on a body that cannot be resolved synchronously, the `Body.ValueBufferer` (`Body.zig`) is used to efficiently collect all chunks into a single native buffer.
+
+- **Instantiation:** A `Body.ValueBufferer` is created with a callback, `onFinishedBuffering`, which will be invoked upon completion to resolve the original JS promise.
+- **Native Piping (`onStreamPipe`):** For a `ByteStream` source, the bufferer sets itself as the `pipe` destination. The `ByteStream.onData` method, instead of interacting with JavaScript, now directly calls the bufferer's `onStreamPipe` function. This function appends the received `streams.Result` slice to its internal `stream_buffer`. The entire collection loop happens natively.
+- **Completion:** When a chunk with the `_and_done` flag is received, `onStreamPipe` calls the `onFinishedBuffering` callback, passing the final, fully concatenated buffer. This callback then resolves the original JavaScript promise.
+
+**Architectural Impact:** This pattern ensures that even when a body must be fully buffered, the collection process is highly efficient. Data chunks are concatenated in native memory without repeatedly crossing the JS boundary, minimizing overhead.
+
+#### **4.4. Memory and String Optimizations**
+
+- **`Blob` and `Blob.Store` (`Blob.zig`):** A `Blob` is a lightweight handle to a `Blob.Store`. The store can be backed by memory (`.bytes`), a file (`.file`), or an S3 object (`.s3`).
This allows Bun to implement optimized operations based on the blob's backing store (e.g., `Bun.write(file1, file2)` becomes a native file copy via `copy_file.zig`).
+- **`Blob.slice()` as a Zero-Copy View:** `blob.slice()` is a constant-time operation that creates a new `Blob` handle pointing to the same store but with a different `offset` and `size`, avoiding any data duplication.
+- **`is_all_ascii` Flag:** `Blob`s and `ByteStream`s track whether their content is known to be pure ASCII. This allows `.text()` to skip expensive UTF-8 validation and decoding for a large class of text-based data, treating the Latin-1 bytes directly as a string.
+- **`WTFStringImpl` Integration:** Bun avoids copying JS strings by default, instead storing a pointer to WebKit's internal `WTF::StringImpl` (`Body.Value.WTFStringImpl`). The conversion to a UTF-8 byte buffer is deferred until it's absolutely necessary (e.g., writing to a socket), avoiding copies for string-based operations that might never touch the network.
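+
+A small sketch of these behaviors through public APIs (the no-copy and skip-decode properties are internal guarantees described above, not directly observable from JavaScript):
+
+```ts
+const blob = new Blob(["hello world"]); // memory-backed store (.bytes)
+const view = blob.slice(0, 5);          // new handle, same store: no bytes copied
+console.log(await view.text());         // "hello": pure-ASCII content can skip UTF-8 decoding
+
+// File-backed stores enable native fast paths: copying file-to-file can happen
+// without the bytes ever entering JavaScript.
+await Bun.write("copy.txt", Bun.file("original.txt"));
+```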
Buffered?"} + C -->|"✅ Yes"|D["⚡ Optimization 1
Sync Coercion"] + C -->|"❌ No"|E{"❓ Is Stream
Native?"} + D --> F(("📄 Final String")) + + E -->|"✅ Yes"|G["🚀 Optimization 2
Direct Pipe to
Native ValueBufferer"] + E -->|"❌ No"|H["🐌 JS Fallback
read() loop"] + + G --> I{"💾 Native
Buffering"} + H --> I + + I --> J["🔤 Decode
Buffer"] + J --> F + end + + subgraph Legend + direction LR + L1("🟨 JS Layer") + L2("🟦 Native Layer") + style L1 fill:#fef3c7,stroke:#f59e0b,stroke-width:2px,color:#92400e + style L2 fill:#dbeafe,stroke:#3b82f6,stroke-width:2px,color:#1e40af + end + + style flow fill:#f8fafc,stroke:#64748b,stroke-width:2px + style A fill:#fef3c7,stroke:#f59e0b,stroke-width:2px,color:#92400e + style B fill:#fef3c7,stroke:#f59e0b,stroke-width:2px,color:#92400e + style H fill:#fee2e2,stroke:#ef4444,stroke-width:2px,color:#991b1b + style C fill:#e0e7ff,stroke:#6366f1,stroke-width:2px,color:#4338ca + style E fill:#e0e7ff,stroke:#6366f1,stroke-width:2px,color:#4338ca + style D fill:#dbeafe,stroke:#3b82f6,stroke-width:3px,color:#1e40af + style G fill:#dbeafe,stroke:#3b82f6,stroke-width:3px,color:#1e40af + style I fill:#e0e7ff,stroke:#6366f1,stroke-width:2px,color:#4338ca + style J fill:#e0e7ff,stroke:#6366f1,stroke-width:2px,color:#4338ca + style F fill:#d1fae5,stroke:#10b981,stroke-width:4px,color:#065f46 + +``` + +**Diagram 5: Unified Consumption Flow** + +1. User calls `response.text()`. +2. Bun checks if the body is already fully buffered in memory. +3. **Path 1 (Fastest):** If yes, it performs the **Synchronous Coercion** optimization and returns a resolved promise. +4. **Path 2 (Fast):** If no, it checks the stream's tag. If it's a native source (`File`, `Bytes`), it uses the **Direct Path** to pipe the stream to a native `Body.ValueBufferer`. +5. **Path 3 (Slowest):** If it's a generic `JavaScript` stream, it falls back to a JS-based `read()` loop that pushes chunks to the `Body.ValueBufferer`. +6. Once the bufferer is full, the final buffer is decoded and the original promise is resolved. + +## 6. Conclusion + +Streams in Bun aggressively optimize common paths, while providing a fully WHATWG-compliant API. + +- **Key Architectural Principle:** Dispatching between generic and optimized paths based on runtime type information (tagging) is the central strategy. +- **Primary Optimizations:** The **Synchronous Coercion Fast Path** and the **Direct Native Piping Path** are the two most significant innovations, eliminating entire layers of abstraction for common use cases. +- **Supporting Optimizations:** Efficient async iteration (`readMany`), intelligent sink-side buffering (`AutoFlusher`), and careful memory management (`owned` vs. `temporary` buffers, object pooling) contribute to a system that is fast at every level. + +This deep integration between the native and JavaScript layers allows Bun to deliver performance that rivals, and in many cases exceeds, that of systems written in lower-level languages, without sacrificing the productivity and ecosystem of JavaScript. diff --git a/src/bun.js/SavedSourceMap.zig b/src/bun.js/SavedSourceMap.zig index d909f854f1..01e6286785 100644 --- a/src/bun.js/SavedSourceMap.zig +++ b/src/bun.js/SavedSourceMap.zig @@ -50,6 +50,7 @@ pub const SavedMappings = struct { @as(usize, @bitCast(this.data[8..16].*)), 1, @as(usize, @bitCast(this.data[16..24].*)), + .{}, ); switch (result) { .fail => |fail| { @@ -78,6 +79,8 @@ pub const SavedMappings = struct { } }; +const BakeSourceProvider = bun.sourcemap.BakeSourceProvider; + /// ParsedSourceMap is the canonical form for sourcemaps, /// /// but `SavedMappings` and `SourceProviderMap` are much cheaper to construct. 
@@ -86,6 +89,7 @@ pub const Value = bun.TaggedPointerUnion(.{ ParsedSourceMap, SavedMappings, SourceProviderMap, + BakeSourceProvider, }); pub const MissingSourceMapNoteInfo = struct { @@ -102,6 +106,10 @@ pub const MissingSourceMapNoteInfo = struct { } }; +pub fn putBakeSourceProvider(this: *SavedSourceMap, opaque_source_provider: *BakeSourceProvider, path: []const u8) void { + this.putValue(path, Value.init(opaque_source_provider)) catch bun.outOfMemory(); +} + pub fn putZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *anyopaque, path: []const u8) void { const source_provider: *SourceProviderMap = @ptrCast(opaque_source_provider); this.putValue(path, Value.init(source_provider)) catch bun.outOfMemory(); @@ -120,7 +128,7 @@ pub fn removeZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *a } } else if (old_value.get(ParsedSourceMap)) |map| { if (map.underlying_provider.provider()) |prov| { - if (@intFromPtr(prov) == @intFromPtr(opaque_source_provider)) { + if (@intFromPtr(prov.ptr()) == @intFromPtr(opaque_source_provider)) { this.map.removeByPtr(entry.key_ptr); map.deref(); } @@ -246,11 +254,39 @@ fn getWithContent( MissingSourceMapNoteInfo.path = storage; return .{}; }, + @field(Value.Tag, @typeName(BakeSourceProvider)) => { + // TODO: This is a copy-paste of above branch + const ptr: *BakeSourceProvider = Value.from(mapping.value_ptr.*).as(BakeSourceProvider); + this.unlock(); + + // Do not lock the mutex while we're parsing JSON! + if (ptr.getSourceMap(path, .none, hint)) |parse| { + if (parse.map) |map| { + map.ref(); + // The mutex is not locked. We have to check the hash table again. + this.putValue(path, Value.init(map)) catch bun.outOfMemory(); + + return parse; + } + } + + this.lock(); + defer this.unlock(); + // does not have a valid source map. let's not try again + _ = this.map.remove(hash); + + // Store path for a user note. 
+ const storage = MissingSourceMapNoteInfo.storage[0..path.len]; + @memcpy(storage, path); + MissingSourceMapNoteInfo.path = storage; + return .{}; + }, else => { if (Environment.allow_assert) { @panic("Corrupt pointer tag"); } this.unlock(); + return .{}; }, } @@ -275,7 +311,7 @@ pub fn resolveMapping( const map = parse.map orelse return null; const mapping = parse.mapping orelse - SourceMap.Mapping.find(map.mappings, line, column) orelse + map.mappings.find(line, column) orelse return null; return .{ diff --git a/src/bun.js/VirtualMachine.zig b/src/bun.js/VirtualMachine.zig index 10e2a063de..adad143990 100644 --- a/src/bun.js/VirtualMachine.zig +++ b/src/bun.js/VirtualMachine.zig @@ -535,60 +535,80 @@ fn wrapUnhandledRejectionErrorForUncaughtException(globalObject: *JSGlobalObject return globalObject.ERR(.UNHANDLED_REJECTION, msg, .{"undefined"}).toJS(); } -pub fn unhandledRejection(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, reason: JSValue, promise: JSValue) bool { +pub fn unhandledRejection(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, reason: JSValue, promise: JSValue) void { if (this.isShuttingDown()) { Output.debugWarn("unhandledRejection during shutdown.", .{}); - return true; + return; } if (isBunTest) { this.unhandled_error_counter += 1; this.onUnhandledRejection(this, globalObject, reason); - return true; + return; } switch (this.unhandledRejectionsMode()) { .bun => { - if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return true; + if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return; // continue to default handler }, .none => { - defer this.eventLoop().drainMicrotasks(); - if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return true; - return true; // ignore the unhandled rejection + defer this.eventLoop().drainMicrotasks() catch |e| switch (e) { + error.JSExecutionTerminated => {}, // we are returning anyway + }; + if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return; + return; // ignore the unhandled rejection }, .warn => { - defer this.eventLoop().drainMicrotasks(); + defer this.eventLoop().drainMicrotasks() catch |e| switch (e) { + error.JSExecutionTerminated => {}, // we are returning anyway + }; _ = Bun__handleUnhandledRejection(globalObject, reason, promise); Bun__promises__emitUnhandledRejectionWarning(globalObject, reason, promise); - return true; + return; }, .warn_with_error_code => { - defer this.eventLoop().drainMicrotasks(); - if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return true; + defer this.eventLoop().drainMicrotasks() catch |e| switch (e) { + error.JSExecutionTerminated => {}, // we are returning anyway + }; + if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return; Bun__promises__emitUnhandledRejectionWarning(globalObject, reason, promise); this.exit_handler.exit_code = 1; - return true; + return; }, .strict => { - defer this.eventLoop().drainMicrotasks(); + defer this.eventLoop().drainMicrotasks() catch |e| switch (e) { + error.JSExecutionTerminated => {}, // we are returning anyway + }; const wrapped_reason = wrapUnhandledRejectionErrorForUncaughtException(globalObject, reason); _ = this.uncaughtException(globalObject, wrapped_reason, true); - if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return true; + if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return; Bun__promises__emitUnhandledRejectionWarning(globalObject, reason, promise); - return 
true; + return; }, .throw => { - defer this.eventLoop().drainMicrotasks(); - if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) return true; + if (Bun__handleUnhandledRejection(globalObject, reason, promise) > 0) { + this.eventLoop().drainMicrotasks() catch |e| switch (e) { + error.JSExecutionTerminated => {}, // we are returning anyway + }; + return; + } const wrapped_reason = wrapUnhandledRejectionErrorForUncaughtException(globalObject, reason); - if (this.uncaughtException(globalObject, wrapped_reason, true)) return true; + if (this.uncaughtException(globalObject, wrapped_reason, true)) { + this.eventLoop().drainMicrotasks() catch |e| switch (e) { + error.JSExecutionTerminated => {}, // we are returning anyway + }; + return; + } // continue to default handler + this.eventLoop().drainMicrotasks() catch |e| switch (e) { + error.JSExecutionTerminated => return, + }; }, } this.unhandled_error_counter += 1; this.onUnhandledRejection(this, globalObject, reason); - return false; + return; } pub fn handledPromise(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, promise: JSValue) bool { @@ -636,7 +656,7 @@ pub fn uncaughtException(this: *JSC.VirtualMachine, globalObject: *JSGlobalObjec pub fn handlePendingInternalPromiseRejection(this: *JSC.VirtualMachine) void { var promise = this.pending_internal_promise.?; if (promise.status(this.global.vm()) == .rejected and !promise.isHandled(this.global.vm())) { - _ = this.unhandledRejection(this.global, promise.result(this.global.vm()), promise.asValue()); + this.unhandledRejection(this.global, promise.result(this.global.vm()), promise.asValue()); promise.setHandled(this.global.vm()); } } @@ -849,7 +869,13 @@ pub fn waitForPromise(this: *VirtualMachine, promise: JSC.AnyPromise) void { } pub fn waitForTasks(this: *VirtualMachine) void { - this.eventLoop().waitForTasks(); + while (this.isEventLoopAlive()) { + this.eventLoop().tick(); + + if (this.isEventLoopAlive()) { + this.eventLoop().autoTick(); + } + } } pub const MacroMap = std.AutoArrayHashMap(i32, JSC.C.JSObjectRef); @@ -1660,7 +1686,7 @@ pub fn resolveMaybeNeedsTrailingSlash( printed, ), }; - res.* = ErrorableString.err(error.NameTooLong, (try bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice())).asVoid()); + res.* = ErrorableString.err(error.NameTooLong, (try bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()))); return; } @@ -1750,7 +1776,7 @@ pub fn resolveMaybeNeedsTrailingSlash( }; { - res.* = ErrorableString.err(err, (try bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice())).asVoid()); + res.* = ErrorableString.err(err, (try bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()))); } return; @@ -1772,7 +1798,8 @@ pub export fn Bun__drainMicrotasksFromJS(globalObject: *JSGlobalObject, callfram } pub fn drainMicrotasks(this: *VirtualMachine) void { - this.eventLoop().drainMicrotasks(); + // TODO: properly propagate exception upwards + this.eventLoop().drainMicrotasks() catch {}; } pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, referrer: bun.String, log: *logger.Log, ret: *ErrorableResolvedSource, err: anyerror) void { @@ -1794,7 +1821,7 @@ pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, refer }; }; { - ret.* = ErrorableResolvedSource.err(err, (bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg) catch |e| 
globalThis.takeException(e)).asVoid()); + ret.* = ErrorableResolvedSource.err(err, (bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg) catch |e| globalThis.takeException(e))); } return; }, @@ -1802,13 +1829,13 @@ pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, refer 1 => { const msg = log.msgs.items[0]; ret.* = ErrorableResolvedSource.err(err, switch (msg.metadata) { - .build => (bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg) catch |e| globalThis.takeException(e)).asVoid(), + .build => (bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg) catch |e| globalThis.takeException(e)), .resolve => (bun.api.ResolveMessage.create( globalThis, globalThis.allocator(), msg, referrer.toUTF8(bun.default_allocator).slice(), - ) catch |e| globalThis.takeException(e)).asVoid(), + ) catch |e| globalThis.takeException(e)), }); return; }, @@ -1841,7 +1868,7 @@ pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, refer specifier, }) catch unreachable, ), - ).asVoid(), + ), ); }, } @@ -1910,8 +1937,7 @@ pub noinline fn runErrorHandler(this: *VirtualMachine, result: JSValue, exceptio const writer = buffered_writer.writer(); - if (result.isException(this.global.vm())) { - const exception = @as(*Exception, @ptrCast(result.asVoid())); + if (result.asException(this.jsc)) |exception| { this.printException( exception, exception_list, @@ -1987,7 +2013,7 @@ fn loadPreloads(this: *VirtualMachine) !?*JSInternalPromise { return error.ModuleNotFound; }, }; - var promise = JSModuleLoader.import(this.global, &String.fromBytes(result.path().?.text)); + var promise = try JSModuleLoader.import(this.global, &String.fromBytes(result.path().?.text)); this.pending_internal_promise = promise; JSValue.fromCell(promise).protect(); @@ -3091,7 +3117,7 @@ fn printErrorInstance( } formatter.format( - JSC.Formatter.Tag.getAdvanced( + try JSC.Formatter.Tag.getAdvanced( value, this.global, .{ .disable_inspect_custom = true, .hide_global = true }, @@ -3132,7 +3158,7 @@ fn printErrorInstance( // "cause" is not enumerable, so the above loop won't see it. if (!saw_cause) { - if (error_instance.getOwn(this.global, "cause")) |cause| { + if (try error_instance.getOwn(this.global, "cause")) |cause| { if (cause.jsType() == .ErrorInstance) { cause.protect(); try errors_to_append.append(cause); @@ -3141,7 +3167,7 @@ fn printErrorInstance( } } else if (mode == .js and error_instance != .zero) { // If you do reportError([1,2,3]) we should still show something at least.
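The `unhandledRejection` rewrite earlier in this file (now returning `void`, with `drainMicrotasks` failures swallowed once JS execution has terminated) implements per-mode behavior that is observable from JavaScript. A sketch of what each mode does, assuming Bun's flag mirrors Node's `--unhandled-rejections=` (the diff's `.bun` variant is Bun's own default handler; the comments summarize the branches above, not exact output):

```ts
// rejections.ts — run with: bun --unhandled-rejections=<mode> rejections.ts
process.on("uncaughtException", err => {
  // In "strict" mode the rejection reason is re-raised as an uncaught
  // exception, wrapped in ERR_UNHANDLED_REJECTION when it isn't an Error.
  console.log("uncaughtException saw:", err.message);
});

Promise.reject(new Error("boom"));

// none                 -> ignored (microtasks are still drained)
// warn                 -> warning printed, exit code unchanged
// warn-with-error-code -> warning printed, exit code forced to 1
// strict               -> the uncaughtException listener above fires first
// throw (the default)  -> uncaught exception only if no listener handled it
```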
- const tag = JSC.Formatter.Tag.getAdvanced( + const tag = try JSC.Formatter.Tag.getAdvanced( error_instance, this.global, .{ .disable_inspect_custom = true, .hide_global = true }, @@ -3382,7 +3408,7 @@ pub fn resolveSourceMapping( this.source_mappings.putValue(path, SavedSourceMap.Value.init(map)) catch bun.outOfMemory(); - const mapping = SourceMap.Mapping.find(map.mappings, line, column) orelse + const mapping = map.mappings.find(line, column) orelse return null; return .{ @@ -3582,7 +3608,7 @@ pub const ExitHandler = struct { pub fn dispatchOnBeforeExit(this: *ExitHandler) void { JSC.markBinding(@src()); const vm: *VirtualMachine = @alignCast(@fieldParentPtr("exit_handler", this)); - Process__dispatchOnBeforeExit(vm.global, this.exit_code); + bun.jsc.fromJSHostCallGeneric(vm.global, @src(), Process__dispatchOnBeforeExit, .{ vm.global, this.exit_code }) catch return; } }; diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig index 22901fde80..6f1d6e5897 100644 --- a/src/bun.js/api/BunObject.zig +++ b/src/bun.js/api/BunObject.zig @@ -95,7 +95,7 @@ pub const BunObject = struct { fn toJSGetter(comptime getter: anytype) LazyPropertyCallback { return struct { pub fn callback(this: *JSC.JSGlobalObject, object: *JSC.JSObject) callconv(JSC.conv) JSValue { - return bun.jsc.toJSHostValue(this, getter(this, object)); + return bun.jsc.toJSHostCall(this, @src(), getter, .{ this, object }); } }.callback; } @@ -271,8 +271,8 @@ pub fn braces(global: *JSC.JSGlobalObject, brace_str: bun.String, opts: gen.Brac pub fn which(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments_ = callframe.arguments_old(2); - const path_buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(path_buf); + const path_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(path_buf); var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); defer arguments.deinit(); const path_arg = arguments.nextEat() orelse { @@ -381,7 +381,7 @@ pub fn inspectTable(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) const writer = buffered_writer.writer(); const Writer = @TypeOf(writer); const properties: JSValue = if (arguments[1].jsType().isArray()) arguments[1] else .js_undefined; - var table_printer = ConsoleObject.TablePrinter.init( + var table_printer = try ConsoleObject.TablePrinter.init( globalThis, .Log, value, @@ -762,7 +762,7 @@ pub fn sleepSync(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) b } //NOTE: if argument is > max(i32) then it will be truncated - const milliseconds = arg.coerce(i32, globalObject); + const milliseconds = try arg.coerce(i32, globalObject); if (milliseconds < 0) { return globalObject.throwInvalidArguments("argument to sleepSync must not be negative, got {d}", .{milliseconds}); } @@ -847,7 +847,7 @@ fn doResolveWithArgs(ctx: *JSC.JSGlobalObject, specifier: bun.String, from: bun. 
); if (!errorable.success) { - return ctx.throwValue(bun.cast(JSC.C.JSValueRef, errorable.result.err.ptr.?).?.value()); + return ctx.throwValue(errorable.result.err.value); } if (query_string.len > 0) { @@ -905,7 +905,7 @@ export fn Bun__resolveSync(global: *JSGlobalObject, specifier: JSValue, source: const source_str = source.toBunString(global) catch return .zero; defer source_str.deref(); - return JSC.toJSHostValue(global, doResolveWithArgs(global, specifier_str, source_str, is_esm, true, is_user_require_resolve)); + return JSC.toJSHostCall(global, @src(), doResolveWithArgs, .{ global, specifier_str, source_str, is_esm, true, is_user_require_resolve }); } export fn Bun__resolveSyncWithPaths( @@ -934,12 +934,12 @@ export fn Bun__resolveSyncWithPaths( bun_vm.transpiler.resolver.custom_dir_paths = paths; defer bun_vm.transpiler.resolver.custom_dir_paths = null; - return JSC.toJSHostValue(global, doResolveWithArgs(global, specifier_str, source_str, is_esm, true, is_user_require_resolve)); + return JSC.toJSHostCall(global, @src(), doResolveWithArgs, .{ global, specifier_str, source_str, is_esm, true, is_user_require_resolve }); } export fn Bun__resolveSyncWithStrings(global: *JSGlobalObject, specifier: *bun.String, source: *bun.String, is_esm: bool) JSC.JSValue { Output.scoped(.importMetaResolve, false)("source: {s}, specifier: {s}", .{ source.*, specifier.* }); - return JSC.toJSHostValue(global, doResolveWithArgs(global, specifier.*, source.*, is_esm, true, false)); + return JSC.toJSHostCall(global, @src(), doResolveWithArgs, .{ global, specifier.*, source.*, is_esm, true, false }); } export fn Bun__resolveSyncWithSource(global: *JSGlobalObject, specifier: JSValue, source: *bun.String, is_esm: bool, is_user_require_resolve: bool) JSC.JSValue { @@ -948,7 +948,7 @@ export fn Bun__resolveSyncWithSource(global: *JSGlobalObject, specifier: JSValue if (specifier_str.length() == 0) { return global.ERR(.INVALID_ARG_VALUE, "The argument 'id' must be a non-empty string. Received ''", .{}).throw() catch .zero; } - return JSC.toJSHostValue(global, doResolveWithArgs(global, specifier_str, source.*, is_esm, true, is_user_require_resolve)); + return JSC.toJSHostCall(global, @src(), doResolveWithArgs, .{ global, specifier_str, source.*, is_esm, true, is_user_require_resolve }); } pub fn indexOfLine(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { @@ -1184,7 +1184,6 @@ pub fn allocUnsafe(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) b if (!size.isUInt32AsAnyInt()) { return globalThis.throwInvalidArguments("Expected a positive number", .{}); } - return JSC.JSValue.createUninitializedUint8Array(globalThis, size.toUInt64NoTruncate()); } @@ -1249,7 +1248,7 @@ pub fn mmapFile(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun. .result => |map| map, .err => |err| { - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, }; @@ -1315,7 +1314,7 @@ pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) bun.J // We don't really do that right now, but exposing the output source // code here as an easily accessible Blob is even worse for them. // So let's omit any source code files from the list. 
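The predicate swap above (`appearsInEmbeddedFilesArray` instead of `loader.isJavaScriptLike()`) centralizes the old "JavaScript-like" check: anything that should not surface in `Bun.embeddedFiles` is now excluded in one place. A minimal way to observe the filter, assuming a single-file executable that embeds at least one asset (the build command and asset import are illustrative):

```ts
// In a binary built with `bun build --compile ./app.ts --outfile app`
// (where app.ts imports some asset, e.g. a .png), this prints only the
// embedded assets — never the bundled JS/TS sources themselves:
for (const blob of Bun.embeddedFiles) {
  const file = blob as Blob & { name: string }; // entries carry a `name`
  console.log(file.name, file.type, file.size);
}
```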
- if (unsorted_files[index].loader.isJavaScriptLike()) continue; + if (!unsorted_files[index].appearsInEmbeddedFilesArray()) continue; sort_indices.appendAssumeCapacity(@intCast(index)); } @@ -1329,7 +1328,7 @@ pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) bun.J const blob = JSC.WebCore.Blob.new(input_blob.dupeWithContentType(true)); blob.allocator = bun.default_allocator; blob.name = input_blob.name.dupeRef(); - array.putIndex(globalThis, i, blob.toJS(globalThis)); + try array.putIndex(globalThis, i, blob.toJS(globalThis)); i += 1; } @@ -1507,21 +1506,21 @@ pub const JSZlib = struct { var library: Library = .zlib; if (options_val_) |options_val| { if (try options_val.get(globalThis, "windowBits")) |window| { - opts.windowBits = window.coerce(i32, globalThis); + opts.windowBits = try window.coerce(i32, globalThis); library = .zlib; } if (try options_val.get(globalThis, "level")) |level| { - opts.level = level.coerce(i32, globalThis); + opts.level = try level.coerce(i32, globalThis); } if (try options_val.get(globalThis, "memLevel")) |memLevel| { - opts.memLevel = memLevel.coerce(i32, globalThis); + opts.memLevel = try memLevel.coerce(i32, globalThis); library = .zlib; } if (try options_val.get(globalThis, "strategy")) |strategy| { - opts.strategy = strategy.coerce(i32, globalThis); + opts.strategy = try strategy.coerce(i32, globalThis); library = .zlib; } @@ -1633,7 +1632,7 @@ pub const JSZlib = struct { if (options_val_) |options_val| { if (try options_val.get(globalThis, "windowBits")) |window| { - windowBits = window.coerce(i32, globalThis); + windowBits = try window.coerce(i32, globalThis); library = .zlib; } @@ -1648,7 +1647,7 @@ pub const JSZlib = struct { } if (try options_val.get(globalThis, "level")) |level_value| { - level = level_value.coerce(i32, globalThis); + level = try level_value.coerce(i32, globalThis); if (globalThis.hasException()) return .zero; } } @@ -1749,7 +1748,7 @@ pub const JSZstd = struct { fn getLevel(globalThis: *JSGlobalObject, options_val: ?JSValue) bun.JSError!i32 { if (options_val) |option_obj| { if (try option_obj.get(globalThis, "level")) |level_val| { - const value = level_val.coerce(i32, globalThis); + const value = try level_val.coerce(i32, globalThis); if (globalThis.hasException()) return error.JSError; if (value < 1 or value > 22) { @@ -1948,15 +1947,6 @@ pub const JSZstd = struct { const output_slice = this.output; const buffer_value = JSC.JSValue.createBuffer(globalThis, output_slice, bun.default_allocator); - if (globalThis.hasException()) { - promise.reject(globalThis, error.JSError); - return; - } - if (buffer_value == .zero) { - promise.reject(globalThis, ZigString.init("Failed to create buffer").toErrorInstance(globalThis)); - return; - } - this.output = &[_]u8{}; promise.resolve(globalThis, buffer_value); } diff --git a/src/bun.js/api/FFIObject.zig b/src/bun.js/api/FFIObject.zig index dcd733ac58..0f32dab42b 100644 --- a/src/bun.js/api/FFIObject.zig +++ b/src/bun.js/api/FFIObject.zig @@ -546,7 +546,7 @@ pub fn toBuffer( valueLength: ?JSValue, finalizationCtxOrPtr: ?JSValue, finalizationCallback: ?JSValue, -) JSC.JSValue { +) bun.JSError!JSC.JSValue { switch (getPtrSlice(globalThis, value, byteOffset, valueLength)) { .err => |err| { return err; diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index 66da89ea28..7ffa49672d 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -86,11 +86,11 @@ pub const JSBundler = struct { // Plugins must be resolved first as they 
are allowed to mutate the config JSValue if (try config.getArray(globalThis, "plugins")) |array| { - const length = array.getLength(globalThis); - var iter = array.arrayIterator(globalThis); + const length = try array.getLength(globalThis); + var iter = try array.arrayIterator(globalThis); var onstart_promise_array: JSValue = .js_undefined; var i: usize = 0; - while (iter.next()) |plugin| : (i += 1) { + while (try iter.next()) |plugin| : (i += 1) { if (!plugin.isObject()) { return globalThis.throwInvalidArguments("Expected plugin to be an object", .{}); } @@ -267,8 +267,8 @@ pub const JSBundler = struct { } if (try config.getArray(globalThis, "entrypoints") orelse try config.getArray(globalThis, "entryPoints")) |entry_points| { - var iter = entry_points.arrayIterator(globalThis); - while (iter.next()) |entry_point| { + var iter = try entry_points.arrayIterator(globalThis); + while (try iter.next()) |entry_point| { var slice = try entry_point.toSliceOrNull(globalThis); defer slice.deinit(); try this.entry_points.insert(slice.slice()); @@ -291,8 +291,8 @@ pub const JSBundler = struct { defer slice.deinit(); try this.conditions.insert(slice.slice()); } else if (conditions_value.jsType().isArray()) { - var iter = conditions_value.arrayIterator(globalThis); - while (iter.next()) |entry_point| { + var iter = try conditions_value.arrayIterator(globalThis); + while (try iter.next()) |entry_point| { var slice = try entry_point.toSliceOrNull(globalThis); defer slice.deinit(); try this.conditions.insert(slice.slice()); @@ -332,8 +332,8 @@ pub const JSBundler = struct { } if (try config.getOwnArray(globalThis, "external")) |externals| { - var iter = externals.arrayIterator(globalThis); - while (iter.next()) |entry_point| { + var iter = try externals.arrayIterator(globalThis); + while (try iter.next()) |entry_point| { var slice = try entry_point.toSliceOrNull(globalThis); defer slice.deinit(); try this.external.insert(slice.slice()); @@ -341,8 +341,8 @@ pub const JSBundler = struct { } if (try config.getOwnArray(globalThis, "drop")) |drops| { - var iter = drops.arrayIterator(globalThis); - while (iter.next()) |entry| { + var iter = try drops.arrayIterator(globalThis); + while (try iter.next()) |entry| { var slice = try entry.toSliceOrNull(globalThis); defer slice.deinit(); try this.drop.insert(slice.slice()); @@ -782,7 +782,7 @@ pub const JSBundler = struct { } export fn JSBundlerPlugin__onDefer(load: *Load, global: *JSC.JSGlobalObject) JSValue { - return JSC.toJSHostValue(global, load.onDefer(global)); + return JSC.toJSHostCall(global, @src(), Load.onDefer, .{ load, global }); } fn onDefer(this: *Load, globalObject: *JSC.JSGlobalObject) bun.JSError!JSValue { if (this.called_defer) { diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig index b9ec44b4d4..376c68e560 100644 --- a/src/bun.js/api/JSTranspiler.zig +++ b/src/bun.js/api/JSTranspiler.zig @@ -6,7 +6,6 @@ const JSC = bun.JSC; const Transpiler = bun.transpiler; const options = @import("../../options.zig"); const ZigString = JSC.ZigString; -const JSObject = JSC.JSObject; const JSValue = bun.JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; const strings = bun.strings; @@ -354,13 +353,13 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st single_external[0] = std.fmt.allocPrint(allocator, "{}", .{external}) catch unreachable; transpiler.transform.external = single_external; } else if (toplevel_type.isArray()) { - const count = external.getLength(globalThis); + const count = try 
external.getLength(globalThis); if (count == 0) break :external; var externals = allocator.alloc(string, count) catch unreachable; - var iter = external.arrayIterator(globalThis); + var iter = try external.arrayIterator(globalThis); var i: usize = 0; - while (iter.next()) |entry| { + while (try iter.next()) |entry| { if (!entry.jsType().isStringLike()) { return globalObject.throwInvalidArguments("external must be a string or string[]", .{}); } @@ -407,9 +406,9 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st } if (!kind.isStringLike()) { - tsconfig.jsonStringify(globalThis, 0, &out); + try tsconfig.jsonStringify(globalThis, 0, &out); } else { - out = tsconfig.toBunString(globalThis) catch @panic("unexpected exception"); + out = try tsconfig.toBunString(globalThis); } if (out.isEmpty()) break :tsconfig; @@ -446,7 +445,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st defer out.deref(); // TODO: write a converter between JSC types and Bun AST types if (is_object) { - macros.jsonStringify(globalThis, 0, &out); + try macros.jsonStringify(globalThis, 0, &out); } else { out = try macros.toBunString(globalThis); } @@ -487,7 +486,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st if (try object.getTruthy(globalThis, "minify")) |minify| { if (minify.isBoolean()) { - transpiler.minify_whitespace = minify.coerce(bool, globalThis); + transpiler.minify_whitespace = minify.toBoolean(); transpiler.minify_syntax = transpiler.minify_whitespace; transpiler.minify_identifiers = transpiler.minify_syntax; } else if (minify.isObject()) { @@ -550,13 +549,13 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st var total_name_buf_len: u32 = 0; var string_count: u32 = 0; - const iter = JSC.JSArrayIterator.init(eliminate, globalThis); + const iter = try JSC.JSArrayIterator.init(eliminate, globalThis); { var length_iter = iter; - while (length_iter.next()) |value| { + while (try length_iter.next()) |value| { if (value.isString()) { - const length = @as(u32, @truncate(value.getLength(globalThis))); - string_count += @as(u32, @intFromBool(length > 0)); + const length: u32 = @truncate(try value.getLength(globalThis)); + string_count += @intFromBool(length > 0); total_name_buf_len += length; } } @@ -567,7 +566,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st try replacements.ensureUnusedCapacity(bun.default_allocator, string_count); { var length_iter = iter; - while (length_iter.next()) |value| { + while (try length_iter.next()) |value| { if (!value.isString()) continue; const str = try value.getZigString(globalThis); if (str.len == 0) continue; @@ -624,10 +623,10 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st continue; } - if (value.isObject() and value.getLength(globalObject) == 2) { - const replacementValue = JSC.JSObject.getIndex(value, globalThis, 1); + if (value.isObject() and try value.getLength(globalObject) == 2) { + const replacementValue = try value.getIndex(globalThis, 1); if (try exportReplacementValue(replacementValue, globalThis)) |to_replace| { - const replacementKey = JSC.JSObject.getIndex(value, globalThis, 0); + const replacementKey = try value.getIndex(globalThis, 0); var slice = (try (try replacementKey.toSlice(globalThis, bun.default_allocator)).cloneIfNeeded(bun.default_allocator)); const replacement_name = slice.slice(); @@ -1065,7 +1064,7 @@ fn namedImportsToJS(global: *JSGlobalObject, 
import_records: []const ImportRecor array.ensureStillAlive(); const path = JSC.ZigString.init(record.path.text).toJS(global); const kind = JSC.ZigString.init(record.kind.label()).toJS(global); - array.putIndex(global, @as(u32, @truncate(i)), JSC.JSValue.createObject2(global, path_label, kind_label, path, kind)); + try array.putIndex(global, @as(u32, @truncate(i)), try JSC.JSValue.createObject2(global, path_label, kind_label, path, kind)); } return array; diff --git a/src/bun.js/api/ResumableSink.classes.ts b/src/bun.js/api/ResumableSink.classes.ts new file mode 100644 index 0000000000..44e4461761 --- /dev/null +++ b/src/bun.js/api/ResumableSink.classes.ts @@ -0,0 +1,33 @@ +import { define } from "../../codegen/class-definitions"; + +function generate(name) { + return define({ + name: name, + construct: true, + finalize: true, + configurable: false, + klass: {}, + JSType: "0b11101110", + proto: { + start: { + fn: "jsStart", + length: 1, + }, + write: { + fn: "jsWrite", + length: 1, + }, + end: { + fn: "jsEnd", + length: 1, + }, + setHandlers: { + fn: "jsSetHandlers", + length: 2, + passThis: true, + }, + }, + values: ["ondrain", "oncancel", "stream"], + }); +} +export default [generate("ResumableFetchSink"), generate("ResumableS3UploadSink")]; diff --git a/src/bun.js/api/Timer.zig b/src/bun.js/api/Timer.zig index 7e21663a13..247ff23b39 100644 --- a/src/bun.js/api/Timer.zig +++ b/src/bun.js/api/Timer.zig @@ -5,13 +5,9 @@ const VirtualMachine = JSC.VirtualMachine; const JSValue = JSC.JSValue; const JSError = bun.JSError; const JSGlobalObject = JSC.JSGlobalObject; -const Debugger = JSC.Debugger; const Environment = bun.Environment; const uv = bun.windows.libuv; -const api = bun.api; -const StatWatcherScheduler = @import("../node/node_fs_stat_watcher.zig").StatWatcherScheduler; const Timer = @This(); -const DNSResolver = @import("./bun/dns_resolver.zig").DNSResolver; /// TimeoutMap is map of i32 to nullable Timeout structs /// i32 is exposed to JavaScript and can be used with clearTimeout, clearInterval, etc. @@ -213,10 +209,6 @@ pub const All = struct { } pub fn getTimeout(this: *All, spec: *timespec, vm: *VirtualMachine) bool { - if (this.active_timer_count == 0) { - return false; - } - var maybe_now: ?timespec = null; while (this.timers.peek()) |min| { const now = maybe_now orelse now: { @@ -499,10 +491,10 @@ pub const All = struct { } break :brk if (TimeoutObject.fromJS(timer_id_value)) |timeout| - &timeout.internals + // clearImmediate should be a noop if anything other than an Immediate is passed to it. + if (kind != .setImmediate) &timeout.internals else return else if (ImmediateObject.fromJS(timer_id_value)) |immediate| // setImmediate can only be cleared by clearImmediate, not by clearTimeout or clearInterval. - // setTimeout and setInterval can be cleared by any of the 3 clear functions. 
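The reworked `clearTimer` dispatch above encodes Node's cross-type rules: an Immediate only answers to `clearImmediate`, and — new in this hunk — `clearImmediate` is now a no-op when handed a Timeout. The observable behavior, sketched in TypeScript (the `as any` casts exist only to bypass the type-level mismatch being demonstrated):

```ts
const t = setTimeout(() => console.log("timeout still ran"), 10);
const i = setImmediate(() => console.log("immediate still ran"));

clearImmediate(t as any); // no-op: wrong kind, the Timeout is left alone
clearTimeout(i as any);   // no-op: Immediates only answer to clearImmediate

// Both messages print. Swapping in clearTimeout(t) / clearImmediate(i)
// would actually cancel them.
```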
if (kind == .setImmediate) &immediate.internals else return else null; @@ -548,698 +540,11 @@ pub const All = struct { } }; -const uws = bun.uws; +pub const EventLoopTimer = @import("./Timer/EventLoopTimer.zig"); -pub const TimeoutObject = struct { - const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{}); - pub const ref = RefCount.ref; - pub const deref = RefCount.deref; - - pub const js = JSC.Codegen.JSTimeout; - pub const toJS = js.toJS; - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - ref_count: RefCount, - event_loop_timer: EventLoopTimer = .{ - .next = .{}, - .tag = .TimeoutObject, - }, - internals: TimerObjectInternals, - - pub fn init( - globalThis: *JSGlobalObject, - id: i32, - kind: Kind, - interval: u31, - callback: JSValue, - arguments: JSValue, - ) JSValue { - // internals are initialized by init() - const timeout = bun.new(TimeoutObject, .{ .ref_count = .init(), .internals = undefined }); - const js_value = timeout.toJS(globalThis); - defer js_value.ensureStillAlive(); - timeout.internals.init( - js_value, - globalThis, - id, - kind, - interval, - callback, - arguments, - ); - - if (globalThis.bunVM().isInspectorEnabled()) { - Debugger.didScheduleAsyncCall( - globalThis, - .DOMTimer, - ID.asyncID(.{ .id = id, .kind = kind.big() }), - kind != .setInterval, - ); - } - - return js_value; - } - - fn deinit(this: *TimeoutObject) void { - this.internals.deinit(); - bun.destroy(this); - } - - pub fn constructor(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) !*TimeoutObject { - _ = callFrame; - return globalObject.throw("Timeout is not constructible", .{}); - } - - pub fn toPrimitive(this: *TimeoutObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - return this.internals.toPrimitive(); - } - - pub fn doRef(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { - return this.internals.doRef(globalThis, callFrame.this()); - } - - pub fn doUnref(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { - return this.internals.doUnref(globalThis, callFrame.this()); - } - - pub fn doRefresh(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { - return this.internals.doRefresh(globalThis, callFrame.this()); - } - - pub fn hasRef(this: *TimeoutObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - return this.internals.hasRef(); - } - - pub fn finalize(this: *TimeoutObject) void { - this.internals.finalize(); - } - - pub fn getDestroyed(this: *TimeoutObject, globalThis: *JSGlobalObject) JSValue { - _ = globalThis; - return .jsBoolean(this.internals.getDestroyed()); - } - - pub fn close(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) JSValue { - this.internals.cancel(globalThis.bunVM()); - return callFrame.this(); - } - - pub fn get_onTimeout(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue { - return TimeoutObject.js.callbackGetCached(thisValue).?; - } - - pub fn set_onTimeout(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void { - TimeoutObject.js.callbackSetCached(thisValue, globalThis, value); - } - - pub fn get_idleTimeout(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue { - return TimeoutObject.js.idleTimeoutGetCached(thisValue).?; - } - - pub fn set_idleTimeout(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void { - 
TimeoutObject.js.idleTimeoutSetCached(thisValue, globalThis, value); - } - - pub fn get_repeat(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue { - return TimeoutObject.js.repeatGetCached(thisValue).?; - } - - pub fn set_repeat(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void { - TimeoutObject.js.repeatSetCached(thisValue, globalThis, value); - } - - pub fn dispose(this: *TimeoutObject, globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - this.internals.cancel(globalThis.bunVM()); - return .js_undefined; - } -}; - -pub const ImmediateObject = struct { - const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{}); - pub const ref = RefCount.ref; - pub const deref = RefCount.deref; - - pub const js = JSC.Codegen.JSImmediate; - pub const toJS = js.toJS; - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - ref_count: RefCount, - event_loop_timer: EventLoopTimer = .{ - .next = .{}, - .tag = .ImmediateObject, - }, - internals: TimerObjectInternals, - - pub fn init( - globalThis: *JSGlobalObject, - id: i32, - callback: JSValue, - arguments: JSValue, - ) JSValue { - // internals are initialized by init() - const immediate = bun.new(ImmediateObject, .{ .ref_count = .init(), .internals = undefined }); - const js_value = immediate.toJS(globalThis); - defer js_value.ensureStillAlive(); - immediate.internals.init( - js_value, - globalThis, - id, - .setImmediate, - 0, - callback, - arguments, - ); - - if (globalThis.bunVM().isInspectorEnabled()) { - Debugger.didScheduleAsyncCall( - globalThis, - .DOMTimer, - ID.asyncID(.{ .id = id, .kind = .setImmediate }), - true, - ); - } - - return js_value; - } - - fn deinit(this: *ImmediateObject) void { - this.internals.deinit(); - bun.destroy(this); - } - - pub fn constructor(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) !*ImmediateObject { - _ = callFrame; - return globalObject.throw("Immediate is not constructible", .{}); - } - - /// returns true if an exception was thrown - pub fn runImmediateTask(this: *ImmediateObject, vm: *VirtualMachine) bool { - return this.internals.runImmediateTask(vm); - } - - pub fn toPrimitive(this: *ImmediateObject, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - return this.internals.toPrimitive(); - } - - pub fn doRef(this: *ImmediateObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { - return this.internals.doRef(globalThis, callFrame.this()); - } - - pub fn doUnref(this: *ImmediateObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { - return this.internals.doUnref(globalThis, callFrame.this()); - } - - pub fn hasRef(this: *ImmediateObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - return this.internals.hasRef(); - } - - pub fn finalize(this: *ImmediateObject) void { - this.internals.finalize(); - } - - pub fn getDestroyed(this: *ImmediateObject, globalThis: *JSGlobalObject) JSValue { - _ = globalThis; - return .jsBoolean(this.internals.getDestroyed()); - } - - pub fn dispose(this: *ImmediateObject, globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - this.internals.cancel(globalThis.bunVM()); - return .js_undefined; - } -}; - -/// Data that TimerObject and ImmediateObject have in common -pub const TimerObjectInternals = struct { - /// Identifier for this timer that is exposed to JavaScript (by `+timer`) - id: i32 = -1, - interval: u31 = 0, - strong_this: JSC.Strong.Optional = 
.empty, - flags: Flags = .{}, - - const Flags = packed struct(u32) { - /// Whenever a timer is inserted into the heap (which happen on creation or refresh), the global - /// epoch is incremented and the new epoch is set on the timer. For timers created by - /// JavaScript, the epoch is used to break ties between timers scheduled for the same - /// millisecond. This ensures that if you set two timers for the same amount of time, and - /// refresh the first one, the first one will fire last. This mimics Node.js's behavior where - /// the refreshed timer will be inserted at the end of a list, which makes it fire later. - epoch: u25 = 0, - - kind: Kind = .setTimeout, - - // we do not allow the timer to be refreshed after we call clearInterval/clearTimeout - has_cleared_timer: bool = false, - is_keeping_event_loop_alive: bool = false, - - // if they never access the timer by integer, don't create a hashmap entry. - has_accessed_primitive: bool = false, - - has_js_ref: bool = true, - - /// Set to `true` only during execution of the JavaScript function so that `_destroyed` can be - /// false during the callback, even though the `state` will be `FIRED`. - in_callback: bool = false, - }; - - fn eventLoopTimer(this: *TimerObjectInternals) *EventLoopTimer { - switch (this.flags.kind) { - .setImmediate => { - const parent: *ImmediateObject = @fieldParentPtr("internals", this); - assert(parent.event_loop_timer.tag == .ImmediateObject); - return &parent.event_loop_timer; - }, - .setTimeout, .setInterval => { - const parent: *TimeoutObject = @fieldParentPtr("internals", this); - assert(parent.event_loop_timer.tag == .TimeoutObject); - return &parent.event_loop_timer; - }, - } - } - - fn ref(this: *TimerObjectInternals) void { - switch (this.flags.kind) { - .setImmediate => @as(*ImmediateObject, @fieldParentPtr("internals", this)).ref(), - .setTimeout, .setInterval => @as(*TimeoutObject, @fieldParentPtr("internals", this)).ref(), - } - } - - fn deref(this: *TimerObjectInternals) void { - switch (this.flags.kind) { - .setImmediate => @as(*ImmediateObject, @fieldParentPtr("internals", this)).deref(), - .setTimeout, .setInterval => @as(*TimeoutObject, @fieldParentPtr("internals", this)).deref(), - } - } - - extern "c" fn Bun__JSTimeout__call(globalObject: *JSC.JSGlobalObject, timer: JSValue, callback: JSValue, arguments: JSValue) bool; - - /// returns true if an exception was thrown - pub fn runImmediateTask(this: *TimerObjectInternals, vm: *VirtualMachine) bool { - if (this.flags.has_cleared_timer or - // unref'd setImmediate callbacks should only run if there are things keeping the event - // loop alive other than setImmediates - (!this.flags.is_keeping_event_loop_alive and !vm.isEventLoopAliveExcludingImmediates())) - { - this.deref(); - return false; - } - - const timer = this.strong_this.get() orelse { - if (Environment.isDebug) { - @panic("TimerObjectInternals.runImmediateTask: this_object is null"); - } - return false; - }; - const globalThis = vm.global; - this.strong_this.deinit(); - this.eventLoopTimer().state = .FIRED; - this.setEnableKeepingEventLoopAlive(vm, false); - - vm.eventLoop().enter(); - const callback = ImmediateObject.js.callbackGetCached(timer).?; - const arguments = ImmediateObject.js.argumentsGetCached(timer).?; - this.ref(); - const exception_thrown = this.run(globalThis, timer, callback, arguments, this.asyncID(), vm); - this.deref(); - - if (this.eventLoopTimer().state == .FIRED) { - this.deref(); - } - - vm.eventLoop().exitMaybeDrainMicrotasks(!exception_thrown); - - return 
exception_thrown; - } - - pub fn asyncID(this: *const TimerObjectInternals) u64 { - return ID.asyncID(.{ .id = this.id, .kind = this.flags.kind.big() }); - } - - pub fn fire(this: *TimerObjectInternals, _: *const timespec, vm: *JSC.VirtualMachine) EventLoopTimer.Arm { - const id = this.id; - const kind = this.flags.kind.big(); - const async_id: ID = .{ .id = id, .kind = kind }; - const has_been_cleared = this.eventLoopTimer().state == .CANCELLED or this.flags.has_cleared_timer or vm.scriptExecutionStatus() != .running; - - this.eventLoopTimer().state = .FIRED; - - const globalThis = vm.global; - const this_object = this.strong_this.get().?; - - const callback: JSValue, const arguments: JSValue, var idle_timeout: JSValue, var repeat: JSValue = switch (kind) { - .setImmediate => .{ - ImmediateObject.js.callbackGetCached(this_object).?, - ImmediateObject.js.argumentsGetCached(this_object).?, - .js_undefined, - .js_undefined, - }, - .setTimeout, .setInterval => .{ - TimeoutObject.js.callbackGetCached(this_object).?, - TimeoutObject.js.argumentsGetCached(this_object).?, - TimeoutObject.js.idleTimeoutGetCached(this_object).?, - TimeoutObject.js.repeatGetCached(this_object).?, - }, - }; - - if (has_been_cleared or !callback.toBoolean()) { - if (vm.isInspectorEnabled()) { - Debugger.didCancelAsyncCall(globalThis, .DOMTimer, ID.asyncID(async_id)); - } - this.setEnableKeepingEventLoopAlive(vm, false); - this.flags.has_cleared_timer = true; - this.strong_this.deinit(); - this.deref(); - - return .disarm; - } - - var time_before_call: timespec = undefined; - - if (kind != .setInterval) { - this.strong_this.clearWithoutDeallocation(); - } else { - time_before_call = timespec.msFromNow(this.interval); - } - this_object.ensureStillAlive(); - - vm.eventLoop().enter(); - { - // Ensure it stays alive for this scope. - this.ref(); - defer this.deref(); - - _ = this.run(globalThis, this_object, callback, arguments, ID.asyncID(async_id), vm); - - switch (kind) { - .setTimeout, .setInterval => { - idle_timeout = TimeoutObject.js.idleTimeoutGetCached(this_object).?; - repeat = TimeoutObject.js.repeatGetCached(this_object).?; - }, - else => {}, - } - - const is_timer_done = is_timer_done: { - // Node doesn't drain microtasks after each timer callback. - if (kind == .setInterval) { - if (!this.shouldRescheduleTimer(repeat, idle_timeout)) { - break :is_timer_done true; - } - switch (this.eventLoopTimer().state) { - .FIRED => { - // If we didn't clear the setInterval, reschedule it starting from - vm.timer.update(this.eventLoopTimer(), &time_before_call); - - if (this.flags.has_js_ref) { - this.setEnableKeepingEventLoopAlive(vm, true); - } - - // The ref count doesn't change. It wasn't decremented. - }, - .ACTIVE => { - // The developer called timer.refresh() synchronously in the callback. - vm.timer.update(this.eventLoopTimer(), &time_before_call); - - // Balance out the ref count. - // the transition from "FIRED" -> "ACTIVE" caused it to increment. - this.deref(); - }, - else => { - break :is_timer_done true; - }, - } - } else { - if (kind == .setTimeout and !repeat.isNull()) { - if (idle_timeout.getNumber()) |num| { - if (num != -1) { - this.convertToInterval(globalThis, this_object, repeat); - break :is_timer_done false; - } - } - } - - if (this.eventLoopTimer().state == .FIRED) { - break :is_timer_done true; - } - } - - break :is_timer_done false; - }; - - if (is_timer_done) { - this.setEnableKeepingEventLoopAlive(vm, false); - // The timer will not be re-entered into the event loop at this point. 
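Much of the removed `fire` body above (it moves to the `Timer/TimerObjectInternals.zig` imported later in this diff) handles one subtle case: the timer's state flips to `FIRED` before the callback runs, and a synchronous `.refresh()` inside the callback flips it back to `ACTIVE`, which the ref-count bookkeeping has to balance. The JS-visible result is that a Timeout can re-arm itself from its own callback:

```ts
let runs = 0;
const t = setTimeout(function tick() {
  runs += 1;
  console.log("run", runs);
  // Re-arm the same Timeout from inside its own callback; the FIRED ->
  // ACTIVE transition handled above is what makes this safe.
  if (runs < 3) t.refresh();
}, 50);
```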
- this.deref(); - } - } - vm.eventLoop().exit(); - - return .disarm; - } - - fn convertToInterval(this: *TimerObjectInternals, global: *JSGlobalObject, timer: JSValue, repeat: JSValue) void { - bun.debugAssert(this.flags.kind == .setTimeout); - - const vm = global.bunVM(); - - const new_interval: u31 = if (repeat.getNumber()) |num| if (num < 1 or num > std.math.maxInt(u31)) 1 else @intFromFloat(num) else 1; - - // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L613 - TimeoutObject.js.idleTimeoutSetCached(timer, global, repeat); - this.strong_this.set(global, timer); - this.flags.kind = .setInterval; - this.interval = new_interval; - this.reschedule(timer, vm); - } - - pub fn run(this: *TimerObjectInternals, globalThis: *JSC.JSGlobalObject, timer: JSValue, callback: JSValue, arguments: JSValue, async_id: u64, vm: *JSC.VirtualMachine) bool { - if (vm.isInspectorEnabled()) { - Debugger.willDispatchAsyncCall(globalThis, .DOMTimer, async_id); - } - - defer { - if (vm.isInspectorEnabled()) { - Debugger.didDispatchAsyncCall(globalThis, .DOMTimer, async_id); - } - } - - // Bun__JSTimeout__call handles exceptions. - this.flags.in_callback = true; - defer this.flags.in_callback = false; - return Bun__JSTimeout__call(globalThis, timer, callback, arguments); - } - - pub fn init( - this: *TimerObjectInternals, - timer: JSValue, - global: *JSGlobalObject, - id: i32, - kind: Kind, - interval: u31, - callback: JSValue, - arguments: JSValue, - ) void { - const vm = global.bunVM(); - this.* = .{ - .id = id, - .flags = .{ .kind = kind, .epoch = vm.timer.epoch }, - .interval = interval, - }; - - if (kind == .setImmediate) { - ImmediateObject.js.argumentsSetCached(timer, global, arguments); - ImmediateObject.js.callbackSetCached(timer, global, callback); - const parent: *ImmediateObject = @fieldParentPtr("internals", this); - vm.enqueueImmediateTask(parent); - this.setEnableKeepingEventLoopAlive(vm, true); - // ref'd by event loop - parent.ref(); - } else { - TimeoutObject.js.argumentsSetCached(timer, global, arguments); - TimeoutObject.js.callbackSetCached(timer, global, callback); - TimeoutObject.js.idleTimeoutSetCached(timer, global, JSC.jsNumber(interval)); - TimeoutObject.js.repeatSetCached(timer, global, if (kind == .setInterval) JSC.jsNumber(interval) else .null); - - // this increments the refcount - this.reschedule(timer, vm); - } - - this.strong_this.set(global, timer); - } - - pub fn doRef(this: *TimerObjectInternals, _: *JSC.JSGlobalObject, this_value: JSValue) JSValue { - this_value.ensureStillAlive(); - - const did_have_js_ref = this.flags.has_js_ref; - this.flags.has_js_ref = true; - - // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L256 - // and - // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L685-L687 - if (!did_have_js_ref and !this.flags.has_cleared_timer) { - this.setEnableKeepingEventLoopAlive(JSC.VirtualMachine.get(), true); - } - - return this_value; - } - - pub fn doRefresh(this: *TimerObjectInternals, globalObject: *JSC.JSGlobalObject, this_value: JSValue) JSValue { - // Immediates do not have a refresh function, and our binding generator should not let this - // function be reached even if you override the `this` value calling a Timeout object's - // `refresh` method - assert(this.flags.kind != .setImmediate); - - // setImmediate does not support refreshing and we do not support refreshing after cleanup - if (this.id == -1 
or this.flags.kind == .setImmediate or this.flags.has_cleared_timer) { - return this_value; - } - - this.strong_this.set(globalObject, this_value); - this.reschedule(this_value, VirtualMachine.get()); - - return this_value; - } - - pub fn doUnref(this: *TimerObjectInternals, _: *JSC.JSGlobalObject, this_value: JSValue) JSValue { - this_value.ensureStillAlive(); - - const did_have_js_ref = this.flags.has_js_ref; - this.flags.has_js_ref = false; - - if (did_have_js_ref) { - this.setEnableKeepingEventLoopAlive(JSC.VirtualMachine.get(), false); - } - - return this_value; - } - - pub fn cancel(this: *TimerObjectInternals, vm: *VirtualMachine) void { - this.setEnableKeepingEventLoopAlive(vm, false); - this.flags.has_cleared_timer = true; - - if (this.flags.kind == .setImmediate) return; - - const was_active = this.eventLoopTimer().state == .ACTIVE; - - this.eventLoopTimer().state = .CANCELLED; - this.strong_this.deinit(); - - if (was_active) { - vm.timer.remove(this.eventLoopTimer()); - this.deref(); - } - } - - fn shouldRescheduleTimer(this: *TimerObjectInternals, repeat: JSValue, idle_timeout: JSValue) bool { - if (this.flags.kind == .setInterval and repeat.isNull()) return false; - if (idle_timeout.getNumber()) |num| { - if (num == -1) return false; - } - return true; - } - - pub fn reschedule(this: *TimerObjectInternals, timer: JSValue, vm: *VirtualMachine) void { - if (this.flags.kind == .setImmediate) return; - - const idle_timeout = TimeoutObject.js.idleTimeoutGetCached(timer).?; - const repeat = TimeoutObject.js.repeatGetCached(timer).?; - - // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L612 - if (!this.shouldRescheduleTimer(repeat, idle_timeout)) return; - - const now = timespec.msFromNow(this.interval); - const was_active = this.eventLoopTimer().state == .ACTIVE; - if (was_active) { - vm.timer.remove(this.eventLoopTimer()); - } else { - this.ref(); - } - - vm.timer.update(this.eventLoopTimer(), &now); - this.flags.has_cleared_timer = false; - - if (this.flags.has_js_ref) { - this.setEnableKeepingEventLoopAlive(vm, true); - } - } - - fn setEnableKeepingEventLoopAlive(this: *TimerObjectInternals, vm: *VirtualMachine, enable: bool) void { - if (this.flags.is_keeping_event_loop_alive == enable) { - return; - } - this.flags.is_keeping_event_loop_alive = enable; - switch (this.flags.kind) { - .setTimeout, .setInterval => vm.timer.incrementTimerRef(if (enable) 1 else -1), - - // setImmediate has slightly different event loop logic - .setImmediate => vm.timer.incrementImmediateRef(if (enable) 1 else -1), - } - } - - pub fn hasRef(this: *TimerObjectInternals) JSValue { - return JSValue.jsBoolean(this.flags.is_keeping_event_loop_alive); - } - - pub fn toPrimitive(this: *TimerObjectInternals) bun.JSError!JSValue { - if (!this.flags.has_accessed_primitive) { - this.flags.has_accessed_primitive = true; - const vm = VirtualMachine.get(); - try vm.timer.maps.get(this.flags.kind).put(bun.default_allocator, this.id, this.eventLoopTimer()); - } - return JSValue.jsNumber(this.id); - } - - /// This is the getter for `_destroyed` on JS Timeout and Immediate objects - pub fn getDestroyed(this: *TimerObjectInternals) bool { - if (this.flags.has_cleared_timer) { - return true; - } - if (this.flags.in_callback) { - return false; - } - return switch (this.eventLoopTimer().state) { - .ACTIVE, .PENDING => false, - .FIRED, .CANCELLED => true, - }; - } - - pub fn finalize(this: *TimerObjectInternals) void { - this.strong_this.deinit(); - this.deref(); - } - - 
pub fn deinit(this: *TimerObjectInternals) void { - this.strong_this.deinit(); - const vm = VirtualMachine.get(); - const kind = this.flags.kind; - - if (this.eventLoopTimer().state == .ACTIVE) { - vm.timer.remove(this.eventLoopTimer()); - } - - if (this.flags.has_accessed_primitive) { - const map = vm.timer.maps.get(kind); - if (map.orderedRemove(this.id)) { - // If this array gets large, let's shrink it down - // Array keys are i32 - // Values are 1 ptr - // Therefore, 12 bytes per entry - // So if you created 21,000 timers and accessed them by ID, you'd be using 252KB - const allocated_bytes = map.capacity() * @sizeOf(TimeoutMap.Data); - const used_bytes = map.count() * @sizeOf(TimeoutMap.Data); - if (allocated_bytes - used_bytes > 256 * 1024) { - map.shrinkAndFree(bun.default_allocator, map.count() + 8); - } - } - } - - this.setEnableKeepingEventLoopAlive(vm, false); - switch (kind) { - .setImmediate => (@as(*ImmediateObject, @fieldParentPtr("internals", this))).ref_count.assertNoRefs(), - .setTimeout, .setInterval => (@as(*TimeoutObject, @fieldParentPtr("internals", this))).ref_count.assertNoRefs(), - } - } -}; +pub const TimeoutObject = @import("./Timer/TimeoutObject.zig"); +pub const ImmediateObject = @import("./Timer/ImmediateObject.zig"); +pub const TimerObjectInternals = @import("./Timer/TimerObjectInternals.zig"); pub const Kind = enum(u2) { setTimeout = 0, @@ -1275,235 +580,6 @@ pub const ID = extern struct { const assert = bun.assert; const heap = bun.io.heap; -pub const EventLoopTimer = struct { - /// The absolute time to fire this timer next. - next: timespec, - state: State = .PENDING, - tag: Tag, - /// Internal heap fields. - heap: heap.IntrusiveField(EventLoopTimer) = .{}, - - pub fn initPaused(tag: Tag) EventLoopTimer { - return .{ - .next = .{}, - .tag = tag, - }; - } - - pub fn less(_: void, a: *const EventLoopTimer, b: *const EventLoopTimer) bool { - const sec_order = std.math.order(a.next.sec, b.next.sec); - if (sec_order != .eq) return sec_order == .lt; - - // collapse sub-millisecond precision for JavaScript timers - const maybe_a_internals = a.jsTimerInternals(); - const maybe_b_internals = b.jsTimerInternals(); - var a_ns = a.next.nsec; - var b_ns = b.next.nsec; - if (maybe_a_internals != null) a_ns = std.time.ns_per_ms * @divTrunc(a_ns, std.time.ns_per_ms); - if (maybe_b_internals != null) b_ns = std.time.ns_per_ms * @divTrunc(b_ns, std.time.ns_per_ms); - - const order = std.math.order(a_ns, b_ns); - if (order == .eq) { - if (maybe_a_internals) |a_internals| { - if (maybe_b_internals) |b_internals| { - // We expect that the epoch will overflow sometimes. - // If it does, we would ideally like timers with an epoch from before the - // overflow to be sorted *before* timers with an epoch from after the overflow - // (even though their epoch will be numerically *larger*). - // - // Wrapping subtraction gives us a distance that is consistent even if one - // epoch has overflowed and the other hasn't. If the distance from a to b is - // small, it's likely that b is really newer than a, so we consider a less than - // b. If the distance from a to b is large (greater than half the u25 range), - // it's more likely that b is older than a so the true distance is from b to a. 
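The wrapping-subtraction epoch comparison documented above (and reproduced verbatim in the new `Timer/EventLoopTimer.zig` below) exists to mimic one specific Node behavior for timers due in the same millisecond: a refreshed timer moves to the back of the line. A small check, per the comment's own scenario:

```ts
// Two timers due in the same millisecond. Refreshing `a` re-inserts it
// with a newer epoch, so the tie breaks in b's favor: b fires first.
const a = setTimeout(() => console.log("a: refreshed, fires last"), 100);
const b = setTimeout(() => console.log("b: fires first"), 100);
a.refresh();
```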
- return b_internals.flags.epoch -% a_internals.flags.epoch < std.math.maxInt(u25) / 2; - } - } - } - return order == .lt; - } - - pub const Tag = if (Environment.isWindows) enum { - TimerCallback, - TimeoutObject, - ImmediateObject, - TestRunner, - StatWatcherScheduler, - UpgradedDuplex, - DNSResolver, - WindowsNamedPipe, - WTFTimer, - PostgresSQLConnectionTimeout, - PostgresSQLConnectionMaxLifetime, - ValkeyConnectionTimeout, - ValkeyConnectionReconnect, - SubprocessTimeout, - DevServerSweepSourceMaps, - DevServerMemoryVisualizerTick, - - pub fn Type(comptime T: Tag) type { - return switch (T) { - .TimerCallback => TimerCallback, - .TimeoutObject => TimeoutObject, - .ImmediateObject => ImmediateObject, - .TestRunner => JSC.Jest.TestRunner, - .StatWatcherScheduler => StatWatcherScheduler, - .UpgradedDuplex => uws.UpgradedDuplex, - .DNSResolver => DNSResolver, - .WindowsNamedPipe => uws.WindowsNamedPipe, - .WTFTimer => WTFTimer, - .PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection, - .PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection, - .SubprocessTimeout => JSC.Subprocess, - .ValkeyConnectionReconnect => JSC.API.Valkey, - .ValkeyConnectionTimeout => JSC.API.Valkey, - .DevServerSweepSourceMaps, - .DevServerMemoryVisualizerTick, - => bun.bake.DevServer, - }; - } - } else enum { - TimerCallback, - TimeoutObject, - ImmediateObject, - TestRunner, - StatWatcherScheduler, - UpgradedDuplex, - WTFTimer, - DNSResolver, - PostgresSQLConnectionTimeout, - PostgresSQLConnectionMaxLifetime, - ValkeyConnectionTimeout, - ValkeyConnectionReconnect, - SubprocessTimeout, - DevServerSweepSourceMaps, - DevServerMemoryVisualizerTick, - - pub fn Type(comptime T: Tag) type { - return switch (T) { - .TimerCallback => TimerCallback, - .TimeoutObject => TimeoutObject, - .ImmediateObject => ImmediateObject, - .TestRunner => JSC.Jest.TestRunner, - .StatWatcherScheduler => StatWatcherScheduler, - .UpgradedDuplex => uws.UpgradedDuplex, - .WTFTimer => WTFTimer, - .DNSResolver => DNSResolver, - .PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection, - .PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection, - .ValkeyConnectionTimeout => JSC.API.Valkey, - .ValkeyConnectionReconnect => JSC.API.Valkey, - .SubprocessTimeout => JSC.Subprocess, - .DevServerSweepSourceMaps, - .DevServerMemoryVisualizerTick, - => bun.bake.DevServer, - }; - } - }; - - const TimerCallback = struct { - callback: *const fn (*TimerCallback) Arm, - ctx: *anyopaque, - event_loop_timer: EventLoopTimer, - }; - - pub const State = enum { - /// The timer is waiting to be enabled. - PENDING, - - /// The timer is active and will fire at the next time. - ACTIVE, - - /// The timer has been cancelled and will not fire. - CANCELLED, - - /// The timer has fired and the callback has been called. 
- FIRED, - }; - - /// If self was created by set{Immediate,Timeout,Interval}, get a pointer to the common data - /// for all those kinds of timers - fn jsTimerInternals(self: anytype) switch (@TypeOf(self)) { - *EventLoopTimer => ?*TimerObjectInternals, - *const EventLoopTimer => ?*const TimerObjectInternals, - else => |T| @compileError("wrong type " ++ @typeName(T) ++ " passed to jsTimerInternals"), - } { - switch (self.tag) { - inline .TimeoutObject, .ImmediateObject => |tag| { - const parent: switch (@TypeOf(self)) { - *EventLoopTimer => *tag.Type(), - *const EventLoopTimer => *const tag.Type(), - else => unreachable, - } = @fieldParentPtr("event_loop_timer", self); - return &parent.internals; - }, - else => return null, - } - } - - fn ns(self: *const EventLoopTimer) u64 { - return self.next.ns(); - } - - pub const Arm = union(enum) { - rearm: timespec, - disarm, - }; - - pub fn fire(this: *EventLoopTimer, now: *const timespec, vm: *VirtualMachine) Arm { - switch (this.tag) { - .PostgresSQLConnectionTimeout => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(), - .PostgresSQLConnectionMaxLifetime => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("max_lifetime_timer", this))).onMaxLifetimeTimeout(), - .ValkeyConnectionTimeout => return @as(*api.Valkey, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(), - .ValkeyConnectionReconnect => return @as(*api.Valkey, @alignCast(@fieldParentPtr("reconnect_timer", this))).onReconnectTimer(), - .DevServerMemoryVisualizerTick => return bun.bake.DevServer.emitMemoryVisualizerMessageTimer(this, now), - .DevServerSweepSourceMaps => return bun.bake.DevServer.SourceMapStore.sweepWeakRefs(this, now), - inline else => |t| { - if (@FieldType(t.Type(), "event_loop_timer") != EventLoopTimer) { - @compileError(@typeName(t.Type()) ++ " has wrong type for 'event_loop_timer'"); - } - var container: *t.Type() = @alignCast(@fieldParentPtr("event_loop_timer", this)); - if (comptime t.Type() == TimeoutObject or t.Type() == ImmediateObject) { - return container.internals.fire(now, vm); - } - - if (comptime t.Type() == WTFTimer) { - return container.fire(now, vm); - } - - if (comptime t.Type() == StatWatcherScheduler) { - return container.timerCallback(); - } - if (comptime t.Type() == uws.UpgradedDuplex) { - return container.onTimeout(); - } - if (Environment.isWindows) { - if (comptime t.Type() == uws.WindowsNamedPipe) { - return container.onTimeout(); - } - } - - if (comptime t.Type() == JSC.Jest.TestRunner) { - container.onTestTimeout(now, vm); - return .disarm; - } - - if (comptime t.Type() == DNSResolver) { - return container.checkTimeouts(now, vm); - } - - if (comptime t.Type() == JSC.Subprocess) { - return container.timeoutCallback(); - } - - return container.callback(container); - }, - } - } - - pub fn deinit(_: *EventLoopTimer) void {} -}; - const timespec = bun.timespec; /// A timer created by WTF code and invoked by Bun's event loop diff --git a/src/bun.js/api/Timer/EventLoopTimer.zig b/src/bun.js/api/Timer/EventLoopTimer.zig new file mode 100644 index 0000000000..232949cb4a --- /dev/null +++ b/src/bun.js/api/Timer/EventLoopTimer.zig @@ -0,0 +1,247 @@ +const EventLoopTimer = @This(); + +/// The absolute time to fire this timer next. +next: timespec, +state: State = .PENDING, +tag: Tag, +/// Internal heap fields. 
+heap: bun.io.heap.IntrusiveField(EventLoopTimer) = .{}, + +pub fn initPaused(tag: Tag) EventLoopTimer { + return .{ + .next = .{}, + .tag = tag, + }; +} + +pub fn less(_: void, a: *const EventLoopTimer, b: *const EventLoopTimer) bool { + const sec_order = std.math.order(a.next.sec, b.next.sec); + if (sec_order != .eq) return sec_order == .lt; + + // collapse sub-millisecond precision for JavaScript timers + const maybe_a_internals = a.jsTimerInternals(); + const maybe_b_internals = b.jsTimerInternals(); + var a_ns = a.next.nsec; + var b_ns = b.next.nsec; + if (maybe_a_internals != null) a_ns = std.time.ns_per_ms * @divTrunc(a_ns, std.time.ns_per_ms); + if (maybe_b_internals != null) b_ns = std.time.ns_per_ms * @divTrunc(b_ns, std.time.ns_per_ms); + + const order = std.math.order(a_ns, b_ns); + if (order == .eq) { + if (maybe_a_internals) |a_internals| { + if (maybe_b_internals) |b_internals| { + // We expect that the epoch will overflow sometimes. + // If it does, we would ideally like timers with an epoch from before the + // overflow to be sorted *before* timers with an epoch from after the overflow + // (even though their epoch will be numerically *larger*). + // + // Wrapping subtraction gives us a distance that is consistent even if one + // epoch has overflowed and the other hasn't. If the distance from a to b is + // small, it's likely that b is really newer than a, so we consider a less than + // b. If the distance from a to b is large (greater than half the u25 range), + // it's more likely that b is older than a so the true distance is from b to a. + return b_internals.flags.epoch -% a_internals.flags.epoch < std.math.maxInt(u25) / 2; + } + } + } + return order == .lt; +} + +pub const Tag = if (Environment.isWindows) enum { + TimerCallback, + TimeoutObject, + ImmediateObject, + TestRunner, + StatWatcherScheduler, + UpgradedDuplex, + DNSResolver, + WindowsNamedPipe, + WTFTimer, + PostgresSQLConnectionTimeout, + PostgresSQLConnectionMaxLifetime, + ValkeyConnectionTimeout, + ValkeyConnectionReconnect, + SubprocessTimeout, + DevServerSweepSourceMaps, + DevServerMemoryVisualizerTick, + + pub fn Type(comptime T: Tag) type { + return switch (T) { + .TimerCallback => TimerCallback, + .TimeoutObject => TimeoutObject, + .ImmediateObject => ImmediateObject, + .TestRunner => JSC.Jest.TestRunner, + .StatWatcherScheduler => StatWatcherScheduler, + .UpgradedDuplex => uws.UpgradedDuplex, + .DNSResolver => DNSResolver, + .WindowsNamedPipe => uws.WindowsNamedPipe, + .WTFTimer => WTFTimer, + .PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection, + .PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection, + .SubprocessTimeout => JSC.Subprocess, + .ValkeyConnectionReconnect => JSC.API.Valkey, + .ValkeyConnectionTimeout => JSC.API.Valkey, + .DevServerSweepSourceMaps, + .DevServerMemoryVisualizerTick, + => bun.bake.DevServer, + }; + } +} else enum { + TimerCallback, + TimeoutObject, + ImmediateObject, + TestRunner, + StatWatcherScheduler, + UpgradedDuplex, + WTFTimer, + DNSResolver, + PostgresSQLConnectionTimeout, + PostgresSQLConnectionMaxLifetime, + ValkeyConnectionTimeout, + ValkeyConnectionReconnect, + SubprocessTimeout, + DevServerSweepSourceMaps, + DevServerMemoryVisualizerTick, + + pub fn Type(comptime T: Tag) type { + return switch (T) { + .TimerCallback => TimerCallback, + .TimeoutObject => TimeoutObject, + .ImmediateObject => ImmediateObject, + .TestRunner => JSC.Jest.TestRunner, + .StatWatcherScheduler => StatWatcherScheduler, + .UpgradedDuplex => 
uws.UpgradedDuplex, + .WTFTimer => WTFTimer, + .DNSResolver => DNSResolver, + .PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection, + .PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection, + .ValkeyConnectionTimeout => JSC.API.Valkey, + .ValkeyConnectionReconnect => JSC.API.Valkey, + .SubprocessTimeout => JSC.Subprocess, + .DevServerSweepSourceMaps, + .DevServerMemoryVisualizerTick, + => bun.bake.DevServer, + }; + } +}; + +const TimerCallback = struct { + callback: *const fn (*TimerCallback) Arm, + ctx: *anyopaque, + event_loop_timer: EventLoopTimer, +}; + +pub const State = enum { + /// The timer is waiting to be enabled. + PENDING, + + /// The timer is active and will fire at the next time. + ACTIVE, + + /// The timer has been cancelled and will not fire. + CANCELLED, + + /// The timer has fired and the callback has been called. + FIRED, +}; + +/// If self was created by set{Immediate,Timeout,Interval}, get a pointer to the common data +/// for all those kinds of timers +pub fn jsTimerInternals(self: anytype) switch (@TypeOf(self)) { + *EventLoopTimer => ?*TimerObjectInternals, + *const EventLoopTimer => ?*const TimerObjectInternals, + else => |T| @compileError("wrong type " ++ @typeName(T) ++ " passed to jsTimerInternals"), +} { + switch (self.tag) { + inline .TimeoutObject, .ImmediateObject => |tag| { + const parent: switch (@TypeOf(self)) { + *EventLoopTimer => *tag.Type(), + *const EventLoopTimer => *const tag.Type(), + else => unreachable, + } = @fieldParentPtr("event_loop_timer", self); + return &parent.internals; + }, + else => return null, + } +} + +fn ns(self: *const EventLoopTimer) u64 { + return self.next.ns(); +} + +pub const Arm = union(enum) { + rearm: timespec, + disarm, +}; + +pub fn fire(this: *EventLoopTimer, now: *const timespec, vm: *VirtualMachine) Arm { + switch (this.tag) { + .PostgresSQLConnectionTimeout => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(), + .PostgresSQLConnectionMaxLifetime => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("max_lifetime_timer", this))).onMaxLifetimeTimeout(), + .ValkeyConnectionTimeout => return @as(*api.Valkey, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(), + .ValkeyConnectionReconnect => return @as(*api.Valkey, @alignCast(@fieldParentPtr("reconnect_timer", this))).onReconnectTimer(), + .DevServerMemoryVisualizerTick => return bun.bake.DevServer.emitMemoryVisualizerMessageTimer(this, now), + .DevServerSweepSourceMaps => return bun.bake.DevServer.SourceMapStore.sweepWeakRefs(this, now), + inline else => |t| { + if (@FieldType(t.Type(), "event_loop_timer") != EventLoopTimer) { + @compileError(@typeName(t.Type()) ++ " has wrong type for 'event_loop_timer'"); + } + var container: *t.Type() = @alignCast(@fieldParentPtr("event_loop_timer", this)); + if (comptime t.Type() == TimeoutObject or t.Type() == ImmediateObject) { + return container.internals.fire(now, vm); + } + + if (comptime t.Type() == WTFTimer) { + return container.fire(now, vm); + } + + if (comptime t.Type() == StatWatcherScheduler) { + return container.timerCallback(); + } + if (comptime t.Type() == uws.UpgradedDuplex) { + return container.onTimeout(); + } + if (Environment.isWindows) { + if (comptime t.Type() == uws.WindowsNamedPipe) { + return container.onTimeout(); + } + } + + if (comptime t.Type() == JSC.Jest.TestRunner) { + container.onTestTimeout(now, vm); + return .disarm; + } + + if (comptime t.Type() == DNSResolver) { 
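+                    // Like every branch in this `inline else` arm, this comparison is
+                    // resolved at compile time: the switch is monomorphized per Tag, so
+                    // each instantiation keeps only the call matching its container
+                    // type; there is no runtime type dispatch here.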
+ return container.checkTimeouts(now, vm); + } + + if (comptime t.Type() == JSC.Subprocess) { + return container.timeoutCallback(); + } + + return container.callback(container); + }, + } +} + +pub fn deinit(_: *EventLoopTimer) void {} + +const timespec = bun.timespec; + +/// A timer created by WTF code and invoked by Bun's event loop +const WTFTimer = @import("../../WTFTimer.zig"); +const VirtualMachine = JSC.VirtualMachine; +const TimerObjectInternals = @import("../Timer.zig").TimerObjectInternals; +const TimeoutObject = @import("../Timer.zig").TimeoutObject; +const ImmediateObject = @import("../Timer.zig").ImmediateObject; +const StatWatcherScheduler = @import("../../node/node_fs_stat_watcher.zig").StatWatcherScheduler; +const DNSResolver = @import("../bun/dns_resolver.zig").DNSResolver; + +const bun = @import("bun"); +const std = @import("std"); +const Environment = bun.Environment; +const JSC = bun.JSC; + +const uws = bun.uws; +const api = JSC.API; diff --git a/src/bun.js/api/Timer/ImmediateObject.zig b/src/bun.js/api/Timer/ImmediateObject.zig new file mode 100644 index 0000000000..d695be5bc2 --- /dev/null +++ b/src/bun.js/api/Timer/ImmediateObject.zig @@ -0,0 +1,104 @@ +const ImmediateObject = @This(); + +const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{}); +pub const ref = RefCount.ref; +pub const deref = RefCount.deref; + +pub const js = JSC.Codegen.JSImmediate; +pub const toJS = js.toJS; +pub const fromJS = js.fromJS; +pub const fromJSDirect = js.fromJSDirect; + +ref_count: RefCount, +event_loop_timer: EventLoopTimer = .{ + .next = .{}, + .tag = .ImmediateObject, +}, +internals: TimerObjectInternals, + +pub fn init( + globalThis: *JSGlobalObject, + id: i32, + callback: JSValue, + arguments: JSValue, +) JSValue { + // internals are initialized by init() + const immediate = bun.new(ImmediateObject, .{ .ref_count = .init(), .internals = undefined }); + const js_value = immediate.toJS(globalThis); + defer js_value.ensureStillAlive(); + immediate.internals.init( + js_value, + globalThis, + id, + .setImmediate, + 0, + callback, + arguments, + ); + + if (globalThis.bunVM().isInspectorEnabled()) { + Debugger.didScheduleAsyncCall( + globalThis, + .DOMTimer, + ID.asyncID(.{ .id = id, .kind = .setImmediate }), + true, + ); + } + + return js_value; +} + +fn deinit(this: *ImmediateObject) void { + this.internals.deinit(); + bun.destroy(this); +} + +pub fn constructor(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) !*ImmediateObject { + _ = callFrame; + return globalObject.throw("Immediate is not constructible", .{}); +} + +/// returns true if an exception was thrown +pub fn runImmediateTask(this: *ImmediateObject, vm: *VirtualMachine) bool { + return this.internals.runImmediateTask(vm); +} + +pub fn toPrimitive(this: *ImmediateObject, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { + return this.internals.toPrimitive(); +} + +pub fn doRef(this: *ImmediateObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { + return this.internals.doRef(globalThis, callFrame.this()); +} + +pub fn doUnref(this: *ImmediateObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { + return this.internals.doUnref(globalThis, callFrame.this()); +} + +pub fn hasRef(this: *ImmediateObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { + return this.internals.hasRef(); +} + +pub fn finalize(this: *ImmediateObject) void { + this.internals.finalize(); +} + +pub fn getDestroyed(this: *ImmediateObject, 
globalThis: *JSGlobalObject) JSValue { + _ = globalThis; + return .jsBoolean(this.internals.getDestroyed()); +} + +pub fn dispose(this: *ImmediateObject, globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { + this.internals.cancel(globalThis.bunVM()); + return .js_undefined; +} + +const bun = @import("bun"); +const JSC = bun.JSC; +const VirtualMachine = JSC.VirtualMachine; +const TimerObjectInternals = @import("../Timer.zig").TimerObjectInternals; +const Debugger = @import("../../Debugger.zig"); +const ID = @import("../Timer.zig").ID; +const EventLoopTimer = @import("../Timer.zig").EventLoopTimer; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; diff --git a/src/bun.js/api/Timer/TimeoutObject.zig b/src/bun.js/api/Timer/TimeoutObject.zig new file mode 100644 index 0000000000..7e69cea0c6 --- /dev/null +++ b/src/bun.js/api/Timer/TimeoutObject.zig @@ -0,0 +1,134 @@ +const TimeoutObject = @This(); + +const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{}); +pub const ref = RefCount.ref; +pub const deref = RefCount.deref; + +pub const js = JSC.Codegen.JSTimeout; +pub const toJS = js.toJS; +pub const fromJS = js.fromJS; +pub const fromJSDirect = js.fromJSDirect; + +ref_count: RefCount, +event_loop_timer: EventLoopTimer = .{ + .next = .{}, + .tag = .TimeoutObject, +}, +internals: TimerObjectInternals, + +pub fn init( + globalThis: *JSGlobalObject, + id: i32, + kind: Kind, + interval: u31, + callback: JSValue, + arguments: JSValue, +) JSValue { + // internals are initialized by init() + const timeout = bun.new(TimeoutObject, .{ .ref_count = .init(), .internals = undefined }); + const js_value = timeout.toJS(globalThis); + defer js_value.ensureStillAlive(); + timeout.internals.init( + js_value, + globalThis, + id, + kind, + interval, + callback, + arguments, + ); + + if (globalThis.bunVM().isInspectorEnabled()) { + Debugger.didScheduleAsyncCall( + globalThis, + .DOMTimer, + ID.asyncID(.{ .id = id, .kind = kind.big() }), + kind != .setInterval, + ); + } + + return js_value; +} + +fn deinit(this: *TimeoutObject) void { + this.internals.deinit(); + bun.destroy(this); +} + +pub fn constructor(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) !*TimeoutObject { + _ = callFrame; + return globalObject.throw("Timeout is not constructible", .{}); +} + +pub fn toPrimitive(this: *TimeoutObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { + return this.internals.toPrimitive(); +} + +pub fn doRef(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { + return this.internals.doRef(globalThis, callFrame.this()); +} + +pub fn doUnref(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { + return this.internals.doUnref(globalThis, callFrame.this()); +} + +pub fn doRefresh(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { + return this.internals.doRefresh(globalThis, callFrame.this()); +} + +pub fn hasRef(this: *TimeoutObject, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { + return this.internals.hasRef(); +} + +pub fn finalize(this: *TimeoutObject) void { + this.internals.finalize(); +} + +pub fn getDestroyed(this: *TimeoutObject, globalThis: *JSGlobalObject) JSValue { + _ = globalThis; + return .jsBoolean(this.internals.getDestroyed()); +} + +pub fn close(this: *TimeoutObject, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) JSValue { + 
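+    // Mirrors Node's legacy `timeout.close()`: cancel the timer and return `this`
+    // (compare `dispose` below, which returns undefined).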
+    this.internals.cancel(globalThis.bunVM());
+    return callFrame.this();
+}
+
+pub fn get_onTimeout(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue {
+    return TimeoutObject.js.callbackGetCached(thisValue).?;
+}
+
+pub fn set_onTimeout(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
+    TimeoutObject.js.callbackSetCached(thisValue, globalThis, value);
+}
+
+pub fn get_idleTimeout(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue {
+    return TimeoutObject.js.idleTimeoutGetCached(thisValue).?;
+}
+
+pub fn set_idleTimeout(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
+    TimeoutObject.js.idleTimeoutSetCached(thisValue, globalThis, value);
+}
+
+pub fn get_repeat(_: *TimeoutObject, thisValue: JSValue, _: *JSGlobalObject) JSValue {
+    return TimeoutObject.js.repeatGetCached(thisValue).?;
+}
+
+pub fn set_repeat(_: *TimeoutObject, thisValue: JSValue, globalThis: *JSGlobalObject, value: JSValue) void {
+    TimeoutObject.js.repeatSetCached(thisValue, globalThis, value);
+}
+
+pub fn dispose(this: *TimeoutObject, globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue {
+    this.internals.cancel(globalThis.bunVM());
+    return .js_undefined;
+}
+
+const bun = @import("bun");
+const JSC = bun.JSC;
+const TimerObjectInternals = @import("../Timer.zig").TimerObjectInternals;
+const Debugger = @import("../../Debugger.zig");
+const ID = @import("../Timer.zig").ID;
+const Kind = @import("../Timer.zig").Kind;
+const EventLoopTimer = @import("../Timer.zig").EventLoopTimer;
+const JSValue = JSC.JSValue;
+const JSGlobalObject = JSC.JSGlobalObject;
diff --git a/src/bun.js/api/Timer/TimerObjectInternals.zig b/src/bun.js/api/Timer/TimerObjectInternals.zig
new file mode 100644
index 0000000000..60e3c88cba
--- /dev/null
+++ b/src/bun.js/api/Timer/TimerObjectInternals.zig
@@ -0,0 +1,487 @@
+/// Data that TimerObject and ImmediateObject have in common
+const TimerObjectInternals = @This();
+
+/// Identifier for this timer that is exposed to JavaScript (by `+timer`)
+id: i32 = -1,
+interval: u31 = 0,
+strong_this: JSC.Strong.Optional = .empty,
+flags: Flags = .{},
+
+const Flags = packed struct(u32) {
+    /// Whenever a timer is inserted into the heap (which happens on creation or refresh), the global
+    /// epoch is incremented and the new epoch is set on the timer. For timers created by
+    /// JavaScript, the epoch is used to break ties between timers scheduled for the same
+    /// millisecond. This ensures that if you set two timers for the same amount of time, and
+    /// refresh the first one, the first one will fire last. This mimics Node.js's behavior, where
+    /// the refreshed timer is inserted at the end of a list, which makes it fire later.
+    epoch: u25 = 0,
+
+    kind: Kind = .setTimeout,
+
+    // we do not allow the timer to be refreshed after we call clearInterval/clearTimeout
+    has_cleared_timer: bool = false,
+    is_keeping_event_loop_alive: bool = false,
+
+    // if they never access the timer by integer, don't create a hashmap entry.
+    has_accessed_primitive: bool = false,
+
+    has_js_ref: bool = true,
+
+    /// Set to `true` only during execution of the JavaScript function so that `_destroyed` can be
+    /// false during the callback, even though the `state` will be `FIRED`.
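+    // Worked example for the `epoch` tie-break documented above (illustrative
+    // values): take epoch a = 33_554_431, i.e. maxInt(u25), set just before the
+    // counter overflows, and epoch b = 2, set just after. Then `b -% a == 3`,
+    // which is less than `maxInt(u25) / 2`, so `EventLoopTimer.less` still orders
+    // `a` before `b`; the older timer fires first across the overflow.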
+ in_callback: bool = false, +}; + +fn eventLoopTimer(this: *TimerObjectInternals) *EventLoopTimer { + switch (this.flags.kind) { + .setImmediate => { + const parent: *ImmediateObject = @fieldParentPtr("internals", this); + assert(parent.event_loop_timer.tag == .ImmediateObject); + return &parent.event_loop_timer; + }, + .setTimeout, .setInterval => { + const parent: *TimeoutObject = @fieldParentPtr("internals", this); + assert(parent.event_loop_timer.tag == .TimeoutObject); + return &parent.event_loop_timer; + }, + } +} + +fn ref(this: *TimerObjectInternals) void { + switch (this.flags.kind) { + .setImmediate => @as(*ImmediateObject, @fieldParentPtr("internals", this)).ref(), + .setTimeout, .setInterval => @as(*TimeoutObject, @fieldParentPtr("internals", this)).ref(), + } +} + +fn deref(this: *TimerObjectInternals) void { + switch (this.flags.kind) { + .setImmediate => @as(*ImmediateObject, @fieldParentPtr("internals", this)).deref(), + .setTimeout, .setInterval => @as(*TimeoutObject, @fieldParentPtr("internals", this)).deref(), + } +} + +extern "c" fn Bun__JSTimeout__call(globalObject: *JSC.JSGlobalObject, timer: JSValue, callback: JSValue, arguments: JSValue) bool; + +/// returns true if an exception was thrown +pub fn runImmediateTask(this: *TimerObjectInternals, vm: *VirtualMachine) bool { + if (this.flags.has_cleared_timer or + // unref'd setImmediate callbacks should only run if there are things keeping the event + // loop alive other than setImmediates + (!this.flags.is_keeping_event_loop_alive and !vm.isEventLoopAliveExcludingImmediates())) + { + this.deref(); + return false; + } + + const timer = this.strong_this.get() orelse { + if (Environment.isDebug) { + @panic("TimerObjectInternals.runImmediateTask: this_object is null"); + } + return false; + }; + const globalThis = vm.global; + this.strong_this.deinit(); + this.eventLoopTimer().state = .FIRED; + this.setEnableKeepingEventLoopAlive(vm, false); + + vm.eventLoop().enter(); + const callback = ImmediateObject.js.callbackGetCached(timer).?; + const arguments = ImmediateObject.js.argumentsGetCached(timer).?; + this.ref(); + const exception_thrown = this.run(globalThis, timer, callback, arguments, this.asyncID(), vm); + this.deref(); + + if (this.eventLoopTimer().state == .FIRED) { + this.deref(); + } + + vm.eventLoop().exitMaybeDrainMicrotasks(!exception_thrown) catch return true; + + return exception_thrown; +} + +pub fn asyncID(this: *const TimerObjectInternals) u64 { + return ID.asyncID(.{ .id = this.id, .kind = this.flags.kind.big() }); +} + +pub fn fire(this: *TimerObjectInternals, _: *const timespec, vm: *JSC.VirtualMachine) EventLoopTimer.Arm { + const id = this.id; + const kind = this.flags.kind.big(); + const async_id: ID = .{ .id = id, .kind = kind }; + const has_been_cleared = this.eventLoopTimer().state == .CANCELLED or this.flags.has_cleared_timer or vm.scriptExecutionStatus() != .running; + + this.eventLoopTimer().state = .FIRED; + + const globalThis = vm.global; + const this_object = this.strong_this.get().?; + + const callback: JSValue, const arguments: JSValue, var idle_timeout: JSValue, var repeat: JSValue = switch (kind) { + .setImmediate => .{ + ImmediateObject.js.callbackGetCached(this_object).?, + ImmediateObject.js.argumentsGetCached(this_object).?, + .js_undefined, + .js_undefined, + }, + .setTimeout, .setInterval => .{ + TimeoutObject.js.callbackGetCached(this_object).?, + TimeoutObject.js.argumentsGetCached(this_object).?, + TimeoutObject.js.idleTimeoutGetCached(this_object).?, + 
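+            // `idleTimeout` and `repeat` mirror Node's `timeout._idleTimeout` and
+            // `timeout._repeat`; they are re-read after the callback runs (below),
+            // so mutations made by user code inside the callback are observed when
+            // deciding whether to reschedule.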
+            TimeoutObject.js.repeatGetCached(this_object).?,
+        },
+    };
+
+    if (has_been_cleared or !callback.toBoolean()) {
+        if (vm.isInspectorEnabled()) {
+            Debugger.didCancelAsyncCall(globalThis, .DOMTimer, ID.asyncID(async_id));
+        }
+        this.setEnableKeepingEventLoopAlive(vm, false);
+        this.flags.has_cleared_timer = true;
+        this.strong_this.deinit();
+        this.deref();
+
+        return .disarm;
+    }
+
+    var time_before_call: timespec = undefined;
+
+    if (kind != .setInterval) {
+        this.strong_this.clearWithoutDeallocation();
+    } else {
+        time_before_call = timespec.msFromNow(this.interval);
+    }
+    this_object.ensureStillAlive();
+
+    vm.eventLoop().enter();
+    {
+        // Ensure it stays alive for this scope.
+        this.ref();
+        defer this.deref();
+
+        _ = this.run(globalThis, this_object, callback, arguments, ID.asyncID(async_id), vm);
+
+        switch (kind) {
+            .setTimeout, .setInterval => {
+                idle_timeout = TimeoutObject.js.idleTimeoutGetCached(this_object).?;
+                repeat = TimeoutObject.js.repeatGetCached(this_object).?;
+            },
+            else => {},
+        }
+
+        const is_timer_done = is_timer_done: {
+            // Node doesn't drain microtasks after each timer callback.
+            if (kind == .setInterval) {
+                if (!this.shouldRescheduleTimer(repeat, idle_timeout)) {
+                    break :is_timer_done true;
+                }
+                switch (this.eventLoopTimer().state) {
+                    .FIRED => {
+                        // If we didn't clear the setInterval, reschedule it, measuring
+                        // the next interval from the time recorded just before the
+                        // callback ran.
+                        vm.timer.update(this.eventLoopTimer(), &time_before_call);
+
+                        if (this.flags.has_js_ref) {
+                            this.setEnableKeepingEventLoopAlive(vm, true);
+                        }
+
+                        // The ref count doesn't change. It wasn't decremented.
+                    },
+                    .ACTIVE => {
+                        // The developer called timer.refresh() synchronously in the callback.
+                        vm.timer.update(this.eventLoopTimer(), &time_before_call);
+
+                        // Balance out the ref count.
+                        // The transition from "FIRED" -> "ACTIVE" caused it to increment.
+                        this.deref();
+                    },
+                    else => {
+                        break :is_timer_done true;
+                    },
+                }
+            } else {
+                if (kind == .setTimeout and !repeat.isNull()) {
+                    if (idle_timeout.getNumber()) |num| {
+                        if (num != -1) {
+                            this.convertToInterval(globalThis, this_object, repeat);
+                            break :is_timer_done false;
+                        }
+                    }
+                }
+
+                if (this.eventLoopTimer().state == .FIRED) {
+                    break :is_timer_done true;
+                }
+            }
+
+            break :is_timer_done false;
+        };
+
+        if (is_timer_done) {
+            this.setEnableKeepingEventLoopAlive(vm, false);
+            // The timer will not be re-entered into the event loop at this point.
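+            // Drop the scheduling reference: while queued in the timer heap (or the
+            // immediate queue) the timer holds one extra ref, taken in `init`/`reschedule`;
+            // since it is done and will not be re-armed, that ref is released here.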
+ this.deref(); + } + } + vm.eventLoop().exit(); + + return .disarm; +} + +fn convertToInterval(this: *TimerObjectInternals, global: *JSGlobalObject, timer: JSValue, repeat: JSValue) void { + bun.debugAssert(this.flags.kind == .setTimeout); + + const vm = global.bunVM(); + + const new_interval: u31 = if (repeat.getNumber()) |num| if (num < 1 or num > std.math.maxInt(u31)) 1 else @intFromFloat(num) else 1; + + // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L613 + TimeoutObject.js.idleTimeoutSetCached(timer, global, repeat); + this.strong_this.set(global, timer); + this.flags.kind = .setInterval; + this.interval = new_interval; + this.reschedule(timer, vm); +} + +pub fn run(this: *TimerObjectInternals, globalThis: *JSC.JSGlobalObject, timer: JSValue, callback: JSValue, arguments: JSValue, async_id: u64, vm: *JSC.VirtualMachine) bool { + if (vm.isInspectorEnabled()) { + Debugger.willDispatchAsyncCall(globalThis, .DOMTimer, async_id); + } + + defer { + if (vm.isInspectorEnabled()) { + Debugger.didDispatchAsyncCall(globalThis, .DOMTimer, async_id); + } + } + + // Bun__JSTimeout__call handles exceptions. + this.flags.in_callback = true; + defer this.flags.in_callback = false; + return Bun__JSTimeout__call(globalThis, timer, callback, arguments); +} + +pub fn init( + this: *TimerObjectInternals, + timer: JSValue, + global: *JSGlobalObject, + id: i32, + kind: Kind, + interval: u31, + callback: JSValue, + arguments: JSValue, +) void { + const vm = global.bunVM(); + this.* = .{ + .id = id, + .flags = .{ .kind = kind, .epoch = vm.timer.epoch }, + .interval = interval, + }; + + if (kind == .setImmediate) { + ImmediateObject.js.argumentsSetCached(timer, global, arguments); + ImmediateObject.js.callbackSetCached(timer, global, callback); + const parent: *ImmediateObject = @fieldParentPtr("internals", this); + vm.enqueueImmediateTask(parent); + this.setEnableKeepingEventLoopAlive(vm, true); + // ref'd by event loop + parent.ref(); + } else { + TimeoutObject.js.argumentsSetCached(timer, global, arguments); + TimeoutObject.js.callbackSetCached(timer, global, callback); + TimeoutObject.js.idleTimeoutSetCached(timer, global, JSC.jsNumber(interval)); + TimeoutObject.js.repeatSetCached(timer, global, if (kind == .setInterval) JSC.jsNumber(interval) else .null); + + // this increments the refcount + this.reschedule(timer, vm); + } + + this.strong_this.set(global, timer); +} + +pub fn doRef(this: *TimerObjectInternals, _: *JSC.JSGlobalObject, this_value: JSValue) JSValue { + this_value.ensureStillAlive(); + + const did_have_js_ref = this.flags.has_js_ref; + this.flags.has_js_ref = true; + + // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L256 + // and + // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L685-L687 + if (!did_have_js_ref and !this.flags.has_cleared_timer) { + this.setEnableKeepingEventLoopAlive(JSC.VirtualMachine.get(), true); + } + + return this_value; +} + +pub fn doRefresh(this: *TimerObjectInternals, globalObject: *JSC.JSGlobalObject, this_value: JSValue) JSValue { + // Immediates do not have a refresh function, and our binding generator should not let this + // function be reached even if you override the `this` value calling a Timeout object's + // `refresh` method + assert(this.flags.kind != .setImmediate); + + // setImmediate does not support refreshing and we do not support refreshing after cleanup + if (this.id == -1 or 
this.flags.kind == .setImmediate or this.flags.has_cleared_timer) { + return this_value; + } + + this.strong_this.set(globalObject, this_value); + this.reschedule(this_value, VirtualMachine.get()); + + return this_value; +} + +pub fn doUnref(this: *TimerObjectInternals, _: *JSC.JSGlobalObject, this_value: JSValue) JSValue { + this_value.ensureStillAlive(); + + const did_have_js_ref = this.flags.has_js_ref; + this.flags.has_js_ref = false; + + if (did_have_js_ref) { + this.setEnableKeepingEventLoopAlive(JSC.VirtualMachine.get(), false); + } + + return this_value; +} + +pub fn cancel(this: *TimerObjectInternals, vm: *VirtualMachine) void { + this.setEnableKeepingEventLoopAlive(vm, false); + this.flags.has_cleared_timer = true; + + if (this.flags.kind == .setImmediate) return; + + const was_active = this.eventLoopTimer().state == .ACTIVE; + + this.eventLoopTimer().state = .CANCELLED; + this.strong_this.deinit(); + + if (was_active) { + vm.timer.remove(this.eventLoopTimer()); + this.deref(); + } +} + +fn shouldRescheduleTimer(this: *TimerObjectInternals, repeat: JSValue, idle_timeout: JSValue) bool { + if (this.flags.kind == .setInterval and repeat.isNull()) return false; + if (idle_timeout.getNumber()) |num| { + if (num == -1) return false; + } + return true; +} + +pub fn reschedule(this: *TimerObjectInternals, timer: JSValue, vm: *VirtualMachine) void { + if (this.flags.kind == .setImmediate) return; + + const idle_timeout = TimeoutObject.js.idleTimeoutGetCached(timer).?; + const repeat = TimeoutObject.js.repeatGetCached(timer).?; + + // https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L612 + if (!this.shouldRescheduleTimer(repeat, idle_timeout)) return; + + const now = timespec.msFromNow(this.interval); + const was_active = this.eventLoopTimer().state == .ACTIVE; + if (was_active) { + vm.timer.remove(this.eventLoopTimer()); + } else { + this.ref(); + } + + vm.timer.update(this.eventLoopTimer(), &now); + this.flags.has_cleared_timer = false; + + if (this.flags.has_js_ref) { + this.setEnableKeepingEventLoopAlive(vm, true); + } +} + +fn setEnableKeepingEventLoopAlive(this: *TimerObjectInternals, vm: *VirtualMachine, enable: bool) void { + if (this.flags.is_keeping_event_loop_alive == enable) { + return; + } + this.flags.is_keeping_event_loop_alive = enable; + switch (this.flags.kind) { + .setTimeout, .setInterval => vm.timer.incrementTimerRef(if (enable) 1 else -1), + + // setImmediate has slightly different event loop logic + .setImmediate => vm.timer.incrementImmediateRef(if (enable) 1 else -1), + } +} + +pub fn hasRef(this: *TimerObjectInternals) JSValue { + return JSValue.jsBoolean(this.flags.is_keeping_event_loop_alive); +} + +pub fn toPrimitive(this: *TimerObjectInternals) bun.JSError!JSValue { + if (!this.flags.has_accessed_primitive) { + this.flags.has_accessed_primitive = true; + const vm = VirtualMachine.get(); + try vm.timer.maps.get(this.flags.kind).put(bun.default_allocator, this.id, this.eventLoopTimer()); + } + return JSValue.jsNumber(this.id); +} + +/// This is the getter for `_destroyed` on JS Timeout and Immediate objects +pub fn getDestroyed(this: *TimerObjectInternals) bool { + if (this.flags.has_cleared_timer) { + return true; + } + if (this.flags.in_callback) { + return false; + } + return switch (this.eventLoopTimer().state) { + .ACTIVE, .PENDING => false, + .FIRED, .CANCELLED => true, + }; +} + +pub fn finalize(this: *TimerObjectInternals) void { + this.strong_this.deinit(); + this.deref(); +} + +pub fn deinit(this: 
*TimerObjectInternals) void { + this.strong_this.deinit(); + const vm = VirtualMachine.get(); + const kind = this.flags.kind; + + if (this.eventLoopTimer().state == .ACTIVE) { + vm.timer.remove(this.eventLoopTimer()); + } + + if (this.flags.has_accessed_primitive) { + const map = vm.timer.maps.get(kind); + if (map.orderedRemove(this.id)) { + // If this array gets large, let's shrink it down + // Array keys are i32 + // Values are 1 ptr + // Therefore, 12 bytes per entry + // So if you created 21,000 timers and accessed them by ID, you'd be using 252KB + const allocated_bytes = map.capacity() * @sizeOf(TimeoutMap.Data); + const used_bytes = map.count() * @sizeOf(TimeoutMap.Data); + if (allocated_bytes - used_bytes > 256 * 1024) { + map.shrinkAndFree(bun.default_allocator, map.count() + 8); + } + } + } + + this.setEnableKeepingEventLoopAlive(vm, false); + switch (kind) { + .setImmediate => (@as(*ImmediateObject, @fieldParentPtr("internals", this))).ref_count.assertNoRefs(), + .setTimeout, .setInterval => (@as(*TimeoutObject, @fieldParentPtr("internals", this))).ref_count.assertNoRefs(), + } +} + +const bun = @import("bun"); +const std = @import("std"); +const JSC = bun.JSC; +const VirtualMachine = JSC.VirtualMachine; +const TimeoutObject = @import("../Timer.zig").TimeoutObject; +const ImmediateObject = @import("../Timer.zig").ImmediateObject; +const Debugger = @import("../../Debugger.zig"); +const timespec = bun.timespec; +const Environment = bun.Environment; +const ID = @import("../Timer.zig").ID; +const TimeoutMap = @import("../Timer.zig").TimeoutMap; +const Kind = @import("../Timer.zig").Kind; +const EventLoopTimer = @import("../Timer.zig").EventLoopTimer; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; +const assert = bun.assert; diff --git a/src/bun.js/api/UnsafeObject.zig b/src/bun.js/api/UnsafeObject.zig index a9f66fb954..a7568d8766 100644 --- a/src/bun.js/api/UnsafeObject.zig +++ b/src/bun.js/api/UnsafeObject.zig @@ -23,7 +23,7 @@ pub fn gcAggressionLevel( const value = callframe.arguments_old(1).ptr[0]; if (!value.isEmptyOrUndefinedOrNull()) { - switch (value.coerce(i32, globalThis)) { + switch (try value.coerce(i32, globalThis)) { 1 => globalThis.bunVM().aggressive_garbage_collection = .mild, 2 => globalThis.bunVM().aggressive_garbage_collection = .aggressive, 0 => globalThis.bunVM().aggressive_garbage_collection = .none, diff --git a/src/bun.js/api/bun/dns_resolver.zig b/src/bun.js/api/bun/dns_resolver.zig index f7bfc86f29..a2d637bc4d 100644 --- a/src/bun.js/api/bun/dns_resolver.zig +++ b/src/bun.js/api/bun/dns_resolver.zig @@ -3207,16 +3207,16 @@ pub const DNSResolver = struct { const size = bun.len(bun.cast([*:0]u8, buf[1..])) + 1; if (port == IANA_DNS_PORT) { - values.putIndex(globalThis, i, JSC.ZigString.init(buf[1..size]).withEncoding().toJS(globalThis)); + try values.putIndex(globalThis, i, JSC.ZigString.init(buf[1..size]).withEncoding().toJS(globalThis)); } else { if (family == std.posix.AF.INET6) { buf[0] = '['; buf[size] = ']'; const port_slice = std.fmt.bufPrint(buf[size + 1 ..], ":{d}", .{port}) catch unreachable; - values.putIndex(globalThis, i, JSC.ZigString.init(buf[0 .. size + 1 + port_slice.len]).withEncoding().toJS(globalThis)); + try values.putIndex(globalThis, i, JSC.ZigString.init(buf[0 .. size + 1 + port_slice.len]).withEncoding().toJS(globalThis)); } else { const port_slice = std.fmt.bufPrint(buf[size..], ":{d}", .{port}) catch unreachable; - values.putIndex(globalThis, i, JSC.ZigString.init(buf[1 .. 
size + port_slice.len]).withEncoding().toJS(globalThis)); + try values.putIndex(globalThis, i, JSC.ZigString.init(buf[1 .. size + port_slice.len]).withEncoding().toJS(globalThis)); } } } @@ -3303,7 +3303,7 @@ pub const DNSResolver = struct { return globalThis.throwInvalidArgumentType("setServers", "servers", "array"); } - var triplesIterator = argument.arrayIterator(globalThis); + var triplesIterator = try argument.arrayIterator(globalThis); if (triplesIterator.len == 0) { const r = c_ares.ares_set_servers_ports(channel, null); @@ -3321,19 +3321,19 @@ pub const DNSResolver = struct { var i: u32 = 0; - while (triplesIterator.next()) |triple| : (i += 1) { + while (try triplesIterator.next()) |triple| : (i += 1) { if (!triple.isArray()) { return globalThis.throwInvalidArgumentType("setServers", "triple", "array"); } - const family = JSValue.getIndex(triple, globalThis, 0).toInt32(); - const port = JSValue.getIndex(triple, globalThis, 2).toInt32(); + const family = (try triple.getIndex(globalThis, 0)).toInt32(); + const port = (try triple.getIndex(globalThis, 2)).toInt32(); if (family != 4 and family != 6) { return globalThis.throwInvalidArguments("Invalid address family", .{}); } - const addressString = try JSValue.getIndex(triple, globalThis, 1).toBunString(globalThis); + const addressString = try (try triple.getIndex(globalThis, 1)).toBunString(globalThis); defer addressString.deref(); const addressSlice = try addressString.toOwnedSlice(allocator); @@ -3387,11 +3387,11 @@ pub const DNSResolver = struct { const options = callframe.argument(0); if (options.isObject()) { if (try options.getTruthy(globalThis, "timeout")) |timeout| { - resolver.options.timeout = timeout.coerceToInt32(globalThis); + resolver.options.timeout = try timeout.coerceToInt32(globalThis); } if (try options.getTruthy(globalThis, "tries")) |tries| { - resolver.options.tries = tries.coerceToInt32(globalThis); + resolver.options.tries = try tries.coerceToInt32(globalThis); } } diff --git a/src/bun.js/api/bun/h2_frame_parser.zig b/src/bun.js/api/bun/h2_frame_parser.zig index 5e6e4305d7..d022fff6f7 100644 --- a/src/bun.js/api/bun/h2_frame_parser.zig +++ b/src/bun.js/api/bun/h2_frame_parser.zig @@ -605,7 +605,7 @@ const Handlers = struct { return globalObject.throwInvalidArguments("Expected \"{s}\" callback to be a function", .{pair[1]}); } - @field(handlers, pair.@"0") = callback_value; + @field(handlers, pair.@"0") = callback_value.withAsyncContextIfNeeded(globalObject); } } @@ -614,7 +614,7 @@ const Handlers = struct { return globalObject.throwInvalidArguments("Expected \"error\" callback to be a function", .{}); } - handlers.onError = callback_value; + handlers.onError = callback_value.withAsyncContextIfNeeded(globalObject); } // onWrite is required for duplex support or if more than 1 parser is attached to the same socket (unliked) @@ -700,13 +700,13 @@ pub const H2FrameParser = struct { // local Window limits the download of data // current window size for the connection - windowSize: u64 = 65535, + windowSize: u64 = DEFAULT_WINDOW_SIZE, // used window size for the connection usedWindowSize: u64 = 0, // remote Window limits the upload of data // remote window size for the connection - remoteWindowSize: u64 = 0, + remoteWindowSize: u64 = DEFAULT_WINDOW_SIZE, // remote used window size for the connection remoteUsedWindowSize: u64 = 0, @@ -993,13 +993,13 @@ pub const H2FrameParser = struct { const able_to_send = frame_slice[0..max_size]; client.queuedDataSize -= able_to_send.len; written.* += able_to_send.len; - 
this.remoteUsedWindowSize += able_to_send.len; - client.remoteUsedWindowSize += able_to_send.len; - log("dataFrame partial flushed {} {} {} {} {} {} {}", .{ able_to_send.len, frame.end_stream, client.queuedDataSize, this.remoteUsedWindowSize, client.remoteUsedWindowSize, this.remoteWindowSize, client.remoteWindowSize }); - - const padding = this.getPadding(able_to_send.len, MAX_PAYLOAD_SIZE_WITHOUT_FRAME - 1); + const padding = this.getPadding(able_to_send.len, max_size); const payload_size = able_to_send.len + (if (padding != 0) padding + 1 else 0); + + this.remoteUsedWindowSize += payload_size; + client.remoteUsedWindowSize += payload_size; + var flags: u8 = 0; // we ignore end_stream for now because we know we have more data to send if (padding != 0) { flags |= @intFromEnum(DataFrameFlags.PADDED); @@ -1024,12 +1024,11 @@ pub const H2FrameParser = struct { // flush with some payload client.queuedDataSize -= frame_slice.len; written.* += frame_slice.len; - this.remoteUsedWindowSize += frame_slice.len; - client.remoteUsedWindowSize += frame_slice.len; - log("dataFrame flushed {} {}", .{ frame_slice.len, frame.end_stream }); - const padding = this.getPadding(frame_slice.len, MAX_PAYLOAD_SIZE_WITHOUT_FRAME - 1); + const padding = this.getPadding(frame_slice.len, max_size); const payload_size = frame_slice.len + (if (padding != 0) padding + 1 else 0); + this.remoteUsedWindowSize += payload_size; + client.remoteUsedWindowSize += payload_size; var flags: u8 = if (frame.end_stream and !this.waitForTrailers) @intFromEnum(DataFrameFlags.END_STREAM) else 0; if (padding != 0) { flags |= @intFromEnum(DataFrameFlags.PADDED); @@ -1330,6 +1329,9 @@ pub const H2FrameParser = struct { pub fn endStream(this: *H2FrameParser, stream: *Stream, rstCode: ErrorCode) void { log("HTTP_FRAME_RST_STREAM id: {} code: {}", .{ stream.id, @intFromEnum(rstCode) }); + if (stream.state == .CLOSED) { + return; + } var buffer: [FrameHeader.byteSize + 4]u8 = undefined; @memset(&buffer, 0); var writerStream = std.io.fixedBufferStream(&buffer); @@ -1384,7 +1386,10 @@ pub const H2FrameParser = struct { if (debug_data.len > 0) { _ = this.write(debug_data); } - const chunk = this.handlers.binary_type.toJS(debug_data, this.handlers.globalObject); + const chunk = this.handlers.binary_type.toJS(debug_data, this.handlers.globalObject) catch |err| { + this.dispatch(.onError, this.globalThis.takeException(err)); + return; + }; if (emitError) { if (rstCode != .NO_ERROR) { @@ -1461,7 +1466,7 @@ pub const H2FrameParser = struct { } pub fn sendSettingsACK(this: *H2FrameParser) void { - log("HTTP_FRAME_SETTINGS ack true", .{}); + log("send HTTP_FRAME_SETTINGS ack true", .{}); var buffer: [FrameHeader.byteSize]u8 = undefined; @memset(&buffer, 0); var stream = std.io.fixedBufferStream(&buffer); @@ -1688,7 +1693,7 @@ pub const H2FrameParser = struct { this.writeBuffer.clearRetainingCapacity(); } } - const output_value = this.handlers.binary_type.toJS(bytes, this.handlers.globalObject); + const output_value = this.handlers.binary_type.toJS(bytes, this.handlers.globalObject) catch .zero; // TODO: properly propagate exception upwards const result = this.call(.onWrite, output_value); if (result.isBoolean() and !result.toBoolean()) { this.has_nonnative_backpressure = true; @@ -1721,7 +1726,7 @@ pub const H2FrameParser = struct { return false; } // fallback to onWrite non-native callback - const output_value = this.handlers.binary_type.toJS(bytes, this.handlers.globalObject); + const output_value = this.handlers.binary_type.toJS(bytes, 
this.handlers.globalObject) catch .zero; // TODO: properly propagate exception upwards const result = this.call(.onWrite, output_value); const code = if (result.isNumber()) result.to(i32) else -1; switch (code) { @@ -1914,14 +1919,14 @@ pub const H2FrameParser = struct { } if (getHTTP2CommonString(globalObject, header.well_know)) |js_header_name| { - headers.push(globalObject, js_header_name); - headers.push(globalObject, bun.String.createUTF8ForJS(globalObject, header.value)); + try headers.push(globalObject, js_header_name); + try headers.push(globalObject, bun.String.createUTF8ForJS(globalObject, header.value)); if (header.never_index) { if (sensitiveHeaders.isUndefined()) { sensitiveHeaders = try JSC.JSValue.createEmptyArray(globalObject, 0); sensitiveHeaders.ensureStillAlive(); } - sensitiveHeaders.push(globalObject, js_header_name); + try sensitiveHeaders.push(globalObject, js_header_name); } } else { const js_header_name = bun.String.createUTF8ForJS(globalObject, header.name); @@ -1932,11 +1937,11 @@ pub const H2FrameParser = struct { sensitiveHeaders = try JSC.JSValue.createEmptyArray(globalObject, 0); sensitiveHeaders.ensureStillAlive(); } - sensitiveHeaders.push(globalObject, js_header_name); + try sensitiveHeaders.push(globalObject, js_header_name); } - headers.push(globalObject, js_header_name); - headers.push(globalObject, js_header_value); + try headers.push(globalObject, js_header_name); + try headers.push(globalObject, js_header_value); js_header_name.ensureStillAlive(); js_header_value.ensureStillAlive(); @@ -2000,7 +2005,7 @@ pub const H2FrameParser = struct { data_needed -= padding; log("data received {} {}", .{ padding, payload.len }); payload = payload[0..@min(@as(usize, @intCast(data_needed)), payload.len)]; - const chunk = this.handlers.binary_type.toJS(payload, this.handlers.globalObject); + const chunk = this.handlers.binary_type.toJS(payload, this.handlers.globalObject) catch .zero; // TODO: properly propagate exception upwards // its fine to truncate because is not possible to receive more data than u32 here, usize is only because of slices in size this.ajustWindowSize(stream, @truncate(payload.len)); this.dispatchWithExtra(.onStreamData, stream.getIdentifier(), chunk); @@ -2048,7 +2053,7 @@ pub const H2FrameParser = struct { if (handleIncommingPayload(this, data, frame.streamIdentifier)) |content| { const payload = content.data; const error_code = u32FromBytes(payload[4..8]); - const chunk = this.handlers.binary_type.toJS(payload[8..], this.handlers.globalObject); + const chunk = this.handlers.binary_type.toJS(payload[8..], this.handlers.globalObject) catch .zero; // TODO: properly propagate exception upwards this.readBuffer.reset(); this.dispatchWith2Extra(.onGoAway, JSC.JSValue.jsNumber(error_code), JSC.JSValue.jsNumber(this.lastStreamID), chunk); return content.end; @@ -2100,12 +2105,12 @@ pub const H2FrameParser = struct { // need to create an array const array = try JSC.JSValue.createEmptyArray(this.handlers.globalObject, 0); array.ensureStillAlive(); - array.push(this.handlers.globalObject, originValue); - array.push(this.handlers.globalObject, this.stringOrEmptyToJS(origin_str)); + try array.push(this.handlers.globalObject, originValue); + try array.push(this.handlers.globalObject, this.stringOrEmptyToJS(origin_str)); originValue = array; } else { // we already have an array, just add the origin to it - originValue.push(this.handlers.globalObject, this.stringOrEmptyToJS(origin_str)); + try originValue.push(this.handlers.globalObject, 
this.stringOrEmptyToJS(origin_str)); } count += 1; payload = payload[origin_length + 2 ..]; @@ -2207,7 +2212,7 @@ pub const H2FrameParser = struct { } else { this.outStandingPings -|= 1; } - const buffer = this.handlers.binary_type.toJS(payload, this.handlers.globalObject); + const buffer = this.handlers.binary_type.toJS(payload, this.handlers.globalObject) catch .zero; // TODO: properly propagate exception upwards this.dispatchWithExtra(.onPing, buffer, JSC.JSValue.jsBoolean(!isNotACK)); return content.end; } @@ -2386,12 +2391,11 @@ pub const H2FrameParser = struct { if (this.remoteSettings == null) { // ok empty settings so default settings - const remoteSettings: FullSettingsPayload = .{}; + var remoteSettings: FullSettingsPayload = .{}; this.remoteSettings = remoteSettings; log("remoteSettings.initialWindowSize: {} {} {}", .{ remoteSettings.initialWindowSize, this.remoteUsedWindowSize, this.remoteWindowSize }); if (remoteSettings.initialWindowSize >= this.remoteWindowSize) { - this.remoteWindowSize = remoteSettings.initialWindowSize; var it = this.streams.valueIterator(); while (it.next()) |stream| { if (remoteSettings.initialWindowSize >= stream.remoteWindowSize) { @@ -2399,6 +2403,7 @@ pub const H2FrameParser = struct { } } } + this.dispatch(.onRemoteSettings, remoteSettings.toJS(this.handlers.globalObject)); } } @@ -2422,7 +2427,6 @@ pub const H2FrameParser = struct { this.remoteSettings = remoteSettings; log("remoteSettings.initialWindowSize: {} {} {}", .{ remoteSettings.initialWindowSize, this.remoteUsedWindowSize, this.remoteWindowSize }); if (remoteSettings.initialWindowSize >= this.remoteWindowSize) { - this.remoteWindowSize = remoteSettings.initialWindowSize; var it = this.streams.valueIterator(); while (it.next()) |stream| { if (remoteSettings.initialWindowSize >= stream.remoteWindowSize) { @@ -2878,9 +2882,9 @@ pub const H2FrameParser = struct { var stream = std.io.fixedBufferStream(&buffer); const writer = stream.writer(); stream.seekTo(FrameHeader.byteSize) catch {}; - var value_iter = origin_arg.arrayIterator(globalObject); + var value_iter = try origin_arg.arrayIterator(globalObject); - while (value_iter.next()) |item| { + while (try value_iter.next()) |item| { if (!item.isString()) { return globalObject.throwInvalidArguments("Expected origin to be a string or an array of strings", .{}); } @@ -3258,11 +3262,7 @@ pub const H2FrameParser = struct { max_size = MAX_PAYLOAD_SIZE_WITHOUT_FRAME; } const size = @min(payload.len - offset, max_size); - defer if (!enqueued) { - log("remoteUsedWindowSize += {} {} {} {}", .{ size, stream.remoteUsedWindowSize, this.remoteUsedWindowSize, this.isServer }); - stream.remoteUsedWindowSize += size; - this.remoteUsedWindowSize += size; - }; + const slice = payload[offset..(size + offset)]; offset += size; const end_stream = offset >= payload.len and can_close; @@ -3273,8 +3273,10 @@ pub const H2FrameParser = struct { // the callback will only be called after the last frame is sended stream.queueFrame(this, slice, if (offset >= payload.len) callback else .js_undefined, offset >= payload.len and close); } else { - const padding = stream.getPadding(size, max_size - 1); + const padding = stream.getPadding(size, max_size); const payload_size = size + (if (padding != 0) padding + 1 else 0); + stream.remoteUsedWindowSize += payload_size; + this.remoteUsedWindowSize += payload_size; var flags: u8 = if (end_stream) @intFromEnum(DataFrameFlags.END_STREAM) else 0; if (padding != 0) { flags |= @intFromEnum(DataFrameFlags.PADDED); @@ -3446,7 +3448,7 @@ 
pub const H2FrameParser = struct { if (js_value.jsType().isArray()) { // https://github.com/oven-sh/bun/issues/8940 - var value_iter = js_value.arrayIterator(globalObject); + var value_iter = try js_value.arrayIterator(globalObject); if (SingleValueHeaders.indexOf(validated_name)) |idx| { if (value_iter.len > 1 or single_value_headers[idx]) { @@ -3456,7 +3458,7 @@ pub const H2FrameParser = struct { single_value_headers[idx] = true; } - while (value_iter.next()) |item| { + while (try value_iter.next()) |item| { if (item.isEmptyOrUndefinedOrNull()) { const exception = globalObject.toTypeError(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{validated_name}); return globalObject.throwValue(exception); @@ -3785,6 +3787,7 @@ pub const H2FrameParser = struct { pub fn request(this: *H2FrameParser, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { JSC.markBinding(@src()); + log("request", .{}); const args_list = callframe.arguments_old(5); if (args_list.len < 4) { @@ -3889,7 +3892,7 @@ pub const H2FrameParser = struct { if (js_value.jsType().isArray()) { log("array header {s}", .{name}); // https://github.com/oven-sh/bun/issues/8940 - var value_iter = js_value.arrayIterator(globalObject); + var value_iter = try js_value.arrayIterator(globalObject); if (SingleValueHeaders.indexOf(validated_name)) |idx| { if (value_iter.len > 1 or single_value_headers[idx]) { @@ -3902,7 +3905,7 @@ pub const H2FrameParser = struct { single_value_headers[idx] = true; } - while (value_iter.next()) |item| { + while (try value_iter.next()) |item| { if (item.isEmptyOrUndefinedOrNull()) { if (!globalObject.hasException()) { return globalObject.ERR(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{validated_name}).throw(); @@ -4107,7 +4110,7 @@ pub const H2FrameParser = struct { this.rejectedStreams += 1; this.dispatchWithExtra(.onStreamError, stream.getIdentifier(), JSC.JSValue.jsNumber(stream.rstCode)); if (this.rejectedStreams >= this.maxRejectedStreams) { - const chunk = this.handlers.binary_type.toJS("ENHANCE_YOUR_CALM", this.handlers.globalObject); + const chunk = try this.handlers.binary_type.toJS("ENHANCE_YOUR_CALM", this.handlers.globalObject); this.dispatchWith2Extra(.onError, JSC.JSValue.jsNumber(@intFromEnum(ErrorCode.ENHANCE_YOUR_CALM)), JSC.JSValue.jsNumber(this.lastStreamID), chunk); } return JSC.JSValue.jsNumber(stream_id); @@ -4423,8 +4426,6 @@ pub const H2FrameParser = struct { this.strong_ctx.set(globalObject, context_obj); this.hpack = lshpack.HPACK.init(this.localSettings.headerTableSize); - this.windowSize = this.localSettings.initialWindowSize; - log("windowSize: {d} isServer: {}", .{ this.windowSize, is_server }); if (is_server) { _ = this.setSettings(this.localSettings); } else { diff --git a/src/bun.js/api/bun/process.zig b/src/bun.js/api/bun/process.zig index 1d079e128b..7812fc120f 100644 --- a/src/bun.js/api/bun/process.zig +++ b/src/bun.js/api/bun/process.zig @@ -1,13 +1,6 @@ -const bun = @import("bun"); -const std = @import("std"); -const PosixSpawn = bun.spawn; -const Environment = bun.Environment; -const JSC = bun.JSC; -const Output = bun.Output; -const uv = bun.windows.libuv; const pid_t = if (Environment.isPosix) std.posix.pid_t else uv.uv_pid_t; const fd_t = if (Environment.isPosix) std.posix.fd_t else i32; -const Maybe = JSC.Maybe; +const log = bun.Output.scoped(.PROCESS, false); const win_rusage = struct { utime: struct { @@ -78,10 +71,6 @@ pub fn uv_getrusage(process: *uv.uv_process_t) win_rusage { } pub const Rusage = 
if (Environment.isWindows) win_rusage else std.posix.rusage; -const Subprocess = JSC.Subprocess; -const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess; -const ShellSubprocess = bun.shell.ShellSubprocess; -const ProcessHandle = @import("../../../cli/filter_run.zig").ProcessHandle; // const ShellSubprocessMini = bun.shell.ShellSubprocessMini; pub const ProcessExitHandler = struct { ptr: TaggedPointer = TaggedPointer.Null, @@ -143,7 +132,7 @@ pub const PidFDType = if (Environment.isLinux) fd_t else u0; pub const Process = struct { const Self = @This(); - const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{}); + const RefCount = bun.ptr.ThreadSafeRefCount(@This(), "ref_count", deinit, .{}); pub const ref = RefCount.ref; pub const deref = RefCount.deref; @@ -990,6 +979,13 @@ pub const PosixSpawnOptions = struct { /// and posix_spawnp(2) will behave as a more /// featureful execve(2). use_execve_on_macos: bool = false, + /// If we need to call `socketpair()`, this + /// sets SO_NOSIGPIPE when true. + /// + /// If false, this avoids setting SO_NOSIGPIPE + /// for stdout. This is used to preserve + /// consistent shell semantics. + no_sigpipe: bool = true, pub const Stdio = union(enum) { path: []const u8, @@ -1347,7 +1343,17 @@ pub fn spawnProcessPosix( } const fds: [2]bun.FileDescriptor = brk: { - const pair = try bun.sys.socketpair(std.posix.AF.UNIX, std.posix.SOCK.STREAM, 0, .blocking).unwrap(); + const pair = if (!options.no_sigpipe) try bun.sys.socketpairForShell( + std.posix.AF.UNIX, + std.posix.SOCK.STREAM, + 0, + .blocking, + ).unwrap() else try bun.sys.socketpair( + std.posix.AF.UNIX, + std.posix.SOCK.STREAM, + 0, + .blocking, + ).unwrap(); break :brk .{ pair[if (i == 0) 1 else 0], pair[if (i == 0) 0 else 1] }; }; @@ -2228,3 +2234,20 @@ pub const sync = struct { }; } }; + +// @sortImports + +const std = @import("std"); +const ProcessHandle = @import("../../../cli/filter_run.zig").ProcessHandle; + +const bun = @import("bun"); +const Environment = bun.Environment; +const Output = bun.Output; +const PosixSpawn = bun.spawn; +const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess; +const ShellSubprocess = bun.shell.ShellSubprocess; +const uv = bun.windows.libuv; + +const JSC = bun.JSC; +const Maybe = JSC.Maybe; +const Subprocess = JSC.Subprocess; diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 1d6551f486..1be36b9bf0 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -190,7 +190,7 @@ pub fn NewSocket(comptime ssl: bool) type { const enabled: bool = brk: { if (args.len >= 1) { - break :brk args.ptr[0].coerce(bool, globalThis); + break :brk args.ptr[0].toBoolean(); } break :brk false; }; @@ -208,11 +208,12 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn setNoDelay(this: *This, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { JSC.markBinding(@src()); + _ = globalThis; const args = callframe.arguments_old(1); const enabled: bool = brk: { if (args.len >= 1) { - break :brk args.ptr[0].coerce(bool, globalThis); + break :brk args.ptr[0].toBoolean(); } break :brk true; }; @@ -228,8 +229,10 @@ pub fn NewSocket(comptime ssl: bool) type { if (vm.isShuttingDown()) { return; } - vm.eventLoop().enter(); - defer vm.eventLoop().exit(); + // the handlers must be kept alive for the duration of the function call + // that way if we need to call the error handler, we can + var scope = handlers.enter(); + defer scope.exit(); const globalObject = 
handlers.globalObject; const this_value = this.getThisValue(globalObject); _ = handlers.callErrorHandler(this_value, &.{ this_value, err_value }); @@ -254,8 +257,10 @@ pub fn NewSocket(comptime ssl: bool) type { // is not writable if we have buffered data or if we are already detached if (this.buffered_data_for_node_net.len > 0 or this.socket.isDetached()) return; - vm.eventLoop().enter(); - defer vm.eventLoop().exit(); + // the handlers must be kept alive for the duration of the function call + // that way if we need to call the error handler, we can + var scope = handlers.enter(); + defer scope.exit(); const globalObject = handlers.globalObject; const this_value = this.getThisValue(globalObject); @@ -320,10 +325,11 @@ pub fn NewSocket(comptime ssl: bool) type { .syscall = bun.String.static("connect"), .code = code_, }; - vm.eventLoop().enter(); - defer { - vm.eventLoop().exit(); - } + + // the handlers must be kept alive for the duration of the function call + // that way if we need to call the error handler, we can + var scope = handlers.enter(); + defer scope.exit(); if (callback == .zero) { if (handlers.promise.trySwap()) |promise| { @@ -346,10 +352,7 @@ pub fn NewSocket(comptime ssl: bool) type { this.has_pending_activity.store(false, .release); const err_value = err.toErrorInstance(globalObject); - const result = callback.call(globalObject, this_value, &[_]JSValue{ - this_value, - err_value, - }) catch |e| globalObject.takeException(e); + const result = callback.call(globalObject, this_value, &[_]JSValue{ this_value, err_value }) catch |e| globalObject.takeException(e); if (result.toError()) |err_val| { if (handlers.rejectPromise(err_val)) return; @@ -472,9 +475,11 @@ pub fn NewSocket(comptime ssl: bool) type { } else { if (callback == .zero) return; } - const vm = handlers.vm; - vm.eventLoop().enter(); - defer vm.eventLoop().exit(); + + // the handlers must be kept alive for the duration of the function call + // that way if we need to call the error handler, we can + var scope = handlers.enter(); + defer scope.exit(); const result = callback.call(globalObject, this_value, &[_]JSValue{this_value}) catch |err| globalObject.takeException(err); if (result.toError()) |err| { @@ -635,7 +640,7 @@ pub fn NewSocket(comptime ssl: bool) type { var js_error: JSValue = .js_undefined; if (err != 0) { // errors here are always a read error - js_error = bun.sys.Error.fromCodeInt(err, .read).toJSC(globalObject); + js_error = bun.sys.Error.fromCodeInt(err, .read).toJS(globalObject); } _ = callback.call(globalObject, this_value, &[_]JSValue{ @@ -661,7 +666,10 @@ pub fn NewSocket(comptime ssl: bool) type { const globalObject = handlers.globalObject; const this_value = this.getThisValue(globalObject); - const output_value = handlers.binary_type.toJS(data, globalObject); + const output_value = handlers.binary_type.toJS(data, globalObject) catch |err| { + this.handleError(globalObject.takeException(err)); + return; + }; // the handlers must be kept alive for the duration of the function call // that way if we need to call the error handler, we can @@ -722,7 +730,7 @@ pub fn NewSocket(comptime ssl: bool) type { if (args.len == 0) { return globalObject.throw("Expected 1 argument, got 0", .{}); } - const t = args.ptr[0].coerce(i32, globalObject); + const t = try args.ptr[0].coerce(i32, globalObject); if (t < 0) { return globalObject.throw("Timeout must be a positive integer", .{}); } @@ -865,7 +873,7 @@ pub fn NewSocket(comptime ssl: bool) type { if (comptime ssl) { // TLS wrapped but in TCP mode if 
(this.wrapped == .tcp) { - const res = this.socket.rawWrite(buffer, false); + const res = this.socket.rawWrite(buffer); const uwrote: usize = @intCast(@max(res, 0)); this.bytes_written += uwrote; log("write({d}) = {d}", .{ buffer.len, res }); @@ -873,7 +881,7 @@ pub fn NewSocket(comptime ssl: bool) type { } } - const res = this.socket.write(buffer, false); + const res = this.socket.write(buffer); const uwrote: usize = @intCast(@max(res, 0)); this.bytes_written += uwrote; log("write({d}) = {d}", .{ buffer.len, res }); @@ -883,7 +891,14 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn writeBuffered(this: *This, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { if (this.socket.isDetached()) { this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); - return JSValue.jsBoolean(false); + // TODO: should we separate unattached and detached? unattached shouldn't throw here + const err: JSC.SystemError = .{ + .errno = @intFromEnum(bun.sys.SystemErrno.EBADF), + .code = .static("EBADF"), + .message = .static("write EBADF"), + .syscall = .static("write"), + }; + return globalObject.throwValue(err.toErrorInstance(globalObject)); } const args = callframe.argumentsUndef(2); @@ -1187,7 +1202,7 @@ pub fn NewSocket(comptime ssl: bool) type { fn internalFlush(this: *This) void { if (this.buffered_data_for_node_net.len > 0) { - const written: usize = @intCast(@max(this.socket.write(this.buffered_data_for_node_net.slice(), false), 0)); + const written: usize = @intCast(@max(this.socket.write(this.buffered_data_for_node_net.slice()), 0)); this.bytes_written += written; if (written > 0) { if (this.buffered_data_for_node_net.len > written) { @@ -1347,6 +1362,7 @@ pub fn NewSocket(comptime ssl: bool) type { var prev_handlers = this.handlers; prev_handlers.unprotect(); this.handlers.* = handlers; // TODO: this is a memory leak + this.handlers.withAsyncContextIfNeeded(globalObject); this.handlers.protect(); return .js_undefined; @@ -1389,7 +1405,7 @@ pub fn NewSocket(comptime ssl: bool) type { return .zero; } - const handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server); + var handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server); if (globalObject.hasException()) { return .zero; @@ -1444,6 +1460,7 @@ pub fn NewSocket(comptime ssl: bool) type { const ext_size = @sizeOf(WrappedSocket); var handlers_ptr = bun.default_allocator.create(Handlers) catch bun.outOfMemory(); + handlers.withAsyncContextIfNeeded(globalObject); handlers_ptr.* = handlers; handlers_ptr.protect(); var tls = bun.new(TLSSocket, .{ @@ -1579,8 +1596,8 @@ pub fn NewSocket(comptime ssl: bool) type { } const array = try JSC.JSValue.createEmptyArray(globalObject, 2); - array.putIndex(globalObject, 0, raw_js_value); - array.putIndex(globalObject, 1, tls_js_value); + try array.putIndex(globalObject, 0, raw_js_value); + try array.putIndex(globalObject, 1, tls_js_value); defer this.deref(); @@ -1965,6 +1982,7 @@ pub fn jsUpgradeDuplexToTLS(globalObject: *JSC.JSGlobalObject, callframe: *JSC.C var handlers_ptr = handlers.vm.allocator.create(Handlers) catch bun.outOfMemory(); handlers_ptr.* = handlers; handlers_ptr.is_server = is_server; + handlers_ptr.withAsyncContextIfNeeded(globalObject); handlers_ptr.protect(); var tls = bun.new(TLSSocket, .{ .ref_count = .init(), @@ -2009,9 +2027,9 @@ pub fn jsUpgradeDuplexToTLS(globalObject: *JSC.JSGlobalObject, callframe: *JSC.C duplexContext.startTLS(); const array = try 
JSC.JSValue.createEmptyArray(globalObject, 2); - array.putIndex(globalObject, 0, tls_js_value); + try array.putIndex(globalObject, 0, tls_js_value); // data, end, drain and close events must be reported - array.putIndex(globalObject, 1, try duplexContext.upgrade.getJSHandlers(globalObject)); + try array.putIndex(globalObject, 1, try duplexContext.upgrade.getJSHandlers(globalObject)); return array; } @@ -2059,15 +2077,15 @@ pub fn jsCreateSocketPair(global: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JS const rc = std.c.socketpair(std.posix.AF.UNIX, std.posix.SOCK.STREAM, 0, &fds_); if (rc != 0) { const err = bun.sys.Error.fromCode(bun.sys.getErrno(rc), .socketpair); - return global.throwValue(err.toJSC(global)); + return global.throwValue(err.toJS(global)); } _ = bun.FD.fromNative(fds_[0]).updateNonblocking(true); _ = bun.FD.fromNative(fds_[1]).updateNonblocking(true); const array = try JSC.JSValue.createEmptyArray(global, 2); - array.putIndex(global, 0, JSC.jsNumber(fds_[0])); - array.putIndex(global, 1, JSC.jsNumber(fds_[1])); + try array.putIndex(global, 0, JSC.jsNumber(fds_[0])); + try array.putIndex(global, 1, JSC.jsNumber(fds_[1])); return array; } @@ -2091,12 +2109,12 @@ pub fn jsSetSocketOptions(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame if (is_for_send_buffer) { const result = bun.sys.setsockopt(file_descriptor, std.posix.SOL.SOCKET, std.posix.SO.SNDBUF, buffer_size); if (result.asErr()) |err| { - return global.throwValue(err.toJSC(global)); + return global.throwValue(err.toJS(global)); } } else if (is_for_recv_buffer) { const result = bun.sys.setsockopt(file_descriptor, std.posix.SOL.SOCKET, std.posix.SO.RCVBUF, buffer_size); if (result.asErr()) |err| { - return global.throwValue(err.toJSC(global)); + return global.throwValue(err.toJS(global)); } } } diff --git a/src/bun.js/api/bun/socket/Handlers.zig b/src/bun.js/api/bun/socket/Handlers.zig index ce37da515f..7ff2a63e70 100644 --- a/src/bun.js/api/bun/socket/Handlers.zig +++ b/src/bun.js/api/bun/socket/Handlers.zig @@ -30,8 +30,7 @@ pub const Scope = struct { handlers: *Handlers, pub fn exit(this: *Scope) void { - var vm = this.handlers.vm; - defer vm.eventLoop().exit(); + this.handlers.vm.eventLoop().exit(); this.handlers.markInactive(); } }; @@ -39,9 +38,7 @@ pub const Scope = struct { pub fn enter(this: *Handlers) Scope { this.markActive(); this.vm.eventLoop().enter(); - return .{ - .handlers = this, - }; + return .{ .handlers = this }; } // corker: Corker = .{}, @@ -175,6 +172,25 @@ pub fn unprotect(this: *Handlers) void { this.onHandshake.unprotect(); } +pub fn withAsyncContextIfNeeded(this: *Handlers, globalObject: *JSC.JSGlobalObject) void { + inline for (.{ + "onOpen", + "onClose", + "onData", + "onWritable", + "onTimeout", + "onConnectError", + "onEnd", + "onError", + "onHandshake", + }) |field| { + const value = @field(this, field); + if (value != .zero) { + @field(this, field) = value.withAsyncContextIfNeeded(globalObject); + } + } +} + pub fn protect(this: *Handlers) void { if (comptime Environment.isDebug) { this.protection_count += 1; @@ -317,11 +333,7 @@ pub const SocketConfig = struct { return globalObject.throwInvalidArguments("Expected \"port\" to be a number between 0 and 65535", .{}); } - const porti32 = port_value.coerceToInt32(globalObject); - if (globalObject.hasException()) { - return error.JSError; - } - + const porti32 = try port_value.coerceToInt32(globalObject); if (porti32 < 0 or porti32 > 65535) { return globalObject.throwInvalidArguments("Expected \"port\" to be a number between 0 and 
65535", .{}); } @@ -350,6 +362,7 @@ pub const SocketConfig = struct { default_data = default_data_value; } + handlers.withAsyncContextIfNeeded(globalObject); handlers.protect(); return SocketConfig{ diff --git a/src/bun.js/api/bun/socket/Listener.zig b/src/bun.js/api/bun/socket/Listener.zig index 164a5884c5..2be8e5791d 100644 --- a/src/bun.js/api/bun/socket/Listener.zig +++ b/src/bun.js/api/bun/socket/Listener.zig @@ -90,10 +90,11 @@ pub fn reload(this: *Listener, globalObject: *JSC.JSGlobalObject, callframe: *JS return globalObject.throw("Expected \"socket\" object", .{}); }; - const handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server); + var handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server); var prev_handlers = &this.handlers; prev_handlers.unprotect(); + handlers.withAsyncContextIfNeeded(globalObject); this.handlers = handlers; // TODO: this is a memory leak this.handlers.protect(); @@ -264,8 +265,14 @@ pub fn listen(globalObject: *JSC.JSGlobalObject, opts: JSValue) bun.JSError!JSVa break :brk socket_context.listenUnix(ssl_enabled, host, host.len, socket_flags, 8, &errno); }, .fd => |fd| { - _ = fd; - return globalObject.ERR(.INVALID_ARG_VALUE, "Bun does not support listening on a file descriptor.", .{}).throw(); + const err: bun.jsc.SystemError = .{ + .errno = @intFromEnum(bun.sys.SystemErrno.EINVAL), + .code = .static("EINVAL"), + .message = .static("Bun does not support listening on a file descriptor."), + .syscall = .static("listen"), + .fd = fd.uv(), + }; + return globalObject.throwValue(err.toErrorInstance(globalObject)); }, } } orelse { diff --git a/src/bun.js/api/bun/socket/WindowsNamedPipeContext.zig b/src/bun.js/api/bun/socket/WindowsNamedPipeContext.zig index e3ea05d2da..94d8db186f 100644 --- a/src/bun.js/api/bun/socket/WindowsNamedPipeContext.zig +++ b/src/bun.js/api/bun/socket/WindowsNamedPipeContext.zig @@ -97,17 +97,12 @@ fn onWritable(this: *WindowsNamedPipeContext) void { fn onError(this: *WindowsNamedPipeContext, err: bun.sys.Error) void { if (this.is_open) { - if (this.vm.isShuttingDown()) { - // dont touch global just wait to close vm is shutting down - return; - } - switch (this.socket) { .tls => |tls| { - tls.handleError(err.toJSC(this.globalThis)); + tls.handleError(err.toJS(this.globalThis)); }, .tcp => |tcp| { - tcp.handleError(err.toJSC(this.globalThis)); + tcp.handleError(err.toJS(this.globalThis)); }, else => {}, } diff --git a/src/bun.js/api/bun/socket/tls_socket_functions.zig b/src/bun.js/api/bun/socket/tls_socket_functions.zig index 233bfaa40f..9b09f9b8a8 100644 --- a/src/bun.js/api/bun/socket/tls_socket_functions.zig +++ b/src/bun.js/api/bun/socket/tls_socket_functions.zig @@ -92,7 +92,7 @@ pub fn setMaxSendFragment(this: *This, globalObject: *JSC.JSGlobalObject, callfr if (!arg.isNumber()) { return globalObject.throw("Expected size to be a number", .{}); } - const size = args.ptr[0].coerceToInt64(globalObject); + const size = try args.ptr[0].coerceToInt64(globalObject); if (size < 1) { return globalObject.throw("Expected size to be greater than 1", .{}); } @@ -245,14 +245,14 @@ pub fn getSharedSigalgs(this: *This, globalObject: *JSC.JSGlobalObject, _: *JSC. 
bun.copy(u8, buffer, sig_with_md); buffer[sig_with_md.len] = '+'; bun.copy(u8, buffer[sig_with_md.len + 1 ..], hash_slice); - array.putIndex(globalObject, @as(u32, @intCast(i)), JSC.ZigString.fromUTF8(buffer).toJS(globalObject)); + try array.putIndex(globalObject, @as(u32, @intCast(i)), JSC.ZigString.fromUTF8(buffer).toJS(globalObject)); } else { const buffer = bun.default_allocator.alloc(u8, sig_with_md.len + 6) catch bun.outOfMemory(); defer bun.default_allocator.free(buffer); bun.copy(u8, buffer, sig_with_md); bun.copy(u8, buffer[sig_with_md.len..], "+UNDEF"); - array.putIndex(globalObject, @as(u32, @intCast(i)), JSC.ZigString.fromUTF8(buffer).toJS(globalObject)); + try array.putIndex(globalObject, @as(u32, @intCast(i)), JSC.ZigString.fromUTF8(buffer).toJS(globalObject)); } } return array; @@ -328,7 +328,7 @@ pub fn exportKeyingMaterial(this: *This, globalObject: *JSC.JSGlobalObject, call return globalObject.throw("Expected length to be a number", .{}); } - const length = length_arg.coerceToInt64(globalObject); + const length = try length_arg.coerceToInt64(globalObject); if (length < 0) { return globalObject.throw("Expected length to be a positive number", .{}); } diff --git a/src/bun.js/api/bun/spawn/stdio.zig b/src/bun.js/api/bun/spawn/stdio.zig index 1d640e532a..f0f1951e20 100644 --- a/src/bun.js/api/bun/spawn/stdio.zig +++ b/src/bun.js/api/bun/spawn/stdio.zig @@ -23,6 +23,7 @@ pub const Stdio = union(enum) { memfd: bun.FileDescriptor, pipe, ipc, + readable_stream: JSC.WebCore.ReadableStream, const log = bun.sys.syslog; @@ -78,6 +79,9 @@ pub const Stdio = union(enum) { .memfd => |fd| { fd.close(); }, + .readable_stream => { + // ReadableStream cleanup is handled by the subprocess + }, else => {}, } } @@ -191,7 +195,7 @@ pub const Stdio = union(enum) { break :brk .{ .buffer = {} }; }, .dup2 => .{ .dup2 = .{ .out = stdio.dup2.out, .to = stdio.dup2.to } }, - .capture, .pipe, .array_buffer => .{ .buffer = {} }, + .capture, .pipe, .array_buffer, .readable_stream => .{ .buffer = {} }, .ipc => .{ .ipc = {} }, .fd => |fd| .{ .pipe = fd }, .memfd => |fd| .{ .pipe = fd }, @@ -244,7 +248,7 @@ pub const Stdio = union(enum) { break :brk .{ .buffer = bun.default_allocator.create(uv.Pipe) catch bun.outOfMemory() }; }, .ipc => .{ .ipc = bun.default_allocator.create(uv.Pipe) catch bun.outOfMemory() }, - .capture, .pipe, .array_buffer => .{ .buffer = bun.default_allocator.create(uv.Pipe) catch bun.outOfMemory() }, + .capture, .pipe, .array_buffer, .readable_stream => .{ .buffer = bun.default_allocator.create(uv.Pipe) catch bun.outOfMemory() }, .fd => |fd| .{ .pipe = fd }, .dup2 => .{ .dup2 = .{ .out = stdio.dup2.out, .to = stdio.dup2.to } }, .path => |pathlike| .{ .path = pathlike.slice() }, @@ -277,13 +281,67 @@ pub const Stdio = union(enum) { pub fn isPiped(self: Stdio) bool { return switch (self) { - .capture, .array_buffer, .blob, .pipe => true, + .capture, .array_buffer, .blob, .pipe, .readable_stream => true, .ipc => Environment.isWindows, else => false, }; } - pub fn extract(out_stdio: *Stdio, globalThis: *JSC.JSGlobalObject, i: i32, value: JSValue) bun.JSError!void { + fn extractBodyValue(out_stdio: *Stdio, globalThis: *JSC.JSGlobalObject, i: i32, body: *JSC.WebCore.Body.Value, is_sync: bool) bun.JSError!void { + body.toBlobIfPossible(); + + if (body.tryUseAsAnyBlob()) |blob| { + return out_stdio.extractBlob(globalThis, blob, i); + } + + switch (body.*) { + .Null, .Empty => { + out_stdio.* = .{ .ignore = {} }; + return; + }, + .Used => { + return globalThis.ERR(.BODY_ALREADY_USED, "Body 
already used", .{}).throw(); + }, + .Error => { + return globalThis.throwValue(body.Error.toJS(globalThis)); + }, + + .Blob, .WTFStringImpl, .InternalBlob => unreachable, // handled above. + .Locked => { + if (is_sync) { + return globalThis.throwInvalidArguments("ReadableStream cannot be used in sync mode", .{}); + } + + switch (i) { + 0 => {}, + 1 => { + return globalThis.throwInvalidArguments("ReadableStream cannot be used for stdout yet. For now, do .stdout", .{}); + }, + 2 => { + return globalThis.throwInvalidArguments("ReadableStream cannot be used for stderr yet. For now, do .stderr", .{}); + }, + else => unreachable, + } + + const stream_value = body.toReadableStream(globalThis); + if (globalThis.hasException()) { + return error.JSError; + } + + const stream = JSC.WebCore.ReadableStream.fromJS(stream_value, globalThis) orelse return globalThis.throwInvalidArguments("Failed to create ReadableStream", .{}); + + if (stream.isDisturbed(globalThis)) { + return globalThis.ERR(.BODY_ALREADY_USED, "ReadableStream has already been used", .{}).throw(); + } + + out_stdio.* = .{ .readable_stream = stream }; + }, + } + + return; + } + + pub fn extract(out_stdio: *Stdio, globalThis: *JSC.JSGlobalObject, i: i32, value: JSValue, is_sync: bool) bun.JSError!void { if (value == .zero) return; if (value.isUndefined()) return; if (value.isNull()) { @@ -346,34 +404,36 @@ pub const Stdio = union(enum) { } else if (value.as(JSC.WebCore.Blob)) |blob| { return out_stdio.extractBlob(globalThis, .{ .Blob = blob.dupe() }, i); } else if (value.as(JSC.WebCore.Request)) |req| { - req.getBodyValue().toBlobIfPossible(); - return out_stdio.extractBlob(globalThis, req.getBodyValue().useAsAnyBlob(), i); - } else if (value.as(JSC.WebCore.Response)) |req| { - req.getBodyValue().toBlobIfPossible(); - return out_stdio.extractBlob(globalThis, req.getBodyValue().useAsAnyBlob(), i); - } else if (JSC.WebCore.ReadableStream.fromJS(value, globalThis)) |req_const| { - var req = req_const; - if (i == 0) { - if (req.toAnyBlob(globalThis)) |blob| { - return out_stdio.extractBlob(globalThis, blob, i); - } + return extractBodyValue(out_stdio, globalThis, i, req.getBodyValue(), is_sync); + } else if (value.as(JSC.WebCore.Response)) |res| { + return extractBodyValue(out_stdio, globalThis, i, res.getBodyValue(), is_sync); + } - switch (req.ptr) { - .File, .Blob => { - globalThis.throwTODO("Support fd/blob backed ReadableStream in spawn stdin. See https://github.com/oven-sh/bun/issues/8049") catch {}; - return error.JSError; - }, - .Direct, .JavaScript, .Bytes => { - // out_stdio.* = .{ .connect = req }; - globalThis.throwTODO("Re-enable ReadableStream support in spawn stdin. 
") catch {}; - return error.JSError; - }, - .Invalid => { - return globalThis.throwInvalidArguments("ReadableStream is in invalid state.", .{}); - }, - } + if (JSC.WebCore.ReadableStream.fromJS(value, globalThis)) |stream_| { + var stream = stream_; + if (stream.toAnyBlob(globalThis)) |blob| { + return out_stdio.extractBlob(globalThis, blob, i); } - } else if (value.asArrayBuffer(globalThis)) |array_buffer| { + + const name: []const u8 = switch (i) { + 0 => "stdin", + 1 => "stdout", + 2 => "stderr", + else => unreachable, + }; + + if (is_sync) { + return globalThis.throwInvalidArguments("'{s}' ReadableStream cannot be used in sync mode", .{name}); + } + + if (stream.isDisturbed(globalThis)) { + return globalThis.ERR(.INVALID_STATE, "'{s}' ReadableStream has already been used", .{name}).throw(); + } + out_stdio.* = .{ .readable_stream = stream }; + return; + } + + if (value.asArrayBuffer(globalThis)) |array_buffer| { // Change in Bun v1.0.34: don't throw for empty ArrayBuffer if (array_buffer.byteSlice().len == 0) { out_stdio.* = .{ .ignore = {} }; diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index fcb4307457..152b658e07 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -56,7 +56,8 @@ pub const Flags = packed struct(u8) { has_stdin_destructor_called: bool = false, finalized: bool = false, deref_on_stdin_destroyed: bool = false, - _: u3 = 0, + is_stdin_a_readable_stream: bool = false, + _: u2 = 0, }; pub const SignalCode = bun.SignalCode; @@ -446,6 +447,7 @@ const Readable = union(enum) { .pipe => Readable{ .pipe = PipeReader.create(event_loop, process, result, max_size) }, .array_buffer, .blob => Output.panic("TODO: implement ArrayBuffer & Blob support in Stdio readable", .{}), .capture => Output.panic("TODO: implement capture support in Stdio readable", .{}), + .readable_stream => Readable{ .ignore = {} }, // ReadableStream is handled separately }; } @@ -608,7 +610,7 @@ pub fn asyncDispose( .result => {}, .err => |err| { // Signal 9 should always be fine, but just in case that somehow fails. 
- return global.throwValue(err.toJSC(global)); + return global.throwValue(err.toJS(global)); }, } @@ -695,7 +697,7 @@ pub fn kill( .result => {}, .err => |err| { // EINVAL or ENOSYS means the signal is not supported in the current platform (most likely unsupported on windows) - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, } @@ -784,14 +786,14 @@ pub fn getKilled(this: *Subprocess, _: *JSGlobalObject) JSValue { pub fn getStdio(this: *Subprocess, global: *JSGlobalObject) bun.JSError!JSValue { const array = try JSValue.createEmptyArray(global, 0); - array.push(global, .null); - array.push(global, .null); // TODO: align this with options - array.push(global, .null); // TODO: align this with options + try array.push(global, .null); + try array.push(global, .null); // TODO: align this with options + try array.push(global, .null); // TODO: align this with options this.observable_getters.insert(.stdio); var pipes = this.stdio_pipes.items; if (this.ipc_data != null) { - array.push(global, .null); + try array.push(global, .null); pipes = pipes[@min(1, pipes.len)..]; } @@ -799,10 +801,10 @@ pub fn getStdio(this: *Subprocess, global: *JSGlobalObject) bun.JSError!JSValue if (Environment.isWindows) { if (item == .buffer) { const fdno: usize = @intFromPtr(item.buffer.fd().cast()); - array.push(global, JSValue.jsNumber(fdno)); + try array.push(global, JSValue.jsNumber(fdno)); } } else { - array.push(global, JSValue.jsNumber(item.cast())); + try array.push(global, JSValue.jsNumber(item.cast())); } } return array; @@ -1021,6 +1023,7 @@ pub const PipeReader = struct { if (Environment.isWindows) { this.reader.source = .{ .pipe = this.stdio_result.buffer }; } + this.reader.setParent(this); return this; } @@ -1047,6 +1050,9 @@ pub const PipeReader = struct { const poll = this.reader.handle.poll; poll.flags.insert(.socket); this.reader.flags.socket = true; + this.reader.flags.nonblocking = true; + this.reader.flags.pollable = true; + poll.flags.insert(.nonblocking); } return .{ .result = {} }; @@ -1265,16 +1271,17 @@ const Writable = union(enum) { pub fn onStart(_: *Writable) void {} pub fn init( - stdio: Stdio, + stdio: *Stdio, event_loop: *JSC.EventLoop, subprocess: *Subprocess, result: StdioResult, + promise_for_stream: *JSC.JSValue, ) !Writable { assertStdioResult(result); if (Environment.isWindows) { - switch (stdio) { - .pipe => { + switch (stdio.*) { + .pipe, .readable_stream => { if (result == .buffer) { const pipe = JSC.WebCore.FileSink.createWithPipe(event_loop, result.buffer); @@ -1283,6 +1290,9 @@ const Writable = union(enum) { .err => |err| { _ = err; // autofix pipe.deref(); + if (stdio.* == .readable_stream) { + stdio.readable_stream.cancel(event_loop.global); + } return error.UnexpectedCreatingStdin; }, } @@ -1292,6 +1302,16 @@ const Writable = union(enum) { subprocess.flags.deref_on_stdin_destroyed = true; subprocess.flags.has_stdin_destructor_called = false; + if (stdio.* == .readable_stream) { + const assign_result = pipe.assignToStream(&stdio.readable_stream, event_loop.global); + if (assign_result.toError()) |err| { + pipe.deref(); + subprocess.deref(); + return event_loop.global.throwValue(err); + } + promise_for_stream.* = assign_result; + } + return Writable{ .pipe = pipe, }; @@ -1328,14 +1348,14 @@ const Writable = union(enum) { } if (comptime Environment.isPosix) { - if (stdio == .pipe) { + if (stdio.* == .pipe) { _ = bun.sys.setNonblocking(result.?); } } - switch (stdio) { + switch (stdio.*) { .dup2 => @panic("TODO dup2 
stdio"), - .pipe => { + .pipe, .readable_stream => { const pipe = JSC.WebCore.FileSink.create(event_loop, result.?); switch (pipe.writer.start(pipe.fd, true)) { @@ -1343,16 +1363,30 @@ const Writable = union(enum) { .err => |err| { _ = err; // autofix pipe.deref(); + if (stdio.* == .readable_stream) { + stdio.readable_stream.cancel(event_loop.global); + } + return error.UnexpectedCreatingStdin; }, } + pipe.writer.handle.poll.flags.insert(.socket); + subprocess.weak_file_sink_stdin_ptr = pipe; subprocess.ref(); subprocess.flags.has_stdin_destructor_called = false; subprocess.flags.deref_on_stdin_destroyed = true; - pipe.writer.handle.poll.flags.insert(.socket); + if (stdio.* == .readable_stream) { + const assign_result = pipe.assignToStream(&stdio.readable_stream, event_loop.global); + if (assign_result.toError()) |err| { + pipe.deref(); + subprocess.deref(); + return event_loop.global.throwValue(err); + } + promise_for_stream.* = assign_result; + } return Writable{ .pipe = pipe, @@ -1403,7 +1437,7 @@ const Writable = union(enum) { // https://github.com/oven-sh/bun/pull/14092 bun.debugAssert(!subprocess.flags.deref_on_stdin_destroyed); const debug_ref_count = if (Environment.isDebug) subprocess.ref_count else 0; - pipe.onAttachedProcessExit(); + pipe.onAttachedProcessExit(&subprocess.process.status); if (Environment.isDebug) { bun.debugAssert(subprocess.ref_count.active_counts == debug_ref_count.active_counts); } @@ -1518,6 +1552,7 @@ pub fn onProcessExit(this: *Subprocess, process: *Process, status: bun.spawn.Sta this.pid_rusage = rusage.*; const is_sync = this.flags.is_sync; this.clearAbortSignal(); + defer this.deref(); defer this.disconnectIPC(true); @@ -1528,7 +1563,7 @@ pub fn onProcessExit(this: *Subprocess, process: *Process, status: bun.spawn.Sta jsc_vm.onSubprocessExit(process); - var stdin: ?*JSC.WebCore.FileSink = this.weak_file_sink_stdin_ptr; + var stdin: ?*JSC.WebCore.FileSink = if (this.stdin == .pipe and this.flags.is_stdin_a_readable_stream) this.stdin.pipe else this.weak_file_sink_stdin_ptr; var existing_stdin_value = JSC.JSValue.zero; if (this_jsvalue != .zero) { if (JSC.Codegen.JSSubprocess.stdinGetCached(this_jsvalue)) |existing_value| { @@ -1538,30 +1573,49 @@ pub fn onProcessExit(this: *Subprocess, process: *Process, status: bun.spawn.Sta stdin = @alignCast(@ptrCast(JSC.WebCore.FileSink.JSSink.fromJS(existing_value))); } - existing_stdin_value = existing_value; + if (!this.flags.is_stdin_a_readable_stream) { + existing_stdin_value = existing_value; + } } } } + // We won't be sending any more data. if (this.stdin == .buffer) { this.stdin.buffer.close(); } - if (this.stdout == .pipe) { - this.stdout.pipe.close(); - } - if (this.stderr == .pipe) { - this.stderr.pipe.close(); - } if (existing_stdin_value != .zero) { JSC.WebCore.FileSink.JSSink.setDestroyCallback(existing_stdin_value, 0); } + if (this.flags.is_sync) { + // This doesn't match Node.js' behavior, but for synchronous + // subprocesses the streams should not keep the timers going. + if (this.stdout == .pipe) { + this.stdout.close(); + } + + if (this.stderr == .pipe) { + this.stderr.close(); + } + } else { + // This matches Node.js behavior. Node calls resume() on the streams. 
+ if (this.stdout == .pipe and !this.stdout.pipe.reader.isDone()) { + this.stdout.pipe.reader.read(); + } + + if (this.stderr == .pipe and !this.stderr.pipe.reader.isDone()) { + this.stderr.pipe.reader.read(); + } + } + if (stdin) |pipe| { this.weak_file_sink_stdin_ptr = null; this.flags.has_stdin_destructor_called = true; + // It is okay if it does call deref() here, as in that case it was truly ref'd. - pipe.onAttachedProcessExit(); + pipe.onAttachedProcessExit(&status); } var did_update_has_pending_activity = false; @@ -1582,7 +1636,7 @@ pub fn onProcessExit(this: *Subprocess, process: *Process, status: bun.spawn.Sta switch (status) { .exited => |exited| promise.asAnyPromise().?.resolve(globalThis, JSValue.jsNumber(exited.code)), - .err => |err| promise.asAnyPromise().?.reject(globalThis, err.toJSC(globalThis)), + .err => |err| promise.asAnyPromise().?.reject(globalThis, err.toJS(globalThis)), .signaled => promise.asAnyPromise().?.resolve(globalThis, JSValue.jsNumber(128 +% @intFromEnum(status.signaled))), else => { // crash in debug mode @@ -1595,7 +1649,7 @@ pub fn onProcessExit(this: *Subprocess, process: *Process, status: bun.spawn.Sta if (consumeOnExitCallback(this_jsvalue, globalThis)) |callback| { const waitpid_value: JSValue = if (status == .err) - status.err.toJSC(globalThis) + status.err.toJS(globalThis) else .js_undefined; @@ -1737,7 +1791,7 @@ pub fn getExited( return JSC.JSPromise.resolvedPromiseValue(globalThis, JSValue.jsNumber(signal.toExitCode() orelse 254)); }, .err => |err| { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJS(globalThis)); }, else => { const promise = JSC.JSPromise.create(globalThis).toJS(); @@ -1827,7 +1881,7 @@ fn getArgv0(globalThis: *JSC.JSGlobalObject, PATH: []const u8, cwd: []const u8, } fn getArgv(globalThis: *JSC.JSGlobalObject, args: JSValue, PATH: []const u8, cwd: []const u8, argv0: *?[*:0]const u8, allocator: std.mem.Allocator, argv: *std.ArrayList(?[*:0]const u8)) bun.JSError!void { - var cmds_array = args.arrayIterator(globalThis); + var cmds_array = try args.arrayIterator(globalThis); // + 1 for argv0 // + 1 for null terminator argv.* = try @TypeOf(argv.*).initCapacity(allocator, cmds_array.len + 2); @@ -1840,12 +1894,12 @@ fn getArgv(globalThis: *JSC.JSGlobalObject, args: JSValue, PATH: []const u8, cwd return globalThis.throwInvalidArguments("cmd must not be empty", .{}); } - const argv0_result = try getArgv0(globalThis, PATH, cwd, argv0.*, cmds_array.next().?, allocator); + const argv0_result = try getArgv0(globalThis, PATH, cwd, argv0.*, (try cmds_array.next()).?, allocator); argv0.* = argv0_result.argv0.ptr; argv.appendAssumeCapacity(argv0_result.arg0.ptr); - while (cmds_array.next()) |value| { + while (try cmds_array.next()) |value| { const arg = try value.toBunString(globalThis); defer arg.deref(); @@ -2031,18 +2085,18 @@ pub fn spawnMaybeSync( if (try args.get(globalThis, "stdio")) |stdio_val| { if (!stdio_val.isEmptyOrUndefinedOrNull()) { if (stdio_val.jsType().isArray()) { - var stdio_iter = stdio_val.arrayIterator(globalThis); + var stdio_iter = try stdio_val.arrayIterator(globalThis); var i: u31 = 0; - while (stdio_iter.next()) |value| : (i += 1) { - try stdio[i].extract(globalThis, i, value); + while (try stdio_iter.next()) |value| : (i += 1) { + try stdio[i].extract(globalThis, i, value, is_sync); if (i == 2) break; } i += 1; - while (stdio_iter.next()) |value| : 
(i += 1) { + while (try stdio_iter.next()) |value| : (i += 1) { var new_item: Stdio = undefined; - try new_item.extract(globalThis, i, value); + try new_item.extract(globalThis, i, value, is_sync); const opt = switch (new_item.asSpawnOption(i)) { .result => |opt| opt, @@ -2061,15 +2115,15 @@ pub fn spawnMaybeSync( } } else { if (try args.get(globalThis, "stdin")) |value| { - try stdio[0].extract(globalThis, 0, value); + try stdio[0].extract(globalThis, 0, value, is_sync); } if (try args.get(globalThis, "stderr")) |value| { - try stdio[2].extract(globalThis, 2, value); + try stdio[2].extract(globalThis, 2, value, is_sync); } if (try args.get(globalThis, "stdout")) |value| { - try stdio[1].extract(globalThis, 1, value); + try stdio[1].extract(globalThis, 1, value, is_sync); } } @@ -2119,7 +2173,7 @@ pub fn spawnMaybeSync( if (try args.get(globalThis, "maxBuffer")) |val| { if (val.isNumber() and val.isFinite()) { // 'Infinity' does not set maxBuffer - const value = val.coerce(i64, globalThis); + const value = try val.coerce(i64, globalThis); if (value > 0) { maxBuffer = value; } @@ -2302,7 +2356,7 @@ pub fn spawnMaybeSync( else => {}, } - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, .result => |result| result, }; @@ -2335,16 +2389,19 @@ pub fn spawnMaybeSync( MaxBuf.createForSubprocess(subprocess, &subprocess.stderr_maxbuf, maxBuffer); MaxBuf.createForSubprocess(subprocess, &subprocess.stdout_maxbuf, maxBuffer); + var promise_for_stream: JSC.JSValue = .zero; + // When run synchronously, subprocess isn't garbage collected subprocess.* = Subprocess{ .globalThis = globalThis, .process = process, .pid_rusage = null, .stdin = Writable.init( - stdio[0], + &stdio[0], loop, subprocess, spawned.stdin, + &promise_for_stream, ) catch { subprocess.deref(); return globalThis.throwOutOfMemory(); @@ -2388,6 +2445,27 @@ pub fn spawnMaybeSync( subprocess.process.setExitHandler(subprocess); + promise_for_stream.ensureStillAlive(); + subprocess.flags.is_stdin_a_readable_stream = promise_for_stream != .zero; + + if (promise_for_stream != .zero and !globalThis.hasException()) { + if (promise_for_stream.toError()) |err| { + _ = globalThis.throwValue(err) catch {}; + } + } + + if (globalThis.hasException()) { + const err = globalThis.takeException(error.JSError); + // Ensure we kill the process so we don't leave things in an unexpected state. + _ = subprocess.tryKill(subprocess.killSignal); + + if (globalThis.hasException()) { + return error.JSError; + } + + return globalThis.throwValue(err); + } + var posix_ipc_info: if (Environment.isPosix) IPC.Socket else void = undefined; if (Environment.isPosix and !is_sync) { if (maybe_ipc_mode) |mode| { @@ -2414,14 +2492,14 @@ pub fn spawnMaybeSync( subprocess.stdio_pipes.items[@intCast(ipc_channel)].buffer, ).asErr()) |err| { subprocess.deref(); - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); } subprocess.stdio_pipes.items[@intCast(ipc_channel)] = .unavailable; } ipc_data.writeVersionPacket(globalThis); } - if (subprocess.stdin == .pipe) { + if (subprocess.stdin == .pipe and promise_for_stream == .zero) { subprocess.stdin.pipe.signal = JSC.WebCore.streams.Signal.init(&subprocess.stdin); } @@ -2433,6 +2511,13 @@ pub fn spawnMaybeSync( var send_exit_notification = false; + // This must go before other things happen so that the exit handler is registered before onProcessExit can potentially be called. 
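The exception-handling block above follows a simple rule: once the child has been spawned, any failure while wiring up stdio must kill the process before rethrowing, so nothing is left running in an unexpected state. A toy sketch of that shape using `errdefer` (stand-in types, not the real `Subprocess` API):

```zig
const std = @import("std");

const Child = struct {
    alive: bool = false,
    fn spawn(self: *Child) void {
        self.alive = true;
    }
    fn kill(self: *Child) void {
        self.alive = false;
    }
};

// Stand-in for spawnMaybeSync's tail: if stream setup fails after the child
// exists, errdefer guarantees the kill runs before the error propagates.
fn spawnAndWire(child: *Child, fail_setup: bool) !void {
    child.spawn();
    errdefer child.kill(); // mirrors tryKill(killSignal) before rethrowing
    if (fail_setup) return error.StreamSetupFailed;
}

pub fn main() void {
    var child = Child{};
    spawnAndWire(&child, true) catch {};
    std.debug.assert(!child.alive);
}
```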
+ if (timeout) |timeout_val| { + subprocess.event_loop_timer.next = bun.timespec.msFromNow(timeout_val); + globalThis.bunVM().timer.insert(&subprocess.event_loop_timer); + subprocess.setEventLoopTimerRefd(true); + } + if (comptime !is_sync) { bun.debugAssert(out != .zero); @@ -2446,6 +2531,10 @@ pub fn spawnMaybeSync( JSC.Codegen.JSSubprocess.ipcCallbackSetCached(out, globalThis, ipc_callback); } + if (stdio[0] == .readable_stream) { + JSC.Codegen.JSSubprocess.stdinSetCached(out, globalThis, stdio[0].readable_stream.value); + } + switch (subprocess.process.watch()) { .result => {}, .err => { @@ -2489,12 +2578,6 @@ pub fn spawnMaybeSync( should_close_memfd = false; - if (timeout) |timeout_val| { - subprocess.event_loop_timer.next = bun.timespec.msFromNow(timeout_val); - globalThis.bunVM().timer.insert(&subprocess.event_loop_timer); - subprocess.setEventLoopTimerRefd(true); - } - if (comptime !is_sync) { // Once everything is set up, we can add the abort listener // Adding the abort listener may call the onAbortSignal callback immediately if it was already aborted diff --git a/src/bun.js/api/bun/udp_socket.zig b/src/bun.js/api/bun/udp_socket.zig index ca7f196f4f..d4ae3e6d8a 100644 --- a/src/bun.js/api/bun/udp_socket.zig +++ b/src/bun.js/api/bun/udp_socket.zig @@ -106,7 +106,7 @@ fn onData(socket: *uws.udp.Socket, buf: *uws.udp.PacketBuffer, packets: c_int) c _ = callback.call(globalThis, udpSocket.thisValue, &.{ udpSocket.thisValue, - udpSocket.config.binary_type.toJS(slice, globalThis), + udpSocket.config.binary_type.toJS(slice, globalThis) catch return, // TODO: properly propagate exception upwards JSC.jsNumber(port), hostname_string.transferToJS(globalThis), }) catch |err| { @@ -158,7 +158,7 @@ pub const UDPSocketConfig = struct { const port: u16 = brk: { if (try options.getTruthy(globalThis, "port")) |value| { - const number = value.coerceToInt32(globalThis); + const number = try value.coerceToInt32(globalThis); if (number < 0 or number > 0xffff) { return globalThis.throwInvalidArguments("Expected \"port\" to be an integer between 0 and 65535", .{}); } @@ -199,7 +199,7 @@ pub const UDPSocketConfig = struct { if (!value.isCell() or !value.isCallable()) { return globalThis.throwInvalidArguments("Expected \"socket.{s}\" to be a function", .{handler.@"0"}); } - @field(config, handler.@"1") = value; + @field(config, handler.@"1") = value.withAsyncContextIfNeeded(globalThis); } } } @@ -228,7 +228,7 @@ pub const UDPSocketConfig = struct { const connect_port_js = try connect.getTruthy(globalThis, "port") orelse { return globalThis.throwInvalidArguments("Expected \"connect.port\" to be an integer", .{}); }; - const connect_port = connect_port_js.coerceToInt32(globalThis); + const connect_port = try connect_port_js.coerceToInt32(globalThis); const str = try connect_host_js.toBunString(globalThis); defer str.deref(); @@ -354,11 +354,11 @@ pub const UDPSocket = struct { const ret = this.socket.connect(connect.address, connect.port); if (ret != 0) { if (JSC.Maybe(void).errnoSys(ret, .connect)) |sys_err| { - return globalThis.throwValue(sys_err.toJS(globalThis)); + return globalThis.throwValue(try sys_err.toJS(globalThis)); } if (bun.c_ares.Error.initEAI(ret)) |eai_err| { - return globalThis.throwValue(eai_err.toJS(globalThis)); + return globalThis.throwValue(eai_err.toJSWithSyscallAndHostname(globalThis, "connect", connect.address)); } } this.connect_info = .{ .port = connect.port }; @@ -380,7 +380,7 @@ pub const UDPSocket = struct { const globalThis = this.globalThis; const vm = globalThis.bunVM(); 
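Both `Handlers.withAsyncContextIfNeeded` (earlier in this diff) and the UDP handler loop above lean on the same comptime trick: iterate a tuple of field names with `inline for` and rewrite each field through `@field`. A minimal standalone sketch, where `wrap` stands in for `value.withAsyncContextIfNeeded(globalObject)` and the integer fields stand in for JSValues:

```zig
const std = @import("std");

const Handlers = struct {
    onOpen: u32 = 0,
    onClose: u32 = 0,
    onData: u32 = 0,

    // Visit each named callback field at comptime and rewrite it in place,
    // mirroring how every non-zero handler gets wrapped for async context.
    fn wrapAll(this: *Handlers) void {
        inline for (.{ "onOpen", "onClose", "onData" }) |field| {
            const value = @field(this, field);
            if (value != 0) {
                @field(this, field) = wrap(value);
            }
        }
    }

    fn wrap(value: u32) u32 {
        return value + 100; // stand-in for withAsyncContextIfNeeded
    }
};

pub fn main() void {
    var handlers = Handlers{ .onOpen = 1 };
    handlers.wrapAll();
    std.debug.assert(handlers.onOpen == 101 and handlers.onClose == 0);
}
```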
- if (err.isTerminationException(vm.jsc)) { + if (err.isTerminationException()) { return; } if (callback == .zero) { @@ -393,7 +393,7 @@ pub const UDPSocket = struct { pub fn setBroadcast(this: *This, globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSValue { if (this.closed) { - return globalThis.throwValue(bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); + return globalThis.throwValue(try bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); } const arguments = callframe.arguments(); @@ -405,7 +405,7 @@ pub const UDPSocket = struct { const res = this.socket.setBroadcast(enabled); if (getUSError(res, .setsockopt, true)) |err| { - return globalThis.throwValue(err.toJS(globalThis)); + return globalThis.throwValue(try err.toJS(globalThis)); } return arguments[0]; @@ -413,7 +413,7 @@ pub const UDPSocket = struct { pub fn setMulticastLoopback(this: *This, globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSValue { if (this.closed) { - return globalThis.throwValue(bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); + return globalThis.throwValue(try bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); } const arguments = callframe.arguments(); @@ -425,7 +425,7 @@ pub const UDPSocket = struct { const res = this.socket.setMulticastLoopback(enabled); if (getUSError(res, .setsockopt, true)) |err| { - return globalThis.throwValue(err.toJS(globalThis)); + return globalThis.throwValue(try err.toJS(globalThis)); } return arguments[0]; @@ -433,7 +433,7 @@ pub const UDPSocket = struct { fn setMembership(this: *This, globalThis: *JSGlobalObject, callframe: *CallFrame, drop: bool) bun.JSError!JSValue { if (this.closed) { - return globalThis.throwValue(bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); + return globalThis.throwValue(try bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); } const arguments = callframe.arguments(); @@ -442,13 +442,13 @@ pub const UDPSocket = struct { } var addr = std.mem.zeroes(std.posix.sockaddr.storage); - if (!parseAddr(this, globalThis, JSC.jsNumber(0), arguments[0], &addr)) { - return globalThis.throwValue(bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.INVAL))), .setsockopt).?.toJS(globalThis)); + if (!try parseAddr(this, globalThis, JSC.jsNumber(0), arguments[0], &addr)) { + return globalThis.throwValue(try bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.INVAL))), .setsockopt).?.toJS(globalThis)); } var interface = std.mem.zeroes(std.posix.sockaddr.storage); - const res = if (arguments.len > 1 and parseAddr(this, globalThis, JSC.jsNumber(0), arguments[1], &interface)) blk: { + const res = if (arguments.len > 1 and try parseAddr(this, globalThis, JSC.jsNumber(0), arguments[1], &interface)) blk: { if (addr.family != interface.family) { return globalThis.throwInvalidArguments("Family mismatch between address and interface", .{}); } @@ -456,7 +456,7 @@ pub const UDPSocket = struct { } else this.socket.setMembership(&addr, null, drop); if (getUSError(res, .setsockopt, true)) |err| { - return globalThis.throwValue(err.toJS(globalThis)); + return globalThis.throwValue(try err.toJS(globalThis)); } return .true; @@ -472,7 +472,7 
@@ pub const UDPSocket = struct { fn setSourceSpecificMembership(this: *This, globalThis: *JSGlobalObject, callframe: *CallFrame, drop: bool) bun.JSError!JSValue { if (this.closed) { - return globalThis.throwValue(bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); + return globalThis.throwValue(try bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); } const arguments = callframe.arguments(); @@ -481,13 +481,13 @@ pub const UDPSocket = struct { } var source_addr: std.posix.sockaddr.storage = undefined; - if (!parseAddr(this, globalThis, JSC.jsNumber(0), arguments[0], &source_addr)) { - return globalThis.throwValue(bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.INVAL))), .setsockopt).?.toJS(globalThis)); + if (!try parseAddr(this, globalThis, JSC.jsNumber(0), arguments[0], &source_addr)) { + return globalThis.throwValue(try bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.INVAL))), .setsockopt).?.toJS(globalThis)); } var group_addr: std.posix.sockaddr.storage = undefined; - if (!parseAddr(this, globalThis, JSC.jsNumber(0), arguments[1], &group_addr)) { - return globalThis.throwValue(bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.INVAL))), .setsockopt).?.toJS(globalThis)); + if (!try parseAddr(this, globalThis, JSC.jsNumber(0), arguments[1], &group_addr)) { + return globalThis.throwValue(try bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.INVAL))), .setsockopt).?.toJS(globalThis)); } if (source_addr.family != group_addr.family) { @@ -496,7 +496,7 @@ pub const UDPSocket = struct { var interface: std.posix.sockaddr.storage = undefined; - const res = if (arguments.len > 2 and parseAddr(this, globalThis, JSC.jsNumber(0), arguments[2], &interface)) blk: { + const res = if (arguments.len > 2 and try parseAddr(this, globalThis, JSC.jsNumber(0), arguments[2], &interface)) blk: { if (source_addr.family != interface.family) { return globalThis.throwInvalidArguments("Family mismatch among source, group and interface addresses", .{}); } @@ -504,7 +504,7 @@ pub const UDPSocket = struct { } else this.socket.setSourceSpecificMembership(&source_addr, &group_addr, null, drop); if (getUSError(res, .setsockopt, true)) |err| { - return globalThis.throwValue(err.toJS(globalThis)); + return globalThis.throwValue(try err.toJS(globalThis)); } return .true; @@ -520,7 +520,7 @@ pub const UDPSocket = struct { pub fn setMulticastInterface(this: *This, globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSValue { if (this.closed) { - return globalThis.throwValue(bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); + return globalThis.throwValue(try bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); } const arguments = callframe.arguments(); @@ -530,14 +530,14 @@ pub const UDPSocket = struct { var addr: std.posix.sockaddr.storage = undefined; - if (!parseAddr(this, globalThis, JSC.jsNumber(0), arguments[0], &addr)) { + if (!try parseAddr(this, globalThis, JSC.jsNumber(0), arguments[0], &addr)) { return .false; } const res = this.socket.setMulticastInterface(&addr); if (getUSError(res, .setsockopt, true)) |err| { - return globalThis.throwValue(err.toJS(globalThis)); + return globalThis.throwValue(try err.toJS(globalThis)); } return .true; @@ -576,7 +576,7 @@ pub const 
UDPSocket = struct { fn setAnyTTL(this: *This, globalThis: *JSGlobalObject, callframe: *CallFrame, comptime function: fn (*uws.udp.Socket, i32) c_int) bun.JSError!JSValue { if (this.closed) { - return globalThis.throwValue(bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); + return globalThis.throwValue(try bun.JSC.Maybe(void).errnoSys(@as(i32, @intCast(@intFromEnum(std.posix.E.BADF))), .setsockopt).?.toJS(globalThis)); } const arguments = callframe.arguments(); @@ -584,11 +584,11 @@ pub const UDPSocket = struct { return globalThis.throwInvalidArguments("Expected 1 argument, got {}", .{arguments.len}); } - const ttl = arguments[0].coerceToInt32(globalThis); + const ttl = try arguments[0].coerceToInt32(globalThis); const res = function(this.socket, ttl); if (getUSError(res, .setsockopt, true)) |err| { - return globalThis.throwValue(err.toJS(globalThis)); + return globalThis.throwValue(try err.toJS(globalThis)); } return JSValue.jsNumber(ttl); @@ -608,7 +608,7 @@ pub const UDPSocket = struct { return globalThis.throwInvalidArgumentType("sendMany", "first argument", "array"); } - const array_len = arg.getLength(globalThis); + const array_len = try arg.getLength(globalThis); if (this.connect_info == null and array_len % 3 != 0) { return globalThis.throwInvalidArguments("Expected 3 arguments for each packet", .{}); } @@ -624,11 +624,11 @@ pub const UDPSocket = struct { var addr_ptrs = alloc.alloc(?*const anyopaque, len) catch bun.outOfMemory(); var addrs = alloc.alloc(std.posix.sockaddr.storage, len) catch bun.outOfMemory(); - var iter = arg.arrayIterator(globalThis); + var iter = try arg.arrayIterator(globalThis); var i: u16 = 0; var port: JSValue = .zero; - while (iter.next()) |val| : (i += 1) { + while (try iter.next()) |val| : (i += 1) { if (i >= array_len) { return globalThis.throwInvalidArguments("Mismatch between array length property and number of items", .{}); } @@ -655,7 +655,7 @@ pub const UDPSocket = struct { continue; } if (i % 3 == 2) { - if (!this.parseAddr(globalThis, port, val, &addrs[slice_idx])) { + if (!try this.parseAddr(globalThis, port, val, &addrs[slice_idx])) { return globalThis.throwInvalidArguments("Invalid address", .{}); } addr_ptrs[slice_idx] = &addrs[slice_idx]; @@ -666,7 +666,7 @@ pub const UDPSocket = struct { } const res = this.socket.send(payloads, lens, addr_ptrs); if (getUSError(res, .send, true)) |err| { - return globalThis.throwValue(err.toJS(globalThis)); + return globalThis.throwValue(try err.toJS(globalThis)); } return JSValue.jsNumber(res); } @@ -713,7 +713,7 @@ pub const UDPSocket = struct { var addr: std.posix.sockaddr.storage = std.mem.zeroes(std.posix.sockaddr.storage); const addr_ptr = brk: { if (dst) |dest| { - if (!this.parseAddr(globalThis, dest.port, dest.address, &addr)) { + if (!try this.parseAddr(globalThis, dest.port, dest.address, &addr)) { return globalThis.throwInvalidArguments("Invalid address", .{}); } break :brk &addr; @@ -724,25 +724,19 @@ pub const UDPSocket = struct { const res = this.socket.send(&.{payload.ptr}, &.{payload.len}, &.{addr_ptr}); if (getUSError(res, .send, true)) |err| { - return globalThis.throwValue(err.toJS(globalThis)); + return globalThis.throwValue(try err.toJS(globalThis)); } return JSValue.jsBoolean(res > 0); } - fn parseAddr( - this: *This, - globalThis: *JSGlobalObject, - port_val: JSValue, - address_val: JSValue, - storage: *std.posix.sockaddr.storage, - ) bool { + fn parseAddr(this: *This, globalThis: *JSGlobalObject, port_val: JSValue, 
address_val: JSValue, storage: *std.posix.sockaddr.storage) bun.JSError!bool { _ = this; - const number = port_val.coerceToInt32(globalThis); + const number = try port_val.coerceToInt32(globalThis); const port: u16 = if (number < 1 or number > 0xffff) 0 else @intCast(number); - const str = address_val.toBunString(globalThis) catch @panic("unexpected exception"); + const str = try address_val.toBunString(globalThis); defer str.deref(); - const address_slice = str.toOwnedSliceZ(default_allocator) catch bun.outOfMemory(); + const address_slice = try str.toOwnedSliceZ(default_allocator); defer default_allocator.free(address_slice); var addr4: *std.posix.sockaddr.in = @ptrCast(storage); diff --git a/src/bun.js/api/bun/x509.zig b/src/bun.js/api/bun/x509.zig index c84e6ac0e5..af3fedbb26 100644 --- a/src/bun.js/api/bun/x509.zig +++ b/src/bun.js/api/bun/x509.zig @@ -45,7 +45,7 @@ pub inline fn isSafeAltName(name: []const u8, utf8: bool) bool { } pub fn toJS(cert: *BoringSSL.X509, globalObject: *JSGlobalObject) bun.JSError!JSValue { - return Bun__X509__toJSLegacyEncoding(cert, globalObject); + return bun.jsc.fromJSHostCall(globalObject, @src(), Bun__X509__toJSLegacyEncoding, .{ cert, globalObject }); } pub fn toJSObject(cert: *BoringSSL.X509, globalObject: *JSGlobalObject) bun.JSError!JSValue { diff --git a/src/bun.js/api/crypto/CryptoHasher.zig b/src/bun.js/api/crypto/CryptoHasher.zig index f7bd57e3ee..3f4e97e898 100644 --- a/src/bun.js/api/crypto/CryptoHasher.zig +++ b/src/bun.js/api/crypto/CryptoHasher.zig @@ -839,7 +839,7 @@ fn StaticCryptoHasher(comptime Hasher: type, comptime name: [:0]const u8) type { } } - fn digestToEncoding(this: *@This(), globalThis: *JSGlobalObject, encoding: JSC.Node.Encoding) JSC.JSValue { + fn digestToEncoding(this: *@This(), globalThis: *JSGlobalObject, encoding: JSC.Node.Encoding) bun.JSError!JSC.JSValue { var output_digest_buf: Hasher.Digest = comptime brk: { var bytes: Hasher.Digest = undefined; var i: usize = 0; diff --git a/src/bun.js/api/crypto/PBKDF2.zig b/src/bun.js/api/crypto/PBKDF2.zig index 101fa7a512..e4764d2aba 100644 --- a/src/bun.js/api/crypto/PBKDF2.zig +++ b/src/bun.js/api/crypto/PBKDF2.zig @@ -76,11 +76,6 @@ pub const Job = struct { const output_slice = this.output; assert(output_slice.len == @as(usize, @intCast(this.pbkdf2.length))); const buffer_value = JSC.JSValue.createBuffer(globalThis, output_slice, bun.default_allocator); - if (buffer_value == .zero) { - promise.reject(globalThis, ZigString.init("Failed to create buffer").toErrorInstance(globalThis)); - return; - } - this.output = &[_]u8{}; promise.resolve(globalThis, buffer_value); } @@ -149,7 +144,7 @@ pub fn fromJS(globalThis: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame, is_asy return globalThis.throwInvalidArgumentTypeValue("iterations", "number", arg2); } - const iteration_count = arg2.coerce(i64, globalThis); + const iteration_count = try arg2.coerce(i64, globalThis); if (!globalThis.hasException() and (iteration_count < 1 or iteration_count > std.math.maxInt(i32))) { return globalThis.throwRangeError(iteration_count, .{ .field_name = "iterations", .min = 1, .max = std.math.maxInt(i32) + 1 }); diff --git a/src/bun.js/api/crypto/PasswordObject.zig b/src/bun.js/api/crypto/PasswordObject.zig index 559a4e3011..273f768be9 100644 --- a/src/bun.js/api/crypto/PasswordObject.zig +++ b/src/bun.js/api/crypto/PasswordObject.zig @@ -41,7 +41,7 @@ pub const PasswordObject = struct { return globalObject.throwInvalidArgumentType("hash", "cost", "number"); } - const rounds = rounds_value.coerce(i32, 
globalObject); + const rounds = try rounds_value.coerce(i32, globalObject); if (rounds < 4 or rounds > 31) { return globalObject.throwInvalidArguments("Rounds must be between 4 and 31", .{}); @@ -60,7 +60,7 @@ pub const PasswordObject = struct { return globalObject.throwInvalidArgumentType("hash", "timeCost", "number"); } - const time_cost = time_value.coerce(i32, globalObject); + const time_cost = try time_value.coerce(i32, globalObject); if (time_cost < 1) { return globalObject.throwInvalidArguments("Time cost must be greater than 0", .{}); @@ -74,7 +74,7 @@ pub const PasswordObject = struct { return globalObject.throwInvalidArgumentType("hash", "memoryCost", "number"); } - const memory_cost = memory_value.coerce(i32, globalObject); + const memory_cost = try memory_value.coerce(i32, globalObject); if (memory_cost < 1) { return globalObject.throwInvalidArguments("Memory cost must be greater than 0", .{}); diff --git a/src/bun.js/api/ffi.zig b/src/bun.js/api/ffi.zig index ff6e0075ad..406ecf077c 100644 --- a/src/bun.js/api/ffi.zig +++ b/src/bun.js/api/ffi.zig @@ -538,10 +538,10 @@ pub const FFI = struct { } pub fn fromJSArray(globalThis: *JSC.JSGlobalObject, value: JSC.JSValue, comptime property: []const u8) bun.JSError!StringArray { - var iter = value.arrayIterator(globalThis); + var iter = try value.arrayIterator(globalThis); var items = std.ArrayList([:0]const u8).init(bun.default_allocator); - while (iter.next()) |val| { + while (try iter.next()) |val| { if (!val.isString()) { for (items.items) |item| { bun.default_allocator.free(@constCast(item)); @@ -595,7 +595,7 @@ pub const FFI = struct { } } - const symbols_object: JSValue = object.getOwn(globalThis, "symbols") orelse .js_undefined; + const symbols_object: JSValue = try object.getOwn(globalThis, "symbols") orelse .js_undefined; if (!globalThis.hasException() and (symbols_object == .zero or !symbols_object.isObject())) { return globalThis.throwInvalidArgumentTypeValue("symbols", "object", symbols_object); } @@ -615,7 +615,7 @@ pub const FFI = struct { return globalThis.throw("Expected at least one exported symbol", .{}); } - if (object.getOwn(globalThis, "library")) |library_value| { + if (try object.getOwn(globalThis, "library")) |library_value| { compile_c.libraries = try StringArray.fromJS(globalThis, library_value, "library"); } @@ -625,13 +625,13 @@ pub const FFI = struct { if (try object.getTruthy(globalThis, "flags")) |flags_value| { if (flags_value.isArray()) { - var iter = flags_value.arrayIterator(globalThis); + var iter = try flags_value.arrayIterator(globalThis); var flags = std.ArrayList(u8).init(allocator); defer flags.deinit(); flags.appendSlice(CompileC.default_tcc_options) catch bun.outOfMemory(); - while (iter.next()) |value| { + while (try iter.next()) |value| { if (!value.isString()) { return globalThis.throwInvalidArgumentTypeValue("flags", "array of strings", value); } @@ -698,11 +698,11 @@ pub const FFI = struct { return error.JSError; } - if (object.getOwn(globalThis, "source")) |source_value| { + if (try object.getOwn(globalThis, "source")) |source_value| { if (source_value.isArray()) { compile_c.source = .{ .files = .{} }; - var iter = source_value.arrayIterator(globalThis); - while (iter.next()) |value| { + var iter = try source_value.arrayIterator(globalThis); + while (try iter.next()) |value| { if (!value.isString()) { return globalThis.throwInvalidArgumentTypeValue("source", "array of strings", value); } @@ -810,7 +810,7 @@ pub const FFI = struct { return .js_undefined; } - pub fn callback(globalThis: 
*JSGlobalObject, interface: JSC.JSValue, js_callback: JSC.JSValue) JSValue { + pub fn callback(globalThis: *JSGlobalObject, interface: JSC.JSValue, js_callback: JSC.JSValue) bun.JSError!JSValue { JSC.markBinding(@src()); if (!interface.isObject()) { return globalThis.toInvalidArguments("Expected object", .{}); @@ -1249,15 +1249,15 @@ pub const FFI = struct { var abi_types = std.ArrayListUnmanaged(ABIType){}; - if (value.getOwn(global, "args")) |args| { + if (try value.getOwn(global, "args")) |args| { if (args.isEmptyOrUndefinedOrNull() or !args.jsType().isArray()) { return ZigString.static("Expected an object with \"args\" as an array").toErrorInstance(global); } - var array = args.arrayIterator(global); + var array = try args.arrayIterator(global); try abi_types.ensureTotalCapacityPrecise(allocator, array.len); - while (array.next()) |val| { + while (try array.next()) |val| { if (val.isEmptyOrUndefinedOrNull()) { abi_types.clearAndFree(allocator); return ZigString.static("param must be a string (type name) or number").toErrorInstance(global); diff --git a/src/bun.js/api/filesystem_router.zig b/src/bun.js/api/filesystem_router.zig index a0e4894a62..eafd80a9c5 100644 --- a/src/bun.js/api/filesystem_router.zig +++ b/src/bun.js/api/filesystem_router.zig @@ -14,7 +14,7 @@ const JSGlobalObject = JSC.JSGlobalObject; const strings = bun.strings; const Request = WebCore.Request; const Environment = bun.Environment; -const URLPath = @import("../../http/url_path.zig"); +const URLPath = @import("../../http/URLPath.zig"); const URL = @import("../../url.zig").URL; const Log = bun.logger; const Resolver = @import("../../resolver/resolver.zig").Resolver; @@ -99,16 +99,16 @@ pub const FileSystemRouter = struct { return globalThis.throwInvalidArguments("Expected fileExtensions to be an Array", .{}); } - var iter = file_extensions.arrayIterator(globalThis); + var iter = try file_extensions.arrayIterator(globalThis); extensions.ensureTotalCapacityPrecise(iter.len) catch unreachable; - while (iter.next()) |val| { + while (try iter.next()) |val| { if (!val.isString()) { origin_str.deinit(); arena.deinit(); globalThis.allocator().destroy(arena); return globalThis.throwInvalidArguments("Expected fileExtensions to be an Array of strings", .{}); } - if (val.getLength(globalThis) == 0) continue; + if (try val.getLength(globalThis) == 0) continue; extensions.appendAssumeCapacity(((try val.toSlice(globalThis, allocator)).clone(allocator) catch unreachable).slice()[1..]); } } diff --git a/src/bun.js/api/glob.zig b/src/bun.js/api/glob.zig index 8d49134623..1a6f223b81 100644 --- a/src/bun.js/api/glob.zig +++ b/src/bun.js/api/glob.zig @@ -58,7 +58,7 @@ const ScanOpts = struct { const cwd = switch (bun.sys.getcwd((&path_buf))) { .result => |cwd| cwd, .err => |err| { - const errJs = err.toJSC(globalThis); + const errJs = err.toJS(globalThis); return globalThis.throwValue(errJs); }, }; @@ -150,9 +150,9 @@ pub const WalkTask = struct { syscall: Syscall.Error, unknown: anyerror, - pub fn toJSC(this: Err, globalThis: *JSGlobalObject) JSValue { + pub fn toJS(this: Err, globalThis: *JSGlobalObject) JSValue { return switch (this) { - .syscall => |err| err.toJSC(globalThis), + .syscall => |err| err.toJS(globalThis), .unknown => |err| ZigString.fromBytes(@errorName(err)).toJS(globalThis), }; } @@ -194,7 +194,7 @@ pub const WalkTask = struct { defer this.deinit(); if (this.err) |err| { - const errJs = err.toJSC(this.global); + const errJs = err.toJS(this.global); promise.reject(this.global, errJs); return; } @@ -252,7 +252,7 @@ fn 
makeGlobWalker( only_files, )) { .err => |err| { - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, else => {}, } @@ -269,7 +269,7 @@ fn makeGlobWalker( only_files, )) { .err => |err| { - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, else => {}, } @@ -360,7 +360,7 @@ pub fn __scanSync(this: *Glob, globalThis: *JSGlobalObject, callframe: *JSC.Call switch (try globWalker.walk()) { .err => |err| { - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, .result => {}, } diff --git a/src/bun.js/api/html_rewriter.zig b/src/bun.js/api/html_rewriter.zig index c5ef322882..67fd3d3b8c 100644 --- a/src/bun.js/api/html_rewriter.zig +++ b/src/bun.js/api/html_rewriter.zig @@ -923,7 +923,7 @@ fn HandlerCallback( this.global.bunVM().waitForPromise(promise); const fail = promise.status(this.global.vm()) == .rejected; if (fail) { - _ = this.global.bunVM().unhandledRejection(this.global, promise.result(this.global.vm()), promise.asValue()); + this.global.bunVM().unhandledRejection(this.global, promise.result(this.global.vm()), promise.asValue()); } return fail; } diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 50b3bbbaf0..5e3d78dbb5 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -118,7 +118,121 @@ pub const AnyRoute = union(enum) { } } - pub fn htmlRouteFromJS(argument: JSC.JSValue, init_ctx: *ServerInitContext) ?AnyRoute { + fn bundledHTMLManifestItemFromJS(argument: JSC.JSValue, index_path: []const u8, init_ctx: *ServerInitContext) bun.JSError!?AnyRoute { + if (!argument.isObject()) return null; + + const path_string = try bun.String.fromJS(try argument.get(init_ctx.global, "path") orelse return null, init_ctx.global); + defer path_string.deref(); + var path = JSC.Node.PathOrFileDescriptor{ .path = try JSC.Node.PathLike.fromBunString(init_ctx.global, path_string, false, bun.default_allocator) }; + defer path.deinit(); + + // Construct the route by stripping paths above the root. 
+ // + // "./index-abc.js" -> "/index-abc.js" + // "../index-abc.js" -> "/index-abc.js" + // "/index-abc.js" -> "/index-abc.js" + // "index-abc.js" -> "/index-abc.js" + // + const cwd = if (bun.StandaloneModuleGraph.isBunStandaloneFilePath(path.path.slice())) + bun.StandaloneModuleGraph.targetBasePublicPath(bun.Environment.os, "root/") + else + bun.fs.FileSystem.instance.top_level_dir; + + const abs_path = bun.fs.FileSystem.instance.abs(&[_][]const u8{path.path.slice()}); + var relative_path = bun.fs.FileSystem.instance.relative(cwd, abs_path); + + if (strings.hasPrefixComptime(relative_path, "./")) { + relative_path = relative_path[2..]; + } else if (strings.hasPrefixComptime(relative_path, "../")) { + while (strings.hasPrefixComptime(relative_path, "../")) { + relative_path = relative_path[3..]; + } + } + const is_index_route = bun.strings.eql(path.path.slice(), index_path); + var builder = std.ArrayList(u8).init(bun.default_allocator); + defer builder.deinit(); + if (!strings.hasPrefixComptime(relative_path, "/")) { + try builder.append('/'); + } + + try builder.appendSlice(relative_path); + + const fetch_headers = JSC.WebCore.FetchHeaders.createFromJS(init_ctx.global, try argument.get(init_ctx.global, "headers") orelse return null); + defer if (fetch_headers) |headers| headers.deref(); + if (init_ctx.global.hasException()) return error.JSError; + + const route = try fromOptions(init_ctx.global, fetch_headers, &path); + + if (is_index_route) { + return route; + } + + var methods = HTTP.Method.Optional{ .method = .initEmpty() }; + methods.insert(.GET); + methods.insert(.HEAD); + + try init_ctx.user_routes.append(.{ + .path = try builder.toOwnedSlice(), + .route = route, + .method = methods, + }); + return null; + } + + /// This is the JS representation of an HTMLImportManifest + /// + /// See ./src/bundler/HTMLImportManifest.zig + fn bundledHTMLManifestFromJS(argument: JSC.JSValue, init_ctx: *ServerInitContext) bun.JSError!?AnyRoute { + if (!argument.isObject()) return null; + + const index = try argument.getOptional(init_ctx.global, "index", ZigString.Slice) orelse return null; + defer index.deinit(); + + const files = try argument.getArray(init_ctx.global, "files") orelse return null; + var iter = try files.arrayIterator(init_ctx.global); + var html_route: ?AnyRoute = null; + while (try iter.next()) |file_entry| { + if (try bundledHTMLManifestItemFromJS(file_entry, index.slice(), init_ctx)) |item| { + html_route = item; + } + } + + return html_route; + } + + pub fn fromOptions(global: *JSC.JSGlobalObject, headers: ?*JSC.WebCore.FetchHeaders, path: *JSC.Node.PathOrFileDescriptor) !AnyRoute { + // The file/static route doesn't ref it. + var blob = Blob.findOrCreateFileFromPath(path, global, false); + + if (blob.needsToReadFile()) { + // Throw a more helpful error upfront if the file does not exist. + // + // In production, you do NOT want to find out that all the assets + // are 404'ing when the user goes to the route. You want to find + // that out immediately so that the health check on startup fails + // and the process exits with a non-zero status code. + if (blob.store) |store| { + if (store.getPath()) |store_path| { + switch (bun.sys.existsAtType(bun.FD.cwd(), store_path)) { + .result => |file_type| { + if (file_type == .directory) { + return global.throwInvalidArguments("Bundled file {} cannot be a directory. 
You may want to configure --asset-naming or `naming` when bundling.", .{bun.fmt.quote(store_path)}); + } + }, + .err => { + return global.throwInvalidArguments("Bundled file {} not found. You may want to configure --asset-naming or `naming` when bundling.", .{bun.fmt.quote(store_path)}); + }, + } + } + } + + return AnyRoute{ .file = FileRoute.initFromBlob(blob, .{ .server = null, .headers = headers }) }; + } + + return AnyRoute{ .static = StaticRoute.initFromAnyBlob(&.{ .Blob = blob }, .{ .server = null, .headers = headers }) }; + } + + pub fn htmlRouteFromJS(argument: JSC.JSValue, init_ctx: *ServerInitContext) bun.JSError!?AnyRoute { if (argument.as(HTMLBundle)) |html_bundle| { const entry = init_ctx.dedupe_html_bundle_map.getOrPut(html_bundle) catch bun.outOfMemory(); if (!entry.found_existing) { @@ -129,6 +243,10 @@ pub const AnyRoute = union(enum) { } } + if (try bundledHTMLManifestFromJS(argument, init_ctx)) |html_route| { + return html_route; + } + return null; } @@ -136,7 +254,9 @@ pub const AnyRoute = union(enum) { arena: std.heap.ArenaAllocator, dedupe_html_bundle_map: std.AutoHashMap(*HTMLBundle, bun.ptr.RefPtr(HTMLBundle.Route)), js_string_allocations: bun.bake.StringRefList, + global: *JSC.JSGlobalObject, framework_router_list: std.ArrayList(bun.bake.Framework.FileSystemRouterType), + user_routes: *std.ArrayList(ServerConfig.StaticRouteEntry), }; pub fn fromJS( @@ -145,7 +265,7 @@ pub const AnyRoute = union(enum) { argument: JSC.JSValue, init_ctx: *ServerInitContext, ) bun.JSError!?AnyRoute { - if (AnyRoute.htmlRouteFromJS(argument, init_ctx)) |html_route| { + if (try AnyRoute.htmlRouteFromJS(argument, init_ctx)) |html_route| { return html_route; } @@ -1695,7 +1815,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d else => |e| { var sys_err = bun.sys.Error.fromCode(e, .listen); sys_err.path = unix; - error_instance = sys_err.toJSC(globalThis); + error_instance = sys_err.toJS(globalThis); }, } }, @@ -1802,7 +1922,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d } } - const result: JSValue = onNodeHTTPRequestFn( + const result: JSValue = bun.jsc.fromJSHostCall(globalThis, @src(), onNodeHTTPRequestFn, .{ @intFromPtr(AnyServer.from(this).ptr.ptr()), globalThis, thisObject, @@ -1815,7 +1935,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d resp, upgrade_ctx, &node_http_response, - ); + }) catch globalThis.takeException(error.JSError); const HTTPResult = union(enum) { rejection: JSC.JSValue, @@ -2342,8 +2462,8 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d // So we first use a hash of the main field: const first_hash_segment: [8]u8 = brk: { - const buffer = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buffer); + const buffer = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buffer); const main = JSC.VirtualMachine.get().main; const len = @min(main.len, buffer.len); break :brk @bitCast(bun.hash(bun.strings.copyLowercase(main[0..len], buffer[0..len]))); @@ -2351,8 +2471,8 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d // And then we use a hash of their project root directory: const second_hash_segment: [8]u8 = brk: { - const buffer = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buffer); + const buffer = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buffer); const root = this.dev_server.?.root; const len = @min(root.len, buffer.len); break :brk 
@bitCast(bun.hash(bun.strings.copyLowercase(root[0..len], buffer[0..len]))); @@ -2791,7 +2911,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d } const error_code_value = JSValue.jsNumber(error_code); - const raw_packet_value = JSC.ArrayBuffer.createBuffer(this.globalThis, raw_packet); + const raw_packet_value = JSC.ArrayBuffer.createBuffer(this.globalThis, raw_packet) catch return; // TODO: properly propagate exception upwards const loop = this.globalThis.bunVM().eventLoop(); loop.enter(); defer loop.exit(); @@ -3036,13 +3156,13 @@ pub const AnyServer = struct { pub fn onRequest( this: AnyServer, req: *uws.Request, - resp: *uws.NewApp(false).Response, + resp: bun.uws.AnyResponse, ) void { return switch (this.ptr.tag()) { - Ptr.case(HTTPServer) => this.ptr.as(HTTPServer).onRequest(req, resp), - Ptr.case(HTTPSServer) => @panic("TODO: https"), - Ptr.case(DebugHTTPServer) => this.ptr.as(DebugHTTPServer).onRequest(req, resp), - Ptr.case(DebugHTTPSServer) => @panic("TODO: https"), + Ptr.case(HTTPServer) => this.ptr.as(HTTPServer).onRequest(req, resp.assertNoSSL()), + Ptr.case(HTTPSServer) => this.ptr.as(HTTPSServer).onRequest(req, resp.assertSSL()), + Ptr.case(DebugHTTPServer) => this.ptr.as(DebugHTTPServer).onRequest(req, resp.assertNoSSL()), + Ptr.case(DebugHTTPSServer) => this.ptr.as(DebugHTTPSServer).onRequest(req, resp.assertSSL()), else => bun.unreachablePanic("Invalid pointer tag", .{}), }; } diff --git a/src/bun.js/api/server/FileRoute.zig b/src/bun.js/api/server/FileRoute.zig index 2dbdca9a88..d5c1e55369 100644 --- a/src/bun.js/api/server/FileRoute.zig +++ b/src/bun.js/api/server/FileRoute.zig @@ -12,6 +12,7 @@ has_content_length_header: bool, pub const InitOptions = struct { server: ?AnyServer, status_code: u16 = 200, + headers: ?*JSC.WebCore.FetchHeaders = null, }; pub fn lastModifiedDate(this: *const FileRoute) ?u64 { @@ -34,12 +35,14 @@ pub fn lastModifiedDate(this: *const FileRoute) ?u64 { } pub fn initFromBlob(blob: Blob, opts: InitOptions) *FileRoute { - const headers = Headers.from(null, bun.default_allocator, .{ .body = &.{ .Blob = blob } }) catch bun.outOfMemory(); + const headers = Headers.from(opts.headers, bun.default_allocator, .{ .body = &.{ .Blob = blob } }) catch bun.outOfMemory(); return bun.new(FileRoute, .{ .ref_count = .init(), .server = opts.server, .blob = blob, .headers = headers, + .has_last_modified_header = headers.get("last-modified") != null, + .has_content_length_header = headers.get("content-length") != null, .status_code = opts.status_code, }); } diff --git a/src/bun.js/api/server/NodeHTTPResponse.zig b/src/bun.js/api/server/NodeHTTPResponse.zig index 50dffeb601..d19c4e7dd5 100644 --- a/src/bun.js/api/server/NodeHTTPResponse.zig +++ b/src/bun.js/api/server/NodeHTTPResponse.zig @@ -587,7 +587,7 @@ fn drainBufferedRequestBodyFromPause(this: *NodeHTTPResponse, globalObject: *JSC return null; } -pub fn doResume(this: *NodeHTTPResponse, globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { +pub fn doResume(this: *NodeHTTPResponse, globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { log("doResume", .{}); if (this.flags.request_has_completed or this.flags.socket_closed or this.flags.ended) { return .false; @@ -744,7 +744,7 @@ fn onDataOrAborted(this: *NodeHTTPResponse, chunk: []const u8, last: bool, event } if (chunk.len > 0) { - break :brk JSC.ArrayBuffer.createBuffer(globalThis, chunk); + break :brk JSC.ArrayBuffer.createBuffer(globalThis, chunk) catch return; // TODO: properly 
propagate exception upwards } break :brk .js_undefined; }; diff --git a/src/bun.js/api/server/RequestContext.zig b/src/bun.js/api/server/RequestContext.zig index da894d68eb..c3506bfc5a 100644 --- a/src/bun.js/api/server/RequestContext.zig +++ b/src/bun.js/api/server/RequestContext.zig @@ -856,14 +856,14 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, file.pathlike.fd else switch (bun.sys.open(file.pathlike.path.sliceZ(&file_buf), bun.O.RDONLY | bun.O.NONBLOCK | bun.O.CLOEXEC, 0)) { .result => |_fd| _fd, - .err => |err| return this.runErrorHandler(err.withPath(file.pathlike.path.slice()).toJSC(globalThis)), + .err => |err| return this.runErrorHandler(err.withPath(file.pathlike.path.slice()).toJS(globalThis)), }; // stat only blocks if the target is a file descriptor const stat: bun.Stat = switch (bun.sys.fstat(fd)) { .result => |result| result, .err => |err| { - this.runErrorHandler(err.withPathLike(file.pathlike).toJSC(globalThis)); + this.runErrorHandler(err.withPathLike(file.pathlike).toJS(globalThis)); if (auto_close) { fd.close(); } diff --git a/src/bun.js/api/server/SSLConfig.zig b/src/bun.js/api/server/SSLConfig.zig index 5ed7eecbdf..2e40b5a348 100644 --- a/src/bun.js/api/server/SSLConfig.zig +++ b/src/bun.js/api/server/SSLConfig.zig @@ -41,7 +41,7 @@ const BlobFileContentResult = struct { const read = fs.readFileWithOptions(.{ .path = body.Blob.store.?.data.file.pathlike }, .sync, .null_terminated); switch (read) { .err => { - return global.throwValue(read.err.toJSC(global)); + return global.throwValue(read.err.toJS(global)); }, else => { const str = read.result.null_terminated; @@ -252,13 +252,13 @@ pub fn fromJS(vm: *JSC.VirtualMachine, global: *JSC.JSGlobalObject, obj: JSC.JSV if (try obj.getTruthy(global, "key")) |js_obj| { if (js_obj.jsType().isArray()) { - const count = js_obj.getLength(global); + const count = try js_obj.getLength(global); if (count > 0) { const native_array = try bun.default_allocator.alloc([*c]const u8, count); var valid_count: u32 = 0; for (0..count) |i| { - const item = js_obj.getIndex(global, @intCast(i)); + const item = try js_obj.getIndex(global, @intCast(i)); if (try JSC.Node.StringOrBuffer.fromJS(global, arena.allocator(), item)) |sb| { defer sb.deinit(); const sliced = sb.slice(); @@ -360,13 +360,13 @@ pub fn fromJS(vm: *JSC.VirtualMachine, global: *JSC.JSGlobalObject, obj: JSC.JSV if (try obj.getTruthy(global, "cert")) |js_obj| { if (js_obj.jsType().isArray()) { - const count = js_obj.getLength(global); + const count = try js_obj.getLength(global); if (count > 0) { const native_array = try bun.default_allocator.alloc([*c]const u8, count); var valid_count: u32 = 0; for (0..count) |i| { - const item = js_obj.getIndex(global, @intCast(i)); + const item = try js_obj.getIndex(global, @intCast(i)); if (try JSC.Node.StringOrBuffer.fromJS(global, arena.allocator(), item)) |sb| { defer sb.deinit(); const sliced = sb.slice(); @@ -469,13 +469,13 @@ pub fn fromJS(vm: *JSC.VirtualMachine, global: *JSC.JSGlobalObject, obj: JSC.JSV if (try obj.getTruthy(global, "ca")) |js_obj| { if (js_obj.jsType().isArray()) { - const count = js_obj.getLength(global); + const count = try js_obj.getLength(global); if (count > 0) { const native_array = try bun.default_allocator.alloc([*c]const u8, count); var valid_count: u32 = 0; for (0..count) |i| { - const item = js_obj.getIndex(global, @intCast(i)); + const item = try js_obj.getIndex(global, @intCast(i)); if (try JSC.Node.StringOrBuffer.fromJS(global, arena.allocator(), item)) |sb| { defer 
sb.deinit(); const sliced = sb.slice(); diff --git a/src/bun.js/api/server/ServerConfig.zig b/src/bun.js/api/server/ServerConfig.zig index 88a3effc7b..9c244a186c 100644 --- a/src/bun.js/api/server/ServerConfig.zig +++ b/src/bun.js/api/server/ServerConfig.zig @@ -495,7 +495,7 @@ pub fn fromJS( \\ "/path3/:param1/:param2": (req) => new Response("Hello") \\ } \\ - \\Learn more at https://bun.sh/docs/api/http + \\Learn more at https://bun.com/docs/api/http , .{}); }; args.had_routes_object = true; @@ -506,12 +506,15 @@ pub fn fromJS( }).init(global, static_obj); defer iter.deinit(); - var init_ctx: AnyRoute.ServerInitContext = .{ + var init_ctx_: AnyRoute.ServerInitContext = .{ .arena = .init(bun.default_allocator), .dedupe_html_bundle_map = .init(bun.default_allocator), .framework_router_list = .init(bun.default_allocator), .js_string_allocations = .empty, + .user_routes = &args.static_routes, + .global = global, }; + const init_ctx: *AnyRoute.ServerInitContext = &init_ctx_; errdefer { init_ctx.arena.deinit(); init_ctx.framework_router_list.deinit(); @@ -579,7 +582,7 @@ pub fn fromJS( }; var found = false; inline for (methods) |method| { - if (value.getOwn(global, @tagName(method))) |function| { + if (try value.getOwn(global, @tagName(method))) |function| { if (!found) { try validateRouteName(global, path); } @@ -593,7 +596,7 @@ pub fn fromJS( }, .callback = .create(function.withAsyncContextIfNeeded(global), global), }) catch bun.outOfMemory(); - } else if (try AnyRoute.fromJS(global, path, function, &init_ctx)) |html_route| { + } else if (try AnyRoute.fromJS(global, path, function, init_ctx)) |html_route| { var method_set = bun.http.Method.Set.initEmpty(); method_set.insert(method); @@ -612,7 +615,7 @@ pub fn fromJS( } } - const route = try AnyRoute.fromJS(global, path, value, &init_ctx) orelse { + const route = try AnyRoute.fromJS(global, path, value, init_ctx) orelse { return global.throwInvalidArguments( \\'routes' expects a Record<string, Response | HTMLBundle | {[method: string]: (req: BunRequest) => Response|Promise<Response>}> \\ @@ -648,7 +651,7 @@ pub fn fromJS( \\}); \\``` \\ - \\See https://bun.sh/docs/api/http for more information. + \\See https://bun.com/docs/api/http for more information. , .{}, ); @@ -749,7 +752,7 @@ pub fn fromJS( args.address.tcp.port = @as( u16, @intCast(@min( - @max(0, port_.coerce(i32, global)), + @max(0, try port_.coerce(i32, global)), std.math.maxInt(u16), )), ); @@ -833,17 +836,17 @@ pub fn fromJS( } if (try arg.get(global, "reusePort")) |dev| { - args.reuse_port = dev.coerce(bool, global); + args.reuse_port = dev.toBoolean(); } if (global.hasException()) return error.JSError; if (try arg.get(global, "ipv6Only")) |dev| { - args.ipv6_only = dev.coerce(bool, global); + args.ipv6_only = dev.toBoolean(); } if (global.hasException()) return error.JSError; if (try arg.get(global, "inspector")) |inspector| { - args.inspector = inspector.coerce(bool, global); + args.inspector = inspector.toBoolean(); if (args.inspector and args.development == .production) { return global.throwInvalidArguments("Cannot enable inspector in production.
Please set development: true in Bun.serve()", .{}); @@ -901,7 +904,7 @@ pub fn fromJS( \\ return new Response("Hello") \\ } \\ - \\Learn more at https://bun.sh/docs/api/http + \\Learn more at https://bun.com/docs/api/http , .{}); } else { if (global.hasException()) return error.JSError; @@ -911,11 +914,11 @@ pub fn fromJS( if (tls.isFalsey()) { args.ssl_config = null; } else if (tls.jsType().isArray()) { - var value_iter = tls.arrayIterator(global); + var value_iter = try tls.arrayIterator(global); if (value_iter.len == 1) { return global.throwInvalidArguments("tls option expects at least 1 tls object", .{}); } - while (value_iter.next()) |item| { + while (try value_iter.next()) |item| { var ssl_config = try SSLConfig.fromJS(vm, global, item) orelse { if (global.hasException()) { return error.JSError; diff --git a/src/bun.js/api/server/ServerWebSocket.zig b/src/bun.js/api/server/ServerWebSocket.zig index ab6bc051c6..c956735fa1 100644 --- a/src/bun.js/api/server/ServerWebSocket.zig +++ b/src/bun.js/api/server/ServerWebSocket.zig @@ -143,7 +143,7 @@ pub fn onMessage( this.getThisValue(), switch (opcode) { .text => bun.String.createUTF8ForJS(globalObject, message), - .binary => this.binaryToJS(globalObject, message), + .binary => this.binaryToJS(globalObject, message) catch .zero, // TODO: properly propagate exception upwards else => unreachable, }, }; @@ -208,7 +208,7 @@ pub fn onDrain(this: *ServerWebSocket, _: uws.AnyWebSocket) void { } } -fn binaryToJS(this: *const ServerWebSocket, globalThis: *JSC.JSGlobalObject, data: []const u8) JSC.JSValue { +fn binaryToJS(this: *const ServerWebSocket, globalThis: *JSC.JSGlobalObject, data: []const u8) bun.JSError!JSC.JSValue { return switch (this.flags.binary_type) { .Buffer => JSC.ArrayBuffer.createBuffer( globalThis, @@ -244,7 +244,7 @@ pub fn onPing(this: *ServerWebSocket, _: uws.AnyWebSocket, data: []const u8) voi _ = cb.call( globalThis, .js_undefined, - &[_]JSC.JSValue{ this.getThisValue(), this.binaryToJS(globalThis, data) }, + &[_]JSC.JSValue{ this.getThisValue(), this.binaryToJS(globalThis, data) catch .zero }, // TODO: properly propagate exception upwards ) catch |e| { const err = globalThis.takeException(e); log("onPing error", .{}); @@ -272,7 +272,7 @@ pub fn onPong(this: *ServerWebSocket, _: uws.AnyWebSocket, data: []const u8) voi _ = cb.call( globalThis, .js_undefined, - &[_]JSC.JSValue{ this.getThisValue(), this.binaryToJS(globalThis, data) }, + &[_]JSC.JSValue{ this.getThisValue(), this.binaryToJS(globalThis, data) catch .zero }, // TODO: properly propagate exception upwards ) catch |e| { const err = globalThis.takeException(e); log("onPong error", .{}); @@ -1077,7 +1077,7 @@ pub fn close( return globalThis.throwInvalidArguments("close requires a numeric code or undefined", .{}); } - break :brk args.ptr[0].coerce(i32, globalThis); + break :brk try args.ptr[0].coerce(i32, globalThis); }; var message_value: ZigString.Slice = brk: { diff --git a/src/bun.js/api/server/StaticRoute.zig b/src/bun.js/api/server/StaticRoute.zig index f1874d916f..616bb2bfec 100644 --- a/src/bun.js/api/server/StaticRoute.zig +++ b/src/bun.js/api/server/StaticRoute.zig @@ -22,11 +22,12 @@ pub const InitFromBytesOptions = struct { server: ?AnyServer, mime_type: ?*const bun.http.MimeType = null, status_code: u16 = 200, + headers: ?*JSC.WebCore.FetchHeaders = null, }; /// Ownership of `blob` is transferred to this function. 
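///
/// A minimal call sketch (the `blob` and `fetch_headers` locals are assumed
/// for illustration; this mirrors how server.zig constructs bundled-asset
/// routes). Passing `headers` seeds the route's header map from an existing
/// FetchHeaders instead of starting empty:
///
/// ```zig
/// const route = StaticRoute.initFromAnyBlob(&.{ .Blob = blob }, .{
///     .server = null,
///     .headers = fetch_headers, // ?*JSC.WebCore.FetchHeaders, may be null
/// });
/// ```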
pub fn initFromAnyBlob(blob: *const AnyBlob, options: InitFromBytesOptions) *StaticRoute { - var headers = Headers.from(null, bun.default_allocator, .{ .body = blob }) catch bun.outOfMemory(); + var headers = Headers.from(options.headers, bun.default_allocator, .{ .body = blob }) catch bun.outOfMemory(); if (options.mime_type) |mime_type| { if (headers.getContentType() == null) { headers.append("Content-Type", mime_type.value) catch bun.outOfMemory(); diff --git a/src/bun.js/api/server/WebSocketServerContext.zig b/src/bun.js/api/server/WebSocketServerContext.zig index bb4e9ba231..60b3dfc643 100644 --- a/src/bun.js/api/server/WebSocketServerContext.zig +++ b/src/bun.js/api/server/WebSocketServerContext.zig @@ -50,71 +50,27 @@ pub const Handler = struct { var valid = false; - if (try object.getTruthyComptime(globalObject, "message")) |message_| { - if (!message_.isCallable()) { - return globalObject.throwInvalidArguments("websocket expects a function for the message option", .{}); + inline for (.{ + .{ "error", "onError" }, + .{ "message", "onMessage" }, + .{ "open", "onOpen" }, + .{ "close", "onClose" }, + .{ "drain", "onDrain" }, + .{ "ping", "onPing" }, + .{ "pong", "onPong" }, + }, 0..) |pair, i| { + if (try object.getTruthy(globalObject, pair[0])) |value| { + if (!value.isCell() or !value.isCallable()) { + return globalObject.throwInvalidArguments("websocket expects a function for the '{s}' option", .{pair[0]}); + } + const cb = value.withAsyncContextIfNeeded(globalObject); + @field(handler, pair[1]) = cb; + cb.ensureStillAlive(); + if (i > 0) { + // anything other than "error" is considered valid. + valid = true; + } } - const message = message_.withAsyncContextIfNeeded(globalObject); - handler.onMessage = message; - message.ensureStillAlive(); - valid = true; - } - - if (try object.getTruthy(globalObject, "open")) |open_| { - if (!open_.isCallable()) { - return globalObject.throwInvalidArguments("websocket expects a function for the open option", .{}); - } - const open = open_.withAsyncContextIfNeeded(globalObject); - handler.onOpen = open; - open.ensureStillAlive(); - valid = true; - } - - if (try object.getTruthy(globalObject, "close")) |close_| { - if (!close_.isCallable()) { - return globalObject.throwInvalidArguments("websocket expects a function for the close option", .{}); - } - const close = close_.withAsyncContextIfNeeded(globalObject); - handler.onClose = close; - close.ensureStillAlive(); - valid = true; - } - - if (try object.getTruthy(globalObject, "drain")) |drain_| { - if (!drain_.isCallable()) { - return globalObject.throwInvalidArguments("websocket expects a function for the drain option", .{}); - } - const drain = drain_.withAsyncContextIfNeeded(globalObject); - handler.onDrain = drain; - drain.ensureStillAlive(); - valid = true; - } - - if (try object.getTruthy(globalObject, "onError")) |onError_| { - if (!onError_.isCallable()) { - return globalObject.throwInvalidArguments("websocket expects a function for the onError option", .{}); - } - const onError = onError_.withAsyncContextIfNeeded(globalObject); - handler.onError = onError; - onError.ensureStillAlive(); - } - - if (try object.getTruthy(globalObject, "ping")) |cb| { - if (!cb.isCallable()) { - return globalObject.throwInvalidArguments("websocket expects a function for the ping option", .{}); - } - handler.onPing = cb; - cb.ensureStillAlive(); - valid = true; - } - - if (try object.getTruthy(globalObject, "pong")) |cb| { - if (!cb.isCallable()) { - return globalObject.throwInvalidArguments("websocket expects a 
function for the pong option", .{}); - } - handler.onPong = cb; - cb.ensureStillAlive(); - valid = true; } if (valid) diff --git a/src/bun.js/api/sourcemap.classes.ts b/src/bun.js/api/sourcemap.classes.ts new file mode 100644 index 0000000000..9a4ebd5201 --- /dev/null +++ b/src/bun.js/api/sourcemap.classes.ts @@ -0,0 +1,32 @@ +import { define } from "../../codegen/class-definitions"; + +export default [ + define({ + name: "SourceMap", + JSType: "0b11101110", + proto: { + findOrigin: { + fn: "findOrigin", + length: 2, + }, + findEntry: { + fn: "findEntry", + length: 2, + }, + payload: { + getter: "getPayload", + cache: true, + }, + lineLengths: { + getter: "getLineLengths", + cache: true, + }, + }, + finalize: true, + construct: true, + constructNeedsThis: true, + memoryCost: true, + estimatedSize: true, + structuredClone: false, + }), +]; diff --git a/src/bun.js/bindings/AnyPromise.zig b/src/bun.js/bindings/AnyPromise.zig index 4dcce82733..c22bac69dc 100644 --- a/src/bun.js/bindings/AnyPromise.zig +++ b/src/bun.js/bindings/AnyPromise.zig @@ -76,11 +76,15 @@ pub const AnyPromise = union(enum) { args: Args, pub fn call(wrap_: *@This(), global: *JSC.JSGlobalObject) callconv(.c) JSC.JSValue { - return JSC.toJSHostValue(global, @call(.auto, Fn, wrap_.args)); + return JSC.toJSHostCall(global, @src(), Fn, wrap_.args); } }; + var scope: JSC.CatchScope = undefined; + scope.init(globalObject, @src()); + defer scope.deinit(); var ctx = Wrapper{ .args = args }; JSC__AnyPromise__wrap(globalObject, this.asValue(), &ctx, @ptrCast(&Wrapper.call)); + bun.debugAssert(!scope.hasException()); // TODO: properly propagate exception upwards } }; diff --git a/src/bun.js/bindings/AsyncContextFrame.cpp b/src/bun.js/bindings/AsyncContextFrame.cpp index 3fcca2555b..ce0b503d4d 100644 --- a/src/bun.js/bindings/AsyncContextFrame.cpp +++ b/src/bun.js/bindings/AsyncContextFrame.cpp @@ -120,4 +120,10 @@ JSValue AsyncContextFrame::profiledCall(JSGlobalObject* global, JSValue function return AsyncContextFrame::call(global, functionObject, thisValue, args, returnedException); } +JSC::JSValue AsyncContextFrame::run(JSGlobalObject* global, JSValue functionObject, JSValue thisValue, const ArgList& args) +{ + ASSERT(global->isAsyncContextTrackingEnabled()); + + ASYNCCONTEXTFRAME_CALL_IMPL(global, ProfilingReason::API, functionObject, JSC::getCallData(functionObject), thisValue, args); +} #undef ASYNCCONTEXTFRAME_CALL_IMPL diff --git a/src/bun.js/bindings/AsyncContextFrame.h b/src/bun.js/bindings/AsyncContextFrame.h index 2daa57eb0a..164550cdd0 100644 --- a/src/bun.js/bindings/AsyncContextFrame.h +++ b/src/bun.js/bindings/AsyncContextFrame.h @@ -37,6 +37,13 @@ public: mutable JSC::WriteBarrier callback; mutable JSC::WriteBarrier context; + /** + * When you have a **specific** AsyncContextFrame to run the function in, use this + * + * Usually, you do not want to use this. Usually, you want to use `call` or `profiledCall`. 
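+ *
+ * A minimal usage sketch (`maybeFrame`, `callback`, and `args` are assumed
+ * locals; this mirrors the process.nextTick dispatch later in this diff):
+ *
+ *     if (auto* frame = jsDynamicCast<AsyncContextFrame*>(maybeFrame))
+ *         frame->run(globalObject, callback, jsUndefined(), args);
+ *     else
+ *         AsyncContextFrame::call(globalObject, callback, jsUndefined(), args);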
+ */ + JSC::JSValue run(JSC::JSGlobalObject* globalObject, JSC::JSValue functionObject, JSC::JSValue thisValue, const JSC::ArgList& args); + template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { diff --git a/src/bun.js/bindings/BunObject.cpp b/src/bun.js/bindings/BunObject.cpp index 8f24d751b3..f45743b2b4 100644 --- a/src/bun.js/bindings/BunObject.cpp +++ b/src/bun.js/bindings/BunObject.cpp @@ -1,4 +1,3 @@ - #include "root.h" #include "JavaScriptCore/HeapProfiler.h" @@ -71,6 +70,7 @@ BUN_DECLARE_HOST_FUNCTION(Bun__DNSResolver__cancel); BUN_DECLARE_HOST_FUNCTION(Bun__fetch); BUN_DECLARE_HOST_FUNCTION(Bun__fetchPreconnect); BUN_DECLARE_HOST_FUNCTION(Bun__randomUUIDv7); +BUN_DECLARE_HOST_FUNCTION(Bun__randomUUIDv5); using namespace JSC; using namespace WebCore; @@ -111,15 +111,12 @@ static inline JSC::EncodedJSValue flattenArrayOfBuffersIntoArrayBufferOrUint8Arr size_t arrayLength = array->length(); const auto returnEmptyArrayBufferView = [&]() -> EncodedJSValue { if (asUint8Array) { - return JSValue::encode( - JSC::JSUint8Array::create( - lexicalGlobalObject, - lexicalGlobalObject->m_typedArrayUint8.get(lexicalGlobalObject), - 0)); + RELEASE_AND_RETURN(throwScope, JSValue::encode(JSC::JSUint8Array::create(lexicalGlobalObject, lexicalGlobalObject->m_typedArrayUint8.get(lexicalGlobalObject), 0))); } RELEASE_AND_RETURN(throwScope, JSValue::encode(JSC::JSArrayBuffer::create(vm, lexicalGlobalObject->arrayBufferStructure(), JSC::ArrayBuffer::create(static_cast<size_t>(0), 1)))); }; + RETURN_IF_EXCEPTION(throwScope, {}); if (arrayLength < 1) { return returnEmptyArrayBufferView(); @@ -228,6 +225,7 @@ static inline JSC::EncodedJSValue flattenArrayOfBuffersIntoArrayBufferOrUint8Arr if (asUint8Array) { auto uint8array = JSC::JSUint8Array::create(lexicalGlobalObject, lexicalGlobalObject->m_typedArrayUint8.get(lexicalGlobalObject), WTFMove(buffer), 0, byteLength); + RETURN_IF_EXCEPTION(throwScope, {}); return JSValue::encode(uint8array); } @@ -265,7 +263,7 @@ JSC_DEFINE_HOST_FUNCTION(functionConcatTypedArrays, (JSGlobalObject * globalObje asUint8Array = arg2.toBoolean(globalObject); } - return flattenArrayOfBuffersIntoArrayBufferOrUint8Array(globalObject, arrayValue, maxLength, asUint8Array); + RELEASE_AND_RETURN(throwScope, flattenArrayOfBuffersIntoArrayBufferOrUint8Array(globalObject, arrayValue, maxLength, asUint8Array)); } JSC_DECLARE_HOST_FUNCTION(functionConcatTypedArrays); @@ -359,6 +357,7 @@ static JSValue constructBunShell(VM& vm, JSObject* bunObject) auto* bunShell = shell.getObject(); auto ShellError = bunShell->get(globalObject, JSC::Identifier::fromString(vm, "ShellError"_s)); + RETURN_IF_EXCEPTION(scope, {}); if (!ShellError.isObject()) [[unlikely]] { throwTypeError(globalObject, scope, "Internal error: BunShell.ShellError is not an object"_s); return {}; @@ -460,7 +459,7 @@ JSC_DEFINE_HOST_FUNCTION(functionBunSleep, } extern "C" JSC::EncodedJSValue Bun__escapeHTML8(JSGlobalObject* globalObject, JSC::EncodedJSValue input, const LChar* ptr, size_t length); -extern "C" JSC::EncodedJSValue Bun__escapeHTML16(JSGlobalObject* globalObject, JSC::EncodedJSValue input, const UChar* ptr, size_t length); +extern "C" JSC::EncodedJSValue Bun__escapeHTML16(JSGlobalObject* globalObject, JSC::EncodedJSValue input, const char16_t* ptr, size_t length); JSC_DEFINE_HOST_FUNCTION(functionBunEscapeHTML, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) { @@ -570,7 +569,7 @@ JSC_DEFINE_HOST_FUNCTION(functionPathToFileURL, (JSC::JSGlobalGl { WTF::String pathString =
pathValue.toWTFString(lexicalGlobalObject); - RETURN_IF_EXCEPTION(throwScope, JSC::JSValue::encode({})); + RETURN_IF_EXCEPTION(throwScope, {}); pathString = pathResolveWTFString(lexicalGlobalObject, pathString); auto fileURL = WTF::URL::fileURLWithFileSystemPath(pathString); @@ -759,6 +758,7 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj peek constructBunPeekObject DontDelete|PropertyCallback plugin constructPluginObject ReadOnly|DontDelete|PropertyCallback randomUUIDv7 Bun__randomUUIDv7 DontDelete|Function 2 + randomUUIDv5 Bun__randomUUIDv5 DontDelete|Function 3 readableStreamToArray JSBuiltin Builtin|Function 1 readableStreamToArrayBuffer JSBuiltin Builtin|Function 1 readableStreamToBytes JSBuiltin Builtin|Function 1 diff --git a/src/bun.js/bindings/BunPlugin.cpp b/src/bun.js/bindings/BunPlugin.cpp index de64d26307..03de1b1d17 100644 --- a/src/bun.js/bindings/BunPlugin.cpp +++ b/src/bun.js/bindings/BunPlugin.cpp @@ -60,12 +60,12 @@ static JSC::EncodedJSValue jsFunctionAppendOnLoadPluginBody(JSC::JSGlobalObject* auto* filterObject = callframe->uncheckedArgument(0).toObject(globalObject); RETURN_IF_EXCEPTION(scope, {}); JSC::RegExpObject* filter = nullptr; - if (JSValue filterValue = filterObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "filter"_s))) { - RETURN_IF_EXCEPTION(scope, {}); + auto filterValue = filterObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "filter"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (filterValue) { if (filterValue.isCell() && filterValue.asCell()->inherits<JSC::RegExpObject>()) filter = jsCast<JSC::RegExpObject*>(filterValue); } - RETURN_IF_EXCEPTION(scope, {}); if (!filter) { throwException(globalObject, scope, createError(globalObject, "onLoad() expects first argument to be an object with a filter RegExp"_s)); @@ -73,7 +73,9 @@ static JSC::EncodedJSValue jsFunctionAppendOnLoadPluginBody(JSC::JSGlobalObject* } String namespaceString = String(); - if (JSValue namespaceValue = filterObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "namespace"_s))) { + auto namespaceValue = filterObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "namespace"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (namespaceValue) { if (namespaceValue.isString()) { namespaceString = namespaceValue.toWTFString(globalObject); RETURN_IF_EXCEPTION(scope, {}); @@ -83,7 +85,6 @@ static JSC::EncodedJSValue jsFunctionAppendOnLoadPluginBody(JSC::JSGlobalObject* } } } - RETURN_IF_EXCEPTION(scope, {}); auto func = callframe->uncheckedArgument(1); RETURN_IF_EXCEPTION(scope, {}); @@ -151,6 +152,7 @@ static EncodedJSValue jsFunctionAppendVirtualModulePluginBody(JSC::JSGlobalObjec global->requireMap()->remove(globalObject, moduleIdValue); global->esmRegistryMap()->remove(globalObject, moduleIdValue); + RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(callframe->thisValue()); } @@ -168,12 +170,13 @@ static JSC::EncodedJSValue jsFunctionAppendOnResolvePluginBody(JSC::JSGlobalObje auto* filterObject = callframe->uncheckedArgument(0).toObject(globalObject); RETURN_IF_EXCEPTION(scope, {}); JSC::RegExpObject* filter = nullptr; - if (JSValue filterValue = filterObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "filter"_s))) { + auto filterValue = filterObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "filter"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (filterValue) { RETURN_IF_EXCEPTION(scope, {}); if (filterValue.isCell() && filterValue.asCell()->inherits<JSC::RegExpObject>()) filter = jsCast<JSC::RegExpObject*>(filterValue); } -
RETURN_IF_EXCEPTION(scope, {}); if (!filter) { throwException(globalObject, scope, createError(globalObject, "onResolve() expects first argument to be an object with a filter RegExp"_s)); @@ -181,7 +184,9 @@ static JSC::EncodedJSValue jsFunctionAppendOnResolvePluginBody(JSC::JSGlobalObje } String namespaceString = String(); - if (JSValue namespaceValue = filterObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "namespace"_s))) { + auto namespaceValue = filterObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "namespace"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (namespaceValue) { if (namespaceValue.isString()) { namespaceString = namespaceValue.toWTFString(globalObject); RETURN_IF_EXCEPTION(scope, {}); @@ -190,7 +195,6 @@ static JSC::EncodedJSValue jsFunctionAppendOnResolvePluginBody(JSC::JSGlobalObje return {}; } } - RETURN_IF_EXCEPTION(scope, {}); } @@ -286,7 +290,9 @@ static inline JSC::EncodedJSValue setupBunPlugin(JSC::JSGlobalObject* globalObje return {}; } - if (JSValue targetValue = obj->getIfPropertyExists(globalObject, Identifier::fromString(vm, "target"_s))) { + auto targetValue = obj->getIfPropertyExists(globalObject, Identifier::fromString(vm, "target"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (targetValue) { if (auto* targetJSString = targetValue.toStringOrNull(globalObject)) { String targetString = targetJSString->value(globalObject); if (!(targetString == "node"_s || targetString == "bun"_s || targetString == "browser"_s)) { @@ -294,7 +300,6 @@ static inline JSC::EncodedJSValue setupBunPlugin(JSC::JSGlobalObject* globalObje } } } - RETURN_IF_EXCEPTION(throwScope, {}); JSObject* builderObject = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 4); @@ -573,6 +578,7 @@ extern "C" JSC_DEFINE_HOST_FUNCTION(JSMock__jsModuleMock, (JSC::JSGlobalObject * JSModuleMock* mock = JSModuleMock::create(vm, globalObject->mockModule.mockModuleStructure.getInitializedOnMainThread(globalObject), callback); auto* esm = globalObject->esmRegistryMap(); + RETURN_IF_EXCEPTION(scope, {}); auto getJSValue = [&]() -> JSValue { auto scope = DECLARE_THROW_SCOPE(vm); @@ -610,8 +616,9 @@ extern "C" JSC_DEFINE_HOST_FUNCTION(JSMock__jsModuleMock, (JSC::JSGlobalObject * removeFromESM = true; JSObject* entry = entryValue ? 
entryValue.getObject() : nullptr; if (entry) { - if (JSValue moduleValue = entry->getIfPropertyExists(globalObject, Identifier::fromString(vm, String("module"_s)))) { - RETURN_IF_EXCEPTION(scope, {}); + auto moduleValue = entry->getIfPropertyExists(globalObject, Identifier::fromString(vm, String("module"_s))); + RETURN_IF_EXCEPTION(scope, {}); + if (moduleValue) { if (auto* mod = jsDynamicCast(moduleValue)) { JSC::JSModuleNamespaceObject* moduleNamespaceObject = mod->getModuleNamespace(globalObject); RETURN_IF_EXCEPTION(scope, {}); @@ -630,7 +637,7 @@ extern "C" JSC_DEFINE_HOST_FUNCTION(JSMock__jsModuleMock, (JSC::JSGlobalObject * // consistent with regular esm handling code auto catchScope = DECLARE_CATCH_SCOPE(vm); JSValue value = object->get(globalObject, name); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); value = jsUndefined(); } @@ -735,7 +742,7 @@ EncodedJSValue BunPlugin::OnLoad::run(JSC::JSGlobalObject* globalObject, BunStri if (!result.isObject()) { JSC::throwTypeError(globalObject, scope, "onLoad() expects an object returned"_s); - return JSValue::encode({}); + return {}; } RELEASE_AND_RETURN(scope, JSValue::encode(result)); @@ -810,7 +817,7 @@ EncodedJSValue BunPlugin::OnResolve::run(JSC::JSGlobalObject* globalObject, BunS switch (promise->status(vm)) { case JSPromise::Status::Pending: { JSC::throwTypeError(globalObject, scope, "onResolve() doesn't support pending promises yet"_s); - return JSValue::encode({}); + return {}; } case JSPromise::Status::Rejected: { promise->internalField(JSC::JSPromise::Field::Flags).set(vm, promise, jsNumber(static_cast(JSC::JSPromise::Status::Fulfilled))); @@ -826,7 +833,7 @@ EncodedJSValue BunPlugin::OnResolve::run(JSC::JSGlobalObject* globalObject, BunS if (!result.isObject()) { JSC::throwTypeError(globalObject, scope, "onResolve() expects an object returned"_s); - return JSValue::encode({}); + return {}; } RELEASE_AND_RETURN(scope, JSValue::encode(result)); diff --git a/src/bun.js/bindings/BunProcess.cpp b/src/bun.js/bindings/BunProcess.cpp index 01e538bb30..5f9ae19505 100644 --- a/src/bun.js/bindings/BunProcess.cpp +++ b/src/bun.js/bindings/BunProcess.cpp @@ -186,60 +186,40 @@ static JSValue constructVersions(VM& vm, JSObject* processObject) JSC::JSObject* object = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 24); RETURN_IF_EXCEPTION(scope, {}); - object->putDirect(vm, JSC::Identifier::fromString(vm, "node"_s), - JSC::JSValue(JSC::jsOwnedString(vm, makeAtomString(ASCIILiteral::fromLiteralUnsafe(REPORTED_NODEJS_VERSION))))); - object->putDirect( - vm, JSC::Identifier::fromString(vm, "bun"_s), - JSC::JSValue(JSC::jsOwnedString(vm, String(ASCIILiteral::fromLiteralUnsafe(Bun__version)).substring(1)))); - object->putDirect(vm, JSC::Identifier::fromString(vm, "boringssl"_s), - JSC::JSValue(JSC::jsOwnedString(vm, String(ASCIILiteral::fromLiteralUnsafe(Bun__versions_boringssl)))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "openssl"_s), - // https://github.com/oven-sh/bun/issues/7921 - // BoringSSL is a fork of OpenSSL 1.1.0, so we can report OpenSSL 1.1.0 - JSC::JSValue(JSC::jsOwnedString(vm, String("1.1.0"_s)))); - object->putDirect(vm, JSC::Identifier::fromString(vm, "libarchive"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_libarchive))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "mimalloc"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_mimalloc))), 0); - 
object->putDirect(vm, JSC::Identifier::fromString(vm, "picohttpparser"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_picohttpparser))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "uwebsockets"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_uws))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "webkit"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(BUN_WEBKIT_VERSION))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "zig"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_zig))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "zlib"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_zlib))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "tinycc"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_tinycc))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "lolhtml"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_lolhtml))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "ares"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_c_ares))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "libdeflate"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_libdeflate))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "usockets"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_usockets))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "lshpack"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_lshpack))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "zstd"_s), - JSC::JSValue(JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_zstd))), 0); - object->putDirect(vm, JSC::Identifier::fromString(vm, "v8"_s), JSValue(JSC::jsOwnedString(vm, String("12.4.254.14-node.12"_s))), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "node"_s), JSC::jsOwnedString(vm, makeAtomString(ASCIILiteral::fromLiteralUnsafe(REPORTED_NODEJS_VERSION)))); + object->putDirect(vm, JSC::Identifier::fromString(vm, "bun"_s), JSC::jsOwnedString(vm, String(ASCIILiteral::fromLiteralUnsafe(Bun__version)).substring(1))); + object->putDirect(vm, JSC::Identifier::fromString(vm, "boringssl"_s), JSC::jsOwnedString(vm, String(ASCIILiteral::fromLiteralUnsafe(Bun__versions_boringssl))), 0); + // https://github.com/oven-sh/bun/issues/7921 + // BoringSSL is a fork of OpenSSL 1.1.0, so we can report OpenSSL 1.1.0 + object->putDirect(vm, JSC::Identifier::fromString(vm, "openssl"_s), JSC::jsOwnedString(vm, String("1.1.0"_s))); + object->putDirect(vm, JSC::Identifier::fromString(vm, "libarchive"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_libarchive)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "mimalloc"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_mimalloc)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "picohttpparser"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_picohttpparser)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "uwebsockets"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_uws)), 0); + object->putDirect(vm, 
JSC::Identifier::fromString(vm, "webkit"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(BUN_WEBKIT_VERSION)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "zig"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_zig)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "zlib"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_zlib)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "tinycc"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_tinycc)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "lolhtml"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_lolhtml)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "ares"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_c_ares)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "libdeflate"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_libdeflate)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "usockets"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_usockets)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "lshpack"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_lshpack)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "zstd"_s), JSC::jsOwnedString(vm, ASCIILiteral::fromLiteralUnsafe(Bun__versions_zstd)), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "v8"_s), JSValue(JSC::jsOwnedString(vm, String("13.6.233.10-node.18"_s))), 0); #if OS(WINDOWS) object->putDirect(vm, JSC::Identifier::fromString(vm, "uv"_s), JSValue(JSC::jsOwnedString(vm, String::fromLatin1(uv_version_string()))), 0); #else object->putDirect(vm, JSC::Identifier::fromString(vm, "uv"_s), JSValue(JSC::jsOwnedString(vm, String("1.48.0"_s))), 0); #endif - object->putDirect(vm, JSC::Identifier::fromString(vm, "napi"_s), JSValue(JSC::jsOwnedString(vm, String("9"_s))), 0); + object->putDirect(vm, JSC::Identifier::fromString(vm, "napi"_s), JSValue(JSC::jsOwnedString(vm, String("10"_s))), 0); object->putDirect(vm, JSC::Identifier::fromString(vm, "icu"_s), JSValue(JSC::jsOwnedString(vm, String(ASCIILiteral::fromLiteralUnsafe(U_ICU_VERSION)))), 0); object->putDirect(vm, JSC::Identifier::fromString(vm, "unicode"_s), JSValue(JSC::jsOwnedString(vm, String(ASCIILiteral::fromLiteralUnsafe(U_UNICODE_VERSION)))), 0); #define STRINGIFY_IMPL(x) #x #define STRINGIFY(x) STRINGIFY_IMPL(x) - object->putDirect(vm, JSC::Identifier::fromString(vm, "modules"_s), - JSC::JSValue(JSC::jsOwnedString(vm, String(ASCIILiteral::fromLiteralUnsafe(STRINGIFY(REPORTED_NODEJS_ABI_VERSION)))))); + object->putDirect(vm, JSC::Identifier::fromString(vm, "modules"_s), JSC::jsOwnedString(vm, String(ASCIILiteral::fromLiteralUnsafe(STRINGIFY(REPORTED_NODEJS_ABI_VERSION))))); #undef STRINGIFY #undef STRINGIFY_IMPL @@ -680,7 +660,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionUmask, (JSGlobalObject * globalObject, if (value.isString()) { auto str = value.getString(globalObject); auto policy = WTF::TrailingJunkPolicy::Disallow; - auto opt = str.is8Bit() ? WTF::parseInteger(str.span8(), 8, policy) : WTF::parseInteger(str.span16(), 8, policy); + auto opt = str.is8Bit() ? 
WTF::parseInteger(str.span8(), 8, policy) : WTF::parseInteger(str.span16(), 8, policy); if (!opt.has_value()) return Bun::ERR::INVALID_ARG_VALUE(throwScope, globalObject, "mask"_s, value, "must be a 32-bit unsigned integer or an octal string"_s); newUmask = opt.value(); } else { @@ -1167,19 +1147,23 @@ extern "C" void Bun__promises__emitUnhandledRejectionWarning(JSC::JSGlobalObject JSValue reasonStack {}; if (Bun__promises__isErrorLike(globalObject, JSValue::decode(reason))) { reasonStack = JSValue::decode(reason).get(globalObject, vm.propertyNames->stack); - if (scope.exception()) scope.clearException(); + if (scope.exception()) [[unlikely]] + scope.clearException(); warning->putDirect(vm, vm.propertyNames->stack, reasonStack); } if (!reasonStack) { reasonStack = JSValue::decode(Bun__noSideEffectsToString(vm, globalObject, reason)); - if (scope.exception()) scope.clearException(); + if (scope.exception()) [[unlikely]] + scope.clearException(); } if (!reasonStack) reasonStack = jsUndefined(); Process::emitWarning(globalObject, reasonStack, jsString(globalObject->vm(), "UnhandledPromiseRejectionWarning"_str), jsUndefined(), jsUndefined()); - if (scope.exception()) scope.clearException(); + if (scope.exception()) [[unlikely]] + scope.clearException(); Process::emitWarningErrorInstance(globalObject, warning); - if (scope.exception()) scope.clearException(); + if (scope.exception()) [[unlikely]] + scope.clearException(); } extern "C" int Bun__handleUnhandledRejection(JSC::JSGlobalObject* lexicalGlobalObject, JSC::JSValue reason, JSC::JSValue promise) @@ -1515,6 +1499,7 @@ JSValue Process::emitWarningErrorInstance(JSC::JSGlobalObject* lexicalGlobalObje // }); auto func = JSFunction::create(vm, globalObject, 1, ""_s, jsFunction_throwValue, JSC::ImplementationVisibility::Private); process->queueNextTick(globalObject, func, errorInstance); + RETURN_IF_EXCEPTION(scope, {}); return jsUndefined(); } } @@ -1522,6 +1507,7 @@ JSValue Process::emitWarningErrorInstance(JSC::JSGlobalObject* lexicalGlobalObje // process.nextTick(doEmitWarning, warning); auto func = JSFunction::create(vm, globalObject, 1, ""_s, jsFunction_emitWarning, JSC::ImplementationVisibility::Private); process->queueNextTick(globalObject, func, errorInstance); + RETURN_IF_EXCEPTION(scope, {}); return jsUndefined(); } JSValue Process::emitWarning(JSC::JSGlobalObject* lexicalGlobalObject, JSValue warning, JSValue type, JSValue code, JSValue ctor) @@ -2062,7 +2048,6 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb JSC_DEFINE_HOST_FUNCTION(Process_functionGetReport, (JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); // TODO: node:vm return JSValue::encode(constructReportObjectComplete(vm, jsCast(globalObject), String())); } @@ -2126,8 +2111,68 @@ static JSValue constructProcessConfigObject(VM& vm, JSObject* processObject) variables->putDirect(vm, JSC::Identifier::fromString(vm, "v8_enable_i8n_support"_s), JSC::jsNumber(1), 0); variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_lto"_s), JSC::jsBoolean(false), 0); variables->putDirect(vm, JSC::Identifier::fromString(vm, "node_module_version"_s), JSC::jsNumber(REPORTED_NODEJS_ABI_VERSION), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "napi_build_version"_s), JSC::jsNumber(Napi::DEFAULT_NAPI_VERSION), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "node_builtin_shareable_builtins"_s), JSC::constructEmptyArray(globalObject, 
nullptr), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "node_byteorder"_s), JSC::jsString(vm, String("little"_s)), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "clang"_s), JSC::jsNumber(0), 0); + config->putDirect(vm, JSC::Identifier::fromString(vm, "target_defaults"_s), JSC::constructEmptyObject(globalObject), 0); config->putDirect(vm, JSC::Identifier::fromString(vm, "variables"_s), variables, 0); +#if OS(WINDOWS) + variables->putDirect(vm, JSC::Identifier::fromString(vm, "asan"_s), JSC::jsNumber(0), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "control_flow_guard"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "coverage"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "dcheck_always_on"_s), JSC::jsNumber(0), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "debug_nghttp2"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "debug_node"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_lto"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_pgo_generate"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_pgo_use"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "error_on_warn"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "force_dynamic_crt"_s), JSC::jsNumber(0), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "napi_build_version"_s), JSC::jsNumber(Napi::DEFAULT_NAPI_VERSION), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "nasm_version"_s), JSC::jsNumber(2), 0); +#elif OS(MACOS) + variables->putDirect(vm, JSC::Identifier::fromString(vm, "asan"_s), JSC::jsNumber(0), 0); // TODO: ASAN_ENABLED + variables->putDirect(vm, JSC::Identifier::fromString(vm, "control_flow_guard"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "coverage"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "dcheck_always_on"_s), JSC::jsNumber(0), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "debug_nghttp2"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "debug_node"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_lto"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_pgo_generate"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_pgo_use"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "error_on_warn"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "force_dynamic_crt"_s), JSC::jsNumber(0), 0); +#if CPU(ARM64) + variables->putDirect(vm, JSC::Identifier::fromString(vm, "arm_fpu"_s), JSC::jsString(vm, String("neon"_s)), 0); +#endif +#elif OS(LINUX) + variables->putDirect(vm, JSC::Identifier::fromString(vm, "asan"_s), JSC::jsNumber(0), 0); // TODO: ASAN_ENABLED + variables->putDirect(vm, JSC::Identifier::fromString(vm, "control_flow_guard"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "coverage"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, 
JSC::Identifier::fromString(vm, "dcheck_always_on"_s), JSC::jsNumber(0), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "debug_nghttp2"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "debug_node"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_lto"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_pgo_generate"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_pgo_use"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "error_on_warn"_s), JSC::jsBoolean(false), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "force_dynamic_crt"_s), JSC::jsNumber(0), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "napi_build"_s), JSC::jsString(vm, String("0.0"_s)), 0); +#else +#error "Unsupported OS" +#endif + +#if CPU(X86_64) + variables->putDirect(vm, JSC::Identifier::fromString(vm, "host_arch"_s), JSC::jsString(vm, String("x64"_s)), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "target_arch"_s), JSC::jsString(vm, String("x64"_s)), 0); +#elif CPU(ARM64) + variables->putDirect(vm, JSC::Identifier::fromString(vm, "host_arch"_s), JSC::jsString(vm, String("arm64"_s)), 0); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "target_arch"_s), JSC::jsString(vm, String("arm64"_s)), 0); +#else +#error "Unsupported architecture" +#endif config->freeze(vm); return config; @@ -2431,6 +2476,22 @@ JSC_DEFINE_CUSTOM_SETTER(setProcessExecArgv, (JSGlobalObject * globalObject, Enc return true; } +JSC_DEFINE_CUSTOM_GETTER(processGetEval, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName)) +{ + Process* process = getProcessObject(globalObject, JSValue::decode(thisValue)); + if (!process) { + return JSValue::encode(jsUndefined()); + } + + return Bun__Process__getEval(globalObject); +} + +JSC_DEFINE_CUSTOM_SETTER(setProcessGetEval, (JSGlobalObject * globalObject, EncodedJSValue thisValue, EncodedJSValue encodedValue, PropertyName)) +{ + // dont allow setting eval from js + return true; +} + static JSValue constructBrowser(VM& vm, JSObject* processObject) { return jsBoolean(false); @@ -3165,9 +3226,9 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionOpenStdin, (JSGlobalObject * globalObje Zig::GlobalObject* global = defaultGlobalObject(globalObject); auto throwScope = DECLARE_THROW_SCOPE(vm); - if (JSValue stdinValue = global->processObject()->getIfPropertyExists(globalObject, Identifier::fromString(vm, "stdin"_s))) { - RETURN_IF_EXCEPTION(throwScope, {}); - + auto stdinValue = global->processObject()->getIfPropertyExists(globalObject, Identifier::fromString(vm, "stdin"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (stdinValue) { if (!stdinValue.isObject()) { throwTypeError(globalObject, throwScope, "stdin is not an object"_s); return {}; @@ -3175,7 +3236,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionOpenStdin, (JSGlobalObject * globalObje JSValue resumeValue = stdinValue.getObject()->getIfPropertyExists(globalObject, Identifier::fromString(vm, "resume"_s)); RETURN_IF_EXCEPTION(throwScope, {}); - if (!resumeValue.isUndefinedOrNull()) { + if (resumeValue && !resumeValue.isUndefinedOrNull()) { auto resumeFunction = jsDynamicCast(resumeValue); if (!resumeFunction) [[unlikely]] { throwTypeError(globalObject, throwScope, "stdin.resume is not a function"_s); @@ -3326,7 +3387,12 @@ void 
Process::queueNextTick(JSC::JSGlobalObject* globalObject, const ArgList& ar ASSERT(!args.isEmpty()); JSObject* nextTickFn = this->m_nextTickFunction.get(); - AsyncContextFrame::call(globalObject, nextTickFn, jsUndefined(), args); + auto* frame = jsDynamicCast<AsyncContextFrame*>(args.at(0)); + if (frame) { + frame->run(globalObject, nextTickFn, jsUndefined(), args); + } else { + AsyncContextFrame::call(globalObject, nextTickFn, jsUndefined(), args); + } RELEASE_AND_RETURN(scope, void()); } @@ -3355,7 +3421,7 @@ void Process::queueNextTick(JSC::JSGlobalObject* globalObject, JSValue value, JS template <size_t NumArgs> void Process::queueNextTick(JSC::JSGlobalObject* globalObject, JSValue func, const JSValue (&args)[NumArgs]) { - ASSERT_WITH_MESSAGE(func.isCallable(), "Must be a function for us to call"); + ASSERT_WITH_MESSAGE(func.isCallable() || func.inherits<AsyncContextFrame>(), "Must be a function for us to call"); MarkedArgumentBuffer argsBuffer; argsBuffer.ensureCapacity(NumArgs + 1); if (!func.isEmpty()) { @@ -3461,6 +3527,9 @@ static JSValue constructFeatures(VM& vm, JSObject* processObject) object->putDirect(vm, Identifier::fromString(vm, "tls_ocsp"_s), jsBoolean(true)); object->putDirect(vm, Identifier::fromString(vm, "tls"_s), jsBoolean(true)); object->putDirect(vm, Identifier::fromString(vm, "cached_builtins"_s), jsBoolean(true)); + object->putDirect(vm, Identifier::fromString(vm, "openssl_is_boringssl"_s), jsBoolean(true)); + object->putDirect(vm, Identifier::fromString(vm, "require_module"_s), jsBoolean(true)); + object->putDirect(vm, Identifier::fromString(vm, "typescript"_s), jsString(vm, String("transform"_s))); return object; } @@ -3723,6 +3792,7 @@ extern "C" void Process__emitErrorEvent(Zig::GlobalObject* global, EncodedJSValu @begin processObjectTable _debugEnd Process_stubEmptyFunction Function 0 _debugProcess Process_stubEmptyFunction Function 0 + _eval processGetEval CustomAccessor _fatalException Process_stubEmptyFunction Function 1 _getActiveHandles Process_stubFunctionReturningArray Function 0 _getActiveRequests Process_stubFunctionReturningArray Function 0 diff --git a/src/bun.js/bindings/BunString.cpp b/src/bun.js/bindings/BunString.cpp index 4e94e68980..68cf63cbbb 100644 --- a/src/bun.js/bindings/BunString.cpp +++ b/src/bun.js/bindings/BunString.cpp @@ -106,7 +106,6 @@ extern "C" JSC::EncodedJSValue BunString__createUTF8ForJS(JSC::JSGlobalObject* g extern "C" JSC::EncodedJSValue BunString__transferToJS(BunString* bunString, JSC::JSGlobalObject* globalObject) { auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); if (bunString->tag == BunStringTag::Empty || bunString->tag == BunStringTag::Dead) [[unlikely]] { return JSValue::encode(JSC::jsEmptyString(vm)); @@ -288,7 +287,7 @@ extern "C" JSC::EncodedJSValue BunString__toJS(JSC::JSGlobalObject* globalObject extern "C" BunString BunString__fromUTF16Unitialized(size_t length) { ASSERT(length > 0); - std::span<UChar> ptr; + std::span<char16_t> ptr; auto impl = WTF::StringImpl::tryCreateUninitialized(length, ptr); if (!impl) [[unlikely]] { return { .tag = BunStringTag::Dead }; @@ -312,7 +311,7 @@ extern "C" BunString BunString__fromUTF8(const char* bytes, size_t length) ASSERT(length > 0); if (simdutf::validate_utf8(bytes, length)) { size_t u16Length = simdutf::utf16_length_from_utf8(bytes, length); - std::span<UChar> ptr; + std::span<char16_t> ptr; auto impl = WTF::StringImpl::tryCreateUninitialized(static_cast<unsigned>(u16Length), ptr); if (!impl) [[unlikely]] { return { .tag = BunStringTag::Dead }; @@ -361,7 +360,7 @@ extern "C" BunString BunString__fromUTF16ToLatin1(const char16_t* bytes,
diff --git a/src/bun.js/bindings/BunString.cpp b/src/bun.js/bindings/BunString.cpp
index 4e94e68980..68cf63cbbb 100644
--- a/src/bun.js/bindings/BunString.cpp
+++ b/src/bun.js/bindings/BunString.cpp
@@ -106,7 +106,6 @@ extern "C" JSC::EncodedJSValue BunString__createUTF8ForJS(JSC::JSGlobalObject* g
 extern "C" JSC::EncodedJSValue BunString__transferToJS(BunString* bunString, JSC::JSGlobalObject* globalObject)
 {
     auto& vm = JSC::getVM(globalObject);
-    auto scope = DECLARE_THROW_SCOPE(vm);
 
     if (bunString->tag == BunStringTag::Empty || bunString->tag == BunStringTag::Dead) [[unlikely]] {
         return JSValue::encode(JSC::jsEmptyString(vm));
@@ -288,7 +287,7 @@ extern "C" JSC::EncodedJSValue BunString__toJS(JSC::JSGlobalObject* globalObject
 extern "C" BunString BunString__fromUTF16Unitialized(size_t length)
 {
     ASSERT(length > 0);
-    std::span<UChar> ptr;
+    std::span<char16_t> ptr;
     auto impl = WTF::StringImpl::tryCreateUninitialized(length, ptr);
     if (!impl) [[unlikely]] {
         return { .tag = BunStringTag::Dead };
@@ -312,7 +311,7 @@ extern "C" BunString BunString__fromUTF8(const char* bytes, size_t length)
     ASSERT(length > 0);
     if (simdutf::validate_utf8(bytes, length)) {
         size_t u16Length = simdutf::utf16_length_from_utf8(bytes, length);
-        std::span<UChar> ptr;
+        std::span<char16_t> ptr;
         auto impl = WTF::StringImpl::tryCreateUninitialized(static_cast<unsigned>(u16Length), ptr);
         if (!impl) [[unlikely]] {
             return { .tag = BunStringTag::Dead };
@@ -361,7 +360,7 @@ extern "C" BunString BunString__fromUTF16ToLatin1(const char16_t* bytes, size_t
 extern "C" BunString BunString__fromUTF16(const char16_t* bytes, size_t length)
 {
     ASSERT(length > 0);
-    std::span<UChar> ptr;
+    std::span<char16_t> ptr;
     auto impl = WTF::StringImpl::tryCreateUninitialized(length, ptr);
     if (!impl) [[unlikely]] {
         return { .tag = BunStringTag::Dead };
@@ -384,7 +383,7 @@ extern "C" BunString BunString__createStaticExternal(const char* bytes, size_t l
 {
     Ref<WTF::ExternalStringImpl> impl = isLatin1 ? WTF::ExternalStringImpl::createStatic({ reinterpret_cast<const LChar*>(bytes), length }) :
-                                                   WTF::ExternalStringImpl::createStatic({ reinterpret_cast<const UChar*>(bytes), length });
+                                                   WTF::ExternalStringImpl::createStatic({ reinterpret_cast<const char16_t*>(bytes), length });
     return { BunStringTag::WTFStringImpl, { .wtf = &impl.leakRef() } };
 }
@@ -393,7 +392,7 @@ extern "C" BunString BunString__createExternal(const char* bytes, size_t length,
 {
     Ref<WTF::ExternalStringImpl> impl = isLatin1 ? WTF::ExternalStringImpl::create({ reinterpret_cast<const LChar*>(bytes), length }, ctx, callback) :
-                                                   WTF::ExternalStringImpl::create({ reinterpret_cast<const UChar*>(bytes), length }, ctx, callback);
+                                                   WTF::ExternalStringImpl::create({ reinterpret_cast<const char16_t*>(bytes), length }, ctx, callback);
     return { BunStringTag::WTFStringImpl, { .wtf = &impl.leakRef() } };
 }
@@ -719,7 +718,7 @@ extern "C" BunString BunString__createExternalGloballyAllocatedLatin1(
 }
 
 extern "C" BunString BunString__createExternalGloballyAllocatedUTF16(
-    const UChar* bytes,
+    const char16_t* bytes,
     size_t length)
 {
     ASSERT(length > 0);
diff --git a/src/bun.js/bindings/CPUFeatures.cpp b/src/bun.js/bindings/CPUFeatures.cpp
index 367a2134e7..aba699b6fc 100644
--- a/src/bun.js/bindings/CPUFeatures.cpp
+++ b/src/bun.js/bindings/CPUFeatures.cpp
@@ -125,6 +125,6 @@ extern "C" uint8_t bun_cpu_features()
 #elif CPU(ARM64)
     return aarch64_cpu_features();
 #else
-    return 0;
+#error "Unknown architecture"
 #endif
 }
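`BunString__fromUTF8` above follows a size-then-fill pattern: measure the UTF-16 length, allocate an uninitialized `StringImpl`, and convert in place. A minimal sketch of that pattern, assuming the same WTF and simdutf APIs the file already uses:

```cpp
// Sketch of the allocate-then-convert pattern used by BunString__fromUTF8.
// Returns nullptr where the real code falls back (invalid UTF-8) or returns
// BunStringTag::Dead (allocation failure).
WTF::RefPtr<WTF::StringImpl> utf8ToUTF16(const char* bytes, size_t length)
{
    if (!simdutf::validate_utf8(bytes, length))
        return nullptr; // real code takes a non-UTF-16 path instead

    size_t u16Length = simdutf::utf16_length_from_utf8(bytes, length);
    std::span<char16_t> ptr;
    auto impl = WTF::StringImpl::tryCreateUninitialized(static_cast<unsigned>(u16Length), ptr);
    if (!impl) [[unlikely]]
        return nullptr; // maps to BunStringTag::Dead in the diff

    // Fill the uninitialized buffer in place; the length was computed above.
    simdutf::convert_utf8_to_utf16(bytes, length, ptr.data());
    return impl;
}
```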
diff --git a/src/bun.js/bindings/CallSitePrototype.cpp b/src/bun.js/bindings/CallSitePrototype.cpp
index b94b106043..35340632a0 100644
--- a/src/bun.js/bindings/CallSitePrototype.cpp
+++ b/src/bun.js/bindings/CallSitePrototype.cpp
@@ -55,11 +55,9 @@ ALWAYS_INLINE static CallSite* getCallSite(JSGlobalObject* globalObject, JSC::JS
 #define ENTER_PROTO_FUNC()                                                  \
     auto& vm = JSC::getVM(globalObject);                                    \
     auto scope = DECLARE_THROW_SCOPE(vm);                                   \
-                                                                            \
     CallSite* callSite = getCallSite(globalObject, callFrame->thisValue()); \
-    if (!callSite) {                                                        \
-        return JSC::JSValue::encode(JSC::jsUndefined());                    \
-    }
+    RETURN_IF_EXCEPTION(scope, {});                                         \
+    (void)callSite;
 
 static const HashTableValue CallSitePrototypeTableValues[] = {
 
@@ -251,7 +249,7 @@ JSC_DEFINE_HOST_FUNCTION(callSiteProtoFuncToString, (JSGlobalObject * globalObje
     ENTER_PROTO_FUNC();
     WTF::StringBuilder sb;
     callSite->formatAsString(vm, globalObject, sb);
-    return JSC::JSValue::encode(JSC::JSValue(jsString(vm, sb.toString())));
+    return JSC::JSValue::encode(jsString(vm, sb.toString()));
 }
 
 JSC_DEFINE_HOST_FUNCTION(callSiteProtoFuncToJSON, (JSGlobalObject * globalObject, JSC::CallFrame* callFrame))
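Since `ENTER_PROTO_FUNC` now relies on `getCallSite` to throw for a bad `this` (propagated via `RETURN_IF_EXCEPTION`) rather than silently returning `undefined`, the guard looks roughly like the sketch below; the cast target and error message are illustrative:

```cpp
// Illustrative guard in the style ENTER_PROTO_FUNC now expects: throw a
// TypeError on a bad |this| so the macro can propagate it.
ALWAYS_INLINE static CallSite* getCallSiteChecked(JSC::JSGlobalObject* globalObject, JSC::JSValue thisValue)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    auto* callSite = JSC::jsDynamicCast<CallSite*>(thisValue);
    if (!callSite) [[unlikely]] {
        JSC::throwTypeError(globalObject, scope, "Expected |this| to be a CallSite"_s);
        return nullptr;
    }
    return callSite;
}
```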
diff --git a/src/bun.js/bindings/CatchScope.zig b/src/bun.js/bindings/CatchScope.zig
new file mode 100644
index 0000000000..dca41bd0d1
--- /dev/null
+++ b/src/bun.js/bindings/CatchScope.zig
@@ -0,0 +1,202 @@
+// TODO determine size and alignment automatically
+const size = 56;
+const alignment = 8;
+
+/// Binding for JSC::CatchScope. This should be used rarely, only at translation boundaries between
+/// JSC's exception checking and Zig's. Make sure not to move it after creation. Use this if you are
+/// making an external call that has no other way to indicate an exception.
+///
+/// ```zig
+/// // Declare a CatchScope surrounding the call that may throw an exception
+/// var scope: CatchScope = undefined;
+/// scope.init(global, @src());
+/// defer scope.deinit();
+///
+/// const value: i32 = external_call(vm, foo, bar, baz);
+/// // Calling returnIfException() suffices to prove that we checked for an exception.
+/// // This function's caller does not need to use a CatchScope or ThrowScope
+/// // because it can use Zig error unions.
+/// try scope.returnIfException();
+/// return value;
+/// ```
+pub const CatchScope = struct {
+    bytes: [size]u8 align(alignment),
+    /// Pointer to `bytes`, set by `init()`, used to assert that the location did not change
+    location: if (Environment.ci_assert) *u8 else void,
+
+    pub fn init(
+        self: *CatchScope,
+        global: *jsc.JSGlobalObject,
+        src: std.builtin.SourceLocation,
+    ) void {
+        CatchScope__construct(
+            &self.bytes,
+            global,
+            src.fn_name,
+            src.file,
+            src.line,
+            size,
+            alignment,
+        );
+
+        self.* = .{
+            .bytes = self.bytes,
+            .location = if (Environment.ci_assert) &self.bytes[0],
+        };
+    }
+
+    /// Generate a useful message including where the exception was thrown.
+    /// Only intended to be called when there is a pending exception.
+    fn assertionFailure(self: *CatchScope, proof: *jsc.Exception) noreturn {
+        _ = proof;
+        bun.assert(self.location == &self.bytes[0]);
+        CatchScope__assertNoException(&self.bytes);
+        @panic("assertionFailure called without a pending exception");
+    }
+
+    pub fn hasException(self: *CatchScope) bool {
+        return self.exception() != null;
+    }
+
+    /// Get the thrown exception if it exists (like scope.exception() in C++)
+    pub fn exception(self: *CatchScope) ?*jsc.Exception {
+        if (comptime Environment.ci_assert) bun.assert(self.location == &self.bytes[0]);
+        return CatchScope__pureException(&self.bytes);
+    }
+
+    /// Get the thrown exception if it exists, or if an unhandled trap causes an exception to be thrown
+    pub fn exceptionIncludingTraps(self: *CatchScope) ?*jsc.Exception {
+        if (comptime Environment.ci_assert) bun.assert(self.location == &self.bytes[0]);
+        return CatchScope__exceptionIncludingTraps(&self.bytes);
+    }
+
+    /// Intended for use with `try`. Returns if there is already a pending exception or if traps cause
+    /// an exception to be thrown (this is the same as how RETURN_IF_EXCEPTION behaves in C++)
+    pub fn returnIfException(self: *CatchScope) bun.JSError!void {
+        if (self.exceptionIncludingTraps() != null) return error.JSError;
+    }
+
+    /// Asserts there has not been any exception thrown.
+    pub fn assertNoException(self: *CatchScope) void {
+        if (comptime Environment.ci_assert) {
+            if (self.exception()) |e| self.assertionFailure(e);
+        }
+    }
+
+    /// Asserts that there is or is not an exception according to the value of `should_have_exception`.
+    /// Prefer over `assert(scope.hasException() == ...)` because if there is an unexpected exception,
+    /// this function prints a trace of where it was thrown.
+    pub fn assertExceptionPresenceMatches(self: *CatchScope, should_have_exception: bool) void {
+        if (comptime Environment.ci_assert) {
+            if (should_have_exception) {
+                bun.assertf(self.hasException(), "Expected an exception to be thrown", .{});
+            } else {
+                self.assertNoException();
+            }
+        }
+    }
+
+    /// If no exception, returns.
+    /// If termination exception, returns JSExecutionTerminated (so you can `try`)
+    /// If non-termination exception, assertion failure.
+    pub fn assertNoExceptionExceptTermination(self: *CatchScope) bun.JSExecutionTerminated!void {
+        if (self.exception()) |e| {
+            if (jsc.JSValue.fromCell(e).isTerminationException())
+                return error.JSExecutionTerminated
+            else if (comptime Environment.ci_assert)
+                self.assertionFailure(e);
+            // Unconditionally panicking here is worse for our users.
+        }
+    }
+
+    pub fn deinit(self: *CatchScope) void {
+        if (comptime Environment.ci_assert) bun.assert(self.location == &self.bytes[0]);
+        CatchScope__destruct(&self.bytes);
+        self.bytes = undefined;
+    }
+};
+
+/// Limited subset of CatchScope functionality, for when you have a different way to detect
+/// exceptions and you only need a CatchScope to prove that you are checking exceptions correctly.
+/// Gated by `Environment.ci_assert`.
+///
+/// ```zig
+/// var scope: ExceptionValidationScope = undefined;
+/// // these do nothing when ci_assert == false
+/// scope.init(global, @src());
+/// defer scope.deinit();
+///
+/// const maybe_empty: JSValue = externalFunction(global, foo, bar, baz);
+/// // does nothing when ci_assert == false
+/// // with assertions on, this call serves as proof that you checked for an exception
+/// scope.assertExceptionPresenceMatches(maybe_empty == .zero);
+/// // you decide whether to return JSError using the return value instead of the scope
+/// return if (value == .zero) error.JSError else value;
+/// ```
+pub const ExceptionValidationScope = struct {
+    scope: if (Environment.ci_assert) CatchScope else void,
+
+    pub fn init(
+        self: *ExceptionValidationScope,
+        global: *jsc.JSGlobalObject,
+        src: std.builtin.SourceLocation,
+    ) void {
+        if (Environment.ci_assert) self.scope.init(global, src);
+    }
+
+    /// Asserts there has not been any exception thrown.
+    pub fn assertNoException(self: *ExceptionValidationScope) void {
+        if (Environment.ci_assert) {
+            self.scope.assertNoException();
+        }
+    }
+
+    /// Asserts that there is or is not an exception according to the value of `should_have_exception`.
+    /// Prefer over `assert(scope.hasException() == ...)` because if there is an unexpected exception,
+    /// this function prints a trace of where it was thrown.
+    pub fn assertExceptionPresenceMatches(self: *ExceptionValidationScope, should_have_exception: bool) void {
+        if (Environment.ci_assert) {
+            self.scope.assertExceptionPresenceMatches(should_have_exception);
+        }
+    }
+
+    /// If no exception, returns.
+    /// If termination exception, returns JSExecutionTerminated (so you can `try`)
+    /// If non-termination exception, assertion failure.
+    pub fn assertNoExceptionExceptTermination(self: *ExceptionValidationScope) bun.JSExecutionTerminated!void {
+        if (Environment.ci_assert) {
+            return self.scope.assertNoExceptionExceptTermination();
+        }
+    }
+
+    /// Inconveniently named on purpose; this is only needed for some weird edge cases
+    pub fn hasExceptionOrFalseWhenAssertionsAreDisabled(self: *ExceptionValidationScope) bool {
+        return if (Environment.ci_assert) self.scope.hasException() else false;
+    }
+
+    pub fn deinit(self: *ExceptionValidationScope) void {
+        if (Environment.ci_assert) self.scope.deinit();
+    }
+};
+
+extern fn CatchScope__construct(
+    ptr: *align(alignment) [size]u8,
+    global: *jsc.JSGlobalObject,
+    function: [*:0]const u8,
+    file: [*:0]const u8,
+    line: c_uint,
+    size: usize,
+    alignment: usize,
+) void;
+/// only returns exceptions that have already been thrown. does not check traps
+extern fn CatchScope__pureException(ptr: *align(alignment) [size]u8) ?*jsc.Exception;
+/// returns if an exception was already thrown, or if a trap (like another thread requesting
+/// termination) causes an exception to be thrown
+extern fn CatchScope__exceptionIncludingTraps(ptr: *align(alignment) [size]u8) ?*jsc.Exception;
+extern fn CatchScope__assertNoException(ptr: *align(alignment) [size]u8) void;
+extern fn CatchScope__destruct(ptr: *align(alignment) [size]u8) void;
+
+const std = @import("std");
+const bun = @import("bun");
+const jsc = bun.jsc;
+const Environment = bun.Environment;
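For readers more familiar with the C++ side, the Zig `CatchScope` mirrors this JSC idiom; `doSomethingThatMayThrow` and `reportException` below are placeholders, not APIs from the tree:

```cpp
// The C++ discipline the Zig wrapper reproduces: open a CatchScope, make the
// fallible call, then either clear-and-handle or propagate the exception.
static bool checkAndReport(JSC::JSGlobalObject* globalObject)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_CATCH_SCOPE(vm);
    doSomethingThatMayThrow(globalObject); // placeholder for the external call
    if (JSC::Exception* e = scope.exception()) {
        // Zig's returnIfException() surfaces this as error.JSError instead.
        scope.clearException();
        reportException(globalObject, e); // placeholder for caller-specific handling
        return false;
    }
    return true;
}
```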
diff --git a/src/bun.js/bindings/CatchScopeBinding.cpp b/src/bun.js/bindings/CatchScopeBinding.cpp
new file mode 100644
index 0000000000..46b9a5458d
--- /dev/null
+++ b/src/bun.js/bindings/CatchScopeBinding.cpp
@@ -0,0 +1,59 @@
+#include <JavaScriptCore/CatchScope.h>
+
+using JSC::CatchScope;
+
+extern "C" void CatchScope__construct(
+    void* ptr,
+    JSC::JSGlobalObject* globalObject,
+    const char* function,
+    const char* file,
+    unsigned line,
+    size_t size,
+    size_t alignment)
+{
+    // validate that Zig is correct about what the size and alignment should be
+    ASSERT(size >= sizeof(CatchScope));
+    ASSERT(alignment >= alignof(CatchScope));
+    ASSERT((uintptr_t)ptr % alignment == 0);
+
+#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
+    new (ptr) JSC::CatchScope(JSC::getVM(globalObject),
+        JSC::ExceptionEventLocation { currentStackPointer(), function, file, line });
+#else
+    (void)function;
+    (void)file;
+    (void)line;
+    new (ptr) JSC::CatchScope(JSC::getVM(globalObject));
+#endif
+}
+
+extern "C" JSC::Exception* CatchScope__pureException(void* ptr)
+{
+    ASSERT((uintptr_t)ptr % alignof(CatchScope) == 0);
+    return static_cast<CatchScope*>(ptr)->exception();
+}
+
+extern "C" JSC::Exception* CatchScope__exceptionIncludingTraps(void* ptr)
+{
+    ASSERT((uintptr_t)ptr % alignof(CatchScope) == 0);
+    auto* scope = static_cast<CatchScope*>(ptr);
+    // this is different than `return scope->exception()` because `RETURN_IF_EXCEPTION` also checks
+    // if there are traps that should throw an exception (like a termination request from another
+    // thread)
+    RETURN_IF_EXCEPTION(*scope, scope->exception());
+    return nullptr;
+}
+
+extern "C" void CatchScope__destruct(void* ptr)
+{
+    ASSERT((uintptr_t)ptr % alignof(CatchScope) == 0);
+    static_cast<CatchScope*>(ptr)->~CatchScope();
+}
+
+extern "C" void CatchScope__assertNoException(void* ptr)
+{
+    ASSERT((uintptr_t)ptr % alignof(CatchScope) == 0);
+    // this function assumes it should assert in all build modes, anything else would be confusing.
+    // Zig should only call CatchScope__assertNoException if it wants the assertion.
+    static_cast<CatchScope*>(ptr)->releaseAssertNoException();
+}
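The binding's core trick is constructing a C++ object inside caller-provided opaque storage so Zig can hold it by value. The same pattern in miniature, using a hypothetical `Widget` rather than code from the tree:

```cpp
// Placement-new into foreign aligned storage across an FFI boundary. The
// caller (here, Zig) owns the bytes and promises size/alignment up front.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <new>

struct Widget { int state = 0; ~Widget() { /* release resources */ } };

extern "C" void Widget__construct(void* ptr, size_t size, size_t alignment)
{
    assert(size >= sizeof(Widget));
    assert(alignment >= alignof(Widget));
    assert((uintptr_t)ptr % alignment == 0);
    new (ptr) Widget(); // object now lives inside the foreign buffer; it must not move
}

extern "C" void Widget__destruct(void* ptr)
{
    static_cast<Widget*>(ptr)->~Widget(); // manual dtor call, mirrors CatchScope__destruct
}
```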
diff --git a/src/bun.js/bindings/Cookie.cpp b/src/bun.js/bindings/Cookie.cpp
index 46d348adfe..3859e591a7 100644
--- a/src/bun.js/bindings/Cookie.cpp
+++ b/src/bun.js/bindings/Cookie.cpp
@@ -85,12 +85,12 @@ ExceptionOr<Ref<Cookie>> Cookie::parse(StringView cookieString)
         return Exception { TypeError, "Invalid cookie string: no '=' found"_s };
     }
 
-    String name = cookiePair.substring(0, firstEqualsPos).trim(isASCIIWhitespace<UChar>).toString();
+    String name = cookiePair.substring(0, firstEqualsPos).trim(isASCIIWhitespace<char16_t>).toString();
     if (name.isEmpty())
         return Exception { TypeError, "Invalid cookie string: name cannot be empty"_s };
     ASSERT(isValidHTTPHeaderValue(name));
 
-    String value = cookiePair.substring(firstEqualsPos + 1).trim(isASCIIWhitespace<UChar>).toString();
+    String value = cookiePair.substring(firstEqualsPos + 1).trim(isASCIIWhitespace<char16_t>).toString();
 
     // Default values
     String domain;
@@ -108,15 +108,15 @@ ExceptionOr<Ref<Cookie>> Cookie::parse(StringView cookieString)
         auto attributesString = cookieString.substring(firstSemicolonPos + 1);
 
         for (auto attribute : attributesString.split(';')) {
-            auto trimmedAttribute = attribute.trim(isASCIIWhitespace<UChar>);
+            auto trimmedAttribute = attribute.trim(isASCIIWhitespace<char16_t>);
             size_t assignmentPos = trimmedAttribute.find('=');
 
             String attributeName;
             String attributeValue;
 
             if (assignmentPos != notFound) {
-                attributeName = trimmedAttribute.substring(0, assignmentPos).trim(isASCIIWhitespace<UChar>).convertToASCIILowercase();
-                attributeValue = trimmedAttribute.substring(assignmentPos + 1).trim(isASCIIWhitespace<UChar>).toString();
+                attributeName = trimmedAttribute.substring(0, assignmentPos).trim(isASCIIWhitespace<char16_t>).convertToASCIILowercase();
+                attributeValue = trimmedAttribute.substring(assignmentPos + 1).trim(isASCIIWhitespace<char16_t>).toString();
             } else {
                 attributeName = trimmedAttribute.convertToASCIILowercase();
                 attributeValue = emptyString();
@@ -182,7 +182,7 @@ String Cookie::toString(JSC::VM& vm) const
     return builder.toString();
 }
 
-static inline bool isValidCharacterInCookieName(UChar c)
+static inline bool isValidCharacterInCookieName(char16_t c)
 {
     return (c >= 0x21 && c <= 0x3A) || (c == 0x3C) || (c >= 0x3E && c <= 0x7E);
 }
@@ -201,7 +201,7 @@ bool Cookie::isValidCookieName(const String& name)
     }
     return true;
 }
-static inline bool isValidCharacterInCookiePath(UChar c)
+static inline bool isValidCharacterInCookiePath(char16_t c)
 {
     return (c >= 0x20 && c <= 0x3A) || (c >= 0x3D && c <= 0x7E);
 }
@@ -220,7 +220,7 @@ bool Cookie::isValidCookiePath(const String& path)
     return true;
 }
 
-static inline bool isValidCharacterInCookieDomain(UChar c)
+static inline bool isValidCharacterInCookieDomain(char16_t c)
 {
     return (c >= 'a' && c <= 'z')
         || (c >= '0' && c <= '9')
         || c == '.'
|| c == '-'; } diff --git a/src/bun.js/bindings/CookieMap.cpp b/src/bun.js/bindings/CookieMap.cpp index aabca2aa4f..00e030d9af 100644 --- a/src/bun.js/bindings/CookieMap.cpp +++ b/src/bun.js/bindings/CookieMap.cpp @@ -94,8 +94,8 @@ ExceptionOr> CookieMap::create(std::variant continue; } - auto nameView = pair.substring(0, equalsPos).trim(isASCIIWhitespace); - auto valueView = pair.substring(equalsPos + 1).trim(isASCIIWhitespace); + auto nameView = pair.substring(0, equalsPos).trim(isASCIIWhitespace); + auto valueView = pair.substring(equalsPos + 1).trim(isASCIIWhitespace); if (nameView.isEmpty()) { continue; diff --git a/src/bun.js/bindings/ErrorCode.cpp b/src/bun.js/bindings/ErrorCode.cpp index 168a98f3e1..bf8768a6af 100644 --- a/src/bun.js/bindings/ErrorCode.cpp +++ b/src/bun.js/bindings/ErrorCode.cpp @@ -189,6 +189,7 @@ static Structure* createErrorStructure(JSC::VM& vm, JSGlobalObject* globalObject JSObject* ErrorCodeCache::createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, JSValue message, JSValue options) { + auto scope = DECLARE_CATCH_SCOPE(vm); auto* cache = errorCache(globalObject); const auto& data = errors[static_cast(code)]; if (!cache->internalField(static_cast(code))) { @@ -197,7 +198,15 @@ JSObject* ErrorCodeCache::createError(VM& vm, Zig::GlobalObject* globalObject, E } auto* structure = jsCast(cache->internalField(static_cast(code)).get()); - return JSC::ErrorInstance::create(globalObject, structure, message, options, nullptr, JSC::RuntimeType::TypeNothing, data.type, true); + auto* created_error = JSC::ErrorInstance::create(globalObject, structure, message, options, nullptr, JSC::RuntimeType::TypeNothing, data.type, true); + if (auto* thrown_exception = scope.exception()) [[unlikely]] { + scope.clearException(); + // TODO investigate what can throw here and whether it will throw non-objects + // (this is better than before where we would have returned nullptr from createError if any + // exception were thrown by ErrorInstance::create) + return jsCast(thrown_exception->value()); + } + return created_error; } JSObject* createError(VM& vm, Zig::GlobalObject* globalObject, ErrorCode code, const String& message) @@ -254,7 +263,10 @@ void JSValueToStringSafe(JSC::JSGlobalObject* globalObject, WTF::StringBuilder& switch (cell->type()) { case JSC::JSType::StringType: { JSString* jsString = jsDynamicCast(cell); + auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); auto str = jsString->view(globalObject); + RETURN_IF_EXCEPTION(scope, ); if (quotesLikeInspect) { if (str->contains('\'')) { builder.append('"'); @@ -268,7 +280,7 @@ void JSValueToStringSafe(JSC::JSGlobalObject* globalObject, WTF::StringBuilder& } } } else { - const auto span = str->span(); + const auto span = str->span(); for (const auto c : span) { if (c == '"') { builder.append("\\\""_s); @@ -355,8 +367,9 @@ void determineSpecificType(JSC::VM& vm, JSC::JSGlobalObject* globalObject, WTF:: } if (value.isBigInt()) { auto str = value.toStringOrNull(globalObject); - if (!str) return void(); + RETURN_IF_EXCEPTION(scope, void()); auto view = str->view(globalObject); + RETURN_IF_EXCEPTION(scope, ); builder.append("type bigint ("_s); builder.append(view); builder.append("n)"_s); @@ -390,6 +403,7 @@ void determineSpecificType(JSC::VM& vm, JSC::JSGlobalObject* globalObject, WTF:: if (cell->isString()) { auto* jsString = jsCast(cell); auto str = jsString->view(globalObject); + RETURN_IF_EXCEPTION(scope, ); StringView view = str; @@ -415,7 +429,7 @@ void determineSpecificType(JSC::VM& vm, 
JSC::JSGlobalObject* globalObject, WTF:: } } } else { - const auto span = view.span(); + const auto span = view.span(); for (const auto c : span) { if (c == '"') { builder.append("\\\""_s); @@ -449,6 +463,7 @@ void determineSpecificType(JSC::VM& vm, JSC::JSGlobalObject* globalObject, WTF:: RETURN_IF_EXCEPTION(scope, void()); builder.append("an instance of "_s); auto view = str->view(globalObject); + RETURN_IF_EXCEPTION(scope, ); builder.append(view); return; } @@ -539,26 +554,31 @@ WTF::String ERR_INVALID_ARG_TYPE(JSC::ThrowScope& scope, JSC::JSGlobalObject* gl auto* str = expected_types.at(0).toString(globalObject); RETURN_IF_EXCEPTION(scope, {}); result.append(str->view(globalObject)); + RETURN_IF_EXCEPTION(scope, {}); } else if (length == 2) { auto* str1 = expected_types.at(0).toString(globalObject); RETURN_IF_EXCEPTION(scope, {}); result.append(str1->view(globalObject)); + RETURN_IF_EXCEPTION(scope, {}); result.append(" or "_s); auto* str2 = expected_types.at(1).toString(globalObject); RETURN_IF_EXCEPTION(scope, {}); result.append(str2->view(globalObject)); + RETURN_IF_EXCEPTION(scope, {}); } else { for (unsigned i = 0, end = length - 1; i < end; i++) { JSValue expected_type = expected_types.at(i); auto* str = expected_type.toString(globalObject); RETURN_IF_EXCEPTION(scope, {}); result.append(str->view(globalObject)); + RETURN_IF_EXCEPTION(scope, {}); result.append(", "_s); } result.append("or "_s); auto* str = expected_types.at(length - 1).toString(globalObject); RETURN_IF_EXCEPTION(scope, {}); result.append(str->view(globalObject)); + RETURN_IF_EXCEPTION(scope, {}); } result.append(". Received "_s); @@ -625,26 +645,29 @@ namespace ERR { EncodedJSValue INVALID_ARG_TYPE(ThrowScope& scope, JSGlobalObject* globalObject, ASCIILiteral message) { scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_TYPE, message)); + scope.release(); return {}; } JSC::EncodedJSValue INVALID_ARG_TYPE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, const WTF::String& arg_name, const WTF::String& expected_type, JSC::JSValue val_actual_value) { auto message = Message::ERR_INVALID_ARG_TYPE(throwScope, globalObject, arg_name, expected_type, val_actual_value); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_TYPE, message)); + throwScope.release(); return {}; } JSC::EncodedJSValue INVALID_ARG_TYPE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue val_arg_name, const WTF::String& expected_type, JSC::JSValue val_actual_value) { auto* jsString = val_arg_name.toString(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); auto arg_name = jsString->view(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); auto message = Message::ERR_INVALID_ARG_TYPE(throwScope, globalObject, arg_name, expected_type, val_actual_value); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_TYPE, message)); + throwScope.release(); return {}; } @@ -660,9 +683,10 @@ JSC::EncodedJSValue INVALID_ARG_TYPE_INSTANCE(JSC::ThrowScope& throwScope, JSC:: builder.append(expected_instance_types); builder.append(". 
Received "_s); determineSpecificType(vm, globalObject, builder, val_actual_value); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_TYPE, builder.toString())); + throwScope.release(); return {}; } @@ -676,9 +700,10 @@ JSC::EncodedJSValue INVALID_ARG_TYPE_INSTANCE(JSC::ThrowScope& throwScope, JSC:: builder.append(expected_instance_types); builder.append(". Received "_s); determineSpecificType(vm, globalObject, builder, val_actual_value); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_TYPE, builder.toString())); + throwScope.release(); return {}; } @@ -696,9 +721,10 @@ JSC::EncodedJSValue INVALID_ARG_INSTANCE(JSC::ThrowScope& throwScope, JSC::JSGlo builder.append(expected_type); builder.append(". Received "_s); determineSpecificType(vm, globalObject, builder, val_actual_value); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_TYPE, builder.toString())); + throwScope.release(); return {}; } @@ -713,18 +739,19 @@ JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObjec builder.append(upper); builder.append(". Received "_s); JSValueToStringSafe(globalObject, builder, actual); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_OUT_OF_RANGE, builder.toString())); + throwScope.release(); return {}; } JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue arg_name_val, double lower, double upper, JSC::JSValue actual) { auto* jsString = arg_name_val.toString(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); auto arg_name = jsString->view(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); WTF::StringBuilder builder; builder.append("The value of \""_s); @@ -735,18 +762,19 @@ JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObjec builder.append(upper); builder.append(". Received "_s); JSValueToStringSafe(globalObject, builder, actual); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_OUT_OF_RANGE, builder.toString())); + throwScope.release(); return {}; } JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue arg_name_val, double bound_num, Bound bound, JSC::JSValue actual) { auto* jsString = arg_name_val.toString(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); auto arg_name = jsString->view(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); WTF::StringBuilder builder; builder.append("The value of \""_s); @@ -756,18 +784,19 @@ JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObjec builder.append(bound_num); builder.append(". 
Received "_s); JSValueToStringSafe(globalObject, builder, actual); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_OUT_OF_RANGE, builder.toString())); + throwScope.release(); return {}; } JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue arg_name_val, const WTF::String& msg, JSC::JSValue actual) { auto* jsString = arg_name_val.toString(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); auto arg_name = jsString->view(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); WTF::StringBuilder builder; builder.append("The value of \""_s); @@ -776,9 +805,10 @@ JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObjec builder.append(msg); builder.append(". Received "_s); JSValueToStringSafe(globalObject, builder, actual); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_OUT_OF_RANGE, builder.toString())); + throwScope.release(); return {}; } @@ -791,15 +821,17 @@ JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObjec builder.append(msg); builder.append(". Received "_s); JSValueToStringSafe(globalObject, builder, actual); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_OUT_OF_RANGE, builder.toString())); + throwScope.release(); return {}; } JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, ASCIILiteral message) { scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_OUT_OF_RANGE, message)); + scope.release(); return {}; } @@ -816,9 +848,10 @@ JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobal builder.append(reason); builder.append(". Received "_s); JSValueToStringSafe(globalObject, builder, value, true); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, builder.toString())); + throwScope.release(); return {}; } JSC::EncodedJSValue INVALID_ARG_VALUE_RangeError(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral name, JSC::JSValue value, const WTF::String& reason) @@ -835,11 +868,12 @@ JSC::EncodedJSValue INVALID_ARG_VALUE_RangeError(JSC::ThrowScope& throwScope, JS builder.append(reason); builder.append(". 
Received "_s); JSValueToStringSafe(globalObject, builder, value, true); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); auto* structure = createErrorStructure(vm, globalObject, ErrorType::RangeError, "RangeError"_s, "ERR_INVALID_ARG_VALUE"_s); auto error = JSC::ErrorInstance::create(vm, structure, builder.toString(), jsUndefined(), nullptr, JSC::RuntimeType::TypeNothing, ErrorType::RangeError, true); throwScope.throwException(globalObject, error); + throwScope.release(); return {}; } JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue name, JSC::JSValue value, const WTF::String& reason) @@ -848,15 +882,16 @@ JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobal builder.append("The argument '"_s); auto& vm = JSC::getVM(globalObject); determineSpecificType(vm, globalObject, builder, name); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); builder.append("' "_s); builder.append(reason); builder.append(". Received "_s); JSValueToStringSafe(globalObject, builder, value, true); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, builder.toString())); + throwScope.release(); return {}; } @@ -866,23 +901,24 @@ JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobal WTF::StringBuilder builder; builder.append("The argument '"_s); JSValueToStringSafe(globalObject, builder, name); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); builder.append("' "_s); builder.append(reason); unsigned length = oneOf->length(); for (size_t i = 0; i < length; i++) { JSValue index = oneOf->getIndex(globalObject, i); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); if (index.isString()) { JSString* str = index.toString(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); builder.append('\''); builder.append(str->view(globalObject)); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); builder.append('\''); } else { JSValueToStringSafe(globalObject, builder, index); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); } if (i < length - 1) { @@ -891,9 +927,10 @@ JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobal } builder.append(". Received "_s); JSValueToStringSafe(globalObject, builder, value, true); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, builder.toString())); + throwScope.release(); return {}; } @@ -922,6 +959,7 @@ JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobal } throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, builder.toString())); + throwScope.release(); return {}; } @@ -948,6 +986,7 @@ JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobal } throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, builder.toString())); + throwScope.release(); return {}; } @@ -967,9 +1006,10 @@ JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobal builder.append(". 
Received "_s); JSValueToStringSafe(globalObject, builder, value, true); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, builder.toString())); + throwScope.release(); return {}; } @@ -977,18 +1017,21 @@ JSC::EncodedJSValue INVALID_URL_SCHEME(JSC::ThrowScope& throwScope, JSC::JSGloba { auto message = makeString("The URL must be of scheme "_s, expectedScheme); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_URL_SCHEME, message)); + throwScope.release(); return {}; } JSC::EncodedJSValue INVALID_FILE_URL_HOST(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, const WTF::String& platform) { auto message = makeString("File URL host must be \"localhost\" or empty on "_s, platform); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_FILE_URL_HOST, message)); + throwScope.release(); return {}; } JSC::EncodedJSValue INVALID_FILE_URL_HOST(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, const ASCIILiteral platform) { auto message = makeString("File URL host must be \"localhost\" or empty on "_s, platform); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_FILE_URL_HOST, message)); + throwScope.release(); return {}; } /// `File URL path {suffix}` @@ -996,6 +1039,7 @@ JSC::EncodedJSValue INVALID_FILE_URL_PATH(JSC::ThrowScope& throwScope, JSC::JSGl { auto message = makeString("File URL path "_s, suffix); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_FILE_URL_PATH, message)); + throwScope.release(); return {}; } @@ -1003,18 +1047,20 @@ JSC::EncodedJSValue UNKNOWN_ENCODING(JSC::ThrowScope& throwScope, JSC::JSGlobalO { auto message = makeString("Unknown encoding: "_s, encoding); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_UNKNOWN_ENCODING, message)); + throwScope.release(); return {}; } JSC::EncodedJSValue UNKNOWN_ENCODING(JSC::ThrowScope& scope, JSGlobalObject* globalObject, JSValue encodingValue) { WTF::String encodingString = encodingValue.toWTFString(globalObject); - RETURN_IF_EXCEPTION(scope, {}); + RELEASE_RETURN_IF_EXCEPTION(scope, {}); WTF::StringBuilder builder; builder.append("Unknown encoding: "_s); builder.append(encodingString); scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_UNKNOWN_ENCODING, builder.toString())); + scope.release(); return {}; } @@ -1022,6 +1068,7 @@ JSC::EncodedJSValue INVALID_STATE(JSC::ThrowScope& throwScope, JSC::JSGlobalObje { auto message = makeString("Invalid state: "_s, statemsg); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_STATE, message)); + throwScope.release(); return {}; } @@ -1029,6 +1076,7 @@ JSC::EncodedJSValue STRING_TOO_LONG(JSC::ThrowScope& throwScope, JSC::JSGlobalOb { auto message = makeString("Cannot create a string longer than "_s, WTF::String ::MaxLength, " characters"_s); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_STRING_TOO_LONG, message)); + throwScope.release(); return {}; } @@ -1036,9 +1084,11 @@ JSC::EncodedJSValue BUFFER_OUT_OF_BOUNDS(JSC::ThrowScope& throwScope, JSC::JSGlo { if (!name.isEmpty()) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_BUFFER_OUT_OF_BOUNDS, makeString("\""_s, name, "\" is outside of buffer bounds"_s))); + 
throwScope.release(); return {}; } throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_BUFFER_OUT_OF_BOUNDS, "Attempt to access memory outside buffer bounds"_s)); + throwScope.release(); return {}; } @@ -1047,11 +1097,12 @@ JSC::EncodedJSValue UNKNOWN_SIGNAL(JSC::ThrowScope& throwScope, JSC::JSGlobalObj WTF::StringBuilder builder; builder.append("Unknown signal: "_s); JSValueToStringSafe(globalObject, builder, signal); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); if (triedUppercase) { builder.append(" (signals must use all capital letters)"_s); } throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_UNKNOWN_SIGNAL, builder.toString())); + throwScope.release(); return {}; } @@ -1060,14 +1111,15 @@ JSC::EncodedJSValue SOCKET_BAD_PORT(JSC::ThrowScope& throwScope, JSC::JSGlobalOb ASCIILiteral op = allowZero ? ">="_s : ">"_s; WTF::StringBuilder builder; JSValueToStringSafe(globalObject, builder, name); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); builder.append(" should be "_s); builder.append(op); builder.append(" 0 and < 65536. Received "_s); JSValueToStringSafe(globalObject, builder, port); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_SOCKET_BAD_PORT, builder.toString())); + throwScope.release(); return {}; } @@ -1075,21 +1127,24 @@ JSC::EncodedJSValue UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET(JSC::ThrowScope& thro { auto message = "`process.setupUncaughtExceptionCapture()` was called while a capture callback was already active"_s; throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET, message)); + throwScope.release(); return {}; } JSC::EncodedJSValue ASSERTION(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue msg) { auto msg_string = msg.toWTFString(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); auto message = msg_string; throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_ASSERTION, message)); + throwScope.release(); return {}; } JSC::EncodedJSValue ASSERTION(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, ASCIILiteral msg) { auto message = msg; throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_ASSERTION, message)); + throwScope.release(); return {}; } @@ -1097,12 +1152,14 @@ JSC::EncodedJSValue CRYPTO_INVALID_CURVE(JSC::ThrowScope& throwScope, JSC::JSGlo { auto message = "Invalid EC curve name"_s; throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_CURVE, message)); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_INVALID_KEYTYPE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral message) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_KEYTYPE, message)); + throwScope.release(); return {}; } @@ -1110,6 +1167,7 @@ JSC::EncodedJSValue CRYPTO_INVALID_KEYTYPE(JSC::ThrowScope& throwScope, JSC::JSG { auto message = "Invalid key type"_s; throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_KEYTYPE, message)); + throwScope.release(); return {}; } @@ -1119,60 +1177,70 @@ JSC::EncodedJSValue CRYPTO_UNKNOWN_CIPHER(JSC::ThrowScope& throwScope, 
JSC::JSGl builder.append("Unknown cipher: "_s); builder.append(cipherName); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_UNKNOWN_CIPHER, builder.toString())); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_INVALID_AUTH_TAG(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, const WTF::String& message) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_AUTH_TAG, message)); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_INVALID_IV(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_IV, "Invalid initialization vector"_s)); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_UNSUPPORTED_OPERATION(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral message) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_UNSUPPORTED_OPERATION, message)); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_UNSUPPORTED_OPERATION(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_UNSUPPORTED_OPERATION, "Unsupported crypto operation"_s)); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_INVALID_KEYLEN(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_KEYLEN, "Invalid key length"_s)); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_INVALID_STATE(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral message) { scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_STATE, message)); + scope.release(); return {}; } JSC::EncodedJSValue CRYPTO_INVALID_MESSAGELEN(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_MESSAGELEN, "Invalid message length"_s)); + throwScope.release(); return {}; } JSC::EncodedJSValue MISSING_ARGS(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral message) { scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_MISSING_ARGS, message)); + scope.release(); return {}; } JSC::EncodedJSValue CRYPTO_OPERATION_FAILED(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, ASCIILiteral message) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_OPERATION_FAILED, message)); + throwScope.release(); return {}; } @@ -1180,6 +1248,7 @@ JSC::EncodedJSValue CRYPTO_INVALID_KEYPAIR(JSC::ThrowScope& throwScope, JSC::JSG { auto message = "Invalid key pair"_s; throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_KEYPAIR, message)); + throwScope.release(); return {}; } @@ -1187,6 +1256,7 @@ JSC::EncodedJSValue CRYPTO_ECDH_INVALID_PUBLIC_KEY(JSC::ThrowScope& throwScope, { auto message = "Public key is not valid for specified curve"_s; throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY, message)); + throwScope.release(); return {}; } @@ -1206,6 +1276,7 @@ JSC::EncodedJSValue CRYPTO_JWK_UNSUPPORTED_CURVE(JSC::ThrowScope& throwScope, JS builder.append(curve); builder.append('.'); 
throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_JWK_UNSUPPORTED_CURVE, builder.toString())); + throwScope.release(); return {}; } @@ -1218,24 +1289,28 @@ JSC::EncodedJSValue CRYPTO_JWK_UNSUPPORTED_CURVE(JSC::ThrowScope& throwScope, JS } builder.append('.'); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_JWK_UNSUPPORTED_CURVE, builder.toString())); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_JWK_UNSUPPORTED_KEY_TYPE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_JWK_UNSUPPORTED_KEY_TYPE, "Unsupported JWK Key Type."_s)); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_INVALID_JWK(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_JWK, "Invalid JWK data"_s)); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_INVALID_JWK(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, ASCIILiteral message) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_JWK, message)); + throwScope.release(); return {}; } @@ -1243,6 +1318,7 @@ JSC::EncodedJSValue CRYPTO_SIGN_KEY_REQUIRED(JSC::ThrowScope& throwScope, JSC::J { auto message = "No key provided to sign"_s; throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_SIGN_KEY_REQUIRED, message)); + throwScope.release(); return {}; } @@ -1251,11 +1327,12 @@ JSC::EncodedJSValue CRYPTO_INVALID_KEY_OBJECT_TYPE(JSC::ThrowScope& throwScope, WTF::StringBuilder builder; builder.append("Invalid key object type "_s); JSValueToStringSafe(globalObject, builder, received); - RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_RETURN_IF_EXCEPTION(throwScope, {}); builder.append(". 
Expected "_s); builder.append(expected); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE, builder.toString())); + throwScope.release(); return {}; } @@ -1278,6 +1355,7 @@ JSC::EncodedJSValue CRYPTO_INVALID_KEY_OBJECT_TYPE(JSC::ThrowScope& throwScope, builder.append(expected); builder.append('.'); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE, builder.toString())); + throwScope.release(); return {}; } @@ -1290,6 +1368,7 @@ JSC::EncodedJSValue CRYPTO_INCOMPATIBLE_KEY_OPTIONS(JSC::ThrowScope& throwScope, builder.append(expectedOperation); builder.append('.'); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INCOMPATIBLE_KEY_OPTIONS, builder.toString())); + throwScope.release(); return {}; } @@ -1299,6 +1378,7 @@ JSC::EncodedJSValue CRYPTO_INVALID_DIGEST(JSC::ThrowScope& throwScope, JSC::JSGl builder.append("Invalid digest: "_s); builder.append(digest); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_DIGEST, builder.toString())); + throwScope.release(); return {}; } @@ -1308,18 +1388,21 @@ JSC::EncodedJSValue CRYPTO_INVALID_DIGEST(JSC::ThrowScope& throwScope, JSC::JSGl builder.append(message); builder.append(digest); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_INVALID_DIGEST, builder.toString())); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_HASH_FINALIZED(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_HASH_FINALIZED, "Digest already called"_s)); + throwScope.release(); return {}; } JSC::EncodedJSValue CRYPTO_HASH_UPDATE_FAILED(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_HASH_UPDATE_FAILED, "Hash update failed"_s)); + throwScope.release(); return {}; } @@ -1327,6 +1410,7 @@ JSC::EncodedJSValue CRYPTO_TIMING_SAFE_EQUAL_LENGTH(JSC::ThrowScope& scope, JSC: { auto message = "Input buffers must have the same byte length"_s; scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH, message)); + scope.release(); return {}; } @@ -1334,18 +1418,21 @@ JSC::EncodedJSValue CRYPTO_UNKNOWN_DH_GROUP(JSC::ThrowScope& scope, JSGlobalObje { auto message = "Unknown DH group"_s; scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CRYPTO_UNKNOWN_DH_GROUP, message)); + scope.release(); return {}; } JSC::EncodedJSValue OSSL_EVP_INVALID_DIGEST(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject) { scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_OSSL_EVP_INVALID_DIGEST, "Invalid digest used"_s)); + scope.release(); return {}; } JSC::EncodedJSValue MISSING_PASSPHRASE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral message) { throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_MISSING_PASSPHRASE, message)); + throwScope.release(); return {}; } @@ -1353,6 +1440,7 @@ JSC::EncodedJSValue KEY_GENERATION_JOB_FAILED(JSC::ThrowScope& throwScope, JSC:: { auto message = "Key generation job failed"_s; throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_KEY_GENERATION_JOB_FAILED, message)); + throwScope.release(); return {}; } @@ -1366,6 +1454,7 
@@ JSC::EncodedJSValue INCOMPATIBLE_OPTION_PAIR(JSC::ThrowScope& throwScope, JSC::J builder.append("\""_s); throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INCOMPATIBLE_OPTION_PAIR, builder.toString())); + throwScope.release(); return {}; } @@ -1375,6 +1464,7 @@ JSC::EncodedJSValue MISSING_OPTION(JSC::ThrowScope& scope, JSC::JSGlobalObject* builder.append(message); builder.append(" is required"_s); scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_MISSING_OPTION, builder.toString())); + scope.release(); return {}; } @@ -1393,12 +1483,14 @@ JSC::EncodedJSValue INVALID_MIME_SYNTAX(JSC::ThrowScope& scope, JSC::JSGlobalObj } scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_MIME_SYNTAX, builder.toString())); + scope.release(); return {}; } EncodedJSValue CLOSED_MESSAGE_PORT(ThrowScope& scope, JSGlobalObject* globalObject) { scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_CLOSED_MESSAGE_PORT, "Cannot send data on closed MessagePort"_s)); + scope.release(); return {}; } @@ -1408,12 +1500,14 @@ JSC::EncodedJSValue INVALID_THIS(JSC::ThrowScope& scope, JSC::JSGlobalObject* gl builder.append("Value of \"this\" must be of type "_s); builder.append(expectedType); scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_THIS, builder.toString())); + scope.release(); return {}; } JSC::EncodedJSValue DLOPEN_DISABLED(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, ASCIILiteral message) { scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_DLOPEN_DISABLED, message)); + scope.release(); return {}; } @@ -1432,10 +1526,12 @@ static JSC::JSValue ERR_INVALID_ARG_TYPE(JSC::ThrowScope& scope, JSC::JSGlobalOb } const auto msg = Bun::Message::ERR_INVALID_ARG_TYPE(scope, globalObject, argName, expected_types, arg2); + RETURN_IF_EXCEPTION(scope, {}); return createError(globalObject, ErrorCode::ERR_INVALID_ARG_TYPE, msg); } const auto msg = Bun::Message::ERR_INVALID_ARG_TYPE(scope, globalObject, arg0, arg1, arg2); + RETURN_IF_EXCEPTION(scope, {}); return createError(globalObject, ErrorCode::ERR_INVALID_ARG_TYPE, msg); } @@ -1487,7 +1583,7 @@ extern "C" JSC::EncodedJSValue Bun__createErrorWithCode(JSC::JSGlobalObject* glo return JSValue::encode(createError(globalObject, code, message->toWTFString(BunString::ZeroCopy))); } -void throwBoringSSLError(JSC::VM& vm, JSC::ThrowScope& scope, JSGlobalObject* globalObject, int errorCode) +void throwBoringSSLError(JSGlobalObject* globalObject, JSC::ThrowScope& scope, int errorCode) { char buf[256] = { 0 }; ERR_error_string_n(static_cast(errorCode), buf, sizeof(buf)); @@ -1510,7 +1606,7 @@ extern "C" JSC::EncodedJSValue Bun__wrapAbortError(JSC::JSGlobalObject* lexicalG auto cause = JSC::JSValue::decode(causeParam); if (cause.isUndefined()) { - return JSC::JSValue::encode(Bun::createError(vm, globalObject, Bun::ErrorCode::ABORT_ERR, JSC::JSValue(globalObject->commonStrings().OperationWasAbortedString(globalObject)))); + return JSC::JSValue::encode(Bun::createError(vm, globalObject, Bun::ErrorCode::ABORT_ERR, globalObject->commonStrings().OperationWasAbortedString(globalObject))); } auto message = globalObject->commonStrings().OperationWasAbortedString(globalObject); @@ -1603,7 +1699,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject #if ASSERT_ENABLED if (!codeValue.isNumber()) { JSC::throwTypeError(globalObject, scope, "First argument to $ERR_ must be a number"_s); - 
return {};
+        RELEASE_AND_RETURN(scope, {});
     }
 #endif
 
@@ -1612,7 +1708,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject
 #if ASSERT_ENABLED
     if (code > Bun::NODE_ERROR_COUNT - 1 || code < 0) {
         JSC::throwTypeError(globalObject, scope, "Invalid error code. Use $ERR_* constants"_s);
-        return {};
+        RELEASE_AND_RETURN(scope, {});
     }
 #endif
 
@@ -1738,7 +1834,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject
     }
     case 2: {
         JSValue arg0 = callFrame->argument(1);
-        // ["foo", "bar", "baz"] -> 'The "foo", "bar", or "baz" argument must be specified'
+        // ["foo", "bar", "baz"] -> 'The "foo" or "bar" or "baz" argument must be specified'
         if (auto* arr = jsDynamicCast<JSC::JSArray*>(arg0)) {
             ASSERT(arr->length() > 0);
             WTF::StringBuilder builder;
@@ -1746,7 +1842,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject
             for (unsigned i = 0, length = arr->length(); i < length; i++) {
                 JSValue index = arr->getIndex(globalObject, i);
                 RETURN_IF_EXCEPTION(scope, {});
-                if (i == length - 1) builder.append("or "_s);
+                if (i > 0) builder.append("or "_s);
                 builder.append('"');
                 auto* jsString = index.toString(globalObject);
                 RETURN_IF_EXCEPTION(scope, {});
@@ -1754,7 +1850,6 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject
                 RETURN_IF_EXCEPTION(scope, {});
                 builder.append(str);
                 builder.append('"');
-                if (i != length - 1) builder.append(',');
                 builder.append(' ');
             }
             builder.append("argument must be specified"_s);
@@ -1818,11 +1913,13 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject
     auto str0 = arg0.toString(globalObject);
     RETURN_IF_EXCEPTION(scope, {});
     auto view0 = str0->view(globalObject);
+    RETURN_IF_EXCEPTION(scope, {});
 
     auto arg1 = callFrame->argument(2);
     auto str1 = arg1.toString(globalObject);
     RETURN_IF_EXCEPTION(scope, {});
     auto view1 = str1->view(globalObject);
+    RETURN_IF_EXCEPTION(scope, {});
 
     auto arg2 = callFrame->argument(3);
 
@@ -2402,6 +2499,8 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject
         return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_VM_MODULE_NOT_MODULE, "Provided module is not an instance of Module"_s));
     case ErrorCode::ERR_VM_MODULE_DIFFERENT_CONTEXT:
         return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_VM_MODULE_DIFFERENT_CONTEXT, "Linked modules must use the same context"_s));
+    case ErrorCode::ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING:
+        return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING, "A dynamic import callback was not specified."_s));
 
     default: {
         break;
diff --git a/src/bun.js/bindings/ErrorCode.h b/src/bun.js/bindings/ErrorCode.h
index 92f626ad23..c17dbf5d4b 100644
--- a/src/bun.js/bindings/ErrorCode.h
+++ b/src/bun.js/bindings/ErrorCode.h
@@ -9,6 +9,18 @@
 #include "ErrorCode+List.h"
 #include "CryptoKeyType.h"
 
+#define RELEASE_RETURN_IF_EXCEPTION(scope__, value__)                                                              \
+    do {                                                                                                           \
+        SUPPRESS_UNCOUNTED_LOCAL JSC::VM& vm = (scope__).vm();                                                     \
+        EXCEPTION_ASSERT(!!(scope__).exception() == vm.traps().needHandling(JSC::VMTraps::NeedExceptionHandling)); \
+        if (vm.traps().maybeNeedHandling()) [[unlikely]] {                                                         \
+            if (vm.hasExceptionsAfterHandlingTraps()) {                                                            \
+                scope__.release();                                                                                 \
+                return value__;                                                                                    \
+            }                                                                                                      \
+        }                                                                                                          \
+    } while (false)
+
 namespace Bun {
 
 class ErrorCodeCache : public JSC::JSInternalFieldObjectImpl<NODE_ERROR_COUNT> {
@@ -147,7 +159,7 @@ JSC::EncodedJSValue INVALID_FILE_URL_PATH(JSC::ThrowScope& throwScope, JSC::JSGl
 
 }
 
-void throwBoringSSLError(JSC::VM& vm, JSC::ThrowScope& scope, JSGlobalObject* globalObject, int errorCode);
+void throwBoringSSLError(JSGlobalObject* globalObject, JSC::ThrowScope& scope, int errorCode);
 void throwCryptoOperationFailed(JSGlobalObject* globalObject, JSC::ThrowScope& scope);
 
 }
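`RELEASE_RETURN_IF_EXCEPTION` bails out with the scope already released when a pending exception (or a trap about to throw one) is detected, which is why the helpers above pair it with a trailing `throwScope.release()` after their own deliberate throw. A usage sketch with an illustrative helper name but calls taken from the surrounding diff:

```cpp
// Sketch of the pattern used throughout ErrorCode.cpp: every fallible step
// either releases the scope and propagates, or the helper releases after the
// intentional throwException(). "throwWithReceived" is a made-up name.
JSC::EncodedJSValue throwWithReceived(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSC::JSValue actual)
{
    WTF::StringBuilder builder;
    builder.append("Received "_s);
    JSValueToStringSafe(globalObject, builder, actual); // may itself throw
    RELEASE_RETURN_IF_EXCEPTION(scope, {});             // propagate; scope released
    scope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, builder.toString()));
    scope.release();                                    // we threw on purpose; caller handles it
    return {};
}
```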
diff --git a/src/bun.js/bindings/ErrorCode.ts b/src/bun.js/bindings/ErrorCode.ts
index ecd518b692..33dcec4582 100644
--- a/src/bun.js/bindings/ErrorCode.ts
+++ b/src/bun.js/bindings/ErrorCode.ts
@@ -296,6 +296,7 @@ const errors: ErrorCodeMapping = [
   ["ERR_VM_MODULE_DIFFERENT_CONTEXT", Error],
   ["ERR_VM_MODULE_LINK_FAILURE", Error],
   ["ERR_VM_MODULE_CACHED_DATA_REJECTED", Error],
+  ["ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING", TypeError],
   ["HPE_INVALID_HEADER_TOKEN", Error],
   ["HPE_HEADER_OVERFLOW", Error],
 ];
diff --git a/src/bun.js/bindings/Errorable.zig b/src/bun.js/bindings/Errorable.zig
index 2e27290c40..6bcbe25d48 100644
--- a/src/bun.js/bindings/Errorable.zig
+++ b/src/bun.js/bindings/Errorable.zig
@@ -1,3 +1,4 @@
+const bun = @import("bun");
 const ZigErrorType = @import("ZigErrorType.zig").ZigErrorType;
 const ErrorCode = @import("ErrorCode.zig").ErrorCode;
 
@@ -27,12 +28,12 @@ pub fn Errorable(comptime Type: type) type {
             return @This(){ .result = .{ .value = val }, .success = true };
         }
 
-        pub fn err(code: anyerror, ptr: *anyopaque) @This() {
+        pub fn err(code: anyerror, err_value: bun.jsc.JSValue) @This() {
             return @This(){
                 .result = .{
                     .err = .{
                         .code = ErrorCode.from(code),
-                        .ptr = ptr,
+                        .value = err_value,
                     },
                 },
                 .success = false,
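`Errorable(T).err` now carries the thrown `JSValue` itself rather than an opaque pointer. A rough C++ mirror of that ABI, with the layout inferred from the Zig definition above (illustrative only, not a header from the tree; the width of `code` is an assumption):

```cpp
// Hypothetical C++ view of Zig's Errorable(T) after this change: the error arm
// holds an EncodedJSValue instead of a void*. Field layout inferred from the
// Zig struct; do not treat this as the authoritative ABI.
template<typename T>
struct Errorable {
    union {
        T value; // valid when success == true
        struct {
            int64_t code;              // ErrorCode.from(code); width assumed
            JSC::EncodedJSValue value; // the thrown JS value itself
        } err;   // valid when success == false
    } result;
    bool success;
};
```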
zigGlobalObject->ImportMetaBakeObjectStructure() + : zigGlobalObject->ImportMetaObjectStructure(); + return create(vm, globalObject, structure, url); } @@ -213,7 +222,9 @@ extern "C" JSC::EncodedJSValue functionImportMeta__resolveSync(JSC::JSGlobalObje if (!fromValue.isUndefinedOrNull() && fromValue.isObject()) { - if (auto pathsObject = fromValue.getObject()->getIfPropertyExists(globalObject, builtinNames(vm).pathsPublicName())) { + auto pathsObject = fromValue.getObject()->getIfPropertyExists(globalObject, builtinNames(vm).pathsPublicName()); + RETURN_IF_EXCEPTION(scope, {}); + if (pathsObject) { if (pathsObject.isCell() && pathsObject.asCell()->type() == JSC::JSType::ArrayType) { auto pathsArray = JSC::jsCast(pathsObject); if (pathsArray->length() > 0) { @@ -246,6 +257,7 @@ extern "C" JSC::EncodedJSValue functionImportMeta__resolveSync(JSC::JSGlobalObje auto clientData = WebCore::clientData(vm); JSValue pathProperty = thisObject->getIfPropertyExists(globalObject, clientData->builtinNames().pathPublicName()); + RETURN_IF_EXCEPTION(scope, {}); if (pathProperty && pathProperty.isString()) from = JSC::JSValue::encode(pathProperty); @@ -354,12 +366,14 @@ extern "C" JSC::EncodedJSValue functionImportMeta__resolveSyncPrivate(JSC::JSGlo for (size_t i = 0; i < userPathListArray->length(); ++i) { JSValue path = userPathListArray->getIndex(globalObject, i); WTF::String pathStr = path.toWTFString(globalObject); - if (scope.exception()) goto cleanup; + if (scope.exception()) [[unlikely]] + goto cleanup; paths.append(Bun::toStringRef(pathStr)); } result = Bun__resolveSyncWithPaths(lexicalGlobalObject, JSC::JSValue::encode(moduleName), JSValue::encode(from), isESM, isRequireDotResolve, paths.begin(), paths.size()); - if (scope.exception()) goto cleanup; + if (scope.exception()) [[unlikely]] + goto cleanup; if (!JSC::JSValue::decode(result).isString()) { JSC::throwException(lexicalGlobalObject, scope, JSC::JSValue::decode(result)); @@ -418,7 +432,9 @@ JSC_DEFINE_HOST_FUNCTION(functionImportMeta__resolve, JSValue fromValue = callFrame->uncheckedArgument(1); if (!fromValue.isUndefinedOrNull() && fromValue.isObject()) { - if (JSValue pathsObject = fromValue.getObject()->getIfPropertyExists(globalObject, builtinNames(vm).pathsPublicName())) { + auto pathsObject = fromValue.getObject()->getIfPropertyExists(globalObject, builtinNames(vm).pathsPublicName()); + RETURN_IF_EXCEPTION(scope, {}); + if (pathsObject) { if (pathsObject.isCell() && pathsObject.asCell()->type() == JSC::JSType::ArrayType) { auto* pathsArray = JSC::jsCast(pathsObject); if (pathsArray->length() > 0) { @@ -444,6 +460,7 @@ JSC_DEFINE_HOST_FUNCTION(functionImportMeta__resolve, auto clientData = WebCore::clientData(vm); JSValue pathProperty = thisObject->getIfPropertyExists(globalObject, clientData->builtinNames().pathPublicName()); + RETURN_IF_EXCEPTION(scope, {}); if (pathProperty && pathProperty.isString()) [[likely]] { from = pathProperty; @@ -541,7 +558,8 @@ JSC_DEFINE_CUSTOM_GETTER(jsImportMetaObjectGetter_require, (JSGlobalObject * glo if (!thisObject) [[unlikely]] return JSValue::encode(jsUndefined()); - return JSValue::encode(thisObject->requireProperty.getInitializedOnMainThread(thisObject)); + auto* nullable = thisObject->requireProperty.getInitializedOnMainThread(thisObject); + return JSValue::encode(nullable ? 
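Every `getIfPropertyExists` site in this file gets the same treatment, because the lookup can run arbitrary JS (getters, Proxy traps) and may leave a pending exception behind a seemingly usable return value. A minimal sketch of the post-fix shape, with hypothetical function and parameter names:

```cpp
// Read options.paths the safe way: check the scope before testing the value.
static JSC::JSValue readPaths(JSC::JSGlobalObject* globalObject, JSC::ThrowScope& scope,
    JSC::JSObject* options, JSC::PropertyName pathsName)
{
    JSC::JSValue pathsObject = options->getIfPropertyExists(globalObject, pathsName);
    RETURN_IF_EXCEPTION(scope, {}); // the lookup itself may have thrown
    if (pathsObject && pathsObject.isCell()
        && pathsObject.asCell()->type() == JSC::JSType::ArrayType)
        return pathsObject;
    return JSC::jsUndefined();
}
```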
nullable : jsUndefined()); } // https://github.com/oven-sh/bun/issues/11754#issuecomment-2452626172 @@ -581,7 +599,21 @@ static const HashTableValue ImportMetaObjectPrototypeValues[] = { { "url"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::GetterSetterType, jsImportMetaObjectGetter_url, 0 } }, }; -class ImportMetaObjectPrototype final : public JSC::JSNonFinalObject { +static const HashTableValue ImportMetaObjectBakePrototypeValues[] = { + { "bakeBuiltin"_s, static_cast(JSC::PropertyAttribute::Builtin | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly), NoIntrinsic, { HashTableValue::BuiltinGeneratorType, commonJSRequireESMCodeGenerator, 0 } }, + { "dir"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::GetterSetterType, jsImportMetaObjectGetter_dir, 0 } }, + { "dirname"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::GetterSetterType, jsImportMetaObjectGetter_dir, 0 } }, + { "env"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::GetterSetterType, jsImportMetaObjectGetter_env, 0 } }, + { "file"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::GetterSetterType, jsImportMetaObjectGetter_file, 0 } }, + { "filename"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::GetterSetterType, jsImportMetaObjectGetter_path, 0 } }, + { "path"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::GetterSetterType, jsImportMetaObjectGetter_path, 0 } }, + { "require"_s, static_cast(JSC::PropertyAttribute::CustomAccessor | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::GetterSetterType, jsImportMetaObjectGetter_require, jsImportMetaObjectSetter_require } }, + { "resolve"_s, static_cast(JSC::PropertyAttribute::Function | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, functionImportMeta__resolve, 0 } }, + { "resolveSync"_s, static_cast(JSC::PropertyAttribute::Function | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, functionImportMeta__resolveSync, 0 } }, + { "url"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::GetterSetterType, jsImportMetaObjectGetter_url, 0 } }, +}; + +class ImportMetaObjectPrototype : public JSC::JSNonFinalObject { public: DECLARE_INFO; using Base = JSC::JSNonFinalObject; @@ -591,10 +623,10 @@ public: return Structure::create(vm, globalObject, globalObject->objectPrototype(), TypeInfo(ObjectType, StructureFlags), info()); } - static ImportMetaObjectPrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) + static ImportMetaObjectPrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, bool isBake = false) { ImportMetaObjectPrototype* prototype = new (NotNull, JSC::allocateCell(vm)) 
ImportMetaObjectPrototype(vm, structure); - prototype->finishCreation(vm, globalObject); + prototype->finishCreation(vm, globalObject, isBake); return prototype; } @@ -605,14 +637,19 @@ public: return &vm.plainObjectSpace(); } - void finishCreation(JSC::VM& vm, JSC::JSGlobalObject* globalObject) + void finishCreation(JSC::VM& vm, JSC::JSGlobalObject* globalObject, bool isBake) { Base::finishCreation(vm); auto* clientData = WebCore::clientData(vm); auto& builtinNames = clientData->builtinNames(); - reifyStaticProperties(vm, ImportMetaObject::info(), ImportMetaObjectPrototypeValues, *this); + // Use the appropriate prototype values based on whether this is a bake import meta object + if (isBake) { + reifyStaticProperties(vm, ImportMetaObject::info(), ImportMetaObjectBakePrototypeValues, *this); + } else { + reifyStaticProperties(vm, ImportMetaObject::info(), ImportMetaObjectPrototypeValues, *this); + } JSC_TO_STRING_TAG_WITHOUT_TRANSITION(); auto mainGetter = JSFunction::create(vm, globalObject, importMetaObjectMainCodeGenerator(vm), globalObject); @@ -636,11 +673,12 @@ const ClassInfo ImportMetaObjectPrototype::s_info = { &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(ImportMetaObjectPrototype) }; -JSC::Structure* ImportMetaObject::createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +JSC::Structure* ImportMetaObject::createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, bool isBake) { ImportMetaObjectPrototype* prototype = ImportMetaObjectPrototype::create(vm, globalObject, - ImportMetaObjectPrototype::createStructure(vm, globalObject)); + ImportMetaObjectPrototype::createStructure(vm, globalObject), + isBake); return Structure::create(vm, globalObject, prototype, TypeInfo(ObjectType, StructureFlags), ImportMetaObject::info()); } @@ -651,6 +689,7 @@ void ImportMetaObject::finishCreation(VM& vm) ASSERT(inherits(info())); this->requireProperty.initLater([](const JSC::LazyProperty::Initializer& init) { + auto scope = DECLARE_THROW_SCOPE(init.vm); ImportMetaObject* meta = jsCast(init.owner); WTF::URL url = isAbsolutePath(meta->url) ? 
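For readers skimming the bake changes: the `isBake` flag threaded through `createStructure` and `create` only decides which static property table `finishCreation` reifies onto the prototype; everything else about structure creation is shared. A condensed sketch of that selection, abbreviated from the hunks above rather than adding behavior:

```cpp
// finishCreation picks the table; the two tables share most entries, and the
// bake variant swaps in bake-specific builtins such as bakeBuiltin.
static void reifyImportMetaTable(JSC::VM& vm, JSC::JSObject& prototype, bool isBake)
{
    if (isBake)
        reifyStaticProperties(vm, ImportMetaObject::info(), ImportMetaObjectBakePrototypeValues, prototype);
    else
        reifyStaticProperties(vm, ImportMetaObject::info(), ImportMetaObjectPrototypeValues, prototype);
}
```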
WTF::URL::fileURLWithFileSystemPath(meta->url) : WTF::URL(meta->url); @@ -666,8 +705,10 @@ void ImportMetaObject::finishCreation(VM& vm) path = meta->url; } - JSFunction* value = jsCast(Bun::JSCommonJSModule::createBoundRequireFunction(init.vm, meta->globalObject(), path)); - init.set(value); + auto* object = Bun::JSCommonJSModule::createBoundRequireFunction(init.vm, meta->globalObject(), path); + RETURN_IF_EXCEPTION(scope, ); + ASSERT(object); + init.set(jsCast(object)); }); this->urlProperty.initLater([](const JSC::LazyProperty::Initializer& init) { ImportMetaObject* meta = jsCast(init.owner); diff --git a/src/bun.js/bindings/ImportMetaObject.h b/src/bun.js/bindings/ImportMetaObject.h index 64bae5cc84..4ec49bcda7 100644 --- a/src/bun.js/bindings/ImportMetaObject.h +++ b/src/bun.js/bindings/ImportMetaObject.h @@ -70,7 +70,7 @@ public: [](auto& spaces, auto&& space) { spaces.m_subspaceForImportMeta = std::forward(space); }); } - static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject); + static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, bool isBake = false); static void analyzeHeap(JSCell*, JSC::HeapAnalyzer&); static JSValue getPrototype(JSObject*, JSC::JSGlobalObject* globalObject); diff --git a/src/bun.js/bindings/InspectorLifecycleAgent.cpp b/src/bun.js/bindings/InspectorLifecycleAgent.cpp index a5eadfc1f0..6dae95566d 100644 --- a/src/bun.js/bindings/InspectorLifecycleAgent.cpp +++ b/src/bun.js/bindings/InspectorLifecycleAgent.cpp @@ -146,12 +146,9 @@ Protocol::ErrorStringOr InspectorLifecycleAgent::getModuleGraph() auto* global = defaultGlobalObject(&m_globalObject); auto* esmMap = global->esmRegistryMap(); + if (!esmMap) return makeUnexpected(ErrorString("Module graph not available"_s)); auto* cjsMap = global->requireMap(); - if (!esmMap || !cjsMap) { - return makeUnexpected(ErrorString("Module graph not available"_s)); - } - Ref> esm = JSON::ArrayOf::create(); { auto iter1 = JSC::JSMapIterator::create(global, global->mapIteratorStructure(), esmMap, JSC::IterationKind::Keys); diff --git a/src/bun.js/bindings/JSArray.zig b/src/bun.js/bindings/JSArray.zig index 1f5d23c385..62662ae309 100644 --- a/src/bun.js/bindings/JSArray.zig +++ b/src/bun.js/bindings/JSArray.zig @@ -9,16 +9,16 @@ pub const JSArray = opaque { extern fn JSArray__constructArray(*JSGlobalObject, [*]const JSValue, usize) JSValue; pub fn create(global: *JSGlobalObject, items: []const JSValue) bun.JSError!JSValue { - return bun.jsc.fromJSHostValue(JSArray__constructArray(global, items.ptr, items.len)); + return bun.jsc.fromJSHostCall(global, @src(), JSArray__constructArray, .{ global, items.ptr, items.len }); } extern fn JSArray__constructEmptyArray(*JSGlobalObject, usize) JSValue; pub fn createEmpty(global: *JSGlobalObject, len: usize) bun.JSError!JSValue { - return bun.jsc.fromJSHostValue(JSArray__constructEmptyArray(global, len)); + return bun.jsc.fromJSHostCall(global, @src(), JSArray__constructEmptyArray, .{ global, len }); } - pub fn iterator(array: *JSArray, global: *JSGlobalObject) JSArrayIterator { + pub fn iterator(array: *JSArray, global: *JSGlobalObject) bun.JSError!JSArrayIterator { return JSValue.fromCell(array).arrayIterator(global); } }; diff --git a/src/bun.js/bindings/JSArrayIterator.zig b/src/bun.js/bindings/JSArrayIterator.zig index 315171fe63..6ff2beb691 100644 --- a/src/bun.js/bindings/JSArrayIterator.zig +++ b/src/bun.js/bindings/JSArrayIterator.zig @@ -10,21 +10,20 @@ pub const JSArrayIterator = struct { array: JSValue, global: 
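The `requireProperty` initializer above now declares its own throw scope because `createBoundRequireFunction` can fail. A sketch of that pattern with an assumed factory and assumed `LazyProperty` template arguments; the key point is that `init.set()` is skipped whenever an exception is pending:

```cpp
// Hypothetical factory standing in for createBoundRequireFunction, which can
// now return null with an exception on the VM.
static JSC::JSObject* makeRequireLike(JSC::VM&, JSC::JSGlobalObject*);

using RequireProperty = JSC::LazyProperty<JSC::JSObject, JSC::JSObject>;

void initLaterSketch(RequireProperty& property)
{
    property.initLater([](const RequireProperty::Initializer& init) {
        auto scope = DECLARE_THROW_SCOPE(init.vm);
        auto* value = makeRequireLike(init.vm, init.owner->globalObject());
        RETURN_IF_EXCEPTION(scope, ); // bail, leaving the property uninitialized
        ASSERT(value);
        init.set(value);
    });
}
```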
*JSGlobalObject, - pub fn init(value: JSValue, global: *JSGlobalObject) JSArrayIterator { + pub fn init(value: JSValue, global: *JSGlobalObject) bun.JSError!JSArrayIterator { return .{ .array = value, .global = global, - .len = @as(u32, @truncate(value.getLength(global))), + .len = @truncate(try value.getLength(global)), }; } - // TODO: this can throw - pub fn next(this: *JSArrayIterator) ?JSValue { + pub fn next(this: *JSArrayIterator) bun.JSError!?JSValue { if (!(this.i < this.len)) { return null; } const i = this.i; this.i += 1; - return JSObject.getIndex(this.array, this.global, i); + return try JSObject.getIndex(this.array, this.global, i); } }; diff --git a/src/bun.js/bindings/JSBigInt.zig b/src/bun.js/bindings/JSBigInt.zig index 6aa0ca06fb..77d5f3a9dc 100644 --- a/src/bun.js/bindings/JSBigInt.zig +++ b/src/bun.js/bindings/JSBigInt.zig @@ -37,10 +37,6 @@ pub const JSBigInt = opaque { extern fn JSC__JSBigInt__toString(*JSBigInt, *JSGlobalObject) bun.String; pub fn toString(this: *JSBigInt, global: *JSGlobalObject) JSError!bun.String { - const str = JSC__JSBigInt__toString(this, global); - if (global.hasException()) { - return error.JSError; - } - return str; + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSBigInt__toString, .{ this, global }); } }; diff --git a/src/bun.js/bindings/JSBuffer.cpp b/src/bun.js/bindings/JSBuffer.cpp index e32794b288..eeb0500ee6 100644 --- a/src/bun.js/bindings/JSBuffer.cpp +++ b/src/bun.js/bindings/JSBuffer.cpp @@ -219,21 +219,15 @@ std::optional byteLength(JSC::JSString* str, JSC::JSGlobalObject* lexica static JSUint8Array* allocBuffer(JSC::JSGlobalObject* lexicalGlobalObject, size_t byteLength) { -#if ASSERT_ENABLED auto& vm = JSC::getVM(lexicalGlobalObject); auto throwScope = DECLARE_THROW_SCOPE(vm); -#endif auto* globalObject = defaultGlobalObject(lexicalGlobalObject); auto* subclassStructure = globalObject->JSBufferSubclassStructure(); auto* uint8Array = JSC::JSUint8Array::create(lexicalGlobalObject, subclassStructure, byteLength); -#if ASSERT_ENABLED - if (!uint8Array) [[unlikely]] { - // it should have thrown an exception already - ASSERT(throwScope.exception()); - } -#endif + // it should have thrown an exception already + ASSERT(!!throwScope.exception() == !uint8Array); return uint8Array; } @@ -241,19 +235,13 @@ static JSUint8Array* allocBuffer(JSC::JSGlobalObject* lexicalGlobalObject, size_ static JSUint8Array* allocBufferUnsafe(JSC::JSGlobalObject* lexicalGlobalObject, size_t byteLength) { -#if ASSERT_ENABLED auto& vm = JSC::getVM(lexicalGlobalObject); auto throwScope = DECLARE_THROW_SCOPE(vm); -#endif auto* result = createUninitializedBuffer(lexicalGlobalObject, byteLength); -#if ASSERT_ENABLED - if (!result) [[unlikely]] { - // it should have thrown an exception already - ASSERT(throwScope.exception()); - } -#endif + // it should have thrown an exception already + ASSERT(!!throwScope.exception() == !result); return result; } @@ -357,6 +345,7 @@ JSC::EncodedJSValue JSBuffer__bufferFromPointerAndLengthAndDeinit(JSC::JSGlobalO auto* globalObject = defaultGlobalObject(lexicalGlobalObject); auto* subclassStructure = globalObject->JSBufferSubclassStructure(); + auto scope = DECLARE_CATCH_SCOPE(lexicalGlobalObject->vm()); if (length > 0) [[likely]] { auto buffer = ArrayBuffer::createFromBytes({ reinterpret_cast(ptr), length }, createSharedTask([ctx, bytesDeallocator](void* p) { @@ -369,6 +358,10 @@ JSC::EncodedJSValue JSBuffer__bufferFromPointerAndLengthAndDeinit(JSC::JSGlobalO uint8Array = JSC::JSUint8Array::create(lexicalGlobalObject, 
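Making the Zig `JSArrayIterator` fallible matches what the engine requires: each element read can observably run JS (index getters, Proxies), so every step needs an exception check. The C++ equivalent of a correct consumer loop looks like this (a sketch; `sumArray` is illustrative, not an API in this diff):

```cpp
static JSC::JSValue sumArray(JSC::JSGlobalObject* globalObject, JSC::JSArray* array)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    double sum = 0;
    for (unsigned i = 0, length = array->length(); i < length; i++) {
        JSC::JSValue element = array->getIndex(globalObject, i); // may run JS
        RETURN_IF_EXCEPTION(scope, {});
        sum += element.toNumber(globalObject); // may also run JS
        RETURN_IF_EXCEPTION(scope, {});
    }
    return JSC::jsNumber(sum);
}
```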
subclassStructure, 0); } + // only JSC::JSUint8Array::create can throw and we control the ArrayBuffer passed in. + scope.assertNoException(); + ASSERT(uint8Array); + return JSC::JSValue::encode(uint8Array); } @@ -474,7 +467,6 @@ static JSC::JSUint8Array* JSBuffer__bufferFromLengthAsArray(JSC::JSGlobalObject* auto* globalObject = defaultGlobalObject(lexicalGlobalObject); auto* subclassStructure = globalObject->JSBufferSubclassStructure(); JSC::JSUint8Array* uint8Array = JSC::JSUint8Array::create(lexicalGlobalObject, subclassStructure, static_cast(length)); - RELEASE_AND_RETURN(throwScope, uint8Array); } @@ -566,6 +558,7 @@ JSC::EncodedJSValue constructFromEncoding(JSGlobalObject* lexicalGlobalObject, W } } } + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue decoded = JSC::JSValue::decode(result); if (!result) [[unlikely]] { @@ -606,7 +599,7 @@ static JSC::EncodedJSValue constructBufferFromStringAndEncoding(JSC::JSGlobalObj } if (str->length() == 0) - return constructBufferEmpty(lexicalGlobalObject); + RELEASE_AND_RETURN(scope, constructBufferEmpty(lexicalGlobalObject)); JSC::EncodedJSValue result = constructFromEncoding(lexicalGlobalObject, view, encoding); @@ -626,7 +619,7 @@ static JSC::EncodedJSValue jsBufferConstructorFunction_allocBody(JSC::JSGlobalOb RETURN_IF_EXCEPTION(scope, {}); if (length == 0) { - return JSValue::encode(createEmptyBuffer(lexicalGlobalObject)); + RELEASE_AND_RETURN(scope, JSValue::encode(createEmptyBuffer(lexicalGlobalObject))); } // fill argument if (callFrame->argumentCount() > 1) [[unlikely]] { @@ -764,7 +757,7 @@ static JSC::EncodedJSValue jsBufferConstructorFunction_byteLengthBody(JSC::JSGlo } if (arg0.value().isString()) [[likely]] - return jsBufferByteLengthFromStringAndEncoding(lexicalGlobalObject, asString(arg0.value()), encoding); + RELEASE_AND_RETURN(scope, jsBufferByteLengthFromStringAndEncoding(lexicalGlobalObject, asString(arg0.value()), encoding)); if (auto* arrayBufferView = jsDynamicCast(arg0.value())) { return JSValue::encode(jsNumber(arrayBufferView->byteLength())); @@ -901,11 +894,7 @@ static JSC::EncodedJSValue jsBufferConstructorFunction_concatBody(JSC::JSGlobalO // there will be some data that needs to be zeroed out // let's let the operating system do that for us allocBuffer(lexicalGlobalObject, byteLength); - - if (!outBuffer) { - ASSERT(throwScope.exception()); - return {}; - } + RETURN_IF_EXCEPTION(throwScope, {}); size_t remain = byteLength; auto* head = outBuffer->typedVector(); @@ -924,7 +913,7 @@ static JSC::EncodedJSValue jsBufferConstructorFunction_concatBody(JSC::JSGlobalO head += length; } - RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(JSC::JSValue(outBuffer))); + RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(outBuffer)); } // https://github.com/nodejs/node/blob/v22.9.0/lib/buffer.js#L337 @@ -946,7 +935,7 @@ static JSC::EncodedJSValue jsBufferConstructorFunction_copyBytesFromBody(JSC::JS auto viewLength = view->length(); if (viewLength == 0) { - return JSValue::encode(createEmptyBuffer(lexicalGlobalObject)); + RELEASE_AND_RETURN(throwScope, JSValue::encode(createEmptyBuffer(lexicalGlobalObject))); } size_t offset; @@ -956,7 +945,7 @@ static JSC::EncodedJSValue jsBufferConstructorFunction_copyBytesFromBody(JSC::JS if (!offsetValue.isUndefined()) { Bun::V::validateInteger(throwScope, lexicalGlobalObject, offsetValue, "offset"_s, jsNumber(0), jsUndefined(), &offset); RETURN_IF_EXCEPTION(throwScope, {}); - if (offset >= viewLength) return JSValue::encode(createEmptyBuffer(lexicalGlobalObject)); + if (offset >= viewLength) 
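The tightened asserts and the `RETURN_IF_EXCEPTION` in `concat` all encode one contract: allocation either succeeds, or returns null with the exception already on the VM, never both and never neither. Callers can then rely on a single exception check instead of a separate null check. Sketch of the invariant:

```cpp
static JSC::JSUint8Array* allocChecked(JSC::JSGlobalObject* globalObject,
    JSC::Structure* structure, size_t byteLength)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    auto* array = JSC::JSUint8Array::create(globalObject, structure, byteLength);
    ASSERT(!!scope.exception() == !array); // null result <=> pending exception
    return array;
}
```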
RELEASE_AND_RETURN(throwScope, JSValue::encode(createEmptyBuffer(lexicalGlobalObject))); } else { offset = 0; } @@ -975,13 +964,13 @@ static JSC::EncodedJSValue jsBufferConstructorFunction_copyBytesFromBody(JSC::JS auto offset_r = offset * elemSize; auto end_r = end * elemSize; auto span = view->span().subspan(offset_r, end_r - offset_r); - return JSValue::encode(createBuffer(lexicalGlobalObject, span.data(), span.size())); + RELEASE_AND_RETURN(throwScope, JSValue::encode(createBuffer(lexicalGlobalObject, span.data(), span.size()))); } auto boffset = view->byteOffset(); auto blength = view->byteLength(); auto span = view->span().subspan(boffset, blength - boffset); - return JSValue::encode(createBuffer(lexicalGlobalObject, span.data(), span.size())); + RELEASE_AND_RETURN(throwScope, JSValue::encode(createBuffer(lexicalGlobalObject, span.data(), span.size()))); } static JSC::EncodedJSValue jsBufferConstructorFunction_isEncodingBody(JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* callFrame) @@ -1557,6 +1546,7 @@ static JSC::EncodedJSValue jsBufferPrototypeFunction_indexOfBody(JSC::JSGlobalOb auto& vm = JSC::getVM(lexicalGlobalObject); auto scope = DECLARE_THROW_SCOPE(vm); auto index = indexOf(lexicalGlobalObject, scope, callFrame, castedThis, false); + RETURN_IF_EXCEPTION(scope, {}); return JSC::JSValue::encode(jsNumber(index)); } @@ -1637,6 +1627,7 @@ static JSC::EncodedJSValue jsBufferPrototypeFunction_lastIndexOfBody(JSC::JSGlob auto& vm = JSC::getVM(lexicalGlobalObject); auto scope = DECLARE_THROW_SCOPE(vm); auto index = indexOf(lexicalGlobalObject, scope, callFrame, castedThis, true); + RETURN_IF_EXCEPTION(scope, {}); return JSC::JSValue::encode(jsNumber(index)); } @@ -1758,7 +1749,7 @@ JSC::EncodedJSValue jsBufferToStringFromBytes(JSGlobalObject* lexicalGlobalObjec RETURN_IF_EXCEPTION(scope, {}); if (!buffer) [[unlikely]] { throwOutOfMemoryError(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } memcpy(buffer->vector(), bytes.data(), bytes.size()); return JSC::JSValue::encode(buffer); @@ -1768,7 +1759,7 @@ JSC::EncodedJSValue jsBufferToStringFromBytes(JSGlobalObject* lexicalGlobalObjec auto str = String::tryCreateUninitialized(bytes.size(), data); if (str.isNull()) [[unlikely]] { throwOutOfMemoryError(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } memcpy(data.data(), bytes.data(), bytes.size()); @@ -1776,7 +1767,7 @@ JSC::EncodedJSValue jsBufferToStringFromBytes(JSGlobalObject* lexicalGlobalObjec } case BufferEncodingType::ucs2: case BufferEncodingType::utf16le: { - std::span data; + std::span data; size_t u16length = bytes.size() / 2; if (u16length == 0) { return JSValue::encode(jsEmptyString(vm)); @@ -1784,7 +1775,7 @@ JSC::EncodedJSValue jsBufferToStringFromBytes(JSGlobalObject* lexicalGlobalObjec auto str = String::tryCreateUninitialized(u16length, data); if (str.isNull()) [[unlikely]] { throwOutOfMemoryError(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } memcpy(reinterpret_cast(data.data()), bytes.data(), u16length * 2); return JSValue::encode(jsString(vm, WTFMove(str))); @@ -1794,7 +1785,7 @@ JSC::EncodedJSValue jsBufferToStringFromBytes(JSGlobalObject* lexicalGlobalObjec auto str = String::tryCreateUninitialized(bytes.size(), data); if (str.isNull()) [[unlikely]] { throwOutOfMemoryError(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } Bun__encoding__writeLatin1(bytes.data(), bytes.size(), data.data(), data.size(), static_cast(encoding)); return 
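Several plain `return helper(...)` calls in this file become `RELEASE_AND_RETURN(scope, helper(...))`. The macro is roughly the following (paraphrased from JSC's exception-scope machinery, not copied from it): releasing the scope declares "the callee may throw and my caller takes responsibility", which keeps exception-scope verification from flagging the early return.

```cpp
// Approximate expansion; JSC's real macro is equivalent in effect.
#define RELEASE_AND_RETURN_SKETCH(scope__, expression__) \
    do {                                                 \
        (scope__).release();                             \
        return expression__;                             \
    } while (false)
```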
JSValue::encode(jsString(vm, WTFMove(str))); @@ -1805,12 +1796,12 @@ JSC::EncodedJSValue jsBufferToStringFromBytes(JSGlobalObject* lexicalGlobalObjec case WebCore::BufferEncodingType::base64url: case WebCore::BufferEncodingType::hex: { EncodedJSValue res = Bun__encoding__toString(bytes.data(), bytes.size(), lexicalGlobalObject, static_cast(encoding)); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSValue stringValue = JSValue::decode(res); if (!stringValue.isString()) [[unlikely]] { scope.throwException(lexicalGlobalObject, stringValue); - return JSValue::encode({}); + return {}; } RELEASE_AND_RETURN(scope, JSValue::encode(stringValue)); @@ -2020,12 +2011,10 @@ static JSC::EncodedJSValue jsBufferPrototypeFunction_writeEncodingBody(JSC::VM& } if (offset < 0 || offset > byteLength) { - Bun::ERR::BUFFER_OUT_OF_BOUNDS(scope, lexicalGlobalObject, "offset"); - RETURN_IF_EXCEPTION(scope, {}); + return Bun::ERR::BUFFER_OUT_OF_BOUNDS(scope, lexicalGlobalObject, "offset"); } if (length < 0 || length > byteLength - offset) { - Bun::ERR::BUFFER_OUT_OF_BOUNDS(scope, lexicalGlobalObject, "length"); - RETURN_IF_EXCEPTION(scope, {}); + return Bun::ERR::BUFFER_OUT_OF_BOUNDS(scope, lexicalGlobalObject, "length"); } RELEASE_AND_RETURN(scope, writeToBuffer(lexicalGlobalObject, castedThis, str, offset, length, encoding)); @@ -2056,7 +2045,7 @@ static JSC::EncodedJSValue jsBufferPrototypeFunctionWriteWithEncoding(JSC::JSGlo return {}; } - return jsBufferPrototypeFunction_writeEncodingBody(vm, lexicalGlobalObject, castedThis, text, offsetValue, lengthValue); + RELEASE_AND_RETURN(scope, jsBufferPrototypeFunction_writeEncodingBody(vm, lexicalGlobalObject, castedThis, text, offsetValue, lengthValue)); } static JSC::EncodedJSValue jsBufferPrototypeFunction_writeBody(JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* callFrame, typename IDLOperation::ClassParameter castedThis) @@ -2139,6 +2128,7 @@ extern "C" JSC::EncodedJSValue JSBuffer__fromMmap(Zig::GlobalObject* globalObjec })); auto* view = JSC::JSUint8Array::create(globalObject, structure, WTFMove(buffer), 0, length); + RETURN_IF_EXCEPTION(scope, {}); if (!view) [[unlikely]] { throwOutOfMemoryError(globalObject, scope); @@ -2459,6 +2449,7 @@ JSC_DEFINE_HOST_FUNCTION(jsBufferPrototypeFunction_writeBigInt64LE, (JSGlobalObj if (bigint->length() > 1) [[unlikely]] return Bun::ERR::OUT_OF_RANGE(scope, lexicalGlobalObject, "value"_s, ">= -(2n ** 63n) and < 2n ** 63n"_s, valueVal); auto limb = valueVal.toBigUInt64(lexicalGlobalObject); + RETURN_IF_EXCEPTION(scope, {}); if (!bigint->sign() && limb > 0x7fffffffffffffff) return Bun::ERR::OUT_OF_RANGE(scope, lexicalGlobalObject, "value"_s, ">= -(2n ** 63n) and < 2n ** 63n"_s, valueVal); if (bigint->sign() && limb - 0x8000000000000000 > 0x7fffffffffffffff) return Bun::ERR::OUT_OF_RANGE(scope, lexicalGlobalObject, "value"_s, ">= -(2n ** 63n) and < 2n ** 63n"_s, valueVal); int64_t value = static_cast(limb); @@ -2498,6 +2489,7 @@ JSC_DEFINE_HOST_FUNCTION(jsBufferPrototypeFunction_writeBigInt64BE, (JSGlobalObj if (bigint->length() > 1) [[unlikely]] return Bun::ERR::OUT_OF_RANGE(scope, lexicalGlobalObject, "value"_s, ">= -(2n ** 63n) and < 2n ** 63n"_s, valueVal); auto limb = valueVal.toBigUInt64(lexicalGlobalObject); + RETURN_IF_EXCEPTION(scope, {}); if (!bigint->sign() && limb > 0x7fffffffffffffff) return Bun::ERR::OUT_OF_RANGE(scope, lexicalGlobalObject, "value"_s, ">= -(2n ** 63n) and < 2n ** 63n"_s, valueVal); if (bigint->sign() && limb - 0x8000000000000000 > 0x7fffffffffffffff) 
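The `BUFFER_OUT_OF_BOUNDS` change works because the `Bun::ERR` helpers both throw on the scope and return the empty encoded value, so `return ERR(...)` is equivalent to the old throw-then-check pair. A sketch of that assumed contract; the error type and message are illustrative, modeled on Node's:

```cpp
static JSC::EncodedJSValue bufferOutOfBoundsSketch(JSC::ThrowScope& scope,
    JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral name)
{
    JSC::throwRangeError(globalObject, scope,
        WTF::makeString("\""_s, name, "\" is outside of buffer bounds"_s));
    return {}; // empty value; the exception travels on the scope
}
```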
return Bun::ERR::OUT_OF_RANGE(scope, lexicalGlobalObject, "value"_s, ">= -(2n ** 63n) and < 2n ** 63n"_s, valueVal); int64_t value = static_cast(limb); @@ -2539,6 +2531,7 @@ JSC_DEFINE_HOST_FUNCTION(jsBufferPrototypeFunction_writeBigUInt64LE, (JSGlobalOb if (bigint->length() > 1) [[unlikely]] return Bun::ERR::OUT_OF_RANGE(scope, lexicalGlobalObject, "value"_s, ">= 0n and < 2n ** 64n"_s, valueVal); uint64_t value = valueVal.toBigUInt64(lexicalGlobalObject); + RETURN_IF_EXCEPTION(scope, {}); if (offsetVal.isUndefined()) offsetVal = jsNumber(0); if (!offsetVal.isNumber()) [[unlikely]] @@ -2577,6 +2570,7 @@ JSC_DEFINE_HOST_FUNCTION(jsBufferPrototypeFunction_writeBigUInt64BE, (JSGlobalOb if (bigint->length() > 1) [[unlikely]] return Bun::ERR::OUT_OF_RANGE(scope, lexicalGlobalObject, "value"_s, ">= 0n and < 2n ** 64n"_s, valueVal); uint64_t value = valueVal.toBigUInt64(lexicalGlobalObject); + RETURN_IF_EXCEPTION(scope, {}); if (offsetVal.isUndefined()) offsetVal = jsNumber(0); if (!offsetVal.isNumber()) [[unlikely]] @@ -2838,6 +2832,7 @@ EncodedJSValue constructBufferFromArrayBuffer(JSC::ThrowScope& throwScope, JSGlo if (isResizableOrGrowableShared) { auto* subclassStructure = globalObject->JSResizableOrGrowableSharedBufferSubclassStructure(); auto* uint8Array = JSC::JSUint8Array::create(lexicalGlobalObject, subclassStructure, WTFMove(buffer), offset, std::nullopt); + RETURN_IF_EXCEPTION(throwScope, {}); if (!uint8Array) [[unlikely]] { throwOutOfMemoryError(globalObject, throwScope); return {}; @@ -2846,6 +2841,7 @@ EncodedJSValue constructBufferFromArrayBuffer(JSC::ThrowScope& throwScope, JSGlo } auto* subclassStructure = globalObject->JSBufferSubclassStructure(); auto* uint8Array = JSC::JSUint8Array::create(lexicalGlobalObject, subclassStructure, WTFMove(buffer), offset, length); + RETURN_IF_EXCEPTION(throwScope, {}); if (!uint8Array) [[unlikely]] { throwOutOfMemoryError(globalObject, throwScope); return {}; @@ -2873,15 +2869,16 @@ static JSC::EncodedJSValue createJSBufferFromJS(JSC::JSGlobalObject* lexicalGlob } auto anyint = distinguishingArg.asAnyInt(); if (anyint < 0 or anyint > Bun::Buffer::kMaxLength) return Bun::ERR::OUT_OF_RANGE(throwScope, lexicalGlobalObject, "size"_s, 0, Bun::Buffer::kMaxLength, distinguishingArg); - return JSValue::encode(allocBuffer(lexicalGlobalObject, anyint)); + RELEASE_AND_RETURN(throwScope, JSValue::encode(allocBuffer(lexicalGlobalObject, anyint))); } else if (distinguishingArg.isNumber()) { JSValue lengthValue = distinguishingArg; Bun::V::validateNumber(throwScope, lexicalGlobalObject, lengthValue, "size"_s, jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); RETURN_IF_EXCEPTION(throwScope, {}); size_t length = lengthValue.toLength(lexicalGlobalObject); - return JSValue::encode(allocBuffer(lexicalGlobalObject, length)); + RELEASE_AND_RETURN(throwScope, JSValue::encode(allocBuffer(lexicalGlobalObject, length))); } else if (distinguishingArg.isUndefinedOrNull() || distinguishingArg.isBoolean()) { auto arg_string = distinguishingArg.toWTFString(globalObject); + RETURN_IF_EXCEPTION(throwScope, {}); auto message = makeString("The first argument must be of type string or an instance of Buffer, ArrayBuffer, Array or an Array-like object. 
Received "_s, arg_string); return throwVMTypeError(globalObject, throwScope, message); } else if (distinguishingArg.isCell()) { @@ -2912,10 +2909,7 @@ static JSC::EncodedJSValue createJSBufferFromJS(JSC::JSGlobalObject* lexicalGlob return {}; } auto* uint8Array = createUninitializedBuffer(lexicalGlobalObject, byteLength); - if (!uint8Array) [[unlikely]] { - ASSERT(throwScope.exception()); - return {}; - } + RETURN_IF_EXCEPTION(throwScope, {}); if (byteLength) { uint8Array->setFromTypedArray(lexicalGlobalObject, 0, view, 0, byteLength, CopyType::LeftToRight); } @@ -2961,8 +2955,6 @@ JSC_DEFINE_HOST_FUNCTION(constructJSBuffer, (JSC::JSGlobalObject * lexicalGlobal bool JSBuffer__isBuffer(JSC::JSGlobalObject* lexicalGlobalObject, JSC::EncodedJSValue value) { - auto& vm = JSC::getVM(lexicalGlobalObject); - JSC::JSValue jsValue = JSC::JSValue::decode(value); if (!jsValue || !jsValue.isCell()) return false; @@ -2971,6 +2963,6 @@ bool JSBuffer__isBuffer(JSC::JSGlobalObject* lexicalGlobalObject, JSC::EncodedJS if (!cell) return false; - JSValue prototype = cell->getPrototype(vm, lexicalGlobalObject); + JSValue prototype = cell->getPrototype(lexicalGlobalObject); return prototype.inherits(); } diff --git a/src/bun.js/bindings/JSBufferList.cpp b/src/bun.js/bindings/JSBufferList.cpp index c181f7aeb7..3ff38d965a 100644 --- a/src/bun.js/bindings/JSBufferList.cpp +++ b/src/bun.js/bindings/JSBufferList.cpp @@ -183,7 +183,9 @@ JSC::JSValue JSBufferList::_getBuffer(JSC::VM& vm, JSC::JSGlobalObject* lexicalG auto buffer = array->possiblySharedBuffer(); auto off = array->byteOffset(); JSC::JSUint8Array* retArray = JSC::JSUint8Array::create(lexicalGlobalObject, subclassStructure, buffer, off, n); + RETURN_IF_EXCEPTION(throwScope, {}); JSC::JSUint8Array* newArray = JSC::JSUint8Array::create(lexicalGlobalObject, subclassStructure, buffer, off + n, len - n); + RETURN_IF_EXCEPTION(throwScope, {}); m_deque.first().set(vm, this, newArray); RELEASE_AND_RETURN(throwScope, retArray); } @@ -210,6 +212,7 @@ JSC::JSValue JSBufferList::_getBuffer(JSC::VM& vm, JSC::JSGlobalObject* lexicalG auto buffer = array->possiblySharedBuffer(); auto off = array->byteOffset(); JSC::JSUint8Array* newArray = JSC::JSUint8Array::create(lexicalGlobalObject, subclassStructure, buffer, off + n, len - n); + RETURN_IF_EXCEPTION(throwScope, {}); element.set(vm, this, newArray); offset += n; break; diff --git a/src/bun.js/bindings/JSBufferList.h b/src/bun.js/bindings/JSBufferList.h index b9a0d63377..635312f270 100644 --- a/src/bun.js/bindings/JSBufferList.h +++ b/src/bun.js/bindings/JSBufferList.h @@ -92,7 +92,7 @@ public: { if (length() == 0) [[unlikely]] return JSC::jsUndefined(); - return JSC::JSValue(m_deque.first().get()); + return m_deque.first().get(); } JSC::JSValue concat(JSC::VM&, JSC::JSGlobalObject*, size_t); diff --git a/src/bun.js/bindings/JSBunRequest.cpp b/src/bun.js/bindings/JSBunRequest.cpp index 5209f1a953..d93c7480c8 100644 --- a/src/bun.js/bindings/JSBunRequest.cpp +++ b/src/bun.js/bindings/JSBunRequest.cpp @@ -229,7 +229,7 @@ JSC_DEFINE_CUSTOM_GETTER(jsJSBunRequestGetCookies, (JSC::JSGlobalObject * global RETURN_IF_EXCEPTION(throwScope, encodedJSValue()); if (cookieMapResult.hasException()) { WebCore::propagateException(*globalObject, throwScope, cookieMapResult.releaseException()); - return JSValue::encode(jsUndefined()); + RELEASE_AND_RETURN(throwScope, {}); } auto cookieMap = cookieMapResult.releaseReturnValue(); @@ -278,7 +278,7 @@ extern "C" EncodedJSValue Bun__getParamsIfBunRequest(JSC::EncodedJSValue thisVal return 
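`JSBuffer__isBuffer` now uses the one-argument `getPrototype` and no longer fetches the VM at all. The check itself is a prototype-chain brand test; sketched below with the prototype's `ClassInfo` passed in, since the `inherits<...>` template argument is elided in the diff as rendered here:

```cpp
static bool isBufferLike(JSC::JSGlobalObject* globalObject, JSC::JSValue value,
    const JSC::ClassInfo* bufferPrototypeInfo)
{
    if (!value || !value.isCell())
        return false;
    JSC::JSValue prototype = value.asCell()->getPrototype(globalObject);
    return prototype.isCell() && prototype.asCell()->inherits(bufferPrototypeInfo);
}
```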
JSValue::encode(params); } - return JSValue::encode({}); + return {}; } } // namespace Bun diff --git a/src/bun.js/bindings/JSBundlerPlugin.cpp b/src/bun.js/bindings/JSBundlerPlugin.cpp index 397ee5d4fd..799f731ec6 100644 --- a/src/bun.js/bindings/JSBundlerPlugin.cpp +++ b/src/bun.js/bindings/JSBundlerPlugin.cpp @@ -505,7 +505,7 @@ extern "C" void JSBundlerPlugin__matchOnLoad(Bun::JSBundlerPlugin* plugin, const call(globalObject, function, callData, plugin, arguments); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { auto exception = scope.exception(); scope.clearException(); if (!plugin->plugin.tombstoned) { diff --git a/src/bun.js/bindings/JSCommonJSExtensions.cpp b/src/bun.js/bindings/JSCommonJSExtensions.cpp index 34b0056ac4..4999c8bb8a 100644 --- a/src/bun.js/bindings/JSCommonJSExtensions.cpp +++ b/src/bun.js/bindings/JSCommonJSExtensions.cpp @@ -241,12 +241,12 @@ JSC::EncodedJSValue builtinLoader(JSC::JSGlobalObject* globalObject, JSC::CallFr JSC::JSObject* modValue = callFrame->argument(0).getObject(); if (!modValue) { throwTypeError(globalObject, scope, "Module._extensions['.js'] must be called with a CommonJS module object"_s); - return JSC::JSValue::encode({}); + return {}; } Bun::JSCommonJSModule* mod = jsDynamicCast(modValue); if (!mod) { throwTypeError(globalObject, scope, "Module._extensions['.js'] must be called with a CommonJS module object"_s); - return JSC::JSValue::encode({}); + return {}; } JSC::JSValue specifier = callFrame->argument(1); WTF::String specifierWtfString = specifier.toWTFString(globalObject); @@ -283,7 +283,7 @@ JSC::EncodedJSValue builtinLoader(JSC::JSGlobalObject* globalObject, JSC::CallFr JSC::profiledCall(global, JSC::ProfilingReason::API, requireESM, callData, mod, args, returnedException); if (returnedException) [[unlikely]] { throwException(globalObject, scope, returnedException->value()); - return JSC::JSValue::encode({}); + return {}; } } diff --git a/src/bun.js/bindings/JSCommonJSModule.cpp b/src/bun.js/bindings/JSCommonJSModule.cpp index 4739efe53c..36d0e03cb6 100644 --- a/src/bun.js/bindings/JSCommonJSModule.cpp +++ b/src/bun.js/bindings/JSCommonJSModule.cpp @@ -148,7 +148,9 @@ static bool evaluateCommonJSModuleOnce(JSC::VM& vm, Zig::GlobalObject* globalObj // Using same approach as node, `arguments` in the entry point isn't defined // https://github.com/nodejs/node/blob/592c6907bfe1922f36240e9df076be1864c3d1bd/lib/internal/process/execution.js#L92 - globalObject->putDirect(vm, builtinNames(vm).exportsPublicName(), moduleObject->exportsObject(), 0); + auto exports = moduleObject->exportsObject(); + RETURN_IF_EXCEPTION(scope, {}); + globalObject->putDirect(vm, builtinNames(vm).exportsPublicName(), exports, 0); globalObject->putDirect(vm, builtinNames(vm).requirePublicName(), requireFunction, 0); globalObject->putDirect(vm, Identifier::fromString(vm, "module"_s), moduleObject, 0); globalObject->putDirect(vm, Identifier::fromString(vm, "__filename"_s), filename, 0); @@ -183,7 +185,9 @@ static bool evaluateCommonJSModuleOnce(JSC::VM& vm, Zig::GlobalObject* globalObj RETURN_IF_EXCEPTION(scope, false); MarkedArgumentBuffer args; - args.append(moduleObject->exportsObject()); // exports + auto exports = moduleObject->exportsObject(); + RETURN_IF_EXCEPTION(scope, false); + args.append(exports); // exports args.append(requireFunction); // require args.append(moduleObject); // module args.append(filename); // filename @@ -446,6 +450,9 @@ JSC_DEFINE_CUSTOM_GETTER(getterPath, (JSC::JSGlobalObject * globalObject, JSC::E 
JSC_DEFINE_CUSTOM_GETTER(getterParent, (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, JSC::PropertyName)) { + auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); + JSCommonJSModule* thisObject = jsDynamicCast(JSValue::decode(thisValue)); if (!thisObject) [[unlikely]] { return JSValue::encode(jsUndefined()); @@ -465,6 +472,7 @@ JSC_DEFINE_CUSTOM_GETTER(getterParent, (JSC::JSGlobalObject * globalObject, JSC: auto idValue = thisObject->m_id.get(); if (idValue) { auto id = idValue->view(globalObject); + RETURN_IF_EXCEPTION(scope, {}); if (id == "."_s) { thisObject->m_overriddenParent.set(globalObject->vm(), thisObject, jsNull()); return JSValue::encode(jsNull()); @@ -1126,7 +1134,9 @@ void JSCommonJSModule::toSyntheticSource(JSC::JSGlobalObject* globalObject, Vector& exportNames, JSC::MarkedArgumentBuffer& exportValues) { + auto scope = DECLARE_THROW_SCOPE(JSC::getVM(globalObject)); auto result = this->exportsObject(); + RETURN_IF_EXCEPTION(scope, ); populateESMExports(globalObject, result, exportNames, exportValues, this->ignoreESModuleAnnotation); } @@ -1262,14 +1272,15 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionRequireCommonJS, (JSGlobalObject * lexicalGlo if (options.isObject()) { JSObject* obj = options.getObject(); // This getter is expensive and rare. - if (auto typeValue = obj->getIfPropertyExists(globalObject, vm.propertyNames->type)) { + auto typeValue = obj->getIfPropertyExists(globalObject, vm.propertyNames->type); + REQUIRE_CJS_RETURN_IF_EXCEPTION; + if (typeValue) { if (typeValue.isString()) { typeAttribute = typeValue.toWTFString(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); + REQUIRE_CJS_RETURN_IF_EXCEPTION; typeAttributeStr = Bun::toString(typeAttribute); } } - REQUIRE_CJS_RETURN_IF_EXCEPTION; } } @@ -1305,7 +1316,9 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionRequireNativeModule, (JSGlobalObject * lexica res.success = false; memset(&res.result, 0, sizeof res.result); BunString specifierStr = Bun::toString(specifier); - if (auto result = fetchBuiltinModuleWithoutResolution(globalObject, &specifierStr, &res)) { + auto result = fetchBuiltinModuleWithoutResolution(globalObject, &specifierStr, &res); + RETURN_IF_EXCEPTION(throwScope, {}); + if (result) { if (res.success) return JSC::JSValue::encode(result); } @@ -1411,10 +1424,13 @@ std::optional createCommonJSModule( ResolvedSource& source, bool isBuiltIn) { + auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); JSCommonJSModule* moduleObject = nullptr; WTF::String sourceURL = source.source_url.toWTFString(); JSValue entry = globalObject->requireMap()->get(globalObject, requireMapKey); + RETURN_IF_EXCEPTION(scope, {}); bool ignoreESModuleAnnotation = source.tag == ResolvedSourceTagPackageJSONTypeModule; SourceOrigin sourceOrigin; @@ -1423,12 +1439,12 @@ std::optional createCommonJSModule( } if (!moduleObject) { - VM& vm = JSC::getVM(globalObject); size_t index = sourceURL.reverseFind(PLATFORM_SEP, sourceURL.length()); JSString* dirname; JSString* filename = requireMapKey; if (index != WTF::notFound) { dirname = JSC::jsSubstring(globalObject, requireMapKey, 0, index); + RETURN_IF_EXCEPTION(scope, {}); } else { dirname = jsEmptyString(vm); } @@ -1458,6 +1474,7 @@ std::optional createCommonJSModule( JSC::constructEmptyObject(globalObject, globalObject->objectPrototype()), 0); requireMap->set(globalObject, filename, moduleObject); + RETURN_IF_EXCEPTION(scope, {}); } else { sourceOrigin = Zig::toSourceOrigin(sourceURL, isBuiltIn); } @@ -1472,14 +1489,15 @@ std::optional 
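`exportsObject()` is now exception-checked at both call sites above. That is consistent with `module.exports` being read as an ordinary, user-overridable property, in which case the read can execute arbitrary JS. A sketch under that assumption (names hypothetical):

```cpp
static JSC::JSValue readExports(JSC::JSGlobalObject* globalObject, JSC::JSObject* moduleObject)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    // An ordinary property get: a user-defined getter here can throw.
    JSC::JSValue exports = moduleObject->get(globalObject,
        JSC::Identifier::fromString(vm, "exports"_s));
    RETURN_IF_EXCEPTION(scope, {});
    return exports;
}
```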
createCommonJSModule( JSC::MarkedArgumentBuffer& exportValues) -> void { auto* globalObject = jsCast(lexicalGlobalObject); auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); JSValue keyValue = identifierToJSValue(vm, moduleKey); JSValue entry = globalObject->requireMap()->get(globalObject, keyValue); + RETURN_IF_EXCEPTION(scope, {}); if (entry) { if (auto* moduleObject = jsDynamicCast(entry)) { if (!moduleObject->hasEvaluated) { - auto scope = DECLARE_THROW_SCOPE(vm); evaluateCommonJSModuleOnce( vm, globalObject, @@ -1492,6 +1510,7 @@ std::optional createCommonJSModule( // On error, remove the module from the require map // so that it can be re-evaluated on the next require. globalObject->requireMap()->remove(globalObject, moduleObject->filename()); + RETURN_IF_EXCEPTION(scope, {}); scope.throwException(globalObject, exception); return; @@ -1499,6 +1518,7 @@ std::optional createCommonJSModule( } moduleObject->toSyntheticSource(globalObject, moduleKey, exportNames, exportValues); + RETURN_IF_EXCEPTION(scope, {}); } } else { // require map was cleared of the entry @@ -1513,12 +1533,14 @@ JSObject* JSCommonJSModule::createBoundRequireFunction(VM& vm, JSGlobalObject* l ASSERT(!pathString.startsWith("file://"_s)); auto* globalObject = jsCast(lexicalGlobalObject); + auto scope = DECLARE_THROW_SCOPE(vm); JSString* filename = JSC::jsStringWithCache(vm, pathString); auto index = pathString.reverseFind(PLATFORM_SEP, pathString.length()); JSString* dirname; if (index != WTF::notFound) { dirname = JSC::jsSubstring(globalObject, filename, 0, index); + RETURN_IF_EXCEPTION(scope, nullptr); } else { dirname = jsEmptyString(vm); } @@ -1533,12 +1555,14 @@ JSObject* JSCommonJSModule::createBoundRequireFunction(VM& vm, JSGlobalObject* l globalObject->requireFunctionUnbound(), moduleObject, ArgList(), 1, globalObject->commonStrings().requireString(globalObject)); + RETURN_IF_EXCEPTION(scope, nullptr); JSFunction* resolveFunction = JSC::JSBoundFunction::create(vm, globalObject, globalObject->requireResolveFunctionUnbound(), moduleObject->filename(), ArgList(), 1, globalObject->commonStrings().resolveString(globalObject)); + RETURN_IF_EXCEPTION(scope, nullptr); requireFunction->putDirect(vm, vm.propertyNames->resolve, resolveFunction, 0); diff --git a/src/bun.js/bindings/JSDOMExceptionHandling.cpp b/src/bun.js/bindings/JSDOMExceptionHandling.cpp index f2c83e14e7..de78fc5d29 100644 --- a/src/bun.js/bindings/JSDOMExceptionHandling.cpp +++ b/src/bun.js/bindings/JSDOMExceptionHandling.cpp @@ -212,7 +212,9 @@ JSValue createDOMException(JSGlobalObject& lexicalGlobalObject, Exception&& exce void propagateExceptionSlowPath(JSC::JSGlobalObject& lexicalGlobalObject, JSC::ThrowScope& throwScope, Exception&& exception) { throwScope.assertNoExceptionExceptTermination(); - throwException(&lexicalGlobalObject, throwScope, createDOMException(lexicalGlobalObject, WTFMove(exception))); + auto jsException = createDOMException(lexicalGlobalObject, WTFMove(exception)); + RETURN_IF_EXCEPTION(throwScope, ); + throwException(&lexicalGlobalObject, throwScope, jsException); } static EncodedJSValue throwTypeError(JSC::JSGlobalObject& lexicalGlobalObject, JSC::ThrowScope& scope, const String& errorMessage) diff --git a/src/bun.js/bindings/JSEnvironmentVariableMap.cpp b/src/bun.js/bindings/JSEnvironmentVariableMap.cpp index 6f08fa2a2a..7c1781298f 100644 --- a/src/bun.js/bindings/JSEnvironmentVariableMap.cpp +++ b/src/bun.js/bindings/JSEnvironmentVariableMap.cpp @@ -78,7 +78,9 @@ 
JSC_DEFINE_CUSTOM_GETTER(jsTimeZoneEnvironmentVariableGetter, (JSGlobalObject * ZigString name = toZigString(propertyName.publicName()); ZigString value = { nullptr, 0 }; - if (auto hasExistingValue = thisObject->getIfPropertyExists(globalObject, clientData->builtinNames().dataPrivateName())) { + auto hasExistingValue = thisObject->getIfPropertyExists(globalObject, clientData->builtinNames().dataPrivateName()); + RETURN_IF_EXCEPTION(scope, {}); + if (hasExistingValue) { return JSValue::encode(hasExistingValue); } @@ -303,10 +305,13 @@ JSValue createEnvironmentVariablesMap(Zig::GlobalObject* globalObject) bool hasNodeTLSRejectUnauthorized = false; bool hasBunConfigVerboseFetch = false; + auto* cached_getter_setter = JSC::CustomGetterSetter::create(vm, jsGetterEnvironmentVariable, nullptr); + for (size_t i = 0; i < count; i++) { unsigned char* chars; size_t len = Bun__getEnvKey(list, i, &chars); - auto name = String::fromUTF8(std::span { chars, len }); + // We can't really trust that the OS gives us valid UTF-8 + auto name = String::fromUTF8ReplacingInvalidSequences(std::span { chars, len }); #if OS(WINDOWS) keyArray->putByIndexInline(globalObject, (unsigned)i, jsString(vm, name), false); #endif @@ -345,7 +350,11 @@ JSValue createEnvironmentVariablesMap(Zig::GlobalObject* globalObject) } } - object->putDirectCustomAccessor(vm, identifier, JSC::CustomGetterSetter::create(vm, jsGetterEnvironmentVariable, jsSetterEnvironmentVariable), JSC::PropertyAttribute::CustomAccessor | 0); + // JSC::PropertyAttribute::CustomValue calls the getter ONCE (the first + // time) and then sets it onto the object, subsequent calls to the + // getter will not go through the getter and instead will just do the + // property lookup. + object->putDirectCustomAccessor(vm, identifier, cached_getter_setter, JSC::PropertyAttribute::CustomValue | 0); } unsigned int TZAttrs = JSC::PropertyAttribute::CustomAccessor | 0; diff --git a/src/bun.js/bindings/JSGlobalObject.zig b/src/bun.js/bindings/JSGlobalObject.zig index 723b633af8..1380682452 100644 --- a/src/bun.js/bindings/JSGlobalObject.zig +++ b/src/bun.js/bindings/JSGlobalObject.zig @@ -490,6 +490,7 @@ pub const JSGlobalObject = opaque { return JSC__JSGlobalObject__generateHeapSnapshot(this); } + // DEPRECATED - use CatchScope to check for exceptions and signal exceptions by returning JSError pub fn hasException(this: *JSGlobalObject) bool { return JSGlobalObject__hasException(this); } @@ -552,7 +553,7 @@ pub const JSGlobalObject = opaque { /// pub fn reportActiveExceptionAsUnhandled(this: *JSGlobalObject, err: bun.JSError) void { const exception = this.takeException(err); - if (!exception.isTerminationException(this.vm())) { + if (!exception.isTerminationException()) { _ = this.bunVM().uncaughtException(this, exception, false); } } diff --git a/src/bun.js/bindings/JSMockFunction.cpp b/src/bun.js/bindings/JSMockFunction.cpp index d10befc1ba..08ef2a604b 100644 --- a/src/bun.js/bindings/JSMockFunction.cpp +++ b/src/bun.js/bindings/JSMockFunction.cpp @@ -1016,7 +1016,7 @@ extern "C" JSC::EncodedJSValue JSMockFunction__getCalls(EncodedJSValue encodedVa return JSValue::encode(mock->getCalls()); } - return JSValue::encode({}); + return {}; } extern "C" JSC::EncodedJSValue JSMockFunction__getReturns(EncodedJSValue encodedValue) { @@ -1025,7 +1025,7 @@ extern "C" JSC::EncodedJSValue JSMockFunction__getReturns(EncodedJSValue encoded return JSValue::encode(mock->getReturnValues()); } - return JSValue::encode({}); + return {}; } JSC_DEFINE_HOST_FUNCTION(jsMockFunctionGetMockName, 
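The switch from `CustomAccessor` to `CustomValue` for env vars is a caching optimization, per the comment above: the native getter runs on the first read only, after which the result is materialized as a plain property and later reads bypass the getter. Sketch of the installation (the getter name is hypothetical; note a single shared `CustomGetterSetter` suffices for every key):

```cpp
JSC_DECLARE_CUSTOM_GETTER(lazyEnvGetter); // assumed to be defined elsewhere

void installLazyEnvVar(JSC::VM& vm, JSC::JSObject* env, const JSC::Identifier& name)
{
    auto* getterSetter = JSC::CustomGetterSetter::create(vm, lazyEnvGetter, nullptr);
    env->putDirectCustomAccessor(vm, name, getterSetter,
        JSC::PropertyAttribute::CustomValue | 0);
}
```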
(JSC::JSGlobalObject * globalObject, JSC::CallFrame* callframe)) diff --git a/src/bun.js/bindings/JSModuleLoader.zig b/src/bun.js/bindings/JSModuleLoader.zig index bebeaacd9a..8692a6b449 100644 --- a/src/bun.js/bindings/JSModuleLoader.zig +++ b/src/bun.js/bindings/JSModuleLoader.zig @@ -46,8 +46,8 @@ pub const JSModuleLoader = opaque { return JSC__JSModuleLoader__loadAndEvaluateModule(globalObject, module_name); } - extern fn JSModuleLoader__import(*JSGlobalObject, *const bun.String) *JSInternalPromise; - pub fn import(globalObject: *JSGlobalObject, module_name: *const bun.String) *JSInternalPromise { - return JSModuleLoader__import(globalObject, module_name); + extern fn JSModuleLoader__import(*JSGlobalObject, *const bun.String) ?*JSInternalPromise; + pub fn import(globalObject: *JSGlobalObject, module_name: *const bun.String) bun.JSError!*JSInternalPromise { + return JSModuleLoader__import(globalObject, module_name) orelse error.JSError; } }; diff --git a/src/bun.js/bindings/JSObject.zig b/src/bun.js/bindings/JSObject.zig index 748b5322e4..7046faed46 100644 --- a/src/bun.js/bindings/JSObject.zig +++ b/src/bun.js/bindings/JSObject.zig @@ -144,8 +144,18 @@ pub const JSObject = opaque { return JSC__JSObject__create(global, length, creator, Type.call); } - pub fn getIndex(this: JSValue, globalThis: *JSGlobalObject, i: u32) JSValue { - return JSC__JSObject__getIndex(this, globalThis, i); + pub fn getIndex(this: JSValue, globalThis: *JSGlobalObject, i: u32) JSError!JSValue { + // we don't use fromJSHostCall, because it will assert that if there is an exception + // then the JSValue is zero. the function this ends up calling can return undefined + // with an exception: + // https://github.com/oven-sh/WebKit/blob/397dafc9721b8f8046f9448abb6dbc14efe096d3/Source/JavaScriptCore/runtime/JSObjectInlines.h#L112 + var scope: JSC.CatchScope = undefined; + scope.init(globalThis, @src()); + defer scope.deinit(); + const value = JSC__JSObject__getIndex(this, globalThis, i); + try scope.returnIfException(); + bun.assert(value != .zero); + return value; } pub fn putRecord(this: *JSObject, global: *JSGlobalObject, key: *ZigString, values: []ZigString) void { diff --git a/src/bun.js/bindings/JSPromise.zig b/src/bun.js/bindings/JSPromise.zig index 5cb0765641..3affea0d9c 100644 --- a/src/bun.js/bindings/JSPromise.zig +++ b/src/bun.js/bindings/JSPromise.zig @@ -189,12 +189,17 @@ pub const JSPromise = opaque { args: Args, pub fn call(this: *@This(), g: *JSC.JSGlobalObject) callconv(.c) JSC.JSValue { - return JSC.toJSHostValue(g, @call(.auto, Fn, this.args)); + return JSC.toJSHostCall(g, @src(), Fn, this.args); } }; + var scope: JSC.CatchScope = undefined; + scope.init(globalObject, @src()); + defer scope.deinit(); var ctx = Wrapper{ .args = args }; - return JSC__JSPromise__wrap(globalObject, &ctx, @ptrCast(&Wrapper.call)); + const promise = JSC__JSPromise__wrap(globalObject, &ctx, @ptrCast(&Wrapper.call)); + bun.debugAssert(!scope.hasException()); // TODO: properly propagate exception upwards + return promise; } pub fn wrapValue(globalObject: *JSGlobalObject, value: JSValue) JSValue { @@ -265,7 +270,7 @@ pub const JSPromise = opaque { } } - JSC__JSPromise__resolve(this, globalThis, value); + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSPromise__resolve, .{ this, globalThis, value }) catch return bun.debugAssert(false); // TODO: properly propagate exception upwards } pub fn reject(this: *JSPromise, globalThis: *JSGlobalObject, value: JSError!JSValue) void { @@ -279,11 +284,11 @@ pub const 
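The Zig comment above points at a real subtlety: JSC's index getter can return `jsUndefined()` while also leaving an exception pending, so "value is non-empty" must never be used as an implicit no-exception signal. The safe C++ ordering, as a sketch:

```cpp
static JSC::JSValue getIndexChecked(JSC::JSGlobalObject* globalObject,
    JSC::ThrowScope& scope, JSC::JSObject* object, unsigned i)
{
    JSC::JSValue element = object->getIndex(globalObject, i);
    // Check the scope first; only then trust the returned value.
    RETURN_IF_EXCEPTION(scope, {});
    return element;
}
```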
JSPromise = opaque { const err = value catch |err| globalThis.takeException(err); - JSC__JSPromise__reject(this, globalThis, err); + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSPromise__reject, .{ this, globalThis, err }) catch return bun.debugAssert(false); // TODO: properly propagate exception upwards } pub fn rejectAsHandled(this: *JSPromise, globalThis: *JSGlobalObject, value: JSValue) void { - JSC__JSPromise__rejectAsHandled(this, globalThis, value); + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSPromise__rejectAsHandled, .{ this, globalThis, value }) catch return bun.debugAssert(false); // TODO: properly propagate exception upwards } pub fn create(globalThis: *JSGlobalObject) *JSPromise { diff --git a/src/bun.js/bindings/JSPropertyIterator.cpp b/src/bun.js/bindings/JSPropertyIterator.cpp index b7891e8383..d9bd3ed52f 100644 --- a/src/bun.js/bindings/JSPropertyIterator.cpp +++ b/src/bun.js/bindings/JSPropertyIterator.cpp @@ -162,11 +162,11 @@ extern "C" EncodedJSValue Bun__JSPropertyIterator__getNameAndValueNonObservable( auto scope = DECLARE_THROW_SCOPE(vm); PropertySlot slot(object, PropertySlot::InternalMethodType::VMInquiry, vm.ptr()); - if (!object->getNonIndexPropertySlot(globalObject, prop, slot)) { + auto has = object->getNonIndexPropertySlot(globalObject, prop, slot); + RETURN_IF_EXCEPTION(scope, {}); + if (!has) { return {}; } - RETURN_IF_EXCEPTION(scope, {}); - if (slot.isAccessor() || slot.isCustom()) { return {}; } diff --git a/src/bun.js/bindings/JSPropertyIterator.zig b/src/bun.js/bindings/JSPropertyIterator.zig index 226585e337..1453029180 100644 --- a/src/bun.js/bindings/JSPropertyIterator.zig +++ b/src/bun.js/bindings/JSPropertyIterator.zig @@ -81,13 +81,8 @@ pub fn JSPropertyIterator(comptime options: JSPropertyIteratorOptions) type { var name = bun.String.dead; if (comptime options.include_value) { const FnToUse = if (options.observable) JSPropertyIteratorImpl.getNameAndValue else JSPropertyIteratorImpl.getNameAndValueNonObservable; - const current = FnToUse(this.impl.?, this.globalObject, this.object, &name, i); - if (current == .zero) { - if (this.globalObject.hasException()) { - return error.JSError; - } - continue; - } + const current: JSC.JSValue = try FnToUse(this.impl.?, this.globalObject, this.object, &name, i); + if (current == .zero) continue; current.ensureStillAlive(); this.value = current; } else { @@ -121,19 +116,34 @@ const JSPropertyIteratorImpl = opaque { own_properties_only: bool, only_non_index_properties: bool, ) bun.JSError!?*JSPropertyIteratorImpl { - const iter = Bun__JSPropertyIterator__create(globalObject, object.toJS(), count, own_properties_only, only_non_index_properties); - if (globalObject.hasException()) { - return error.JSError; - } - return iter; + return bun.jsc.fromJSHostCallGeneric(globalObject, @src(), Bun__JSPropertyIterator__create, .{ globalObject, object.toJS(), count, own_properties_only, only_non_index_properties }); } pub const deinit = Bun__JSPropertyIterator__deinit; - pub const getNameAndValue = Bun__JSPropertyIterator__getNameAndValue; - pub const getNameAndValueNonObservable = Bun__JSPropertyIterator__getNameAndValueNonObservable; + + pub fn getNameAndValue(iter: *JSPropertyIteratorImpl, globalObject: *JSC.JSGlobalObject, object: *JSC.JSObject, propertyName: *bun.String, i: usize) bun.JSError!JSC.JSValue { + var scope: bun.jsc.CatchScope = undefined; + scope.init(globalObject, @src()); + defer scope.deinit(); + const value = Bun__JSPropertyIterator__getNameAndValue(iter, 
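`getNameAndValueNonObservable` above builds a `VMInquiry` slot so the read cannot run user JS, then skips accessor and custom properties outright rather than invoking them. A condensed sketch of that shape (an empty return value means "skip this property"):

```cpp
static JSC::JSValue getNonObservable(JSC::JSGlobalObject* globalObject,
    JSC::JSObject* object, JSC::PropertyName name)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSC::PropertySlot slot(object, JSC::PropertySlot::InternalMethodType::VMInquiry, &vm);
    bool has = object->getNonIndexPropertySlot(globalObject, name, slot);
    RETURN_IF_EXCEPTION(scope, {});
    if (!has || slot.isAccessor() || slot.isCustom())
        return {}; // empty: caller skips rather than observing user code
    return slot.getValue(globalObject, name); // plain data value; no JS runs
}
```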
globalObject, object, propertyName, i); + try scope.returnIfException(); + return value; + } + + pub fn getNameAndValueNonObservable(iter: *JSPropertyIteratorImpl, globalObject: *JSC.JSGlobalObject, object: *JSC.JSObject, propertyName: *bun.String, i: usize) bun.JSError!JSC.JSValue { + var scope: bun.jsc.CatchScope = undefined; + scope.init(globalObject, @src()); + defer scope.deinit(); + const value = Bun__JSPropertyIterator__getNameAndValueNonObservable(iter, globalObject, object, propertyName, i); + try scope.returnIfException(); + return value; + } + pub const getName = Bun__JSPropertyIterator__getName; + pub const getLongestPropertyName = Bun__JSPropertyIterator__getLongestPropertyName; + /// may return null without an exception extern "c" fn Bun__JSPropertyIterator__create(globalObject: *JSC.JSGlobalObject, encodedValue: JSC.JSValue, count: *usize, own_properties_only: bool, only_non_index_properties: bool) ?*JSPropertyIteratorImpl; extern "c" fn Bun__JSPropertyIterator__getNameAndValue(iter: *JSPropertyIteratorImpl, globalObject: *JSC.JSGlobalObject, object: *JSC.JSObject, propertyName: *bun.String, i: usize) JSC.JSValue; extern "c" fn Bun__JSPropertyIterator__getNameAndValueNonObservable(iter: *JSPropertyIteratorImpl, globalObject: *JSC.JSGlobalObject, object: *JSC.JSObject, propertyName: *bun.String, i: usize) JSC.JSValue; diff --git a/src/bun.js/bindings/JSStringDecoder.cpp b/src/bun.js/bindings/JSStringDecoder.cpp index 7ae9c16959..4795880fdf 100644 --- a/src/bun.js/bindings/JSStringDecoder.cpp +++ b/src/bun.js/bindings/JSStringDecoder.cpp @@ -81,6 +81,7 @@ static inline JSStringDecoder* jsStringDecoderCast(JSGlobalObject* globalObject, if (JSC::JSObject* thisObject = stringDecoderValue.getObject()) { auto clientData = WebCore::clientData(vm); JSValue existingDecoderValue = thisObject->getIfPropertyExists(globalObject, clientData->builtinNames().decodePrivateName()); + RETURN_IF_EXCEPTION(throwScope, {}); if (existingDecoderValue) [[likely]] { if (auto cast = jsDynamicCast(existingDecoderValue); cast) [[likely]] { return cast; @@ -199,7 +200,7 @@ JSC::JSValue JSStringDecoder::text(JSC::VM& vm, JSC::JSGlobalObject* globalObjec if (length == offset) RELEASE_AND_RETURN(throwScope, JSC::jsEmptyString(vm)); if ((length - offset) % 2 == 0) { - UChar c = (static_cast(bufPtr[length - 1]) << 8) + static_cast(bufPtr[length - 2]); + char16_t c = (static_cast(bufPtr[length - 1]) << 8) + static_cast(bufPtr[length - 2]); if (c >= 0xD800 && c <= 0xDBFF) { m_lastNeed = 2; m_lastTotal = 4; @@ -457,24 +458,24 @@ JSC_DEFINE_HOST_FUNCTION(jsStringDecoderPrototypeFunction_write, { auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); JSStringDecoder* castedThis = jsStringDecoderCast(globalObject, callFrame->thisValue(), "write"_s); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); - return jsStringDecoderPrototypeFunction_writeBody(globalObject, callFrame, castedThis); + RETURN_IF_EXCEPTION(scope, {}); + RELEASE_AND_RETURN(scope, jsStringDecoderPrototypeFunction_writeBody(globalObject, callFrame, castedThis)); } JSC_DEFINE_HOST_FUNCTION(jsStringDecoderPrototypeFunction_end, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); JSStringDecoder* castedThis = jsStringDecoderCast(globalObject, callFrame->thisValue(), "end"_s); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); - return jsStringDecoderPrototypeFunction_endBody(globalObject, callFrame, castedThis); + RETURN_IF_EXCEPTION(scope, {}); + RELEASE_AND_RETURN(scope, 
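The `UChar` to `char16_t` change in `text()` is type-only; the surrogate logic is unchanged. Since the byte order is easy to misread, here is a runnable restatement: the decoder assembles the final little-endian UTF-16 code unit and withholds two bytes when it is a lead surrogate (0xD800 to 0xDBFF) still awaiting its trail half.

```cpp
#include <cstdint>
#include <cstdio>

static bool endsWithLeadSurrogate(const uint8_t* buf, size_t length)
{
    if (length < 2 || length % 2 != 0)
        return false;
    // Little-endian: the high byte of the code unit is the *last* byte.
    char16_t c = static_cast<char16_t>((buf[length - 1] << 8) | buf[length - 2]);
    return c >= 0xD800 && c <= 0xDBFF;
}

int main()
{
    // U+1F600 is the code unit pair D83D DE00; in UTF-16LE the first unit
    // arrives as the bytes 3D D8. With only those bytes, decoding must wait.
    const uint8_t bytes[] = { 0x3D, 0xD8 };
    std::printf("%d\n", endsWithLeadSurrogate(bytes, sizeof bytes)); // prints 1
}
```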
jsStringDecoderPrototypeFunction_endBody(globalObject, callFrame, castedThis)); } JSC_DEFINE_HOST_FUNCTION(jsStringDecoderPrototypeFunction_text, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); JSStringDecoder* castedThis = jsStringDecoderCast(globalObject, callFrame->thisValue(), "text"_s); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); - return jsStringDecoderPrototypeFunction_textBody(globalObject, callFrame, castedThis); + RETURN_IF_EXCEPTION(scope, {}); + RELEASE_AND_RETURN(scope, jsStringDecoderPrototypeFunction_textBody(globalObject, callFrame, castedThis)); } static JSC_DEFINE_CUSTOM_GETTER(jsStringDecoder_lastChar, (JSGlobalObject * lexicalGlobalObject, JSC::EncodedJSValue thisValue, PropertyName attributeName)) @@ -482,7 +483,7 @@ static JSC_DEFINE_CUSTOM_GETTER(jsStringDecoder_lastChar, (JSGlobalObject * lexi auto& vm = JSC::getVM(lexicalGlobalObject); auto scope = DECLARE_THROW_SCOPE(vm); JSStringDecoder* castedThis = jsStringDecoderCast(lexicalGlobalObject, JSC::JSValue::decode(thisValue), "lastChar"_s); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); auto buffer = ArrayBuffer::create({ castedThis->m_lastChar, 4 }); auto* globalObject = reinterpret_cast(lexicalGlobalObject); JSC::JSUint8Array* uint8Array = JSC::JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(buffer), 0, 4); @@ -493,7 +494,7 @@ static JSC_DEFINE_CUSTOM_GETTER(jsStringDecoder_lastNeed, (JSGlobalObject * lexi auto& vm = JSC::getVM(lexicalGlobalObject); auto scope = DECLARE_THROW_SCOPE(vm); JSStringDecoder* castedThis = jsStringDecoderCast(lexicalGlobalObject, JSC::JSValue::decode(thisValue), "lastNeed"_s); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); RELEASE_AND_RETURN(scope, JSC::JSValue::encode(JSC::jsNumber(castedThis->m_lastNeed))); } static JSC_DEFINE_CUSTOM_GETTER(jsStringDecoder_lastTotal, (JSGlobalObject * lexicalGlobalObject, JSC::EncodedJSValue thisValue, PropertyName attributeName)) @@ -501,7 +502,7 @@ static JSC_DEFINE_CUSTOM_GETTER(jsStringDecoder_lastTotal, (JSGlobalObject * lex auto& vm = JSC::getVM(lexicalGlobalObject); auto scope = DECLARE_THROW_SCOPE(vm); JSStringDecoder* castedThis = jsStringDecoderCast(lexicalGlobalObject, JSC::JSValue::decode(thisValue), "lastTotal"_s); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); RELEASE_AND_RETURN(scope, JSC::JSValue::encode(JSC::jsNumber(castedThis->m_lastTotal))); } @@ -510,7 +511,7 @@ static JSC_DEFINE_CUSTOM_GETTER(jsStringDecoder_encoding, (JSGlobalObject * lexi auto& vm = JSC::getVM(lexicalGlobalObject); auto scope = DECLARE_THROW_SCOPE(vm); JSStringDecoder* castedThis = jsStringDecoderCast(lexicalGlobalObject, JSC::JSValue::decode(thisValue), "encoding"_s); - RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); return JSC::JSValue::encode(WebCore::convertEnumerationToJS(*lexicalGlobalObject, castedThis->m_encoding)); } diff --git a/src/bun.js/bindings/JSValue.zig b/src/bun.js/bindings/JSValue.zig index 218f07800c..8c4fddb8a2 100644 --- a/src/bun.js/bindings/JSValue.zig +++ b/src/bun.js/bindings/JSValue.zig @@ -46,19 +46,17 @@ pub const JSValue = enum(i64) { return @as(JSValue, @enumFromInt(@as(i64, @bitCast(@intFromPtr(ptr))))); } - // TODO: use JSError! 
`toInt32` can throw extern fn JSC__JSValue__coerceToInt32(this: JSValue, globalThis: *JSC.JSGlobalObject) i32; - pub fn coerceToInt32(this: JSValue, globalThis: *JSC.JSGlobalObject) i32 { - return JSC__JSValue__coerceToInt32(this, globalThis); + pub fn coerceToInt32(this: JSValue, globalThis: *JSC.JSGlobalObject) bun.JSError!i32 { + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSValue__coerceToInt32, .{ this, globalThis }); } - // TODO: use JSError! `toInt32` can throw extern fn JSC__JSValue__coerceToInt64(this: JSValue, globalThis: *JSC.JSGlobalObject) i64; - pub fn coerceToInt64(this: JSValue, globalThis: *JSC.JSGlobalObject) i64 { - return JSC__JSValue__coerceToInt64(this, globalThis); + pub fn coerceToInt64(this: JSValue, globalThis: *JSC.JSGlobalObject) bun.JSError!i64 { + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSValue__coerceToInt64, .{ this, globalThis }); } - pub fn getIndex(this: JSValue, globalThis: *JSGlobalObject, i: u32) JSValue { + pub fn getIndex(this: JSValue, globalThis: *JSGlobalObject, i: u32) JSError!JSValue { return JSC.JSObject.getIndex(this, globalThis, i); } @@ -91,8 +89,8 @@ pub const JSValue = enum(i64) { globalThis: *JSC.JSGlobalObject, ctx: ?*anyopaque, callback: PropertyIteratorFn, - ) void { - JSC__JSValue__forEachPropertyNonIndexed(this, globalThis, ctx, callback); + ) JSError!void { + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSValue__forEachPropertyNonIndexed, .{ this, globalThis, ctx, callback }); } pub fn forEachProperty( @@ -100,40 +98,26 @@ pub const JSValue = enum(i64) { globalThis: *JSC.JSGlobalObject, ctx: ?*anyopaque, callback: PropertyIteratorFn, - ) void { - JSC__JSValue__forEachProperty(this, globalThis, ctx, callback); + ) JSError!void { + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSValue__forEachProperty, .{ this, globalThis, ctx, callback }); } pub fn forEachPropertyOrdered( this: JSValue, - globalObject: *JSC.JSGlobalObject, + globalThis: *JSC.JSGlobalObject, ctx: ?*anyopaque, callback: PropertyIteratorFn, - ) void { - JSC__JSValue__forEachPropertyOrdered(this, globalObject, ctx, callback); + ) JSError!void { + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSValue__forEachPropertyOrdered, .{ this, globalThis, ctx, callback }); } - extern fn JSC__JSValue__coerceToDouble(this: JSValue, globalObject: *JSC.JSGlobalObject) f64; - /// Prefer toNumber over this function to - /// - Match the underlying JSC api name - /// - Match the underlying specification - /// - Catch exceptions - pub fn coerceToDouble(this: JSValue, globalObject: *JSC.JSGlobalObject) f64 { - return JSC__JSValue__coerceToDouble(this, globalObject); - } - - extern fn Bun__JSValue__toNumber(value: JSValue, global: *JSGlobalObject, had_error: *bool) f64; + extern fn Bun__JSValue__toNumber(value: JSValue, global: *JSGlobalObject) f64; /// Perform the ToNumber abstract operation, coercing a value to a number. 
/// Equivalent to `+value` /// https://tc39.es/ecma262/#sec-tonumber pub fn toNumber(this: JSValue, global: *JSGlobalObject) bun.JSError!f64 { - var had_error: bool = false; - const result = Bun__JSValue__toNumber(this, global, &had_error); - if (had_error) { - return error.JSError; - } - return result; + return bun.jsc.fromJSHostCallGeneric(global, @src(), Bun__JSValue__toNumber, .{ this, global }); } // ECMA-262 20.1.2.3 Number.isInteger @@ -164,14 +148,13 @@ pub const JSValue = enum(i64) { return @trunc(d) == d and @abs(d) <= JSC.MAX_SAFE_INTEGER; } - pub fn coerce(this: JSValue, comptime T: type, globalThis: *JSC.JSGlobalObject) T { + pub fn coerce(this: JSValue, comptime T: type, globalThis: *JSC.JSGlobalObject) bun.JSError!T { return switch (T) { - bool => this.toBoolean(), f64 => { if (this.isDouble()) { return this.asDouble(); } - return this.coerceToDouble(globalThis); + return this.toNumber(globalThis); }, i64 => { return this.coerceToInt64(globalThis); @@ -193,7 +176,7 @@ pub const JSValue = enum(i64) { if (this.getNumber()) |num| { return @bitCast(coerceJSValueDoubleTruncatingT(i32, num)); } - return @bitCast(this.coerceToInt32(globalThis)); + return @bitCast(try this.coerceToInt32(globalThis)); }, else => @compileError("Unsupported coercion type"), }; @@ -255,13 +238,13 @@ pub const JSValue = enum(i64) { return this.call(globalThis, globalThis.toJSValue(), args); } - pub extern "c" fn Bun__JSValue__call( + extern "c" fn Bun__JSValue__call( ctx: *JSGlobalObject, object: JSValue, thisObject: JSValue, argumentCount: usize, arguments: [*]const JSValue, - ) JSValue.MaybeException; + ) JSValue; pub fn call(function: JSValue, global: *JSGlobalObject, thisValue: JSC.JSValue, args: []const JSC.JSValue) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); @@ -277,22 +260,19 @@ pub const JSValue = enum(i64) { // this can be an async context so it's fine if it's not callable. 
} - return Bun__JSValue__call( + return fromJSHostCall(global, @src(), Bun__JSValue__call, .{ global, function, thisValue, args.len, args.ptr, - ).unwrap(); + }); } extern fn Bun__Process__queueNextTick1(*JSGlobalObject, func: JSValue, JSValue) void; extern fn Bun__Process__queueNextTick2(*JSGlobalObject, func: JSValue, JSValue, JSValue) void; pub inline fn callNextTick(function: JSValue, global: *JSGlobalObject, args: anytype) void { - if (Environment.isDebug) { - bun.assert(function.isCallable()); - } switch (comptime bun.len(@as(@TypeOf(args), undefined))) { 1 => Bun__Process__queueNextTick1(@ptrCast(global), function, args[0]), 2 => Bun__Process__queueNextTick2(@ptrCast(global), function, args[0], args[1]), @@ -337,7 +317,7 @@ pub const JSValue = enum(i64) { extern fn JSC__JSValue__createEmptyArray(global: *JSGlobalObject, len: usize) JSValue; pub fn createEmptyArray(global: *JSGlobalObject, len: usize) bun.JSError!JSValue { - return bun.jsc.fromJSHostValue(JSC__JSValue__createEmptyArray(global, len)); + return fromJSHostCall(global, @src(), JSC__JSValue__createEmptyArray, .{ global, len }); } extern fn JSC__JSValue__putRecord(value: JSValue, global: *JSGlobalObject, key: *ZigString, values_array: [*]ZigString, values_len: usize) void; @@ -385,14 +365,19 @@ pub const JSValue = enum(i64) { JSC__JSValue__putMayBeIndex(this, globalObject, key, value); } + extern fn JSC__JSValue__putToPropertyKey(target: JSValue, globalObject: *JSGlobalObject, key: JSC.JSValue, value: JSC.JSValue) void; + pub fn putToPropertyKey(target: JSValue, globalObject: *JSGlobalObject, key: JSC.JSValue, value: JSC.JSValue) bun.JSError!void { + return bun.jsc.host_fn.fromJSHostCallGeneric(globalObject, @src(), JSC__JSValue__putToPropertyKey, .{ target, globalObject, key, value }); + } + extern fn JSC__JSValue__putIndex(value: JSValue, globalObject: *JSGlobalObject, i: u32, out: JSValue) void; - pub fn putIndex(value: JSValue, globalObject: *JSGlobalObject, i: u32, out: JSValue) void { - JSC__JSValue__putIndex(value, globalObject, i, out); + pub fn putIndex(value: JSValue, globalObject: *JSGlobalObject, i: u32, out: JSValue) bun.JSError!void { + return bun.jsc.fromJSHostCallGeneric(globalObject, @src(), JSC__JSValue__putIndex, .{ value, globalObject, i, out }); } extern fn JSC__JSValue__push(value: JSValue, globalObject: *JSGlobalObject, out: JSValue) void; - pub fn push(value: JSValue, globalObject: *JSGlobalObject, out: JSValue) void { - JSC__JSValue__push(value, globalObject, out); + pub fn push(value: JSValue, globalObject: *JSGlobalObject, out: JSValue) bun.JSError!void { + return bun.jsc.fromJSHostCallGeneric(globalObject, @src(), JSC__JSValue__push, .{ value, globalObject, out }); } extern fn JSC__JSValue__toISOString(*JSC.JSGlobalObject, JSC.JSValue, *[28]u8) c_int; @@ -523,24 +508,10 @@ pub const JSValue = enum(i64) { Bun__JSValue__unprotect(this); } - extern fn JSC__JSValue__JSONValueFromString( - global: *JSGlobalObject, - str: [*]const u8, - len: usize, - ascii: bool, - ) JSValue; - pub fn JSONValueFromString( - global: *JSGlobalObject, - str: [*]const u8, - len: usize, - ascii: bool, - ) JSValue { - return JSC__JSValue__JSONValueFromString(global, str, len, ascii); - } extern fn JSC__JSValue__createObject2(global: *JSGlobalObject, key1: *const ZigString, key2: *const ZigString, value1: JSValue, value2: JSValue) JSValue; /// Create an object with exactly two properties - pub fn createObject2(global: *JSGlobalObject, key1: *const ZigString, key2: *const ZigString, value1: JSValue, value2: JSValue) JSValue { 
- return JSC__JSValue__createObject2(global, key1, key2, value1, value2); + pub fn createObject2(global: *JSGlobalObject, key1: *const ZigString, key2: *const ZigString, value1: JSValue, value2: JSValue) bun.JSError!JSValue { + return bun.jsc.fromJSHostCall(global, @src(), JSC__JSValue__createObject2, .{ global, key1, key2, value1, value2 }); } /// this must have been created by fromPtrAddress() @@ -603,9 +574,9 @@ pub const JSValue = enum(i64) { } extern fn JSC__JSValue__createUninitializedUint8Array(globalObject: *JSGlobalObject, len: usize) JSValue; - pub fn createUninitializedUint8Array(globalObject: *JSGlobalObject, len: usize) JSValue { + pub fn createUninitializedUint8Array(globalObject: *JSGlobalObject, len: usize) bun.JSError!JSValue { JSC.markBinding(@src()); - return JSC__JSValue__createUninitializedUint8Array(globalObject, len); + return bun.jsc.fromJSHostCall(globalObject, @src(), JSC__JSValue__createUninitializedUint8Array, .{ globalObject, len }); } pub fn createBufferWithCtx(globalObject: *JSGlobalObject, slice: []u8, ptr: ?*anyopaque, func: JSC.C.JSTypedArrayBytesDeallocator) JSValue { @@ -758,24 +729,24 @@ pub const JSValue = enum(i64) { } extern fn JSC__JSValue__keys(globalThis: *JSGlobalObject, value: JSValue) JSValue; - pub fn keys(value: JSValue, globalThis: *JSGlobalObject) JSValue { - return JSC__JSValue__keys( + pub fn keys(value: JSValue, globalThis: *JSGlobalObject) JSError!JSValue { + return fromJSHostCall(globalThis, @src(), JSC__JSValue__keys, .{ globalThis, value, - ); + }); } extern fn JSC__JSValue__values(globalThis: *JSGlobalObject, value: JSValue) JSValue; /// This is `Object.values`. /// `value` is assumed to be not empty, undefined, or null. - pub fn values(value: JSValue, globalThis: *JSGlobalObject) JSValue { + pub fn values(value: JSValue, globalThis: *JSGlobalObject) JSError!JSValue { if (comptime bun.Environment.allow_assert) { bun.assert(!value.isEmptyOrUndefinedOrNull()); } - return JSC__JSValue__values( + return fromJSHostCall(globalThis, @src(), JSC__JSValue__values, .{ globalThis, value, - ); + }); } extern "c" fn JSC__JSValue__hasOwnPropertyValue(JSValue, *JSGlobalObject, JSValue) bool; @@ -783,9 +754,11 @@ pub const JSValue = enum(i64) { /// Returns true if the object has the property, false otherwise /// /// If the object is not an object, it will crash. 
**You must check if the object is an object before calling this function.** - pub const hasOwnPropertyValue = JSC__JSValue__hasOwnPropertyValue; + pub fn hasOwnPropertyValue(this: JSValue, global: *JSGlobalObject, key: JSValue) JSError!bool { + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__hasOwnPropertyValue, .{ this, global, key }); + } - pub inline fn arrayIterator(this: JSValue, global: *JSGlobalObject) JSArrayIterator { + pub inline fn arrayIterator(this: JSValue, global: *JSGlobalObject) JSError!JSArrayIterator { return JSArrayIterator.init(this, global); } @@ -1044,7 +1017,7 @@ pub const JSValue = enum(i64) { pub inline fn isFunction(this: JSValue) bool { return this.isCell() and this.jsType().isFunction(); } - pub fn isObjectEmpty(this: JSValue, globalObject: *JSGlobalObject) bool { + pub fn isObjectEmpty(this: JSValue, globalObject: *JSGlobalObject) JSError!bool { const type_of_value = this.jsType(); // https://github.com/jestjs/jest/blob/main/packages/jest-get-type/src/index.ts#L26 // Map and Set are not considered as object in jest-extended @@ -1052,7 +1025,7 @@ pub const JSValue = enum(i64) { return false; } - return this.jsType().isObject() and keys(this, globalObject).getLength(globalObject) == 0; + return this.jsType().isObject() and try (try this.keys(globalObject)).getLength(globalObject) == 0; } extern fn JSC__JSValue__isClass(this: JSValue, global: *JSGlobalObject) bool; @@ -1118,20 +1091,27 @@ pub const JSValue = enum(i64) { return JSC__JSValue__isException(this, vm); } - extern fn JSC__JSValue__isTerminationException(this: JSValue, vm: *VM) bool; - pub fn isTerminationException(this: JSValue, vm: *VM) bool { - return JSC__JSValue__isTerminationException(this, vm); + /// Cast to an Exception pointer, or null if not an Exception + pub fn asException(this: JSValue, vm: *VM) ?*JSC.Exception { + return if (this.isException(vm)) + this.uncheckedPtrCast(JSC.Exception) + else + null; + } + + extern fn JSC__JSValue__isTerminationException(this: JSValue) bool; + pub fn isTerminationException(this: JSValue) bool { + return JSC__JSValue__isTerminationException(this); } extern fn JSC__JSValue__toZigException(this: JSValue, global: *JSGlobalObject, exception: *ZigException) void; pub fn toZigException(this: JSValue, global: *JSGlobalObject, exception: *ZigException) void { - return JSC__JSValue__toZigException(this, global, exception); + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__toZigException, .{ this, global, exception }) catch return; // TODO: properly propagate termination } extern fn JSC__JSValue__toZigString(this: JSValue, out: *ZigString, global: *JSGlobalObject) void; - pub fn toZigString(this: JSValue, out: *ZigString, global: *JSGlobalObject) error{JSError}!void { - JSC__JSValue__toZigString(this, out, global); - if (global.hasException()) return error.JSError; + pub fn toZigString(this: JSValue, out: *ZigString, global: *JSGlobalObject) JSError!void { + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__toZigString, .{ this, out, global }); } /// Increments the reference count, you must call `.deref()` or it will leak memory. 
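The JSValue.zig hunks above repeatedly swap the old shape (call the extern binding, then poll `hasException()`) for wrappers that return `bun.JSError!T` via `fromJSHostCall`/`fromJSHostCallGeneric`. Below is a minimal, self-contained Zig sketch of that mechanic; `Global`, `hostCoerceToInt32`, and this toy `coerceToInt32` are illustrative stand-ins rather than Bun's real internals, with the host-call helper reduced to a single explicit check:

```zig
const std = @import("std");

const JSError = error{JSError};

/// Stand-in for *JSGlobalObject: only tracks whether a "JS exception" is pending.
const Global = struct {
    exception: bool = false,

    fn hasException(self: *const Global) bool {
        return self.exception;
    }
};

/// Stand-in for an `extern "c"` host binding that may leave an exception behind.
fn hostCoerceToInt32(global: *Global, value: f64) i32 {
    if (std.math.isNan(value)) {
        global.exception = true; // the host call "threw"
        return 0;
    }
    return @intFromFloat(@trunc(value));
}

/// What the `fromJSHostCallGeneric` wrappers boil down to: make the host call,
/// then translate a pending exception into `error.JSError`.
fn coerceToInt32(global: *Global, value: f64) JSError!i32 {
    const result = hostCoerceToInt32(global, value);
    if (global.hasException()) return error.JSError;
    return result;
}

pub fn main() !void {
    var global = Global{};
    std.debug.print("ok: {d}\n", .{try coerceToInt32(&global, 3.7)});
    // NaN makes the host call "throw"; the wrapper surfaces it as JSError.
    if (coerceToInt32(&global, std.math.nan(f64))) |_| {} else |err| {
        std.debug.print("propagated: {s}\n", .{@errorName(err)});
    }
}
```

Once the wrapper returns an error union, every call site is forced to `try` or `catch`, which is why so many callers in this diff (`putIndex`, `getLength`, `coerce`) gained a `try`.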
@@ -1225,14 +1205,19 @@ pub const JSValue = enum(i64) { } extern fn JSC__JSValue__jsonStringify(this: JSValue, globalThis: *JSGlobalObject, indent: u32, out: *bun.String) void; - pub fn jsonStringify(this: JSValue, globalThis: *JSGlobalObject, indent: u32, out: *bun.String) void { - return JSC__JSValue__jsonStringify(this, globalThis, indent, out); + pub fn jsonStringify(this: JSValue, globalThis: *JSGlobalObject, indent: u32, out: *bun.String) bun.JSError!void { + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSValue__jsonStringify, .{ this, globalThis, indent, out }); } extern fn JSC__JSValue__toStringOrNull(this: JSValue, globalThis: *JSGlobalObject) ?*JSString; // Calls JSValue::toStringOrNull. Returns error on exception. pub fn toJSString(this: JSValue, globalThis: *JSGlobalObject) bun.JSError!*JSString { - return JSC__JSValue__toStringOrNull(this, globalThis) orelse return error.JSError; + var scope: ExceptionValidationScope = undefined; + scope.init(globalThis, @src()); + defer scope.deinit(); + const maybe_string = JSC__JSValue__toStringOrNull(this, globalThis); + scope.assertExceptionPresenceMatches(maybe_string == null); + return maybe_string orelse error.JSError; } /// Call `toString()` on the JSValue and clone the result. @@ -1363,8 +1348,13 @@ pub const JSValue = enum(i64) { if (bun.Environment.isDebug) bun.assert(this.isObject()); - return switch (JSC__JSValue__fastGet(this, global, @intFromEnum(builtin_name))) { - .zero => error.JSError, + return switch (try fromJSHostCall( + global, + @src(), + JSC__JSValue__fastGet, + .{ this, global, @intFromEnum(builtin_name) }, + )) { + .zero => unreachable, // handled by fromJSHostCall .js_undefined, .property_does_not_exist_on_object => null, else => |val| val, }; @@ -1398,8 +1388,13 @@ pub const JSValue = enum(i64) { extern fn JSC__JSValue__getIfPropertyExistsImpl(target: JSValue, global: *JSGlobalObject, ptr: [*]const u8, len: u32) JSValue; extern fn JSC__JSValue__getPropertyValue(target: JSValue, global: *JSGlobalObject, ptr: [*]const u8, len: u32) JSValue; extern fn JSC__JSValue__getIfPropertyExistsFromPath(this: JSValue, global: *JSGlobalObject, path: JSValue) JSValue; - pub fn getIfPropertyExistsFromPath(this: JSValue, global: *JSGlobalObject, path: JSValue) JSValue { - return JSC__JSValue__getIfPropertyExistsFromPath(this, global, path); + pub fn getIfPropertyExistsFromPath(this: JSValue, global: *JSGlobalObject, path: JSValue) JSError!JSValue { + var scope: CatchScope = undefined; + scope.init(global, @src()); + defer scope.deinit(); + const result = JSC__JSValue__getIfPropertyExistsFromPath(this, global, path); + try scope.returnIfException(); + return result; } extern fn JSC__JSValue__getSymbolDescription(this: JSValue, global: *JSGlobalObject, str: *ZigString) void; @@ -1418,16 +1413,24 @@ pub const JSValue = enum(i64) { } extern fn JSC__JSValue___then(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: *const JSC.JSHostFn, reject: *const JSC.JSHostFn) void; - pub fn _then(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: JSC.JSHostFnZig, reject: JSC.JSHostFnZig) void { + fn _then(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: JSC.JSHostFnZig, reject: JSC.JSHostFnZig) void { return JSC__JSValue___then(this, global, ctx, toJSHostFunction(resolve), toJSHostFunction(reject)); } pub fn _then2(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: *const JSC.JSHostFn, reject: *const JSC.JSHostFn) void { - return JSC__JSValue___then(this, global, ctx, resolve, 
reject); + var scope: CatchScope = undefined; + scope.init(global, @src()); + defer scope.deinit(); + JSC__JSValue___then(this, global, ctx, resolve, reject); + bun.debugAssert(!scope.hasException()); // TODO: properly propagate exception upwards } pub fn then(this: JSValue, global: *JSGlobalObject, ctx: ?*anyopaque, resolve: JSC.JSHostFnZig, reject: JSC.JSHostFnZig) void { - return this._then(global, JSValue.fromPtrAddress(@intFromPtr(ctx)), resolve, reject); + var scope: CatchScope = undefined; + scope.init(global, @src()); + defer scope.deinit(); + this._then(global, JSValue.fromPtrAddress(@intFromPtr(ctx)), resolve, reject); + bun.debugAssert(!scope.hasException()); // TODO: properly propagate exception upwards } pub fn getDescription(this: JSValue, global: *JSGlobalObject) ZigString { @@ -1436,23 +1439,6 @@ pub const JSValue = enum(i64) { return zig_str; } - /// Equivalent to `obj.property` in JavaScript. - /// Reminder: `undefined` is a value! - /// - /// Prefer `get` in new code, as this function is incapable of returning an exception - pub fn get_unsafe(this: JSValue, global: *JSGlobalObject, property: []const u8) ?JSValue { - if (comptime bun.Environment.isDebug) { - if (BuiltinName.has(property)) { - Output.debugWarn("get(\"{s}\") called. Please use fastGet(.{s}) instead!", .{ property, property }); - } - } - - return switch (JSC__JSValue__getIfPropertyExistsImpl(this, global, property.ptr, @intCast(property.len))) { - .js_undefined, .zero, .property_does_not_exist_on_object => null, - else => |val| val, - }; - } - /// Equivalent to `target[property]`. Calls userland getters/proxies. Can /// throw. Null indicates the property does not exist. JavaScript undefined /// and JavaScript null can exist as a property and is different than zig @@ -1466,7 +1452,7 @@ pub const JSValue = enum(i64) { /// Cannot handle property names that are numeric indexes. (For this use `getPropertyValue` instead.) /// pub inline fn get(target: JSValue, global: *JSGlobalObject, property: anytype) JSError!?JSValue { - if (bun.Environment.isDebug) bun.assert(target.isObject()); + bun.debugAssert(target.isObject()); const property_slice: []const u8 = property; // must be a slice! // This call requires `get` to be `inline` @@ -1476,8 +1462,13 @@ pub const JSValue = enum(i64) { } } - return switch (JSC__JSValue__getIfPropertyExistsImpl(target, global, property_slice.ptr, @intCast(property_slice.len))) { - .zero => error.JSError, + return switch (try fromJSHostCall(global, @src(), JSC__JSValue__getIfPropertyExistsImpl, .{ + target, + global, + property_slice.ptr, + @intCast(property_slice.len), + })) { + .zero => unreachable, // handled by fromJSHostCall .property_does_not_exist_on_object => null, // TODO: see bug described in ObjectBindings.cpp @@ -1512,10 +1503,17 @@ pub const JSValue = enum(i64) { extern fn JSC__JSValue__getOwn(value: JSValue, globalObject: *JSGlobalObject, propertyName: *const bun.String) JSValue; /// Get *own* property value (i.e. 
does not resolve property in the prototype chain) - pub fn getOwn(this: JSValue, global: *JSGlobalObject, property_name: anytype) ?JSValue { + pub fn getOwn(this: JSValue, global: *JSGlobalObject, property_name: anytype) bun.JSError!?JSValue { var property_name_str = bun.String.init(property_name); + var scope: CatchScope = undefined; + scope.init(global, @src()); + defer scope.deinit(); const value = JSC__JSValue__getOwn(this, global, &property_name_str); - return if (@intFromEnum(value) != 0) value else return null; + try scope.returnIfException(); + return if (value == .zero) + null + else + value; } extern fn JSC__JSValue__getOwnByValue(value: JSValue, globalObject: *JSGlobalObject, propertyValue: JSValue) JSValue; @@ -1525,8 +1523,8 @@ pub const JSValue = enum(i64) { return if (@intFromEnum(value) != 0) value else return null; } - pub fn getOwnTruthy(this: JSValue, global: *JSGlobalObject, property_name: anytype) ?JSValue { - if (getOwn(this, global, property_name)) |prop| { + pub fn getOwnTruthy(this: JSValue, global: *JSGlobalObject, property_name: anytype) bun.JSError!?JSValue { + if (try getOwn(this, global, property_name)) |prop| { if (prop.isUndefined()) return null; return prop; } @@ -1610,6 +1608,9 @@ pub const JSValue = enum(i64) { /// - .js_undefined /// - an empty string pub fn getStringish(this: JSValue, global: *JSGlobalObject, property: []const u8) bun.JSError!?bun.String { + var scope: CatchScope = undefined; + scope.init(global, @src()); + defer scope.deinit(); const prop = try get(this, global, property) orelse return null; if (prop.isNull() or prop == .false) { return null; @@ -1619,14 +1620,12 @@ pub const JSValue = enum(i64) { } const str = try prop.toBunString(global); - if (global.hasException()) { - str.deref(); - return error.JSError; - } - if (str.isEmpty()) { - return null; - } - return str; + errdefer str.deref(); + try scope.returnIfException(); + return if (str.isEmpty()) + null + else + str; } pub fn toEnumFromMap( @@ -1715,7 +1714,7 @@ pub const JSValue = enum(i64) { return globalThis.throwInvalidArguments(property_name ++ " must be an array", .{}); } - if (prop.getLength(globalThis) == 0) { + if (try prop.getLength(globalThis) == 0) { return null; } @@ -1731,7 +1730,7 @@ pub const JSValue = enum(i64) { } pub fn getOwnArray(this: JSValue, globalThis: *JSGlobalObject, comptime property_name: []const u8) JSError!?JSValue { - if (getOwnTruthy(this, globalThis, property_name)) |prop| { + if (try getOwnTruthy(this, globalThis, property_name)) |prop| { return coerceToArray(prop, globalThis, property_name); } @@ -1739,7 +1738,7 @@ pub const JSValue = enum(i64) { } pub fn getOwnObject(this: JSValue, globalThis: *JSGlobalObject, comptime property_name: []const u8) JSError!?*JSC.JSObject { - if (getOwnTruthy(this, globalThis, property_name)) |prop| { + if (try getOwnTruthy(this, globalThis, property_name)) |prop| { const obj = prop.getObject() orelse { return globalThis.throwInvalidArguments(property_name ++ " must be an object", .{}); }; @@ -1867,46 +1866,36 @@ pub const JSValue = enum(i64) { /// /// This algorithm differs from the IsStrictlyEqual Algorithm by treating all NaN values as equivalent and by differentiating +0𝔽 from -0𝔽. 
/// https://tc39.es/ecma262/#sec-samevalue - pub fn isSameValue(this: JSValue, other: JSValue, global: *JSGlobalObject) bool { - return @intFromEnum(this) == @intFromEnum(other) or JSC__JSValue__isSameValue(this, other, global); + /// + /// This can throw because it resolves rope strings + pub fn isSameValue(this: JSValue, other: JSValue, global: *JSGlobalObject) JSError!bool { + if (@intFromEnum(this) == @intFromEnum(other)) return true; + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__isSameValue, .{ this, other, global }); } extern fn JSC__JSValue__deepEquals(this: JSValue, other: JSValue, global: *JSGlobalObject) bool; pub fn deepEquals(this: JSValue, other: JSValue, global: *JSGlobalObject) JSError!bool { - const result = JSC__JSValue__deepEquals(this, other, global); - if (global.hasException()) return error.JSError; - return result; + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__deepEquals, .{ this, other, global }); } extern fn JSC__JSValue__jestDeepEquals(this: JSValue, other: JSValue, global: *JSGlobalObject) bool; /// same as `JSValue.deepEquals`, but with jest asymmetric matchers enabled pub fn jestDeepEquals(this: JSValue, other: JSValue, global: *JSGlobalObject) JSError!bool { - const result = JSC__JSValue__jestDeepEquals(this, other, global); - if (global.hasException()) return error.JSError; - return result; + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__jestDeepEquals, .{ this, other, global }); } extern fn JSC__JSValue__strictDeepEquals(this: JSValue, other: JSValue, global: *JSGlobalObject) bool; pub fn strictDeepEquals(this: JSValue, other: JSValue, global: *JSGlobalObject) JSError!bool { - const result = JSC__JSValue__strictDeepEquals(this, other, global); - if (global.hasException()) return error.JSError; - return result; + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__strictDeepEquals, .{ this, other, global }); } extern fn JSC__JSValue__jestStrictDeepEquals(this: JSValue, other: JSValue, global: *JSGlobalObject) bool; /// same as `JSValue.strictDeepEquals`, but with jest asymmetric matchers enabled pub fn jestStrictDeepEquals(this: JSValue, other: JSValue, global: *JSGlobalObject) JSError!bool { - const result = JSC__JSValue__jestStrictDeepEquals(this, other, global); - if (global.hasException()) return error.JSError; - return result; - } - extern fn JSC__JSValue__deepMatch(this: JSValue, subset: JSValue, global: *JSGlobalObject, replace_props_with_asymmetric_matchers: bool) bool; - /// NOTE: can throw. Check for exceptions. 
- pub fn deepMatch(this: JSValue, subset: JSValue, global: *JSGlobalObject, replace_props_with_asymmetric_matchers: bool) bool { - return JSC__JSValue__deepMatch(this, subset, global, replace_props_with_asymmetric_matchers); + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__jestStrictDeepEquals, .{ this, other, global }); } extern fn JSC__JSValue__jestDeepMatch(this: JSValue, subset: JSValue, global: *JSGlobalObject, replace_props_with_asymmetric_matchers: bool) bool; /// same as `JSValue.deepMatch`, but with jest asymmetric matchers enabled - pub fn jestDeepMatch(this: JSValue, subset: JSValue, global: *JSGlobalObject, replace_props_with_asymmetric_matchers: bool) bool { - return JSC__JSValue__jestDeepMatch(this, subset, global, replace_props_with_asymmetric_matchers); + pub fn jestDeepMatch(this: JSValue, subset: JSValue, global: *JSGlobalObject, replace_props_with_asymmetric_matchers: bool) JSError!bool { + return bun.jsc.fromJSHostCallGeneric(global, @src(), JSC__JSValue__jestDeepMatch, .{ this, subset, global, replace_props_with_asymmetric_matchers }); } pub const DiffMethod = enum(u8) { @@ -2107,42 +2096,22 @@ pub const JSValue = enum(i64) { /// - anything with a .length property returning a number /// /// If the "length" property does not exist, this function will return 0. - pub fn getLength(this: JSValue, globalThis: *JSGlobalObject) u64 { - const len = this.getLengthIfPropertyExistsInternal(globalThis); + pub fn getLength(this: JSValue, globalThis: *JSGlobalObject) JSError!u64 { + const len = try this.getLengthIfPropertyExistsInternal(globalThis); if (len == std.math.floatMax(f64)) { return 0; } - return @as(u64, @intFromFloat(@max(@min(len, std.math.maxInt(i52)), 0))); - } - - /// This function supports: - /// - Array, DerivedArray & friends - /// - String, DerivedString & friends - /// - TypedArray - /// - Map (size) - /// - WeakMap (size) - /// - Set (size) - /// - WeakSet (size) - /// - ArrayBuffer (byteLength) - /// - anything with a .length property returning a number - /// - /// If the "length" property does not exist, this function will return null. - pub fn tryGetLength(this: JSValue, globalThis: *JSGlobalObject) ?f64 { - const len = this.getLengthIfPropertyExistsInternal(globalThis); - if (len == std.math.floatMax(f64)) { - return null; - } - - return @as(u64, @intFromFloat(@max(@min(len, std.math.maxInt(i52)), 0))); + return @intFromFloat(std.math.clamp(len, 0, std.math.maxInt(i52))); } extern fn JSC__JSValue__getLengthIfPropertyExistsInternal(this: JSValue, globalThis: *JSGlobalObject) f64; /// Do not use this directly! /// /// If the property does not exist, this function will return max(f64) instead of 0. 
- pub fn getLengthIfPropertyExistsInternal(this: JSValue, globalThis: *JSGlobalObject) f64 { - return JSC__JSValue__getLengthIfPropertyExistsInternal(this, globalThis); + /// TODO this should probably just return an optional + pub fn getLengthIfPropertyExistsInternal(this: JSValue, globalThis: *JSGlobalObject) JSError!f64 { + return bun.jsc.fromJSHostCallGeneric(globalThis, @src(), JSC__JSValue__getLengthIfPropertyExistsInternal, .{ this, globalThis }); } extern fn JSC__JSValue__isAggregateError(this: JSValue, globalObject: *JSGlobalObject) bool; @@ -2172,8 +2141,8 @@ pub const JSValue = enum(i64) { } extern fn JSC__JSValue__isIterable(this: JSValue, globalObject: *JSGlobalObject) bool; - pub fn isIterable(this: JSValue, globalObject: *JSGlobalObject) bool { - return JSC__JSValue__isIterable(this, globalObject); + pub fn isIterable(this: JSValue, globalObject: *JSGlobalObject) JSError!bool { + return bun.jsc.fromJSHostCallGeneric(globalObject, @src(), JSC__JSValue__isIterable, .{ this, globalObject }); } extern fn JSC__JSValue__stringIncludes(this: JSValue, globalObject: *JSGlobalObject, other: JSValue) bool; @@ -2198,7 +2167,7 @@ pub const JSValue = enum(i64) { // TODO: remove this (no replacement) pub inline fn asObjectRef(this: JSValue) C_API.JSObjectRef { - return @as(C_API.JSObjectRef, @ptrCast(this.asVoid())); + return @ptrFromInt(@as(usize, @bitCast(@intFromEnum(this)))); } /// When the GC sees a JSValue referenced in the stack, it knows not to free it @@ -2208,19 +2177,6 @@ pub const JSValue = enum(i64) { std.mem.doNotOptimizeAway(this.asEncoded().asPtr); } - pub inline fn asNullableVoid(this: JSValue) ?*anyopaque { - return @as(?*anyopaque, @ptrFromInt(@as(usize, @bitCast(@intFromEnum(this))))); - } - - pub inline fn asVoid(this: JSValue) *anyopaque { - if (comptime bun.Environment.allow_assert) { - if (@intFromEnum(this) == 0) { - @panic("JSValue is null"); - } - } - return this.asNullableVoid().?; - } - pub fn uncheckedPtrCast(value: JSValue, comptime T: type) *T { return @alignCast(@ptrCast(value.asEncoded().asPtr)); } @@ -2244,8 +2200,8 @@ pub const JSValue = enum(i64) { extern "c" fn Bun__JSValue__deserialize(global: *JSGlobalObject, data: [*]const u8, len: usize) JSValue; /// Deserializes a JSValue from a serialized buffer. Zig version of `import('bun:jsc').deserialize` - pub inline fn deserialize(bytes: []const u8, global: *JSGlobalObject) JSValue { - return Bun__JSValue__deserialize(global, bytes.ptr, bytes.len); + pub inline fn deserialize(bytes: []const u8, global: *JSGlobalObject) bun.JSError!JSValue { + return bun.jsc.fromJSHostCall(global, @src(), Bun__JSValue__deserialize, .{ global, bytes.ptr, bytes.len }); } extern fn Bun__serializeJSValue(global: *JSC.JSGlobalObject, value: JSValue, forTransfer: bool) SerializedScriptValue.External; @@ -2268,12 +2224,9 @@ pub const JSValue = enum(i64) { /// Throws a JS exception and returns null if the serialization fails, otherwise returns a SerializedScriptValue. /// Must be freed when you are done with the bytes. - pub inline fn serialize(this: JSValue, global: *JSGlobalObject, forTransfer: bool) ?SerializedScriptValue { - const value = Bun__serializeJSValue(global, this, forTransfer); - return if (value.bytes) |bytes| - .{ .data = bytes[0..value.size], .handle = value.handle.? 
} - else - null; + pub inline fn serialize(this: JSValue, global: *JSGlobalObject, forTransfer: bool) bun.JSError!SerializedScriptValue { + const value = try bun.jsc.fromJSHostCallGeneric(global, @src(), Bun__serializeJSValue, .{ global, this, forTransfer }); + return .{ .data = value.bytes.?[0..value.size], .handle = value.handle.? }; } extern fn Bun__ProxyObject__getInternalField(this: JSValue, field: ProxyInternalField) JSValue; @@ -2350,7 +2303,7 @@ pub const JSValue = enum(i64) { inline []const u16, []const u32, []const i16, []const i8, []const i32, []const f32 => { var array = try JSC.JSValue.createEmptyArray(globalObject, value.len); for (value, 0..) |item, i| { - array.putIndex( + try array.putIndex( globalObject, @truncate(i), JSC.jsNumber(item), @@ -2369,7 +2322,7 @@ pub const JSValue = enum(i64) { for (value, 0..) |*item, i| { const res = try fromAny(globalObject, *Child, item); if (res == .zero) return .zero; - array.putIndex( + try array.putIndex( globalObject, @truncate(i), res, @@ -2450,6 +2403,9 @@ const JSString = JSC.JSString; const JSObject = JSC.JSObject; const JSArrayIterator = JSC.JSArrayIterator; const JSCell = JSC.JSCell; +const fromJSHostCall = JSC.fromJSHostCall; +const CatchScope = JSC.CatchScope; +const ExceptionValidationScope = JSC.ExceptionValidationScope; const AnyPromise = JSC.AnyPromise; const DOMURL = JSC.DOMURL; diff --git a/src/bun.js/bindings/JSWrappingFunction.cpp b/src/bun.js/bindings/JSWrappingFunction.cpp index 9e33f11c35..d86804128f 100644 --- a/src/bun.js/bindings/JSWrappingFunction.cpp +++ b/src/bun.js/bindings/JSWrappingFunction.cpp @@ -82,7 +82,7 @@ extern "C" JSC::EncodedJSValue Bun__JSWrappingFunction__getWrappedFunction( JSC::JSFunction* wrappedFn = thisObject->m_wrappedFn.get(); return JSC::JSValue::encode(wrappedFn); } - return JSC::JSValue::encode({}); + return {}; } } diff --git a/src/bun.js/bindings/JSX509Certificate.cpp b/src/bun.js/bindings/JSX509Certificate.cpp index 86afb980f0..0dc8938161 100644 --- a/src/bun.js/bindings/JSX509Certificate.cpp +++ b/src/bun.js/bindings/JSX509Certificate.cpp @@ -252,7 +252,7 @@ JSX509Certificate* JSX509Certificate::create(JSC::VM& vm, JSC::Structure* struct // Initialize the X509 certificate from the provided data auto result = ncrypto::X509Pointer::Parse(ncrypto::Buffer { reinterpret_cast(der.data()), der.size() }); if (!result) { - Bun::throwBoringSSLError(vm, scope, globalObject, result.error.value_or(0)); + Bun::throwBoringSSLError(globalObject, scope, result.error.value_or(0)); return nullptr; } @@ -391,7 +391,6 @@ static JSObject* GetX509NameObject(JSGlobalObject* globalObject, const X509* cer // Check if this key already exists JSValue existing = result->getIfPropertyExists(globalObject, Identifier::fromString(vm, key)); - RETURN_IF_EXCEPTION(scope, nullptr); if (existing) { JSArray* array = jsDynamicCast(existing); @@ -430,7 +429,7 @@ JSValue JSX509Certificate::computeSubject(ncrypto::X509View view, JSGlobalObject auto bio = view.getSubject(); if (!bio) { throwCryptoOperationFailed(globalObject, scope); - return jsUndefined(); + return {}; } return jsString(vm, toWTFString(bio)); } @@ -441,6 +440,7 @@ JSValue JSX509Certificate::computeSubject(ncrypto::X509View view, JSGlobalObject return jsUndefined(); JSObject* obj = GetX509NameObject(globalObject, cert); + RETURN_IF_EXCEPTION(scope, {}); if (!obj) return jsUndefined(); @@ -455,14 +455,14 @@ JSValue JSX509Certificate::computeIssuer(ncrypto::X509View view, JSGlobalObject* auto bio = view.getIssuer(); if (!bio) { 
throwCryptoOperationFailed(globalObject, scope); - return jsEmptyString(vm); + return {}; } if (!legacy) { return jsString(vm, toWTFString(bio)); } - return GetX509NameObject(globalObject, view.get()); + RELEASE_AND_RETURN(scope, GetX509NameObject(globalObject, view.get())); } JSString* JSX509Certificate::computeValidFrom(ncrypto::X509View view, JSGlobalObject* globalObject) @@ -551,7 +551,7 @@ JSString* JSX509Certificate::computeFingerprint512(ncrypto::X509View view, JSGlo JSUint8Array* JSX509Certificate::computeRaw(ncrypto::X509View view, JSGlobalObject* globalObject) { - VM& vm = globalObject->vm(); + auto& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); auto bio = view.toDER(); @@ -559,6 +559,7 @@ JSUint8Array* JSX509Certificate::computeRaw(ncrypto::X509View view, JSGlobalObje throwCryptoOperationFailed(globalObject, scope); return nullptr; } + auto bio_ptr = bio.release(); BUF_MEM* bptr = nullptr; BIO_get_mem_ptr(bio_ptr, &bptr); @@ -566,7 +567,7 @@ JSUint8Array* JSX509Certificate::computeRaw(ncrypto::X509View view, JSGlobalObje Ref buffer = JSC::ArrayBuffer::createFromBytes(std::span(reinterpret_cast(bptr->data), bptr->length), createSharedTask([](void* data) { ncrypto::BIOPointer free_me(static_cast(data)); })); - return Bun::createBuffer(globalObject, WTFMove(buffer)); + RELEASE_AND_RETURN(scope, Bun::createBuffer(globalObject, WTFMove(buffer))); } bool JSX509Certificate::computeIsCA(ncrypto::X509View view, JSGlobalObject* globalObject) diff --git a/src/bun.js/bindings/JSX509CertificatePrototype.cpp b/src/bun.js/bindings/JSX509CertificatePrototype.cpp index 198960046e..2883aabe9e 100644 --- a/src/bun.js/bindings/JSX509CertificatePrototype.cpp +++ b/src/bun.js/bindings/JSX509CertificatePrototype.cpp @@ -264,6 +264,7 @@ JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail, (JSGlobalObject * auto emailString = arg0.toString(globalObject); RETURN_IF_EXCEPTION(scope, {}); auto view = emailString->view(globalObject); + RETURN_IF_EXCEPTION(scope, {}); uint32_t flags = getFlags(vm, globalObject, scope, callFrame->argument(1)); RETURN_IF_EXCEPTION(scope, {}); @@ -300,6 +301,7 @@ JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost, (JSGlobalObject * auto hostString = arg0.toString(globalObject); RETURN_IF_EXCEPTION(scope, {}); auto view = hostString->view(globalObject); + RETURN_IF_EXCEPTION(scope, {}); Bun::UTF8View hostView(view); @@ -330,6 +332,7 @@ JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP, (JSGlobalObject * gl auto ipString = arg0.toString(globalObject); RETURN_IF_EXCEPTION(scope, {}); auto view = ipString->view(globalObject); + RETURN_IF_EXCEPTION(scope, {}); WTF::CString ip = view->utf8(); // ignore flags diff --git a/src/bun.js/bindings/ModuleLoader.cpp b/src/bun.js/bindings/ModuleLoader.cpp index adb8f33bb2..03284445d4 100644 --- a/src/bun.js/bindings/ModuleLoader.cpp +++ b/src/bun.js/bindings/ModuleLoader.cpp @@ -134,20 +134,25 @@ static OnLoadResult handleOnLoadObjectResult(Zig::GlobalObject* globalObject, JS OnLoadResult result {}; result.type = OnLoadResultTypeObject; auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); auto& builtinNames = WebCore::builtinNames(vm); - if (JSC::JSValue exportsValue = object->getIfPropertyExists(globalObject, builtinNames.exportsPublicName())) { + auto exportsValue = object->getIfPropertyExists(globalObject, builtinNames.exportsPublicName()); + if (scope.exception()) [[unlikely]] { + result.value.error = scope.exception(); + scope.clearException(); + return result; + } + if 
(exportsValue) { if (exportsValue.isObject()) { result.value.object = exportsValue; return result; } } - auto scope = DECLARE_THROW_SCOPE(vm); scope.throwException(globalObject, createTypeError(globalObject, "\"object\" loader must return an \"exports\" object"_s)); result.type = OnLoadResultTypeError; result.value.error = scope.exception(); scope.clearException(); - scope.release(); return result; } @@ -225,11 +230,18 @@ OnLoadResult handleOnLoadResultNotPromise(Zig::GlobalObject* globalObject, JSC:: scope.throwException(globalObject, JSC::createError(globalObject, "Expected module mock to return an object"_s)); result.value.error = scope.exception(); + scope.clearException(); result.type = OnLoadResultTypeError; return result; } - if (JSC::JSValue loaderValue = object->getIfPropertyExists(globalObject, JSC::Identifier::fromString(vm, "loader"_s))) { + auto loaderValue = object->getIfPropertyExists(globalObject, JSC::Identifier::fromString(vm, "loader"_s)); + if (scope.exception()) [[unlikely]] { + result.value.error = scope.exception(); + scope.clearException(); + return result; + } + if (loaderValue) { if (!loaderValue.isUndefinedOrNull()) { // If a loader is passed, we must validate it loader = BunLoaderTypeNone; @@ -258,6 +270,7 @@ OnLoadResult handleOnLoadResultNotPromise(Zig::GlobalObject* globalObject, JSC:: if (loader == BunLoaderTypeNone) [[unlikely]] { throwException(globalObject, scope, createError(globalObject, "Expected loader to be one of \"js\", \"jsx\", \"object\", \"ts\", \"tsx\", \"toml\", or \"json\""_s)); result.value.error = scope.exception(); + scope.clearException(); return result; } @@ -265,7 +278,13 @@ OnLoadResult handleOnLoadResultNotPromise(Zig::GlobalObject* globalObject, JSC:: result.value.sourceText.value = JSValue {}; result.value.sourceText.string = {}; - if (JSC::JSValue contentsValue = object->getIfPropertyExists(globalObject, JSC::Identifier::fromString(vm, "contents"_s))) { + auto contentsValue = object->getIfPropertyExists(globalObject, JSC::Identifier::fromString(vm, "contents"_s)); + if (scope.exception()) [[unlikely]] { + result.value.error = scope.exception(); + scope.clearException(); + return result; + } + if (contentsValue) { if (contentsValue.isString()) { if (JSC::JSString* contentsJSString = contentsValue.toStringOrNull(globalObject)) { result.value.sourceText.string = Zig::toZigString(contentsJSString, globalObject); @@ -280,6 +299,7 @@ OnLoadResult handleOnLoadResultNotPromise(Zig::GlobalObject* globalObject, JSC:: if (result.value.sourceText.value.isEmpty()) [[unlikely]] { throwException(globalObject, scope, createError(globalObject, "Expected \"contents\" to be a string or an ArrayBufferView"_s)); result.value.error = scope.exception(); + scope.clearException(); return result; } @@ -358,7 +378,7 @@ static JSValue handleVirtualModuleResult( case OnLoadResultTypeCode: { Bun__transpileVirtualModule(globalObject, specifier, referrer, &onLoadResult.value.sourceText.string, onLoadResult.value.sourceText.loader, res); if (!res->success) { - return reject(JSValue::decode(reinterpret_cast(res->result.err.ptr))); + return reject(JSValue::decode(res->result.err.value)); } auto provider = Zig::SourceProvider::create(globalObject, res->result.value); @@ -372,11 +392,15 @@ static JSValue handleVirtualModuleResult( JSC::JSObject* object = onLoadResult.value.object.getObject(); if (commonJSModule) { const auto& __esModuleIdentifier = vm.propertyNames->__esModule; - JSValue esModuleValue = object->getIfPropertyExists(globalObject, __esModuleIdentifier); 
- RETURN_IF_EXCEPTION(scope, {}); + auto esModuleValue = object->getIfPropertyExists(globalObject, __esModuleIdentifier); + if (scope.exception()) [[unlikely]] { + return reject(scope.exception()); + } if (esModuleValue && esModuleValue.toBoolean(globalObject)) { - JSValue defaultValue = object->getIfPropertyExists(globalObject, vm.propertyNames->defaultKeyword); - RETURN_IF_EXCEPTION(scope, {}); + auto defaultValue = object->getIfPropertyExists(globalObject, vm.propertyNames->defaultKeyword); + if (scope.exception()) [[unlikely]] { + return reject(scope.exception()); + } if (defaultValue && !defaultValue.isUndefined()) { commonJSModule->setExportsObject(defaultValue); commonJSModule->hasEvaluated = true; @@ -434,20 +458,23 @@ extern "C" void Bun__onFulfillAsyncModule( JSC::JSInternalPromise* promise = jsCast(JSC::JSValue::decode(encodedPromiseValue)); if (!res->success) { - throwException(scope, res->result.err, globalObject); - auto* exception = scope.exception(); - scope.clearException(); - return promise->reject(globalObject, exception); + return promise->reject(globalObject, JSValue::decode(res->result.err.value)); } auto specifierValue = Bun::toJS(globalObject, *specifier); - if (auto entry = globalObject->esmRegistryMap()->get(globalObject, specifierValue)) { + auto* map = globalObject->esmRegistryMap(); + RETURN_IF_EXCEPTION(scope, ); + auto entry = map->get(globalObject, specifierValue); + RETURN_IF_EXCEPTION(scope, ); + if (entry) { if (entry.isObject()) { auto* object = entry.getObject(); - if (auto state = object->getIfPropertyExists(globalObject, Bun::builtinNames(vm).statePublicName())) { - if (state.toInt32(globalObject) > JSC::JSModuleLoader::Status::Fetch) { + auto state = object->getIfPropertyExists(globalObject, Bun::builtinNames(vm).statePublicName()); + RETURN_IF_EXCEPTION(scope, ); + if (state && state.isInt32()) { + if (state.asInt32() > JSC::JSModuleLoader::Status::Fetch) { // it's a race! we lost. // https://github.com/oven-sh/bun/issues/6946 // https://github.com/oven-sh/bun/issues/12910 @@ -463,12 +490,15 @@ extern "C" void Bun__onFulfillAsyncModule( promise->resolve(globalObject, code); } else { auto* exception = scope.exception(); - scope.clearException(); - promise->reject(globalObject, exception); + if (!vm.isTerminationException(exception)) { + scope.clearException(); + promise->reject(globalObject, exception); + } } } else { auto&& provider = Zig::SourceProvider::create(jsDynamicCast(globalObject), res->result.value); promise->resolve(globalObject, JSC::JSSourceCode::create(vm, JSC::SourceCode(provider))); + scope.assertNoExceptionExceptTermination(); } } else { // the module has since been deleted from the registry. 
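The `getIfPropertyExists` call sites patched above, like the Zig `get`/`getOwn` wrappers earlier in this diff, all preserve a three-way distinction: the lookup threw, the property is absent, or a value (possibly `undefined`) was found. In Zig that contract is spelled `bun.JSError!?JSValue`; here is a toy, self-contained sketch of it, with every name hypothetical:

```zig
const std = @import("std");

const JSError = error{JSError};

/// Toy JS value: just enough to show that `undefined` is still a value.
const Value = union(enum) {
    undefined_value,
    number: f64,
};

const Object = struct {
    fields: std.StringHashMapUnmanaged(Value) = .{},
    /// Simulates a getter or proxy trap that throws during lookup.
    poison: bool = false,

    /// The `JSError!?Value` contract: `error.JSError` means an exception is
    /// pending, `null` means the property does not exist, and a returned
    /// `Value` (even `.undefined_value`) means the lookup succeeded.
    fn get(self: *const Object, name: []const u8) JSError!?Value {
        if (self.poison) return error.JSError;
        return self.fields.get(name);
    }
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    var obj = Object{};
    defer obj.fields.deinit(allocator);
    try obj.fields.put(allocator, "length", .{ .number = 3 });

    if (try obj.get("length")) |v| std.debug.print("found: {d}\n", .{v.number});
    if ((try obj.get("missing")) == null) std.debug.print("absent, nothing thrown\n", .{});

    obj.poison = true;
    if (obj.get("length")) |_| {} else |err| {
        std.debug.print("lookup threw: {s}\n", .{@errorName(err)});
    }
}
```

Collapsing any two of those outcomes is exactly the bug class these hunks fix: an exception left pending while the caller treats the result as "property missing".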
@@ -659,7 +689,9 @@ JSValue fetchCommonJSModule( } } - if (auto builtin = fetchBuiltinModuleWithoutResolution(globalObject, &specifier, res)) { + auto builtin = fetchBuiltinModuleWithoutResolution(globalObject, &specifier, res); + RETURN_IF_EXCEPTION(scope, {}); + if (builtin) { if (!res->success) { RELEASE_AND_RETURN(scope, builtin); } @@ -707,19 +739,24 @@ JSValue fetchCommonJSModule( } JSMap* registry = globalObject->esmRegistryMap(); + RETURN_IF_EXCEPTION(scope, {}); - const auto hasAlreadyLoadedESMVersionSoWeShouldntTranspileItTwice = [&]() -> bool { + bool hasAlreadyLoadedESMVersionSoWeShouldntTranspileItTwice = [&]() -> bool { JSValue entry = registry->get(globalObject, specifierValue); if (!entry || !entry.isObject()) { return false; } + // return value doesn't matter since we check for exceptions after calling this lambda and + // before checking the returned bool + RETURN_IF_EXCEPTION(scope, false); int status = entry.getObject()->getDirect(vm, WebCore::clientData(vm)->builtinNames().statePublicName()).asInt32(); return status > JSModuleLoader::Status::Fetch; - }; + }(); + RETURN_IF_EXCEPTION(scope, {}); - if (hasAlreadyLoadedESMVersionSoWeShouldntTranspileItTwice()) { + if (hasAlreadyLoadedESMVersionSoWeShouldntTranspileItTwice) { RELEASE_AND_RETURN(scope, jsNumber(-1)); } return fetchCommonJSModuleNonBuiltin(bunVM, vm, globalObject, &specifier, specifierValue, referrer, typeAttribute, res, target, specifierWtfString, BunLoaderTypeNone, scope); @@ -909,6 +946,7 @@ static JSValue fetchESMSourceCode( scope.clearException(); return rejectedInternalPromise(globalObject, exception); } else { + scope.release(); return {}; } } @@ -972,6 +1010,7 @@ static JSValue fetchESMSourceCode( scope.clearException(); return rejectedInternalPromise(globalObject, exception); } else { + scope.release(); return {}; } } @@ -997,7 +1036,7 @@ static JSValue fetchESMSourceCode( if (res->result.value.tag == SyntheticModuleType::JSONForObjectLoader) { WTF::String jsonSource = res->result.value.source_code.toWTFString(BunString::NonNull); JSC::JSValue value = JSC::JSONParseWithException(globalObject, jsonSource); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { auto* exception = scope.exception(); scope.clearException(); return reject(exception); @@ -1017,7 +1056,7 @@ static JSValue fetchESMSourceCode( else if (res->result.value.tag == SyntheticModuleType::ExportsObject) { JSC::JSValue value = JSC::JSValue::decode(res->result.value.jsvalue_for_export); if (!value) { - return reject(JSC::JSValue(JSC::createSyntaxError(globalObject, "Failed to parse Object"_s))); + return reject(JSC::createSyntaxError(globalObject, "Failed to parse Object"_s)); } // JSON can become strings, null, numbers, booleans so we must handle "export default 123" @@ -1032,7 +1071,7 @@ static JSValue fetchESMSourceCode( } else if (res->result.value.tag == SyntheticModuleType::ExportDefaultObject) { JSC::JSValue value = JSC::JSValue::decode(res->result.value.jsvalue_for_export); if (!value) { - return reject(JSC::JSValue(JSC::createSyntaxError(globalObject, "Failed to parse Object"_s))); + return reject(JSC::createSyntaxError(globalObject, "Failed to parse Object"_s)); } // JSON can become strings, null, numbers, booleans so we must handle "export default 123" @@ -1097,7 +1136,7 @@ BUN_DEFINE_HOST_FUNCTION(jsFunctionOnLoadObjectResultResolve, (JSC::JSGlobalObje JSC::JSValue result = handleVirtualModuleResult(reinterpret_cast(globalObject), objectResult, &res, &specifier, &referrer, wasModuleMock); if (res.success) { - if 
(scope.exception()) { + if (scope.exception()) [[unlikely]] { auto retValue = JSValue::encode(promise->rejectWithCaughtException(globalObject, scope)); pendingModule->internalField(2).set(vm, pendingModule, JSC::jsUndefined()); return retValue; diff --git a/src/bun.js/bindings/NapiClass.cpp b/src/bun.js/bindings/NapiClass.cpp new file mode 100644 index 0000000000..eacf7fb6db --- /dev/null +++ b/src/bun.js/bindings/NapiClass.cpp @@ -0,0 +1,135 @@ +#include "root.h" +#include "napi.h" +#include + +namespace Zig { + +template +void NapiClass::visitChildrenImpl(JSCell* cell, Visitor& visitor) +{ + NapiClass* thisObject = jsCast(cell); + ASSERT_GC_OBJECT_INHERITS(thisObject, info()); + Base::visitChildren(thisObject, visitor); +} + +DEFINE_VISIT_CHILDREN(NapiClass); + +template +JSC_HOST_CALL_ATTRIBUTES JSC::EncodedJSValue NapiClass_ConstructorFunction(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame) +{ + JSC::VM& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); + JSObject* constructorTarget = asObject(callFrame->jsCallee()); + NapiClass* napi = jsDynamicCast(constructorTarget); + while (!napi && constructorTarget) { + constructorTarget = constructorTarget->getPrototypeDirect().getObject(); + napi = jsDynamicCast(constructorTarget); + } + + if (!napi) [[unlikely]] { + JSC::throwVMError(globalObject, scope, JSC::createTypeError(globalObject, "NapiClass constructor called on an object that is not a NapiClass"_s)); + return JSValue::encode(JSC::jsUndefined()); + } + + JSValue newTarget; + + if constexpr (ConstructCall) { + // Use ::get instead of ::getIfPropertyExists here so that DontEnum is ignored. + auto prototypeValue = napi->get(globalObject, vm.propertyNames->prototype); + RETURN_IF_EXCEPTION(scope, {}); + NapiPrototype* prototype = JSC::jsDynamicCast(prototypeValue); + + if (!prototype) { + JSC::throwVMError(globalObject, scope, JSC::createTypeError(globalObject, "NapiClass constructor is missing the prototype"_s)); + return JSValue::encode(JSC::jsUndefined()); + } + + newTarget = callFrame->newTarget(); + JSObject* thisValue; + // Match the behavior from + // https://github.com/oven-sh/WebKit/blob/397dafc9721b8f8046f9448abb6dbc14efe096d3/Source/JavaScriptCore/runtime/ObjectConstructor.cpp#L118-L145 + if (newTarget && newTarget != napi) { + JSGlobalObject* functionGlobalObject = getFunctionRealm(globalObject, asObject(newTarget)); + RETURN_IF_EXCEPTION(scope, {}); + Structure* baseStructure = functionGlobalObject->objectStructureForObjectConstructor(); + Structure* objectStructure = InternalFunction::createSubclassStructure(globalObject, asObject(newTarget), baseStructure); + RETURN_IF_EXCEPTION(scope, {}); + thisValue = constructEmptyObject(vm, objectStructure); + } else { + thisValue = prototype->subclass(globalObject, asObject(newTarget)); + } + RETURN_IF_EXCEPTION(scope, {}); + callFrame->setThisValue(thisValue); + } + + NAPICallFrame frame(globalObject, callFrame, napi->dataPtr(), newTarget); + Bun::NapiHandleScope handleScope(jsCast(globalObject)); + + JSValue ret = toJS(napi->constructor()(napi->env(), frame.toNapi())); + napi_set_last_error(napi->env(), napi_ok); + RETURN_IF_EXCEPTION(scope, {}); + if (ret.isEmpty()) { + ret = jsUndefined(); + } + if constexpr (ConstructCall) { + RELEASE_AND_RETURN(scope, JSValue::encode(frame.thisValue())); + } else { + RELEASE_AND_RETURN(scope, JSValue::encode(ret)); + } +} + +NapiClass* NapiClass::create(VM& vm, napi_env env, WTF::String name, + napi_callback constructor, + void* data, + size_t property_count, 
+ const napi_property_descriptor* properties) +{ + NativeExecutable* executable = vm.getHostFunction( + // for normal call + NapiClass_ConstructorFunction, + ImplementationVisibility::Public, + // for constructor call + NapiClass_ConstructorFunction, name); + Structure* structure = env->globalObject()->NapiClassStructure(); + NapiClass* napiClass = new (NotNull, allocateCell(vm)) NapiClass(vm, executable, env, structure, data); + napiClass->finishCreation(vm, executable, name, constructor, data, property_count, properties); + return napiClass; +} + +void NapiClass::finishCreation(VM& vm, NativeExecutable* executable, const String& name, napi_callback constructor, + void* data, + size_t property_count, + const napi_property_descriptor* properties) +{ + Base::finishCreation(vm, executable, 0, name); + ASSERT(inherits(info())); + this->m_constructor = constructor; + auto globalObject = reinterpret_cast(this->globalObject()); + + this->putDirect(vm, vm.propertyNames->name, jsString(vm, name), JSC::PropertyAttribute::DontEnum | 0); + + NapiPrototype* prototype = NapiPrototype::create(vm, globalObject->NapiPrototypeStructure()); + + auto throwScope = DECLARE_THROW_SCOPE(vm); + auto env = m_env; + + for (size_t i = 0; i < property_count; i++) { + const napi_property_descriptor& property = properties[i]; + + if (property.attributes & napi_static) { + Napi::defineProperty(env, this, property, true, throwScope); + } else { + Napi::defineProperty(env, prototype, property, false, throwScope); + } + + if (throwScope.exception()) + break; + } + + this->putDirect(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | 0); + prototype->putDirect(vm, vm.propertyNames->constructor, this, JSC::PropertyAttribute::DontEnum | 0); +} + +const ClassInfo NapiClass::s_info = { "Function"_s, &NapiClass::Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(NapiClass) }; +const ClassInfo NapiPrototype::s_info = { "Object"_s, &NapiPrototype::Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(NapiPrototype) }; +} diff --git a/src/bun.js/bindings/NapiRef.cpp b/src/bun.js/bindings/NapiRef.cpp new file mode 100644 index 0000000000..d33ac46cef --- /dev/null +++ b/src/bun.js/bindings/NapiRef.cpp @@ -0,0 +1,46 @@ +#include "root.h" +#include "napi.h" +#include + +namespace Zig { + +WTF_MAKE_TZONE_ALLOCATED_IMPL(NapiRef); + +void NapiRef::ref() +{ + NAPI_LOG("ref %p %u -> %u", this, refCount, refCount + 1); + ++refCount; + if (refCount == 1 && !weakValueRef.isClear()) { + auto& vm = globalObject.get()->vm(); + strongRef.set(vm, weakValueRef.get()); + + // isSet() will return always true after being set once + // We cannot rely on isSet() to check if the value is set we need to use isClear() + // .setString/.setObject/.setPrimitive will assert fail if called more than once (even after clear()) + // We should not clear the weakValueRef here because we need to keep it if we call NapiRef::unref() + // so we can call the finalizer + } +} + +void NapiRef::unref() +{ + NAPI_LOG("unref %p %u -> %u", this, refCount, refCount - 1); + bool clear = refCount == 1; + refCount = refCount > 0 ? 
refCount - 1 : 0; + if (clear && !m_isEternal) { + // We still don't clear weakValueRef here, so the value can be ref'd again via NapiRef::ref() if the GC hasn't collected it, + // and so it can still be used to call the finalizer once the value is GC'd. + strongRef.clear(); + } +} + +void NapiRef::clear() +{ + NAPI_LOG("ref clear %p", this); + finalizer.call(env, nativeObject); + globalObject.clear(); + weakValueRef.clear(); + strongRef.clear(); +} + +} diff --git a/src/bun.js/bindings/NapiWeakValue.cpp b/src/bun.js/bindings/NapiWeakValue.cpp new file mode 100644 index 0000000000..b934250789 --- /dev/null +++ b/src/bun.js/bindings/NapiWeakValue.cpp @@ -0,0 +1,103 @@ +#include "napi.h" + +namespace Zig { + +NapiWeakValue::~NapiWeakValue() +{ + clear(); +} + +void NapiWeakValue::clear() +{ + switch (m_tag) { + case WeakTypeTag::Cell: { + m_value.cell.clear(); + break; + } + case WeakTypeTag::String: { + m_value.string.clear(); + break; + } + default: { + break; + } + } + + m_tag = WeakTypeTag::NotSet; +} + +bool NapiWeakValue::isClear() const +{ + return m_tag == WeakTypeTag::NotSet; +} + +void NapiWeakValue::setPrimitive(JSValue value) +{ + switch (m_tag) { + case WeakTypeTag::Cell: { + m_value.cell.clear(); + break; + } + case WeakTypeTag::String: { + m_value.string.clear(); + break; + } + default: { + break; + } + } + m_tag = WeakTypeTag::Primitive; + m_value.primitive = value; +} + +void NapiWeakValue::set(JSValue value, WeakHandleOwner& owner, void* context) +{ + if (value.isCell()) { + auto* cell = value.asCell(); + if (cell->isString()) { + setString(jsCast(cell), owner, context); + } else { + setCell(cell, owner, context); + } + } else { + setPrimitive(value); + } +} + +void NapiWeakValue::setCell(JSCell* cell, WeakHandleOwner& owner, void* context) +{ + switch (m_tag) { + case WeakTypeTag::Cell: { + m_value.cell.clear(); + break; + } + case WeakTypeTag::String: { + m_value.string.clear(); + break; + } + default: { + break; + } + } + + m_value.cell = JSC::Weak(cell, &owner, context); + m_tag = WeakTypeTag::Cell; +} + +void NapiWeakValue::setString(JSString* string, WeakHandleOwner& owner, void* context) +{ + switch (m_tag) { + case WeakTypeTag::Cell: { + m_value.cell.clear(); + break; + } + default: { + break; + } + } + + m_value.string = JSC::Weak(string, &owner, context); + m_tag = WeakTypeTag::String; +} + +} diff --git a/src/bun.js/bindings/NodeDirent.cpp b/src/bun.js/bindings/NodeDirent.cpp index de4385bac7..04a339cc14 100644 --- a/src/bun.js/bindings/NodeDirent.cpp +++ b/src/bun.js/bindings/NodeDirent.cpp @@ -339,6 +339,7 @@ extern "C" JSC::EncodedJSValue Bun__Dirent__toJS(Zig::GlobalObject* globalObject JSString* pathValue = nullptr; if (path && path->tag == BunStringTag::WTFStringImpl && previousPath && *previousPath && (*previousPath)->length() == path->impl.wtf->length()) { auto view = (*previousPath)->view(globalObject); + RETURN_IF_EXCEPTION(scope, {}); if (view == path->impl.wtf) { pathValue = *previousPath; diff --git a/src/bun.js/bindings/NodeFSStatBinding.cpp b/src/bun.js/bindings/NodeFSStatBinding.cpp index d818b610e6..382bdefc42 100644 --- a/src/bun.js/bindings/NodeFSStatBinding.cpp +++ b/src/bun.js/bindings/NodeFSStatBinding.cpp @@ -143,6 +143,7 @@ static JSValue modeStatFunction(JSC::JSGlobalObject* globalObject, CallFrame* ca if constexpr (isBigInt) { int64_t mode_value = modeValue.toBigInt64(globalObject); + RETURN_IF_EXCEPTION(scope, {}); return jsBoolean(isModeFn(statFunction, mode_value)); } @@ -602,7 +603,6 @@ extern "C" JSC::EncodedJSValue Bun__createJSStatsObject(Zig::GlobalObject* globa int64_t uid,
int64_t gid, int64_t rdev, int64_t size, int64_t blksize, int64_t blocks, double atimeMs, double mtimeMs, double ctimeMs, double birthtimeMs) { auto& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); JSC::JSValue js_dev = JSC::jsDoubleNumber(dev); JSC::JSValue js_ino = JSC::jsDoubleNumber(ino); @@ -637,7 +637,7 @@ extern "C" JSC::EncodedJSValue Bun__createJSStatsObject(Zig::GlobalObject* globa object->putDirectOffset(vm, 12, js_ctimeMs); object->putDirectOffset(vm, 13, js_birthtimeMs); - RELEASE_AND_RETURN(scope, JSC::JSValue::encode(object)); + return JSC::JSValue::encode(object); } extern "C" JSC::EncodedJSValue Bun__createJSBigIntStatsObject(Zig::GlobalObject* globalObject, @@ -665,23 +665,42 @@ extern "C" JSC::EncodedJSValue Bun__createJSBigIntStatsObject(Zig::GlobalObject* auto* structure = getStructure(globalObject); JSC::JSValue js_dev = JSC::JSBigInt::createFrom(globalObject, dev); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_ino = JSC::JSBigInt::createFrom(globalObject, ino); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_mode = JSC::JSBigInt::createFrom(globalObject, mode); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_nlink = JSC::JSBigInt::createFrom(globalObject, nlink); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_uid = JSC::JSBigInt::createFrom(globalObject, uid); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_gid = JSC::JSBigInt::createFrom(globalObject, gid); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_rdev = JSC::JSBigInt::createFrom(globalObject, rdev); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_size = JSC::JSBigInt::createFrom(globalObject, size); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_blksize = JSC::JSBigInt::createFrom(globalObject, blksize); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_blocks = JSC::JSBigInt::createFrom(globalObject, blocks); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_atimeMs = JSC::JSBigInt::createFrom(globalObject, atimeMs); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_mtimeMs = JSC::JSBigInt::createFrom(globalObject, mtimeMs); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_ctimeMs = JSC::JSBigInt::createFrom(globalObject, ctimeMs); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_birthtimeMs = JSC::JSBigInt::createFrom(globalObject, birthtimeMs); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_atimeNs = JSC::JSBigInt::createFrom(globalObject, atimeNs); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_mtimeNs = JSC::JSBigInt::createFrom(globalObject, mtimeNs); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_ctimeNs = JSC::JSBigInt::createFrom(globalObject, ctimeNs); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_birthtimeNs = JSC::JSBigInt::createFrom(globalObject, birthtimeNs); + RETURN_IF_EXCEPTION(scope, {}); + auto* object = JSC::JSFinalObject::create(vm, structure); object->putDirectOffset(vm, 0, js_dev); diff --git a/src/bun.js/bindings/NodeFSStatFSBinding.cpp b/src/bun.js/bindings/NodeFSStatFSBinding.cpp index e4256e613d..d768ed9596 100644 --- a/src/bun.js/bindings/NodeFSStatFSBinding.cpp +++ b/src/bun.js/bindings/NodeFSStatFSBinding.cpp @@ -259,7 +259,6 @@ extern "C" JSC::EncodedJSValue Bun__createJSStatFSObject(Zig::GlobalObject* glob int64_t ffree) { auto& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); JSC::JSValue js_fstype = JSC::jsNumber(fstype); JSC::JSValue js_bsize = JSC::jsNumber(bsize); @@ -280,7 +279,7 @@ extern "C" JSC::EncodedJSValue Bun__createJSStatFSObject(Zig::GlobalObject* glob object->putDirectOffset(vm, 5, js_files); 
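// A minimal sketch (names hypothetical) of the exception-check discipline applied
// throughout these stat bindings: JSC::JSBigInt::createFrom allocates and can
// therefore throw, so every result is checked before the next step, while
// jsNumber/jsDoubleNumber cannot throw, which is why the plain-number builders
// drop their DECLARE_THROW_SCOPE entirely.
//
//     auto scope = DECLARE_THROW_SCOPE(vm);
//     JSC::JSValue js_example = JSC::JSBigInt::createFrom(globalObject, exampleInt64);
//     RETURN_IF_EXCEPTION(scope, {}); // bail out before js_example is stored anywhere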
object->putDirectOffset(vm, 6, js_ffree); - RELEASE_AND_RETURN(scope, JSC::JSValue::encode(object)); + return JSC::JSValue::encode(object); } extern "C" JSC::EncodedJSValue Bun__createJSBigIntStatFSObject(Zig::GlobalObject* globalObject, @@ -297,12 +296,19 @@ extern "C" JSC::EncodedJSValue Bun__createJSBigIntStatFSObject(Zig::GlobalObject auto* structure = getStatFSStructure(globalObject); JSC::JSValue js_fstype = JSC::JSBigInt::createFrom(globalObject, fstype); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_bsize = JSC::JSBigInt::createFrom(globalObject, bsize); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_blocks = JSC::JSBigInt::createFrom(globalObject, blocks); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_bfree = JSC::JSBigInt::createFrom(globalObject, bfree); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_bavail = JSC::JSBigInt::createFrom(globalObject, bavail); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_files = JSC::JSBigInt::createFrom(globalObject, files); + RETURN_IF_EXCEPTION(scope, {}); JSC::JSValue js_ffree = JSC::JSBigInt::createFrom(globalObject, ffree); + RETURN_IF_EXCEPTION(scope, {}); auto* object = JSC::JSFinalObject::create(vm, structure); diff --git a/src/bun.js/bindings/NodeHTTP.cpp b/src/bun.js/bindings/NodeHTTP.cpp index 39b4bad591..b0ce4653ee 100644 --- a/src/bun.js/bindings/NodeHTTP.cpp +++ b/src/bun.js/bindings/NodeHTTP.cpp @@ -677,6 +677,7 @@ static void assignHeadersFromUWebSocketsForCall(uWS::HttpRequest* request, JSVal } else { headersObject->putDirectMayBeIndex(globalObject, nameIdentifier, jsValue); + RETURN_IF_EXCEPTION(scope, void()); arrayValues.append(nameString); arrayValues.append(jsValue); RETURN_IF_EXCEPTION(scope, void()); @@ -825,7 +826,7 @@ static EncodedJSValue assignHeadersFromUWebSockets(uWS::HttpRequest* request, JS auto value = String::tryCreateUninitialized(pair.second.length(), data); if (value.isNull()) [[unlikely]] { throwOutOfMemoryError(globalObject, scope); - return JSValue::encode({}); + return {}; } if (pair.second.length() > 0) memcpy(data.data(), pair.second.data(), pair.second.length()); @@ -921,12 +922,7 @@ static EncodedJSValue NodeHTTPServer__onRequest( args.append(thisValue); assignHeadersFromUWebSocketsForCall(request, methodString, args, globalObject, vm); - if (scope.exception()) { - auto* exception = scope.exception(); - response->endWithoutBody(); - scope.clearException(); - return JSValue::encode(exception); - } + RETURN_IF_EXCEPTION(scope, {}); bool hasBody = false; WebCore::JSNodeHTTPResponse* nodeHTTPResponseObject = jsCast(JSValue::decode(NodeHTTPResponse__createForJS(any_server, globalObject, &hasBody, request, isSSL, response, upgrade_ctx, nodeHttpResponsePtr))); @@ -947,11 +943,7 @@ static EncodedJSValue NodeHTTPServer__onRequest( args.append(jsUndefined()); } } else { - JSNodeHTTPServerSocket* socket = JSNodeHTTPServerSocket::create( - vm, - globalObject->m_JSNodeHTTPServerSocketStructure.getInitializedOnMainThread(globalObject), - (us_socket_t*)response, - isSSL, nodeHTTPResponseObject); + JSNodeHTTPServerSocket* socket = JSNodeHTTPServerSocket::create(vm, globalObject->m_JSNodeHTTPServerSocketStructure.getInitializedOnMainThread(globalObject), (us_socket_t*)response, isSSL, nodeHTTPResponseObject); socket->strongThis.set(vm, socket); @@ -963,13 +955,8 @@ static EncodedJSValue NodeHTTPServer__onRequest( } args.append(jsBoolean(request->isAncient())); - WTF::NakedPtr exception; - JSValue returnValue = AsyncContextFrame::call(globalObject, callbackObject, jsUndefined(), args, exception); - if 
(exception) { - auto* ptr = exception.get(); - exception.clear(); - return JSValue::encode(ptr); - } + JSValue returnValue = AsyncContextFrame::profiledCall(globalObject, callbackObject, jsUndefined(), args); + RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(returnValue); } @@ -1101,7 +1088,7 @@ static void NodeHTTPServer__writeHead( String key = entry.key(); String value = headerValue.toWTFString(globalObject); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { return false; } @@ -1116,6 +1103,7 @@ static void NodeHTTPServer__writeHead( for (unsigned i = 0; i < propertyNames.size(); ++i) { JSValue headerValue = headersObject->getIfPropertyExists(globalObject, propertyNames[i]); + RETURN_IF_EXCEPTION(scope, ); if (!headerValue.isString()) { continue; } @@ -1256,7 +1244,7 @@ JSC_DEFINE_HOST_FUNCTION(jsHTTPAssignHeaders, (JSGlobalObject * globalObject, Ca RETURN_IF_EXCEPTION(scope, {}); } - return assignHeadersFromFetchHeaders(impl, globalObject->objectPrototype(), objectValue, tuple, globalObject, vm); + RELEASE_AND_RETURN(scope, assignHeadersFromFetchHeaders(impl, globalObject->objectPrototype(), objectValue, tuple, globalObject, vm)); } } } @@ -1368,7 +1356,7 @@ JSC_DEFINE_HOST_FUNCTION(jsHTTPGetHeader, (JSGlobalObject * globalObject, CallFr WebCore::ExceptionOr res = impl->get(name); if (res.hasException()) { WebCore::propagateException(globalObject, scope, res.releaseException()); - return JSValue::encode(jsUndefined()); + RELEASE_AND_RETURN(scope, {}); } String value = res.returnValue(); diff --git a/src/bun.js/bindings/NodeModuleModule.zig b/src/bun.js/bindings/NodeModuleModule.zig index 2b903d806d..133cfd643f 100644 --- a/src/bun.js/bindings/NodeModuleModule.zig +++ b/src/bun.js/bindings/NodeModuleModule.zig @@ -5,12 +5,14 @@ const JSGlobalObject = JSC.JSGlobalObject; const JSValue = JSC.JSValue; const ErrorableString = JSC.ErrorableString; +export const NodeModuleModule__findPath = JSC.host_fn.wrap3(findPath); + // https://github.com/nodejs/node/blob/40ef9d541ed79470977f90eb445c291b95ab75a0/lib/internal/modules/cjs/loader.js#L666 -pub export fn NodeModuleModule__findPath( +fn findPath( global: *JSGlobalObject, request_bun_str: bun.String, paths_maybe: ?*JSC.JSArray, -) JSValue { +) bun.JSError!JSValue { var stack_buf = std.heap.stackFallback(8192, bun.default_allocator); const alloc = stack_buf.get(); @@ -25,12 +27,9 @@ pub export fn NodeModuleModule__findPath( // for each path const found = if (paths_maybe) |paths| found: { - var iter = paths.iterator(global); - while (iter.next()) |path| { - const cur_path = bun.String.fromJS(path, global) catch |err| switch (err) { - error.JSError => return .zero, - error.OutOfMemory => return global.throwOutOfMemoryValue(), - }; + var iter = try paths.iterator(global); + while (try iter.next()) |path| { + const cur_path = try bun.String.fromJS(path, global); defer cur_path.deref(); if (findPathInner(request_bun_str, cur_path, global)) |found| { @@ -65,7 +64,7 @@ fn findPathInner( true, ) catch |err| switch (err) { error.JSError => { - global.clearException(); + global.clearException(); // TODO: suspicious; clearing here swallows the pending exception. Verify it should be ignored. return null; }, else => return null, diff --git a/src/bun.js/bindings/NodeTLS.cpp b/src/bun.js/bindings/NodeTLS.cpp index 36ef165e22..a846703aef 100644 --- a/src/bun.js/bindings/NodeTLS.cpp +++ b/src/bun.js/bindings/NodeTLS.cpp @@ -69,7 +69,7 @@ JSC_DEFINE_HOST_FUNCTION(getExtraCACertificates, (JSC::JSGlobalObject * globalOb BIO_free(bio); } - return JSValue::encode(JSC::objectConstructorFreeze(globalObject, rootCertificates)); +
RELEASE_AND_RETURN(scope, JSValue::encode(JSC::objectConstructorFreeze(globalObject, rootCertificates))); } } // namespace Bun diff --git a/src/bun.js/bindings/NodeTimerObject.cpp b/src/bun.js/bindings/NodeTimerObject.cpp index d3523c983d..2936bddeb4 100644 --- a/src/bun.js/bindings/NodeTimerObject.cpp +++ b/src/bun.js/bindings/NodeTimerObject.cpp @@ -19,7 +19,7 @@ using namespace JSC; static bool call(JSGlobalObject* globalObject, JSValue timerObject, JSValue callbackValue, JSValue argumentsValue) { auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); + auto scope = DECLARE_CATCH_SCOPE(vm); JSValue restoreAsyncContext {}; JSC::InternalFieldTuple* asyncContextData = nullptr; diff --git a/src/bun.js/bindings/NodeURL.cpp b/src/bun.js/bindings/NodeURL.cpp index 86b1e70bea..6b7612d37f 100644 --- a/src/bun.js/bindings/NodeURL.cpp +++ b/src/bun.js/bindings/NodeURL.cpp @@ -59,7 +59,7 @@ JSC_DEFINE_HOST_FUNCTION(jsDomainToASCII, (JSC::JSGlobalObject * globalObject, J constexpr static size_t hostnameBufferLength = 2048; auto encoder = &WTF::URLParser::internationalDomainNameTranscoder(); - UChar hostnameBuffer[hostnameBufferLength]; + char16_t hostnameBuffer[hostnameBufferLength]; UErrorCode error = U_ZERO_ERROR; UIDNAInfo processingDetails = UIDNA_INFO_INITIALIZER; const auto span = domain.span16(); @@ -128,7 +128,7 @@ JSC_DEFINE_HOST_FUNCTION(jsDomainToUnicode, (JSC::JSGlobalObject * globalObject, constexpr static int hostnameBufferLength = 2048; auto encoder = &WTF::URLParser::internationalDomainNameTranscoder(); - UChar hostnameBuffer[hostnameBufferLength]; + char16_t hostnameBuffer[hostnameBufferLength]; UErrorCode error = U_ZERO_ERROR; UIDNAInfo processingDetails = UIDNA_INFO_INITIALIZER; diff --git a/src/bun.js/bindings/NodeVM.cpp b/src/bun.js/bindings/NodeVM.cpp index 94b8b0b253..8d91cc4a4b 100644 --- a/src/bun.js/bindings/NodeVM.cpp +++ b/src/bun.js/bindings/NodeVM.cpp @@ -55,16 +55,23 @@ #include "JavaScriptCore/FunctionCodeBlock.h" #include "JavaScriptCore/JIT.h" #include "JavaScriptCore/ProgramCodeBlock.h" +#include "JavaScriptCore/GlobalObjectMethodTable.h" #include "NodeVMScriptFetcher.h" #include "wtf/FileHandle.h" #include "../vm/SigintWatcher.h" +#include "JavaScriptCore/GetterSetter.h" + namespace Bun { using namespace WebCore; +static JSInternalPromise* moduleLoaderImportModuleInner(NodeVMGlobalObject* globalObject, JSC::JSModuleLoader* moduleLoader, JSC::JSString* moduleName, JSC::JSValue parameters, const JSC::SourceOrigin& sourceOrigin); + namespace NodeVM { +static JSInternalPromise* importModuleInner(JSGlobalObject* globalObject, JSString* moduleName, JSValue parameters, const SourceOrigin& sourceOrigin, JSValue dynamicImportCallback, JSValue owner); + bool extractCachedData(JSValue cachedDataValue, WTF::Vector& outCachedData) { if (!cachedDataValue.isCell()) { @@ -126,6 +133,8 @@ JSC::JSFunction* constructAnonymousFunction(JSC::JSGlobalObject* globalObject, c if (actuallyValid) { auto exception = error.toErrorObject(globalObject, sourceCode, -1); + RETURN_IF_EXCEPTION(throwScope, nullptr); + throwException(globalObject, throwScope, exception); return nullptr; } @@ -174,6 +183,7 @@ JSC::JSFunction* constructAnonymousFunction(JSC::JSGlobalObject* globalObject, c { DeferGC deferGC(vm); programCodeBlock = ProgramCodeBlock::create(vm, programExecutable, unlinkedProgramCodeBlock, scope); + RETURN_IF_EXCEPTION(throwScope, nullptr); } if (!programCodeBlock || programCodeBlock->numberOfFunctionExprs() == 0) { @@ -193,6 +203,7 @@ JSC::JSFunction* 
constructAnonymousFunction(JSC::JSGlobalObject* globalObject, c RefPtr producedBytecode = getBytecode(globalObject, programExecutable, sourceCode); if (producedBytecode) { JSC::JSUint8Array* buffer = WebCore::createBuffer(globalObject, producedBytecode->span()); + RETURN_IF_EXCEPTION(throwScope, nullptr); function->putDirect(vm, JSC::Identifier::fromString(vm, "cachedData"_s), buffer); function->putDirect(vm, JSC::Identifier::fromString(vm, "cachedDataProduced"_s), jsBoolean(true)); } else { @@ -201,39 +212,84 @@ JSC::JSFunction* constructAnonymousFunction(JSC::JSGlobalObject* globalObject, c } } else { function->putDirect(vm, JSC::Identifier::fromString(vm, "cachedDataRejected"_s), jsBoolean(bytecodeAccepted == TriState::False)); + RETURN_IF_EXCEPTION(throwScope, nullptr); } return function; } -JSInternalPromise* importModule(JSGlobalObject* globalObject, JSString* moduleNameValue, JSValue parameters, const SourceOrigin& sourceOrigin) +JSInternalPromise* importModule(JSGlobalObject* globalObject, JSString* moduleName, JSValue parameters, const SourceOrigin& sourceOrigin) { - if (auto* fetcher = sourceOrigin.fetcher(); !fetcher || fetcher->fetcherType() != ScriptFetcher::Type::NodeVM) { - return nullptr; - } - VM& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); - auto* fetcher = static_cast(sourceOrigin.fetcher()); - - JSValue dynamicImportCallback = fetcher->dynamicImportCallback(); - - if (!dynamicImportCallback || !dynamicImportCallback.isCallable()) { + if (auto* fetcher = sourceOrigin.fetcher(); !fetcher || fetcher->fetcherType() != ScriptFetcher::Type::NodeVM) { + if (!sourceOrigin.url().isEmpty()) { + if (auto* nodeVmGlobalObject = jsDynamicCast(globalObject)) { + if (nodeVmGlobalObject->dynamicImportCallback()) { + RELEASE_AND_RETURN(scope, moduleLoaderImportModuleInner(nodeVmGlobalObject, globalObject->moduleLoader(), moduleName, parameters, sourceOrigin)); + } + } + } return nullptr; } - JSFunction* owner = fetcher->owner(); + auto* fetcher = static_cast(sourceOrigin.fetcher()); + + if (fetcher->isUsingDefaultLoader()) { + return nullptr; + } + + JSValue dynamicImportCallback = fetcher->dynamicImportCallback(); + + if (isUseMainContextDefaultLoaderConstant(globalObject, dynamicImportCallback)) { + auto defer = fetcher->temporarilyUseDefaultLoader(); + Zig::GlobalObject* zigGlobalObject = defaultGlobalObject(globalObject); + RELEASE_AND_RETURN(scope, zigGlobalObject->moduleLoaderImportModule(zigGlobalObject, zigGlobalObject->moduleLoader(), moduleName, parameters, sourceOrigin)); + } else if (!dynamicImportCallback || !dynamicImportCallback.isCallable()) { + throwException(globalObject, scope, createError(globalObject, ErrorCode::ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING, "A dynamic import callback was not specified."_s)); + return nullptr; + } + + RELEASE_AND_RETURN(scope, importModuleInner(globalObject, moduleName, parameters, sourceOrigin, dynamicImportCallback, fetcher->owner())); +} + +static JSInternalPromise* importModuleInner(JSGlobalObject* globalObject, JSString* moduleName, JSValue parameters, const SourceOrigin& sourceOrigin, JSValue dynamicImportCallback, JSValue owner) +{ + VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + if (parameters.isObject()) { + if (JSValue with = asObject(parameters)->getIfPropertyExists(globalObject, vm.propertyNames->with)) { + parameters = with; + } + RETURN_IF_EXCEPTION(scope, nullptr); + } MarkedArgumentBuffer args; - args.append(moduleNameValue); - args.append(owner ? 
owner : jsUndefined()); + args.append(moduleName); + if (owner) { + args.append(owner); + } else if (auto* nodeVmGlobalObject = jsDynamicCast(globalObject)) { + if (nodeVmGlobalObject->isNotContextified()) { + args.append(nodeVmGlobalObject->specialSandbox()); + } else { + args.append(nodeVmGlobalObject->contextifiedObject()); + } + } else { + args.append(jsUndefined()); + } args.append(parameters); JSValue result = AsyncContextFrame::call(globalObject, dynamicImportCallback, jsUndefined(), args); RETURN_IF_EXCEPTION(scope, nullptr); + if (result.isUndefinedOrNull()) { + throwException(globalObject, scope, createError(globalObject, ErrorCode::ERR_VM_MODULE_NOT_MODULE, "Provided module is not an instance of Module"_s)); + return nullptr; + } + if (auto* promise = jsDynamicCast(result)) { return promise; } @@ -267,7 +323,7 @@ JSInternalPromise* importModule(JSGlobalObject* globalObject, JSString* moduleNa promise = promise->then(globalObject, transformer, nullptr); RETURN_IF_EXCEPTION(scope, nullptr); - return promise; + RELEASE_AND_RETURN(scope, promise); } // Helper function to create an anonymous function expression with parameters @@ -368,9 +424,11 @@ JSC::EncodedJSValue createCachedData(JSGlobalObject* globalObject, const JSC::So std::span bytes = bytecode->span(); JSC::JSUint8Array* buffer = WebCore::createBuffer(globalObject, bytes); - RETURN_IF_EXCEPTION(scope, {}); - ASSERT(buffer); + + if (!buffer) { + return throwVMError(globalObject, scope, "Failed to create buffer"_s); + } return JSValue::encode(buffer); } @@ -386,6 +444,7 @@ bool handleException(JSGlobalObject* globalObject, VM& vm, NakedPtrstack(); size_t stack_size = e_stack.size(); @@ -411,8 +470,12 @@ bool handleException(JSGlobalObject* globalObject, VM& vm, NakedPtr getNodeVMContextOptions(JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSValue optionsArg, NodeVMContextOptions& outOptions, ASCIILiteral codeGenerationKey) +std::optional getNodeVMContextOptions(JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSValue optionsArg, NodeVMContextOptions& outOptions, ASCIILiteral codeGenerationKey, JSValue* importer) { + if (importer) { + *importer = jsUndefined(); + } + outOptions = {}; // If options is provided, validate name and origin properties @@ -423,24 +486,36 @@ std::optional getNodeVMContextOptions(JSGlobalObject* globa JSObject* options = asObject(optionsArg); // Check name property - if (JSValue nameValue = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "name"_s))) { - RETURN_IF_EXCEPTION(scope, {}); + auto nameValue = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "name"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (nameValue) { if (!nameValue.isUndefined() && !nameValue.isString()) { return ERR::INVALID_ARG_TYPE(scope, globalObject, "options.name"_s, "string"_s, nameValue); } } // Check origin property - if (JSValue originValue = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "origin"_s))) { - RETURN_IF_EXCEPTION(scope, {}); + auto originValue = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "origin"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (originValue) { if (!originValue.isUndefined() && !originValue.isString()) { return ERR::INVALID_ARG_TYPE(scope, globalObject, "options.origin"_s, "string"_s, originValue); } } - if (JSValue codeGenerationValue = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, codeGenerationKey))) { - RETURN_IF_EXCEPTION(scope, {}); + JSValue 
importModuleDynamicallyValue = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "importModuleDynamically"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (importModuleDynamicallyValue) { + if (importer && importModuleDynamicallyValue && (importModuleDynamicallyValue.isCallable() || isUseMainContextDefaultLoaderConstant(globalObject, importModuleDynamicallyValue))) { + *importer = importModuleDynamicallyValue; + } + } + + JSValue codeGenerationValue = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, codeGenerationKey)); + RETURN_IF_EXCEPTION(scope, {}); + + if (codeGenerationValue) { if (codeGenerationValue.isUndefined()) { return std::nullopt; } @@ -451,22 +526,26 @@ std::optional getNodeVMContextOptions(JSGlobalObject* globa JSObject* codeGenerationObject = asObject(codeGenerationValue); - if (JSValue allowStringsValue = codeGenerationObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "strings"_s))) { - RETURN_IF_EXCEPTION(scope, {}); + auto allowStringsValue = codeGenerationObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "strings"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (allowStringsValue) { if (!allowStringsValue.isBoolean()) { return ERR::INVALID_ARG_TYPE(scope, globalObject, WTF::makeString("options."_s, codeGenerationKey, ".strings"_s), "boolean"_s, allowStringsValue); } outOptions.allowStrings = allowStringsValue.toBoolean(globalObject); + RETURN_IF_EXCEPTION(scope, {}); } - if (JSValue allowWasmValue = codeGenerationObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "wasm"_s))) { - RETURN_IF_EXCEPTION(scope, {}); + auto allowWasmValue = codeGenerationObject->getIfPropertyExists(globalObject, Identifier::fromString(vm, "wasm"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (allowWasmValue) { if (!allowWasmValue.isBoolean()) { return ERR::INVALID_ARG_TYPE(scope, globalObject, WTF::makeString("options."_s, codeGenerationKey, ".wasm"_s), "boolean"_s, allowWasmValue); } outOptions.allowWasm = allowWasmValue.toBoolean(globalObject); + RETURN_IF_EXCEPTION(scope, {}); } } @@ -495,13 +574,28 @@ NodeVMGlobalObject* getGlobalObjectFromContext(JSGlobalObject* globalObject, JSV auto* zigGlobalObject = defaultGlobalObject(globalObject); JSValue scopeValue = zigGlobalObject->vmModuleContextMap()->get(context); if (scopeValue.isUndefined()) { + if (auto* specialSandbox = jsDynamicCast(context)) { + return specialSandbox->parentGlobal(); + } + + if (auto* proxy = jsDynamicCast(context)) { + if (auto* nodeVmGlobalObject = jsDynamicCast(proxy->target())) { + return nodeVmGlobalObject; + } + } + if (canThrow) { INVALID_ARG_VALUE_VM_VARIATION(scope, globalObject, "contextifiedObject"_s, context); } return nullptr; } - NodeVMGlobalObject* nodeVmGlobalObject = jsDynamicCast(scopeValue); + auto* nodeVmGlobalObject = jsDynamicCast(scopeValue); + + if (!nodeVmGlobalObject) { + nodeVmGlobalObject = jsDynamicCast(context); + } + if (!nodeVmGlobalObject) { if (canThrow) { INVALID_ARG_VALUE_VM_VARIATION(scope, globalObject, "contextifiedObject"_s, context); @@ -520,12 +614,98 @@ JSC::EncodedJSValue INVALID_ARG_VALUE_VM_VARIATION(JSC::ThrowScope& throwScope, return {}; } +bool isContext(JSGlobalObject* globalObject, JSValue value) +{ + auto* zigGlobalObject = defaultGlobalObject(globalObject); + + if (zigGlobalObject->vmModuleContextMap()->has(asObject(value))) { + return true; + } + + if (value.inherits(NodeVMSpecialSandbox::info())) { + return true; + } + + if (auto* proxy = jsDynamicCast(value); proxy && 
proxy->target()) { + return proxy->target()->inherits(NodeVMGlobalObject::info()); + } + + return false; +} + +bool getContextArg(JSGlobalObject* globalObject, JSValue& contextArg) +{ + if (contextArg.isUndefined()) { + contextArg = JSC::constructEmptyObject(globalObject); + } else if (contextArg.isSymbol()) { + Zig::GlobalObject* zigGlobalObject = defaultGlobalObject(globalObject); + if (contextArg == zigGlobalObject->m_nodeVMDontContextify.get(zigGlobalObject)) { + contextArg = JSC::constructEmptyObject(globalObject); + return true; + } + } + + return false; +} + +bool isUseMainContextDefaultLoaderConstant(JSGlobalObject* globalObject, JSValue value) +{ + if (value.isSymbol()) { + Zig::GlobalObject* zigGlobalObject = defaultGlobalObject(globalObject); + if (value == zigGlobalObject->m_nodeVMUseMainContextDefaultLoader.get(zigGlobalObject)) { + return true; + } + } + + return false; +} + } // namespace NodeVM using namespace NodeVM; -NodeVMGlobalObject::NodeVMGlobalObject(JSC::VM& vm, JSC::Structure* structure) +template JSC::GCClient::IsoSubspace* NodeVMSpecialSandbox::subspaceFor(JSC::VM& vm) +{ + if constexpr (mode == JSC::SubspaceAccess::Concurrently) + return nullptr; + return WebCore::subspaceForImpl( + vm, + [](auto& spaces) { return spaces.m_clientSubspaceForNodeVMSpecialSandbox.get(); }, + [](auto& spaces, auto&& space) { spaces.m_clientSubspaceForNodeVMSpecialSandbox = std::forward(space); }, + [](auto& spaces) { return spaces.m_subspaceForNodeVMSpecialSandbox.get(); }, + [](auto& spaces, auto&& space) { spaces.m_subspaceForNodeVMSpecialSandbox = std::forward(space); }); +} + +NodeVMSpecialSandbox* NodeVMSpecialSandbox::create(VM& vm, Structure* structure, NodeVMGlobalObject* globalObject) +{ + NodeVMSpecialSandbox* ptr = new (NotNull, allocateCell(vm)) NodeVMSpecialSandbox(vm, structure, globalObject); + ptr->finishCreation(vm); + return ptr; +} + +JSC::Structure* NodeVMSpecialSandbox::createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) +{ + return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info()); +} + +NodeVMSpecialSandbox::NodeVMSpecialSandbox(VM& vm, Structure* structure, NodeVMGlobalObject* globalObject) : Base(vm, structure) +{ + m_parentGlobal.set(vm, this, globalObject); +} + +void NodeVMSpecialSandbox::finishCreation(VM& vm) +{ + Base::finishCreation(vm); + ASSERT(inherits(info())); +} + +const JSC::ClassInfo NodeVMSpecialSandbox::s_info = { "NodeVMSpecialSandbox"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(NodeVMSpecialSandbox) }; + +NodeVMGlobalObject::NodeVMGlobalObject(JSC::VM& vm, JSC::Structure* structure, NodeVMContextOptions contextOptions, JSValue importer) + : Base(vm, structure, &globalObjectMethodTable()) + , m_dynamicImportCallback(vm, this, importer) + , m_contextOptions(contextOptions) { } @@ -542,10 +722,10 @@ template JSC::GCClient::IsoSubspace* NodeVMG [](auto& server) -> JSC::HeapCellType& { return server.m_heapCellTypeForNodeVMGlobalObject; }); } -NodeVMGlobalObject* NodeVMGlobalObject::create(JSC::VM& vm, JSC::Structure* structure, NodeVMContextOptions options) +NodeVMGlobalObject* NodeVMGlobalObject::create(JSC::VM& vm, JSC::Structure* structure, NodeVMContextOptions options, JSValue importer) { - auto* cell = new (NotNull, JSC::allocateCell(vm)) NodeVMGlobalObject(vm, structure); - cell->finishCreation(vm, options); + auto* cell = new (NotNull, JSC::allocateCell(vm)) NodeVMGlobalObject(vm, structure, options, importer); + 
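// The standard two-phase JSC construction idiom, shown here as a short sketch with
// a hypothetical cell type: the placement-new above runs only the C++ constructor,
// and finishCreation() performs the setup the GC is allowed to observe.
//
//     MyCell* cell = new (NotNull, JSC::allocateCell<MyCell>(vm)) MyCell(vm, structure);
//     cell->finishCreation(vm); // initialization that allocates or stores GC pointers goes here
//     return cell;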
cell->finishCreation(vm); return cell; } @@ -555,11 +735,40 @@ Structure* NodeVMGlobalObject::createStructure(JSC::VM& vm, JSC::JSValue prototy return JSC::Structure::create(vm, nullptr, prototype, JSC::TypeInfo(JSC::GlobalObjectType, StructureFlags & ~IsImmutablePrototypeExoticObject), info()); } -void NodeVMGlobalObject::finishCreation(JSC::VM& vm, NodeVMContextOptions options) +const JSC::GlobalObjectMethodTable& NodeVMGlobalObject::globalObjectMethodTable() +{ + static const JSC::GlobalObjectMethodTable table { + &supportsRichSourceInfo, + &shouldInterruptScript, + &javaScriptRuntimeFlags, + nullptr, // queueTaskToEventLoop + nullptr, // shouldInterruptScriptBeforeTimeout, + &moduleLoaderImportModule, + nullptr, // moduleLoaderResolve + nullptr, // moduleLoaderFetch + nullptr, // moduleLoaderCreateImportMetaProperties + nullptr, // moduleLoaderEvaluate + nullptr, // promiseRejectionTracker + &reportUncaughtExceptionAtEventLoop, + ¤tScriptExecutionOwner, + &scriptExecutionStatus, + nullptr, // reportViolationForUnsafeEval + nullptr, // defaultLanguage + nullptr, // compileStreaming + nullptr, // instantiateStreaming + nullptr, + &codeForEval, + &canCompileStrings, + &trustedScriptStructure, + }; + return table; +} + +void NodeVMGlobalObject::finishCreation(JSC::VM& vm) { Base::finishCreation(vm); - setEvalEnabled(options.allowStrings, "Code generation from strings disallowed for this context"_s); - setWebAssemblyEnabled(options.allowWasm, "Wasm code generation disallowed by embedder"_s); + setEvalEnabled(m_contextOptions.allowStrings, "Code generation from strings disallowed for this context"_s); + setWebAssemblyEnabled(m_contextOptions.allowWasm, "Wasm code generation disallowed by embedder"_s); vm.ensureTerminationException(); } @@ -614,18 +823,27 @@ bool NodeVMGlobalObject::put(JSCell* cell, JSGlobalObject* globalObject, Propert bool isFunction = value.isCallable(); if (slot.isStrictMode() && !isDeclared && isContextualStore && !isFunction) { - return Base::put(cell, globalObject, propertyName, value, slot); + RELEASE_AND_RETURN(scope, Base::put(cell, globalObject, propertyName, value, slot)); } if (!isDeclared && value.isSymbol()) { - return Base::put(cell, globalObject, propertyName, value, slot); + RELEASE_AND_RETURN(scope, Base::put(cell, globalObject, propertyName, value, slot)); + } + + if (thisObject->m_contextOptions.notContextified) { + JSObject* specialSandbox = thisObject->specialSandbox(); + slot.setThisValue(specialSandbox); + RELEASE_AND_RETURN(scope, specialSandbox->putInline(globalObject, propertyName, value, slot)); } slot.setThisValue(sandbox); + bool result = sandbox->methodTable()->put(sandbox, globalObject, propertyName, value, slot); + RETURN_IF_EXCEPTION(scope, false); - if (!sandbox->methodTable()->put(sandbox, globalObject, propertyName, value, slot)) { + if (!result) { return false; } + RETURN_IF_EXCEPTION(scope, false); if (isDeclaredOnSandbox && getter.isAccessor() and (getter.attributes() & PropertyAttribute::DontEnum) == 0) { @@ -633,21 +851,54 @@ bool NodeVMGlobalObject::put(JSCell* cell, JSGlobalObject* globalObject, Propert } slot.setThisValue(thisValue); - - return Base::put(cell, globalObject, propertyName, value, slot); + RELEASE_AND_RETURN(scope, Base::put(cell, globalObject, propertyName, value, slot)); } // This is copy-pasted from JSC's ProxyObject.cpp static const ASCIILiteral s_proxyAlreadyRevokedErrorMessage { "Proxy has already been revoked. 
No more operations are allowed to be performed on it"_s }; +bool NodeVMSpecialSandbox::getOwnPropertySlot(JSObject* cell, JSGlobalObject* globalObject, PropertyName propertyName, PropertySlot& slot) +{ + VM& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); + + auto* thisObject = jsCast(cell); + NodeVMGlobalObject* parentGlobal = thisObject->parentGlobal(); + + if (propertyName.uid()->utf8() == "globalThis") [[unlikely]] { + slot.disableCaching(); + slot.setThisValue(thisObject); + slot.setValue(thisObject, slot.attributes(), thisObject); + return true; + } + + bool result = parentGlobal->getOwnPropertySlot(parentGlobal, globalObject, propertyName, slot); + RETURN_IF_EXCEPTION(scope, false); + + if (result) { + return true; + } + + RELEASE_AND_RETURN(scope, Base::getOwnPropertySlot(cell, globalObject, propertyName, slot)); +} + bool NodeVMGlobalObject::getOwnPropertySlot(JSObject* cell, JSGlobalObject* globalObject, PropertyName propertyName, PropertySlot& slot) { VM& vm = JSC::getVM(globalObject); auto scope = DECLARE_THROW_SCOPE(vm); auto* thisObject = jsCast(cell); - if (thisObject->m_sandbox) { - auto* contextifiedObject = thisObject->m_sandbox.get(); + + bool notContextified = thisObject->isNotContextified(); + + if (notContextified && propertyName.uid()->utf8() == "globalThis") [[unlikely]] { + slot.disableCaching(); + slot.setThisValue(thisObject); + slot.setValue(thisObject, slot.attributes(), thisObject->specialSandbox()); + return true; + } + + if (JSObject* contextifiedObject = thisObject->contextifiedObject()) { slot.setThisValue(contextifiedObject); // Unfortunately we must special case ProxyObjects. Why? // @@ -714,8 +965,12 @@ bool NodeVMGlobalObject::getOwnPropertySlot(JSObject* cell, JSGlobalObject* glob goto try_from_global; } - if (contextifiedObject->getPropertySlot(globalObject, propertyName, slot)) { - return true; + if (!notContextified) { + bool result = contextifiedObject->getPropertySlot(globalObject, propertyName, slot); + RETURN_IF_EXCEPTION(scope, false); + if (result) { + return true; + } } try_from_global: @@ -724,47 +979,61 @@ bool NodeVMGlobalObject::getOwnPropertySlot(JSObject* cell, JSGlobalObject* glob RETURN_IF_EXCEPTION(scope, false); } - return Base::getOwnPropertySlot(cell, globalObject, propertyName, slot); + bool result = Base::getOwnPropertySlot(cell, globalObject, propertyName, slot); + RETURN_IF_EXCEPTION(scope, false); + + if (result) { + return true; + } + + if (thisObject->m_contextOptions.notContextified) { + JSObject* specialSandbox = thisObject->specialSandbox(); + RELEASE_AND_RETURN(scope, JSObject::getOwnPropertySlot(specialSandbox, globalObject, propertyName, slot)); + } + + return false; } bool NodeVMGlobalObject::defineOwnProperty(JSObject* cell, JSGlobalObject* globalObject, PropertyName propertyName, const PropertyDescriptor& descriptor, bool shouldThrow) { - // if (!propertyName.isSymbol()) - // printf("defineOwnProperty called for %s\n", propertyName.publicName()->utf8().data()); - auto* thisObject = jsCast(cell); - if (!thisObject->m_sandbox) { - return Base::defineOwnProperty(cell, globalObject, propertyName, descriptor, shouldThrow); - } - - auto* contextifiedObject = thisObject->m_sandbox.get(); VM& vm = JSC::getVM(globalObject); auto scope = DECLARE_THROW_SCOPE(vm); + auto* thisObject = jsCast(cell); + if (!thisObject->m_sandbox) { + RELEASE_AND_RETURN(scope, Base::defineOwnProperty(cell, globalObject, propertyName, descriptor, shouldThrow)); + } + + auto* contextifiedObject = 
thisObject->isNotContextified() ? thisObject->specialSandbox() : thisObject->m_sandbox.get(); + PropertySlot slot(globalObject, PropertySlot::InternalMethodType::GetOwnProperty, nullptr); bool isDeclaredOnGlobalProxy = globalObject->JSC::JSGlobalObject::getOwnPropertySlot(globalObject, globalObject, propertyName, slot); // If the property is set on the global as neither writable nor // configurable, don't change it on the global or sandbox. if (isDeclaredOnGlobalProxy && (slot.attributes() & PropertyAttribute::ReadOnly) != 0 && (slot.attributes() & PropertyAttribute::DontDelete) != 0) { - return Base::defineOwnProperty(cell, globalObject, propertyName, descriptor, shouldThrow); + RELEASE_AND_RETURN(scope, Base::defineOwnProperty(cell, globalObject, propertyName, descriptor, shouldThrow)); } if (descriptor.isAccessorDescriptor()) { - return contextifiedObject->defineOwnProperty(contextifiedObject, contextifiedObject->globalObject(), propertyName, descriptor, shouldThrow); + RELEASE_AND_RETURN(scope, JSObject::defineOwnProperty(contextifiedObject, contextifiedObject->globalObject(), propertyName, descriptor, shouldThrow)); } bool isDeclaredOnSandbox = contextifiedObject->getPropertySlot(globalObject, propertyName, slot); RETURN_IF_EXCEPTION(scope, false); if (isDeclaredOnSandbox && !isDeclaredOnGlobalProxy) { - return contextifiedObject->defineOwnProperty(contextifiedObject, contextifiedObject->globalObject(), propertyName, descriptor, shouldThrow); + RELEASE_AND_RETURN(scope, JSObject::defineOwnProperty(contextifiedObject, contextifiedObject->globalObject(), propertyName, descriptor, shouldThrow)); } - if (!contextifiedObject->defineOwnProperty(contextifiedObject, contextifiedObject->globalObject(), propertyName, descriptor, shouldThrow)) { + bool result = JSObject::defineOwnProperty(contextifiedObject, contextifiedObject->globalObject(), propertyName, descriptor, shouldThrow); + RETURN_IF_EXCEPTION(scope, false); + + if (!result) { return false; } - return Base::defineOwnProperty(cell, globalObject, propertyName, descriptor, shouldThrow); + RELEASE_AND_RETURN(scope, Base::defineOwnProperty(cell, globalObject, propertyName, descriptor, shouldThrow)); } DEFINE_VISIT_CHILDREN(NodeVMGlobalObject); @@ -775,6 +1044,8 @@ void NodeVMGlobalObject::visitChildrenImpl(JSCell* cell, Visitor& visitor) Base::visitChildren(cell, visitor); auto* thisObject = jsCast(cell); visitor.append(thisObject->m_sandbox); + visitor.append(thisObject->m_specialSandbox); + visitor.append(thisObject->m_dynamicImportCallback); } JSC_DEFINE_HOST_FUNCTION(vmModuleRunInNewContext, (JSGlobalObject * globalObject, CallFrame* callFrame)) @@ -787,41 +1058,44 @@ JSC_DEFINE_HOST_FUNCTION(vmModuleRunInNewContext, (JSGlobalObject * globalObject return ERR::INVALID_ARG_TYPE(scope, globalObject, "code"_s, "string"_s, code); JSValue contextArg = callFrame->argument(1); - if (contextArg.isUndefined()) { - contextArg = JSC::constructEmptyObject(globalObject); - } + bool notContextified = getContextArg(globalObject, contextArg); - if (!contextArg.isObject()) + if (!contextArg.isObject()) { return ERR::INVALID_ARG_TYPE(scope, globalObject, "context"_s, "object"_s, contextArg); + } JSObject* sandbox = asObject(contextArg); JSValue contextOptionsArg = callFrame->argument(2); - NodeVMContextOptions contextOptions {}; - if (auto encodedException = getNodeVMContextOptions(globalObject, vm, scope, contextOptionsArg, contextOptions, "contextCodeGeneration")) { + JSValue globalObjectDynamicImportCallback; + + if (auto encodedException = 
getNodeVMContextOptions(globalObject, vm, scope, contextOptionsArg, contextOptions, "contextCodeGeneration", &globalObjectDynamicImportCallback)) { return *encodedException; } + contextOptions.notContextified = notContextified; + // Create context and run code auto* context = NodeVMGlobalObject::create(vm, defaultGlobalObject(globalObject)->NodeVMGlobalObjectStructure(), - contextOptions); + contextOptions, globalObjectDynamicImportCallback); context->setContextifiedObject(sandbox); JSValue optionsArg = callFrame->argument(2); + JSValue scriptDynamicImportCallback; ScriptOptions options(optionsArg.toWTFString(globalObject), OrdinalNumber::fromZeroBasedInt(0), OrdinalNumber::fromZeroBasedInt(0)); if (optionsArg.isString()) { options.filename = optionsArg.toWTFString(globalObject); RETURN_IF_EXCEPTION(scope, {}); - } else if (!options.fromJS(globalObject, vm, scope, optionsArg)) { + } else if (!options.fromJS(globalObject, vm, scope, optionsArg, &scriptDynamicImportCallback)) { RETURN_IF_EXCEPTION(scope, {}); } - RefPtr fetcher(NodeVMScriptFetcher::create(vm, options.importer)); + RefPtr fetcher(NodeVMScriptFetcher::create(vm, scriptDynamicImportCallback, jsUndefined())); SourceCode sourceCode( JSC::StringSourceProvider::create( @@ -857,19 +1131,21 @@ JSC_DEFINE_HOST_FUNCTION(vmModuleRunInThisContext, (JSGlobalObject * globalObjec return ERR::INVALID_ARG_TYPE(throwScope, globalObject, "code"_s, "string"_s, sourceStringValue); } - auto sourceString = sourceStringValue.toWTFString(globalObject); + String sourceString = sourceStringValue.toWTFString(globalObject); RETURN_IF_EXCEPTION(throwScope, encodedJSUndefined()); + JSValue importer; + JSValue optionsArg = callFrame->argument(1); ScriptOptions options(optionsArg.toWTFString(globalObject), OrdinalNumber::fromZeroBasedInt(0), OrdinalNumber::fromZeroBasedInt(0)); if (optionsArg.isString()) { options.filename = optionsArg.toWTFString(globalObject); RETURN_IF_EXCEPTION(throwScope, {}); - } else if (!options.fromJS(globalObject, vm, throwScope, optionsArg)) { + } else if (!options.fromJS(globalObject, vm, throwScope, optionsArg, &importer)) { RETURN_IF_EXCEPTION(throwScope, encodedJSUndefined()); } - RefPtr fetcher(NodeVMScriptFetcher::create(vm, options.importer)); + RefPtr fetcher(NodeVMScriptFetcher::create(vm, importer, jsUndefined())); SourceCode source( JSC::StringSourceProvider::create(sourceString, JSC::SourceOrigin(WTF::URL::fileURLWithFileSystemPath(options.filename), *fetcher), options.filename, JSC::SourceTaintedOrigin::Untainted, TextPosition(options.lineOffset, options.columnOffset)), @@ -922,7 +1198,9 @@ JSC_DEFINE_HOST_FUNCTION(vmModuleCompileFunction, (JSGlobalObject * globalObject // Get options argument JSValue optionsArg = callFrame->argument(2); CompileFunctionOptions options; - if (!options.fromJS(globalObject, vm, scope, optionsArg)) { + JSValue importer; + + if (!options.fromJS(globalObject, vm, scope, optionsArg, &importer)) { RETURN_IF_EXCEPTION(scope, {}); options = {}; options.parsingContext = globalObject; @@ -944,7 +1222,7 @@ JSC_DEFINE_HOST_FUNCTION(vmModuleCompileFunction, (JSGlobalObject * globalObject // Add the function body constructFunctionArgs.append(jsString(vm, sourceString)); - RefPtr fetcher(NodeVMScriptFetcher::create(vm, options.importer)); + RefPtr fetcher(NodeVMScriptFetcher::create(vm, importer, jsUndefined())); // Create the source origin SourceOrigin sourceOrigin { WTF::URL::fileURLWithFileSystemPath(options.filename), *fetcher }; @@ -978,14 +1256,18 @@ 
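// A short sketch of how the dynamic-import plumbing above fits together (the
// identifiers are the ones used in this file): the importModuleDynamically value
// parsed from the options rides along in a NodeVMScriptFetcher attached to the
// script's SourceOrigin, and a later import() inside the script recovers it
// through sourceOrigin.fetcher().
//
//     RefPtr<NodeVMScriptFetcher> fetcher(NodeVMScriptFetcher::create(vm, importer, jsUndefined()));
//     SourceOrigin origin { WTF::URL::fileURLWithFileSystemPath(options.filename), *fetcher };
//     // importModule() later retrieves the callback from this origin's fetcher.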
JSC_DEFINE_HOST_FUNCTION(vmModuleCompileFunction, (JSGlobalObject * globalObject // Create the function using constructAnonymousFunction with the appropriate scope chain JSFunction* function = constructAnonymousFunction(globalObject, ArgList(constructFunctionArgs), sourceOrigin, WTFMove(options), JSC::SourceTaintedOrigin::Untainted, functionScope); - fetcher->owner(vm, function); - RETURN_IF_EXCEPTION(scope, {}); if (!function) { return throwVMError(globalObject, scope, "Failed to compile function"_s); } + fetcher->owner(vm, function); + + if (!function) { + return throwVMError(globalObject, scope, "Failed to compile function"_s); + } + return JSValue::encode(function); } @@ -994,31 +1276,16 @@ Structure* createNodeVMGlobalObjectStructure(JSC::VM& vm) return NodeVMGlobalObject::createStructure(vm, jsNull()); } -NodeVMGlobalObject* createContextImpl(JSC::VM& vm, JSGlobalObject* globalObject, JSObject* sandbox) -{ - auto* targetContext = NodeVMGlobalObject::create(vm, - defaultGlobalObject(globalObject)->NodeVMGlobalObjectStructure(), - NodeVMContextOptions {}); - - // Set sandbox as contextified object - targetContext->setContextifiedObject(sandbox); - - // Store context in WeakMap for isContext checks - auto* zigGlobalObject = defaultGlobalObject(globalObject); - zigGlobalObject->vmModuleContextMap()->set(vm, sandbox, targetContext); - - return targetContext; -} - JSC_DEFINE_HOST_FUNCTION(vmModule_createContext, (JSGlobalObject * globalObject, CallFrame* callFrame)) { VM& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); + NodeVMContextOptions contextOptions {}; + JSValue contextArg = callFrame->argument(0); - if (contextArg.isUndefined()) { - contextArg = JSC::constructEmptyObject(globalObject); - } + bool notContextified = getContextArg(globalObject, contextArg); + RETURN_IF_EXCEPTION(scope, {}); if (!contextArg.isObject()) { return ERR::INVALID_ARG_TYPE(scope, globalObject, "context"_s, "object"_s, contextArg); @@ -1031,25 +1298,48 @@ JSC_DEFINE_HOST_FUNCTION(vmModule_createContext, (JSGlobalObject * globalObject, return ERR::INVALID_ARG_TYPE(scope, globalObject, "options"_s, "object"_s, optionsArg); } - NodeVMContextOptions contextOptions {}; + JSValue importer; - if (auto encodedException = getNodeVMContextOptions(globalObject, vm, scope, optionsArg, contextOptions, "codeGeneration")) { + if (auto encodedException = getNodeVMContextOptions(globalObject, vm, scope, optionsArg, contextOptions, "codeGeneration", &importer)) { return *encodedException; } + contextOptions.notContextified = notContextified; + JSObject* sandbox = asObject(contextArg); + if (isContext(globalObject, sandbox)) { + if (auto* proxy = jsDynamicCast(sandbox)) { + if (auto* targetContext = jsDynamicCast(proxy->target())) { + if (targetContext->isNotContextified()) { + return JSValue::encode(targetContext->specialSandbox()); + } + } + } + return JSValue::encode(sandbox); + } + + auto* zigGlobalObject = defaultGlobalObject(globalObject); + auto* targetContext = NodeVMGlobalObject::create(vm, - defaultGlobalObject(globalObject)->NodeVMGlobalObjectStructure(), - contextOptions); + zigGlobalObject->NodeVMGlobalObjectStructure(), + contextOptions, importer); + + RETURN_IF_EXCEPTION(scope, {}); // Set sandbox as contextified object targetContext->setContextifiedObject(sandbox); // Store context in WeakMap for isContext checks - auto* zigGlobalObject = defaultGlobalObject(globalObject); zigGlobalObject->vmModuleContextMap()->set(vm, sandbox, targetContext); + if (notContextified) { + auto* specialSandbox = 
NodeVMSpecialSandbox::create(vm, zigGlobalObject->NodeVMSpecialSandboxStructure(), targetContext); + RETURN_IF_EXCEPTION(scope, {}); + targetContext->setSpecialSandbox(specialSandbox); + return JSValue::encode(targetContext->specialSandbox()); + } + return JSValue::encode(sandbox); } @@ -1059,39 +1349,12 @@ JSC_DEFINE_HOST_FUNCTION(vmModule_isContext, (JSGlobalObject * globalObject, Cal JSValue contextArg = callFrame->argument(0); VM& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); - bool isContext; if (!contextArg || !contextArg.isObject()) { - isContext = false; return ERR::INVALID_ARG_TYPE(scope, globalObject, "object"_s, "object"_s, contextArg); - } else { - auto* zigGlobalObject = defaultGlobalObject(globalObject); - isContext = zigGlobalObject->vmModuleContextMap()->has(asObject(contextArg)); } - return JSValue::encode(jsBoolean(isContext)); + return JSValue::encode(jsBoolean(isContext(globalObject, contextArg))); } -// NodeVMGlobalObject* NodeVMGlobalObject::create(JSC::VM& vm, JSC::Structure* structure) -// { -// auto* obj = new (NotNull, allocateCell(vm)) NodeVMGlobalObject(vm, structure); -// obj->finishCreation(vm); -// return obj; -// } - -// void NodeVMGlobalObject::finishCreation(VM& vm, JSObject* context) -// { -// Base::finishCreation(vm); -// // We don't need to store the context anymore since we use proxies -// } - -// DEFINE_VISIT_CHILDREN(NodeVMGlobalObject); - -// template -// void NodeVMGlobalObject::visitChildrenImpl(JSCell* cell, Visitor& visitor) -// { -// Base::visitChildren(cell, visitor); -// // auto* thisObject = jsCast(cell); -// // visitor.append(thisObject->m_proxyTarget); -// } const ClassInfo NodeVMGlobalObject::s_info = { "NodeVMGlobalObject"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(NodeVMGlobalObject) }; bool NodeVMGlobalObject::deleteProperty(JSCell* cell, JSGlobalObject* globalObject, PropertyName propertyName, JSC::DeletePropertySlot& slot) @@ -1113,19 +1376,55 @@ bool NodeVMGlobalObject::deleteProperty(JSCell* cell, JSGlobalObject* globalObje return Base::deleteProperty(cell, globalObject, propertyName, slot); } +static JSInternalPromise* moduleLoaderImportModuleInner(NodeVMGlobalObject* globalObject, JSC::JSModuleLoader* moduleLoader, JSC::JSString* moduleName, JSC::JSValue parameters, const JSC::SourceOrigin& sourceOrigin) +{ + VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + auto* promise = JSInternalPromise::create(vm, globalObject->internalPromiseStructure()); + + if (sourceOrigin.fetcher() == nullptr && sourceOrigin.url().isEmpty()) { + if (globalObject->dynamicImportCallback().isCallable()) { + return NodeVM::importModuleInner(globalObject, moduleName, parameters, sourceOrigin, globalObject->dynamicImportCallback(), JSValue {}); + } + + promise->reject(globalObject, createError(globalObject, ErrorCode::ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING, "A dynamic import callback was not specified."_s)); + return promise; + } + + // Default behavior copied from JSModuleLoader::importModule + auto moduleNameString = moduleName->value(globalObject); + RETURN_IF_EXCEPTION(scope, promise->rejectWithCaughtException(globalObject, scope)); + + scope.release(); + promise->reject(globalObject, createError(globalObject, makeString("Could not import the module '"_s, moduleNameString.data, "'."_s))); + return promise; +} + +JSInternalPromise* NodeVMGlobalObject::moduleLoaderImportModule(JSGlobalObject* globalObject, JSC::JSModuleLoader* moduleLoader, JSC::JSString* moduleName, JSC::JSValue parameters, const 
JSC::SourceOrigin& sourceOrigin) +{ + auto* nodeVmGlobalObject = static_cast(globalObject); + + if (JSInternalPromise* result = NodeVM::importModule(nodeVmGlobalObject, moduleName, parameters, sourceOrigin)) { + return result; + } + + return moduleLoaderImportModuleInner(nodeVmGlobalObject, moduleLoader, moduleName, parameters, sourceOrigin); +} + void NodeVMGlobalObject::getOwnPropertyNames(JSObject* cell, JSGlobalObject* globalObject, JSC::PropertyNameArray& propertyNames, JSC::DontEnumPropertiesMode mode) { auto* thisObject = jsCast(cell); - if (thisObject->m_sandbox) { - thisObject->m_sandbox->getOwnPropertyNames( - thisObject->m_sandbox.get(), - globalObject, - propertyNames, - mode); + VM& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); + + if (thisObject->m_sandbox) [[likely]] { + thisObject->m_sandbox->getOwnPropertyNames(thisObject->m_sandbox.get(), globalObject, propertyNames, mode); + RETURN_IF_EXCEPTION(scope, ); } - Base::getOwnPropertyNames(cell, globalObject, propertyNames, mode); + RELEASE_AND_RETURN(scope, Base::getOwnPropertyNames(cell, globalObject, propertyNames, mode)); } JSC_DEFINE_HOST_FUNCTION(vmIsModuleNamespaceObject, (JSGlobalObject * globalObject, CallFrame* callFrame)) @@ -1145,7 +1444,7 @@ JSC::JSValue createNodeVMBinding(Zig::GlobalObject* globalObject) defaultGlobalObject(globalObject)->NodeVMSourceTextModule(), 0); obj->putDirect( vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "createContext"_s)), - JSC::JSFunction::create(vm, globalObject, 0, "createContext"_s, vmModule_createContext, ImplementationVisibility::Public), 0); + JSC::JSFunction::create(vm, globalObject, 0, "createContext"_s, vmModule_createContext, ImplementationVisibility::Public, Intrinsic::NoIntrinsic, vmModule_createContext), 0); obj->putDirect( vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "isContext"_s)), JSC::JSFunction::create(vm, globalObject, 0, "isContext"_s, vmModule_isContext, ImplementationVisibility::Public), 0); @@ -1185,11 +1484,24 @@ JSC::JSValue createNodeVMBinding(Zig::GlobalObject* globalObject) obj->putDirect( vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "kSynthetic"_s)), JSC::jsNumber(static_cast(NodeVMModule::Type::Synthetic)), 0); + obj->putDirect( + vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "DONT_CONTEXTIFY"_s)), + globalObject->m_nodeVMDontContextify.get(globalObject), 0); + obj->putDirect( + vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "USE_MAIN_CONTEXT_DEFAULT_LOADER"_s)), + globalObject->m_nodeVMUseMainContextDefaultLoader.get(globalObject), 0); return obj; } void configureNodeVM(JSC::VM& vm, Zig::GlobalObject* globalObject) { + globalObject->m_nodeVMDontContextify.initLater([](const LazyProperty::Initializer& init) { + init.set(JSC::Symbol::createWithDescription(init.vm, "vm_dont_contextify"_s)); + }); + globalObject->m_nodeVMUseMainContextDefaultLoader.initLater([](const LazyProperty::Initializer& init) { + init.set(JSC::Symbol::createWithDescription(init.vm, "vm_use_main_context_default_loader"_s)); + }); + globalObject->m_NodeVMScriptClassStructure.initLater( [](LazyClassStructure::Initializer& init) { auto prototype = NodeVMScript::createPrototype(init.vm, init.global); @@ -1233,6 +1545,11 @@ void configureNodeVM(JSC::VM& vm, Zig::GlobalObject* globalObject) [](const JSC::LazyProperty::Initializer& init) { init.set(createNodeVMGlobalObjectStructure(init.vm)); }); + + globalObject->m_cachedNodeVMSpecialSandboxStructure.initLater( + [](const JSC::LazyProperty::Initializer& init) { + 
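// initLater() registers a thunk that runs at most once, on first access, so this
// structure, like the lazily initialized class structures above, is only
// materialized if node:vm code actually needs it.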
init.set(NodeVMSpecialSandbox::createStructure(init.vm, init.owner, init.owner->objectPrototype())); // TODO(@heimskr): or maybe jsNull() for the prototype? + }); } BaseVMOptions::BaseVMOptions(String filename) @@ -1260,7 +1577,9 @@ bool BaseVMOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC:: return false; } - if (JSValue filenameOpt = options->getIfPropertyExists(globalObject, builtinNames(vm).filenamePublicName())) { + auto filenameOpt = options->getIfPropertyExists(globalObject, builtinNames(vm).filenamePublicName()); + RETURN_IF_EXCEPTION(scope, false); + if (filenameOpt) { if (filenameOpt.isString()) { this->filename = filenameOpt.toWTFString(globalObject); RETURN_IF_EXCEPTION(scope, false); @@ -1273,7 +1592,9 @@ bool BaseVMOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC:: this->filename = "evalmachine."_s; } - if (JSValue lineOffsetOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "lineOffset"_s))) { + auto lineOffsetOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "lineOffset"_s)); + RETURN_IF_EXCEPTION(scope, false); + if (lineOffsetOpt) { if (lineOffsetOpt.isAnyInt()) { if (!lineOffsetOpt.isInt32()) { ERR::OUT_OF_RANGE(scope, globalObject, "options.lineOffset"_s, std::numeric_limits().min(), std::numeric_limits().max(), lineOffsetOpt); @@ -1290,7 +1611,9 @@ bool BaseVMOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC:: } } - if (JSValue columnOffsetOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "columnOffset"_s))) { + auto columnOffsetOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "columnOffset"_s)); + RETURN_IF_EXCEPTION(scope, false); + if (columnOffsetOpt) { if (columnOffsetOpt.isAnyInt()) { if (!columnOffsetOpt.isInt32()) { ERR::OUT_OF_RANGE(scope, globalObject, "options.columnOffset"_s, std::numeric_limits().min(), std::numeric_limits().max(), columnOffsetOpt); @@ -1316,8 +1639,8 @@ bool BaseVMOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC:: bool BaseVMOptions::validateProduceCachedData(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSObject* options, bool& outProduceCachedData) { JSValue produceCachedDataOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "produceCachedData"_s)); + RETURN_IF_EXCEPTION(scope, false); if (produceCachedDataOpt && !produceCachedDataOpt.isUndefined()) { - RETURN_IF_EXCEPTION(scope, {}); if (!produceCachedDataOpt.isBoolean()) { ERR::INVALID_ARG_TYPE(scope, globalObject, "options.produceCachedData"_s, "boolean"_s, produceCachedDataOpt); return false; @@ -1348,6 +1671,7 @@ bool BaseVMOptions::validateCachedData(JSC::JSGlobalObject* globalObject, JSC::V bool BaseVMOptions::validateTimeout(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSObject* options, std::optional& outTimeout) { JSValue timeoutOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "timeout"_s)); + RETURN_IF_EXCEPTION(scope, false); if (timeoutOpt && !timeoutOpt.isUndefined()) { if (!timeoutOpt.isNumber()) { ERR::INVALID_ARG_TYPE(scope, globalObject, "options.timeout"_s, "number"_s, timeoutOpt); @@ -1364,8 +1688,12 @@ bool BaseVMOptions::validateTimeout(JSC::JSGlobalObject* globalObject, JSC::VM& return false; } -bool CompileFunctionOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSC::JSValue optionsArg) +bool CompileFunctionOptions::fromJS(JSC::JSGlobalObject* 
globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSC::JSValue optionsArg, JSValue* importer) { + if (importer) { + *importer = jsUndefined(); + } + this->parsingContext = globalObject; bool any = BaseVMOptions::fromJS(globalObject, vm, scope, optionsArg); RETURN_IF_EXCEPTION(scope, false); @@ -1436,8 +1764,10 @@ bool CompileFunctionOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& JSValue importModuleDynamicallyValue = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "importModuleDynamically"_s)); RETURN_IF_EXCEPTION(scope, {}); - if (importModuleDynamicallyValue && importModuleDynamicallyValue.isCallable()) { - this->importer = importModuleDynamicallyValue; + if (importModuleDynamicallyValue && (importModuleDynamicallyValue.isCallable() || isUseMainContextDefaultLoaderConstant(globalObject, importModuleDynamicallyValue))) { + if (importer) { + *importer = importModuleDynamicallyValue; + } any = true; } } diff --git a/src/bun.js/bindings/NodeVM.h b/src/bun.js/bindings/NodeVM.h index 43e85b7981..797af6aa4f 100644 --- a/src/bun.js/bindings/NodeVM.h +++ b/src/bun.js/bindings/NodeVM.h @@ -25,65 +25,19 @@ RefPtr getBytecode(JSGlobalObject* globalObject, JSC::Modul bool extractCachedData(JSValue cachedDataValue, WTF::Vector& outCachedData); String stringifyAnonymousFunction(JSGlobalObject* globalObject, const ArgList& args, ThrowScope& scope, int* outOffset); JSC::EncodedJSValue createCachedData(JSGlobalObject* globalObject, const JSC::SourceCode& source); -NodeVMGlobalObject* createContextImpl(JSC::VM& vm, JSGlobalObject* globalObject, JSObject* sandbox); bool handleException(JSGlobalObject* globalObject, VM& vm, NakedPtr exception, ThrowScope& throwScope); -std::optional getNodeVMContextOptions(JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSValue optionsArg, NodeVMContextOptions& outOptions, ASCIILiteral codeGenerationKey); +std::optional getNodeVMContextOptions(JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSValue optionsArg, NodeVMContextOptions& outOptions, ASCIILiteral codeGenerationKey, JSValue* importer); NodeVMGlobalObject* getGlobalObjectFromContext(JSGlobalObject* globalObject, JSValue contextValue, bool canThrow); JSC::EncodedJSValue INVALID_ARG_VALUE_VM_VARIATION(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral name, JSC::JSValue value); // For vm.compileFunction we need to return an anonymous function expression. This code is adapted from/inspired by JSC::constructFunction, which is used for function declarations. 
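A recurring change in these hunks: `getIfPropertyExists` is hoisted out of the `if` condition so that `RETURN_IF_EXCEPTION` runs unconditionally, before the returned value is tested. A minimal sketch of the resulting shape; `readOption` and its parameters are illustrative only, not part of this patch:

```cpp
// Sketch of the hoisted-lookup pattern, assuming JSC's ThrowScope macros.
static JSC::JSValue readOption(JSC::JSGlobalObject* globalObject, JSC::JSObject* options,
    JSC::PropertyName name, JSC::ThrowScope& scope)
{
    // Old shape: `if (JSValue v = options->getIfPropertyExists(...)) { ... }`
    // skipped the exception check whenever the property was absent or falsy,
    // so a throwing getter or Proxy trap could leave a pending exception.
    JSC::JSValue value = options->getIfPropertyExists(globalObject, name);
    RETURN_IF_EXCEPTION(scope, {}); // observe any exception before using `value`
    return value; // empty/undefined means "not provided"; the caller applies defaults
}
```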
JSC::JSFunction* constructAnonymousFunction(JSC::JSGlobalObject* globalObject, const ArgList& args, const SourceOrigin& sourceOrigin, CompileFunctionOptions&& options, JSC::SourceTaintedOrigin sourceTaintOrigin, JSC::JSScope* scope); JSInternalPromise* importModule(JSGlobalObject* globalObject, JSString* moduleNameValue, JSValue parameters, const SourceOrigin& sourceOrigin); +bool isContext(JSC::JSGlobalObject* globalObject, JSValue); +bool getContextArg(JSC::JSGlobalObject* globalObject, JSValue& contextArg); +bool isUseMainContextDefaultLoaderConstant(JSC::JSGlobalObject* globalObject, JSValue value); } // namespace NodeVM -// This class represents a sandboxed global object for vm contexts -class NodeVMGlobalObject final : public Bun::GlobalScope { - using Base = Bun::GlobalScope; - -public: - static constexpr unsigned StructureFlags = Base::StructureFlags | JSC::OverridesGetOwnPropertySlot | JSC::OverridesPut | JSC::OverridesGetOwnPropertyNames | JSC::GetOwnPropertySlotMayBeWrongAboutDontEnum | JSC::ProhibitsPropertyCaching; - static constexpr JSC::DestructionMode needsDestruction = NeedsDestruction; - - template static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm); - static NodeVMGlobalObject* create(JSC::VM& vm, JSC::Structure* structure, NodeVMContextOptions options); - static Structure* createStructure(JSC::VM& vm, JSC::JSValue prototype); - - DECLARE_INFO; - DECLARE_VISIT_CHILDREN; - - void finishCreation(JSC::VM&, NodeVMContextOptions options); - static void destroy(JSCell* cell); - void setContextifiedObject(JSC::JSObject* contextifiedObject); - JSC::JSObject* contextifiedObject() const { return m_sandbox.get(); } - void clearContextifiedObject(); - void sigintReceived(); - - // Override property access to delegate to contextified object - static bool getOwnPropertySlot(JSObject*, JSGlobalObject*, JSC::PropertyName, JSC::PropertySlot&); - static bool put(JSCell*, JSGlobalObject*, JSC::PropertyName, JSC::JSValue, JSC::PutPropertySlot&); - static void getOwnPropertyNames(JSObject*, JSGlobalObject*, JSC::PropertyNameArray&, JSC::DontEnumPropertiesMode); - static bool defineOwnProperty(JSObject* object, JSGlobalObject* globalObject, PropertyName propertyName, const PropertyDescriptor& descriptor, bool shouldThrow); - static bool deleteProperty(JSCell* cell, JSGlobalObject* globalObject, PropertyName propertyName, JSC::DeletePropertySlot& slot); - -private: - NodeVMGlobalObject(JSC::VM& vm, JSC::Structure* structure); - ~NodeVMGlobalObject(); - - // The contextified object that acts as the global proxy - mutable JSC::WriteBarrier m_sandbox; -}; - -// Helper functions to create vm contexts and run code -JSC::JSValue createNodeVMBinding(Zig::GlobalObject*); -Structure* createNodeVMGlobalObjectStructure(JSC::VM&); -void configureNodeVM(JSC::VM&, Zig::GlobalObject*); - -// VM module functions -JSC_DECLARE_HOST_FUNCTION(vmModule_createContext); -JSC_DECLARE_HOST_FUNCTION(vmModule_isContext); -JSC_DECLARE_HOST_FUNCTION(vmModuleRunInNewContext); -JSC_DECLARE_HOST_FUNCTION(vmModuleRunInThisContext); - class BaseVMOptions { public: String filename; @@ -106,18 +60,103 @@ public: WTF::Vector cachedData; JSGlobalObject* parsingContext = nullptr; JSValue contextExtensions {}; - JSValue importer {}; bool produceCachedData = false; using BaseVMOptions::BaseVMOptions; - bool fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSC::JSValue optionsArg); + bool fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSC::JSValue optionsArg, 
JSValue* importer); }; class NodeVMContextOptions final { public: bool allowStrings = true; bool allowWasm = true; + bool notContextified = false; }; +class NodeVMGlobalObject; + +class NodeVMSpecialSandbox final : public JSC::JSNonFinalObject { +public: + using Base = JSC::JSNonFinalObject; + + static constexpr unsigned StructureFlags = Base::StructureFlags | JSC::OverridesGetOwnPropertySlot; + + static NodeVMSpecialSandbox* create(VM& vm, Structure* structure, NodeVMGlobalObject* globalObject); + + DECLARE_INFO; + template static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm); + static Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype); + + static bool getOwnPropertySlot(JSObject*, JSGlobalObject*, JSC::PropertyName, JSC::PropertySlot&); + + NodeVMGlobalObject* parentGlobal() const { return m_parentGlobal.get(); } + +private: + WriteBarrier m_parentGlobal; + + NodeVMSpecialSandbox(VM& vm, Structure* structure, NodeVMGlobalObject* globalObject); + + void finishCreation(VM&); +}; + +// This class represents a sandboxed global object for vm contexts +class NodeVMGlobalObject final : public Bun::GlobalScope { +public: + using Base = Bun::GlobalScope; + + static constexpr unsigned StructureFlags = Base::StructureFlags | JSC::OverridesGetOwnPropertySlot | JSC::OverridesPut | JSC::OverridesGetOwnPropertyNames | JSC::GetOwnPropertySlotMayBeWrongAboutDontEnum | JSC::ProhibitsPropertyCaching; + static constexpr JSC::DestructionMode needsDestruction = NeedsDestruction; + + template static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm); + static NodeVMGlobalObject* create(JSC::VM& vm, JSC::Structure* structure, NodeVMContextOptions options, JSValue importer); + static Structure* createStructure(JSC::VM& vm, JSC::JSValue prototype); + static const JSC::GlobalObjectMethodTable& globalObjectMethodTable(); + + DECLARE_INFO; + DECLARE_VISIT_CHILDREN; + + ~NodeVMGlobalObject(); + + void finishCreation(JSC::VM&); + static void destroy(JSCell* cell); + void setContextifiedObject(JSC::JSObject* contextifiedObject); + JSObject* contextifiedObject() const { return m_sandbox.get(); } + void clearContextifiedObject(); + void sigintReceived(); + bool isNotContextified() const { return m_contextOptions.notContextified; } + NodeVMSpecialSandbox* specialSandbox() const { return m_specialSandbox.get(); } + void setSpecialSandbox(NodeVMSpecialSandbox* sandbox) { m_specialSandbox.set(vm(), this, sandbox); } + JSValue dynamicImportCallback() const { return m_dynamicImportCallback.get(); } + + // Override property access to delegate to contextified object + static bool getOwnPropertySlot(JSObject*, JSGlobalObject*, JSC::PropertyName, JSC::PropertySlot&); + static bool put(JSCell*, JSGlobalObject*, JSC::PropertyName, JSC::JSValue, JSC::PutPropertySlot&); + static void getOwnPropertyNames(JSObject*, JSGlobalObject*, JSC::PropertyNameArray&, JSC::DontEnumPropertiesMode); + static bool defineOwnProperty(JSObject* object, JSGlobalObject* globalObject, PropertyName propertyName, const PropertyDescriptor& descriptor, bool shouldThrow); + static bool deleteProperty(JSCell* cell, JSGlobalObject* globalObject, PropertyName propertyName, JSC::DeletePropertySlot& slot); + static JSC::JSInternalPromise* moduleLoaderImportModule(JSGlobalObject*, JSC::JSModuleLoader*, JSC::JSString* moduleNameValue, JSC::JSValue parameters, const JSC::SourceOrigin&); + +private: + // The contextified object that acts as the global proxy + WriteBarrier m_sandbox; + // A special object used when 
the context is not contextified. + WriteBarrier m_specialSandbox; + WriteBarrier m_dynamicImportCallback; + NodeVMContextOptions m_contextOptions {}; + + NodeVMGlobalObject(VM& vm, Structure* structure, NodeVMContextOptions contextOptions, JSValue importer); +}; + +// Helper functions to create vm contexts and run code +JSC::JSValue createNodeVMBinding(Zig::GlobalObject*); +Structure* createNodeVMGlobalObjectStructure(JSC::VM&); +void configureNodeVM(JSC::VM&, Zig::GlobalObject*); + +// VM module functions +JSC_DECLARE_HOST_FUNCTION(vmModule_createContext); +JSC_DECLARE_HOST_FUNCTION(vmModule_isContext); +JSC_DECLARE_HOST_FUNCTION(vmModuleRunInNewContext); +JSC_DECLARE_HOST_FUNCTION(vmModuleRunInThisContext); + } // namespace Bun diff --git a/src/bun.js/bindings/NodeVMModule.cpp b/src/bun.js/bindings/NodeVMModule.cpp index 6cfc93a5a1..14097cac21 100644 --- a/src/bun.js/bindings/NodeVMModule.cpp +++ b/src/bun.js/bindings/NodeVMModule.cpp @@ -29,15 +29,20 @@ JSArray* NodeVMModuleRequest::toJS(JSGlobalObject* globalObject) const JSArray* array = JSC::constructEmptyArray(globalObject, nullptr, 2); RETURN_IF_EXCEPTION(scope, {}); + array->putDirectIndex(globalObject, 0, JSC::jsString(globalObject->vm(), m_specifier)); + RETURN_IF_EXCEPTION(scope, {}); JSObject* attributes = JSC::constructEmptyObject(globalObject); RETURN_IF_EXCEPTION(scope, {}); + for (const auto& [key, value] : m_importAttributes) { attributes->putDirect(globalObject->vm(), JSC::Identifier::fromString(globalObject->vm(), key), JSC::jsString(globalObject->vm(), value), PropertyAttribute::ReadOnly | PropertyAttribute::DontDelete); + RETURN_IF_EXCEPTION(scope, {}); } array->putDirectIndex(globalObject, 1, attributes); + RETURN_IF_EXCEPTION(scope, {}); return array; } @@ -73,6 +78,7 @@ JSValue NodeVMModule::evaluate(JSGlobalObject* globalObject, uint32_t timeout, b JSValue result {}; NodeVMGlobalObject* nodeVmGlobalObject = NodeVM::getGlobalObjectFromContext(globalObject, m_context.get(), false); + RETURN_IF_EXCEPTION(scope, {}); if (nodeVmGlobalObject) { globalObject = nodeVmGlobalObject; @@ -82,13 +88,12 @@ JSValue NodeVMModule::evaluate(JSGlobalObject* globalObject, uint32_t timeout, b if (sourceTextThis) { status(Status::Evaluating); evaluateDependencies(globalObject, record, timeout, breakOnSigint); + RETURN_IF_EXCEPTION(scope, ); sourceTextThis->initializeImportMeta(globalObject); } else if (syntheticThis) { syntheticThis->evaluate(globalObject); } - if (scope.exception()) { - return; - } + RETURN_IF_EXCEPTION(scope, ); result = record->evaluate(globalObject, jsUndefined(), jsNumber(static_cast(JSGenerator::ResumeMode::NormalMode))); }; @@ -206,11 +211,11 @@ NodeVMModule* NodeVMModule::create(JSC::VM& vm, JSC::JSGlobalObject* globalObjec JSValue disambiguator = args.at(2); if (disambiguator.isString()) { - return NodeVMSourceTextModule::create(vm, globalObject, args); + RELEASE_AND_RETURN(scope, NodeVMSourceTextModule::create(vm, globalObject, args)); } if (disambiguator.inherits(JSArray::info())) { - return NodeVMSyntheticModule::create(vm, globalObject, args); + RELEASE_AND_RETURN(scope, NodeVMSyntheticModule::create(vm, globalObject, args)); } throwArgumentTypeError(*globalObject, scope, 2, "sourceText or syntheticExportNames"_s, "Module"_s, "Module"_s, "string or array"_s); @@ -227,11 +232,14 @@ JSModuleNamespaceObject* NodeVMModule::namespaceObject(JSC::JSGlobalObject* glob if (auto* thisObject = jsDynamicCast(this)) { VM& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); - object = 
thisObject->moduleRecord(globalObject)->getModuleNamespace(globalObject); + AbstractModuleRecord* record = thisObject->moduleRecord(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + object = record->getModuleNamespace(globalObject); RETURN_IF_EXCEPTION(scope, {}); if (object) { namespaceObject(vm, object); } + RETURN_IF_EXCEPTION(scope, {}); } else { RELEASE_ASSERT_NOT_REACHED_WITH_MESSAGE("NodeVMModule::namespaceObject called on an unsupported module type (%s)", classInfo()->className.characters()); } @@ -333,7 +341,7 @@ JSC_DEFINE_HOST_FUNCTION(jsNodeVmModuleGetNamespace, (JSC::JSGlobalObject * glob auto scope = DECLARE_THROW_SCOPE(vm); if (auto* thisObject = jsDynamicCast(callFrame->thisValue())) { - return JSValue::encode(thisObject->namespaceObject(globalObject)); + RELEASE_AND_RETURN(scope, JSValue::encode(thisObject->namespaceObject(globalObject))); } throwTypeError(globalObject, scope, "This function must be called on a SourceTextModule or SyntheticModule"_s); @@ -366,6 +374,7 @@ JSC_DEFINE_HOST_FUNCTION(jsNodeVmModuleGetModuleRequests, (JSC::JSGlobalObject * if (auto* sourceTextModule = jsDynamicCast(callFrame->thisValue())) { sourceTextModule->ensureModuleRecord(globalObject); + RETURN_IF_EXCEPTION(scope, {}); } const WTF::Vector& requests = thisObject->moduleRequests(); @@ -399,11 +408,11 @@ JSC_DEFINE_HOST_FUNCTION(jsNodeVmModuleEvaluate, (JSC::JSGlobalObject * globalOb } if (auto* thisObject = jsDynamicCast(callFrame->thisValue())) { - return JSValue::encode(thisObject->evaluate(globalObject, timeout, breakOnSigint)); - } else { - throwTypeError(globalObject, scope, "This function must be called on a SourceTextModule or SyntheticModule"_s); - return {}; + RELEASE_AND_RETURN(scope, JSValue::encode(thisObject->evaluate(globalObject, timeout, breakOnSigint))); } + + throwTypeError(globalObject, scope, "This function must be called on a SourceTextModule or SyntheticModule"_s); + return {}; } JSC_DEFINE_HOST_FUNCTION(jsNodeVmModuleLink, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) @@ -423,14 +432,11 @@ JSC_DEFINE_HOST_FUNCTION(jsNodeVmModuleLink, (JSC::JSGlobalObject * globalObject } if (auto* thisObject = jsDynamicCast(callFrame->thisValue())) { - return JSValue::encode(thisObject->link(globalObject, specifiers, moduleNatives, callFrame->argument(2))); - // return thisObject->link(globalObject, linker); - // } else if (auto* thisObject = jsDynamicCast(callFrame->thisValue())) { - // return thisObject->link(globalObject, specifiers, moduleNatives); - } else { - throwTypeError(globalObject, scope, "This function must be called on a SourceTextModule or SyntheticModule"_s); - return {}; + RELEASE_AND_RETURN(scope, JSValue::encode(thisObject->link(globalObject, specifiers, moduleNatives, callFrame->argument(2)))); } + + throwTypeError(globalObject, scope, "This function must be called on a SourceTextModule"_s); + return {}; } JSC_DEFINE_HOST_FUNCTION(jsNodeVmModuleInstantiate, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) @@ -439,11 +445,11 @@ JSC_DEFINE_HOST_FUNCTION(jsNodeVmModuleInstantiate, (JSC::JSGlobalObject * globa auto scope = DECLARE_THROW_SCOPE(vm); if (auto* thisObject = jsDynamicCast(callFrame->thisValue())) { - return JSValue::encode(thisObject->instantiate(globalObject)); + RELEASE_AND_RETURN(scope, JSValue::encode(thisObject->instantiate(globalObject))); } if (auto* thisObject = jsDynamicCast(callFrame->thisValue())) { - return JSValue::encode(thisObject->instantiate(globalObject)); + RELEASE_AND_RETURN(scope, 
JSValue::encode(thisObject->instantiate(globalObject))); } throwTypeError(globalObject, scope, "This function must be called on a SourceTextModule or SyntheticModule"_s); @@ -455,7 +461,7 @@ JSC_DEFINE_HOST_FUNCTION(jsNodeVmModuleSetExport, (JSC::JSGlobalObject * globalO VM& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); - if (auto* thisObject = jsCast(callFrame->thisValue())) { + if (auto* thisObject = jsDynamicCast(callFrame->thisValue())) { JSValue nameValue = callFrame->argument(0); if (!nameValue.isString()) { Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, "name"_str, "string"_s, nameValue); @@ -478,7 +484,7 @@ JSC_DEFINE_HOST_FUNCTION(jsNodeVmModuleCreateCachedData, (JSC::JSGlobalObject * auto scope = DECLARE_THROW_SCOPE(vm); if (auto* thisObject = jsDynamicCast(callFrame->thisValue())) { - return JSValue::encode(thisObject->cachedData(globalObject)); + RELEASE_AND_RETURN(scope, JSValue::encode(thisObject->cachedData(globalObject))); } throwTypeError(globalObject, scope, "This function must be called on a SourceTextModule"_s); @@ -517,6 +523,7 @@ constructModule(JSGlobalObject* globalObject, CallFrame* callFrame, JSValue newT ArgList args(callFrame); NodeVMModule* module = NodeVMModule::create(vm, globalObject, args); + RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(module); } diff --git a/src/bun.js/bindings/NodeVMScript.cpp b/src/bun.js/bindings/NodeVMScript.cpp index 4404c7141c..077629b910 100644 --- a/src/bun.js/bindings/NodeVMScript.cpp +++ b/src/bun.js/bindings/NodeVMScript.cpp @@ -9,6 +9,7 @@ #include "JavaScriptCore/ProgramCodeBlock.h" #include "JavaScriptCore/SourceCodeKey.h" +#include "NodeVMScriptFetcher.h" #include "../vm/SigintWatcher.h" #include @@ -16,8 +17,12 @@ namespace Bun { using namespace NodeVM; -bool ScriptOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSC::JSValue optionsArg) +bool ScriptOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSC::JSValue optionsArg, JSValue* importer) { + if (importer) { + *importer = jsUndefined(); + } + bool any = BaseVMOptions::fromJS(globalObject, vm, scope, optionsArg); RETURN_IF_EXCEPTION(scope, false); @@ -25,23 +30,25 @@ bool ScriptOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC:: JSObject* options = asObject(optionsArg); // Validate contextName and contextOrigin are strings - if (JSValue contextNameOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "contextName"_s))) { + auto contextNameOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "contextName"_s)); + RETURN_IF_EXCEPTION(scope, false); + if (contextNameOpt) { if (!contextNameOpt.isUndefined() && !contextNameOpt.isString()) { ERR::INVALID_ARG_TYPE(scope, globalObject, "options.contextName"_s, "string"_s, contextNameOpt); return false; } any = true; } - RETURN_IF_EXCEPTION(scope, false); - if (JSValue contextOriginOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "contextOrigin"_s))) { + auto contextOriginOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "contextOrigin"_s)); + RETURN_IF_EXCEPTION(scope, false); + if (contextOriginOpt) { if (!contextOriginOpt.isUndefined() && !contextOriginOpt.isString()) { ERR::INVALID_ARG_TYPE(scope, globalObject, "options.contextOrigin"_s, "string"_s, contextOriginOpt); return false; } any = true; } - RETURN_IF_EXCEPTION(scope, false); if (validateTimeout(globalObject, vm, scope, options, this->timeout)) { 
RETURN_IF_EXCEPTION(scope, false); @@ -62,9 +69,16 @@ bool ScriptOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC:: JSValue importModuleDynamicallyValue = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "importModuleDynamically"_s)); RETURN_IF_EXCEPTION(scope, {}); - if (importModuleDynamicallyValue && importModuleDynamicallyValue.isCallable()) { - this->importer = importModuleDynamicallyValue; - any = true; + if (importModuleDynamicallyValue) { + if ((importModuleDynamicallyValue.isCallable() || isUseMainContextDefaultLoaderConstant(globalObject, importModuleDynamicallyValue))) { + if (importer) { + *importer = importModuleDynamicallyValue; + } + any = true; + } else if (!importModuleDynamicallyValue.isUndefined()) { + ERR::INVALID_ARG_TYPE(scope, globalObject, "options.importModuleDynamically"_s, "function"_s, importModuleDynamicallyValue); + return false; + } } } @@ -78,15 +92,22 @@ constructScript(JSGlobalObject* globalObject, CallFrame* callFrame, JSValue newT auto scope = DECLARE_THROW_SCOPE(vm); ArgList args(callFrame); JSValue sourceArg = args.at(0); - String sourceString = sourceArg.isUndefined() ? emptyString() : sourceArg.toWTFString(globalObject); - RETURN_IF_EXCEPTION(scope, encodedJSUndefined()); + String sourceString; + if (sourceArg.isUndefined()) { + sourceString = emptyString(); + } else { + sourceString = sourceArg.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, encodedJSUndefined()); + } JSValue optionsArg = args.at(1); ScriptOptions options(""_s); + JSValue importer; + if (optionsArg.isString()) { options.filename = optionsArg.toWTFString(globalObject); RETURN_IF_EXCEPTION(scope, {}); - } else if (!options.fromJS(globalObject, vm, scope, optionsArg)) { + } else if (!options.fromJS(globalObject, vm, scope, optionsArg, &importer)) { RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined())); } @@ -106,13 +127,18 @@ constructScript(JSGlobalObject* globalObject, CallFrame* callFrame, JSValue newT scope.release(); } - SourceCode source = makeSource(sourceString, JSC::SourceOrigin(WTF::URL::fileURLWithFileSystemPath(options.filename)), JSC::SourceTaintedOrigin::Untainted, options.filename, TextPosition(options.lineOffset, options.columnOffset)); + RefPtr fetcher(NodeVMScriptFetcher::create(vm, importer, jsUndefined())); + + SourceCode source = makeSource(sourceString, JSC::SourceOrigin(WTF::URL::fileURLWithFileSystemPath(options.filename), *fetcher), JSC::SourceTaintedOrigin::Untainted, options.filename, TextPosition(options.lineOffset, options.columnOffset)); RETURN_IF_EXCEPTION(scope, {}); const bool produceCachedData = options.produceCachedData; auto filename = options.filename; NodeVMScript* script = NodeVMScript::create(vm, globalObject, structure, WTFMove(source), WTFMove(options)); + RETURN_IF_EXCEPTION(scope, {}); + + fetcher->owner(vm, script); WTF::Vector& cachedData = script->cachedData(); @@ -137,6 +163,7 @@ constructScript(JSGlobalObject* globalObject, CallFrame* callFrame, JSValue newT // JSC::ProgramCodeBlock::create() requires GC to be deferred. 
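As the comment above notes, `JSC::ProgramCodeBlock::create()` must run while GC is deferred. A minimal restatement of how the RAII guard and the newly added exception check cooperate; the names come from the surrounding hunk:

```cpp
{
    // DeferGC pins the garbage collector for this scope, so partially
    // initialized cells wired up by ProgramCodeBlock::create() cannot be
    // collected mid-construction.
    DeferGC deferGC(vm);
    codeBlock = JSC::ProgramCodeBlock::create(vm, executable, unlinkedBlock, jsScope);
    RETURN_IF_EXCEPTION(scope, {}); // newly added: surface OOM/compile failure here
} // deferGC's destructor re-enables GC
```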
DeferGC deferGC(vm); codeBlock = JSC::ProgramCodeBlock::create(vm, executable, unlinkedBlock, jsScope); + RETURN_IF_EXCEPTION(scope, {}); } JSC::CompilationResult compilationResult = JIT::compileSync(vm, codeBlock, JITCompilationEffort::JITCompilationCanFail); if (compilationResult != JSC::CompilationResult::CompilationFailed) { @@ -197,6 +224,9 @@ JSC::JSUint8Array* NodeVMScript::getBytecodeBuffer() std::span bytes = m_cachedBytecode->span(); m_cachedBytecodeBuffer.set(vm(), this, WebCore::createBuffer(globalObject(), bytes)); + if (!m_cachedBytecodeBuffer) { + return nullptr; + } } ASSERT(m_cachedBytecodeBuffer); @@ -252,10 +282,8 @@ void NodeVMScript::destroy(JSCell* cell) static_cast(cell)->NodeVMScript::~NodeVMScript(); } -static bool checkForTermination(JSGlobalObject* globalObject, ThrowScope& scope, NodeVMScript* script, std::optional timeout) +static bool checkForTermination(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::ThrowScope& scope, NodeVMScript* script, std::optional timeout) { - VM& vm = JSC::getVM(globalObject); - if (vm.hasTerminationRequest()) { vm.clearHasTerminationRequest(); if (script->getSigintReceived()) { @@ -333,11 +361,13 @@ static JSC::EncodedJSValue runInContext(NodeVMGlobalObject* globalObject, NodeVM run(); } + RETURN_IF_EXCEPTION(scope, {}); + if (options.timeout) { vm.watchdog()->setTimeLimit(WTF::Seconds::fromMilliseconds(*oldLimit)); } - if (checkForTermination(globalObject, scope, script, newLimit)) { + if (checkForTermination(vm, globalObject, scope, script, newLimit)) { return {}; } @@ -351,7 +381,8 @@ static JSC::EncodedJSValue runInContext(NodeVMGlobalObject* globalObject, NodeVM return {}; } - return JSValue::encode(result); + RETURN_IF_EXCEPTION(scope, {}); + RELEASE_AND_RETURN(scope, JSValue::encode(result)); } JSC_DEFINE_HOST_FUNCTION(scriptRunInThisContext, (JSGlobalObject * globalObject, CallFrame* callFrame)) @@ -399,7 +430,7 @@ JSC_DEFINE_HOST_FUNCTION(scriptRunInThisContext, (JSGlobalObject * globalObject, vm.watchdog()->setTimeLimit(WTF::Seconds::fromMilliseconds(*oldLimit)); } - if (checkForTermination(globalObject, scope, script, newLimit)) { + if (checkForTermination(vm, globalObject, scope, script, newLimit)) { return {}; } @@ -414,7 +445,7 @@ JSC_DEFINE_HOST_FUNCTION(scriptRunInThisContext, (JSGlobalObject * globalObject, } RETURN_IF_EXCEPTION(scope, {}); - return JSValue::encode(result); + RELEASE_AND_RETURN(scope, JSValue::encode(result)); } JSC_DEFINE_CUSTOM_GETTER(scriptGetSourceMapURL, (JSGlobalObject * globalObject, JSC::EncodedJSValue thisValueEncoded, PropertyName)) @@ -433,7 +464,7 @@ JSC_DEFINE_CUSTOM_GETTER(scriptGetSourceMapURL, (JSGlobalObject * globalObject, return encodedJSUndefined(); } - return JSValue::encode(jsString(vm, url)); + RELEASE_AND_RETURN(scope, JSValue::encode(jsString(vm, url))); } JSC_DEFINE_CUSTOM_GETTER(scriptGetCachedData, (JSGlobalObject * globalObject, JSC::EncodedJSValue thisValueEncoded, PropertyName)) @@ -447,10 +478,10 @@ JSC_DEFINE_CUSTOM_GETTER(scriptGetCachedData, (JSGlobalObject * globalObject, JS } if (auto* buffer = script->getBytecodeBuffer()) { - return JSValue::encode(buffer); + RELEASE_AND_RETURN(scope, JSValue::encode(buffer)); } - return JSValue::encode(jsUndefined()); + RELEASE_AND_RETURN(scope, JSValue::encode(jsUndefined())); } JSC_DEFINE_CUSTOM_GETTER(scriptGetCachedDataProduced, (JSGlobalObject * globalObject, JSC::EncodedJSValue thisValueEncoded, PropertyName)) @@ -463,7 +494,7 @@ JSC_DEFINE_CUSTOM_GETTER(scriptGetCachedDataProduced, (JSGlobalObject * globalOb return 
ERR::INVALID_ARG_VALUE(scope, globalObject, "this"_s, thisValue, "must be a Script"_s); } - return JSValue::encode(jsBoolean(script->cachedDataProduced())); + RELEASE_AND_RETURN(scope, JSValue::encode(jsBoolean(script->cachedDataProduced()))); } JSC_DEFINE_CUSTOM_GETTER(scriptGetCachedDataRejected, (JSGlobalObject * globalObject, JSC::EncodedJSValue thisValueEncoded, PropertyName)) @@ -478,11 +509,11 @@ JSC_DEFINE_CUSTOM_GETTER(scriptGetCachedDataRejected, (JSGlobalObject * globalOb switch (script->cachedDataRejected()) { case TriState::True: - return JSValue::encode(jsBoolean(true)); + RELEASE_AND_RETURN(scope, JSValue::encode(jsBoolean(true))); case TriState::False: - return JSValue::encode(jsBoolean(false)); + RELEASE_AND_RETURN(scope, JSValue::encode(jsBoolean(false))); default: - return JSValue::encode(jsUndefined()); + RELEASE_AND_RETURN(scope, encodedJSUndefined()); } } @@ -498,7 +529,7 @@ JSC_DEFINE_HOST_FUNCTION(scriptCreateCachedData, (JSGlobalObject * globalObject, } const JSC::SourceCode& source = script->source(); - return createCachedData(globalObject, source); + RELEASE_AND_RETURN(scope, createCachedData(globalObject, source)); } JSC_DEFINE_HOST_FUNCTION(scriptRunInContext, (JSGlobalObject * globalObject, CallFrame* callFrame)) @@ -519,7 +550,7 @@ JSC_DEFINE_HOST_FUNCTION(scriptRunInContext, (JSGlobalObject * globalObject, Cal JSObject* context = asObject(contextArg); ASSERT(nodeVmGlobalObject != nullptr); - return runInContext(nodeVmGlobalObject, script, context, args.at(1)); + RELEASE_AND_RETURN(scope, runInContext(nodeVmGlobalObject, script, context, args.at(1))); } JSC_DEFINE_HOST_FUNCTION(scriptRunInNewContext, (JSGlobalObject * globalObject, CallFrame* callFrame)) @@ -527,8 +558,6 @@ JSC_DEFINE_HOST_FUNCTION(scriptRunInNewContext, (JSGlobalObject * globalObject, VM& vm = JSC::getVM(globalObject); NodeVMScript* script = jsDynamicCast(callFrame->thisValue()); JSValue contextObjectValue = callFrame->argument(0); - // TODO: options - // JSValue optionsObjectValue = callFrame->argument(1); auto scope = DECLARE_THROW_SCOPE(vm); if (!script) { @@ -536,26 +565,38 @@ JSC_DEFINE_HOST_FUNCTION(scriptRunInNewContext, (JSGlobalObject * globalObject, return {}; } - if (contextObjectValue.isUndefined()) { - contextObjectValue = JSC::constructEmptyObject(globalObject); - } + bool notContextified = NodeVM::getContextArg(globalObject, contextObjectValue); if (!contextObjectValue || !contextObjectValue.isObject()) [[unlikely]] { throwTypeError(globalObject, scope, "Context must be an object"_s); return {}; } - // we don't care about options for now - // TODO: options - // bool didThrow = false; + JSValue contextOptionsArg = callFrame->argument(1); + NodeVMContextOptions contextOptions {}; + JSValue importer; - auto* zigGlobal = defaultGlobalObject(globalObject); + if (auto encodedException = getNodeVMContextOptions(globalObject, vm, scope, contextOptionsArg, contextOptions, "contextCodeGeneration", &importer)) { + return *encodedException; + } + + contextOptions.notContextified = notContextified; + + auto* zigGlobalObject = defaultGlobalObject(globalObject); JSObject* context = asObject(contextObjectValue); auto* targetContext = NodeVMGlobalObject::create(vm, - zigGlobal->NodeVMGlobalObjectStructure(), - {}); + zigGlobalObject->NodeVMGlobalObjectStructure(), + contextOptions, importer); + RETURN_IF_EXCEPTION(scope, {}); - return runInContext(targetContext, script, context, callFrame->argument(1)); + if (notContextified) { + auto* specialSandbox = NodeVMSpecialSandbox::create(vm, 
zigGlobalObject->NodeVMSpecialSandboxStructure(), targetContext); + RETURN_IF_EXCEPTION(scope, {}); + targetContext->setSpecialSandbox(specialSandbox); + RELEASE_AND_RETURN(scope, runInContext(targetContext, script, targetContext->specialSandbox(), callFrame->argument(1))); + } + + RELEASE_AND_RETURN(scope, runInContext(targetContext, script, context, callFrame->argument(1))); } class NodeVMScriptPrototype final : public JSC::JSNonFinalObject { @@ -626,8 +667,9 @@ bool RunningScriptOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm if (!optionsArg.isUndefined() && !optionsArg.isString()) { JSObject* options = asObject(optionsArg); - if (JSValue displayErrorsOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "displayErrors"_s))) { - RETURN_IF_EXCEPTION(scope, false); + auto displayErrorsOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "displayErrors"_s)); + RETURN_IF_EXCEPTION(scope, false); + if (displayErrorsOpt) { if (!displayErrorsOpt.isUndefined()) { if (!displayErrorsOpt.isBoolean()) { ERR::INVALID_ARG_TYPE(scope, globalObject, "options.displayErrors"_s, "boolean"_s, displayErrorsOpt); @@ -639,12 +681,13 @@ bool RunningScriptOptions::fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm } if (validateTimeout(globalObject, vm, scope, options, this->timeout)) { - RETURN_IF_EXCEPTION(scope, false); any = true; } + RETURN_IF_EXCEPTION(scope, {}); - if (JSValue breakOnSigintOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "breakOnSigint"_s))) { - RETURN_IF_EXCEPTION(scope, false); + auto breakOnSigintOpt = options->getIfPropertyExists(globalObject, Identifier::fromString(vm, "breakOnSigint"_s)); + RETURN_IF_EXCEPTION(scope, false); + if (breakOnSigintOpt) { if (!breakOnSigintOpt.isUndefined()) { if (!breakOnSigintOpt.isBoolean()) { ERR::INVALID_ARG_TYPE(scope, globalObject, "options.breakOnSigint"_s, "boolean"_s, breakOnSigintOpt); diff --git a/src/bun.js/bindings/NodeVMScript.h b/src/bun.js/bindings/NodeVMScript.h index b7e7b2dec4..5637ae939a 100644 --- a/src/bun.js/bindings/NodeVMScript.h +++ b/src/bun.js/bindings/NodeVMScript.h @@ -10,12 +10,11 @@ class ScriptOptions : public BaseVMOptions { public: WTF::Vector cachedData; std::optional timeout = std::nullopt; - JSValue importer {}; bool produceCachedData = false; using BaseVMOptions::BaseVMOptions; - bool fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSC::JSValue optionsArg); + bool fromJS(JSC::JSGlobalObject* globalObject, JSC::VM& vm, JSC::ThrowScope& scope, JSC::JSValue optionsArg, JSValue* importer); }; class NodeVMScriptConstructor final : public JSC::InternalFunction { diff --git a/src/bun.js/bindings/NodeVMScriptFetcher.h b/src/bun.js/bindings/NodeVMScriptFetcher.h index 0ec7416e06..b8676b2445 100644 --- a/src/bun.js/bindings/NodeVMScriptFetcher.h +++ b/src/bun.js/bindings/NodeVMScriptFetcher.h @@ -3,27 +3,39 @@ #include "root.h" #include +#include namespace Bun { // The presence of this class in a JSFunction's sourceOrigin indicates that the function was compiled by Bun's node:vm implementation. 
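The fetcher rides along in the `SourceOrigin` captured at compile time; that is how a later dynamic `import()` can tell vm-compiled code apart from ordinary host code. A hedged sketch of that check, which assumes `SourceOrigin` exposes its `ScriptFetcher` through a `fetcher()` accessor; `tryGetNodeVMFetcher` is a hypothetical helper, not part of this patch:

```cpp
// Sketch: recover the node:vm fetcher (if any) from a compile-time SourceOrigin.
static NodeVMScriptFetcher* tryGetNodeVMFetcher(const JSC::SourceOrigin& origin)
{
    JSC::ScriptFetcher* fetcher = origin.fetcher(); // assumed accessor
    if (fetcher && fetcher->fetcherType() == JSC::ScriptFetcher::Type::NodeVM)
        return static_cast<NodeVMScriptFetcher*>(fetcher);
    return nullptr; // not vm code: fall back to the host's normal module loader
}
```

From there the import hook can consult `dynamicImportCallback()` and `isUsingDefaultLoader()` on the returned fetcher to decide whether to invoke the user callback or the default loader.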
 class NodeVMScriptFetcher : public JSC::ScriptFetcher {
 public:
-    static Ref<NodeVMScriptFetcher> create(JSC::VM& vm, JSC::JSValue dynamicImportCallback) { return adoptRef(*new NodeVMScriptFetcher(vm, dynamicImportCallback)); }
+    static Ref<NodeVMScriptFetcher> create(JSC::VM& vm, JSC::JSValue dynamicImportCallback, JSC::JSValue owner) { return adoptRef(*new NodeVMScriptFetcher(vm, dynamicImportCallback, owner)); }

     Type fetcherType() const final { return Type::NodeVM; }

     JSC::JSValue dynamicImportCallback() const { return m_dynamicImportCallback.get(); }

-    JSC::JSFunction* owner() const { return m_owner.get(); }
-    void owner(JSC::VM& vm, JSC::JSFunction* value) { m_owner.set(vm, value); }
+    JSC::JSValue owner() const { return m_owner.get(); }
+    void owner(JSC::VM& vm, JSC::JSValue value) { m_owner.set(vm, value); }
+
+    bool isUsingDefaultLoader() const { return m_isUsingDefaultLoader; }
+    auto temporarilyUseDefaultLoader()
+    {
+        m_isUsingDefaultLoader = true;
+        return makeScopeExit([this] {
+            m_isUsingDefaultLoader = false;
+        });
+    }

 private:
     JSC::Strong<JSC::Unknown> m_dynamicImportCallback;
-    JSC::Strong<JSC::JSFunction> m_owner;
+    JSC::Strong<JSC::Unknown> m_owner;
+    bool m_isUsingDefaultLoader = false;

-    NodeVMScriptFetcher(JSC::VM& vm, JSC::JSValue dynamicImportCallback)
+    NodeVMScriptFetcher(JSC::VM& vm, JSC::JSValue dynamicImportCallback, JSC::JSValue owner)
         : m_dynamicImportCallback(vm, dynamicImportCallback)
+        , m_owner(vm, owner)
     {
     }
 };
diff --git a/src/bun.js/bindings/NodeVMSourceTextModule.cpp b/src/bun.js/bindings/NodeVMSourceTextModule.cpp
index fb72e3bb84..8af22d59a5 100644
--- a/src/bun.js/bindings/NodeVMSourceTextModule.cpp
+++ b/src/bun.js/bindings/NodeVMSourceTextModule.cpp
@@ -1,3 +1,4 @@
+#include "NodeVMScriptFetcher.h"
 #include "NodeVMSourceTextModule.h"
 #include "NodeVMSyntheticModule.h"

@@ -77,16 +78,35 @@ NodeVMSourceTextModule* NodeVMSourceTextModule::create(VM& vm, JSGlobalObject* g
         return nullptr;
     }

-    uint32_t lineOffset = lineOffsetValue.toUInt32(globalObject);
-    uint32_t columnOffset = columnOffsetValue.toUInt32(globalObject);
+    JSValue dynamicImportCallback = args.at(8);
+    if (!dynamicImportCallback.isUndefined() && !dynamicImportCallback.isCallable()) {
+        throwArgumentTypeError(*globalObject, scope, 8, "dynamicImportCallback"_s, "Module"_s, "Module"_s, "function"_s);
+        return nullptr;
+    }

-    Ref<StringSourceProvider> sourceProvider = StringSourceProvider::create(sourceTextValue.toWTFString(globalObject), SourceOrigin {}, String {}, SourceTaintedOrigin::Untainted,
+    uint32_t lineOffset = lineOffsetValue.toUInt32(globalObject);
+    RETURN_IF_EXCEPTION(scope, nullptr);
+    uint32_t columnOffset = columnOffsetValue.toUInt32(globalObject);
+    RETURN_IF_EXCEPTION(scope, nullptr);
+
+    RefPtr<NodeVMScriptFetcher> fetcher(NodeVMScriptFetcher::create(vm, dynamicImportCallback, moduleWrapper));
+    RETURN_IF_EXCEPTION(scope, nullptr);
+
+    SourceOrigin sourceOrigin { {}, *fetcher };
+
+    WTF::String sourceText = sourceTextValue.toWTFString(globalObject);
+    RETURN_IF_EXCEPTION(scope, nullptr);
+
+    Ref<StringSourceProvider> sourceProvider = StringSourceProvider::create(WTFMove(sourceText), sourceOrigin, String {}, SourceTaintedOrigin::Untainted,
         TextPosition { OrdinalNumber::fromZeroBasedInt(lineOffset), OrdinalNumber::fromZeroBasedInt(columnOffset) }, SourceProviderSourceType::Module);

     SourceCode sourceCode(WTFMove(sourceProvider), lineOffset, columnOffset);

     auto* zigGlobalObject = defaultGlobalObject(globalObject);

-    NodeVMSourceTextModule* ptr = new (NotNull, allocateCell<NodeVMSourceTextModule>(vm)) NodeVMSourceTextModule(vm, zigGlobalObject->NodeVMSourceTextModuleStructure(), identifierValue.toWTFString(globalObject), contextValue,
WTFMove(sourceCode), moduleWrapper); + WTF::String identifier = identifierValue.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, nullptr); + NodeVMSourceTextModule* ptr = new (NotNull, allocateCell(vm)) NodeVMSourceTextModule(vm, zigGlobalObject->NodeVMSourceTextModuleStructure(), WTFMove(identifier), contextValue, WTFMove(sourceCode), moduleWrapper); + RETURN_IF_EXCEPTION(scope, nullptr); ptr->finishCreation(vm); if (!initializeImportMeta.isUndefined()) { @@ -111,7 +131,9 @@ NodeVMSourceTextModule* NodeVMSourceTextModule::create(VM& vm, JSGlobalObject* g LexicallyScopedFeatures lexicallyScopedFeatures = globalObject->globalScopeExtension() ? TaintedByWithScopeLexicallyScopedFeature : NoLexicallyScopedFeatures; SourceCodeKey key(ptr->sourceCode(), {}, SourceCodeType::ProgramType, lexicallyScopedFeatures, JSParserScriptMode::Classic, DerivedContextType::None, EvalContextType::None, false, {}, std::nullopt); Ref cachedBytecode = CachedBytecode::create(std::span(cachedData), nullptr, {}); + RETURN_IF_EXCEPTION(scope, nullptr); UnlinkedModuleProgramCodeBlock* unlinkedBlock = decodeCodeBlock(vm, key, WTFMove(cachedBytecode)); + RETURN_IF_EXCEPTION(scope, nullptr); if (unlinkedBlock) { JSScope* jsScope = globalObject->globalScope(); @@ -120,9 +142,11 @@ NodeVMSourceTextModule* NodeVMSourceTextModule::create(VM& vm, JSGlobalObject* g // JSC::ProgramCodeBlock::create() requires GC to be deferred. DeferGC deferGC(vm); codeBlock = ModuleProgramCodeBlock::create(vm, executable, unlinkedBlock, jsScope); + RETURN_IF_EXCEPTION(scope, nullptr); } if (codeBlock) { CompilationResult compilationResult = JIT::compileSync(vm, codeBlock, JITCompilationEffort::JITCompilationCanFail); + RETURN_IF_EXCEPTION(scope, nullptr); if (compilationResult != CompilationResult::CompilationFailed) { executable->installCode(codeBlock); return ptr; @@ -184,7 +208,9 @@ JSValue NodeVMSourceTextModule::createModuleRecord(JSGlobalObject* globalObject) const auto& requests = moduleRecord->requestedModules(); if (requests.isEmpty()) { - return constructEmptyArray(globalObject, nullptr, 0); + JSArray* requestsArray = constructEmptyArray(globalObject, nullptr, 0); + RETURN_IF_EXCEPTION(scope, {}); + return requestsArray; } JSArray* requestsArray = constructEmptyArray(globalObject, nullptr, requests.size()); @@ -312,26 +338,35 @@ JSValue NodeVMSourceTextModule::link(JSGlobalObject* globalObject, JSArray* spec if (length != 0) { for (unsigned i = 0; i < length; i++) { JSValue specifierValue = specifiers->getDirectIndex(globalObject, i); + RETURN_IF_EXCEPTION(scope, {}); JSValue moduleNativeValue = moduleNatives->getDirectIndex(globalObject, i); + RETURN_IF_EXCEPTION(scope, {}); ASSERT(specifierValue.isString()); ASSERT(moduleNativeValue.isObject()); WTF::String specifier = specifierValue.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); JSObject* moduleNative = moduleNativeValue.getObject(); + RETURN_IF_EXCEPTION(scope, {}); AbstractModuleRecord* resolvedRecord = jsCast(moduleNative)->moduleRecord(globalObject); + RETURN_IF_EXCEPTION(scope, {}); record->setImportedModule(globalObject, Identifier::fromString(vm, specifier), resolvedRecord); + RETURN_IF_EXCEPTION(scope, {}); m_resolveCache.set(WTFMove(specifier), WriteBarrier { vm, this, moduleNative }); + RETURN_IF_EXCEPTION(scope, {}); } } - if (NodeVMGlobalObject* nodeVmGlobalObject = getGlobalObjectFromContext(globalObject, m_context.get(), false)) { + NodeVMGlobalObject* nodeVmGlobalObject = getGlobalObjectFromContext(globalObject, m_context.get(), false); + 
RETURN_IF_EXCEPTION(scope, {}); + + if (nodeVmGlobalObject) { globalObject = nodeVmGlobalObject; } Synchronousness sync = record->link(globalObject, scriptFetcher); - RETURN_IF_EXCEPTION(scope, {}); if (sync == Synchronousness::Async) { @@ -355,6 +390,7 @@ RefPtr NodeVMSourceTextModule::bytecode(JSGlobalObject* globalOb if (!m_bytecode) { if (!m_cachedExecutable) { ModuleProgramExecutable* executable = ModuleProgramExecutable::tryCreate(globalObject, m_sourceCode); + RETURN_IF_EXCEPTION(scope, nullptr); if (!executable) { if (!scope.exception()) { throwSyntaxError(globalObject, scope, "Failed to create cached executable"_s); @@ -364,6 +400,7 @@ RefPtr NodeVMSourceTextModule::bytecode(JSGlobalObject* globalOb m_cachedExecutable.set(vm, this, executable); } m_bytecode = getBytecode(globalObject, m_cachedExecutable.get(), m_sourceCode); + RETURN_IF_EXCEPTION(scope, nullptr); } return m_bytecode; @@ -371,10 +408,16 @@ RefPtr NodeVMSourceTextModule::bytecode(JSGlobalObject* globalOb JSUint8Array* NodeVMSourceTextModule::cachedData(JSGlobalObject* globalObject) { + VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + if (!m_cachedBytecodeBuffer) { RefPtr cachedBytecode = bytecode(globalObject); + RETURN_IF_EXCEPTION(scope, nullptr); std::span bytes = cachedBytecode->span(); - m_cachedBytecodeBuffer.set(vm(), this, WebCore::createBuffer(globalObject, bytes)); + JSUint8Array* buffer = WebCore::createBuffer(globalObject, bytes); + RETURN_IF_EXCEPTION(scope, nullptr); + m_cachedBytecodeBuffer.set(vm, this, buffer); } return m_cachedBytecodeBuffer.get(); @@ -389,7 +432,11 @@ void NodeVMSourceTextModule::initializeImportMeta(JSGlobalObject* globalObject) JSModuleEnvironment* moduleEnvironment = m_moduleRecord->moduleEnvironmentMayBeNull(); ASSERT(moduleEnvironment != nullptr); + VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + JSValue metaValue = moduleEnvironment->get(globalObject, globalObject->vm().propertyNames->builtinNames().metaPrivateName()); + RETURN_IF_EXCEPTION(scope, ); ASSERT(metaValue); ASSERT(metaValue.isObject()); @@ -400,6 +447,7 @@ void NodeVMSourceTextModule::initializeImportMeta(JSGlobalObject* globalObject) args.append(m_moduleWrapper.get()); JSC::call(globalObject, m_initializeImportMeta.get(), callData, jsUndefined(), args); + RETURN_IF_EXCEPTION(scope, ); } JSObject* NodeVMSourceTextModule::createPrototype(VM& vm, JSGlobalObject* globalObject) diff --git a/src/bun.js/bindings/NodeVMSyntheticModule.cpp b/src/bun.js/bindings/NodeVMSyntheticModule.cpp index 5a178a18cf..3cd49018a1 100644 --- a/src/bun.js/bindings/NodeVMSyntheticModule.cpp +++ b/src/bun.js/bindings/NodeVMSyntheticModule.cpp @@ -67,15 +67,20 @@ NodeVMSyntheticModule* NodeVMSyntheticModule::create(VM& vm, JSGlobalObject* glo WTF::HashSet exportNames; for (unsigned i = 0; i < exportNamesArray->getArrayLength(); i++) { JSValue exportNameValue = exportNamesArray->getIndex(globalObject, i); + RETURN_IF_EXCEPTION(scope, nullptr); if (!exportNameValue.isString()) { throwArgumentTypeError(*globalObject, scope, 2, "exportNames"_s, "Module"_s, "Module"_s, "string[]"_s); + return nullptr; } exportNames.addVoid(exportNameValue.toWTFString(globalObject)); + RETURN_IF_EXCEPTION(scope, nullptr); } auto* zigGlobalObject = defaultGlobalObject(globalObject); auto* structure = zigGlobalObject->NodeVMSyntheticModuleStructure(); - auto* ptr = new (NotNull, allocateCell(vm)) NodeVMSyntheticModule(vm, structure, identifierValue.toWTFString(globalObject), contextValue, moduleWrapperValue, 
WTFMove(exportNames), syntheticEvaluationStepsValue); + WTF::String identifier = identifierValue.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, nullptr); + auto* ptr = new (NotNull, allocateCell(vm)) NodeVMSyntheticModule(vm, structure, WTFMove(identifier), contextValue, moduleWrapperValue, WTFMove(exportNames), syntheticEvaluationStepsValue); ptr->finishCreation(vm); return ptr; } @@ -204,8 +209,11 @@ void NodeVMSyntheticModule::setExport(JSGlobalObject* globalObject, WTF::String } ensureModuleRecord(globalObject); + RETURN_IF_EXCEPTION(scope, ); JSModuleNamespaceObject* namespaceObject = m_moduleRecord->getModuleNamespace(globalObject, false); + RETURN_IF_EXCEPTION(scope, ); namespaceObject->overrideExportValue(globalObject, Identifier::fromString(vm, exportName), value); + RETURN_IF_EXCEPTION(scope, ); } JSObject* NodeVMSyntheticModule::createPrototype(VM& vm, JSGlobalObject* globalObject) diff --git a/src/bun.js/bindings/NodeValidator.cpp b/src/bun.js/bindings/NodeValidator.cpp index 701d647d30..18e84e503e 100644 --- a/src/bun.js/bindings/NodeValidator.cpp +++ b/src/bun.js/bindings/NodeValidator.cpp @@ -567,7 +567,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_validateEncoding, (JSC::JSGlobalObject * glo length = impl->byteLength(); } } else if (auto* object = data.getObject()) { - JSValue lengthValue = object->getIfPropertyExists(globalObject, vm.propertyNames->length); + JSValue lengthValue = object->get(globalObject, vm.propertyNames->length); RETURN_IF_EXCEPTION(scope, {}); length = lengthValue.toLength(globalObject); RETURN_IF_EXCEPTION(scope, {}); @@ -640,7 +640,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_validateOneOf, (JSC::JSGlobalObject * global unsigned length = array->length(); for (size_t i = 0; i < length; i++) { JSValue element = array->getIndex(globalObject, i); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (JSC::sameValue(globalObject, value, element)) { return JSValue::encode(jsUndefined()); } @@ -661,8 +661,9 @@ JSC::EncodedJSValue V::validateOneOf(JSC::ThrowScope& scope, JSC::JSGlobalObject } JSC::JSString* valueStr = value.toString(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); auto valueView = valueStr->view(globalObject); + RETURN_IF_EXCEPTION(scope, {}); for (ASCIILiteral oneOfStr : oneOf) { diff --git a/src/bun.js/bindings/ObjectBindings.cpp b/src/bun.js/bindings/ObjectBindings.cpp index 7ccd219cf3..aca4c7a5fd 100644 --- a/src/bun.js/bindings/ObjectBindings.cpp +++ b/src/bun.js/bindings/ObjectBindings.cpp @@ -41,7 +41,7 @@ static bool getNonIndexPropertySlotPrototypePollutionMitigation(JSC::VM& vm, JSO if (!structure->typeInfo().overridesGetPrototype() || slot.internalMethodType() == PropertySlot::InternalMethodType::VMInquiry) [[likely]] prototype = object->getPrototypeDirect(); else { - prototype = object->getPrototype(vm, globalObject); + prototype = object->getPrototype(globalObject); RETURN_IF_EXCEPTION(scope, false); } if (!prototype.isObject()) @@ -68,10 +68,12 @@ JSC::JSValue getIfPropertyExistsPrototypePollutionMitigationUnsafe(JSC::VM& vm, auto isDefined = getNonIndexPropertySlotPrototypePollutionMitigation(vm, object, globalObject, name, propertySlot); if (!isDefined) { + RETURN_IF_EXCEPTION(scope, {}); return JSValue::decode(JSC::JSValue::ValueDeleted); } - scope.assertNoException(); + scope.assertNoExceptionExceptTermination(); + RETURN_IF_EXCEPTION(scope, {}); JSValue value = propertySlot.getValue(globalObject, name); RETURN_IF_EXCEPTION(scope, {}); 
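The `ObjectBindings.cpp` hunk above separates two failure modes that the old `scope.assertNoException()` conflated: a lookup that throws (including through a Proxy trap), and a pending VM termination request, which JSC also delivers as an exception. A commented restatement of the new control flow, using the same names as the function it annotates:

```cpp
// `isDefined == false` no longer implies "property absent": the slot lookup
// itself may have thrown, so check the scope before reporting absence.
if (!isDefined) {
    RETURN_IF_EXCEPTION(scope, {}); // the lookup (e.g. a Proxy trap) threw
    return JSValue::decode(JSC::JSValue::ValueDeleted); // genuinely absent
}
// On the success path only a termination request may still be pending;
// assert that, then propagate it through the normal exception machinery.
scope.assertNoExceptionExceptTermination();
RETURN_IF_EXCEPTION(scope, {});
```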
return value; diff --git a/src/bun.js/bindings/ProcessBindingTTYWrap.cpp b/src/bun.js/bindings/ProcessBindingTTYWrap.cpp index 9e5866e78b..2b13975d21 100644 --- a/src/bun.js/bindings/ProcessBindingTTYWrap.cpp +++ b/src/bun.js/bindings/ProcessBindingTTYWrap.cpp @@ -323,7 +323,9 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionInternalGetWindowSize, } array->putDirectIndex(globalObject, 0, jsNumber(width)); + RETURN_IF_EXCEPTION(throwScope, {}); array->putDirectIndex(globalObject, 1, jsNumber(height)); + RETURN_IF_EXCEPTION(throwScope, {}); return JSC::JSValue::encode(jsBoolean(true)); } diff --git a/src/bun.js/bindings/S3Error.cpp b/src/bun.js/bindings/S3Error.cpp index 0ce4d9d093..8f90ab38c3 100644 --- a/src/bun.js/bindings/S3Error.cpp +++ b/src/bun.js/bindings/S3Error.cpp @@ -38,10 +38,8 @@ SYSV_ABI JSC::EncodedJSValue S3Error__toErrorInstance(const S3Error* arg0, JSC::JSValue options = JSC::jsUndefined(); auto prototype = defaultGlobalObject(globalObject)->m_S3ErrorStructure.getInitializedOnMainThread(globalObject); JSC::JSObject* result = JSC::ErrorInstance::create(globalObject, prototype, message, options); - result->putDirect( - vm, vm.propertyNames->name, - JSC::JSValue(defaultGlobalObject(globalObject)->commonStrings().s3ErrorString(globalObject)), - JSC::PropertyAttribute::DontEnum | 0); + RETURN_IF_EXCEPTION(scope, {}); + result->putDirect(vm, vm.propertyNames->name, defaultGlobalObject(globalObject)->commonStrings().s3ErrorString(globalObject), JSC::PropertyAttribute::DontEnum | 0); if (err.code.tag != BunStringTag::Empty) { JSC::JSValue code = Bun::toJS(globalObject, err.code); result->putDirect(vm, names.codePublicName(), code, @@ -57,7 +55,7 @@ SYSV_ABI JSC::EncodedJSValue S3Error__toErrorInstance(const S3Error* arg0, RETURN_IF_EXCEPTION(scope, {}); scope.release(); - return JSC::JSValue::encode(JSC::JSValue(result)); + return JSC::JSValue::encode(result); } } } diff --git a/src/bun.js/bindings/SQLClient.cpp b/src/bun.js/bindings/SQLClient.cpp index 8b76c2c81b..af1eab7776 100644 --- a/src/bun.js/bindings/SQLClient.cpp +++ b/src/bun.js/bindings/SQLClient.cpp @@ -126,6 +126,8 @@ public: static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCell& cell) { + auto scope = DECLARE_THROW_SCOPE(vm); + switch (cell.tag) { case DataCellTag::Null: return jsNull(); @@ -134,9 +136,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel Zig::GlobalObject* zigGlobal = jsCast(globalObject); auto* subclassStructure = zigGlobal->JSBufferSubclassStructure(); auto* uint8Array = JSC::JSUint8Array::createUninitialized(globalObject, subclassStructure, cell.value.raw.length); - if (uint8Array == nullptr) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (cell.value.raw.length > 0) { memcpy(uint8Array->vector(), reinterpret_cast(cell.value.raw.ptr), cell.value.raw.length); @@ -173,9 +173,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel Zig::GlobalObject* zigGlobal = jsCast(globalObject); auto* subclassStructure = zigGlobal->JSBufferSubclassStructure(); auto* uint8Array = JSC::JSUint8Array::createUninitialized(globalObject, subclassStructure, cell.value.bytea[1]); - if (uint8Array == nullptr) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (cell.value.bytea[1] > 0) { memcpy(uint8Array->vector(), reinterpret_cast(cell.value.bytea[0]), cell.value.bytea[1]); @@ -210,9 +208,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel switch (type) { case 
JSC::JSType::Int32ArrayType: { JSC::JSInt32Array* array = JSC::JSInt32Array::createUninitialized(globalObject, globalObject->typedArrayStructureWithTypedArrayType(), length); - if (array == nullptr) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (length > 0) { memcpy(array->vector(), reinterpret_cast(cell.value.typed_array.data), length * sizeof(int32_t)); @@ -222,9 +218,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel } case JSC::JSType::Uint32ArrayType: { JSC::JSUint32Array* array = JSC::JSUint32Array::createUninitialized(globalObject, globalObject->typedArrayStructureWithTypedArrayType(), length); - if (array == nullptr) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (length > 0) { memcpy(array->vector(), reinterpret_cast(cell.value.typed_array.data), length * sizeof(uint32_t)); @@ -233,9 +227,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel } case JSC::JSType::Int16ArrayType: { JSC::JSInt16Array* array = JSC::JSInt16Array::createUninitialized(globalObject, globalObject->typedArrayStructureWithTypedArrayType(), length); - if (array == nullptr) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (length > 0) { memcpy(array->vector(), reinterpret_cast(cell.value.typed_array.data), length * sizeof(int16_t)); @@ -245,9 +237,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel } case JSC::JSType::Uint16ArrayType: { JSC::JSUint16Array* array = JSC::JSUint16Array::createUninitialized(globalObject, globalObject->typedArrayStructureWithTypedArrayType(), length); - if (array == nullptr) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (length > 0) { memcpy(array->vector(), reinterpret_cast(cell.value.typed_array.data), length * sizeof(uint16_t)); @@ -256,9 +246,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel } case JSC::JSType::Float16ArrayType: { JSC::JSFloat16Array* array = JSC::JSFloat16Array::createUninitialized(globalObject, globalObject->typedArrayStructureWithTypedArrayType(), length); - if (array == nullptr) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (length > 0) { memcpy(array->vector(), reinterpret_cast(cell.value.typed_array.data), length * 2); // sizeof(float16_t) @@ -267,9 +255,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel } case JSC::JSType::Float32ArrayType: { JSC::JSFloat32Array* array = JSC::JSFloat32Array::createUninitialized(globalObject, globalObject->typedArrayStructureWithTypedArrayType(), length); - if (array == nullptr) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (length > 0) { memcpy(array->vector(), reinterpret_cast(cell.value.typed_array.data), length * sizeof(float)); @@ -278,9 +264,7 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel } case JSC::JSType::Float64ArrayType: { JSC::JSFloat64Array* array = JSC::JSFloat64Array::createUninitialized(globalObject, globalObject->typedArrayStructureWithTypedArrayType(), length); - if (array == nullptr) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (length > 0) { memcpy(array->vector(), reinterpret_cast(cell.value.typed_array.data), length * sizeof(double)); diff --git a/src/bun.js/bindings/Serialization.cpp b/src/bun.js/bindings/Serialization.cpp index 83c986f6c4..6d51c87ab9 100644 --- a/src/bun.js/bindings/Serialization.cpp +++ 
b/src/bun.js/bindings/Serialization.cpp @@ -18,6 +18,8 @@ struct SerializedValueSlice { /// Returns a "slice" that also contains a pointer to the SerializedScriptValue. Must be freed by the caller extern "C" SerializedValueSlice Bun__serializeJSValue(JSGlobalObject* globalObject, EncodedJSValue encodedValue, bool forTransferBool) { + auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); JSValue value = JSValue::decode(encodedValue); Vector> transferList; @@ -27,12 +29,9 @@ extern "C" SerializedValueSlice Bun__serializeJSValue(JSGlobalObject* globalObje auto forTransferEnum = forTransferBool ? SerializationForTransfer::Yes : SerializationForTransfer::No; ExceptionOr> serialized = SerializedScriptValue::create(*globalObject, value, WTFMove(transferList), dummyPorts, forStorage, context, forTransferEnum); - auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); - + EXCEPTION_ASSERT(!!scope.exception() == serialized.hasException()); if (serialized.hasException()) { - WebCore::propagateException(*globalObject, scope, - serialized.releaseException()); + WebCore::propagateException(*globalObject, scope, serialized.releaseException()); RELEASE_AND_RETURN(scope, { 0 }); } diff --git a/src/bun.js/bindings/Strong.cpp b/src/bun.js/bindings/Strong.cpp index d60f52eb7c..d4ce228f4a 100644 --- a/src/bun.js/bindings/Strong.cpp +++ b/src/bun.js/bindings/Strong.cpp @@ -48,6 +48,6 @@ extern "C" void Bun__StrongRef__clear(JSC::JSValue* _Nonnull handleSlot) { // The write barrier must be called *before* the value is cleared // to correctly remove the handle from the strong list if it held a cell. - JSC::HandleSet::heapFor(handleSlot)->writeBarrier(handleSlot, JSC::JSValue()); - *handleSlot = JSC::JSValue(); + JSC::HandleSet::heapFor(handleSlot)->writeBarrier(handleSlot, {}); + *handleSlot = {}; } diff --git a/src/bun.js/bindings/SystemError.zig b/src/bun.js/bindings/SystemError.zig index 484ed54489..b9fa36042b 100644 --- a/src/bun.js/bindings/SystemError.zig +++ b/src/bun.js/bindings/SystemError.zig @@ -9,7 +9,7 @@ pub const SystemError = extern struct { errno: c_int = 0, /// label for errno code: String = .empty, - message: String = .empty, + message: String, // it is illegal to have an empty message path: String = .empty, syscall: String = .empty, hostname: String = .empty, @@ -52,7 +52,6 @@ pub const SystemError = extern struct { pub fn toErrorInstance(this: *const SystemError, global: *JSGlobalObject) JSValue { defer this.deref(); - return SystemError__toErrorInstance(this, global); } @@ -77,7 +76,6 @@ pub const SystemError = extern struct { /// to match the error code that `node:os` throws. pub fn toErrorInstanceWithInfoObject(this: *const SystemError, global: *JSGlobalObject) JSValue { defer this.deref(); - return SystemError__toErrorInstanceWithInfoObject(this, global); } diff --git a/src/bun.js/bindings/UtilInspect.cpp b/src/bun.js/bindings/UtilInspect.cpp index cdb3c8ed0e..ae09d5b11c 100644 --- a/src/bun.js/bindings/UtilInspect.cpp +++ b/src/bun.js/bindings/UtilInspect.cpp @@ -28,6 +28,7 @@ Structure* createUtilInspectOptionsStructure(VM& vm, JSC::JSGlobalObject* global JSObject* createInspectOptionsObject(VM& vm, Zig::GlobalObject* globalObject, unsigned max_depth, bool colors) { JSFunction* stylizeFn = colors ? 
globalObject->utilInspectStylizeColorFunction() : globalObject->utilInspectStylizeNoColorFunction(); + if (!stylizeFn) return nullptr; JSObject* options = JSC::constructEmptyObject(vm, globalObject->utilInspectOptionsStructure()); options->putDirectOffset(vm, 0, stylizeFn); options->putDirectOffset(vm, 1, jsNumber(max_depth)); @@ -41,8 +42,7 @@ extern "C" JSC::EncodedJSValue JSC__JSValue__callCustomInspectFunction( JSC::EncodedJSValue encodedThisValue, unsigned depth, unsigned max_depth, - bool colors, - bool* is_exception) + bool colors) { JSValue functionToCall = JSValue::decode(encodedFunctionValue); JSValue thisValue = JSValue::decode(encodedThisValue); @@ -50,8 +50,10 @@ extern "C" JSC::EncodedJSValue JSC__JSValue__callCustomInspectFunction( auto scope = DECLARE_THROW_SCOPE(vm); JSObject* options = Bun::createInspectOptionsObject(vm, globalObject, max_depth, colors); + RETURN_IF_EXCEPTION(scope, {}); JSFunction* inspectFn = globalObject->utilInspectFunction(); + RETURN_IF_EXCEPTION(scope, {}); auto callData = JSC::getCallData(functionToCall); MarkedArgumentBuffer arguments; arguments.append(jsNumber(depth)); @@ -59,10 +61,7 @@ extern "C" JSC::EncodedJSValue JSC__JSValue__callCustomInspectFunction( arguments.append(inspectFn); auto inspectRet = JSC::profiledCall(globalObject, ProfilingReason::API, functionToCall, callData, thisValue, arguments); - if (scope.exception()) { - *is_exception = true; - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); RELEASE_AND_RETURN(scope, JSValue::encode(inspectRet)); } diff --git a/src/bun.js/bindings/ZigErrorType.zig b/src/bun.js/bindings/ZigErrorType.zig index 8559dda282..ce38dbda88 100644 --- a/src/bun.js/bindings/ZigErrorType.zig +++ b/src/bun.js/bindings/ZigErrorType.zig @@ -1,6 +1,7 @@ pub const ZigErrorType = extern struct { code: ErrorCode, - ptr: ?*anyopaque, + value: bun.jsc.JSValue, }; +const bun = @import("bun"); const ErrorCode = @import("ErrorCode.zig").ErrorCode; diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index 6b3615dc3c..e518145214 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -295,6 +295,7 @@ extern "C" void JSCInitialize(const char* envp[], size_t envc, void (*onCrash)(c JSC::Options::useJITCage() = false; JSC::Options::useShadowRealm() = true; JSC::Options::useV8DateParser() = true; + JSC::Options::useMathSumPreciseMethod() = true; JSC::Options::evalMode() = evalMode; JSC::Options::heapGrowthSteepnessFactor() = 1.0; JSC::Options::heapGrowthMaxIncrease() = 2.0; @@ -337,8 +338,9 @@ static JSValue formatStackTraceToJSValue(JSC::VM& vm, Zig::GlobalObject* globalO WTF::StringBuilder sb; - if (JSC::JSValue errorMessage = errorObject->getIfPropertyExists(lexicalGlobalObject, vm.propertyNames->message)) { - RETURN_IF_EXCEPTION(scope, {}); + auto errorMessage = errorObject->getIfPropertyExists(lexicalGlobalObject, vm.propertyNames->message); + RETURN_IF_EXCEPTION(scope, {}); + if (errorMessage) { auto* str = errorMessage.toString(lexicalGlobalObject); RETURN_IF_EXCEPTION(scope, {}); if (str->length() > 0) { @@ -427,7 +429,7 @@ static JSValue formatStackTraceToJSValueWithoutPrepareStackTrace(JSC::VM& vm, Zi auto* errorConstructor = lexicalGlobalObject->m_errorStructure.constructor(globalObject); prepareStackTrace = errorConstructor->getIfPropertyExists(lexicalGlobalObject, JSC::Identifier::fromString(vm, "prepareStackTrace"_s)); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); } } @@ -662,7 
+664,7 @@ static String computeErrorInfoWithoutPrepareStackTrace( String& sourceURL, JSObject* errorInstance) { - + auto scope = DECLARE_THROW_SCOPE(vm); WTF::String name = "Error"_s; WTF::String message; @@ -673,7 +675,9 @@ static String computeErrorInfoWithoutPrepareStackTrace( lexicalGlobalObject = errorInstance->globalObject(); } name = instance->sanitizedNameString(lexicalGlobalObject); + RETURN_IF_EXCEPTION(scope, {}); message = instance->sanitizedMessageString(lexicalGlobalObject); + RETURN_IF_EXCEPTION(scope, {}); } } @@ -752,7 +756,7 @@ static JSValue computeErrorInfoWithPrepareStackTrace(JSC::VM& vm, Zig::GlobalObj JSArray* callSitesArray = JSC::constructArray(globalObject, globalObject->arrayStructureForIndexingTypeDuringAllocation(JSC::ArrayWithContiguous), callSites); RETURN_IF_EXCEPTION(scope, {}); - return formatStackTraceToJSValue(vm, globalObject, lexicalGlobalObject, errorObject, callSitesArray, prepareStackTrace); + RELEASE_AND_RETURN(scope, formatStackTraceToJSValue(vm, globalObject, lexicalGlobalObject, errorObject, callSitesArray, prepareStackTrace)); } static String computeErrorInfoToString(JSC::VM& vm, Vector& stackTrace, OrdinalNumber& line, OrdinalNumber& column, String& sourceURL) @@ -768,9 +772,9 @@ static JSValue computeErrorInfoToJSValueWithoutSkipping(JSC::VM& vm, VectorglobalObject(); globalObject = jsDynamicCast(lexicalGlobalObject); + auto scope = DECLARE_THROW_SCOPE(vm); // Error.prepareStackTrace - https://v8.dev/docs/stack-trace-api#customizing-stack-traces if (!globalObject) { @@ -778,12 +782,14 @@ static JSValue computeErrorInfoToJSValueWithoutSkipping(JSC::VM& vm, VectorisInsideErrorPrepareStackTraceCallback) { auto* errorConstructor = lexicalGlobalObject->m_errorStructure.constructor(lexicalGlobalObject); - if (JSValue prepareStackTrace = errorConstructor->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "prepareStackTrace"_s))) { + auto prepareStackTrace = errorConstructor->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "prepareStackTrace"_s)); + RETURN_IF_EXCEPTION(scope, {}); + if (prepareStackTrace) { if (prepareStackTrace.isCell() && prepareStackTrace.isObject() && prepareStackTrace.isCallable()) { globalObject->isInsideErrorPrepareStackTraceCallback = true; auto result = computeErrorInfoWithPrepareStackTrace(vm, globalObject, lexicalGlobalObject, stackTrace, line, column, sourceURL, errorInstance, prepareStackTrace.getObject()); globalObject->isInsideErrorPrepareStackTraceCallback = false; - return result; + RELEASE_AND_RETURN(scope, result); } } } @@ -794,13 +800,14 @@ static JSValue computeErrorInfoToJSValueWithoutSkipping(JSC::VM& vm, VectorisInsideErrorPrepareStackTraceCallback = true; auto result = computeErrorInfoWithPrepareStackTrace(vm, globalObject, lexicalGlobalObject, stackTrace, line, column, sourceURL, errorInstance, prepareStackTrace.getObject()); globalObject->isInsideErrorPrepareStackTraceCallback = false; - return result; + RELEASE_AND_RETURN(scope, result); } } } } String result = computeErrorInfoWithoutPrepareStackTrace(vm, globalObject, lexicalGlobalObject, stackTrace, line, column, sourceURL, errorInstance); + RETURN_IF_EXCEPTION(scope, {}); return jsString(vm, result); } @@ -814,7 +821,14 @@ static String computeErrorInfoWrapperToString(JSC::VM& vm, Vector& s OrdinalNumber line = OrdinalNumber::fromOneBasedInt(line_in); OrdinalNumber column = OrdinalNumber::fromOneBasedInt(column_in); + auto scope = DECLARE_CATCH_SCOPE(vm); WTF::String result = computeErrorInfoToString(vm, 
stackTrace, line, column, sourceURL); + if (scope.exception()) { + // TODO: is this correct? vm.setOnComputeErrorInfo doesn't appear to properly handle a function that can throw + // test/js/node/test/parallel/test-stream-writable-write-writev-finish.js is the one that trips the exception checker + scope.clearException(); + result = WTF::emptyString(); + } line_in = line.oneBasedInt(); column_in = column.oneBasedInt(); @@ -1562,11 +1576,13 @@ JSC_DEFINE_HOST_FUNCTION(functionStructuredClone, (JSC::JSGlobalObject * globalO if (options.isObject()) { JSC::JSObject* optionsObject = options.getObject(); JSC::JSValue transferListValue = optionsObject->get(globalObject, vm.propertyNames->transfer); + RETURN_IF_EXCEPTION(throwScope, {}); if (transferListValue.isObject()) { JSC::JSObject* transferListObject = transferListValue.getObject(); if (auto* transferListArray = jsDynamicCast(transferListObject)) { for (unsigned i = 0; i < transferListArray->length(); i++) { JSC::JSValue transferListValue = transferListArray->get(globalObject, i); + RETURN_IF_EXCEPTION(throwScope, {}); if (transferListValue.isObject()) { JSC::JSObject* transferListObject = transferListValue.getObject(); transferList.append(JSC::Strong(vm, transferListObject)); @@ -1580,10 +1596,12 @@ JSC_DEFINE_HOST_FUNCTION(functionStructuredClone, (JSC::JSGlobalObject * globalO ExceptionOr> serialized = SerializedScriptValue::create(*globalObject, value, WTFMove(transferList), ports); if (serialized.hasException()) { WebCore::propagateException(*globalObject, throwScope, serialized.releaseException()); - return JSValue::encode(jsUndefined()); + RELEASE_AND_RETURN(throwScope, {}); } + throwScope.assertNoException(); JSValue deserialized = serialized.releaseReturnValue()->deserialize(*globalObject, globalObject, ports); + RETURN_IF_EXCEPTION(throwScope, {}); return JSValue::encode(deserialized); } @@ -1735,11 +1753,7 @@ extern "C" JSC::EncodedJSValue Bun__allocUint8ArrayForCopy(JSC::JSGlobalObject* auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); JSC::JSUint8Array* array = JSC::JSUint8Array::createUninitialized(globalObject, globalObject->m_typedArrayUint8.get(globalObject), len); - - if (!array) [[unlikely]] { - JSC::throwOutOfMemoryError(globalObject, scope); - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); *ptr = array->vector(); @@ -1754,10 +1768,7 @@ extern "C" JSC::EncodedJSValue Bun__allocArrayBufferForCopy(JSC::JSGlobalObject* auto* subclassStructure = globalObject->JSBufferSubclassStructure(); auto buf = JSC::JSUint8Array::createUninitialized(lexicalGlobalObject, subclassStructure, len); - - if (!buf) [[unlikely]] { - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); *ptr = buf->vector(); @@ -1771,11 +1782,7 @@ extern "C" JSC::EncodedJSValue Bun__createUint8ArrayForCopy(JSC::JSGlobalObject* auto* subclassStructure = isBuffer ?
static_cast(globalObject)->JSBufferSubclassStructure() : globalObject->typedArrayStructureWithTypedArrayType(); JSC::JSUint8Array* array = JSC::JSUint8Array::createUninitialized(globalObject, subclassStructure, len); - - if (!array) [[unlikely]] { - JSC::throwOutOfMemoryError(globalObject, scope); - return {}; - } + RETURN_IF_EXCEPTION(scope, {}); if (len > 0 && ptr != nullptr) memcpy(array->vector(), ptr, len); @@ -2011,12 +2018,12 @@ static inline std::optional invokeReadableStreamFunction(JSC::JSGl auto callData = JSC::getCallData(function); auto result = call(&lexicalGlobalObject, function, callData, thisValue, arguments); #if ASSERT_ENABLED - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { Bun__reportError(&lexicalGlobalObject, JSValue::encode(scope.exception())); } #endif EXCEPTION_ASSERT(!scope.exception() || vm.hasPendingTerminationException()); - if (scope.exception()) + if (scope.exception()) [[unlikely]] return {}; return result; } @@ -2104,11 +2111,13 @@ extern "C" int32_t ReadableStreamTag__tagged(Zig::GlobalObject* globalObject, JS JSValue target = object; JSValue fn = JSValue(); auto* function = jsDynamicCast(object); - if (function && function->jsExecutable() && function->jsExecutable()->isAsyncGenerator()) { + if (function && !function->isHostFunction() && function->jsExecutable() && function->jsExecutable()->isAsyncGenerator()) { fn = object; target = jsUndefined(); - } else if (auto iterable = object->getIfPropertyExists(globalObject, vm.propertyNames->asyncIteratorSymbol)) { - if (iterable.isCallable()) { + } else { + auto iterable = object->getIfPropertyExists(globalObject, vm.propertyNames->asyncIteratorSymbol); + RETURN_IF_EXCEPTION(throwScope, {}); + if (iterable && iterable.isCallable()) { fn = iterable; } } @@ -2189,7 +2198,9 @@ extern "C" JSC::EncodedJSValue ZigGlobalObject__createNativeReadableStream(Zig:: arguments.append(JSValue::decode(nativePtr)); auto callData = JSC::getCallData(function); - return JSC::JSValue::encode(call(globalObject, function, callData, JSC::jsUndefined(), arguments)); + auto result = call(globalObject, function, callData, JSC::jsUndefined(), arguments); + EXCEPTION_ASSERT(!!scope.exception() == !result); + return JSValue::encode(result); } extern "C" JSC::EncodedJSValue Bun__Jest__createTestModuleObject(JSC::JSGlobalObject*); @@ -2242,13 +2253,11 @@ static inline JSC::EncodedJSValue ZigGlobalObject__readableStreamToArrayBufferBo RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(promise)); } -extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToArrayBuffer(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue); extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToArrayBuffer(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue) { return ZigGlobalObject__readableStreamToArrayBufferBody(static_cast(globalObject), readableStreamValue); } -extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToBytes(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue); extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToBytes(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue) { auto& vm = JSC::getVM(globalObject); @@ -2288,7 +2297,6 @@ extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToBytes(Zig::Globa RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(promise)); } -extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToText(Zig::GlobalObject* globalObject, JSC::EncodedJSValue 
readableStreamValue); extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToText(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue) { auto& vm = JSC::getVM(globalObject); @@ -2330,7 +2338,6 @@ extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToFormData(Zig::Gl return JSC::JSValue::encode(call(globalObject, function, callData, JSC::jsUndefined(), arguments)); } -extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToJSON(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue); extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToJSON(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue) { auto& vm = JSC::getVM(globalObject); @@ -2663,6 +2670,7 @@ JSC_DEFINE_HOST_FUNCTION(errorConstructorFuncCaptureStackTrace, (JSC::JSGlobalOb OrdinalNumber column; String sourceURL; JSValue result = computeErrorInfoToJSValue(vm, stackTrace, line, column, sourceURL, errorObject); + RETURN_IF_EXCEPTION(scope, {}); errorObject->putDirect(vm, vm.propertyNames->stack, result, 0); } @@ -2931,16 +2939,20 @@ void GlobalObject::finishCreation(VM& vm) m_JSBufferSubclassStructure.initLater( [](const Initializer& init) { + auto scope = DECLARE_CATCH_SCOPE(init.vm); auto* globalObject = static_cast(init.owner); auto* baseStructure = globalObject->typedArrayStructureWithTypedArrayType(); JSC::Structure* subclassStructure = JSC::InternalFunction::createSubclassStructure(globalObject, globalObject->JSBufferConstructor(), baseStructure); + scope.assertNoExceptionExceptTermination(); init.set(subclassStructure); }); m_JSResizableOrGrowableSharedBufferSubclassStructure.initLater( [](const Initializer& init) { + auto scope = DECLARE_CATCH_SCOPE(init.vm); auto* globalObject = static_cast(init.owner); auto* baseStructure = globalObject->resizableOrGrowableSharedTypedArrayStructureWithTypedArrayType(); JSC::Structure* subclassStructure = JSC::InternalFunction::createSubclassStructure(globalObject, globalObject->JSBufferConstructor(), baseStructure); + scope.assertNoExceptionExceptTermination(); init.set(subclassStructure); }); m_performMicrotaskFunction.initLater( @@ -2955,9 +2967,14 @@ void GlobalObject::finishCreation(VM& vm) m_utilInspectFunction.initLater( [](const Initializer& init) { + auto scope = DECLARE_THROW_SCOPE(init.vm); JSValue nodeUtilValue = jsCast(init.owner)->internalModuleRegistry()->requireId(init.owner, init.vm, Bun::InternalModuleRegistry::Field::NodeUtil); + RETURN_IF_EXCEPTION(scope, ); RELEASE_ASSERT(nodeUtilValue.isObject()); - init.set(jsCast(nodeUtilValue.getObject()->getIfPropertyExists(init.owner, Identifier::fromString(init.vm, "inspect"_s)))); + auto prop = nodeUtilValue.getObject()->getIfPropertyExists(init.owner, Identifier::fromString(init.vm, "inspect"_s)); + RETURN_IF_EXCEPTION(scope, ); + ASSERT(prop); + init.set(jsCast(prop)); }); m_utilInspectOptionsStructure.initLater( @@ -2976,23 +2993,23 @@ void GlobalObject::finishCreation(VM& vm) m_utilInspectStylizeColorFunction.initLater( [](const Initializer& init) { + auto scope = DECLARE_THROW_SCOPE(init.vm); JSC::MarkedArgumentBuffer args; args.append(jsCast(init.owner)->utilInspectFunction()); + RETURN_IF_EXCEPTION(scope, ); - auto scope = DECLARE_THROW_SCOPE(init.vm); JSC::JSFunction* getStylize = JSC::JSFunction::create(init.vm, init.owner, utilInspectGetStylizeWithColorCodeGenerator(init.vm), init.owner); - // RETURN_IF_EXCEPTION(scope, {}); + RETURN_IF_EXCEPTION(scope, ); JSC::CallData callData = JSC::getCallData(getStylize); - NakedPtr 
returnedException = nullptr; auto result = JSC::profiledCall(init.owner, ProfilingReason::API, getStylize, callData, jsNull(), args, returnedException); - // RETURN_IF_EXCEPTION(scope, {}); + RETURN_IF_EXCEPTION(scope, ); if (returnedException) { throwException(init.owner, scope, returnedException.get()); } - // RETURN_IF_EXCEPTION(scope, {}); + RETURN_IF_EXCEPTION(scope, ); init.set(jsCast(result)); }); @@ -3184,9 +3201,24 @@ void GlobalObject::finishCreation(VM& vm) [](const JSC::LazyProperty::Initializer& init) { auto* global = init.owner; auto& vm = init.vm; + auto scope = DECLARE_THROW_SCOPE(vm); + + // if we get the termination exception, we'd still like to set a non-null Map so that + // we don't segfault + auto setEmpty = [&]() { + ASSERT(scope.exception()); + init.set(JSC::JSMap::create(init.vm, init.owner->mapStructure())); + }; + JSMap* registry = nullptr; - if (auto loaderValue = global->getIfPropertyExists(global, JSC::Identifier::fromString(vm, "Loader"_s))) { - if (auto registryValue = loaderValue.getObject()->getIfPropertyExists(global, JSC::Identifier::fromString(vm, "registry"_s))) { + auto loaderValue = global->getIfPropertyExists(global, JSC::Identifier::fromString(vm, "Loader"_s)); + scope.assertNoExceptionExceptTermination(); + RETURN_IF_EXCEPTION(scope, setEmpty()); + if (loaderValue) { + auto registryValue = loaderValue.getObject()->getIfPropertyExists(global, JSC::Identifier::fromString(vm, "registry"_s)); + scope.assertNoExceptionExceptTermination(); + RETURN_IF_EXCEPTION(scope, setEmpty()); + if (registryValue) { registry = jsCast(registryValue); } } @@ -3265,6 +3297,11 @@ void GlobalObject::finishCreation(VM& vm) init.set(Zig::ImportMetaObject::createStructure(init.vm, init.owner)); }); + m_importMetaBakeObjectStructure.initLater( + [](const JSC::LazyProperty::Initializer& init) { + init.set(Zig::ImportMetaObject::createStructure(init.vm, init.owner, true)); + }); + m_asyncBoundFunctionStructure.initLater( [](const JSC::LazyProperty::Initializer& init) { init.set(AsyncContextFrame::createStructure(init.vm, init.owner)); @@ -3457,8 +3494,7 @@ JSC_DEFINE_CUSTOM_GETTER(getConsoleStdout, (JSGlobalObject * globalObject, Encod // instead of calling the constructor builtin, go through the process.stdout getter to ensure it's only created once. auto stdoutValue = global->processObject()->get(globalObject, Identifier::fromString(vm, "stdout"_s)); - if (!stdoutValue) - return JSValue::encode({}); + if (!stdoutValue) return {}; console->putDirect(vm, property, stdoutValue, PropertyAttribute::DontEnum | 0); return JSValue::encode(stdoutValue); @@ -3473,8 +3509,7 @@ JSC_DEFINE_CUSTOM_GETTER(getConsoleStderr, (JSGlobalObject * globalObject, Encod // instead of calling the constructor builtin, go through the process.stdout getter to ensure it's only created once. 
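The Loader-registry initializer above combines the two idioms this PR applies throughout: `getIfPropertyExists()` can run a getter or Proxy trap and therefore throw, so the exception check must come before the emptiness test; and a lazy initializer interrupted by a termination exception must still install a usable value. A minimal sketch of the lookup half, with hypothetical names (`findRegistryMap` and its wiring are illustrative, not the exact Bun code):

```cpp
// Sketch only: exception-checked getIfPropertyExists(), as in the
// Loader-registry lazy initializer above.
static JSC::JSValue findRegistryMap(JSC::JSGlobalObject* globalObject)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);

    auto loaderValue = globalObject->getIfPropertyExists(
        globalObject, JSC::Identifier::fromString(vm, "Loader"_s));
    RETURN_IF_EXCEPTION(scope, {}); // the lookup itself may have thrown
    if (!loaderValue)               // empty means "property absent", not "error"
        return JSC::jsUndefined();

    auto registryValue = loaderValue.getObject()->getIfPropertyExists(
        globalObject, JSC::Identifier::fromString(vm, "registry"_s));
    RETURN_IF_EXCEPTION(scope, {});
    return registryValue ? registryValue : JSC::jsUndefined();
}
```

The fallback half relies on a macro detail: `RETURN_IF_EXCEPTION(scope, setEmpty())` works because the macro's second argument is an expression evaluated in its `return` statement, so a void lambda call reads as "install the empty map, then bail", and later lookups never dereference a null map.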
auto stderrValue = global->processObject()->get(globalObject, Identifier::fromString(vm, "stderr"_s)); - if (!stderrValue) - return JSValue::encode({}); + if (!stderrValue) return {}; console->putDirect(vm, property, stderrValue, PropertyAttribute::DontEnum | 0); return JSValue::encode(stderrValue); @@ -3493,14 +3528,17 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionToClass, (JSC::JSGlobalObject * globalObject, if (!base) { base = globalObject->functionPrototype(); - } else if (auto proto = base->getIfPropertyExists(globalObject, vm.propertyNames->prototype)) { - if (auto protoObject = proto.getObject()) { - prototypeBase = protoObject; - } } else { + auto proto = base->getIfPropertyExists(globalObject, vm.propertyNames->prototype); RETURN_IF_EXCEPTION(scope, encodedJSValue()); - JSC::throwTypeError(globalObject, scope, "Base class must have a prototype property"_s); - return encodedJSValue(); + if (proto) { + if (auto protoObject = proto.getObject()) { + prototypeBase = protoObject; + } + } else { + JSC::throwTypeError(globalObject, scope, "Base class must have a prototype property"_s); + return encodedJSValue(); + } } JSObject* prototype = prototypeBase ? JSC::constructEmptyObject(globalObject, prototypeBase) : JSC::constructEmptyObject(globalObject); @@ -3546,6 +3584,34 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionCheckBufferRead, (JSC::JSGlobalObject * globa } return JSValue::encode(jsUndefined()); } +extern "C" EncodedJSValue Bun__assignStreamIntoResumableSink(JSC::JSGlobalObject* globalObject, JSC::EncodedJSValue stream, JSC::EncodedJSValue sink) +{ + Zig::GlobalObject* globalThis = reinterpret_cast(globalObject); + return globalThis->assignStreamToResumableSink(JSValue::decode(stream), JSValue::decode(sink)); +} +EncodedJSValue GlobalObject::assignStreamToResumableSink(JSValue stream, JSValue sink) +{ + auto& vm = this->vm(); + JSC::JSFunction* function = this->m_assignStreamToResumableSink.get(); + if (!function) { + function = JSFunction::create(vm, this, static_cast(readableStreamInternalsAssignStreamIntoResumableSinkCodeGenerator(vm)), this); + this->m_assignStreamToResumableSink.set(vm, this, function); + } + + auto callData = JSC::getCallData(function); + JSC::MarkedArgumentBuffer arguments; + arguments.append(stream); + arguments.append(sink); + + WTF::NakedPtr returnedException = nullptr; + + auto result = JSC::profiledCall(this, ProfilingReason::API, function, callData, JSC::jsUndefined(), arguments, returnedException); + if (auto* exception = returnedException.get()) { + return JSC::JSValue::encode(exception); + } + + return JSC::JSValue::encode(result); +} EncodedJSValue GlobalObject::assignToStream(JSValue stream, JSValue controller) { @@ -3613,6 +3679,7 @@ JSValue GlobalObject_getGlobalThis(VM& vm, JSObject* globalObject) void GlobalObject::addBuiltinGlobals(JSC::VM& vm) { + auto scope = DECLARE_CATCH_SCOPE(vm); m_builtinInternalFunctions.initialize(*this); auto clientData = WebCore::clientData(vm); @@ -3726,6 +3793,8 @@ void GlobalObject::addBuiltinGlobals(JSC::VM& vm) errorConstructor->putDirectCustomAccessor(vm, JSC::Identifier::fromString(vm, "prepareStackTrace"_s), JSC::CustomGetterSetter::create(vm, errorConstructorPrepareStackTraceGetter, errorConstructorPrepareStackTraceSetter), PropertyAttribute::DontEnum | PropertyAttribute::CustomValue); JSC::JSObject* consoleObject = this->get(this, JSC::Identifier::fromString(vm, "console"_s)).getObject(); + scope.assertNoExceptionExceptTermination(); + RETURN_IF_EXCEPTION(scope, ); consoleObject->putDirectBuiltinFunction(vm, this, 
vm.propertyNames->asyncIteratorSymbol, consoleObjectAsyncIteratorCodeGenerator(vm), PropertyAttribute::Builtin | 0); consoleObject->putDirectBuiltinFunction(vm, this, clientData->builtinNames().writePublicName(), consoleObjectWriteCodeGenerator(vm), PropertyAttribute::Builtin | 0); consoleObject->putDirectCustomAccessor(vm, Identifier::fromString(vm, "Console"_s), CustomGetterSetter::create(vm, getConsoleConstructor, nullptr), PropertyAttribute::CustomValue | 0); @@ -4067,23 +4136,27 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderImportModule(JSGlobalObject* j { auto* globalObject = static_cast(jsGlobalObject); - if (JSC::JSInternalPromise* result = NodeVM::importModule(globalObject, moduleNameValue, parameters, sourceOrigin)) { - return result; + VM& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); + + { + JSC::JSInternalPromise* result = NodeVM::importModule(globalObject, moduleNameValue, parameters, sourceOrigin); + RETURN_IF_EXCEPTION(scope, nullptr); + if (result) { + return result; + } } - auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); JSC::Identifier resolvedIdentifier; auto moduleName = moduleNameValue->value(globalObject); - RETURN_IF_EXCEPTION(scope, {}); + RETURN_IF_EXCEPTION(scope, nullptr); if (globalObject->onLoadPlugins.hasVirtualModules()) { if (auto resolution = globalObject->onLoadPlugins.resolveVirtualModule(moduleName, sourceOrigin.url().protocolIsFile() ? sourceOrigin.url().fileSystemPath() : String())) { resolvedIdentifier = JSC::Identifier::fromString(vm, resolution.value()); - auto result = JSC::importModule(globalObject, resolvedIdentifier, - JSC::jsUndefined(), parameters, JSC::jsUndefined()); - if (scope.exception()) { + auto result = JSC::importModule(globalObject, resolvedIdentifier, JSC::jsUndefined(), parameters, JSC::jsUndefined()); + if (scope.exception()) [[unlikely]] { auto* promise = JSC::JSInternalPromise::create(vm, globalObject->internalPromiseStructure()); return promise->rejectWithCaughtException(globalObject, scope); } @@ -4156,10 +4229,14 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderImportModule(JSGlobalObject* j // Therefore, we modify it in place. if (parameters && parameters.isObject()) { auto* object = parameters.toObject(globalObject); - if (auto withObject = object->getIfPropertyExists(globalObject, vm.propertyNames->withKeyword)) { + auto withObject = object->getIfPropertyExists(globalObject, vm.propertyNames->withKeyword); + RETURN_IF_EXCEPTION(scope, {}); + if (withObject) { if (withObject.isObject()) { auto* with = jsCast(withObject); - if (auto type = with->getIfPropertyExists(globalObject, vm.propertyNames->type)) { + auto type = with->getIfPropertyExists(globalObject, vm.propertyNames->type); + RETURN_IF_EXCEPTION(scope, {}); + if (type) { if (type.isString()) { const auto typeString = type.toWTFString(globalObject); parameters = JSC::JSScriptFetchParameters::create(vm, ScriptFetchParameters::create(typeString)); @@ -4171,7 +4248,7 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderImportModule(JSGlobalObject* j auto result = JSC::importModule(globalObject, resolvedIdentifier, JSC::jsUndefined(), parameters, jsUndefined()); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { return JSC::JSInternalPromise::rejectedPromiseWithCaughtException(globalObject, scope); } @@ -4242,6 +4319,8 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderFetch(JSGlobalObject* globalOb &source, typeAttributeString.isEmpty() ? 
nullptr : &typeAttribute); + RETURN_IF_EXCEPTION(scope, rejectedInternalPromise(globalObject, scope.exception()->value())); + ASSERT(result); if (auto* internalPromise = JSC::jsDynamicCast(result)) { return internalPromise; } else if (auto* promise = JSC::jsDynamicCast(result)) { @@ -4365,18 +4444,14 @@ GlobalObject::PromiseFunctions GlobalObject::promiseHandlerID(Zig::FFIFunction h return GlobalObject::PromiseFunctions::Bun__NodeHTTPRequest__onResolve; } else if (handler == Bun__NodeHTTPRequest__onReject) { return GlobalObject::PromiseFunctions::Bun__NodeHTTPRequest__onReject; - } else if (handler == Bun__FetchTasklet__onResolveRequestStream) { - return GlobalObject::PromiseFunctions::Bun__FetchTasklet__onResolveRequestStream; - } else if (handler == Bun__FetchTasklet__onRejectRequestStream) { - return GlobalObject::PromiseFunctions::Bun__FetchTasklet__onRejectRequestStream; - } else if (handler == Bun__S3UploadStream__onResolveRequestStream) { - return GlobalObject::PromiseFunctions::Bun__S3UploadStream__onResolveRequestStream; - } else if (handler == Bun__S3UploadStream__onRejectRequestStream) { - return GlobalObject::PromiseFunctions::Bun__S3UploadStream__onRejectRequestStream; } else if (handler == Bun__FileStreamWrapper__onResolveRequestStream) { return GlobalObject::PromiseFunctions::Bun__FileStreamWrapper__onResolveRequestStream; } else if (handler == Bun__FileStreamWrapper__onRejectRequestStream) { return GlobalObject::PromiseFunctions::Bun__FileStreamWrapper__onRejectRequestStream; + } else if (handler == Bun__FileSink__onResolveStream) { + return GlobalObject::PromiseFunctions::Bun__FileSink__onResolveStream; + } else if (handler == Bun__FileSink__onRejectStream) { + return GlobalObject::PromiseFunctions::Bun__FileSink__onRejectStream; } else { RELEASE_ASSERT_NOT_REACHED(); } diff --git a/src/bun.js/bindings/ZigGlobalObject.h b/src/bun.js/bindings/ZigGlobalObject.h index f086237c67..b80fefee49 100644 --- a/src/bun.js/bindings/ZigGlobalObject.h +++ b/src/bun.js/bindings/ZigGlobalObject.h @@ -289,12 +289,14 @@ public: JSC::JSFunction* requireESMFromHijackedExtension() const { return m_commonJSRequireESMFromHijackedExtensionFunction.getInitializedOnMainThread(this); } Structure* NodeVMGlobalObjectStructure() const { return m_cachedNodeVMGlobalObjectStructure.getInitializedOnMainThread(this); } + Structure* NodeVMSpecialSandboxStructure() const { return m_cachedNodeVMSpecialSandboxStructure.getInitializedOnMainThread(this); } Structure* globalProxyStructure() const { return m_cachedGlobalProxyStructure.getInitializedOnMainThread(this); } JSObject* lazyTestModuleObject() const { return m_lazyTestModuleObject.getInitializedOnMainThread(this); } JSObject* lazyPreloadTestModuleObject() const { return m_lazyPreloadTestModuleObject.getInitializedOnMainThread(this); } Structure* CommonJSModuleObjectStructure() const { return m_commonJSModuleObjectStructure.getInitializedOnMainThread(this); } Structure* JSSocketAddressDTOStructure() const { return m_JSSocketAddressDTOStructure.getInitializedOnMainThread(this); } Structure* ImportMetaObjectStructure() const { return m_importMetaObjectStructure.getInitializedOnMainThread(this); } + Structure* ImportMetaBakeObjectStructure() const { return m_importMetaBakeObjectStructure.getInitializedOnMainThread(this); } Structure* AsyncContextFrameStructure() const { return m_asyncBoundFunctionStructure.getInitializedOnMainThread(this); } JSWeakMap* vmModuleContextMap() const { return m_vmModuleContextMap.getInitializedOnMainThread(this); } @@ -336,7 +338,7 
@@ public: JSObject* subtleCrypto() { return m_subtleCryptoObject.getInitializedOnMainThread(this); } JSC::EncodedJSValue assignToStream(JSValue stream, JSValue controller); - + JSC::EncodedJSValue assignStreamToResumableSink(JSValue stream, JSValue sink); WebCore::EventTarget& eventTarget(); WebCore::ScriptExecutionContext* m_scriptExecutionContext; @@ -373,14 +375,12 @@ public: Bun__onRejectEntryPointResult, Bun__NodeHTTPRequest__onResolve, Bun__NodeHTTPRequest__onReject, - Bun__FetchTasklet__onRejectRequestStream, - Bun__FetchTasklet__onResolveRequestStream, - Bun__S3UploadStream__onRejectRequestStream, - Bun__S3UploadStream__onResolveRequestStream, Bun__FileStreamWrapper__onRejectRequestStream, Bun__FileStreamWrapper__onResolveRequestStream, + Bun__FileSink__onResolveStream, + Bun__FileSink__onRejectStream, }; - static constexpr size_t promiseFunctionsSize = 34; + static constexpr size_t promiseFunctionsSize = 36; static PromiseFunctions promiseHandlerID(SYSV_ABI EncodedJSValue (*handler)(JSC::JSGlobalObject* arg0, JSC::CallFrame* arg1)); @@ -449,6 +449,7 @@ public: #define FOR_EACH_GLOBALOBJECT_GC_MEMBER(V) \ /* TODO: these should use LazyProperty */ \ V(private, WriteBarrier, m_assignToStream) \ + V(private, WriteBarrier, m_assignStreamToResumableSink) \ V(public, WriteBarrier, m_readableStreamToArrayBuffer) \ V(public, WriteBarrier, m_readableStreamToBytes) \ V(public, WriteBarrier, m_readableStreamToBlob) \ @@ -461,6 +462,8 @@ public: V(public, LazyPropertyOfGlobalObject, m_modulePrototypeUnderscoreCompileFunction) \ V(public, LazyPropertyOfGlobalObject, m_commonJSRequireESMFromHijackedExtensionFunction) \ V(public, LazyPropertyOfGlobalObject, m_nodeModuleConstructor) \ + V(public, LazyPropertyOfGlobalObject, m_nodeModuleSourceMapEntryStructure) \ + V(public, LazyPropertyOfGlobalObject, m_nodeModuleSourceMapOriginStructure) \ \ V(public, WriteBarrier, m_nextTickQueue) \ \ @@ -576,6 +579,7 @@ public: V(private, LazyPropertyOfGlobalObject, m_lazyPreloadTestModuleObject) \ V(public, LazyPropertyOfGlobalObject, m_testMatcherUtilsObject) \ V(public, LazyPropertyOfGlobalObject, m_cachedNodeVMGlobalObjectStructure) \ + V(public, LazyPropertyOfGlobalObject, m_cachedNodeVMSpecialSandboxStructure) \ V(private, LazyPropertyOfGlobalObject, m_cachedGlobalProxyStructure) \ V(private, LazyPropertyOfGlobalObject, m_commonJSModuleObjectStructure) \ V(private, LazyPropertyOfGlobalObject, m_JSSocketAddressDTOStructure) \ @@ -588,6 +592,7 @@ public: V(private, LazyPropertyOfGlobalObject, m_processBindingFs) \ V(private, LazyPropertyOfGlobalObject, m_processBindingHTTPParser) \ V(private, LazyPropertyOfGlobalObject, m_importMetaObjectStructure) \ + V(private, LazyPropertyOfGlobalObject, m_importMetaBakeObjectStructure) \ V(private, LazyPropertyOfGlobalObject, m_asyncBoundFunctionStructure) \ V(public, LazyPropertyOfGlobalObject, m_JSDOMFileConstructor) \ V(public, LazyPropertyOfGlobalObject, m_JSMIMEParamsConstructor) \ @@ -616,7 +621,9 @@ public: V(public, LazyPropertyOfGlobalObject, m_statValues) \ V(public, LazyPropertyOfGlobalObject, m_bigintStatValues) \ V(public, LazyPropertyOfGlobalObject, m_statFsValues) \ - V(public, LazyPropertyOfGlobalObject, m_bigintStatFsValues) + V(public, LazyPropertyOfGlobalObject, m_bigintStatFsValues) \ + V(public, LazyPropertyOfGlobalObject, m_nodeVMDontContextify) \ + V(public, LazyPropertyOfGlobalObject, m_nodeVMUseMainContextDefaultLoader) #define DECLARE_GLOBALOBJECT_GC_MEMBER(visibility, T, name) \ visibility: \ @@ -795,4 +802,10 @@ inline void* 
bunVM(Zig::GlobalObject* globalObject) JSC_DECLARE_HOST_FUNCTION(jsFunctionNotImplemented); JSC_DECLARE_HOST_FUNCTION(jsFunctionCreateFunctionThatMasqueradesAsUndefined); +extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToText(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue); +extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToArrayBuffer(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue); +extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToBytes(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue); +extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToJSON(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue); +extern "C" JSC::EncodedJSValue ZigGlobalObject__readableStreamToBlob(Zig::GlobalObject* globalObject, JSC::EncodedJSValue readableStreamValue); + #endif diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index a0a6b00a93..a731fd7857 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -191,11 +191,34 @@ enum class AsymmetricMatcherConstructorType : int8_t { }; #if ASSERT_ENABLED -#define ASSERT_NO_PENDING_EXCEPTION(globalObject) DECLARE_THROW_SCOPE(globalObject->vm()).assertNoExceptionExceptTermination() +#define ASSERT_NO_PENDING_EXCEPTION(globalObject) DECLARE_CATCH_SCOPE(globalObject->vm()).assertNoExceptionExceptTermination() #else #define ASSERT_NO_PENDING_EXCEPTION(globalObject) void() #endif +// Ensure we instantiate the true and false variants of this function +template bool Bun__deepMatch( + JSValue objValue, + std::set* seenObjProperties, + JSValue subsetValue, + std::set* seenSubsetProperties, + JSGlobalObject* globalObject, + ThrowScope* throwScope, + MarkedArgumentBuffer* gcBuffer, + bool replacePropsWithAsymmetricMatchers, + bool isMatchingObjectContaining); + +template bool Bun__deepMatch( + JSValue objValue, + std::set* seenObjProperties, + JSValue subsetValue, + std::set* seenSubsetProperties, + JSGlobalObject* globalObject, + ThrowScope* throwScope, + MarkedArgumentBuffer* gcBuffer, + bool replacePropsWithAsymmetricMatchers, + bool isMatchingObjectContaining); + extern "C" bool Expect_readFlagsAndProcessPromise(JSC::EncodedJSValue instanceValue, JSC::JSGlobalObject* globalObject, ExpectFlags* flags, JSC::EncodedJSValue* value, AsymmetricMatcherConstructorType* constructorType); extern "C" int8_t AsymmetricMatcherConstructorType__fromJS(JSC::JSGlobalObject* globalObject, JSC::EncodedJSValue encodedValue) @@ -215,21 +238,18 @@ extern "C" int8_t AsymmetricMatcherConstructorType__fromJS(JSC::JSGlobalObject* auto stringConstructorValue = globalObject->stringPrototype()->getIfPropertyExists(globalObject, vm.propertyNames->constructor); RETURN_IF_EXCEPTION(scope, -1); - if (stringConstructorValue == object) { return static_cast(AsymmetricMatcherConstructorType::String); } auto symbolConstructorValue = globalObject->symbolPrototype()->getIfPropertyExists(globalObject, vm.propertyNames->constructor); RETURN_IF_EXCEPTION(scope, -1); - if (symbolConstructorValue == object) { return static_cast(AsymmetricMatcherConstructorType::Symbol); } auto bigIntConstructorValue = globalObject->bigIntPrototype()->getIfPropertyExists(globalObject, vm.propertyNames->constructor); RETURN_IF_EXCEPTION(scope, -1); - if (bigIntConstructorValue == object) { return static_cast(AsymmetricMatcherConstructorType::BigInt); } @@ -375,7 +395,9 @@ AsymmetricMatcherResult 
matchAsymmetricMatcherAndGetFlags(JSGlobalObject* global } } - if (constructorObject->hasInstance(globalObject, otherProp)) { + bool hasInstance = constructorObject->hasInstance(globalObject, otherProp); + RETURN_IF_EXCEPTION(*throwScope, AsymmetricMatcherResult::FAIL); + if (hasInstance) { return AsymmetricMatcherResult::PASS; } @@ -453,10 +475,11 @@ AsymmetricMatcherResult matchAsymmetricMatcherAndGetFlags(JSGlobalObject* global for (unsigned n = 0; n < otherLength; n++) { JSValue otherValue = otherArray->getIndex(globalObject, n); - ThrowScope scope = DECLARE_THROW_SCOPE(globalObject->vm()); Vector, 16> stack; MarkedArgumentBuffer gcBuffer; - if (Bun__deepEquals(globalObject, expectedValue, otherValue, gcBuffer, stack, &scope, true)) { + bool foundNow = Bun__deepEquals(globalObject, expectedValue, otherValue, gcBuffer, stack, throwScope, true); + RETURN_IF_EXCEPTION(*throwScope, AsymmetricMatcherResult::FAIL); + if (foundNow) { found = true; break; } @@ -479,11 +502,12 @@ AsymmetricMatcherResult matchAsymmetricMatcherAndGetFlags(JSGlobalObject* global JSValue patternObject = expectObjectContaining->m_objectValue.get(); if (patternObject.isObject()) { if (otherProp.isObject()) { - ThrowScope scope = DECLARE_THROW_SCOPE(globalObject->vm()); // SAFETY: visited property sets are not required when // `enableAsymmetricMatchers` and `isMatchingObjectContaining` // are both true - if (Bun__deepMatch(otherProp, nullptr, patternObject, nullptr, globalObject, &scope, nullptr, false, true)) { + bool match = Bun__deepMatch(otherProp, nullptr, patternObject, nullptr, globalObject, throwScope, nullptr, false, true); + RETURN_IF_EXCEPTION(*throwScope, AsymmetricMatcherResult::FAIL); + if (match) { return AsymmetricMatcherResult::PASS; } } @@ -686,6 +710,7 @@ bool Bun__deepEquals(JSC::JSGlobalObject* globalObject, JSValue v1, JSValue v2, ASSERT(c1); ASSERT(c2); std::optional isSpecialEqual = specialObjectsDequal(globalObject, gcBuffer, stack, scope, c1, c2); + RETURN_IF_EXCEPTION(*scope, false); if (isSpecialEqual.has_value()) return std::move(*isSpecialEqual); isSpecialEqual = specialObjectsDequal(globalObject, gcBuffer, stack, scope, c2, c1); if (isSpecialEqual.has_value()) return std::move(*isSpecialEqual); @@ -755,7 +780,9 @@ bool Bun__deepEquals(JSC::JSGlobalObject* globalObject, JSValue v1, JSValue v2, JSC::PropertyNameArray a1(vm, PropertyNameMode::Symbols, PrivateSymbolMode::Exclude); JSC::PropertyNameArray a2(vm, PropertyNameMode::Symbols, PrivateSymbolMode::Exclude); JSObject::getOwnPropertyNames(o1, globalObject, a1, DontEnumPropertiesMode::Exclude); + RETURN_IF_EXCEPTION(*scope, false); JSObject::getOwnPropertyNames(o2, globalObject, a2, DontEnumPropertiesMode::Exclude); + RETURN_IF_EXCEPTION(*scope, false); size_t propertyLength = a1.size(); if constexpr (isStrict) { @@ -1000,23 +1027,28 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark } auto iter1 = JSSetIterator::create(globalObject, globalObject->setIteratorStructure(), set1, IterationKind::Keys); + RETURN_IF_EXCEPTION(*scope, {}); JSValue key1; while (iter1->next(globalObject, key1)) { - if (set2->has(globalObject, key1)) { + bool has = set2->has(globalObject, key1); + RETURN_IF_EXCEPTION(*scope, {}); + if (has) { continue; } // We couldn't find the key in the second set. This may be a false positive due to how // JSValues are represented in JSC, so we need to fall back to a linear search to be sure. 
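Each step of the Set comparison above can re-enter JS: `JSSetIterator::create` allocates, and `has()` may hash with user-visible effects, so the hunks check the scope after every call rather than once per loop. A distilled sketch of the checked walk, under the assumption of the surrounding deep-equals machinery (the real code additionally falls back to a key-wise deep-equals scan before treating a missing key as a mismatch):

```cpp
// Sketch: subset test over two JSSets with a per-call exception check.
static std::optional<bool> setKeysSubset(JSC::JSGlobalObject* globalObject,
    JSC::ThrowScope& scope, JSC::JSSet* set1, JSC::JSSet* set2)
{
    auto iter = JSC::JSSetIterator::create(globalObject,
        globalObject->setIteratorStructure(), set1, JSC::IterationKind::Keys);
    RETURN_IF_EXCEPTION(scope, {}); // allocation can throw

    JSC::JSValue key;
    while (iter->next(globalObject, key)) {
        bool present = set2->has(globalObject, key);
        RETURN_IF_EXCEPTION(scope, {}); // hashing can throw
        if (!present)
            return false;
    }
    return true;
}
```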
auto iter2 = JSSetIterator::create(globalObject, globalObject->setIteratorStructure(), set2, IterationKind::Keys); + RETURN_IF_EXCEPTION(*scope, {}); JSValue key2; bool foundMatchingKey = false; while (iter2->next(globalObject, key2)) { - if (Bun__deepEquals(globalObject, key1, key2, gcBuffer, stack, scope, false)) { + bool equal = Bun__deepEquals(globalObject, key1, key2, gcBuffer, stack, scope, false); + RETURN_IF_EXCEPTION(*scope, {}); + if (equal) { foundMatchingKey = true; break; } - RETURN_IF_EXCEPTION(*scope, false); } if (!foundMatchingKey) { @@ -1040,22 +1072,26 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark } auto iter1 = JSMapIterator::create(globalObject, globalObject->mapIteratorStructure(), map1, IterationKind::Entries); + RETURN_IF_EXCEPTION(*scope, {}); JSValue key1, value1; while (iter1->nextKeyValue(globalObject, key1, value1)) { JSValue value2 = map2->get(globalObject, key1); + RETURN_IF_EXCEPTION(*scope, {}); if (value2.isUndefined()) { // We couldn't find the key in the second map. This may be a false positive due to // how JSValues are represented in JSC, so we need to fall back to a linear search // to be sure. auto iter2 = JSMapIterator::create(globalObject, globalObject->mapIteratorStructure(), map2, IterationKind::Entries); + RETURN_IF_EXCEPTION(*scope, {}); JSValue key2; bool foundMatchingKey = false; while (iter2->nextKeyValue(globalObject, key2, value2)) { - if (Bun__deepEquals(globalObject, key1, key2, gcBuffer, stack, scope, false)) { + bool keysEqual = Bun__deepEquals(globalObject, key1, key2, gcBuffer, stack, scope, false); + RETURN_IF_EXCEPTION(*scope, {}); + if (keysEqual) { foundMatchingKey = true; break; } - RETURN_IF_EXCEPTION(*scope, false); } if (!foundMatchingKey) { @@ -1065,7 +1101,9 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark // Compare both values below. 
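The Map branch above mirrors the Set walk, and its fallback encodes the key point from the comment in the hunk: a miss from `map2->get(key1)` may be a false positive of JSValue-level key identity, so before declaring the entry missing, the code re-scans `map2` comparing keys structurally. A fragment-level sketch, where `deepEqualsKey` is a hypothetical stand-in for the `Bun__deepEquals` call with its gcBuffer/stack plumbing:

```cpp
// Sketch of the linear fallback: key1 was not found by identity, so look
// for a deep-equal key before concluding the maps differ.
auto iter2 = JSC::JSMapIterator::create(globalObject,
    globalObject->mapIteratorStructure(), map2, JSC::IterationKind::Entries);
RETURN_IF_EXCEPTION(*scope, {});

JSC::JSValue key2, value2;
bool foundMatchingKey = false;
while (iter2->nextKeyValue(globalObject, key2, value2)) {
    bool keysEqual = deepEqualsKey(globalObject, key1, key2); // hypothetical wrapper
    RETURN_IF_EXCEPTION(*scope, {});
    if (keysEqual) {
        foundMatchingKey = true; // value2 now holds the candidate to compare
        break;
    }
}
if (!foundMatchingKey)
    return false;
```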
} - if (!Bun__deepEquals(globalObject, value1, value2, gcBuffer, stack, scope, false)) { + bool valuesEqual = Bun__deepEquals(globalObject, value1, value2, gcBuffer, stack, scope, false); + RETURN_IF_EXCEPTION(*scope, {}); + if (!valuesEqual) { return false; } } @@ -1147,11 +1185,26 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark return false; } - if ( - left->errorType() != right->errorType() || // quick check on ctors (does not handle subclasses) - left->sanitizedNameString(globalObject) != right->sanitizedNameString(globalObject) || // manual `.name` changes (usually in subclasses) - left->sanitizedMessageString(globalObject) != right->sanitizedMessageString(globalObject) // `.message` - ) { + if (left->errorType() != right->errorType()) { + // quick check on ctors (does not handle subclasses) + return false; + } + + auto leftName = left->sanitizedNameString(globalObject); + RETURN_IF_EXCEPTION(*scope, {}); + auto rightName = right->sanitizedNameString(globalObject); + RETURN_IF_EXCEPTION(*scope, {}); + if (leftName != rightName) { + // manual `.name` changes (usually in subclasses) + return false; + } + + auto leftMessage = left->sanitizedMessageString(globalObject); + RETURN_IF_EXCEPTION(*scope, {}); + auto rightMessage = right->sanitizedMessageString(globalObject); + RETURN_IF_EXCEPTION(*scope, {}); + if (leftMessage != rightMessage) { + // `.message` return false; } @@ -1161,25 +1214,30 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark } } - VM& vm = globalObject->vm(); + VM& vm = JSC::getVM(globalObject); // `.cause` is non-enumerable, so it must be checked explicitly. // note that an undefined cause is different than a missing cause in // strict mode. const PropertyName cause(vm.propertyNames->cause); if constexpr (isStrict) { - if (left->hasProperty(globalObject, cause) != right->hasProperty(globalObject, cause)) { + bool leftHasCause = left->hasProperty(globalObject, cause); + RETURN_IF_EXCEPTION(*scope, {}); + bool rightHasCause = right->hasProperty(globalObject, cause); + RETURN_IF_EXCEPTION(*scope, {}); + if (leftHasCause != rightHasCause) { return false; } } auto leftCause = left->get(globalObject, cause); - RETURN_IF_EXCEPTION(*scope, false); + RETURN_IF_EXCEPTION(*scope, {}); auto rightCause = right->get(globalObject, cause); - RETURN_IF_EXCEPTION(*scope, false); - if (!Bun__deepEquals(globalObject, leftCause, rightCause, gcBuffer, stack, scope, true)) { + RETURN_IF_EXCEPTION(*scope, {}); + bool causesEqual = Bun__deepEquals(globalObject, leftCause, rightCause, gcBuffer, stack, scope, true); + RETURN_IF_EXCEPTION(*scope, {}); + if (!causesEqual) { return false; } - RETURN_IF_EXCEPTION(*scope, false); // check arbitrary enumerable properties. `.stack` is not checked. 
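The ErrorInstance comparison above was previously one fused condition; it is split because each `sanitized*String()` call can throw, and `RETURN_IF_EXCEPTION` cannot be interleaved inside a single `||` chain, which also meant the old code skipped checks between calls. The resulting shape, reduced to its essentials (a fragment in the context of the function above):

```cpp
// Sketch: sequence the fallible calls so each gets its own exception check,
// and short-circuit manually instead of with `||`.
auto leftName = left->sanitizedNameString(globalObject);
RETURN_IF_EXCEPTION(*scope, {});
auto rightName = right->sanitizedNameString(globalObject);
RETURN_IF_EXCEPTION(*scope, {});
if (leftName != rightName)
    return false; // bail before risking the next pair of fallible calls
```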
left->materializeErrorInfoIfNeeded(vm); @@ -1187,9 +1245,9 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark JSC::PropertyNameArray a1(vm, PropertyNameMode::StringsAndSymbols, PrivateSymbolMode::Exclude); JSC::PropertyNameArray a2(vm, PropertyNameMode::StringsAndSymbols, PrivateSymbolMode::Exclude); left->getPropertyNames(globalObject, a1, DontEnumPropertiesMode::Exclude); - RETURN_IF_EXCEPTION(*scope, false); + RETURN_IF_EXCEPTION(*scope, {}); right->getPropertyNames(globalObject, a2, DontEnumPropertiesMode::Exclude); - RETURN_IF_EXCEPTION(*scope, false); + RETURN_IF_EXCEPTION(*scope, {}); const size_t propertyArrayLength1 = a1.size(); const size_t propertyArrayLength2 = a2.size(); @@ -1207,14 +1265,11 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark PropertyName propertyName1 = PropertyName(i1); JSValue prop1 = left->get(globalObject, propertyName1); - RETURN_IF_EXCEPTION(*scope, false); - - if (!prop1) [[unlikely]] { - return false; - } + RETURN_IF_EXCEPTION(*scope, {}); + ASSERT(prop1); JSValue prop2 = right->getIfPropertyExists(globalObject, propertyName1); - RETURN_IF_EXCEPTION(*scope, false); + RETURN_IF_EXCEPTION(*scope, {}); if constexpr (!isStrict) { if (prop1.isUndefined() && prop2.isEmpty()) { @@ -1226,11 +1281,11 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark return false; } - if (!Bun__deepEquals(globalObject, prop1, prop2, gcBuffer, stack, scope, true)) { + bool propertiesEqual = Bun__deepEquals(globalObject, prop1, prop2, gcBuffer, stack, scope, true); + RETURN_IF_EXCEPTION(*scope, {}); + if (!propertiesEqual) { return false; } - - RETURN_IF_EXCEPTION(*scope, false); } // for the remaining properties in the other object, make sure they are undefined @@ -1240,7 +1295,7 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark PropertyName propertyName2 = PropertyName(i2); JSValue prop2 = right->getIfPropertyExists(globalObject, propertyName2); - RETURN_IF_EXCEPTION(*scope, false); + RETURN_IF_EXCEPTION(*scope, {}); if (!prop2.isUndefined()) { return false; @@ -1347,9 +1402,13 @@ std::optional specialObjectsDequal(JSC::JSGlobalObject* globalObject, Mark } JSString* s1 = c1->toStringInline(globalObject); + RETURN_IF_EXCEPTION(*scope, {}); JSString* s2 = c2->toStringInline(globalObject); + RETURN_IF_EXCEPTION(*scope, {}); - return s1->equal(globalObject, s2); + bool stringsEqual = s1->equal(globalObject, s2); + RETURN_IF_EXCEPTION(*scope, {}); + return stringsEqual; } case JSFunctionType: { return false; @@ -1542,6 +1601,7 @@ bool Bun__deepMatch( PropertyNameArray subsetProps(vm, PropertyNameMode::StringsAndSymbols, PrivateSymbolMode::Include); subsetObj->getPropertyNames(globalObject, subsetProps, DontEnumPropertiesMode::Exclude); + RETURN_IF_EXCEPTION(*throwScope, false); // TODO: add fast paths for: // - two "simple" objects (using ->forEachProperty in both) @@ -1555,6 +1615,7 @@ bool Bun__deepMatch( } PropertyNameArray objProps(vm, PropertyNameMode::StringsAndSymbols, PrivateSymbolMode::Include); obj->getPropertyNames(globalObject, objProps, DontEnumPropertiesMode::Exclude); + RETURN_IF_EXCEPTION(*throwScope, false); if (objProps.size() != subsetProps.size()) { return false; } @@ -1563,7 +1624,6 @@ bool Bun__deepMatch( for (const auto& property : subsetProps) { JSValue prop = obj->getIfPropertyExists(globalObject, property); RETURN_IF_EXCEPTION(*throwScope, false); - if (prop.isEmpty()) { return false; } @@ -1582,6 +1642,7 @@ bool Bun__deepMatch( case 
AsymmetricMatcherResult::PASS: if (replacePropsWithAsymmetricMatchers) { obj->putDirectMayBeIndex(globalObject, property, subsetProp); + RETURN_IF_EXCEPTION(*throwScope, false); } // continue to next subset prop continue; @@ -1595,6 +1656,7 @@ bool Bun__deepMatch( case AsymmetricMatcherResult::PASS: if (replacePropsWithAsymmetricMatchers) { subsetObj->putDirectMayBeIndex(globalObject, property, prop); + RETURN_IF_EXCEPTION(*throwScope, false); } // continue to next subset prop continue; @@ -1647,7 +1709,8 @@ inline bool deepEqualsWrapperImpl(JSC::EncodedJSValue a, JSC::EncodedJSValue b, auto scope = DECLARE_THROW_SCOPE(vm); Vector, 16> stack; MarkedArgumentBuffer args; - return Bun__deepEquals(global, JSC::JSValue::decode(a), JSC::JSValue::decode(b), args, stack, &scope, true); + bool result = Bun__deepEquals(global, JSC::JSValue::decode(a), JSC::JSValue::decode(b), args, stack, &scope, true); + RELEASE_AND_RETURN(scope, result); } } @@ -1668,8 +1731,8 @@ void WebCore__FetchHeaders__append(WebCore::FetchHeaders* headers, const ZigStri JSC::JSGlobalObject* lexicalGlobalObject) { auto throwScope = DECLARE_THROW_SCOPE(lexicalGlobalObject->vm()); - WebCore::propagateException(*lexicalGlobalObject, throwScope, - headers->append(Zig::toString(*arg1), Zig::toString(*arg2))); + WebCore::propagateException(*lexicalGlobalObject, throwScope, headers->append(Zig::toString(*arg1), Zig::toString(*arg2))); + RELEASE_AND_RETURN(throwScope, ); } WebCore::FetchHeaders* WebCore__FetchHeaders__cast_(JSC::EncodedJSValue JSValue0, JSC::VM* vm) { @@ -1718,8 +1781,7 @@ WebCore::FetchHeaders* WebCore__FetchHeaders__createFromJS(JSC::JSGlobalObject* // `fill` doesn't set an exception on the VM if it fails, it returns an // ExceptionOr. So we need to check for the exception and, if set, // translate it to JSValue and throw it. - WebCore::propagateException(*lexicalGlobalObject, throwScope, - headers->fill(WTFMove(init.value()))); + WebCore::propagateException(*lexicalGlobalObject, throwScope, headers->fill(WTFMove(init.value()))); // If there's an exception, it will be thrown by the above call to fill(). // in that case, let's also free the headers to make memory leaks harder. 
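`deepEqualsWrapperImpl` above switches its tail to `RELEASE_AND_RETURN`, which releases the throw scope and returns in one step: it tells the exception checker that the callee already performed its own checks, so none is owed at this site. A minimal sketch of the wrapper idiom, with a hypothetical `deepEqualsImpl` standing in for `Bun__deepEquals` and its template parameters:

```cpp
bool deepEqualsImpl(JSC::JSGlobalObject*, JSC::JSValue, JSC::JSValue,
    JSC::MarkedArgumentBuffer&, JSC::ThrowScope&); // hypothetical, exception-checked

static bool deepEqualsWrapper(JSC::JSGlobalObject* globalObject,
    JSC::JSValue a, JSC::JSValue b)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSC::MarkedArgumentBuffer gcBuffer; // keeps intermediate values alive across GC
    bool result = deepEqualsImpl(globalObject, a, b, gcBuffer, scope);
    // The callee did its own exception checks; release the scope and return
    // in one step rather than adding a redundant check here.
    RELEASE_AND_RETURN(scope, result);
}
```

If the callee threw, the result is returned with the exception still pending; the caller, not this wrapper, is responsible for checking it.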
@@ -1753,8 +1815,7 @@ JSC::EncodedJSValue WebCore__FetchHeaders__clone(WebCore::FetchHeaders* headers, auto throwScope = DECLARE_THROW_SCOPE(arg1->vm()); Zig::GlobalObject* globalObject = reinterpret_cast(arg1); auto* clone = new WebCore::FetchHeaders({ WebCore::FetchHeaders::Guard::None, {} }); - WebCore::propagateException(*arg1, throwScope, - clone->fill(*headers)); + WebCore::propagateException(*arg1, throwScope, clone->fill(*headers)); return JSC::JSValue::encode(WebCore::toJSNewlyCreated(arg1, globalObject, WTFMove(clone))); } @@ -1763,8 +1824,7 @@ WebCore::FetchHeaders* WebCore__FetchHeaders__cloneThis(WebCore::FetchHeaders* h auto throwScope = DECLARE_THROW_SCOPE(lexicalGlobalObject->vm()); auto* clone = new WebCore::FetchHeaders({ WebCore::FetchHeaders::Guard::None, {} }); clone->relaxAdoptionRequirement(); - WebCore::propagateException(*lexicalGlobalObject, throwScope, - clone->fill(*headers)); + WebCore::propagateException(*lexicalGlobalObject, throwScope, clone->fill(*headers)); return clone; } @@ -1936,8 +1996,7 @@ JSC::EncodedJSValue WebCore__FetchHeaders__createValue(JSC::JSGlobalObject* arg0 } Ref headers = WebCore::FetchHeaders::create(); - WebCore::propagateException(*arg0, throwScope, - headers->fill(WebCore::FetchHeaders::Init(WTFMove(pairs)))); + WebCore::propagateException(*arg0, throwScope, headers->fill(WebCore::FetchHeaders::Init(WTFMove(pairs)))); JSValue value = WebCore::toJSNewlyCreated(arg0, reinterpret_cast(arg0), WTFMove(headers)); @@ -2090,17 +2149,14 @@ JSC::EncodedJSValue SystemError__toErrorInstance(const SystemError* arg0, JSC::J auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); - JSC::JSValue message = JSC::jsUndefined(); + WTF::String message = WTF::emptyString(); if (err.message.tag != BunStringTag::Empty) { - message = Bun::toJS(globalObject, err.message); + message = err.message.toWTFString(); } auto& names = WebCore::builtinNames(vm); - JSC::JSValue options = JSC::jsUndefined(); - - JSC::JSObject* result = JSC::ErrorInstance::create(globalObject, globalObject->errorStructureWithErrorType(), message, options); + JSC::JSObject* result = createError(globalObject, ErrorType::Error, message); auto clientData = WebCore::clientData(vm); @@ -2120,7 +2176,7 @@ JSC::EncodedJSValue SystemError__toErrorInstance(const SystemError* arg0, JSC::J } if (err.fd >= 0) { - JSC::JSValue fd = JSC::JSValue(jsNumber(err.fd)); + JSC::JSValue fd = jsNumber(err.fd); result->putDirect(vm, names.fdPublicName(), fd, JSC::PropertyAttribute::DontDelete | 0); } @@ -2134,12 +2190,10 @@ JSC::EncodedJSValue SystemError__toErrorInstance(const SystemError* arg0, JSC::J result->putDirect(vm, names.hostnamePublicName(), hostname, JSC::PropertyAttribute::DontDelete | 0); } - result->putDirect(vm, names.errnoPublicName(), JSC::JSValue(err.errno_), JSC::PropertyAttribute::DontDelete | 0); + result->putDirect(vm, names.errnoPublicName(), jsNumber(err.errno_), JSC::PropertyAttribute::DontDelete | 0); - RETURN_IF_EXCEPTION(scope, {}); - scope.release(); - - return JSC::JSValue::encode(JSC::JSValue(result)); + ASSERT_NO_PENDING_EXCEPTION(globalObject); + return JSC::JSValue::encode(result); } JSC::EncodedJSValue SystemError__toErrorInstanceWithInfoObject(const SystemError* arg0, JSC::JSGlobalObject* globalObject) @@ -2155,16 +2209,15 @@ JSC::EncodedJSValue SystemError__toErrorInstanceWithInfoObject(const SystemError auto syscallString = err.syscall.toWTFString(); auto messageString = err.message.toWTFString(); - JSC::JSValue message = JSC::jsString(vm, makeString("A 
system error occurred: "_s, syscallString, " returned "_s, codeString, " ("_s, messageString, ")"_s)); + auto message = makeString("A system error occurred: "_s, syscallString, " returned "_s, codeString, " ("_s, messageString, ")"_s); - JSC::JSValue options = JSC::jsUndefined(); - JSC::JSObject* result = JSC::ErrorInstance::create(globalObject, JSC::ErrorInstance::createStructure(vm, globalObject, globalObject->errorPrototype()), message, options); + JSC::JSObject* result = JSC::ErrorInstance::create(vm, JSC::ErrorInstance::createStructure(vm, globalObject, globalObject->errorPrototype()), message, {}); JSC::JSObject* info = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 0); auto clientData = WebCore::clientData(vm); - result->putDirect(vm, vm.propertyNames->name, JSC::JSValue(jsString(vm, String("SystemError"_s))), JSC::PropertyAttribute::DontEnum | 0); - result->putDirect(vm, clientData->builtinNames().codePublicName(), JSC::JSValue(jsString(vm, String("ERR_SYSTEM_ERROR"_s))), JSC::PropertyAttribute::DontEnum | 0); + result->putDirect(vm, vm.propertyNames->name, jsString(vm, String("SystemError"_s)), JSC::PropertyAttribute::DontEnum | 0); + result->putDirect(vm, clientData->builtinNames().codePublicName(), jsString(vm, String("ERR_SYSTEM_ERROR"_s)), JSC::PropertyAttribute::DontEnum | 0); info->putDirect(vm, clientData->builtinNames().codePublicName(), jsString(vm, codeString), JSC::PropertyAttribute::DontDelete | 0); @@ -2178,9 +2231,9 @@ JSC::EncodedJSValue SystemError__toErrorInstanceWithInfoObject(const SystemError info->putDirect(vm, vm.propertyNames->message, jsString(vm, messageString), JSC::PropertyAttribute::DontDelete | 0); info->putDirect(vm, clientData->builtinNames().errnoPublicName(), jsNumber(err.errno_), JSC::PropertyAttribute::DontDelete | 0); - result->putDirect(vm, clientData->builtinNames().errnoPublicName(), JSC::JSValue(err.errno_), JSC::PropertyAttribute::DontDelete | 0); + result->putDirect(vm, clientData->builtinNames().errnoPublicName(), jsNumber(err.errno_), JSC::PropertyAttribute::DontDelete | 0); - RELEASE_AND_RETURN(scope, JSC::JSValue::encode(JSC::JSValue(result))); + RELEASE_AND_RETURN(scope, JSC::JSValue::encode(result)); } JSC::EncodedJSValue @@ -2284,9 +2337,11 @@ double JSC__JSValue__getLengthIfPropertyExistsInternal(JSC::EncodedJSValue value if (auto* object = jsDynamicCast(cell)) { auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); - if (JSValue lengthValue = object->getIfPropertyExists(globalObject, globalObject->vm().propertyNames->length)) { - RETURN_IF_EXCEPTION(scope, {}); - RELEASE_AND_RETURN(scope, lengthValue.toNumber(globalObject)); + scope.release(); // zig binding handles exceptions + JSValue lengthValue = object->getIfPropertyExists(globalObject, globalObject->vm().propertyNames->length); + RETURN_IF_EXCEPTION(scope, 0); + if (lengthValue) { + return lengthValue.toNumber(globalObject); } } } @@ -2397,8 +2452,7 @@ JSC::JSPromise* JSC__JSValue__asPromise(JSC::EncodedJSValue JSValue0) JSC::EncodedJSValue JSC__JSValue__createInternalPromise(JSC::JSGlobalObject* globalObject) { auto& vm = JSC::getVM(globalObject); - return JSC::JSValue::encode( - JSC::JSValue(JSC::JSInternalPromise::create(vm, globalObject->internalPromiseStructure()))); + return JSC::JSValue::encode(JSC::JSInternalPromise::create(vm, globalObject->internalPromiseStructure())); } void JSC__JSFunction__optimizeSoon(JSC::EncodedJSValue JSValue0) @@ -2427,8 +2481,11 @@ void JSC__JSValue__jsonStringify(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObje 
BunString* arg3) { ASSERT_NO_PENDING_EXCEPTION(arg1); + auto& vm = JSC::getVM(arg1); + auto scope = DECLARE_THROW_SCOPE(vm); JSC::JSValue value = JSC::JSValue::decode(JSValue0); WTF::String str = JSC::JSONStringify(arg1, value, (unsigned)arg2); + RETURN_IF_EXCEPTION(scope, ); *arg3 = Bun::toStringRef(str); } unsigned char JSC__JSValue__jsType(JSC::EncodedJSValue JSValue0) @@ -2450,7 +2507,7 @@ CPP_DECL JSC::JSString* JSC__jsTypeStringForValue(JSC::JSGlobalObject* globalObj JSC::EncodedJSValue JSC__JSPromise__asValue(JSC::JSPromise* arg0, JSC::JSGlobalObject* arg1) { - JSValue value = JSC::JSValue(arg0); + JSValue value = arg0; ASSERT_WITH_MESSAGE(!value.isEmpty(), "JSPromise.asValue() called on a empty JSValue"); ASSERT_WITH_MESSAGE(value.inherits(), "JSPromise::asValue() called on a non-promise object"); return JSC::JSValue::encode(value); @@ -2477,10 +2534,12 @@ void JSC__JSValue___then(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1 JSC::EncodedJSValue JSC__JSGlobalObject__getCachedObject(JSC::JSGlobalObject* globalObject, const ZigString* arg1) { auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); WTF::String string = Zig::toString(*arg1); auto symbol = vm.privateSymbolRegistry().symbolForKey(string); JSC::Identifier ident = JSC::Identifier::fromUid(symbol); JSC::JSValue result = globalObject->getIfPropertyExists(globalObject, ident); + RETURN_IF_EXCEPTION(scope, {}); return JSC::JSValue::encode(result); } @@ -2557,19 +2616,6 @@ bool JSC__JSValue__jestStrictDeepEquals(JSC::EncodedJSValue JSValue0, JSC::Encod #undef IMPL_DEEP_EQUALS_WRAPPER -bool JSC__JSValue__deepMatch(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* globalObject, bool replacePropsWithAsymmetricMatchers) -{ - JSValue obj = JSValue::decode(JSValue0); - JSValue subset = JSValue::decode(JSValue1); - - ThrowScope scope = DECLARE_THROW_SCOPE(globalObject->vm()); - - std::set objVisited; - std::set subsetVisited; - MarkedArgumentBuffer gcBuffer; - return Bun__deepMatch(obj, &objVisited, subset, &subsetVisited, globalObject, &scope, &gcBuffer, replacePropsWithAsymmetricMatchers, false); -} - bool JSC__JSValue__jestDeepMatch(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* globalObject, bool replacePropsWithAsymmetricMatchers) { JSValue obj = JSValue::decode(JSValue0); @@ -2580,7 +2626,7 @@ bool JSC__JSValue__jestDeepMatch(JSC::EncodedJSValue JSValue0, JSC::EncodedJSVal std::set objVisited; std::set subsetVisited; MarkedArgumentBuffer gcBuffer; - return Bun__deepMatch(obj, &objVisited, subset, &subsetVisited, globalObject, &scope, &gcBuffer, replacePropsWithAsymmetricMatchers, false); + RELEASE_AND_RETURN(scope, Bun__deepMatch(obj, &objVisited, subset, &subsetVisited, globalObject, &scope, &gcBuffer, replacePropsWithAsymmetricMatchers, false)); } extern "C" bool Bun__JSValue__isAsyncContextFrame(JSC::EncodedJSValue value) @@ -2624,7 +2670,7 @@ extern "C" JSC::EncodedJSValue Bun__JSValue__call(JSContextRef ctx, JSC::Encoded ASSERT(jsObject.isCallable()); ASSERT(callData.type != JSC::CallData::Type::None); if (callData.type == JSC::CallData::Type::None) - return JSC::JSValue::encode(JSC::JSValue()); + return {}; auto result = JSC::profiledCall(globalObject, ProfilingReason::API, jsObject, callData, jsThisObject, argList); @@ -2652,7 +2698,7 @@ JSC::EncodedJSValue JSObjectCallAsFunctionReturnValueHoldingAPILock(JSContextRef #endif if (!object) - return JSC::JSValue::encode(JSC::JSValue()); + return {}; JSC::JSObject* jsObject = 
toJS(object); JSC::JSObject* jsThisObject = toJS(thisObject); @@ -2666,13 +2712,13 @@ JSC::EncodedJSValue JSObjectCallAsFunctionReturnValueHoldingAPILock(JSContextRef auto callData = getCallData(jsObject); if (callData.type == JSC::CallData::Type::None) - return JSC::JSValue::encode(JSC::JSValue()); + return {}; NakedPtr returnedException = nullptr; auto result = call(globalObject, jsObject, callData, jsThisObject, argList, returnedException); if (returnedException.get()) { - return JSC::JSValue::encode(JSC::JSValue(returnedException.get())); + return JSC::JSValue::encode(returnedException.get()); } return JSC::JSValue::encode(result); @@ -2684,11 +2730,16 @@ JSC::EncodedJSValue JSObjectCallAsFunctionReturnValueHoldingAPILock(JSContextRef // CPP_DECL void JSC__PropertyNameArray__release(JSC__PropertyNameArray* arg0); size_t JSC__JSObject__getArrayLength(JSC::JSObject* arg0) { return arg0->getArrayLength(); } -JSC::EncodedJSValue JSC__JSObject__getIndex(JSC::EncodedJSValue jsValue, JSC::JSGlobalObject* arg1, - uint32_t arg3) +JSC::EncodedJSValue JSC__JSObject__getIndex(JSC::EncodedJSValue jsValue, JSC::JSGlobalObject* globalObject, + uint32_t index) { - ASSERT_NO_PENDING_EXCEPTION(arg1); - return JSC::JSValue::encode(JSC::JSValue::decode(jsValue).toObject(arg1)->getIndex(arg1, arg3)); + ASSERT_NO_PENDING_EXCEPTION(globalObject); + auto scope = DECLARE_THROW_SCOPE(getVM(globalObject)); + auto* object = JSC::JSValue::decode(jsValue).toObject(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto value = object->getIndex(globalObject, index); + RETURN_IF_EXCEPTION(scope, {}); + return JSC::JSValue::encode(value); } JSC::EncodedJSValue JSC__JSValue__getDirectIndex(JSC::EncodedJSValue jsValue, JSC::JSGlobalObject* arg1, @@ -2756,10 +2807,10 @@ JSC::JSObject* JSC__JSString__toObject(JSC::JSString* arg0, JSC::JSGlobalObject* extern "C" JSC::JSInternalPromise* JSModuleLoader__import(JSC::JSGlobalObject* globalObject, const BunString* moduleNameStr) { auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); + auto scope = DECLARE_CATCH_SCOPE(vm); auto* promise = JSC::importModule(globalObject, JSC::Identifier::fromString(vm, moduleNameStr->toWTFString()), jsUndefined(), jsUndefined(), jsUndefined()); - RETURN_IF_EXCEPTION(scope, {}); + EXCEPTION_ASSERT(!!scope.exception() == !promise); return promise; } @@ -2781,7 +2832,7 @@ JSC::EncodedJSValue JSC__JSModuleLoader__evaluate(JSC::JSGlobalObject* globalObj auto scope = DECLARE_THROW_SCOPE(vm); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { promise->rejectWithCaughtException(globalObject, scope); } @@ -3005,7 +3056,7 @@ JSC::EncodedJSValue JSC__JSGlobalObject__createAggregateError(JSC::JSGlobalObjec auto& vm = JSC::getVM(globalObject); auto scope = DECLARE_THROW_SCOPE(vm); - JSC::JSValue message = JSC::JSValue(JSC::jsOwnedString(vm, Zig::toString(*arg3))); + JSC::JSValue message = JSC::jsOwnedString(vm, Zig::toString(*arg3)); JSC::JSValue options = JSC::jsUndefined(); JSC::JSArray* array = nullptr; { @@ -3044,7 +3095,7 @@ JSC::EncodedJSValue ZigString__toAtomicValue(const ZigString* arg0, JSC::JSGloba } if (isTaggedUTF16Ptr(arg0->ptr)) { - if (auto impl = WTF::AtomStringImpl::lookUp(std::span { reinterpret_cast(untag(arg0->ptr)), arg0->len })) { + if (auto impl = WTF::AtomStringImpl::lookUp(std::span { reinterpret_cast(untag(arg0->ptr)), arg0->len })) { return JSC::JSValue::encode(JSC::jsString(arg1->vm(), WTF::String(WTFMove(impl)))); } } else { @@ -3053,13 +3104,13 @@ JSC::EncodedJSValue 
ZigString__toAtomicValue(const ZigString* arg0, JSC::JSGloba } } - return JSC::JSValue::encode(JSC::JSValue(JSC::jsString(arg1->vm(), makeAtomString(Zig::toStringCopy(*arg0))))); + return JSC::JSValue::encode(JSC::jsString(arg1->vm(), makeAtomString(Zig::toStringCopy(*arg0)))); } JSC::EncodedJSValue ZigString__to16BitValue(const ZigString* arg0, JSC::JSGlobalObject* arg1) { auto str = WTF::String::fromUTF8(std::span { arg0->ptr, arg0->len }); - return JSC::JSValue::encode(JSC::JSValue(JSC::jsString(arg1->vm(), str))); + return JSC::JSValue::encode(JSC::jsString(arg1->vm(), str)); } JSC::EncodedJSValue ZigString__toExternalU16(const uint16_t* arg0, size_t len, JSC::JSGlobalObject* global) @@ -3068,10 +3119,9 @@ JSC::EncodedJSValue ZigString__toExternalU16(const uint16_t* arg0, size_t len, J return JSC::JSValue::encode(JSC::jsEmptyString(global->vm())); } - auto ref = String(ExternalStringImpl::create({ reinterpret_cast(arg0), len }, reinterpret_cast(const_cast(arg0)), free_global_string)); + auto ref = String(ExternalStringImpl::create({ reinterpret_cast(arg0), len }, reinterpret_cast(const_cast(arg0)), free_global_string)); - return JSC::JSValue::encode(JSC::JSValue(JSC::jsString( - global->vm(), WTFMove(ref)))); + return JSC::JSValue::encode(JSC::jsString(global->vm(), WTFMove(ref))); } // This must be a globally allocated string JSC::EncodedJSValue ZigString__toExternalValue(const ZigString* arg0, JSC::JSGlobalObject* arg1) @@ -3083,15 +3133,12 @@ JSC::EncodedJSValue ZigString__toExternalValue(const ZigString* arg0, JSC::JSGlo } if (Zig::isTaggedUTF16Ptr(str.ptr)) { - auto ref = String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, Zig::untagVoid(str.ptr), free_global_string)); + auto ref = String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, Zig::untagVoid(str.ptr), free_global_string)); - return JSC::JSValue::encode(JSC::JSValue(JSC::jsString( - arg1->vm(), WTFMove(ref)))); + return JSC::JSValue::encode(JSC::jsString(arg1->vm(), WTFMove(ref))); } else { auto ref = String(ExternalStringImpl::create({ Zig::untag(str.ptr), str.len }, Zig::untagVoid(str.ptr), free_global_string)); - return JSC::JSValue::encode(JSC::JSValue(JSC::jsString( - arg1->vm(), - WTFMove(ref)))); + return JSC::JSValue::encode(JSC::jsString(arg1->vm(), WTFMove(ref))); } } @@ -3102,7 +3149,7 @@ VirtualMachine* JSC__JSGlobalObject__bunVM(JSC::JSGlobalObject* arg0) JSC::EncodedJSValue ZigString__toValueGC(const ZigString* arg0, JSC::JSGlobalObject* arg1) { - return JSC::JSValue::encode(JSC::JSValue(JSC::jsString(arg1->vm(), Zig::toStringCopy(*arg0)))); + return JSC::JSValue::encode(JSC::jsString(arg1->vm(), Zig::toStringCopy(*arg0))); } void JSC__JSValue__toZigString(JSC::EncodedJSValue JSValue0, ZigString* arg1, JSC::JSGlobalObject* arg2) @@ -3139,13 +3186,9 @@ JSC::EncodedJSValue ZigString__external(const ZigString* arg0, JSC::JSGlobalObje ZigString str = *arg0; if (Zig::isTaggedUTF16Ptr(str.ptr)) { - return JSC::JSValue::encode(JSC::JSValue(JSC::jsString( - arg1->vm(), - WTF::String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, arg2, ArgFn3))))); + return JSC::JSValue::encode(JSC::jsString(arg1->vm(), WTF::String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, arg2, ArgFn3)))); } else { - return JSC::JSValue::encode(JSC::JSValue(JSC::jsString( - arg1->vm(), - WTF::String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, arg2, ArgFn3))))); + return 
JSC::JSValue::encode(JSC::jsString(arg1->vm(), WTF::String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, arg2, ArgFn3)))); } } @@ -3155,13 +3198,9 @@ JSC::EncodedJSValue ZigString__toExternalValueWithCallback(const ZigString* arg0 ZigString str = *arg0; if (Zig::isTaggedUTF16Ptr(str.ptr)) { - return JSC::JSValue::encode(JSC::JSValue(JSC::jsOwnedString( - arg1->vm(), - WTF::String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, nullptr, ArgFn2))))); + return JSC::JSValue::encode(JSC::jsOwnedString(arg1->vm(), WTF::String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, nullptr, ArgFn2)))); } else { - return JSC::JSValue::encode(JSC::JSValue(JSC::jsOwnedString( - arg1->vm(), - WTF::String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, nullptr, ArgFn2))))); + return JSC::JSValue::encode(JSC::jsOwnedString(arg1->vm(), WTF::String(ExternalStringImpl::create({ reinterpret_cast(Zig::untag(str.ptr)), str.len }, nullptr, ArgFn2)))); } } @@ -3201,7 +3240,7 @@ JSC__JSModuleLoader__loadAndEvaluateModule(JSC::JSGlobalObject* globalObject, const BunString* arg1) { auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); + auto scope = DECLARE_CATCH_SCOPE(vm); auto name = makeAtomString(arg1->toWTFString()); auto* promise = JSC::loadAndEvaluateModule(globalObject, name, JSC::jsUndefined(), JSC::jsUndefined()); @@ -3212,7 +3251,9 @@ JSC__JSModuleLoader__loadAndEvaluateModule(JSC::JSGlobalObject* globalObject, JSC::JSNativeStdFunction* resolverFunction = JSC::JSNativeStdFunction::create( vm, globalObject, 1, String(), resolverFunctionCallback); - return promise->then(globalObject, resolverFunction, nullptr); + auto* newPromise = promise->then(globalObject, resolverFunction, nullptr); + EXCEPTION_ASSERT(!!scope.exception() == !newPromise); + return newPromise; } #pragma mark - JSC::JSPromise @@ -3225,17 +3266,19 @@ void JSC__AnyPromise__wrap(JSC::JSGlobalObject* globalObject, EncodedJSValue enc ASSERT(!promiseValue.isEmpty()); JSValue result = JSC::JSValue::decode(func(ctx, globalObject)); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { auto* exception = scope.exception(); scope.clearException(); if (auto* promise = jsDynamicCast(promiseValue)) { promise->reject(globalObject, exception->value()); + RETURN_IF_EXCEPTION(scope, ); return; } if (auto* promise = jsDynamicCast(promiseValue)) { promise->reject(globalObject, exception->value()); + RETURN_IF_EXCEPTION(scope, ); return; } @@ -3245,11 +3288,13 @@ void JSC__AnyPromise__wrap(JSC::JSGlobalObject* globalObject, EncodedJSValue enc if (auto* errorInstance = jsDynamicCast(result)) { if (auto* promise = jsDynamicCast(promiseValue)) { promise->reject(globalObject, errorInstance); + RETURN_IF_EXCEPTION(scope, ); return; } if (auto* promise = jsDynamicCast(promiseValue)) { promise->reject(globalObject, errorInstance); + RETURN_IF_EXCEPTION(scope, ); return; } @@ -3258,10 +3303,12 @@ void JSC__AnyPromise__wrap(JSC::JSGlobalObject* globalObject, EncodedJSValue enc if (auto* promise = jsDynamicCast(promiseValue)) { promise->resolve(globalObject, result); + RETURN_IF_EXCEPTION(scope, ); return; } if (auto* promise = jsDynamicCast(promiseValue)) { promise->resolve(globalObject, result); + RETURN_IF_EXCEPTION(scope, ); return; } @@ -3271,24 +3318,24 @@ void JSC__AnyPromise__wrap(JSC::JSGlobalObject* globalObject, EncodedJSValue enc JSC::EncodedJSValue JSC__JSPromise__wrap(JSC::JSGlobalObject* globalObject, 
void* ctx, JSC::EncodedJSValue (*func)(void*, JSC::JSGlobalObject*)) { auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_CATCH_SCOPE(vm); + auto scope = DECLARE_THROW_SCOPE(vm); JSValue result = JSC::JSValue::decode(func(ctx, globalObject)); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { auto* exception = scope.exception(); scope.clearException(); - return JSValue::encode(JSC::JSPromise::rejectedPromise(globalObject, exception->value())); + RELEASE_AND_RETURN(scope, JSValue::encode(JSC::JSPromise::rejectedPromise(globalObject, exception->value()))); } if (auto* promise = jsDynamicCast(result)) { - return JSValue::encode(promise); + RELEASE_AND_RETURN(scope, JSValue::encode(promise)); } if (JSC::ErrorInstance* err = jsDynamicCast(result)) { - return JSValue::encode(JSC::JSPromise::rejectedPromise(globalObject, err)); + RELEASE_AND_RETURN(scope, JSValue::encode(JSC::JSPromise::rejectedPromise(globalObject, err))); } - return JSValue::encode(JSC::JSPromise::resolvedPromise(globalObject, result)); + RELEASE_AND_RETURN(scope, JSValue::encode(JSC::JSPromise::resolvedPromise(globalObject, result))); } void JSC__JSPromise__reject(JSC::JSPromise* arg0, JSC::JSGlobalObject* globalObject, @@ -3597,6 +3644,19 @@ void JSC__JSValue__put(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1, object->putDirect(arg1->vm(), Zig::toIdentifier(*arg2, arg1), JSC::JSValue::decode(JSValue3)); } +void JSC__JSValue__putToPropertyKey(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1, JSC::EncodedJSValue arg2, JSC::EncodedJSValue arg3) +{ + auto& vm = JSC::getVM(arg1); + auto scope = DECLARE_THROW_SCOPE(vm); + auto obj = JSValue::decode(JSValue0); + auto key = JSValue::decode(arg2); + auto value = JSValue::decode(arg3); + auto object = obj.asCell()->getObject(); + auto pkey = key.toPropertyKey(arg1); + RETURN_IF_EXCEPTION(scope, ); + object->putDirectMayBeIndex(arg1, pkey, value); +} + extern "C" void JSC__JSValue__putMayBeIndex(JSC::EncodedJSValue target, JSC::JSGlobalObject* globalObject, const BunString* key, JSC::EncodedJSValue value) { auto& vm = JSC::getVM(globalObject); @@ -3817,7 +3877,7 @@ uint8_t JSC__JSValue__asBigIntCompare(JSC::EncodedJSValue JSValue0, JSC::JSGloba JSC::EncodedJSValue JSC__JSValue__fromInt64NoTruncate(JSC::JSGlobalObject* globalObject, int64_t val) { - return JSC::JSValue::encode(JSC::JSValue(JSC::JSBigInt::createFrom(globalObject, val))); + return JSC::JSValue::encode(JSC::JSBigInt::createFrom(globalObject, val)); } JSC::EncodedJSValue JSC__JSValue__fromTimevalNoTruncate(JSC::JSGlobalObject* globalObject, int64_t nsec, int64_t sec) @@ -3849,7 +3909,7 @@ JSC::EncodedJSValue JSC__JSValue__bigIntSum(JSC::JSGlobalObject* globalObject, J JSC::EncodedJSValue JSC__JSValue__fromUInt64NoTruncate(JSC::JSGlobalObject* globalObject, uint64_t val) { - return JSC::JSValue::encode(JSC::JSValue(JSC::JSBigInt::createFrom(globalObject, val))); + return JSC::JSValue::encode(JSC::JSBigInt::createFrom(globalObject, val)); } uint64_t JSC__JSValue__toUInt64NoTruncate(JSC::EncodedJSValue val) @@ -3882,6 +3942,8 @@ JSC::EncodedJSValue JSC__JSValue__createObject2(JSC::JSGlobalObject* globalObjec const ZigString* arg2, JSC::EncodedJSValue JSValue3, JSC::EncodedJSValue JSValue4) { + auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); JSC::JSObject* object = JSC::constructEmptyObject(globalObject); auto key1 = Zig::toIdentifier(*arg1, globalObject); JSC::PropertyDescriptor descriptor1; @@ -3901,8 +3963,10 @@ JSC::EncodedJSValue 
JSC__JSValue__createObject2(JSC::JSGlobalObject* globalObjec object->methodTable() ->defineOwnProperty(object, globalObject, key2, descriptor2, true); + RETURN_IF_EXCEPTION(scope, {}); object->methodTable() ->defineOwnProperty(object, globalObject, key1, descriptor1, true); + RETURN_IF_EXCEPTION(scope, {}); return JSC::JSValue::encode(object); } @@ -3974,15 +4038,16 @@ extern "C" JSC::EncodedJSValue JSC__JSValue__getOwn(JSC::EncodedJSValue JSValue0 ASSERT_NO_PENDING_EXCEPTION(globalObject); VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); JSValue value = JSC::JSValue::decode(JSValue0); WTF::String propertyNameString = propertyName->tag == BunStringTag::Empty ? WTF::emptyString() : propertyName->toWTFString(BunString::ZeroCopy); auto identifier = JSC::Identifier::fromString(vm, propertyNameString); auto property = JSC::PropertyName(identifier); PropertySlot slot(value, PropertySlot::InternalMethodType::GetOwnProperty); if (value.getOwnPropertySlot(globalObject, property, slot)) { - return JSValue::encode(slot.getValue(globalObject, property)); + RELEASE_AND_RETURN(scope, JSValue::encode(slot.getValue(globalObject, property))); } - return JSValue::encode({}); + RELEASE_AND_RETURN(scope, {}); } JSC::EncodedJSValue JSC__JSValue__getIfPropertyExistsFromPath(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* globalObject, JSC::EncodedJSValue arg1) @@ -3995,10 +4060,13 @@ JSC::EncodedJSValue JSC__JSValue__getIfPropertyExistsFromPath(JSC::EncodedJSValu if (path.isString()) { String pathString = path.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); uint32_t length = pathString.length(); if (length == 0) { - JSValue prop = value.toObject(globalObject)->getIfPropertyExists(globalObject, PropertyName(Identifier::EmptyIdentifier)); + auto* valueObject = value.toObject(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + JSValue prop = valueObject->getIfPropertyExists(globalObject, PropertyName(Identifier::EmptyIdentifier)); RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(prop); } @@ -4013,7 +4081,9 @@ JSC::EncodedJSValue JSC__JSValue__getIfPropertyExistsFromPath(JSC::EncodedJSValu // if "." is the only character, it will search for an empty string twice. if (pathString.characterAt(0) == '.') { - currProp = currProp.toObject(globalObject)->getIfPropertyExists(globalObject, PropertyName(Identifier::EmptyIdentifier)); + auto* currPropObject = currProp.toObject(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + currProp = currPropObject->getIfPropertyExists(globalObject, PropertyName(Identifier::EmptyIdentifier)); RETURN_IF_EXCEPTION(scope, {}); if (currProp.isEmpty()) { return JSValue::encode(currProp); @@ -4021,29 +4091,33 @@ JSC::EncodedJSValue JSC__JSValue__getIfPropertyExistsFromPath(JSC::EncodedJSValu } while (i < length) { - UChar ic = pathString.characterAt(i); + char16_t ic = pathString.characterAt(i); while (ic == '[' || ic == ']' || ic == '.') { i += 1; if (i == length) { if (ic == '.') { - currProp = currProp.toObject(globalObject)->getIfPropertyExists(globalObject, PropertyName(Identifier::EmptyIdentifier)); + auto* currPropObject = currProp.toObject(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + currProp = currPropObject->getIfPropertyExists(globalObject, PropertyName(Identifier::EmptyIdentifier)); RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(currProp); } // nothing found. 
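// [Editor's note - illustrative sketch, not part of the patch.] The hunks above all
// apply one exception-safety pattern: each call that can re-enter JavaScript
// (toObject, toPropertyKey, defineOwnProperty, getIfPropertyExists) is followed by an
// explicit RETURN_IF_EXCEPTION check instead of being chained into a larger expression.
// A minimal binding in that style might look like this (the function name and
// parameters are hypothetical, not from this patch):
//
//   extern "C" JSC::EncodedJSValue Example__getProperty(JSC::JSGlobalObject* globalObject,
//       JSC::EncodedJSValue encodedValue, JSC::EncodedJSValue encodedKey)
//   {
//       auto scope = DECLARE_THROW_SCOPE(JSC::getVM(globalObject));
//       auto* object = JSC::JSValue::decode(encodedValue).toObject(globalObject); // may throw
//       RETURN_IF_EXCEPTION(scope, {});
//       auto key = JSC::JSValue::decode(encodedKey).toPropertyKey(globalObject); // may throw
//       RETURN_IF_EXCEPTION(scope, {});
//       JSC::JSValue result = object->getIfPropertyExists(globalObject, key); // may throw
//       RETURN_IF_EXCEPTION(scope, {});
//       return JSC::JSValue::encode(result);
//   }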
if (j == 0) { - return JSValue::encode({}); + return {}; } return JSValue::encode(currProp); } - UChar previous = ic; + char16_t previous = ic; ic = pathString.characterAt(i); if (previous == '.' && ic == '.') { - currProp = currProp.toObject(globalObject)->getIfPropertyExists(globalObject, PropertyName(Identifier::EmptyIdentifier)); + auto* currPropObject = currProp.toObject(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + currProp = currPropObject->getIfPropertyExists(globalObject, PropertyName(Identifier::EmptyIdentifier)); RETURN_IF_EXCEPTION(scope, {}); if (currProp.isEmpty()) { return JSValue::encode(currProp); @@ -4053,7 +4127,7 @@ JSC::EncodedJSValue JSC__JSValue__getIfPropertyExistsFromPath(JSC::EncodedJSValu } j = i; - UChar jc = pathString.characterAt(j); + char16_t jc = pathString.characterAt(j); while (!(jc == '[' || jc == ']' || jc == '.')) { j += 1; if (j == length) { @@ -4066,7 +4140,9 @@ JSC::EncodedJSValue JSC__JSValue__getIfPropertyExistsFromPath(JSC::EncodedJSValu String propNameStr = pathString.substring(i, j - i); PropertyName propName = PropertyName(Identifier::fromString(vm, propNameStr)); - currProp = currProp.toObject(globalObject)->getIfPropertyExists(globalObject, propName); + auto* currPropObject = currProp.toObject(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + currProp = currPropObject->getIfPropertyExists(globalObject, propName); RETURN_IF_EXCEPTION(scope, {}); if (currProp.isEmpty()) { return JSValue::encode(currProp); @@ -4081,7 +4157,9 @@ JSC::EncodedJSValue JSC__JSValue__getIfPropertyExistsFromPath(JSC::EncodedJSValu if (isArray(globalObject, path)) { // each item in array is property name, ignore dot/bracket notation JSValue currProp = value; - forEachInArrayLike(globalObject, path.toObject(globalObject), [&](JSValue item) -> bool { + auto* pathObject = path.toObject(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + forEachInArrayLike(globalObject, pathObject, [&](JSValue item) -> bool { if (!(item.isString() || item.isNumber())) { currProp = {}; return false; @@ -4092,7 +4170,9 @@ JSC::EncodedJSValue JSC__JSValue__getIfPropertyExistsFromPath(JSC::EncodedJSValu PropertyName propName = PropertyName(propNameString->toIdentifier(globalObject)); RETURN_IF_EXCEPTION(scope, {}); - currProp = currProp.toObject(globalObject)->getIfPropertyExists(globalObject, propName); + auto* currPropObject = currProp.toObject(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + currProp = currPropObject->getIfPropertyExists(globalObject, propName); RETURN_IF_EXCEPTION(scope, {}); if (currProp.isEmpty()) { return false; @@ -4100,11 +4180,11 @@ JSC::EncodedJSValue JSC__JSValue__getIfPropertyExistsFromPath(JSC::EncodedJSValu return true; }); - + RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(currProp); } - return JSValue::encode({}); + return {}; } void JSC__JSValue__getSymbolDescription(JSC::EncodedJSValue symbolValue_, JSC::JSGlobalObject* arg1, ZigString* arg2) @@ -4151,27 +4231,12 @@ int32_t JSC__JSValue__toInt32(JSC::EncodedJSValue JSValue0) return JSC::JSValue::decode(JSValue0).asInt32(); } -CPP_DECL double JSC__JSValue__coerceToDouble(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1) +CPP_DECL double Bun__JSValue__toNumber(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1) { ASSERT_NO_PENDING_EXCEPTION(arg1); - JSC::JSValue value = JSC::JSValue::decode(JSValue0); auto scope = DECLARE_THROW_SCOPE(arg1->vm()); - double result = value.toNumber(arg1); - if (scope.exception()) { - result = PNaN; - } - - return result; -} -CPP_DECL double 
Bun__JSValue__toNumber(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1, bool* had_exception) -{ - ASSERT_NO_PENDING_EXCEPTION(arg1); - auto catchScope = DECLARE_CATCH_SCOPE(arg1->vm()); double result = JSC::JSValue::decode(JSValue0).toNumber(arg1); - if (catchScope.exception()) { - *had_exception = true; - return PNaN; - } + RETURN_IF_EXCEPTION(scope, PNaN); return result; } @@ -4648,14 +4713,10 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global, bool getFromSourceURL = false; if (stackTrace != nullptr && stackTrace->size() > 0) { populateStackTrace(vm, *stackTrace, &except->stack, global, flags); - if (scope.exception()) [[unlikely]] { - scope.clearExceptionExceptTermination(); - } + } else if (err->stackTrace() != nullptr && err->stackTrace()->size() > 0) { populateStackTrace(vm, *err->stackTrace(), &except->stack, global, flags); - if (scope.exception()) [[unlikely]] { - scope.clearExceptionExceptTermination(); - } + } else { getFromSourceURL = true; } @@ -4668,72 +4729,73 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global, } if (except->type == SYNTAX_ERROR_CODE) { except->message = Bun::toStringRef(err->sanitizedMessageString(global)); + } else if (JSC::JSValue message = obj->getIfPropertyExists(global, vm.propertyNames->message)) { + except->message = Bun::toStringRef(global, message); } else { + except->message = Bun::toStringRef(err->sanitizedMessageString(global)); } - if (scope.exception()) [[unlikely]] { - scope.clearExceptionExceptTermination(); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] { + return; } except->name = Bun::toStringRef(err->sanitizedNameString(global)); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] { + return; + } except->runtime_type = err->runtimeTypeForCause(); const auto& names = builtinNames(vm); if (except->type != SYNTAX_ERROR_CODE) { - if (JSC::JSValue syscall = getNonObservable(vm, global, obj, names.syscallPublicName())) { + JSC::JSValue syscall = getNonObservable(vm, global, obj, names.syscallPublicName()); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (syscall) { if (syscall.isString()) { except->syscall = Bun::toStringRef(global, syscall); } } - if (scope.exception()) [[unlikely]] { - scope.clearExceptionExceptTermination(); - } - - if (JSC::JSValue code = getNonObservable(vm, global, obj, names.codePublicName())) { + JSC::JSValue code = getNonObservable(vm, global, obj, names.codePublicName()); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (code) { if (code.isString() || code.isNumber()) { except->system_code = Bun::toStringRef(global, code); } } - if (scope.exception()) [[unlikely]] { - scope.clearExceptionExceptTermination(); - } - - if (JSC::JSValue path = getNonObservable(vm, global, obj, names.pathPublicName())) { + JSC::JSValue path = getNonObservable(vm, global, obj, names.pathPublicName()); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (path) { if (path.isString()) { except->path = Bun::toStringRef(global, path); } } - if (scope.exception()) [[unlikely]] { - scope.clearExceptionExceptTermination(); - } - - if (JSC::JSValue fd = getNonObservable(vm, global, obj, names.fdPublicName())) { + JSC::JSValue fd = getNonObservable(vm, global, obj, names.fdPublicName()); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (fd) { if (fd.isNumber()) { except->fd = fd.toInt32(global); } } - if (scope.exception()) [[unlikely]] { - 
scope.clearExceptionExceptTermination(); - } - - if (JSC::JSValue errno_ = getNonObservable(vm, global, obj, names.errnoPublicName())) { + JSC::JSValue errno_ = getNonObservable(vm, global, obj, names.errnoPublicName()); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (errno_) { if (errno_.isNumber()) { except->errno_ = errno_.toInt32(global); } } - - if (scope.exception()) [[unlikely]] { - scope.clearExceptionExceptTermination(); - } } if (getFromSourceURL) { @@ -4742,14 +4804,15 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global, // we don't want to serialize JSC::StackFrame longer than we need to // so in this case, we parse the stack trace as a string - auto catchScope = DECLARE_CATCH_SCOPE(vm); - // This one intentionally calls getters. - if (JSC::JSValue stackValue = obj->getIfPropertyExists(global, vm.propertyNames->stack)) { + JSC::JSValue stackValue = obj->getIfPropertyExists(global, vm.propertyNames->stack); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (stackValue) { if (stackValue.isString()) { WTF::String stack = stackValue.toWTFString(global); - if (catchScope.exception()) [[unlikely]] { - catchScope.clearExceptionExceptTermination(); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] { + return; } if (!stack.isEmpty()) { @@ -4792,29 +4855,37 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global, } } } - - if (catchScope.exception()) [[unlikely]] { - catchScope.clearExceptionExceptTermination(); - } } - if (JSC::JSValue sourceURL = getNonObservable(vm, global, obj, vm.propertyNames->sourceURL)) { + JSC::JSValue sourceURL = getNonObservable(vm, global, obj, vm.propertyNames->sourceURL); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (sourceURL) { if (sourceURL.isString()) { except->stack.frames_ptr[0].source_url = Bun::toStringRef(global, sourceURL); // Take care not to make these getter calls observable. 
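// [Editor's note - illustrative sketch, not part of the patch.] The rewritten probes in
// fromErrorInstance use clearExceptionExceptTermination() as a combined "clear and keep
// going" / "stop on termination" test; its use in these hunks implies it returns false
// only when the remaining exception is a termination request. Each optional property
// therefore follows the same shape (shown here with the `code` property from above):
//
//   JSC::JSValue code = getNonObservable(vm, global, obj, names.codePublicName());
//   if (!scope.clearExceptionExceptTermination()) [[unlikely]]
//       return; // VM is terminating; stop populating the ZigException
//   if (code && (code.isString() || code.isNumber()))
//       except->system_code = Bun::toStringRef(global, code);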
- if (JSC::JSValue column = getNonObservable(vm, global, obj, vm.propertyNames->column)) { + JSC::JSValue column = getNonObservable(vm, global, obj, vm.propertyNames->column); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (column) { if (column.isNumber()) { except->stack.frames_ptr[0].position.column_zero_based = OrdinalNumber::fromOneBasedInt(column.toInt32(global)).zeroBasedInt(); } } - if (JSC::JSValue line = getNonObservable(vm, global, obj, vm.propertyNames->line)) { + JSC::JSValue line = getNonObservable(vm, global, obj, vm.propertyNames->line); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (line) { if (line.isNumber()) { except->stack.frames_ptr[0].position.line_zero_based = OrdinalNumber::fromOneBasedInt(line.toInt32(global)).zeroBasedInt(); - if (JSC::JSValue lineText = getNonObservable(vm, global, obj, builtinNames(vm).lineTextPublicName())) { + JSC::JSValue lineText = getNonObservable(vm, global, obj, builtinNames(vm).lineTextPublicName()); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; + if (lineText) { if (lineText.isString()) { if (JSC::JSString* jsStr = lineText.toStringOrNull(global)) { auto str = jsStr->value(global); @@ -4833,6 +4904,8 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global, except->stack.frames_len = 1; PropertySlot slot = PropertySlot(obj, PropertySlot::InternalMethodType::VMInquiry, &vm); except->stack.frames_ptr[0].remapped = obj->getNonIndexPropertySlot(global, names.originalLinePublicName(), slot); + if (!scope.clearExceptionExceptTermination()) [[unlikely]] + return; } } } @@ -4852,7 +4925,11 @@ void exceptionFromString(ZigException* except, JSC::JSValue value, JSC::JSGlobal // Fallback case for when it's a user-defined ErrorLike-object that doesn't inherit from // ErrorInstance if (JSC::JSObject* obj = JSC::jsDynamicCast(value)) { - if (auto name_value = obj->getIfPropertyExists(global, vm.propertyNames->name)) { + auto name_value = obj->getIfPropertyExists(global, vm.propertyNames->name); + if (scope.exception()) [[unlikely]] { + scope.clearExceptionExceptTermination(); + } + if (name_value) { if (name_value.isString()) { auto name_str = name_value.toWTFString(global); except->name = Bun::toStringRef(name_str); @@ -4876,25 +4953,24 @@ void exceptionFromString(ZigException* except, JSC::JSValue value, JSC::JSGlobal } } + auto message = obj->getIfPropertyExists(global, vm.propertyNames->message); if (scope.exception()) [[unlikely]] { scope.clearExceptionExceptTermination(); } - - if (JSC::JSValue message = obj->getIfPropertyExists(global, vm.propertyNames->message)) { + if (message) { if (message.isString()) { except->message = Bun::toStringRef( message.toWTFString(global)); } } + auto sourceURL = obj->getIfPropertyExists(global, vm.propertyNames->sourceURL); if (scope.exception()) [[unlikely]] { scope.clearExceptionExceptTermination(); } - - if (JSC::JSValue sourceURL = obj->getIfPropertyExists(global, vm.propertyNames->sourceURL)) { + if (sourceURL) { if (sourceURL.isString()) { - except->stack.frames_ptr[0].source_url = Bun::toStringRef( - sourceURL.toWTFString(global)); + except->stack.frames_ptr[0].source_url = Bun::toStringRef(sourceURL.toWTFString(global)); except->stack.frames_len = 1; } } @@ -4903,12 +4979,20 @@ void exceptionFromString(ZigException* except, JSC::JSValue value, JSC::JSGlobal scope.clearExceptionExceptTermination(); } - if (JSC::JSValue line = obj->getIfPropertyExists(global, vm.propertyNames->line)) { + auto line 
= obj->getIfPropertyExists(global, vm.propertyNames->line); + if (scope.exception()) [[unlikely]] { + scope.clearExceptionExceptTermination(); + } + if (line) { if (line.isNumber()) { except->stack.frames_ptr[0].position.line_zero_based = OrdinalNumber::fromOneBasedInt(line.toInt32(global)).zeroBasedInt(); // TODO: don't sourcemap it twice - if (auto originalLine = obj->getIfPropertyExists(global, builtinNames(vm).originalLinePublicName())) { + auto originalLine = obj->getIfPropertyExists(global, builtinNames(vm).originalLinePublicName()); + if (scope.exception()) [[unlikely]] { + scope.clearExceptionExceptTermination(); + } + if (originalLine) { if (originalLine.isNumber()) { except->stack.frames_ptr[0].position.line_zero_based = OrdinalNumber::fromOneBasedInt(originalLine.toInt32(global)).zeroBasedInt(); } @@ -5004,6 +5088,7 @@ void JSC__JSValue__getNameProperty(JSC::EncodedJSValue JSValue0, JSC::JSGlobalOb { JSC::JSObject* obj = JSC::JSValue::decode(JSValue0).getObject(); JSC::VM& vm = arg1->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); if (obj == nullptr) { arg2->len = 0; @@ -5011,6 +5096,7 @@ void JSC__JSValue__getNameProperty(JSC::EncodedJSValue JSValue0, JSC::JSGlobalOb } JSC::JSValue name = obj->getIfPropertyExists(arg1, vm.propertyNames->toStringTagSymbol); + RETURN_IF_EXCEPTION(scope, ); if (name && name.isString()) { auto str = name.toWTFString(arg1); @@ -5056,13 +5142,15 @@ extern "C" void JSC__JSValue__getName(JSC::EncodedJSValue JSValue0, JSC::JSGloba // JSC doesn't include @@toStringTag in calculated display name if (displayName.isEmpty()) { - if (auto toStringTagValue = object->getIfPropertyExists(globalObject, vm.propertyNames->toStringTagSymbol)) { + auto toStringTagValue = object->getIfPropertyExists(globalObject, vm.propertyNames->toStringTagSymbol); + RETURN_IF_EXCEPTION(scope, ); + if (toStringTagValue) { if (toStringTagValue.isString()) { displayName = toStringTagValue.toWTFString(globalObject); } } } - if (scope.exception()) + if (scope.exception()) [[unlikely]] scope.clearException(); *arg2 = Bun::toStringRef(displayName); @@ -5072,7 +5160,7 @@ JSC::EncodedJSValue JSC__JSValue__toError_(JSC::EncodedJSValue JSValue0) { JSC::JSValue value = JSC::JSValue::decode(JSValue0); if (value.isEmpty() || !value.isCell()) - return JSC::JSValue::encode({}); + return {}; JSC::JSCell* cell = value.asCell(); @@ -5089,7 +5177,7 @@ JSC::EncodedJSValue JSC__JSValue__toError_(JSC::EncodedJSValue JSValue0) } } - return JSC::JSValue::encode({}); + return {}; } void JSC__JSValue__toZigException(JSC::EncodedJSValue jsException, JSC::JSGlobalObject* global, ZigException* exception) @@ -5206,10 +5294,13 @@ void JSC__VM__setExecutionTimeLimit(JSC::VM* vm, double limit) watchdog.setTimeLimit(WTF::Seconds { limit }); } -bool JSC__JSValue__isTerminationException(JSC::EncodedJSValue JSValue0, JSC::VM* arg1) +bool JSC__JSValue__isTerminationException(JSC::EncodedJSValue JSValue0) { JSC::Exception* exception = JSC::jsDynamicCast(JSC::JSValue::decode(JSValue0)); - return exception != NULL && arg1->isTerminationException(exception); + if (exception == nullptr) + return false; + + return exception->vm().isTerminationException(exception); } extern "C" void JSC__Exception__getStackTrace(JSC::Exception* arg0, JSC::JSGlobalObject* global, ZigStackTrace* trace) @@ -5494,7 +5585,7 @@ extern "C" JSC::EncodedJSValue JSC__JSValue__fastGetOwn(JSC::EncodedJSValue JSVa return JSValue::encode(slot.getValue(globalObject, name)); } - return JSValue::encode({}); + return {}; } bool 
JSC__JSValue__toBoolean(JSC::EncodedJSValue JSValue0) @@ -5519,10 +5610,10 @@ static void JSC__JSValue__forEachPropertyImpl(JSC::EncodedJSValue JSValue0, JSC: return; auto& vm = JSC::getVM(globalObject); - auto throwScope = DECLARE_THROW_SCOPE(vm); + auto throwScopeForStackOverflowException = DECLARE_THROW_SCOPE(vm); if (!vm.isSafeToRecurse()) [[unlikely]] { - throwStackOverflowError(globalObject, throwScope); + throwStackOverflowError(globalObject, throwScopeForStackOverflowException); return; } @@ -5537,7 +5628,7 @@ static void JSC__JSValue__forEachPropertyImpl(JSC::EncodedJSValue JSValue0, JSC: if (structure->outOfLineSize() == 0 && structure->inlineSize() == 0) { fast = false; - if (JSValue proto = object->getPrototype(vm, globalObject)) { + if (JSValue proto = object->getPrototype(globalObject)) { if ((structure = proto.structureOrNull())) { prototypeObject = proto; fast = canPerformFastPropertyEnumerationForIterationBun(structure); @@ -5589,7 +5680,7 @@ restart: } // Ignore exceptions due to getters. - if (scope.exception()) + if (scope.exception()) [[unlikely]] scope.clearException(); if (!propertyValue) @@ -5612,7 +5703,7 @@ restart: }); // Propagate exceptions from callbacks. - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { return; } @@ -5669,7 +5760,7 @@ restart: if (!object->getPropertySlot(globalObject, property, slot)) continue; // Ignore exceptions from "Get" proxy traps. - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); } @@ -5719,7 +5810,7 @@ restart: } // Ignore exceptions from getters. - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); propertyValue = jsUndefined(); } @@ -5746,13 +5837,13 @@ restart: break; if (iterating == globalObject) break; - iterating = iterating->getPrototype(vm, globalObject).getObject(); + iterating = iterating->getPrototype(globalObject).getObject(); } } properties.releaseData(); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); return; } @@ -5776,13 +5867,13 @@ void JSC__JSValue__forEachPropertyOrdered(JSC::EncodedJSValue JSValue0, JSC::JSG return; auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_CATCH_SCOPE(vm); JSC::PropertyNameArray properties(vm, PropertyNameMode::StringsAndSymbols, PrivateSymbolMode::Exclude); { - auto scope = DECLARE_CATCH_SCOPE(vm); JSC::JSObject::getOwnPropertyNames(object, globalObject, properties, DontEnumPropertiesMode::Include); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); return; } @@ -5805,8 +5896,11 @@ void JSC__JSValue__forEachPropertyOrdered(JSC::EncodedJSValue JSValue0, JSC::JSG continue; JSC::PropertySlot slot(object, PropertySlot::InternalMethodType::Get); - if (!object->getPropertySlot(globalObject, property, slot)) + bool hasProperty = object->getPropertySlot(globalObject, property, slot); + scope.clearException(); + if (!hasProperty) { continue; + } if ((slot.attributes() & PropertyAttribute::DontEnum) != 0) { if (property == vm.propertyNames->underscoreProto @@ -5815,7 +5909,6 @@ void JSC__JSValue__forEachPropertyOrdered(JSC::EncodedJSValue JSValue0, JSC::JSG } JSC::JSValue propertyValue = jsUndefined(); - auto scope = DECLARE_CATCH_SCOPE(vm); if ((slot.attributes() & PropertyAttribute::DontEnum) != 0) { if ((slot.attributes() & PropertyAttribute::Accessor) != 0) { propertyValue = slot.getPureResult(); @@ -5843,6 +5936,7 @@ void JSC__JSValue__forEachPropertyOrdered(JSC::EncodedJSValue JSValue0, JSC::JSG 
ZigString key = toZigString(name); JSC::EnsureStillAliveScope ensureStillAliveScope(propertyValue); + // TODO: properly propagate exception upwards iter(globalObject, arg2, &key, JSC::JSValue::encode(propertyValue), property.isSymbol(), property.isPrivateName()); } properties.releaseData(); @@ -5986,7 +6080,7 @@ extern "C" JSC::EncodedJSValue WebCore__AbortSignal__reasonIfAborted(WebCore::Ab return JSValue::encode(signal->jsReason(*globalObject)); } - return JSValue::encode({}); + return {}; } extern "C" bool WebCore__AbortSignal__aborted(WebCore::AbortSignal* arg0) @@ -6061,7 +6155,7 @@ extern "C" JSC::EncodedJSValue JSC__JSValue__getOwnByValue(JSC::EncodedJSValue v PropertySlot slot(object, PropertySlot::InternalMethodType::GetOwnProperty); if (property.getUInt32(index)) { if (!object->getOwnPropertySlotByIndex(object, globalObject, index, slot)) - return JSC::JSValue::encode({}); + return {}; RETURN_IF_EXCEPTION(scope, {}); @@ -6070,7 +6164,7 @@ extern "C" JSC::EncodedJSValue JSC__JSValue__getOwnByValue(JSC::EncodedJSValue v auto propertyName = property.toPropertyKey(globalObject); RETURN_IF_EXCEPTION(scope, {}); if (!object->getOwnNonIndexPropertySlot(vm, object->structure(), propertyName, slot)) - return JSC::JSValue::encode({}); + return {}; RETURN_IF_EXCEPTION(scope, {}); @@ -6425,7 +6519,7 @@ extern "C" EncodedJSValue Bun__JSObject__getCodePropertyVMInquiry(JSC::JSGlobalO } auto& vm = global->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); + auto scope = DECLARE_CATCH_SCOPE(vm); if (object->type() == JSC::ProxyObjectType) [[unlikely]] { return {}; } @@ -6433,9 +6527,10 @@ extern "C" EncodedJSValue Bun__JSObject__getCodePropertyVMInquiry(JSC::JSGlobalO auto& builtinNames = WebCore::builtinNames(vm); PropertySlot slot(object, PropertySlot::InternalMethodType::VMInquiry, &vm); - ASSERT(!scope.exception()); - if (!object->getNonIndexPropertySlot(global, builtinNames.codePublicName(), slot)) { - ASSERT(!scope.exception()); + scope.assertNoExceptionExceptTermination(); + auto has = object->getNonIndexPropertySlot(global, builtinNames.codePublicName(), slot); + scope.assertNoExceptionExceptTermination(); + if (!has) { return {}; } diff --git a/src/bun.js/bindings/decodeURIComponentSIMD.cpp b/src/bun.js/bindings/decodeURIComponentSIMD.cpp index 5f8c768ead..3aedbfed3f 100644 --- a/src/bun.js/bindings/decodeURIComponentSIMD.cpp +++ b/src/bun.js/bindings/decodeURIComponentSIMD.cpp @@ -29,7 +29,7 @@ WTF::String decodeURIComponentSIMD(std::span input) const uint8_t* end = cursor + input.size(); constexpr size_t stride = SIMD::stride; - constexpr UChar replacementChar = 0xFFFD; + constexpr char16_t replacementChar = 0xFFFD; auto percentVector = SIMD::splat('%'); @@ -110,7 +110,7 @@ slow_path: continue; } - result.append(static_cast(value)); + result.append(static_cast(value)); } else if ((byte & 0xF0) == 0xE0) { // 3-byte sequence uint32_t value = byte & 0x0F; @@ -162,7 +162,7 @@ slow_path: continue; } - result.append(static_cast(value)); + result.append(static_cast(value)); } else if ((byte & 0xF8) == 0xF0) { // 4-byte sequence -> surrogate pair uint32_t value = byte & 0x07; @@ -235,8 +235,8 @@ slow_path: // Convert to surrogate pair value -= 0x10000; - result.append(static_cast(0xD800 | (value >> 10))); - result.append(static_cast(0xDC00 | (value & 0x3FF))); + result.append(static_cast(0xD800 | (value >> 10))); + result.append(static_cast(0xDC00 | (value & 0x3FF))); } else { result.append(replacementChar); cursor += (cursor + 2 < end) ? 
3 : 1; diff --git a/src/bun.js/bindings/generated_classes_list.zig b/src/bun.js/bindings/generated_classes_list.zig index 2d2639aaab..bfaf43245b 100644 --- a/src/bun.js/bindings/generated_classes_list.zig +++ b/src/bun.js/bindings/generated_classes_list.zig @@ -83,8 +83,11 @@ pub const Classes = struct { pub const DNSResolver = api.DNS.DNSResolver; pub const S3Client = webcore.S3Client; pub const S3Stat = webcore.S3Stat; + pub const ResumableFetchSink = webcore.ResumableFetchSink; + pub const ResumableS3UploadSink = webcore.ResumableS3UploadSink; pub const HTMLBundle = api.HTMLBundle; pub const RedisClient = api.Valkey; pub const BlockList = api.BlockList; pub const NativeZstd = api.NativeZstd; + pub const SourceMap = bun.sourcemap.JSSourceMap; }; diff --git a/src/bun.js/bindings/headers-handwritten.h b/src/bun.js/bindings/headers-handwritten.h index d4b56c87fc..7a2a364ef0 100644 --- a/src/bun.js/bindings/headers-handwritten.h +++ b/src/bun.js/bindings/headers-handwritten.h @@ -85,7 +85,7 @@ typedef struct BunString { typedef struct ZigErrorType { ZigErrorCode code; - void* ptr; + JSC::EncodedJSValue value; } ZigErrorType; typedef union ErrorableZigStringResult { ZigString value; @@ -390,13 +390,13 @@ extern "C" const char* Bun__version_sha; extern "C" void ZigString__freeGlobal(const unsigned char* ptr, size_t len); extern "C" size_t Bun__encoding__writeLatin1(const unsigned char* ptr, size_t len, unsigned char* to, size_t other_len, Encoding encoding); -extern "C" size_t Bun__encoding__writeUTF16(const UChar* ptr, size_t len, unsigned char* to, size_t other_len, Encoding encoding); +extern "C" size_t Bun__encoding__writeUTF16(const char16_t* ptr, size_t len, unsigned char* to, size_t other_len, Encoding encoding); extern "C" size_t Bun__encoding__byteLengthLatin1AsUTF8(const unsigned char* ptr, size_t len); -extern "C" size_t Bun__encoding__byteLengthUTF16AsUTF8(const UChar* ptr, size_t len); +extern "C" size_t Bun__encoding__byteLengthUTF16AsUTF8(const char16_t* ptr, size_t len); extern "C" int64_t Bun__encoding__constructFromLatin1(void*, const unsigned char* ptr, size_t len, Encoding encoding); -extern "C" int64_t Bun__encoding__constructFromUTF16(void*, const UChar* ptr, size_t len, Encoding encoding); +extern "C" int64_t Bun__encoding__constructFromUTF16(void*, const char16_t* ptr, size_t len, Encoding encoding); extern "C" void Bun__EventLoop__runCallback1(JSC::JSGlobalObject* global, JSC::EncodedJSValue callback, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue arg1); extern "C" void Bun__EventLoop__runCallback2(JSC::JSGlobalObject* global, JSC::EncodedJSValue callback, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue arg1, JSC::EncodedJSValue arg2); diff --git a/src/bun.js/bindings/headers.h b/src/bun.js/bindings/headers.h index 9abc67f3b4..9b3cde8af6 100644 --- a/src/bun.js/bindings/headers.h +++ b/src/bun.js/bindings/headers.h @@ -20,6 +20,7 @@ #include "root.h" #include #include "headers-handwritten.h" +#include "webcore/WebSocketDeflate.h" namespace JSC { class JSGlobalObject; @@ -199,7 +200,6 @@ CPP_DECL JSC::JSCell* JSC__JSValue__asCell(JSC::EncodedJSValue JSValue0); CPP_DECL JSC::JSInternalPromise* JSC__JSValue__asInternalPromise(JSC::EncodedJSValue JSValue0); CPP_DECL JSC::JSPromise* JSC__JSValue__asPromise(JSC::EncodedJSValue JSValue0); CPP_DECL JSC::JSString* JSC__JSValue__asString(JSC::EncodedJSValue JSValue0); -CPP_DECL double JSC__JSValue__coerceToDouble(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1); CPP_DECL int32_t 
JSC__JSValue__coerceToInt32(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1); CPP_DECL int64_t JSC__JSValue__coerceToInt64(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1); CPP_DECL JSC::EncodedJSValue JSC__JSValue__createEmptyArray(JSC::JSGlobalObject* arg0, size_t arg1); @@ -212,7 +212,6 @@ CPP_DECL JSC::EncodedJSValue JSC__JSValue__createStringArray(JSC::JSGlobalObject CPP_DECL JSC::EncodedJSValue JSC__JSValue__createTypeError(const ZigString* arg0, const ZigString* arg1, JSC::JSGlobalObject* arg2); CPP_DECL JSC::EncodedJSValue JSC__JSValue__createUninitializedUint8Array(JSC::JSGlobalObject* arg0, size_t arg1); CPP_DECL bool JSC__JSValue__deepEquals(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* arg2); -CPP_DECL bool JSC__JSValue__deepMatch(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* arg2, bool arg3); CPP_DECL bool JSC__JSValue__eqlCell(JSC::EncodedJSValue JSValue0, JSC::JSCell* arg1); CPP_DECL bool JSC__JSValue__eqlValue(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1); CPP_DECL JSC::EncodedJSValue JSC__JSValue__fastGet(JSC::EncodedJSValue JSValue0, JSC::JSGlobalObject* arg1, unsigned char arg2); @@ -257,7 +256,7 @@ CPP_DECL bool JSC__JSValue__isObject(JSC::EncodedJSValue JSValue0); CPP_DECL bool JSC__JSValue__isPrimitive(JSC::EncodedJSValue JSValue0); CPP_DECL bool JSC__JSValue__isSameValue(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* arg2); CPP_DECL bool JSC__JSValue__isSymbol(JSC::EncodedJSValue JSValue0); -CPP_DECL bool JSC__JSValue__isTerminationException(JSC::EncodedJSValue JSValue0, JSC::VM* arg1); +CPP_DECL bool JSC__JSValue__isTerminationException(JSC::EncodedJSValue JSValue0); CPP_DECL bool JSC__JSValue__isUInt32AsAnyInt(JSC::EncodedJSValue JSValue0); CPP_DECL bool JSC__JSValue__jestDeepEquals(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* arg2); CPP_DECL bool JSC__JSValue__jestDeepMatch(JSC::EncodedJSValue JSValue0, JSC::EncodedJSValue JSValue1, JSC::JSGlobalObject* arg2, bool arg3); @@ -325,16 +324,6 @@ CPP_DECL void JSC__VM__throwError(JSC::VM* arg0, JSC::JSGlobalObject* arg1, JSC: CPP_DECL void JSC__VM__throwError(JSC::VM* arg0, JSC::JSGlobalObject* arg1, JSC::EncodedJSValue JSValue2); CPP_DECL void JSC__VM__whenIdle(JSC::VM* arg0, void(* ArgFn1)()); -#pragma mark - JSC::ThrowScope - -CPP_DECL void JSC__ThrowScope__clearException(JSC::ThrowScope* arg0); -CPP_DECL JSC::Exception* JSC__ThrowScope__exception(JSC::ThrowScope* arg0); -CPP_DECL void JSC__ThrowScope__release(JSC::ThrowScope* arg0); - -#pragma mark - JSC::CatchScope - -CPP_DECL void JSC__CatchScope__clearException(JSC::CatchScope* arg0); -CPP_DECL JSC::Exception* JSC__CatchScope__exception(JSC::CatchScope* arg0); CPP_DECL void FFI__ptr__put(JSC::JSGlobalObject* arg0, JSC::EncodedJSValue JSValue1); #ifdef __cplusplus @@ -609,7 +598,7 @@ ZIG_DECL size_t Bun__WebSocketHTTPSClient__memoryCost(WebSocketHTTPSClient* arg0 ZIG_DECL void Bun__WebSocketClient__cancel(WebSocketClient* arg0); ZIG_DECL void Bun__WebSocketClient__close(WebSocketClient* arg0, uint16_t arg1, const ZigString* arg2); ZIG_DECL void Bun__WebSocketClient__finalize(WebSocketClient* arg0); -ZIG_DECL void* Bun__WebSocketClient__init(CppWebSocket* arg0, void* arg1, void* arg2, JSC::JSGlobalObject* arg3, unsigned char* arg4, size_t arg5); +ZIG_DECL void* Bun__WebSocketClient__init(CppWebSocket* arg0, void* arg1, void* arg2, JSC::JSGlobalObject* arg3, unsigned char* arg4, size_t arg5, const 
PerMessageDeflateParams* arg6); ZIG_DECL void Bun__WebSocketClient__register(JSC::JSGlobalObject* arg0, void* arg1, void* arg2); ZIG_DECL void Bun__WebSocketClient__writeBinaryData(WebSocketClient* arg0, const unsigned char* arg1, size_t arg2, unsigned char arg3); ZIG_DECL void Bun__WebSocketClient__writeString(WebSocketClient* arg0, const ZigString* arg1, unsigned char arg2); @@ -622,7 +611,7 @@ ZIG_DECL size_t Bun__WebSocketClient__memoryCost(WebSocketClient* arg0); ZIG_DECL void Bun__WebSocketClientTLS__cancel(WebSocketClientTLS* arg0); ZIG_DECL void Bun__WebSocketClientTLS__close(WebSocketClientTLS* arg0, uint16_t arg1, const ZigString* arg2); ZIG_DECL void Bun__WebSocketClientTLS__finalize(WebSocketClientTLS* arg0); -ZIG_DECL void* Bun__WebSocketClientTLS__init(CppWebSocket* arg0, void* arg1, void* arg2, JSC::JSGlobalObject* arg3, unsigned char* arg4, size_t arg5); +ZIG_DECL void* Bun__WebSocketClientTLS__init(CppWebSocket* arg0, void* arg1, void* arg2, JSC::JSGlobalObject* arg3, unsigned char* arg4, size_t arg5, const PerMessageDeflateParams* arg6); ZIG_DECL void Bun__WebSocketClientTLS__register(JSC::JSGlobalObject* arg0, void* arg1, void* arg2); ZIG_DECL void Bun__WebSocketClientTLS__writeBinaryData(WebSocketClientTLS* arg0, const unsigned char* arg1, size_t arg2, unsigned char arg3); ZIG_DECL void Bun__WebSocketClientTLS__writeString(WebSocketClientTLS* arg0, const ZigString* arg1, unsigned char arg2); @@ -640,6 +629,7 @@ ZIG_DECL JSC::EncodedJSValue Bun__Process__getExecPath(JSC::JSGlobalObject* arg0 ZIG_DECL void Bun__Process__getTitle(JSC::JSGlobalObject* arg0, ZigString* arg1); ZIG_DECL JSC::EncodedJSValue Bun__Process__setCwd(JSC::JSGlobalObject* arg0, ZigString* arg1); ZIG_DECL JSC::EncodedJSValue Bun__Process__setTitle(JSC::JSGlobalObject* arg0, ZigString* arg1); +ZIG_DECL JSC::EncodedJSValue Bun__Process__getEval(JSC::JSGlobalObject* arg0); #endif CPP_DECL ZigException ZigException__fromException(JSC::Exception* arg0); @@ -698,6 +688,9 @@ BUN_DECLARE_HOST_FUNCTION(Bun__HTTPRequestContext__onResolveStream); BUN_DECLARE_HOST_FUNCTION(Bun__NodeHTTPRequest__onResolve); BUN_DECLARE_HOST_FUNCTION(Bun__NodeHTTPRequest__onReject); +BUN_DECLARE_HOST_FUNCTION(Bun__FileSink__onResolveStream); +BUN_DECLARE_HOST_FUNCTION(Bun__FileSink__onRejectStream); + #endif #ifdef __cplusplus @@ -761,11 +754,6 @@ CPP_DECL bool JSC__CustomGetterSetter__isSetterNull(JSC::CustomGetterSetter *arg BUN_DECLARE_HOST_FUNCTION(Bun__onResolveEntryPointResult); BUN_DECLARE_HOST_FUNCTION(Bun__onRejectEntryPointResult); -BUN_DECLARE_HOST_FUNCTION(Bun__FetchTasklet__onResolveRequestStream); -BUN_DECLARE_HOST_FUNCTION(Bun__FetchTasklet__onRejectRequestStream); - -BUN_DECLARE_HOST_FUNCTION(Bun__S3UploadStream__onResolveRequestStream); -BUN_DECLARE_HOST_FUNCTION(Bun__S3UploadStream__onRejectRequestStream); BUN_DECLARE_HOST_FUNCTION(Bun__FileStreamWrapper__onResolveRequestStream); BUN_DECLARE_HOST_FUNCTION(Bun__FileStreamWrapper__onRejectRequestStream); diff --git a/src/bun.js/bindings/helpers.h b/src/bun.js/bindings/helpers.h index 1c70272776..c726b4b312 100644 --- a/src/bun.js/bindings/helpers.h +++ b/src/bun.js/bindings/helpers.h @@ -92,7 +92,7 @@ static const WTF::String toString(ZigString str) return !isTaggedUTF16Ptr(str.ptr) ? 
WTF::String(WTF::ExternalStringImpl::create({ untag(str.ptr), str.len }, untagVoid(str.ptr), free_global_string)) : WTF::String(WTF::ExternalStringImpl::create( - { reinterpret_cast(untag(str.ptr)), str.len }, untagVoid(str.ptr), free_global_string)); + { reinterpret_cast(untag(str.ptr)), str.len }, untagVoid(str.ptr), free_global_string)); } // This will fail if the string is too long. Let's make it explicit instead of an ASSERT. @@ -103,7 +103,7 @@ static const WTF::String toString(ZigString str) return !isTaggedUTF16Ptr(str.ptr) ? WTF::String(WTF::StringImpl::createWithoutCopying({ untag(str.ptr), str.len })) : WTF::String(WTF::StringImpl::createWithoutCopying( - { reinterpret_cast(untag(str.ptr)), str.len })); + { reinterpret_cast(untag(str.ptr)), str.len })); } static WTF::AtomString toAtomString(ZigString str) @@ -112,7 +112,7 @@ static WTF::AtomString toAtomString(ZigString str) if (!isTaggedUTF16Ptr(str.ptr)) { return makeAtomString(untag(str.ptr), str.len); } else { - return makeAtomString(reinterpret_cast(untag(str.ptr)), str.len); + return makeAtomString(reinterpret_cast(untag(str.ptr)), str.len); } } @@ -133,7 +133,7 @@ static const WTF::String toString(ZigString str, StringPointer ptr) return !isTaggedUTF16Ptr(str.ptr) ? WTF::String(WTF::StringImpl::createWithoutCopying({ &untag(str.ptr)[ptr.off], ptr.len })) : WTF::String(WTF::StringImpl::createWithoutCopying( - { &reinterpret_cast(untag(str.ptr))[ptr.off], ptr.len })); + { &reinterpret_cast(untag(str.ptr))[ptr.off], ptr.len })); } static const WTF::String toStringCopy(ZigString str, StringPointer ptr) @@ -153,7 +153,7 @@ static const WTF::String toStringCopy(ZigString str, StringPointer ptr) return !isTaggedUTF16Ptr(str.ptr) ? WTF::String(WTF::StringImpl::create(std::span { &untag(str.ptr)[ptr.off], ptr.len })) : WTF::String(WTF::StringImpl::create( - std::span { &reinterpret_cast(untag(str.ptr))[ptr.off], ptr.len })); + std::span { &reinterpret_cast(untag(str.ptr))[ptr.off], ptr.len })); } static const WTF::String toStringCopy(ZigString str) @@ -166,12 +166,12 @@ static const WTF::String toStringCopy(ZigString str) } if (isTaggedUTF16Ptr(str.ptr)) { - std::span out; + std::span out; auto impl = WTF::StringImpl::tryCreateUninitialized(str.len, out); if (!impl) [[unlikely]] { return WTF::String(); } - memcpy(out.data(), untag(str.ptr), str.len * sizeof(UChar)); + memcpy(out.data(), untag(str.ptr), str.len * sizeof(char16_t)); return WTF::String(WTFMove(impl)); } else { std::span out; @@ -201,7 +201,7 @@ static const ZigString ZigStringCwd = ZigString { &__dot_char, 1 }; static const BunString BunStringCwd = BunString { BunStringTag::StaticZigString, ZigStringCwd }; static const BunString BunStringEmpty = BunString { BunStringTag::Empty, nullptr }; -static const unsigned char* taggedUTF16Ptr(const UChar* ptr) +static const unsigned char* taggedUTF16Ptr(const char16_t* ptr) { return reinterpret_cast(reinterpret_cast(ptr) | (static_cast(1) << 63)); } @@ -273,7 +273,7 @@ static WTF::StringView toStringView(ZigString str) static void throwException(JSC::ThrowScope& scope, ZigErrorType err, JSC::JSGlobalObject* global) { scope.throwException(global, - JSC::Exception::create(global->vm(), JSC::JSValue((JSC::JSCell*)err.ptr))); + JSC::Exception::create(global->vm(), JSC::JSValue::decode(err.value))); } static ZigString toZigString(JSC::JSValue val, JSC::JSGlobalObject* global) @@ -281,14 +281,14 @@ static ZigString toZigString(JSC::JSValue val, JSC::JSGlobalObject* global) auto scope = DECLARE_THROW_SCOPE(global->vm()); auto* str = 
val.toString(global); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); scope.release(); return ZigStringEmpty; } auto view = str->view(global); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); scope.release(); return ZigStringEmpty; @@ -307,7 +307,7 @@ static const WTF::String toStringStatic(ZigString str) } if (isTaggedUTF16Ptr(str.ptr)) { - return WTF::String(AtomStringImpl::add(std::span { reinterpret_cast(untag(str.ptr)), str.len })); + return WTF::String(AtomStringImpl::add(std::span { reinterpret_cast(untag(str.ptr)), str.len })); } auto* untagged = untag(str.ptr); @@ -321,13 +321,13 @@ static JSC::JSValue getErrorInstance(const ZigString* str, JSC::JSGlobalObject* WTF::String message = toString(*str); if (message.isNull() && str->len > 0) [[unlikely]] { // pending exception while creating an error. - return JSC::JSValue(); + return {}; } JSC::JSObject* result = JSC::createError(globalObject, message); JSC::EnsureStillAliveScope ensureAlive(result); - return JSC::JSValue(result); + return result; } static JSC::JSValue getTypeErrorInstance(const ZigString* str, JSC::JSGlobalObject* globalObject) @@ -335,7 +335,7 @@ static JSC::JSValue getTypeErrorInstance(const ZigString* str, JSC::JSGlobalObje JSC::JSObject* result = JSC::createTypeError(globalObject, toStringCopy(*str)); JSC::EnsureStillAliveScope ensureAlive(result); - return JSC::JSValue(result); + return result; } static JSC::JSValue getSyntaxErrorInstance(const ZigString* str, JSC::JSGlobalObject* globalObject) @@ -343,7 +343,7 @@ static JSC::JSValue getSyntaxErrorInstance(const ZigString* str, JSC::JSGlobalOb JSC::JSObject* result = JSC::createSyntaxError(globalObject, toStringCopy(*str)); JSC::EnsureStillAliveScope ensureAlive(result); - return JSC::JSValue(result); + return result; } static JSC::JSValue getRangeErrorInstance(const ZigString* str, JSC::JSGlobalObject* globalObject) @@ -351,7 +351,7 @@ static JSC::JSValue getRangeErrorInstance(const ZigString* str, JSC::JSGlobalObj JSC::JSObject* result = JSC::createRangeError(globalObject, toStringCopy(*str)); JSC::EnsureStillAliveScope ensureAlive(result); - return JSC::JSValue(result); + return result; } static const JSC::Identifier toIdentifier(ZigString str, JSC::JSGlobalObject* global) diff --git a/src/bun.js/bindings/napi.cpp b/src/bun.js/bindings/napi.cpp index 48154a3293..bfaa6ceb36 100644 --- a/src/bun.js/bindings/napi.cpp +++ b/src/bun.js/bindings/napi.cpp @@ -27,6 +27,7 @@ #include #include #include +#include #include "BufferEncodingType.h" #include #include @@ -83,12 +84,12 @@ using namespace Zig; // - if env is nullptr, return napi_invalid_arg // - if there is a pending exception, return napi_pending_exception // No do..while is used as this declares a variable that other macros need to use -#define NAPI_PREAMBLE(_env) \ - NAPI_LOG_CURRENT_FUNCTION; \ - NAPI_CHECK_ARG(_env, _env); \ - /* You should not use this throw scope directly -- if you need */ \ - /* to throw or clear exceptions, make your own scope */ \ - auto napi_preamble_throw_scope__ = DECLARE_THROW_SCOPE(toJS(_env)->vm()); \ +#define NAPI_PREAMBLE(_env) \ + NAPI_LOG_CURRENT_FUNCTION; \ + NAPI_CHECK_ARG(_env, _env); \ + /* You should not use this throw scope directly -- if you need */ \ + /* to throw or clear exceptions, make your own scope */ \ + auto napi_preamble_throw_scope__ = DECLARE_THROW_SCOPE(_env->vm()); \ NAPI_RETURN_IF_EXCEPTION(_env) // Every NAPI function should use this at the start. 
It does the following: @@ -96,12 +97,12 @@ using namespace Zig; // - if env is nullptr, return napi_invalid_arg // - if there is a pending exception, return napi_pending_exception // No do..while is used as this declares a variable that other macros need to use -#define NAPI_PREAMBLE(_env) \ - NAPI_LOG_CURRENT_FUNCTION; \ - NAPI_CHECK_ARG(_env, _env); \ - /* You should not use this throw scope directly -- if you need */ \ - /* to throw or clear exceptions, make your own scope */ \ - auto napi_preamble_throw_scope__ = DECLARE_THROW_SCOPE(toJS(_env)->vm()); \ +#define NAPI_PREAMBLE(_env) \ + NAPI_LOG_CURRENT_FUNCTION; \ + NAPI_CHECK_ARG(_env, _env); \ + /* You should not use this throw scope directly -- if you need */ \ + /* to throw or clear exceptions, make your own scope */ \ + auto napi_preamble_throw_scope__ = DECLARE_THROW_SCOPE(_env->vm()); \ NAPI_RETURN_IF_EXCEPTION(_env) // Only use this for functions that need their own throw or catch scope. Functions that call into @@ -263,7 +264,7 @@ void Napi::NapiRefSelfDeletingWeakHandleOwner::finalize(JSC::Handle(prop.attributes); @@ -283,103 +284,33 @@ static uint32_t getPropertyAttributes(napi_property_descriptor prop) return result; } -class NAPICallFrame { -public: - NAPICallFrame(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame, void* dataPtr, JSValue storedNewTarget) - : NAPICallFrame(globalObject, callFrame, dataPtr) - { - m_storedNewTarget = storedNewTarget; - m_isConstructorCall = !m_storedNewTarget.isEmpty(); +void NAPICallFrame::extract(size_t* argc, napi_value* argv, napi_value* this_arg, void** data, Zig::GlobalObject* globalObject) +{ + + if (this_arg != nullptr) { + *this_arg = ::toNapi(m_callFrame->thisValue(), globalObject); } - NAPICallFrame(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame, void* dataPtr) - : m_callFrame(callFrame) - , m_dataPtr(dataPtr) - { - // Node-API function calls always run in "sloppy mode," even if the JS side is in strict - // mode. So if `this` is null or undefined, we use globalThis instead; otherwise, we convert - // `this` to an object. - // TODO change to global? or find another way to avoid JSGlobalProxy - JSC::JSObject* jscThis = globalObject->globalThis(); - if (!m_callFrame->thisValue().isUndefinedOrNull()) { - auto scope = DECLARE_THROW_SCOPE(JSC::getVM(globalObject)); - jscThis = m_callFrame->thisValue().toObject(globalObject); - // https://tc39.es/ecma262/#sec-toobject - // toObject only throws for undefined and null, which we checked for - scope.assertNoException(); - } - m_callFrame->setThisValue(jscThis); + if (data != nullptr) { + *data = dataPtr(); } - JSValue thisValue() const - { - return m_callFrame->thisValue(); + size_t maxArgc = 0; + if (argc != nullptr) { + maxArgc = *argc; + *argc = m_callFrame->argumentCount(); } - napi_callback_info toNapi() - { - return reinterpret_cast(this); - } - - ALWAYS_INLINE void* dataPtr() const - { - return m_dataPtr; - } - - void extract(size_t* argc, // [in-out] Specifies the size of the provided argv array - // and receives the actual count of args. 
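// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): the in/out `argc` contract
// that NAPICallFrame::extract() implements is the one addons observe through
// napi_get_cb_info. The callback below is hypothetical.
static napi_value exampleCallback(napi_env env, napi_callback_info info)
{
    size_t argc = 4; // in: capacity of argv
    napi_value argv[4];
    napi_value thisArg;
    void* data;
    napi_get_cb_info(env, info, &argc, argv, &thisArg, &data);
    // out: argc now holds the real argument count; any slots past it were
    // filled with `undefined`, matching the OOB comment in extract().
    return argv[0];
}
// ---------------------------------------------------------------------------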
- napi_value* argv, // [out] Array of values - napi_value* this_arg, // [out] Receives the JS 'this' arg for the call - void** data, Zig::GlobalObject* globalObject) - { - if (this_arg != nullptr) { - *this_arg = ::toNapi(m_callFrame->thisValue(), globalObject); - } - - if (data != nullptr) { - *data = dataPtr(); - } - - size_t maxArgc = 0; - if (argc != nullptr) { - maxArgc = *argc; - *argc = m_callFrame->argumentCount(); - } - - if (argv != nullptr) { - for (size_t i = 0; i < maxArgc; i++) { - // OK if we overflow argumentCount(), because argument() returns JS undefined - // for OOB which is what we want - argv[i] = ::toNapi(m_callFrame->argument(i), globalObject); - } + if (argv != nullptr) { + for (size_t i = 0; i < maxArgc; i++) { + // OK if we overflow argumentCount(), because argument() returns JS undefined + // for OOB which is what we want + argv[i] = ::toNapi(m_callFrame->argument(i), globalObject); } } +} - JSValue newTarget() - { - if (!m_isConstructorCall) { - return JSValue(); - } - - if (m_storedNewTarget.isUndefined()) { - // napi_get_new_target: - // "This API returns the new.target of the constructor call. If the current callback - // is not a constructor call, the result is NULL." - // they mean a null pointer, not JavaScript null - return JSValue(); - } else { - return m_storedNewTarget; - } - } - -private: - JSC::CallFrame* m_callFrame; - void* m_dataPtr; - JSValue m_storedNewTarget; - bool m_isConstructorCall = false; -}; - -static void defineNapiProperty(napi_env env, JSC::JSObject* to, napi_property_descriptor property, bool isInstance, JSC::ThrowScope& scope) +void Napi::defineProperty(napi_env env, JSC::JSObject* to, const napi_property_descriptor& property, bool isInstance, JSC::ThrowScope& scope) { Zig::GlobalObject* globalObject = env->globalObject(); JSC::VM& vm = JSC::getVM(globalObject); @@ -699,12 +630,12 @@ extern "C" napi_status napi_is_typedarray(napi_env env, napi_value value, bool* // it doesn't copy the string // but it's only safe to use if we are not setting a property // because we can't guarantee the lifetime of it -#define PROPERTY_NAME_FROM_UTF8(identifierName) \ - size_t utf8Len = strlen(utf8Name); \ - WTF::String nameString = WTF::charactersAreAllASCII(std::span { reinterpret_cast<const LChar*>(utf8Name), utf8Len }) \ - ? WTF::String(WTF::StringImpl::createWithoutCopying({ utf8Name, utf8Len })) \ - : WTF::String::fromUTF8(utf8Name); \ - JSC::PropertyName identifierName = JSC::Identifier::fromString(vm, nameString); +#define PROPERTY_NAME_FROM_UTF8(identifierName) \ + size_t utf8Len = strlen(utf8Name); \ + WTF::String&& nameString = WTF::charactersAreAllASCII(std::span { reinterpret_cast<const LChar*>(utf8Name), utf8Len }) \ + ?
WTF::String(WTF::StringImpl::createWithoutCopying({ utf8Name, utf8Len })) \ + : WTF::String::fromUTF8(utf8Name); \ + const JSC::PropertyName identifierName = JSC::Identifier::fromString(vm, nameString); extern "C" napi_status napi_has_named_property(napi_env env, napi_value object, const char* utf8Name, @@ -748,72 +679,6 @@ extern "C" napi_status napi_get_named_property(napi_env env, napi_value object, NAPI_RETURN_SUCCESS_UNLESS_EXCEPTION(env); } -extern "C" JS_EXPORT napi_status -node_api_create_external_string_latin1(napi_env env, - char* str, - size_t length, - napi_finalize finalize_callback, - void* finalize_hint, - napi_value* result, - bool* copied) -{ - // https://nodejs.org/api/n-api.html#node_api_create_external_string_latin1 - NAPI_PREAMBLE(env); - NAPI_CHECK_ARG(env, str); - NAPI_CHECK_ARG(env, result); - - length = length == NAPI_AUTO_LENGTH ? strlen(str) : length; - // WTF::ExternalStringImpl does not allow creating empty strings, so we have this limitation for now. - NAPI_RETURN_EARLY_IF_FALSE(env, length > 0, napi_invalid_arg); - Ref impl = WTF::ExternalStringImpl::create({ reinterpret_cast(str), static_cast(length) }, finalize_hint, [finalize_callback, env](void* hint, void* str, unsigned length) { - NAPI_LOG("latin1 string finalizer"); - env->doFinalizer(finalize_callback, str, hint); - }); - Zig::GlobalObject* globalObject = toJS(env); - - JSString* out = JSC::jsString(JSC::getVM(globalObject), WTF::String(WTFMove(impl))); - ensureStillAliveHere(out); - *result = toNapi(out, globalObject); - ensureStillAliveHere(out); - - if (copied) { - *copied = false; - } - - NAPI_RETURN_SUCCESS(env); -} - -extern "C" JS_EXPORT napi_status -node_api_create_external_string_utf16(napi_env env, - char16_t* str, - size_t length, - napi_finalize finalize_callback, - void* finalize_hint, - napi_value* result, - bool* copied) -{ - // https://nodejs.org/api/n-api.html#node_api_create_external_string_utf16 - NAPI_PREAMBLE(env); - NAPI_CHECK_ARG(env, str); - NAPI_CHECK_ARG(env, result); - - length = length == NAPI_AUTO_LENGTH ? std::char_traits::length(str) : length; - // WTF::ExternalStringImpl does not allow creating empty strings, so we have this limitation for now. 
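// ---------------------------------------------------------------------------
// Minimal usage sketch (assumed, not from the diff) of the two named-property
// entry points above; the helper name is hypothetical and `obj` is any object
// the addon already holds.
napi_value readLengthProperty(napi_env env, napi_value obj)
{
    bool has = false;
    napi_has_named_property(env, obj, "length", &has);
    napi_value length = nullptr;
    if (has) {
        napi_get_named_property(env, obj, "length", &length);
    }
    return length;
}
// ---------------------------------------------------------------------------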
- NAPI_RETURN_EARLY_IF_FALSE(env, length > 0, napi_invalid_arg); - - Ref impl = WTF::ExternalStringImpl::create({ reinterpret_cast(str), static_cast(length) }, finalize_hint, [finalize_callback, env](void* hint, void* str, unsigned length) { - NAPI_LOG("utf16 string finalizer"); - env->doFinalizer(finalize_callback, str, hint); - }); - Zig::GlobalObject* globalObject = toJS(env); - - JSString* out = JSC::jsString(JSC::getVM(globalObject), WTF::String(WTFMove(impl))); - ensureStillAliveHere(out); - *result = toNapi(out, globalObject); - ensureStillAliveHere(out); - - NAPI_RETURN_SUCCESS(env); -} extern "C" size_t Bun__napi_module_register_count; extern "C" void napi_module_register(napi_module* mod) { @@ -1089,7 +954,7 @@ napi_define_properties(napi_env env, napi_value object, size_t property_count, auto throwScope = DECLARE_THROW_SCOPE(vm); for (size_t i = 0; i < property_count; i++) { - defineNapiProperty(env, objectObject, properties[i], true, throwScope); + Napi::defineProperty(env, objectObject, properties[i], true, throwScope); RETURN_IF_EXCEPTION(throwScope, napi_set_last_error(env, napi_pending_exception)); } @@ -1098,16 +963,14 @@ napi_define_properties(napi_env env, napi_value object, size_t property_count, return napi_set_last_error(env, napi_ok); } -static JSC::ErrorInstance* createErrorWithCode(JSC::JSGlobalObject* globalObject, const WTF::String& code, const WTF::String& message, JSC::ErrorType type) +static JSC::ErrorInstance* createErrorWithCode(JSC::VM& vm, JSC::JSGlobalObject* globalObject, const WTF::String& code, const WTF::String& message, JSC::ErrorType type) { // no napi functions permit a null message, they must check before calling this function and // return the right error code ASSERT(!message.isNull()); - auto& vm = JSC::getVM(globalObject); - // we don't call JSC::createError() as it asserts the message is not an empty string "" - auto* error = JSC::ErrorInstance::create(JSC::getVM(globalObject), globalObject->errorStructure(type), message, JSValue(), nullptr, RuntimeType::TypeNothing, type); + auto* error = JSC::ErrorInstance::create(vm, globalObject->errorStructure(type), message, JSValue(), nullptr, RuntimeType::TypeNothing, type); if (!code.isNull()) { error->putDirect(vm, WebCore::builtinNames(vm).codePublicName(), JSC::jsString(vm, code), 0); } @@ -1129,7 +992,7 @@ static napi_status throwErrorWithCStrings(napi_env env, const char* code_utf8, c WTF::String code = code_utf8 ? 
WTF::String::fromUTF8(code_utf8) : WTF::String(); WTF::String message = WTF::String::fromUTF8(msg_utf8); - auto* error = createErrorWithCode(globalObject, code, message, type); + auto* error = createErrorWithCode(vm, globalObject, code, message, type); scope.throwException(globalObject, error); return napi_set_last_error(env, napi_ok); } @@ -1158,7 +1021,7 @@ static napi_status createErrorWithNapiValues(napi_env env, napi_value code, napi RETURN_IF_EXCEPTION(scope, napi_set_last_error(env, napi_pending_exception)); *result = toNapi( - createErrorWithCode(globalObject, wtf_code, wtf_message, type), + createErrorWithCode(vm, globalObject, wtf_code, wtf_message, type), globalObject); RETURN_IF_EXCEPTION(scope, napi_set_last_error(env, napi_pending_exception)); return napi_set_last_error(env, napi_ok); @@ -1385,7 +1248,7 @@ extern "C" napi_status napi_get_and_clear_last_exception(napi_env env, auto globalObject = toJS(env); auto scope = DECLARE_CATCH_SCOPE(JSC::getVM(globalObject)); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { *result = toNapi(JSValue(scope.exception()->value()), globalObject); } else { *result = toNapi(JSC::jsUndefined(), globalObject); @@ -1483,6 +1346,143 @@ extern "C" napi_status napi_create_type_error(napi_env env, napi_value code, return createErrorWithNapiValues(env, code, msg, JSC::ErrorType::TypeError, result); } +extern "C" JS_EXPORT napi_status +node_api_create_external_string_latin1(napi_env env, + char* str, + size_t length, + napi_finalize finalize_callback, + void* finalize_hint, + napi_value* result, + bool* copied) +{ + // https://nodejs.org/api/n-api.html#node_api_create_external_string_latin1 + NAPI_PREAMBLE(env); + NAPI_CHECK_ARG(env, str); + NAPI_CHECK_ARG(env, result); + + length = length == NAPI_AUTO_LENGTH ? strlen(str) : length; + // WTF::ExternalStringImpl does not allow creating empty strings, so we have this limitation for now. + NAPI_RETURN_EARLY_IF_FALSE(env, length > 0, napi_invalid_arg); + Ref impl = WTF::ExternalStringImpl::create({ reinterpret_cast(str), static_cast(length) }, finalize_hint, [finalize_callback, env](void* hint, void* str, unsigned length) { + NAPI_LOG("latin1 string finalizer"); + env->doFinalizer(finalize_callback, str, hint); + }); + Zig::GlobalObject* globalObject = toJS(env); + + JSString* out = JSC::jsString(JSC::getVM(globalObject), WTF::String(WTFMove(impl))); + ensureStillAliveHere(out); + *result = toNapi(out, globalObject); + ensureStillAliveHere(out); + + if (copied) { + *copied = false; + } + + NAPI_RETURN_SUCCESS(env); +} + +extern "C" JS_EXPORT napi_status +node_api_create_external_string_utf16(napi_env env, + char16_t* str, + size_t length, + napi_finalize finalize_callback, + void* finalize_hint, + napi_value* result, + bool* copied) +{ + // https://nodejs.org/api/n-api.html#node_api_create_external_string_utf16 + NAPI_PREAMBLE(env); + NAPI_CHECK_ARG(env, str); + NAPI_CHECK_ARG(env, result); + + length = length == NAPI_AUTO_LENGTH ? std::char_traits::length(str) : length; + // WTF::ExternalStringImpl does not allow creating empty strings, so we have this limitation for now. 
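// ---------------------------------------------------------------------------
// Hedged sketch (not part of the patch) of the external-string contract the
// re-added functions implement: the caller keeps ownership of the buffer
// until the finalizer fires, and `copied` reports whether the engine copied
// instead of referencing it. Helper names are hypothetical; strdup/free come
// from <cstring>/<cstdlib>.
static void finalizeHello(napi_env env, void* data, void* hint)
{
    free(data); // safe: the engine no longer references the buffer
}

napi_status makeExternalHello(napi_env env, napi_value* out)
{
    char* text = strdup("hello");
    bool copied = false;
    // NAPI_AUTO_LENGTH asks the implementation to strlen() the buffer, as the
    // code above does.
    return node_api_create_external_string_latin1(env, text, NAPI_AUTO_LENGTH,
        finalizeHello, nullptr, out, &copied);
}
// ---------------------------------------------------------------------------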
+ NAPI_RETURN_EARLY_IF_FALSE(env, length > 0, napi_invalid_arg); + + Ref impl = WTF::ExternalStringImpl::create({ reinterpret_cast(str), static_cast(length) }, finalize_hint, [finalize_callback, env](void* hint, void* str, unsigned length) { + NAPI_LOG("utf16 string finalizer"); + env->doFinalizer(finalize_callback, str, hint); + }); + Zig::GlobalObject* globalObject = toJS(env); + + JSString* out = JSC::jsString(JSC::getVM(globalObject), WTF::String(WTFMove(impl))); + ensureStillAliveHere(out); + *result = toNapi(out, globalObject); + ensureStillAliveHere(out); + + NAPI_RETURN_SUCCESS(env); +} + +extern "C" JS_EXPORT napi_status node_api_create_property_key_latin1(napi_env env, const char* str, size_t length, napi_value* result) +{ + // EXPERIMENTAL + // This is semantically correct but it may not have the performance benefit intended for node_api_create_property_key_latin1 + // TODO(@190n) use jsAtomString or something + NAPI_LOG_CURRENT_FUNCTION; + return napi_create_string_latin1(env, str, length, result); +} + +extern "C" JS_EXPORT napi_status node_api_create_property_key_utf16(napi_env env, const char16_t* str, size_t length, napi_value* result) +{ + // EXPERIMENTAL + // This is semantically correct but it may not have the performance benefit intended for node_api_create_property_key_utf16 + // TODO(@190n) use jsAtomString or something + NAPI_LOG_CURRENT_FUNCTION; + return napi_create_string_utf16(env, str, length, result); +} + +extern "C" JS_EXPORT napi_status node_api_create_property_key_utf8(napi_env env, const char* str, size_t length, napi_value* result) +{ + // EXPERIMENTAL + // This is semantically correct but it may not have the performance benefit intended for node_api_create_property_key_utf8 + // TODO(@190n) use jsAtomString or something + NAPI_LOG_CURRENT_FUNCTION; + return napi_create_string_utf8(env, str, length, result); +} + +extern "C" JS_EXPORT napi_status node_api_create_buffer_from_arraybuffer(napi_env env, + napi_value arraybuffer, + size_t byte_offset, + size_t byte_length, + napi_value* result) +{ + NAPI_LOG_CURRENT_FUNCTION; + NAPI_PREAMBLE_NO_THROW_SCOPE(env); + NAPI_CHECK_ARG(env, result); + + JSC::JSArrayBuffer* jsArrayBuffer = JSC::jsDynamicCast(toJS(arraybuffer)); + NAPI_RETURN_EARLY_IF_FALSE(env, jsArrayBuffer, napi_arraybuffer_expected); + + auto* globalObject = toJS(env); + auto scope = DECLARE_THROW_SCOPE(JSC::getVM(globalObject)); + + auto* impl = jsArrayBuffer->impl(); + + if (!impl || byte_offset + byte_length > impl->byteLength()) [[unlikely]] { + auto* error = createErrorWithCode(JSC::getVM(globalObject), globalObject, "ERR_OUT_OF_RANGE"_s, "The byte offset + length is out of range"_s, JSC::ErrorType::RangeError); + RETURN_IF_EXCEPTION(scope, napi_set_last_error(env, napi_pending_exception)); + scope.throwException(globalObject, error); + return napi_set_last_error(env, napi_pending_exception); + } + + auto* subclassStructure = globalObject->JSBufferSubclassStructure(); + JSC::JSUint8Array* uint8Array = JSC::JSUint8Array::create(globalObject, subclassStructure, impl, byte_offset, byte_length); + RETURN_IF_EXCEPTION(scope, napi_set_last_error(env, napi_pending_exception)); + + *result = toNapi(uint8Array, globalObject); + + return napi_set_last_error(env, napi_ok); +} + +extern "C" JS_EXPORT napi_status node_api_get_module_file_name(napi_env env, + const char** result) +{ + NAPI_PREAMBLE(env); + NAPI_CHECK_ARG(env, result); + *result = env->filename; + NAPI_RETURN_SUCCESS(env); +} + extern "C" napi_status napi_create_error(napi_env env, 
napi_value code, napi_value msg, napi_value* result) @@ -1680,6 +1680,26 @@ extern "C" napi_status napi_create_typedarray( JSValue arraybufferValue = toJS(arraybuffer); auto arraybufferPtr = JSC::jsDynamicCast(arraybufferValue); NAPI_RETURN_EARLY_IF_FALSE(env, arraybufferPtr, napi_arraybuffer_expected); + switch (type) { + case napi_int8_array: + case napi_uint8_array: + case napi_uint8_clamped_array: + case napi_int16_array: + case napi_uint16_array: + case napi_int32_array: + case napi_uint32_array: + case napi_float32_array: + case napi_float64_array: + case napi_bigint64_array: + case napi_biguint64_array: { + break; + } + default: { + napi_set_last_error(env, napi_invalid_arg); + return napi_invalid_arg; + } + } + JSC::JSArrayBufferView* view = createArrayBufferView(globalObject, type, arraybufferPtr->impl(), byte_offset, length); NAPI_RETURN_IF_EXCEPTION(env); *result = toNapi(view, globalObject); @@ -1687,120 +1707,6 @@ extern "C" napi_status napi_create_typedarray( } namespace Zig { -template -void NapiClass::visitChildrenImpl(JSCell* cell, Visitor& visitor) -{ - NapiClass* thisObject = jsCast(cell); - ASSERT_GC_OBJECT_INHERITS(thisObject, info()); - Base::visitChildren(thisObject, visitor); -} - -DEFINE_VISIT_CHILDREN(NapiClass); - -template -JSC_HOST_CALL_ATTRIBUTES JSC::EncodedJSValue NapiClass_ConstructorFunction(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame) -{ - JSC::VM& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); - JSObject* constructorTarget = asObject(callFrame->jsCallee()); - NapiClass* napi = jsDynamicCast(constructorTarget); - while (!napi && constructorTarget) { - constructorTarget = constructorTarget->getPrototypeDirect().getObject(); - napi = jsDynamicCast(constructorTarget); - } - - if (!napi) [[unlikely]] { - JSC::throwVMError(globalObject, scope, JSC::createTypeError(globalObject, "NapiClass constructor called on an object that is not a NapiClass"_s)); - return JSValue::encode(JSC::jsUndefined()); - } - - JSValue newTarget; - - if constexpr (ConstructCall) { - NapiPrototype* prototype = JSC::jsDynamicCast(napi->getIfPropertyExists(globalObject, vm.propertyNames->prototype)); - RETURN_IF_EXCEPTION(scope, {}); - - if (!prototype) { - JSC::throwVMError(globalObject, scope, JSC::createTypeError(globalObject, "NapiClass constructor is missing the prototype"_s)); - return JSValue::encode(JSC::jsUndefined()); - } - - newTarget = callFrame->newTarget(); - auto* subclass = prototype->subclass(globalObject, asObject(newTarget)); - RETURN_IF_EXCEPTION(scope, {}); - callFrame->setThisValue(subclass); - } - - NAPICallFrame frame(globalObject, callFrame, napi->dataPtr(), newTarget); - Bun::NapiHandleScope handleScope(jsCast(globalObject)); - - JSValue ret = toJS(napi->constructor()(napi->env(), frame.toNapi())); - napi_set_last_error(napi->env(), napi_ok); - RETURN_IF_EXCEPTION(scope, {}); - if (ret.isEmpty()) { - ret = jsUndefined(); - } - if constexpr (ConstructCall) { - RELEASE_AND_RETURN(scope, JSValue::encode(frame.thisValue())); - } else { - RELEASE_AND_RETURN(scope, JSValue::encode(ret)); - } -} - -NapiClass* NapiClass::create(VM& vm, napi_env env, WTF::String name, - napi_callback constructor, - void* data, - size_t property_count, - const napi_property_descriptor* properties) -{ - NativeExecutable* executable = vm.getHostFunction( - // for normal call - NapiClass_ConstructorFunction, - ImplementationVisibility::Public, - // for constructor call - NapiClass_ConstructorFunction, name); - Structure* structure = 
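// ---------------------------------------------------------------------------
// Illustrative call (not in the patch) of napi_create_typedarray with the
// validation added above: a recognized enum value proceeds to
// createArrayBufferView, while anything else now fails fast with
// napi_invalid_arg. The helper name is hypothetical.
napi_status makeFloat64View(napi_env env, napi_value arraybuffer, napi_value* out)
{
    // 8 elements starting at byte offset 0; the ArrayBuffer must be at least
    // 8 * sizeof(double) bytes long.
    return napi_create_typedarray(env, napi_float64_array, 8, arraybuffer, 0, out);
}
// ---------------------------------------------------------------------------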
env->globalObject()->NapiClassStructure(); - NapiClass* napiClass = new (NotNull, allocateCell(vm)) NapiClass(vm, executable, env, structure, data); - napiClass->finishCreation(vm, executable, name, constructor, data, property_count, properties); - return napiClass; -} - -void NapiClass::finishCreation(VM& vm, NativeExecutable* executable, const String& name, napi_callback constructor, - void* data, - size_t property_count, - const napi_property_descriptor* properties) -{ - Base::finishCreation(vm, executable, 0, name); - ASSERT(inherits(info())); - this->m_constructor = constructor; - auto globalObject = reinterpret_cast(this->globalObject()); - - this->putDirect(vm, vm.propertyNames->name, jsString(vm, name), JSC::PropertyAttribute::DontEnum | 0); - - NapiPrototype* prototype = NapiPrototype::create(vm, globalObject->NapiPrototypeStructure()); - - auto throwScope = DECLARE_THROW_SCOPE(vm); - - for (size_t i = 0; i < property_count; i++) { - const napi_property_descriptor& property = properties[i]; - - if (property.attributes & napi_static) { - defineNapiProperty(m_env, this, property, true, throwScope); - } else { - defineNapiProperty(m_env, prototype, property, false, throwScope); - } - - if (throwScope.exception()) - break; - } - - this->putDirect(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | 0); - prototype->putDirect(vm, vm.propertyNames->constructor, this, JSC::PropertyAttribute::DontEnum | 0); -} -} - -const ClassInfo NapiClass::s_info = { "Function"_s, &NapiClass::Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(NapiClass) }; -const ClassInfo NapiPrototype::s_info = { "Object"_s, &NapiPrototype::Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(NapiPrototype) }; extern "C" napi_status napi_get_all_property_names( napi_env env, napi_value objectNapi, napi_key_collection_mode key_mode, @@ -1849,7 +1755,7 @@ extern "C" napi_status napi_get_all_property_names( // Climb up the prototype chain to find inherited properties JSObject* current_object = object; while (!current_object->getOwnPropertyDescriptor(globalObject, key.toPropertyKey(globalObject), desc)) { - JSObject* proto = current_object->getPrototype(JSC::getVM(globalObject), globalObject).getObject(); + JSObject* proto = current_object->getPrototype(globalObject).getObject(); if (!proto) { break; } @@ -2007,6 +1913,31 @@ extern "C" napi_status napi_get_property_names(napi_env env, napi_value object, NAPI_RETURN_SUCCESS(env); } +extern "C" napi_status napi_create_buffer(napi_env env, size_t length, + void** data, + napi_value* result) +{ + NAPI_PREAMBLE(env); + NAPI_CHECK_ARG(env, result); + + Zig::GlobalObject* globalObject = toJS(env); + auto* subclassStructure = globalObject->JSBufferSubclassStructure(); + + // In Node.js, napi_create_buffer is uninitialized memory. + auto* uint8Array = JSC::JSUint8Array::createUninitialized(globalObject, subclassStructure, length); + NAPI_RETURN_IF_EXCEPTION(env); + + if (data != nullptr) { + // Node.js' code looks like this: + // *data = node::Buffer::Data(buffer); + // That means they unconditionally update the data pointer. + *data = length > 0 ? 
uint8Array->typedVector() : nullptr; + } + + *result = toNapi(uint8Array, globalObject); + NAPI_RETURN_SUCCESS(env); +} + extern "C" napi_status napi_create_external_buffer(napi_env env, size_t length, void* data, napi_finalize finalize_cb, @@ -2025,6 +1956,7 @@ extern "C" napi_status napi_create_external_buffer(napi_env env, size_t length, auto* subclassStructure = globalObject->JSBufferSubclassStructure(); auto* buffer = JSC::JSUint8Array::create(globalObject, subclassStructure, WTFMove(arrayBuffer), 0, length); + NAPI_RETURN_IF_EXCEPTION(env); *result = toNapi(buffer, globalObject); NAPI_RETURN_SUCCESS(env); @@ -2131,7 +2063,7 @@ extern "C" napi_status napi_get_value_int64(napi_env env, napi_value value, int6 // must match src/bun.js/node/types.zig#Encoding, which matches WebCore::BufferEncodingType enum class NapiStringEncoding : uint8_t { utf8 = static_cast(WebCore::BufferEncodingType::utf8), - utf16le = static_cast(WebCore::BufferEncodingType::utf16le), + utf16 = static_cast(WebCore::BufferEncodingType::utf16le), latin1 = static_cast(WebCore::BufferEncodingType::latin1), }; @@ -2141,39 +2073,42 @@ struct BufferElement { }; template<> -struct BufferElement { +struct BufferElement { using Type = char16_t; }; template napi_status napi_get_value_string_any_encoding(napi_env env, napi_value napiValue, typename BufferElement::Type* buf, size_t bufsize, size_t* writtenPtr) { + NAPI_PREAMBLE(env); NAPI_CHECK_ARG(env, napiValue); JSValue jsValue = toJS(napiValue); NAPI_RETURN_EARLY_IF_FALSE(env, jsValue.isString(), napi_string_expected); Zig::GlobalObject* globalObject = toJS(env); - String view = jsValue.asCell()->getString(globalObject); - size_t length = view.length(); + JSString* jsString = jsValue.toString(globalObject); + NAPI_RETURN_IF_EXCEPTION(env); + const auto view = jsString->view(globalObject); + NAPI_RETURN_IF_EXCEPTION(env); if (buf == nullptr) { // they just want to know the length NAPI_CHECK_ARG(env, writtenPtr); switch (EncodeTo) { case NapiStringEncoding::utf8: - if (view.is8Bit()) { - *writtenPtr = Bun__encoding__byteLengthLatin1AsUTF8(view.span8().data(), length); + if (view->is8Bit()) { + *writtenPtr = Bun__encoding__byteLengthLatin1AsUTF8(view->span8().data(), view->length()); } else { - *writtenPtr = Bun__encoding__byteLengthUTF16AsUTF8(view.span16().data(), length); + *writtenPtr = Bun__encoding__byteLengthUTF16AsUTF8(view->span16().data(), view->length()); } break; - case NapiStringEncoding::utf16le: + case NapiStringEncoding::utf16: [[fallthrough]]; case NapiStringEncoding::latin1: // if the string's encoding is the same as the destination encoding, this is trivially correct // if we are converting UTF-16 to Latin-1, then we do so by truncating each code unit, so the length is the same // if we are converting Latin-1 to UTF-16, then we do so by extending each code unit, so the length is also the same - *writtenPtr = length; + *writtenPtr = view->length(); break; } return napi_set_last_error(env, napi_ok); @@ -2184,36 +2119,33 @@ napi_status napi_get_value_string_any_encoding(napi_env env, napi_value napiValu return napi_set_last_error(env, napi_ok); } - if (bufsize == NAPI_AUTO_LENGTH) [[unlikely]] { - if (writtenPtr) *writtenPtr = 0; - buf[0] = '\0'; - return napi_set_last_error(env, napi_ok); - } - size_t written; std::span writable_byte_slice(reinterpret_cast(buf), - EncodeTo == NapiStringEncoding::utf16le + EncodeTo == NapiStringEncoding::utf16 // don't write encoded text to the last element of the destination buffer // since we need to put a null terminator 
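// ---------------------------------------------------------------------------
// The buf == nullptr branch above exists to support the usual two-call read
// pattern. A hedged sketch of the addon side (helper name hypothetical;
// needs <vector>):
std::vector<char> readUtf8(napi_env env, napi_value jsString)
{
    size_t len = 0;
    napi_get_value_string_utf8(env, jsString, nullptr, 0, &len); // length query
    std::vector<char> buf(len + 1); // +1 for the null terminator
    size_t written = 0;
    napi_get_value_string_utf8(env, jsString, buf.data(), buf.size(), &written);
    return buf;
}
// ---------------------------------------------------------------------------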
there ? 2 * (bufsize - 1) : bufsize - 1); - if (view.is8Bit()) { - if constexpr (EncodeTo == NapiStringEncoding::utf16le) { + if (view->is8Bit()) { + const auto span = view->span8(); + if constexpr (EncodeTo == NapiStringEncoding::utf16) { + // pass subslice to work around Bun__encoding__writeLatin1 asserting that the output has room - written = Bun__encoding__writeLatin1(view.span8().data(), - std::min(static_cast(view.span8().size()), bufsize), + written = Bun__encoding__writeLatin1(span.data(), + std::min(static_cast(span.size()), bufsize), writable_byte_slice.data(), writable_byte_slice.size(), static_cast(EncodeTo)); } else { - written = Bun__encoding__writeLatin1(view.span8().data(), view.length(), writable_byte_slice.data(), writable_byte_slice.size(), static_cast(EncodeTo)); + written = Bun__encoding__writeLatin1(span.data(), span.size(), writable_byte_slice.data(), writable_byte_slice.size(), static_cast(EncodeTo)); } } else { - written = Bun__encoding__writeUTF16(view.span16().data(), view.length(), writable_byte_slice.data(), writable_byte_slice.size(), static_cast(EncodeTo)); + const auto span = view->span16(); + written = Bun__encoding__writeUTF16(span.data(), span.size(), writable_byte_slice.data(), writable_byte_slice.size(), static_cast(EncodeTo)); } // convert bytes to code units - if constexpr (EncodeTo == NapiStringEncoding::utf16le) { + if constexpr (EncodeTo == NapiStringEncoding::utf16) { written /= 2; } @@ -2252,7 +2184,7 @@ extern "C" napi_status napi_get_value_string_utf16(napi_env env, napi_value napi NAPI_PREAMBLE_NO_THROW_SCOPE(env); NAPI_CHECK_ENV_NOT_IN_GC(env); // this function does set_last_error - return napi_get_value_string_any_encoding(env, napiValue, buf, bufsize, writtenPtr); + return napi_get_value_string_any_encoding(env, napiValue, buf, bufsize, writtenPtr); } extern "C" napi_status napi_get_value_bool(napi_env env, napi_value value, bool* result) @@ -2446,9 +2378,10 @@ extern "C" napi_status napi_get_value_bigint_int64(napi_env env, napi_value valu *result = jsValue.toBigInt64(toJS(env)); JSBigInt* bigint = jsValue.asHeapBigInt(); - uint64_t digit = bigint->length() > 0 ? bigint->digit(0) : 0; + auto length = bigint->length(); + uint64_t digit = length > 0 ? bigint->digit(0) : 0; - if (bigint->length() > 1) { + if (length > 1) { *lossless = false; } else if (bigint->sign()) { // negative @@ -2477,6 +2410,7 @@ extern "C" napi_status napi_get_value_bigint_uint64(napi_env env, napi_value val // toBigInt64 can throw if the value is not a bigint. 
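// ---------------------------------------------------------------------------
// Illustrative use (not from the patch) of the lossless flag computed above;
// the helper name is hypothetical.
bool readExactInt64(napi_env env, napi_value value, int64_t* out)
{
    bool lossless = false;
    napi_get_value_bigint_int64(env, value, out, &lossless);
    // lossless comes back false when the BigInt has more than one 64-bit
    // digit or does not fit the signed range, exactly the cases handled above.
    return lossless;
}
// ---------------------------------------------------------------------------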
we have already checked, so we shouldn't // hit an exception here and it's okay to assert at the end *result = jsValue.toBigUInt64(toJS(env)); + NAPI_RETURN_IF_EXCEPTION(env); // bigint to uint64 conversion is lossless if and only if there aren't multiple digits and the // value is positive @@ -2504,21 +2438,16 @@ extern "C" napi_status napi_get_value_bigint_words(napi_env env, JSC::JSBigInt* bigInt = jsValue.asHeapBigInt(); - size_t available_words = *word_count; - *word_count = bigInt->length(); - // Return ok in this case if (sign_bit == nullptr && words == nullptr) { + *word_count = bigInt->length(); NAPI_RETURN_SUCCESS(env); } - *sign_bit = (int)bigInt->sign(); - - size_t len = *word_count; - for (size_t i = 0; i < available_words && i < len; i++) { - words[i] = bigInt->digit(i); - } - + std::span writable_words(words, *word_count); + *sign_bit = static_cast(bigInt->sign()); + *word_count = bigInt->toWordsArray(writable_words); + ensureStillAliveHere(bigInt); NAPI_RETURN_SUCCESS(env); } @@ -2593,6 +2522,30 @@ extern "C" napi_status napi_set_instance_data(napi_env env, NAPI_RETURN_SUCCESS(env); } +extern "C" napi_status napi_create_bigint_uint64(napi_env env, uint64_t value, napi_value* result) +{ + NAPI_PREAMBLE(env); + NAPI_CHECK_ARG(env, result); + auto* globalObject = toJS(env); + auto* bigint = JSBigInt::createFrom(globalObject, value); + NAPI_RETURN_IF_EXCEPTION(env); + *result = toNapi(bigint, globalObject); + ensureStillAliveHere(bigint); + NAPI_RETURN_SUCCESS(env); +} + +extern "C" napi_status napi_create_bigint_int64(napi_env env, int64_t value, napi_value* result) +{ + NAPI_PREAMBLE(env); + NAPI_CHECK_ARG(env, result); + auto* globalObject = toJS(env); + auto* bigint = JSBigInt::createFrom(globalObject, value); + NAPI_RETURN_IF_EXCEPTION(env); + *result = toNapi(bigint, globalObject); + ensureStillAliveHere(bigint); + NAPI_RETURN_SUCCESS(env); +} + extern "C" napi_status napi_create_bigint_words(napi_env env, int sign_bit, size_t word_count, @@ -2606,7 +2559,8 @@ extern "C" napi_status napi_create_bigint_words(napi_env env, NAPI_RETURN_EARLY_IF_FALSE(env, word_count <= UINT_MAX, napi_invalid_arg); Zig::GlobalObject* globalObject = toJS(env); - auto scope = DECLARE_THROW_SCOPE(JSC::getVM(globalObject)); + auto& vm = env->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); // we check INT_MAX here because it won't reject any bigints that should be able to be created // (as the true limit is much lower), and one Node.js test expects an exception instead of @@ -2618,34 +2572,16 @@ extern "C" napi_status napi_create_bigint_words(napi_env env, RETURN_IF_EXCEPTION(scope, napi_set_last_error(env, napi_pending_exception)); } - // JSBigInt requires there are no leading zeroes in the words array, but native modules may have - // passed an array containing leading zeroes. so we have to cut those off. 
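// ---------------------------------------------------------------------------
// Hedged round-trip sketch (illustrative only) of the two bigint-words paths
// above; leading-zero words are assumed to be normalized inside
// JSBigInt::createFromWords, replacing the hand-trimming removed here.
napi_status roundTripWords(napi_env env, napi_value* out)
{
    uint64_t words[2] = { 0x1ull, 0x0ull }; // high word is a leading zero
    napi_value big = nullptr;
    napi_status status = napi_create_bigint_words(env, /*sign_bit*/ 0, 2, words, &big);
    if (status != napi_ok) return status;

    int sign = 0;
    size_t count = 2; // in: capacity of `read`
    uint64_t read[2];
    status = napi_get_value_bigint_words(env, big, &sign, &count, read);
    // count comes back as 1: the leading zero word does not survive.
    if (status == napi_ok) *out = big;
    return status;
}
// ---------------------------------------------------------------------------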
- while (word_count > 0 && words[word_count - 1] == 0) { - word_count--; - } - - if (word_count == 0) { - auto* bigint = JSBigInt::createZero(globalObject); - RETURN_IF_EXCEPTION(scope, napi_set_last_error(env, napi_pending_exception)); - *result = toNapi(bigint, globalObject); - return napi_set_last_error(env, napi_ok); - } + std::span words_span(words, word_count); // throws RangeError if size is larger than JSC's limit - auto* bigint = JSBigInt::createWithLength(globalObject, word_count); + auto* bigint = JSBigInt::createFromWords(globalObject, words_span, sign_bit != 0); RETURN_IF_EXCEPTION(scope, napi_set_last_error(env, napi_pending_exception)); ASSERT(bigint); - bigint->setSign(sign_bit != 0); - - const uint64_t* current_word = words; - // TODO: add fast path that uses memcpy here instead of setDigit - // we need to add this to JSC. V8 has this optimization - for (size_t i = 0; i < word_count; i++) { - bigint->setDigit(i, *current_word++); - } - *result = toNapi(bigint, globalObject); + + ensureStillAliveHere(bigint); return napi_set_last_error(env, napi_ok); } @@ -2674,7 +2610,9 @@ extern "C" napi_status napi_create_symbol(napi_env env, napi_value description, // TODO handle empty string? } - *result = toNapi(JSC::Symbol::create(vm), globalObject); + auto* symbol = JSC::Symbol::create(vm); + *result = toNapi(symbol, globalObject); + ensureStillAliveHere(symbol); NAPI_RETURN_SUCCESS(env); } @@ -2702,6 +2640,7 @@ extern "C" napi_status napi_new_instance(napi_env env, napi_value constructor, auto value = construct(globalObject, constructorObject, constructData, args); *result = toNapi(value, globalObject); + NAPI_RETURN_SUCCESS_UNLESS_EXCEPTION(env); } @@ -2784,8 +2723,9 @@ extern "C" napi_status napi_type_tag_object(napi_env env, napi_value value, cons Zig::GlobalObject* globalObject = toJS(env); JSObject* js_object = toJS(value).getObject(); NAPI_RETURN_EARLY_IF_FALSE(env, js_object, napi_object_expected); + JSValue napiTypeTagValue = globalObject->napiTypeTags()->get(js_object); - auto* existing_tag = jsDynamicCast(globalObject->napiTypeTags()->get(js_object)); + auto* existing_tag = jsDynamicCast(napiTypeTagValue); // cannot tag an object that is already tagged NAPI_RETURN_EARLY_IF_FALSE(env, existing_tag == nullptr, napi_invalid_arg); @@ -2815,74 +2755,6 @@ extern "C" napi_status napi_check_object_type_tag(napi_env env, napi_value value NAPI_RETURN_SUCCESS(env); } -extern "C" JS_EXPORT napi_status node_api_create_property_key_latin1(napi_env env, const char* str, size_t length, napi_value* result) -{ - // EXPERIMENTAL - // This is semantically correct but it may not have the performance benefit intended for node_api_create_property_key_latin1 - // TODO(@190n) use jsAtomString or something - NAPI_LOG_CURRENT_FUNCTION; - return napi_create_string_latin1(env, str, length, result); -} - -extern "C" JS_EXPORT napi_status node_api_create_property_key_utf16(napi_env env, const char16_t* str, size_t length, napi_value* result) -{ - // EXPERIMENTAL - // This is semantically correct but it may not have the performance benefit intended for node_api_create_property_key_utf16 - // TODO(@190n) use jsAtomString or something - NAPI_LOG_CURRENT_FUNCTION; - return napi_create_string_utf16(env, str, length, result); -} - -extern "C" JS_EXPORT napi_status node_api_create_property_key_utf8(napi_env env, const char* str, size_t length, napi_value* result) -{ - // EXPERIMENTAL - // This is semantically correct but it may not have the performance benefit intended for node_api_create_property_key_utf8 - 
// TODO(@190n) use jsAtomString or something - NAPI_LOG_CURRENT_FUNCTION; - return napi_create_string_utf8(env, str, length, result); -} - -extern "C" JS_EXPORT napi_status node_api_create_buffer_from_arraybuffer(napi_env env, - napi_value arraybuffer, - size_t byte_offset, - size_t byte_length, - napi_value* result) -{ - NAPI_LOG_CURRENT_FUNCTION; - NAPI_PREAMBLE_NO_THROW_SCOPE(env); - NAPI_CHECK_ARG(env, result); - - JSC::JSArrayBuffer* jsArrayBuffer = JSC::jsDynamicCast(toJS(arraybuffer)); - NAPI_RETURN_EARLY_IF_FALSE(env, jsArrayBuffer, napi_arraybuffer_expected); - - auto* globalObject = toJS(env); - auto scope = DECLARE_THROW_SCOPE(JSC::getVM(globalObject)); - - if (byte_offset + byte_length > jsArrayBuffer->impl()->byteLength()) { - JSC::throwRangeError(globalObject, scope, "byteOffset exceeds source ArrayBuffer byteLength"_s); - RETURN_IF_EXCEPTION(scope, napi_set_last_error(env, napi_pending_exception)); - } - - auto* subclassStructure = globalObject->JSBufferSubclassStructure(); - JSC::JSUint8Array* uint8Array = JSC::JSUint8Array::create(globalObject, subclassStructure, byte_length); - void* destination = uint8Array->vector(); - const void* source = reinterpret_cast(jsArrayBuffer->impl()->data()) + byte_offset; - memmove(destination, source, byte_length); - - *result = toNapi(uint8Array, globalObject); - scope.release(); - return napi_set_last_error(env, napi_ok); -} - -extern "C" JS_EXPORT napi_status node_api_get_module_file_name(napi_env env, - const char** result) -{ - NAPI_PREAMBLE(env); - NAPI_CHECK_ARG(env, result); - *result = env->filename; - NAPI_RETURN_SUCCESS(env); -} - extern "C" JS_EXPORT napi_status napi_add_env_cleanup_hook(napi_env env, void (*function)(void*), void* data) @@ -2914,7 +2786,7 @@ extern "C" JS_EXPORT napi_status napi_remove_env_cleanup_hook(napi_env env, { NAPI_PREAMBLE(env); - if (function != nullptr && !env->globalObject()->vm().hasTerminationRequest()) [[likely]] { + if (function != nullptr && !env->isVMTerminating()) [[likely]] { env->removeCleanupHook(function, data); } @@ -2928,7 +2800,7 @@ extern "C" JS_EXPORT napi_status napi_remove_async_cleanup_hook(napi_async_clean NAPI_PREAMBLE(env); - if (!env->globalObject()->vm().hasTerminationRequest()) { + if (!env->isVMTerminating()) { env->removeAsyncCleanupHook(handle); } @@ -2960,139 +2832,4 @@ extern "C" JSGlobalObject* NapiEnv__globalObject(napi_env env) return env->globalObject(); } -WTF_MAKE_TZONE_ALLOCATED_IMPL(NapiRef); - -void NapiRef::ref() -{ - NAPI_LOG("ref %p %u -> %u", this, refCount, refCount + 1); - ++refCount; - if (refCount == 1 && !weakValueRef.isClear()) { - auto& vm = globalObject.get()->vm(); - strongRef.set(vm, weakValueRef.get()); - - // isSet() will return always true after being set once - // We cannot rely on isSet() to check if the value is set we need to use isClear() - // .setString/.setObject/.setPrimitive will assert fail if called more than once (even after clear()) - // We should not clear the weakValueRef here because we need to keep it if we call NapiRef::unref() - // so we can call the finalizer - } -} - -void NapiRef::unref() -{ - NAPI_LOG("unref %p %u -> %u", this, refCount, refCount - 1); - bool clear = refCount == 1; - refCount = refCount > 0 ? 
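// ---------------------------------------------------------------------------
// Illustrative pairing (not from the patch) of the cleanup-hook entry points
// above; per the N-API spec the function/data pair must match exactly for
// removal. Names are hypothetical.
static void onEnvTeardown(void* data)
{
    // release addon state tied to this env (hypothetical)
}

void installHook(napi_env env, void* state)
{
    napi_add_env_cleanup_hook(env, onEnvTeardown, state);
}

void uninstallHook(napi_env env, void* state)
{
    // With the isVMTerminating() guard above, this becomes a no-op while the
    // VM is shutting down.
    napi_remove_env_cleanup_hook(env, onEnvTeardown, state);
}
// ---------------------------------------------------------------------------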
refCount - 1 : 0; - if (clear && !m_isEternal) { - // we still dont clean weakValueRef so we can ref it again using NapiRef::ref() if the GC didn't collect it - // and use it to call the finalizer when GC'd - strongRef.clear(); - } -} - -void NapiRef::clear() -{ - NAPI_LOG("ref clear %p", this); - finalizer.call(env, nativeObject); - globalObject.clear(); - weakValueRef.clear(); - strongRef.clear(); -} - -NapiWeakValue::~NapiWeakValue() -{ - clear(); -} - -void NapiWeakValue::clear() -{ - switch (m_tag) { - case WeakTypeTag::Cell: { - m_value.cell.clear(); - break; - } - case WeakTypeTag::String: { - m_value.string.clear(); - break; - } - default: { - break; - } - } - - m_tag = WeakTypeTag::NotSet; -} - -bool NapiWeakValue::isClear() const -{ - return m_tag == WeakTypeTag::NotSet; -} - -void NapiWeakValue::setPrimitive(JSValue value) -{ - switch (m_tag) { - case WeakTypeTag::Cell: { - m_value.cell.clear(); - break; - } - case WeakTypeTag::String: { - m_value.string.clear(); - break; - } - default: { - break; - } - } - m_tag = WeakTypeTag::Primitive; - m_value.primitive = value; -} - -void NapiWeakValue::set(JSValue value, WeakHandleOwner& owner, void* context) -{ - if (value.isCell()) { - auto* cell = value.asCell(); - if (cell->isString()) { - setString(jsCast(cell), owner, context); - } else { - setCell(cell, owner, context); - } - } else { - setPrimitive(value); - } -} - -void NapiWeakValue::setCell(JSCell* cell, WeakHandleOwner& owner, void* context) -{ - switch (m_tag) { - case WeakTypeTag::Cell: { - m_value.cell.clear(); - break; - } - case WeakTypeTag::String: { - m_value.string.clear(); - break; - } - default: { - break; - } - } - - m_value.cell = JSC::Weak(cell, &owner, context); - m_tag = WeakTypeTag::Cell; -} - -void NapiWeakValue::setString(JSString* string, WeakHandleOwner& owner, void* context) -{ - switch (m_tag) { - case WeakTypeTag::Cell: { - m_value.cell.clear(); - break; - } - default: { - break; - } - } - - m_value.string = JSC::Weak(string, &owner, context); - m_tag = WeakTypeTag::String; } diff --git a/src/bun.js/bindings/napi.h b/src/bun.js/bindings/napi.h index f4e44cf135..db9c25ffd8 100644 --- a/src/bun.js/bindings/napi.h +++ b/src/bun.js/bindings/napi.h @@ -24,11 +24,16 @@ extern "C" void napi_internal_crash_in_gc(napi_env); extern "C" void Bun__crashHandler(const char* message, size_t message_len); namespace Napi { + +static constexpr int DEFAULT_NAPI_VERSION = 10; + struct AsyncCleanupHook { napi_async_cleanup_hook function = nullptr; void* data = nullptr; napi_async_cleanup_hook_handle handle = nullptr; }; + +void defineProperty(napi_env env, JSC::JSObject* to, const napi_property_descriptor& property, bool isInstance, JSC::ThrowScope& scope); } struct napi_async_cleanup_hook_handle__ { @@ -66,6 +71,7 @@ public: napi_env__(Zig::GlobalObject* globalObject, const napi_module& napiModule) : m_globalObject(globalObject) , m_napiModule(napiModule) + , m_vm(JSC::getVM(globalObject)) { napi_internal_register_cleanup_zig(this); } @@ -174,8 +180,7 @@ public: bool inGC() const { - JSC::VM& vm = JSC::getVM(m_globalObject); - return vm.isCollectorBusyOnCurrentThread(); + return this->vm().isCollectorBusyOnCurrentThread(); } void checkGC() const @@ -189,7 +194,7 @@ public: bool isVMTerminating() const { - return JSC::getVM(m_globalObject).hasTerminationRequest(); + return this->vm().hasTerminationRequest(); } void doFinalizer(napi_finalize finalize_cb, void* data, void* finalize_hint) @@ -207,6 +212,7 @@ public: inline Zig::GlobalObject* globalObject() const { return 
m_globalObject; } inline const napi_module& napiModule() const { return m_napiModule; } + inline JSC::VM& vm() const { return m_vm; } // Returns true if finalizers from this module need to be scheduled for the next tick after garbage collection, instead of running during garbage collection inline bool mustDeferFinalizers() const @@ -296,6 +302,7 @@ private: // TODO(@heimskr): Use WTF::HashSet std::unordered_set m_finalizers; bool m_isFinishingFinalizers = false; + JSC::VM& m_vm; std::list> m_cleanupHooks; std::list m_asyncCleanupHooks; }; @@ -388,11 +395,11 @@ public: case WeakTypeTag::Primitive: return m_value.primitive; case WeakTypeTag::Cell: - return JSC::JSValue(m_value.cell.get()); + return m_value.cell.get(); case WeakTypeTag::String: - return JSC::JSValue(m_value.string.get()); + return m_value.string.get(); default: - return JSC::JSValue(); + return {}; } } @@ -649,4 +656,78 @@ static inline NapiRef* toJS(napi_ref val) return reinterpret_cast(val); } +extern "C" napi_status napi_set_last_error(napi_env env, napi_status status); +class NAPICallFrame { +public: + NAPICallFrame(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame, void* dataPtr, JSValue storedNewTarget) + : NAPICallFrame(globalObject, callFrame, dataPtr) + { + m_storedNewTarget = storedNewTarget; + m_isConstructorCall = !m_storedNewTarget.isEmpty(); + } + + NAPICallFrame(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame, void* dataPtr) + : m_callFrame(callFrame) + , m_dataPtr(dataPtr) + { + // Node-API function calls always run in "sloppy mode," even if the JS side is in strict + // mode. So if `this` is null or undefined, we use globalThis instead; otherwise, we convert + // `this` to an object. + // TODO change to global? or find another way to avoid JSGlobalProxy + JSC::JSObject* jscThis = globalObject->globalThis(); + if (!m_callFrame->thisValue().isUndefinedOrNull()) { + auto scope = DECLARE_THROW_SCOPE(JSC::getVM(globalObject)); + jscThis = m_callFrame->thisValue().toObject(globalObject); + // https://tc39.es/ecma262/#sec-toobject + // toObject only throws for undefined and null, which we checked for + scope.assertNoException(); + } + m_callFrame->setThisValue(jscThis); + } + + JSValue thisValue() const + { + return m_callFrame->thisValue(); + } + + napi_callback_info toNapi() + { + return reinterpret_cast(this); + } + + ALWAYS_INLINE void* dataPtr() const + { + return m_dataPtr; + } + + void extract(size_t* argc, // [in-out] Specifies the size of the provided argv array + // and receives the actual count of args. + napi_value* argv, // [out] Array of values + napi_value* this_arg, // [out] Receives the JS 'this' arg for the call + void** data, Zig::GlobalObject* globalObject); + + JSValue newTarget() + { + if (!m_isConstructorCall) { + return JSValue(); + } + + if (m_storedNewTarget.isUndefined()) { + // napi_get_new_target: + // "This API returns the new.target of the constructor call. If the current callback + // is not a constructor call, the result is NULL." 
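// ---------------------------------------------------------------------------
// Sketch (illustrative, not in the patch) of what the empty-JSValue return in
// newTarget() above means for addons; the helper name is hypothetical.
bool isConstructorCall(napi_env env, napi_callback_info info)
{
    napi_value newTarget = nullptr;
    napi_get_new_target(env, info, &newTarget);
    // newTarget stays a null pointer for plain (non-construct) calls, per the
    // quoted documentation text.
    return newTarget != nullptr;
}
// ---------------------------------------------------------------------------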
+ // they mean a null pointer, not JavaScript null + return JSValue(); + } else { + return m_storedNewTarget; + } + } + +private: + JSC::CallFrame* m_callFrame; + void* m_dataPtr; + JSValue m_storedNewTarget; + bool m_isConstructorCall = false; +}; + } diff --git a/src/bun.js/bindings/node/crypto/CryptoDhJob.cpp b/src/bun.js/bindings/node/crypto/CryptoDhJob.cpp index bf529e76d5..11ea6c6ce5 100644 --- a/src/bun.js/bindings/node/crypto/CryptoDhJob.cpp +++ b/src/bun.js/bindings/node/crypto/CryptoDhJob.cpp @@ -138,18 +138,18 @@ JSC_DEFINE_HOST_FUNCTION(jsDiffieHellman, (JSGlobalObject * lexicalGlobalObject, JSValue optionsValue = callFrame->argument(0); V::validateObject(scope, lexicalGlobalObject, optionsValue, "options"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSObject* options = optionsValue.getObject(); JSValue callbackValue = callFrame->argument(1); if (!callbackValue.isUndefined()) { V::validateFunction(scope, lexicalGlobalObject, callbackValue, "callback"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } std::optional ctx = DhJobCtx::fromJS(lexicalGlobalObject, scope, options); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (!callbackValue.isUndefined()) { DhJob::createAndSchedule(lexicalGlobalObject, WTFMove(*ctx), callbackValue); diff --git a/src/bun.js/bindings/node/crypto/CryptoGenKeyPair.cpp b/src/bun.js/bindings/node/crypto/CryptoGenKeyPair.cpp index f681752337..ef8c2415ff 100644 --- a/src/bun.js/bindings/node/crypto/CryptoGenKeyPair.cpp +++ b/src/bun.js/bindings/node/crypto/CryptoGenKeyPair.cpp @@ -45,7 +45,7 @@ void KeyPairJobCtx::runFromJS(JSGlobalObject* lexicalGlobalObject, JSValue callb } JSValue publicKeyValue = m_keyObj.exportPublic(lexicalGlobalObject, scope, m_publicKeyEncoding); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { JSValue exceptionValue = scope.exception(); scope.clearException(); exceptionCallback(exceptionValue); @@ -53,7 +53,7 @@ void KeyPairJobCtx::runFromJS(JSGlobalObject* lexicalGlobalObject, JSValue callb } JSValue privateKeyValue = m_keyObj.exportPrivate(lexicalGlobalObject, scope, m_privateKeyEncoding); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { JSValue exceptionValue = scope.exception(); scope.clearException(); exceptionCallback(exceptionValue); @@ -128,42 +128,42 @@ JSC_DEFINE_HOST_FUNCTION(jsGenerateKeyPair, (JSC::JSGlobalObject * globalObject, } V::validateFunction(scope, globalObject, callbackValue, "callback"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); V::validateString(scope, globalObject, typeValue, "type"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); KeyEncodingConfig config = parseKeyEncodingConfig(globalObject, scope, typeValue, optionsValue); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (!optionsValue.isUndefined()) { V::validateObject(scope, globalObject, optionsValue, "options"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } JSString* typeString = typeValue.toString(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); GCOwnedDataScope typeView = typeString->view(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (typeView == "rsa"_s || typeView == 
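// ---------------------------------------------------------------------------
// Why `{}` works in the RETURN_IF_EXCEPTION changes above (hedged note):
// these host functions return JSC::EncodedJSValue, and value-initializing it
// yields the same bits as JSValue::encode(JSValue()) (the empty value), so
// the shorter spelling is behavior-preserving. A minimal host function in the
// same style, with a hypothetical name:
JSC_DEFINE_HOST_FUNCTION(jsExampleHostFn, (JSGlobalObject * globalObject, JSC::CallFrame* callFrame))
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    double n = callFrame->argument(0).toNumber(globalObject); // may throw
    RETURN_IF_EXCEPTION(scope, {});
    return JSValue::encode(jsNumber(n + 1));
}
// ---------------------------------------------------------------------------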
"rsa-pss"_s) { std::optional ctx = RsaKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); RsaKeyPairJob::createAndSchedule(globalObject, WTFMove(*ctx), callbackValue); return JSValue::encode(jsUndefined()); } if (typeView == "dsa"_s) { std::optional ctx = DsaKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); DsaKeyPairJob::createAndSchedule(globalObject, WTFMove(*ctx), callbackValue); return JSValue::encode(jsUndefined()); } if (typeView == "ec"_s) { std::optional ctx = EcKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); EcKeyPairJob::createAndSchedule(globalObject, WTFMove(*ctx), callbackValue); return JSValue::encode(jsUndefined()); } @@ -171,14 +171,14 @@ JSC_DEFINE_HOST_FUNCTION(jsGenerateKeyPair, (JSC::JSGlobalObject * globalObject, if (typeView == "ed25519"_s || typeView == "ed448"_s || typeView == "x25519"_s || typeView == "x448"_s) { std::optional ctx = NidKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); NidKeyPairJob::createAndSchedule(globalObject, WTFMove(*ctx), callbackValue); return JSValue::encode(jsUndefined()); } if (typeView == "dh"_s) { std::optional ctx = DhKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); DhKeyPairJob::createAndSchedule(globalObject, WTFMove(*ctx), callbackValue); return JSValue::encode(jsUndefined()); } @@ -195,20 +195,20 @@ JSC_DEFINE_HOST_FUNCTION(jsGenerateKeyPairSync, (JSGlobalObject * globalObject, JSValue optionsValue = callFrame->argument(1); V::validateString(scope, globalObject, typeValue, "type"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); KeyEncodingConfig config = parseKeyEncodingConfig(globalObject, scope, typeValue, optionsValue); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (!optionsValue.isUndefined()) { V::validateObject(scope, globalObject, optionsValue, "options"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } JSString* typeString = typeValue.toString(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); GCOwnedDataScope typeView = typeString->view(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSObject* result = JSC::constructEmptyObject(globalObject); JSValue publicKeyValue = jsUndefined(); @@ -217,99 +217,99 @@ JSC_DEFINE_HOST_FUNCTION(jsGenerateKeyPairSync, (JSGlobalObject * globalObject, if (typeView == "rsa"_s || typeView == "rsa-pss"_s) { std::optional ctx = RsaKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); ncrypto::EVPKeyCtxPointer keyCtx = ctx->setup(); if (!keyCtx) { 
throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } ctx->runTask(globalObject, keyCtx); if (!ctx->m_keyObj.data()) { throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } publicKeyValue = ctx->m_keyObj.exportPublic(globalObject, scope, ctx->m_publicKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); privateKeyValue = ctx->m_keyObj.exportPrivate(globalObject, scope, ctx->m_privateKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } else if (typeView == "dsa"_s) { auto ctx = DsaKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); ncrypto::EVPKeyCtxPointer keyCtx = ctx->setup(); if (!keyCtx) { throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } ctx->runTask(globalObject, keyCtx); if (!ctx->m_keyObj.data()) { throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } publicKeyValue = ctx->m_keyObj.exportPublic(globalObject, scope, ctx->m_publicKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); privateKeyValue = ctx->m_keyObj.exportPrivate(globalObject, scope, ctx->m_privateKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } else if (typeView == "ec"_s) { auto ctx = EcKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); ncrypto::EVPKeyCtxPointer keyCtx = ctx->setup(); if (!keyCtx) { throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } ctx->runTask(globalObject, keyCtx); if (!ctx->m_keyObj.data()) { throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } publicKeyValue = ctx->m_keyObj.exportPublic(globalObject, scope, ctx->m_publicKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); privateKeyValue = ctx->m_keyObj.exportPrivate(globalObject, scope, ctx->m_privateKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } else if (typeView == "ed25519"_s || typeView == "ed448"_s || typeView == "x25519"_s || typeView == "x448"_s) { auto ctx = NidKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); ncrypto::EVPKeyCtxPointer keyCtx = ctx->setup(); if (!keyCtx) { throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } ctx->runTask(globalObject, keyCtx); if (!ctx->m_keyObj.data()) { throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } publicKeyValue = ctx->m_keyObj.exportPublic(globalObject, scope, ctx->m_publicKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); privateKeyValue = ctx->m_keyObj.exportPrivate(globalObject, scope, ctx->m_privateKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } else if (typeView == "dh"_s) { auto ctx = 
DhKeyPairJobCtx::fromJS(globalObject, scope, typeView, optionsValue, config); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); ncrypto::EVPKeyCtxPointer keyCtx = ctx->setup(); if (!keyCtx) { throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } ctx->runTask(globalObject, keyCtx); if (!ctx->m_keyObj.data()) { throwCryptoError(globalObject, scope, ctx->err()); - return JSValue::encode({}); + return {}; } publicKeyValue = ctx->m_keyObj.exportPublic(globalObject, scope, ctx->m_publicKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); privateKeyValue = ctx->m_keyObj.exportPrivate(globalObject, scope, ctx->m_privateKeyEncoding); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } else { return ERR::INVALID_ARG_VALUE(scope, globalObject, "type"_s, typeValue, "must be a supported key type"_s); } result->putDirect(vm, Identifier::fromString(vm, "publicKey"_s), publicKeyValue); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); result->putDirect(vm, Identifier::fromString(vm, "privateKey"_s), privateKeyValue); return JSValue::encode(result); } diff --git a/src/bun.js/bindings/node/crypto/CryptoHkdf.cpp b/src/bun.js/bindings/node/crypto/CryptoHkdf.cpp index c65aabcb9d..267eee4a17 100644 --- a/src/bun.js/bindings/node/crypto/CryptoHkdf.cpp +++ b/src/bun.js/bindings/node/crypto/CryptoHkdf.cpp @@ -245,11 +245,11 @@ JSC_DEFINE_HOST_FUNCTION(jsHkdf, (JSGlobalObject * lexicalGlobalObject, JSC::Cal auto scope = DECLARE_THROW_SCOPE(vm); std::optional ctx = HkdfJobCtx::fromJS(lexicalGlobalObject, callFrame, scope, HkdfJobCtx::Mode::Async); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSValue callback = callFrame->argument(5); V::validateFunction(scope, lexicalGlobalObject, callback, "callback"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); HkdfJob::createAndSchedule(lexicalGlobalObject, WTFMove(ctx.value()), callback); @@ -262,7 +262,7 @@ JSC_DEFINE_HOST_FUNCTION(jsHkdfSync, (JSGlobalObject * lexicalGlobalObject, JSC: auto scope = DECLARE_THROW_SCOPE(vm); std::optional ctx = HkdfJobCtx::fromJS(lexicalGlobalObject, callFrame, scope, HkdfJobCtx::Mode::Sync); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); ctx->runTask(lexicalGlobalObject); @@ -276,7 +276,7 @@ JSC_DEFINE_HOST_FUNCTION(jsHkdfSync, (JSGlobalObject * lexicalGlobalObject, JSC: RefPtr buf = JSC::ArrayBuffer::tryCreateUninitialized(result.size(), 1); if (!buf) { throwOutOfMemoryError(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } memcpy(buf->data(), result.data(), result.size()); diff --git a/src/bun.js/bindings/node/crypto/CryptoKeygen.cpp b/src/bun.js/bindings/node/crypto/CryptoKeygen.cpp index 3836f9141e..87b6243e13 100644 --- a/src/bun.js/bindings/node/crypto/CryptoKeygen.cpp +++ b/src/bun.js/bindings/node/crypto/CryptoKeygen.cpp @@ -144,11 +144,11 @@ JSC_DEFINE_HOST_FUNCTION(jsGenerateKey, (JSC::JSGlobalObject * lexicalGlobalObje } V::validateFunction(scope, lexicalGlobalObject, callbackValue, "callback"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); std::optional ctx = SecretKeyJobCtx::fromJS(lexicalGlobalObject, scope, typeValue, optionsValue); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, 
JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); SecretKeyJob::createAndSchedule(lexicalGlobalObject, WTFMove(ctx.value()), callbackValue); @@ -165,7 +165,7 @@ JSC_DEFINE_HOST_FUNCTION(jsGenerateKeySync, (JSC::JSGlobalObject * lexicalGlobal std::optional ctx = SecretKeyJobCtx::fromJS(lexicalGlobalObject, scope, typeValue, optionsValue); ASSERT(ctx.has_value() == !scope.exception()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); ctx->runTask(lexicalGlobalObject); diff --git a/src/bun.js/bindings/node/crypto/CryptoKeys.cpp b/src/bun.js/bindings/node/crypto/CryptoKeys.cpp index 8b90e6b89e..799954e7c7 100644 --- a/src/bun.js/bindings/node/crypto/CryptoKeys.cpp +++ b/src/bun.js/bindings/node/crypto/CryptoKeys.cpp @@ -20,7 +20,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCreateSecretKey, (JSC::JSGlobalObject * lexicalGlobal JSValue encodingValue = callFrame->argument(1); KeyObject keyObject = KeyObject::prepareSecretKey(lexicalGlobalObject, scope, keyValue, encodingValue, true); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); Structure* structure = globalObject->m_JSSecretKeyObjectClassStructure.get(lexicalGlobalObject); JSSecretKeyObject* secretKey = JSSecretKeyObject::create(vm, structure, lexicalGlobalObject, WTFMove(keyObject)); @@ -37,7 +37,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCreatePublicKey, (JSC::JSGlobalObject * lexicalGlobal JSValue keyValue = callFrame->argument(0); auto prepareResult = KeyObject::prepareAsymmetricKey(lexicalGlobalObject, scope, keyValue, KeyObject::PrepareAsymmetricKeyMode::CreatePublic); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); KeyObject keyObject; diff --git a/src/bun.js/bindings/node/crypto/CryptoPrimes.cpp b/src/bun.js/bindings/node/crypto/CryptoPrimes.cpp index 7422f6eea7..10159a63ec 100644 --- a/src/bun.js/bindings/node/crypto/CryptoPrimes.cpp +++ b/src/bun.js/bindings/node/crypto/CryptoPrimes.cpp @@ -107,7 +107,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCheckPrimeSync, (JSC::JSGlobalObject * lexicalGlobalO ncrypto::BignumPointer candidate = ncrypto::BignumPointer(candidateView->data(), candidateView->size()); if (!candidate) { throwCryptoError(lexicalGlobalObject, scope, ERR_get_error(), "BignumPointer"_s); - return JSValue::encode({}); + return {}; } auto res = candidate.isPrime(checks, [](int32_t a, int32_t b) -> bool { @@ -126,11 +126,11 @@ JSC_DEFINE_HOST_FUNCTION(jsCheckPrime, (JSC::JSGlobalObject * lexicalGlobalObjec JSValue candidateValue = callFrame->argument(0); if (candidateValue.isBigInt()) { candidateValue = unsignedBigIntToBuffer(lexicalGlobalObject, scope, candidateValue, "candidate"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } auto candidateView = getArrayBufferOrView2(lexicalGlobalObject, scope, candidateValue, "candidate"_s, jsUndefined()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSValue optionsValue = callFrame->argument(1); JSValue callback = callFrame->argument(2); @@ -140,29 +140,29 @@ JSC_DEFINE_HOST_FUNCTION(jsCheckPrime, (JSC::JSGlobalObject * lexicalGlobalObjec } V::validateFunction(scope, lexicalGlobalObject, callback, "callback"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (!optionsValue.isUndefined()) { V::validateObject(scope, lexicalGlobalObject, optionsValue, "options"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } int32_t checks = 0; if 
(optionsValue.isObject()) { JSObject* options = optionsValue.getObject(); JSValue checksValue = options->get(lexicalGlobalObject, Identifier::fromString(vm, "checks"_s)); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (!checksValue.isUndefined()) { V::validateInt32(scope, lexicalGlobalObject, checksValue, "options.checks"_s, jsNumber(0), jsUndefined(), &checks); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } } ncrypto::BignumPointer candidate = ncrypto::BignumPointer(candidateView->data(), candidateView->size()); if (!candidate) { throwCryptoError(lexicalGlobalObject, scope, ERR_get_error(), "BignumPointer"_s); - return JSValue::encode({}); + return {}; } CheckPrimeJob::createAndSchedule(lexicalGlobalObject, WTFMove(candidate), checks, callback); @@ -207,7 +207,7 @@ void GeneratePrimeJobCtx::runFromJS(JSGlobalObject* globalObject, JSValue callba JSValue result = GeneratePrimeJob::result(globalObject, scope, m_prime, m_bigint); ASSERT(result.isEmpty() == !!scope.exception()); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { auto* err = scope.exception(); scope.clearException(); Bun__EventLoop__runCallback1( @@ -379,12 +379,12 @@ JSC_DEFINE_HOST_FUNCTION(jsGeneratePrime, (JSC::JSGlobalObject * lexicalGlobalOb if (add) { if (ncrypto::BignumPointer::GetBitCount(add.get()) > size) [[unlikely]] { throwError(lexicalGlobalObject, scope, ErrorCode::ERR_OUT_OF_RANGE, "invalid options.add"_s); - return JSValue::encode({}); + return {}; } if (rem && add <= rem) [[unlikely]] { throwError(lexicalGlobalObject, scope, ErrorCode::ERR_OUT_OF_RANGE, "invalid options.rem"_s); - return JSValue::encode({}); + return {}; } } @@ -480,12 +480,12 @@ JSC_DEFINE_HOST_FUNCTION(jsGeneratePrimeSync, (JSC::JSGlobalObject * lexicalGlob if (add) { if (ncrypto::BignumPointer::GetBitCount(add.get()) > size) [[unlikely]] { throwError(lexicalGlobalObject, scope, ErrorCode::ERR_OUT_OF_RANGE, "invalid options.add"_s); - return JSValue::encode({}); + return {}; } if (rem && add <= rem) [[unlikely]] { throwError(lexicalGlobalObject, scope, ErrorCode::ERR_OUT_OF_RANGE, "invalid options.rem"_s); - return JSValue::encode({}); + return {}; } } diff --git a/src/bun.js/bindings/node/crypto/CryptoSignJob.cpp b/src/bun.js/bindings/node/crypto/CryptoSignJob.cpp index 0cce1bb73e..ee7e6bdd00 100644 --- a/src/bun.js/bindings/node/crypto/CryptoSignJob.cpp +++ b/src/bun.js/bindings/node/crypto/CryptoSignJob.cpp @@ -32,7 +32,7 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyOneShot, (JSGlobalObject * lexicalGlobalObject, if (!ctx->m_verifyResult) { throwCryptoError(lexicalGlobalObject, scope, ctx->m_opensslError, "verify operation failed"_s); - return JSValue::encode({}); + return {}; } return JSValue::encode(jsBoolean(*ctx->m_verifyResult)); @@ -60,7 +60,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSignOneShot, (JSGlobalObject * lexicalGlobalObject, C if (!ctx->m_signResult) { throwCryptoError(lexicalGlobalObject, scope, ctx->m_opensslError, "sign operation failed"_s); - return JSValue::encode({}); + return {}; } auto& result = ctx->m_signResult.value(); @@ -69,6 +69,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSignOneShot, (JSGlobalObject * lexicalGlobalObject, C auto sigBuf = ArrayBuffer::createUninitialized(result.size(), 1); memcpy(sigBuf->data(), result.data(), result.size()); auto* signature = JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(sigBuf), 0, result.size()); + RETURN_IF_EXCEPTION(scope, {}); return 
JSValue::encode(signature); } @@ -214,6 +215,7 @@ void SignJobCtx::runFromJS(JSGlobalObject* lexicalGlobalObject, JSValue callback auto sigBuf = ArrayBuffer::createUninitialized(m_signResult->size(), 1); memcpy(sigBuf->data(), m_signResult->data(), m_signResult->size()); auto* signature = JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(sigBuf), 0, m_signResult->size()); + RETURN_IF_EXCEPTION(scope, ); Bun__EventLoop__runCallback2( lexicalGlobalObject, diff --git a/src/bun.js/bindings/node/crypto/CryptoUtil.h b/src/bun.js/bindings/node/crypto/CryptoUtil.h index 4f6331be1a..c8e7c07b1b 100644 --- a/src/bun.js/bindings/node/crypto/CryptoUtil.h +++ b/src/bun.js/bindings/node/crypto/CryptoUtil.h @@ -43,7 +43,6 @@ bool isArrayBufferOrView(JSValue value); std::optional passphraseFromBufferSource(JSC::JSGlobalObject* globalObject, JSC::ThrowScope& scope, JSValue input); JSValue createCryptoError(JSC::JSGlobalObject* globalObject, ThrowScope& scope, uint32_t err, const char* message); void throwCryptoError(JSC::JSGlobalObject* globalObject, JSC::ThrowScope& scope, uint32_t err, const char* message = nullptr); -void throwCryptoOperationFailed(JSC::JSGlobalObject* globalObject, JSC::ThrowScope& scope); std::optional getIntOption(JSC::JSGlobalObject* globalObject, JSC::ThrowScope&, JSValue options, WTF::ASCIILiteral name); int32_t getPadding(JSC::JSGlobalObject* globalObject, JSC::ThrowScope&, JSValue options, const ncrypto::EVPKeyPointer& pkey); std::optional getSaltLength(JSC::JSGlobalObject* globalObject, JSC::ThrowScope& scope, JSValue options); diff --git a/src/bun.js/bindings/node/crypto/JSCipher.cpp b/src/bun.js/bindings/node/crypto/JSCipher.cpp index c5a9e2a6d7..d593ace4b2 100644 --- a/src/bun.js/bindings/node/crypto/JSCipher.cpp +++ b/src/bun.js/bindings/node/crypto/JSCipher.cpp @@ -215,7 +215,7 @@ JSValue rsaFunction(JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* ca return {}; } - return JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(outBuf), 0, result.size()); + RELEASE_AND_RETURN(scope, JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(outBuf), 0, result.size())); } JSC_DEFINE_HOST_FUNCTION(jsPublicEncrypt, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) diff --git a/src/bun.js/bindings/node/crypto/JSCipherConstructor.cpp b/src/bun.js/bindings/node/crypto/JSCipherConstructor.cpp index d9a60647b1..9c8fe5d21c 100644 --- a/src/bun.js/bindings/node/crypto/JSCipherConstructor.cpp +++ b/src/bun.js/bindings/node/crypto/JSCipherConstructor.cpp @@ -98,7 +98,7 @@ JSC_DEFINE_HOST_FUNCTION(constructCipher, (JSC::JSGlobalObject * globalObject, J JSValue isDecipherValue = callFrame->argument(0); ASSERT(isDecipherValue.isBoolean()); CipherKind cipherKind = isDecipherValue.toBoolean(globalObject) ? 
CipherKind::Decipher : CipherKind::Cipher; - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSValue cipherValue = callFrame->argument(1); JSValue keyValue = callFrame->argument(2); @@ -106,37 +106,37 @@ JSC_DEFINE_HOST_FUNCTION(constructCipher, (JSC::JSGlobalObject * globalObject, J JSValue optionsValue = callFrame->argument(4); V::validateString(scope, globalObject, cipherValue, "cipher"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSValue encodingValue = jsUndefined(); if (optionsValue.pureToBoolean() != TriState::False) { encodingValue = optionsValue.get(globalObject, Identifier::fromString(vm, "encoding"_s)); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (encodingValue.isUndefinedOrNull()) { encodingValue = jsUndefined(); } else { V::validateString(scope, globalObject, encodingValue, "options.encoding"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } } KeyObject keyObject = KeyObject::prepareSecretKey(globalObject, scope, keyValue, encodingValue); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); auto keyData = keyObject.symmetricKey().span(); JSArrayBufferView* ivView = nullptr; if (!ivValue.isNull()) { ivView = getArrayBufferOrView(globalObject, scope, ivValue, "iv"_s, jsUndefined()); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } std::optional authTagLength = std::nullopt; if (optionsValue.pureToBoolean() != TriState::False) { JSValue authTagLengthValue = optionsValue.get(globalObject, Identifier::fromString(vm, "authTagLength"_s)); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (!authTagLengthValue.isUndefinedOrNull()) { std::optional maybeAuthTagLength = authTagLengthValue.tryGetAsInt32(); @@ -149,7 +149,7 @@ JSC_DEFINE_HOST_FUNCTION(constructCipher, (JSC::JSGlobalObject * globalObject, J } WTF::String cipherString = cipherValue.toWTFString(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (keyData.size() > INT_MAX) [[unlikely]] { return ERR::OUT_OF_RANGE(scope, globalObject, "key is too big"_s, 0, INT_MAX, jsNumber(keyData.size())); @@ -197,13 +197,13 @@ JSC_DEFINE_HOST_FUNCTION(constructCipher, (JSC::JSGlobalObject * globalObject, J const bool encrypt = cipherKind == CipherKind::Cipher; if (!ctx.init(cipher, encrypt)) { throwCryptoError(globalObject, scope, ERR_get_error(), "Failed to initialize cipher"_s); - return JSValue::encode({}); + return {}; } int32_t maxMessageSize = 0; if (cipher.isSupportedAuthenticatedMode()) { initAuthenticated(globalObject, scope, ctx, cipherString, cipherKind, ivLen, authTagLength, maxMessageSize); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } if (!ctx.setKeyLength(keyData.size())) { diff --git a/src/bun.js/bindings/node/crypto/JSCipherPrototype.cpp b/src/bun.js/bindings/node/crypto/JSCipherPrototype.cpp index 9ce33a4847..676afd1542 100644 --- a/src/bun.js/bindings/node/crypto/JSCipherPrototype.cpp +++ b/src/bun.js/bindings/node/crypto/JSCipherPrototype.cpp @@ -50,7 +50,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherUpdate, (JSC::JSGlobalObject * lexicalGlobalObj JSCipher* cipher = jsDynamicCast(callFrame->thisValue()); if (!cipher) { throwThisTypeError(*lexicalGlobalObject, scope, "Cipher"_s, "update"_s); - return JSValue::encode({}); + return {}; } JSValue dataValue = 
callFrame->argument(0); @@ -60,7 +60,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherUpdate, (JSC::JSGlobalObject * lexicalGlobalObj WTF::String encodingString = WTF::nullString(); JSArrayBufferView* dataView = getArrayBufferOrView(lexicalGlobalObject, scope, dataValue, "data"_s, encodingValue); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); MarkPopErrorOnReturn popError; @@ -70,7 +70,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherUpdate, (JSC::JSGlobalObject * lexicalGlobalObj if (!cipher->m_ctx) { throwCryptoError(lexicalGlobalObject, scope, popError.peekError(), "Trying to add data in unsupported state"); - return JSValue::encode({}); + return {}; } if (cipher->m_ctx.isCcmMode() && !cipher->checkCCMMessageLength(dataView->byteLength())) { @@ -86,7 +86,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherUpdate, (JSC::JSGlobalObject * lexicalGlobalObj const int32_t blockSize = cipher->m_ctx.getBlockSize(); if (dataView->byteLength() + blockSize > INT_MAX) { throwCryptoError(lexicalGlobalObject, scope, popError.peekError(), "Trying to add data in unsupported state"); - return JSValue::encode({}); + return {}; } int32_t bufLen = dataView->byteLength() + blockSize; @@ -97,13 +97,13 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherUpdate, (JSC::JSGlobalObject * lexicalGlobalObj if (cipher->m_kind == CipherKind::Cipher && cipher->m_ctx.isWrapMode() && !cipher->m_ctx.update(buf, nullptr, &bufLen)) { throwCryptoError(lexicalGlobalObject, scope, popError.peekError(), "Trying to add data in unsupported state"); - return JSValue::encode({}); + return {}; } RefPtr outBuf = JSC::ArrayBuffer::tryCreateUninitialized(bufLen, 1); if (!outBuf) { throwOutOfMemoryError(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } buf = { @@ -116,15 +116,15 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherUpdate, (JSC::JSGlobalObject * lexicalGlobalObj if (!res && cipher->m_kind == CipherKind::Decipher && cipher->m_ctx.isCcmMode()) { cipher->m_pendingAuthFailed = true; - return JSValue::encode(JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(outBuf), 0, bufLen)); + RELEASE_AND_RETURN(scope, JSValue::encode(JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(outBuf), 0, bufLen))); } if (res != 1) { throwCryptoError(lexicalGlobalObject, scope, popError.peekError(), "Trying to add data in unsupported state"); - return JSValue::encode({}); + return {}; } - return JSValue::encode(JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(outBuf), 0, bufLen)); + RELEASE_AND_RETURN(scope, JSValue::encode(JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(outBuf), 0, bufLen))); } JSC_DEFINE_HOST_FUNCTION(jsCipherFinal, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) @@ -138,7 +138,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherFinal, (JSC::JSGlobalObject * lexicalGlobalObje JSCipher* cipher = jsDynamicCast(callFrame->thisValue()); if (!cipher) { throwThisTypeError(*lexicalGlobalObject, scope, "Cipher"_s, "final"_s); - return JSValue::encode({}); + return {}; } if (!cipher->m_ctx) { @@ -155,7 +155,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherFinal, (JSC::JSGlobalObject * lexicalGlobalObje RefPtr outBuf = ArrayBuffer::tryCreateUninitialized(outLen, 1); if (!outBuf) { throwOutOfMemoryError(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } if (cipher->m_kind == CipherKind::Decipher && 
Cipher::FromCtx(cipher->m_ctx).isSupportedAuthenticatedMode()) { @@ -164,7 +164,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherFinal, (JSC::JSGlobalObject * lexicalGlobalObje if (cipher->m_kind == CipherKind::Decipher && cipher->m_ctx.isChaCha20Poly1305() && cipher->m_authTagState != AuthTagState::AuthTagPassedToOpenSSL) { throwCryptoErrorWithAuth(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } bool ok; @@ -189,10 +189,10 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherFinal, (JSC::JSGlobalObject * lexicalGlobalObje if (!ok) { throwCryptoErrorWithAuth(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } - return JSValue::encode(JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(outBuf), 0, outLen)); + RELEASE_AND_RETURN(scope, JSValue::encode(JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(outBuf), 0, outLen))); } JSC_DEFINE_HOST_FUNCTION(jsCipherSetAutoPadding, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) @@ -203,13 +203,13 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherSetAutoPadding, (JSC::JSGlobalObject * globalOb JSCipher* cipher = jsDynamicCast(callFrame->thisValue()); if (!cipher) { throwThisTypeError(*globalObject, scope, "Cipher"_s, "setAutoPadding"_s); - return JSValue::encode({}); + return {}; } JSValue paddingValue = callFrame->argument(0); bool padding = paddingValue.toBoolean(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); MarkPopErrorOnReturn popError; if (!cipher->m_ctx.setPadding(padding)) { @@ -227,7 +227,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherGetAuthTag, (JSC::JSGlobalObject * lexicalGloba JSCipher* cipher = jsDynamicCast(callFrame->thisValue()); if (!cipher) { throwThisTypeError(*lexicalGlobalObject, scope, "Cipher"_s, "getAuthTag"_s); - return JSValue::encode({}); + return {}; } if (cipher->m_ctx || cipher->m_kind != CipherKind::Cipher || !cipher->m_authTagLen) { @@ -237,9 +237,10 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherGetAuthTag, (JSC::JSGlobalObject * lexicalGloba auto* globalObject = defaultGlobalObject(lexicalGlobalObject); JSC::JSUint8Array* buf = JSC::JSUint8Array::createUninitialized(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), *cipher->m_authTagLen); + RETURN_IF_EXCEPTION(scope, {}); if (!buf) { throwOutOfMemoryError(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } memcpy(buf->vector(), cipher->m_authTag, *cipher->m_authTagLen); @@ -255,13 +256,13 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherSetAuthTag, (JSC::JSGlobalObject * globalObject JSCipher* cipher = jsDynamicCast(callFrame->thisValue()); if (!cipher) { throwThisTypeError(*globalObject, scope, "Cipher"_s, "setAuthTag"_s); - return JSValue::encode({}); + return {}; } JSValue authTagValue = callFrame->argument(0); JSValue encodingValue = callFrame->argument(1); JSArrayBufferView* authTag = getArrayBufferOrView(globalObject, scope, authTagValue, "buffer"_s, encodingValue); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); ASSERT(authTag); if (!cipher->m_ctx || !cipher->isAuthenticatedMode() || cipher->m_kind != CipherKind::Decipher || cipher->m_authTagState != AuthTagState::AuthTagUnknown) { @@ -311,7 +312,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherSetAAD, (JSC::JSGlobalObject * globalObject, JS JSCipher* cipher = jsDynamicCast(callFrame->thisValue()); if (!cipher) { throwThisTypeError(*globalObject, scope, "Cipher"_s, "setAAD"_s); - return JSValue::encode({}); + return {}; } 
JSValue aadbufValue = callFrame->argument(0); @@ -321,15 +322,15 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherSetAAD, (JSC::JSGlobalObject * globalObject, JS std::optional plaintextLength = std::nullopt; if (optionsValue.pureToBoolean() != TriState::False) { encodingValue = optionsValue.get(globalObject, Identifier::fromString(vm, "encoding"_s)); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (!encodingValue.isUndefinedOrNull()) { V::validateString(scope, globalObject, encodingValue, "options.encoding"_s); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); } JSValue plaintextLengthValue = optionsValue.get(globalObject, Identifier::fromString(vm, "plaintextLength"_s)); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (!plaintextLengthValue.isUndefinedOrNull()) { std::optional maybePlaintextLength = plaintextLengthValue.tryGetAsInt32(); if (!maybePlaintextLength || *maybePlaintextLength < 0) { @@ -341,7 +342,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCipherSetAAD, (JSC::JSGlobalObject * globalObject, JS } JSArrayBufferView* aadbuf = getArrayBufferOrView(globalObject, scope, aadbufValue, "aadbuf"_s, encodingValue); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); ASSERT(aadbuf); if (aadbuf->byteLength() > std::numeric_limits::max()) { diff --git a/src/bun.js/bindings/node/crypto/JSDiffieHellmanConstructor.cpp b/src/bun.js/bindings/node/crypto/JSDiffieHellmanConstructor.cpp index f388977278..7aef773fc4 100644 --- a/src/bun.js/bindings/node/crypto/JSDiffieHellmanConstructor.cpp +++ b/src/bun.js/bindings/node/crypto/JSDiffieHellmanConstructor.cpp @@ -88,7 +88,7 @@ JSC_DEFINE_HOST_FUNCTION(constructDiffieHellman, (JSC::JSGlobalObject * globalOb if (bits < 2) { ERR_put_error(ERR_LIB_DH, 0, DH_R_MODULUS_TOO_LARGE, __FILE__, __LINE__); throwCryptoError(globalObject, scope, ERR_get_error(), "Invalid prime length"_s); - return JSValue::encode({}); + return {}; } if (!generatorValue.isNumber()) { @@ -102,7 +102,7 @@ JSC_DEFINE_HOST_FUNCTION(constructDiffieHellman, (JSC::JSGlobalObject * globalOb if (generator < 2) { ERR_put_error(ERR_LIB_DH, 0, DH_R_BAD_GENERATOR, __FILE__, __LINE__); throwCryptoError(globalObject, scope, ERR_get_error(), "Invalid generator"_s); - return JSValue::encode({}); + return {}; } dh = ncrypto::DHPointer::New(bits, generator); @@ -132,7 +132,7 @@ JSC_DEFINE_HOST_FUNCTION(constructDiffieHellman, (JSC::JSGlobalObject * globalOb if (generator < 2) { ERR_put_error(ERR_LIB_DH, 0, DH_R_BAD_GENERATOR, __FILE__, __LINE__); throwCryptoError(globalObject, scope, ERR_get_error(), "Invalid generator"_s); - return JSValue::encode({}); + return {}; } bn_g = ncrypto::BignumPointer::New(); if (!bn_g.setWord(generator)) { @@ -155,7 +155,7 @@ JSC_DEFINE_HOST_FUNCTION(constructDiffieHellman, (JSC::JSGlobalObject * globalOb if (bn_g.getWord() < 2) { ERR_put_error(ERR_LIB_DH, 0, DH_R_BAD_GENERATOR, __FILE__, __LINE__); throwCryptoError(globalObject, scope, ERR_get_error(), "Invalid generator"_s); - return JSValue::encode({}); + return {}; } } diff --git a/src/bun.js/bindings/node/crypto/JSECDHConstructor.cpp b/src/bun.js/bindings/node/crypto/JSECDHConstructor.cpp index 45f835411a..3d2a8be7af 100644 --- a/src/bun.js/bindings/node/crypto/JSECDHConstructor.cpp +++ b/src/bun.js/bindings/node/crypto/JSECDHConstructor.cpp @@ -125,7 +125,7 @@ JSC_DEFINE_HOST_FUNCTION(jsECDHConvertKey, (JSC::JSGlobalObject * lexicalGlobalO WTF::Vector buf; if (!buf.tryGrow(size)) { 
throwOutOfMemoryError(lexicalGlobalObject, scope); - return JSValue::encode({}); + return {}; } if (!EC_POINT_point2oct(group, point, form, buf.begin(), buf.size(), nullptr)) { diff --git a/src/bun.js/bindings/node/crypto/JSHash.cpp b/src/bun.js/bindings/node/crypto/JSHash.cpp index 98fc4f4542..1216f28895 100644 --- a/src/bun.js/bindings/node/crypto/JSHash.cpp +++ b/src/bun.js/bindings/node/crypto/JSHash.cpp @@ -244,7 +244,7 @@ JSC_DEFINE_HOST_FUNCTION(jsHashProtoFuncDigest, (JSC::JSGlobalObject * lexicalGl auto totalDigestLen = ExternZigHash::digest(hash->m_zigHasher, globalObject, hash->m_digestBuffer.mutableSpan()); if (!totalDigestLen) { throwCryptoError(lexicalGlobalObject, scope, ERR_get_error(), "Failed to finalize digest"_s); - return JSValue::encode({}); + return {}; } hash->m_finalized = finalized; @@ -272,7 +272,7 @@ JSC_DEFINE_HOST_FUNCTION(jsHashProtoFuncDigest, (JSC::JSGlobalObject * lexicalGl auto data = hash->m_ctx.digestFinal(bufLen); if (!data) { throwCryptoError(lexicalGlobalObject, scope, ERR_get_error(), "Failed to finalize digest"_s); - return JSValue::encode({}); + return {}; } // Some hash algorithms don't support calling EVP_DigestFinal_ex more than once @@ -284,11 +284,7 @@ JSC_DEFINE_HOST_FUNCTION(jsHashProtoFuncDigest, (JSC::JSGlobalObject * lexicalGl hash->m_finalized = finalized; // Return the digest with the requested encoding - return StringBytes::encode( - lexicalGlobalObject, - scope, - std::span { reinterpret_cast(hash->m_digest.data()), len }, - encoding); + RELEASE_AND_RETURN(scope, StringBytes::encode(lexicalGlobalObject, scope, std::span { reinterpret_cast(hash->m_digest.data()), len }, encoding)); } JSC_DEFINE_HOST_FUNCTION(constructHash, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) @@ -365,19 +361,19 @@ JSC_DEFINE_HOST_FUNCTION(constructHash, (JSC::JSGlobalObject * globalObject, JSC if (zigHasher) { if (!hash->initZig(globalObject, scope, zigHasher, xofLen)) { throwCryptoError(globalObject, scope, 0, "Digest method not supported"_s); - return JSValue::encode({}); + return {}; } return JSValue::encode(hash); } if (md == nullptr || !hash->init(globalObject, scope, md, xofLen)) { throwCryptoError(globalObject, scope, ERR_get_error(), "Digest method not supported"_s); - return JSValue::encode({}); + return {}; } if (original != nullptr && !original->m_ctx.copyTo(hash->m_ctx)) { throwCryptoError(globalObject, scope, ERR_get_error(), "Digest copy error"_s); - return JSValue::encode({}); + return {}; } return JSC::JSValue::encode(hash); diff --git a/src/bun.js/bindings/node/crypto/JSHmac.cpp b/src/bun.js/bindings/node/crypto/JSHmac.cpp index fff0284917..82212b7954 100644 --- a/src/bun.js/bindings/node/crypto/JSHmac.cpp +++ b/src/bun.js/bindings/node/crypto/JSHmac.cpp @@ -224,11 +224,7 @@ JSC_DEFINE_HOST_FUNCTION(jsHmacProtoFuncDigest, (JSC::JSGlobalObject * lexicalGl // works because m_ctx is reset after digest hmac->m_finalized = true; - return StringBytes::encode( - lexicalGlobalObject, - scope, - std::span { reinterpret_cast(mdBuffer.data), mdBuffer.len }, - encoding); + RELEASE_AND_RETURN(scope, StringBytes::encode(lexicalGlobalObject, scope, std::span { reinterpret_cast(mdBuffer.data), mdBuffer.len }, encoding)); } JSC_DEFINE_HOST_FUNCTION(constructHmac, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) diff --git a/src/bun.js/bindings/node/crypto/JSKeyObjectConstructor.cpp b/src/bun.js/bindings/node/crypto/JSKeyObjectConstructor.cpp index 47370b03ed..52f0fe0448 100644 --- 
a/src/bun.js/bindings/node/crypto/JSKeyObjectConstructor.cpp +++ b/src/bun.js/bindings/node/crypto/JSKeyObjectConstructor.cpp @@ -46,7 +46,7 @@ JSC_DEFINE_HOST_FUNCTION(callKeyObject, (JSC::JSGlobalObject * lexicalGlobalObje VM& vm = lexicalGlobalObject->vm(); ThrowScope scope = DECLARE_THROW_SCOPE(vm); throwTypeError(lexicalGlobalObject, scope, "Cannot call KeyObject class constructor without |new|"_s); - return JSValue::encode({}); + return {}; } JSC_DEFINE_HOST_FUNCTION(constructKeyObject, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) @@ -63,9 +63,9 @@ JSC_DEFINE_HOST_FUNCTION(constructKeyObject, (JSC::JSGlobalObject * lexicalGloba } JSString* typeString = typeValue.toString(lexicalGlobalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); GCOwnedDataScope typeView = typeString->view(lexicalGlobalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); if (typeView != "secret"_s && typeView != "public"_s && typeView != "private"_s) { return ERR::INVALID_ARG_VALUE(scope, lexicalGlobalObject, "type"_s, typeValue); @@ -95,7 +95,7 @@ JSC_DEFINE_HOST_FUNCTION(jsKeyObjectConstructor_from, (JSGlobalObject * lexicalG auto keyObjectResult = KeyObject::create(wrappedKey); if (keyObjectResult.hasException()) [[unlikely]] { WebCore::propagateException(*lexicalGlobalObject, scope, keyObjectResult.releaseException()); - return JSValue::encode({}); + RELEASE_AND_RETURN(scope, {}); } // 2. Determine Key Type and Extract Material diff --git a/src/bun.js/bindings/node/crypto/JSKeyObjectPrototype.cpp b/src/bun.js/bindings/node/crypto/JSKeyObjectPrototype.cpp index 79fa4aee24..e6b7140258 100644 --- a/src/bun.js/bindings/node/crypto/JSKeyObjectPrototype.cpp +++ b/src/bun.js/bindings/node/crypto/JSKeyObjectPrototype.cpp @@ -38,7 +38,7 @@ JSC_DEFINE_HOST_FUNCTION(jsKeyObjectPrototype_equals, (JSGlobalObject * globalOb JSKeyObject* thisObject = jsDynamicCast<JSKeyObject*>(callFrame->thisValue()); if (!thisObject) { throwThisTypeError(*globalObject, scope, "KeyObject"_s, "equals"_s); - return JSValue::encode({}); + return {}; } JSValue otherKeyObjectValue = callFrame->argument(0); diff --git a/src/bun.js/bindings/node/crypto/JSPrivateKeyObjectConstructor.cpp b/src/bun.js/bindings/node/crypto/JSPrivateKeyObjectConstructor.cpp index 820409fd8f..a0d69f850b 100644 --- a/src/bun.js/bindings/node/crypto/JSPrivateKeyObjectConstructor.cpp +++ b/src/bun.js/bindings/node/crypto/JSPrivateKeyObjectConstructor.cpp @@ -24,7 +24,7 @@ JSC_DEFINE_HOST_FUNCTION(callPrivateKeyObject, (JSC::JSGlobalObject * lexicalGlo VM& vm = lexicalGlobalObject->vm(); ThrowScope scope = DECLARE_THROW_SCOPE(vm); throwConstructorCannotBeCalledAsFunctionTypeError(lexicalGlobalObject, scope, "PrivateKeyObject"_s); - return JSValue::encode({}); + return {}; } JSC_DEFINE_HOST_FUNCTION(constructPrivateKeyObject, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) diff --git a/src/bun.js/bindings/node/crypto/JSPrivateKeyObjectPrototype.cpp b/src/bun.js/bindings/node/crypto/JSPrivateKeyObjectPrototype.cpp index 412e4d7ec5..753bc6fc93 100644 --- a/src/bun.js/bindings/node/crypto/JSPrivateKeyObjectPrototype.cpp +++ b/src/bun.js/bindings/node/crypto/JSPrivateKeyObjectPrototype.cpp @@ -96,7 +96,7 @@ JSC_DEFINE_HOST_FUNCTION(jsPrivateKeyObjectPrototype_asymmetricKeyDetails, (JSGl // JSPrivateKeyObject* privateKeyObject = jsDynamicCast<JSPrivateKeyObject*>(callFrame->thisValue()); // if (!privateKeyObject) { // throwThisTypeError(*globalObject, scope,
"PrivateKeyObject"_s, "toCryptoKey"_s); -// return JSValue::encode({}); +// return {}; // } // KeyObject& handle = privateKeyObject->handle(); diff --git a/src/bun.js/bindings/node/crypto/JSPublicKeyObjectConstructor.cpp b/src/bun.js/bindings/node/crypto/JSPublicKeyObjectConstructor.cpp index fd9f52b531..161717efdf 100644 --- a/src/bun.js/bindings/node/crypto/JSPublicKeyObjectConstructor.cpp +++ b/src/bun.js/bindings/node/crypto/JSPublicKeyObjectConstructor.cpp @@ -24,7 +24,7 @@ JSC_DEFINE_HOST_FUNCTION(callPublicKeyObject, (JSC::JSGlobalObject * lexicalGlob VM& vm = lexicalGlobalObject->vm(); ThrowScope scope = DECLARE_THROW_SCOPE(vm); throwConstructorCannotBeCalledAsFunctionTypeError(lexicalGlobalObject, scope, "PublicKeyObject"_s); - return JSValue::encode({}); + return {}; } JSC_DEFINE_HOST_FUNCTION(constructPublicKeyObject, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) diff --git a/src/bun.js/bindings/node/crypto/JSPublicKeyObjectPrototype.cpp b/src/bun.js/bindings/node/crypto/JSPublicKeyObjectPrototype.cpp index f21c272c7d..eda6e4f1e0 100644 --- a/src/bun.js/bindings/node/crypto/JSPublicKeyObjectPrototype.cpp +++ b/src/bun.js/bindings/node/crypto/JSPublicKeyObjectPrototype.cpp @@ -44,7 +44,7 @@ JSC_DEFINE_HOST_FUNCTION(jsPublicKeyObjectPrototype_export, (JSGlobalObject * gl JSPublicKeyObject* publicKeyObject = jsDynamicCast(callFrame->thisValue()); if (!publicKeyObject) { throwThisTypeError(*globalObject, scope, "PublicKeyObject"_s, "export"_s); - return JSValue::encode({}); + return {}; } KeyObject& handle = publicKeyObject->handle(); @@ -97,7 +97,7 @@ JSC_DEFINE_HOST_FUNCTION(jsPublicKeyObjectPrototype_asymmetricKeyDetails, (JSGlo // JSPublicKeyObject* publicKeyObject = jsDynamicCast(callFrame->thisValue()); // if (!publicKeyObject) { // throwThisTypeError(*globalObject, scope, "PublicKeyObject"_s, "toCryptoKey"_s); -// return JSValue::encode({}); +// return {}; // } // KeyObject& handle = publicKeyObject->handle(); diff --git a/src/bun.js/bindings/node/crypto/JSSecretKeyObjectConstructor.cpp b/src/bun.js/bindings/node/crypto/JSSecretKeyObjectConstructor.cpp index 6e5e5f2e37..3caa180da4 100644 --- a/src/bun.js/bindings/node/crypto/JSSecretKeyObjectConstructor.cpp +++ b/src/bun.js/bindings/node/crypto/JSSecretKeyObjectConstructor.cpp @@ -24,7 +24,7 @@ JSC_DEFINE_HOST_FUNCTION(callSecretKeyObject, (JSC::JSGlobalObject * lexicalGlob VM& vm = lexicalGlobalObject->vm(); ThrowScope scope = DECLARE_THROW_SCOPE(vm); throwConstructorCannotBeCalledAsFunctionTypeError(lexicalGlobalObject, scope, "SecretKeyObject"_s); - return JSValue::encode({}); + return {}; } JSC_DEFINE_HOST_FUNCTION(constructSecretKeyObject, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) diff --git a/src/bun.js/bindings/node/crypto/JSSecretKeyObjectPrototype.cpp b/src/bun.js/bindings/node/crypto/JSSecretKeyObjectPrototype.cpp index 222871d1fd..57dac4bf87 100644 --- a/src/bun.js/bindings/node/crypto/JSSecretKeyObjectPrototype.cpp +++ b/src/bun.js/bindings/node/crypto/JSSecretKeyObjectPrototype.cpp @@ -42,7 +42,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSecretKeyObjectExport, (JSGlobalObject * globalObject JSSecretKeyObject* secretKeyObject = jsDynamicCast(callFrame->thisValue()); if (!secretKeyObject) { throwThisTypeError(*globalObject, scope, "SecretKeyObject"_s, "export"_s); - return JSValue::encode({}); + return {}; } KeyObject& handle = secretKeyObject->handle(); @@ -70,7 +70,7 @@ JSC_DEFINE_CUSTOM_GETTER(jsSecretKeyObjectSymmetricKeySize, (JSGlobalObject*, JS // 
JSSecretKeyObject* secretKeyObject = jsDynamicCast(callFrame->thisValue()); // if (!secretKeyObject) { // throwThisTypeError(*globalObject, scope, "SecretKeyObject"_s, "toCryptoKey"_s); -// return JSValue::encode({}); +// return {}; // } // KeyObject& handle = secretKeyObject->handle(); diff --git a/src/bun.js/bindings/node/crypto/JSSign.cpp b/src/bun.js/bindings/node/crypto/JSSign.cpp index 9bbdab4eae..977bd2646d 100644 --- a/src/bun.js/bindings/node/crypto/JSSign.cpp +++ b/src/bun.js/bindings/node/crypto/JSSign.cpp @@ -233,7 +233,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSignProtoFuncUpdate, (JSC::JSGlobalObject * globalObj JSSign* thisObject = jsDynamicCast(callFrame->thisValue()); if (!thisObject) [[unlikely]] { Bun::throwThisTypeError(*globalObject, scope, "Sign"_s, "update"_s); - return JSValue::encode({}); + return {}; } JSValue wrappedSign = callFrame->argument(0); @@ -241,7 +241,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSignProtoFuncUpdate, (JSC::JSGlobalObject * globalObj // Check that we have at least 1 argument (the data) if (callFrame->argumentCount() < 2) { throwVMError(globalObject, scope, "Sign.prototype.update requires at least 1 argument"_s); - return JSValue::encode({}); + return {}; } // Get the data argument @@ -250,7 +250,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSignProtoFuncUpdate, (JSC::JSGlobalObject * globalObj // if it's a string, using encoding for decode. if it's a buffer, just use the buffer if (data.isString()) { JSString* dataString = data.toString(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSValue encodingValue = callFrame->argument(2); auto encoding = parseEnumeration(*globalObject, encodingValue).value_or(BufferEncodingType::utf8); @@ -264,12 +264,12 @@ JSC_DEFINE_HOST_FUNCTION(jsSignProtoFuncUpdate, (JSC::JSGlobalObject * globalObj RETURN_IF_EXCEPTION(scope, {}); JSValue buf = JSValue::decode(constructFromEncoding(globalObject, dataView, encoding)); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); auto* view = jsDynamicCast(buf); updateWithBufferView(globalObject, thisObject, view); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(wrappedSign); } @@ -282,7 +282,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSignProtoFuncUpdate, (JSC::JSGlobalObject * globalObj if (auto* view = JSC::jsDynamicCast(data)) { updateWithBufferView(globalObject, thisObject, view); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(wrappedSign); } @@ -382,7 +382,7 @@ JSUint8Array* signWithKey(JSC::JSGlobalObject* lexicalGlobalObject, JSSign* this // Create and return JSUint8Array auto* globalObject = defaultGlobalObject(lexicalGlobalObject); - return JSC::JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(sigBuffer), 0, sigBuf.len); + RELEASE_AND_RETURN(scope, JSC::JSUint8Array::create(lexicalGlobalObject, globalObject->JSBufferSubclassStructure(), WTFMove(sigBuffer), 0, sigBuf.len)); } JSC_DEFINE_HOST_FUNCTION(jsSignProtoFuncSign, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) @@ -403,7 +403,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSignProtoFuncSign, (JSC::JSGlobalObject * lexicalGlob JSValue options = callFrame->argument(0); bool optionsBool = options.toBoolean(lexicalGlobalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); // 
https://github.com/nodejs/node/blob/1b2d2f7e682268228b1352cba7389db01614812a/lib/internal/crypto/sig.js#L116 if (!optionsBool) { @@ -416,18 +416,18 @@ JSC_DEFINE_HOST_FUNCTION(jsSignProtoFuncSign, (JSC::JSGlobalObject * lexicalGlob JSValue outputEncodingValue = callFrame->argument(1); auto outputEncoding = parseEnumeration(*lexicalGlobalObject, outputEncodingValue).value_or(BufferEncodingType::buffer); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); // Get RSA padding mode and salt length if applicable int32_t padding = getPadding(lexicalGlobalObject, scope, options, {}); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); std::optional saltLen = getSaltLength(lexicalGlobalObject, scope, options); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); // Get DSA signature encoding format DSASigEnc dsaSigEnc = getDSASigEnc(lexicalGlobalObject, scope, options); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); auto prepareResult = KeyObject::preparePrivateKey(lexicalGlobalObject, scope, options); RETURN_IF_EXCEPTION(scope, {}); @@ -472,7 +472,7 @@ JSC_DEFINE_HOST_FUNCTION(callSign, (JSC::JSGlobalObject * globalObject, JSC::Cal JSC::VM& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); throwTypeError(globalObject, scope, "Sign constructor cannot be called as a function"_s); - return JSValue::encode({}); + return {}; } JSC_DEFINE_HOST_FUNCTION(constructSign, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) diff --git a/src/bun.js/bindings/node/crypto/JSVerify.cpp b/src/bun.js/bindings/node/crypto/JSVerify.cpp index a34eab4fca..d0d89d18c4 100644 --- a/src/bun.js/bindings/node/crypto/JSVerify.cpp +++ b/src/bun.js/bindings/node/crypto/JSVerify.cpp @@ -213,7 +213,7 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncUpdate, (JSGlobalObject * globalObject JSVerify* thisObject = jsDynamicCast(callFrame->thisValue()); if (!thisObject) [[unlikely]] { Bun::throwThisTypeError(*globalObject, scope, "Verify"_s, "update"_s); - return JSValue::encode({}); + return {}; } JSValue wrappedVerify = callFrame->argument(0); @@ -221,7 +221,7 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncUpdate, (JSGlobalObject * globalObject // Check that we have at least 1 argument (the data) if (callFrame->argumentCount() < 2) { throwVMError(globalObject, scope, "Verify.prototype.update requires at least 1 argument"_s); - return JSValue::encode({}); + return {}; } // Get the data argument @@ -230,7 +230,7 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncUpdate, (JSGlobalObject * globalObject // if it's a string, using encoding for decode. 
if it's a buffer, just use the buffer if (data.isString()) { JSString* dataString = data.toString(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSValue encodingValue = callFrame->argument(2); auto encoding = parseEnumeration(*globalObject, encodingValue).value_or(BufferEncodingType::utf8); @@ -241,22 +241,22 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncUpdate, (JSGlobalObject * globalObject } auto dataView = dataString->view(globalObject); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); JSValue buf = JSValue::decode(constructFromEncoding(globalObject, dataView, encoding)); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); auto* view = jsDynamicCast(buf); // Update the digest context with the buffer data if (view->isDetached()) { throwTypeError(globalObject, scope, "Buffer is detached"_s); - return JSValue::encode({}); + return {}; } size_t byteLength = view->byteLength(); if (byteLength > INT_MAX) { throwRangeError(globalObject, scope, "data is too long"_s); - return JSValue::encode({}); + return {}; } auto buffer = ncrypto::Buffer { @@ -266,7 +266,7 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncUpdate, (JSGlobalObject * globalObject if (!thisObject->m_mdCtx.digestUpdate(buffer)) { throwCryptoError(globalObject, scope, ERR_get_error(), "Failed to update digest"); - return JSValue::encode({}); + return {}; } return JSValue::encode(wrappedVerify); @@ -280,13 +280,13 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncUpdate, (JSGlobalObject * globalObject if (auto* view = JSC::jsDynamicCast(data)) { if (view->isDetached()) { throwTypeError(globalObject, scope, "Buffer is detached"_s); - return JSValue::encode({}); + return {}; } size_t byteLength = view->byteLength(); if (byteLength > INT_MAX) { throwRangeError(globalObject, scope, "data is too long"_s); - return JSValue::encode({}); + return {}; } auto buffer = ncrypto::Buffer { @@ -296,7 +296,7 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncUpdate, (JSGlobalObject * globalObject if (!thisObject->m_mdCtx.digestUpdate(buffer)) { throwCryptoError(globalObject, scope, ERR_get_error(), "Failed to update digest"); - return JSValue::encode({}); + return {}; } return JSValue::encode(wrappedVerify); @@ -316,13 +316,13 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncVerify, (JSGlobalObject * globalObject JSVerify* thisObject = jsDynamicCast(callFrame->thisValue()); if (!thisObject) [[unlikely]] { Bun::throwThisTypeError(*globalObject, scope, "Verify"_s, "verify"_s); - return JSValue::encode({}); + return {}; } // Check if the context is initialized if (!thisObject->m_mdCtx) { throwTypeError(globalObject, scope, "Verify.prototype.verify cannot be called before Verify.prototype.init"_s); - return JSValue::encode({}); + return {}; } // This function receives two arguments: options and signature @@ -331,7 +331,7 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncVerify, (JSGlobalObject * globalObject JSValue sigEncodingValue = callFrame->argument(2); JSC::JSArrayBufferView* signatureBuffer = getArrayBufferOrView(globalObject, scope, signatureValue, "signature"_s, sigEncodingValue); - RETURN_IF_EXCEPTION(scope, JSValue::encode({})); + RETURN_IF_EXCEPTION(scope, {}); auto prepareResult = KeyObject::preparePublicOrPrivateKey(globalObject, scope, options); RETURN_IF_EXCEPTION(scope, {}); @@ -371,35 +371,35 @@ JSC_DEFINE_HOST_FUNCTION(jsVerifyProtoFuncVerify, (JSGlobalObject * globalObject // Validate DSA parameters if (!keyPtr.validateDsaParameters()) 
{ throwTypeError(globalObject, scope, "Invalid DSA parameters"_s); - return JSValue::encode({}); + return {}; } // Get the final digest auto data = mdCtx.digestFinal(mdCtx.getExpectedSize()); if (!data) { throwTypeError(globalObject, scope, "Failed to finalize digest"_s); - return JSValue::encode({}); + return {}; } // Create verification context auto pkctx = keyPtr.newCtx(); if (!pkctx || pkctx.initForVerify() <= 0) { throwCryptoError(globalObject, scope, ERR_peek_error(), "Failed to initialize verification context"_s); - return JSValue::encode({}); + return {}; } // Set RSA padding mode and salt length if applicable if (keyPtr.isRsaVariant()) { if (!ncrypto::EVPKeyCtxPointer::setRsaPadding(pkctx.get(), padding, saltLen)) { throwCryptoError(globalObject, scope, ERR_peek_error(), "Failed to set RSA padding"_s); - return JSValue::encode({}); + return {}; } } // Set signature MD from the digest context if (!pkctx.setSignatureMd(mdCtx)) { throwCryptoError(globalObject, scope, ERR_peek_error(), "Failed to set signature message digest"_s); - return JSValue::encode({}); + return {}; } // Handle P1363 format conversion for EC keys if needed diff --git a/src/bun.js/bindings/node/crypto/KeyObject.cpp b/src/bun.js/bindings/node/crypto/KeyObject.cpp index 563c186c3a..aff9e9deaf 100644 --- a/src/bun.js/bindings/node/crypto/KeyObject.cpp +++ b/src/bun.js/bindings/node/crypto/KeyObject.cpp @@ -1220,7 +1220,7 @@ KeyObject::PrepareAsymmetricKeyResult KeyObject::prepareAsymmetricKey(JSC::JSGlo auto keyObject = create(key); if (keyObject.hasException()) [[unlikely]] { WebCore::propagateException(*globalObject, scope, keyObject.releaseException()); - return {}; + RELEASE_AND_RETURN(scope, {}); } KeyObject handle = keyObject.releaseReturnValue(); RETURN_IF_EXCEPTION(scope, {}); @@ -1288,6 +1288,7 @@ KeyObject::PrepareAsymmetricKeyResult KeyObject::prepareAsymmetricKey(JSC::JSGlo auto keyObject = create(key); if (keyObject.hasException()) [[unlikely]] { WebCore::propagateException(*globalObject, scope, keyObject.releaseException()); + RELEASE_AND_RETURN(scope, {}); } KeyObject handle = keyObject.releaseReturnValue(); return { .keyData = handle.data() }; diff --git a/src/bun.js/bindings/node/crypto/node_crypto_binding.cpp b/src/bun.js/bindings/node/crypto/node_crypto_binding.cpp index 0c5bd60dd4..e9fd3535de 100644 --- a/src/bun.js/bindings/node/crypto/node_crypto_binding.cpp +++ b/src/bun.js/bindings/node/crypto/node_crypto_binding.cpp @@ -123,7 +123,7 @@ JSC_DEFINE_HOST_FUNCTION(jsGetCiphers, (JSC::JSGlobalObject * lexicalGlobalObjec EVP_CIPHER_do_all_sorted(callback, &ctx); if (ctx.hasException) - return JSValue::encode({}); + return {}; return JSValue::encode(result); } @@ -202,6 +202,7 @@ JSC_DEFINE_HOST_FUNCTION(jsCertExportChallenge, (JSC::JSGlobalObject * lexicalGl } auto* bufferResult = JSC::JSUint8Array::create(lexicalGlobalObject, reinterpret_cast(lexicalGlobalObject)->JSBufferSubclassStructure(), WTFMove(result), 0, cert.len); + RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(bufferResult); } diff --git a/src/bun.js/bindings/node/http/JSConnectionsList.cpp b/src/bun.js/bindings/node/http/JSConnectionsList.cpp index 20ee467ea9..bdc549c83c 100644 --- a/src/bun.js/bindings/node/http/JSConnectionsList.cpp +++ b/src/bun.js/bindings/node/http/JSConnectionsList.cpp @@ -60,6 +60,7 @@ JSArray* JSConnectionsList::all(JSGlobalObject* globalObject) RETURN_IF_EXCEPTION(scope, {}); auto iter = JSSetIterator::create(globalObject, globalObject->setIteratorStructure(), all, IterationKind::Keys); + 
RETURN_IF_EXCEPTION(scope, nullptr); JSValue item; size_t i = 0; @@ -85,6 +86,7 @@ JSArray* JSConnectionsList::idle(JSGlobalObject* globalObject) RETURN_IF_EXCEPTION(scope, {}); auto iter = JSSetIterator::create(globalObject, globalObject->setIteratorStructure(), all, IterationKind::Keys); + RETURN_IF_EXCEPTION(scope, nullptr); JSValue item; size_t i = 0; @@ -112,6 +114,7 @@ JSArray* JSConnectionsList::active(JSGlobalObject* globalObject) RETURN_IF_EXCEPTION(scope, {}); auto iter = JSSetIterator::create(globalObject, globalObject->setIteratorStructure(), active, IterationKind::Keys); + RETURN_IF_EXCEPTION(scope, nullptr); JSValue item; size_t i = 0; @@ -137,6 +140,7 @@ JSArray* JSConnectionsList::expired(JSGlobalObject* globalObject, uint64_t heade RETURN_IF_EXCEPTION(scope, {}); auto iter = JSSetIterator::create(globalObject, globalObject->setIteratorStructure(), active, IterationKind::Keys); + RETURN_IF_EXCEPTION(scope, nullptr); JSValue item = iter->next(vm); size_t i = 0; diff --git a/src/bun.js/bindings/node/http/NodeHTTPParser.cpp b/src/bun.js/bindings/node/http/NodeHTTPParser.cpp index c7986785b3..2320ca1655 100644 --- a/src/bun.js/bindings/node/http/NodeHTTPParser.cpp +++ b/src/bun.js/bindings/node/http/NodeHTTPParser.cpp @@ -541,7 +541,7 @@ int HTTPParser::onBody(const char* at, size_t length) JSC::profiledCall(lexicalGlobalObject, ProfilingReason::API, onBodyCallback, callData, m_thisParser, args); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { llhttp_set_error_reason(&m_parserData, "HPE_USER:JS Exception"); return HPE_USER; } @@ -583,7 +583,7 @@ int HTTPParser::onMessageComplete() MarkedArgumentBuffer args; JSC::profiledCall(globalObject, ProfilingReason::API, onMessageCompleteCallback, callData, thisParser, args); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { return -1; } diff --git a/src/bun.js/bindings/sqlite/JSSQLStatement.cpp b/src/bun.js/bindings/sqlite/JSSQLStatement.cpp index 7c580590ef..38cbd9fbb3 100644 --- a/src/bun.js/bindings/sqlite/JSSQLStatement.cpp +++ b/src/bun.js/bindings/sqlite/JSSQLStatement.cpp @@ -520,7 +520,9 @@ static JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, sqlite3_stmt size_t len = sqlite3_column_bytes(stmt, i); const void* blob = len > 0 ? sqlite3_column_blob(stmt, i) : nullptr; if (len > 0 && blob != nullptr) [[likely]] { + auto scope = DECLARE_THROW_SCOPE(vm); JSC::JSUint8Array* array = JSC::JSUint8Array::createUninitialized(globalObject, globalObject->m_typedArrayUint8.get(globalObject), len); + RETURN_IF_EXCEPTION(scope, {}); memcpy(array->vector(), blob, len); return array; } @@ -1803,7 +1805,7 @@ void JSSQLStatementConstructor::finishCreation(VM& vm) // TODO: use LazyClassStructure? auto* instanceObject = JSSQLStatement::create(reinterpret_cast(globalObject()), nullptr, nullptr); - JSValue proto = instanceObject->getPrototype(vm, globalObject()); + JSValue proto = instanceObject->getPrototype(globalObject()); this->putDirect(vm, vm.propertyNames->prototype, proto, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly); diff --git a/src/bun.js/bindings/sqlite/sqlite3.c b/src/bun.js/bindings/sqlite/sqlite3.c index df39d807df..191d99f0d1 100644 --- a/src/bun.js/bindings/sqlite/sqlite3.c +++ b/src/bun.js/bindings/sqlite/sqlite3.c @@ -1,7 +1,7 @@ // clang-format off /****************************************************************************** ** This file is an amalgamation of many separate C source files from SQLite -** version 3.50.1. 
By combining all the individual C code files into this +** version 3.50.2. By combining all the individual C code files into this ** single large file, the entire code can be compiled as a single translation ** unit. This allows many compilers to do optimizations that would not be ** possible if the files were compiled separately. Performance improvements @@ -19,7 +19,7 @@ ** separate file. This file contains only code for the core SQLite library. ** ** The content in this amalgamation comes from Fossil check-in -** b77dc5e0f596d2140d9ac682b2893ff65d3a with changes in files: +** 2af157d77fb1304a74176eaee7fbc7c7e932 with changes in files: ** ** */ @@ -466,9 +466,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. */ -#define SQLITE_VERSION "3.50.1" -#define SQLITE_VERSION_NUMBER 3050001 -#define SQLITE_SOURCE_ID "2025-06-06 14:52:32 b77dc5e0f596d2140d9ac682b2893ff65d3a4140aa86067a3efebe29dc914c95" +#define SQLITE_VERSION "3.50.2" +#define SQLITE_VERSION_NUMBER 3050002 +#define SQLITE_SOURCE_ID "2025-06-28 14:00:48 2af157d77fb1304a74176eaee7fbc7c7e932d946bf25325e9c26c91db19e3079" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -4399,7 +4399,7 @@ SQLITE_API sqlite3_file *sqlite3_database_file_object(const char*); ** ** The sqlite3_create_filename(D,J,W,N,P) allocates memory to hold a version of ** database filename D with corresponding journal file J and WAL file W and -** with N URI parameters key/values pairs in the array P. The result from +** an array P of N URI Key/Value pairs. The result from ** sqlite3_create_filename(D,J,W,N,P) is a pointer to a database filename that ** is safe to pass to routines like: **
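
The hunk above is an upstream wording fix to the `sqlite3_create_filename(D,J,W,N,P)` documentation. For orientation, here is a minimal sketch of how a VFS shim might use that API together with `sqlite3_uri_parameter()` and `sqlite3_free_filename()`; the file paths and parameter names are illustrative only, not taken from this diff.

```cpp
#include <sqlite3.h>
#include <cassert>
#include <cstring>

int main() {
    // P holds N key/value pairs, so the array has 2*N entries; here N = 2.
    const char* azParam[] = { "vfs", "unix", "cache", "shared" };
    sqlite3_filename zName = sqlite3_create_filename(
        "/tmp/demo.db",         // D: database filename
        "/tmp/demo.db-journal", // J: rollback-journal filename
        "/tmp/demo.db-wal",     // W: WAL filename
        2,                      // N: number of key/value pairs in P
        azParam);               // P: the URI key/value pairs
    assert(zName != nullptr);

    // The result behaves like a URI-style database filename:
    const char* cache = sqlite3_uri_parameter(zName, "cache");
    assert(cache && std::strcmp(cache, "shared") == 0);

    // Filenames from sqlite3_create_filename() must be released with
    // sqlite3_free_filename(), never with plain sqlite3_free().
    sqlite3_free_filename(zName);
    return 0;
}
```
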
    @@ -5080,7 +5080,7 @@ typedef struct sqlite3_context sqlite3_context; ** METHOD: sqlite3_stmt ** ** ^(In the SQL statement text input to [sqlite3_prepare_v2()] and its variants, -** literals may be replaced by a [parameter] that matches one of following +** literals may be replaced by a [parameter] that matches one of the following ** templates: ** **
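
This hunk fixes the grammar in the doc comment for SQL parameter templates; the template list itself (`?`, `?NNN`, `:VVV`, `@VVV`, `$VVV`) is dropped from the hunk's context here. A small self-contained sketch of two of those templates, using only the standard `sqlite3_bind_*` API:

```cpp
#include <sqlite3.h>
#include <cstdio>

int main() {
    sqlite3* db = nullptr;
    if (sqlite3_open(":memory:", &db) != SQLITE_OK) return 1;
    sqlite3_exec(db, "CREATE TABLE t(a INTEGER, b TEXT)", nullptr, nullptr, nullptr);

    // "?1" is a numbered parameter template, ":b" a named one.
    sqlite3_stmt* stmt = nullptr;
    if (sqlite3_prepare_v2(db, "INSERT INTO t VALUES(?1, :b)", -1, &stmt, nullptr) != SQLITE_OK)
        return 1;

    sqlite3_bind_int(stmt, 1, 42);                            // binds ?1 by index
    const int idx = sqlite3_bind_parameter_index(stmt, ":b"); // resolves the named template
    sqlite3_bind_text(stmt, idx, "hello", -1, SQLITE_TRANSIENT);

    if (sqlite3_step(stmt) != SQLITE_DONE) std::fprintf(stderr, "step failed\n");
    sqlite3_finalize(stmt);
    sqlite3_close(db);
    return 0;
}
```
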
      @@ -5125,7 +5125,7 @@ typedef struct sqlite3_context sqlite3_context; ** ** [[byte-order determination rules]] ^The byte-order of ** UTF16 input text is determined by the byte-order mark (BOM, U+FEFF) -** found in first character, which is removed, or in the absence of a BOM +** found in the first character, which is removed, or in the absence of a BOM ** the byte order is the native byte order of the host ** machine for sqlite3_bind_text16() or the byte order specified in ** the 6th parameter for sqlite3_bind_text64().)^ @@ -5145,7 +5145,7 @@ typedef struct sqlite3_context sqlite3_context; ** or sqlite3_bind_text16() or sqlite3_bind_text64() then ** that parameter must be the byte offset ** where the NUL terminator would occur assuming the string were NUL -** terminated. If any NUL characters occurs at byte offsets less than +** terminated. If any NUL characters occur at byte offsets less than ** the value of the fourth parameter then the resulting string value will ** contain embedded NULs. The result of expressions involving strings ** with embedded NULs is undefined. @@ -5357,7 +5357,7 @@ SQLITE_API const void *sqlite3_column_name16(sqlite3_stmt*, int N); ** METHOD: sqlite3_stmt ** ** ^These routines provide a means to determine the database, table, and -** table column that is the origin of a particular result column in +** table column that is the origin of a particular result column in a ** [SELECT] statement. ** ^The name of the database or table or column can be returned as ** either a UTF-8 or UTF-16 string. ^The _database_ routines return @@ -5926,8 +5926,8 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** ** For best security, the [SQLITE_DIRECTONLY] flag is recommended for ** all application-defined SQL functions that do not need to be -** used inside of triggers, view, CHECK constraints, or other elements of -** the database schema. This flags is especially recommended for SQL +** used inside of triggers, views, CHECK constraints, or other elements of +** the database schema. This flag is especially recommended for SQL ** functions that have side effects or reveal internal application state. ** Without this flag, an attacker might be able to modify the schema of ** a database file to include invocations of the function with parameters @@ -5958,7 +5958,7 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** [user-defined window functions|available here]. ** ** ^(If the final parameter to sqlite3_create_function_v2() or -** sqlite3_create_window_function() is not NULL, then it is destructor for +** sqlite3_create_window_function() is not NULL, then it is the destructor for ** the application data pointer. The destructor is invoked when the function ** is deleted, either by being overloaded or when the database connection ** closes.)^ ^The destructor is also invoked if the call to @@ -6358,7 +6358,7 @@ SQLITE_API unsigned int sqlite3_value_subtype(sqlite3_value*); ** METHOD: sqlite3_value ** ** ^The sqlite3_value_dup(V) interface makes a copy of the [sqlite3_value] -** object D and returns a pointer to that copy. ^The [sqlite3_value] returned +** object V and returns a pointer to that copy. ^The [sqlite3_value] returned ** is a [protected sqlite3_value] object even if the input is not. ** ^The sqlite3_value_dup(V) interface returns NULL if V is NULL or if a ** memory allocation fails. ^If V is a [pointer value], then the result @@ -6396,7 +6396,7 @@ SQLITE_API void sqlite3_value_free(sqlite3_value*); ** allocation error occurs. 
** ** ^(The amount of space allocated by sqlite3_aggregate_context(C,N) is -** determined by the N parameter on first successful call. Changing the +** determined by the N parameter on the first successful call. Changing the ** value of N in any subsequent call to sqlite3_aggregate_context() within ** the same aggregate function instance will not resize the memory ** allocation.)^ Within the xFinal callback, it is customary to set @@ -6558,7 +6558,7 @@ SQLITE_API void sqlite3_set_auxdata(sqlite3_context*, int N, void*, void (*)(voi ** ** Security Warning: These interfaces should not be exposed in scripting ** languages or in other circumstances where it might be possible for an -** an attacker to invoke them. Any agent that can invoke these interfaces +** attacker to invoke them. Any agent that can invoke these interfaces ** can probably also take control of the process. ** ** Database connection client data is only available for SQLite @@ -6672,7 +6672,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** pointed to by the 2nd parameter are taken as the application-defined ** function result. If the 3rd parameter is non-negative, then it ** must be the byte offset into the string where the NUL terminator would -** appear if the string where NUL terminated. If any NUL characters occur +** appear if the string were NUL terminated. If any NUL characters occur ** in the string at a byte offset that is less than the value of the 3rd ** parameter, then the resulting string will contain embedded NULs and the ** result of expressions operating on strings with embedded NULs is undefined. @@ -6730,7 +6730,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** string and preferably a string literal. The sqlite3_result_pointer() ** routine is part of the [pointer passing interface] added for SQLite 3.20.0. ** -** If these routines are called from within the different thread +** If these routines are called from within a different thread ** than the one containing the application-defined function that received ** the [sqlite3_context] pointer, the results are undefined. */ @@ -7136,7 +7136,7 @@ SQLITE_API sqlite3 *sqlite3_db_handle(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^The sqlite3_db_name(D,N) interface returns a pointer to the schema name -** for the N-th database on database connection D, or a NULL pointer of N is +** for the N-th database on database connection D, or a NULL pointer if N is ** out of range. An N value of 0 means the main database file. An N of 1 is ** the "temp" schema. Larger values of N correspond to various ATTACH-ed ** databases. @@ -7231,7 +7231,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
      The SQLITE_TXN_READ state means that the database is currently ** in a read transaction. Content has been read from the database file ** but nothing in the database file has changed. The transaction state -** will advanced to SQLITE_TXN_WRITE if any changes occur and there are +** will be advanced to SQLITE_TXN_WRITE if any changes occur and there are ** no other conflicting concurrent write transactions. The transaction ** state will revert to SQLITE_TXN_NONE following a [ROLLBACK] or ** [COMMIT].
      @@ -7240,7 +7240,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
      The SQLITE_TXN_WRITE state means that the database is currently ** in a write transaction. Content has been written to the database file ** but has not yet committed. The transaction state will change to -** to SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT].
      +** SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT]. */ #define SQLITE_TXN_NONE 0 #define SQLITE_TXN_READ 1 @@ -7521,7 +7521,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); ** CAPI3REF: Impose A Limit On Heap Size ** ** These interfaces impose limits on the amount of heap memory that will be -** by all database connections within a single process. +** used by all database connections within a single process. ** ** ^The sqlite3_soft_heap_limit64() interface sets and/or queries the ** soft limit on the amount of heap memory that may be allocated by SQLite. @@ -7579,7 +7579,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); **
    )^ ** ** The circumstances under which SQLite will enforce the heap limits may -** changes in future releases of SQLite. +** change in future releases of SQLite. */ SQLITE_API sqlite3_int64 sqlite3_soft_heap_limit64(sqlite3_int64 N); SQLITE_API sqlite3_int64 sqlite3_hard_heap_limit64(sqlite3_int64 N); @@ -7694,8 +7694,8 @@ SQLITE_API int sqlite3_table_column_metadata( ** ^The entry point is zProc. ** ^(zProc may be 0, in which case SQLite will try to come up with an ** entry point name on its own. It first tries "sqlite3_extension_init". -** If that does not work, it constructs a name "sqlite3_X_init" where the -** X is consists of the lower-case equivalent of all ASCII alphabetic +** If that does not work, it constructs a name "sqlite3_X_init" where +** X consists of the lower-case equivalent of all ASCII alphabetic ** characters in the filename from the last "/" to the first following ** "." and omitting any initial "lib".)^ ** ^The sqlite3_load_extension() interface returns @@ -7766,7 +7766,7 @@ SQLITE_API int sqlite3_enable_load_extension(sqlite3 *db, int onoff); ** ^(Even though the function prototype shows that xEntryPoint() takes ** no arguments and returns void, SQLite invokes xEntryPoint() with three ** arguments and expects an integer result as if the signature of the -** entry point where as follows: +** entry point were as follows: ** **
     **    int xEntryPoint(
    @@ -7930,7 +7930,7 @@ struct sqlite3_module {
     ** virtual table and might not be checked again by the byte code.)^ ^(The
     ** aConstraintUsage[].omit flag is an optimization hint. When the omit flag
     ** is left in its default setting of false, the constraint will always be
    -** checked separately in byte code.  If the omit flag is change to true, then
    +** checked separately in byte code.  If the omit flag is changed to true, then
     ** the constraint may or may not be checked in byte code.  In other words,
     ** when the omit flag is true there is no guarantee that the constraint will
     ** not be checked again using byte code.)^
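The omit flag discussed in the hunk above is set from a virtual table's xBestIndex method. A minimal sketch, assuming a hypothetical module (only the sqlite3_index_info fields and constants are the real API):

```c
#include <sqlite3.h>

/* Consume one usable equality constraint and hint that the byte-code
** re-check may be skipped. */
static int myBestIndex(sqlite3_vtab *pVtab, sqlite3_index_info *pIdxInfo){
  int i;
  (void)pVtab;
  for(i=0; i<pIdxInfo->nConstraint; i++){
    const struct sqlite3_index_constraint *pC = &pIdxInfo->aConstraint[i];
    if( pC->usable && pC->op==SQLITE_INDEX_CONSTRAINT_EQ ){
      pIdxInfo->aConstraintUsage[i].argvIndex = 1; /* pass RHS to xFilter */
      /* omit==1 only *permits* SQLite to skip the byte-code re-check;
      ** as the text above notes, it is not a guarantee. */
      pIdxInfo->aConstraintUsage[i].omit = 1;
      pIdxInfo->estimatedCost = 10.0;
      return SQLITE_OK;
    }
  }
  pIdxInfo->estimatedCost = 1e6; /* fall back to a full scan */
  return SQLITE_OK;
}
```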
    @@ -7956,7 +7956,7 @@ struct sqlite3_module {
     ** The xBestIndex method may optionally populate the idxFlags field with a
     ** mask of SQLITE_INDEX_SCAN_* flags. One such flag is
     ** [SQLITE_INDEX_SCAN_HEX], which if set causes the [EXPLAIN QUERY PLAN]
    -** output to show the idxNum has hex instead of as decimal.  Another flag is
    +** output to show the idxNum as hex instead of as decimal.  Another flag is
     ** SQLITE_INDEX_SCAN_UNIQUE, which if set indicates that the query plan will
     ** return at most one row.
     **
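Continuing the xBestIndex sketch above, the idxFlags mask is populated the same way; here is a fragment-sized example for a plan the module knows is unique (the 0x2A plan id is arbitrary):

```c
#include <sqlite3.h>

static int myBestIndexUnique(sqlite3_vtab *pVtab, sqlite3_index_info *pIdxInfo){
  (void)pVtab;
  pIdxInfo->idxNum = 0x2A;   /* shown by EXPLAIN QUERY PLAN; printed in hex
                             ** if SQLITE_INDEX_SCAN_HEX is also set */
  pIdxInfo->idxFlags = SQLITE_INDEX_SCAN_UNIQUE; /* at most one row */
  pIdxInfo->estimatedRows = 1;
  return SQLITE_OK;
}
```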
    @@ -8097,7 +8097,7 @@ struct sqlite3_index_info {
     ** the implementation of the [virtual table module].   ^The fourth
     ** parameter is an arbitrary client data pointer that is passed through
     ** into the [xCreate] and [xConnect] methods of the virtual table module
    -** when a new virtual table is be being created or reinitialized.
    +** when a new virtual table is being created or reinitialized.
     **
     ** ^The sqlite3_create_module_v2() interface has a fifth parameter which
     ** is a pointer to a destructor for the pClientData.  ^SQLite will
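A hedged sketch of how that fifth parameter is typically used (the module name and client data are placeholders; per the documentation, the destructor also runs if registration itself fails):

```c
#include <sqlite3.h>

static void myClientDataFree(void *p){ sqlite3_free(p); }

static int registerMyModule(sqlite3 *db, const sqlite3_module *pModule){
  void *pClientData = sqlite3_malloc(64); /* arbitrary per-module state */
  if( pClientData==0 ) return SQLITE_NOMEM;
  /* SQLite owns pClientData from here on and will invoke the destructor
  ** when the module is replaced or the connection closes. */
  return sqlite3_create_module_v2(db, "my_vtab", pModule, pClientData,
                                  myClientDataFree);
}
```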
    @@ -8262,7 +8262,7 @@ typedef struct sqlite3_blob sqlite3_blob;
     ** in *ppBlob. Otherwise an [error code] is returned and, unless the error
     ** code is SQLITE_MISUSE, *ppBlob is set to NULL.)^ ^This means that, provided
     ** the API is not misused, it is always safe to call [sqlite3_blob_close()]
    -** on *ppBlob after this function it returns.
    +** on *ppBlob after this function returns.
     **
     ** This function fails with SQLITE_ERROR if any of the following are true:
     ** 
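The close-safety guarantee above means the close call can be unconditional: on failure *ppBlob is set to NULL, and sqlite3_blob_close(NULL) is a harmless no-op. A sketch, assuming a hypothetical table "t1" with a "content" column:

```c
#include <sqlite3.h>

static int readBlobPrefix(sqlite3 *db, sqlite3_int64 iRow, char *zBuf, int n){
  sqlite3_blob *pBlob = 0;
  int rc = sqlite3_blob_open(db, "main", "t1", "content", iRow, 0, &pBlob);
  if( rc==SQLITE_OK ){
    rc = sqlite3_blob_read(pBlob, zBuf, n, 0);
  }
  sqlite3_blob_close(pBlob); /* safe whether or not the open succeeded */
  return rc;
}
```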
      @@ -8382,7 +8382,7 @@ SQLITE_API int sqlite3_blob_close(sqlite3_blob *); ** ** ^Returns the size in bytes of the BLOB accessible via the ** successfully opened [BLOB handle] in its only argument. ^The -** incremental blob I/O routines can only read or overwriting existing +** incremental blob I/O routines can only read or overwrite existing ** blob content; they cannot change the size of a blob. ** ** This routine only works on a [BLOB handle] which has been created @@ -8532,7 +8532,7 @@ SQLITE_API int sqlite3_vfs_unregister(sqlite3_vfs*); ** ^The sqlite3_mutex_alloc() routine allocates a new ** mutex and returns a pointer to it. ^The sqlite3_mutex_alloc() ** routine returns NULL if it is unable to allocate the requested -** mutex. The argument to sqlite3_mutex_alloc() must one of these +** mutex. The argument to sqlite3_mutex_alloc() must be one of these ** integer constants: ** **
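Usage of sqlite3_mutex_alloc() as described in the hunk above; the NULL check matters because the routine returns NULL when it cannot allocate, or when the library is configured for single-thread mode (a sketch; `withMutex` is a hypothetical helper):

```c
#include <sqlite3.h>

static void withMutex(void (*xWork)(void)){
  sqlite3_mutex *pMutex = sqlite3_mutex_alloc(SQLITE_MUTEX_FAST);
  if( pMutex ){
    sqlite3_mutex_enter(pMutex);
    xWork();
    sqlite3_mutex_leave(pMutex);
    sqlite3_mutex_free(pMutex); /* dynamic mutexes must be freed */
  }else{
    xWork(); /* single-thread build: no locking available or needed */
  }
}
```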
        @@ -8765,7 +8765,7 @@ SQLITE_API int sqlite3_mutex_notheld(sqlite3_mutex*); ** CAPI3REF: Retrieve the mutex for a database connection ** METHOD: sqlite3 ** -** ^This interface returns a pointer the [sqlite3_mutex] object that +** ^This interface returns a pointer to the [sqlite3_mutex] object that ** serializes access to the [database connection] given in the argument ** when the [threading mode] is Serialized. ** ^If the [threading mode] is Single-thread or Multi-thread then this @@ -8888,7 +8888,7 @@ SQLITE_API int sqlite3_test_control(int op, ...); ** CAPI3REF: SQL Keyword Checking ** ** These routines provide access to the set of SQL language keywords -** recognized by SQLite. Applications can uses these routines to determine +** recognized by SQLite. Applications can use these routines to determine ** whether or not a specific identifier needs to be escaped (for example, ** by enclosing in double-quotes) so as not to confuse the parser. ** @@ -9056,7 +9056,7 @@ SQLITE_API void sqlite3_str_reset(sqlite3_str*); ** content of the dynamic string under construction in X. The value ** returned by [sqlite3_str_value(X)] is managed by the sqlite3_str object X ** and might be freed or altered by any subsequent method on the same -** [sqlite3_str] object. Applications must not used the pointer returned +** [sqlite3_str] object. Applications must not use the pointer returned by ** [sqlite3_str_value(X)] after any subsequent method call on the same ** object. ^Applications may change the content of the string returned ** by [sqlite3_str_value(X)] as long as they do not write into any bytes @@ -9142,7 +9142,7 @@ SQLITE_API int sqlite3_status64( ** allocation which could not be satisfied by the [SQLITE_CONFIG_PAGECACHE] ** buffer and where forced to overflow to [sqlite3_malloc()]. The ** returned value includes allocations that overflowed because they -** where too large (they were larger than the "sz" parameter to +** were too large (they were larger than the "sz" parameter to ** [SQLITE_CONFIG_PAGECACHE]) and allocations that overflowed because ** no space was left in the page cache.)^ ** @@ -9226,28 +9226,29 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** [[SQLITE_DBSTATUS_LOOKASIDE_HIT]] ^(
        SQLITE_DBSTATUS_LOOKASIDE_HIT
        **
        This parameter returns the number of malloc attempts that were ** satisfied using lookaside memory. Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
        )^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE]] ** ^(
        SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE
        -**
        This parameter returns the number malloc attempts that might have +**
        This parameter returns the number of malloc attempts that might have ** been satisfied using lookaside memory but failed due to the amount of ** memory requested being larger than the lookaside slot size. ** Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
        )^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL]] ** ^(
        SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL
        -**
        This parameter returns the number malloc attempts that might have +**
        This parameter returns the number of malloc attempts that might have ** been satisfied using lookaside memory but failed due to all lookaside ** memory already being in use. ** Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
        )^ ** ** [[SQLITE_DBSTATUS_CACHE_USED]] ^(
        SQLITE_DBSTATUS_CACHE_USED
        **
        This parameter returns the approximate number of bytes of heap ** memory used by all pager caches associated with the database connection.)^ ** ^The highwater mark associated with SQLITE_DBSTATUS_CACHE_USED is always 0. +**
        ** ** [[SQLITE_DBSTATUS_CACHE_USED_SHARED]] ** ^(
        SQLITE_DBSTATUS_CACHE_USED_SHARED
        @@ -9256,10 +9257,10 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** memory used by that pager cache is divided evenly between the attached ** connections.)^ In other words, if none of the pager caches associated ** with the database connection are shared, this request returns the same -** value as DBSTATUS_CACHE_USED. Or, if one or more or the pager caches are +** value as DBSTATUS_CACHE_USED. Or, if one or more of the pager caches are ** shared, the value returned by this call will be smaller than that returned ** by DBSTATUS_CACHE_USED. ^The highwater mark associated with -** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. +** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. ** ** [[SQLITE_DBSTATUS_SCHEMA_USED]] ^(
        SQLITE_DBSTATUS_SCHEMA_USED
        **
        This parameter returns the approximate number of bytes of heap @@ -9269,6 +9270,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** schema memory is shared with other database connections due to ** [shared cache mode] being enabled. ** ^The highwater mark associated with SQLITE_DBSTATUS_SCHEMA_USED is always 0. +**
        ** ** [[SQLITE_DBSTATUS_STMT_USED]] ^(
        SQLITE_DBSTATUS_STMT_USED
        **
        This parameter returns the approximate number of bytes of heap @@ -9305,7 +9307,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** been written to disk in the middle of a transaction due to the page ** cache overflowing. Transactions are more efficient if they are written ** to disk all at once. When pages spill mid-transaction, that introduces -** additional overhead. This parameter can be used help identify +** additional overhead. This parameter can be used to help identify ** inefficiencies that can be resolved by increasing the cache size. **
        ** @@ -9785,7 +9787,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** external process or via a database connection other than the one being ** used by the backup operation, then the backup will be automatically ** restarted by the next call to sqlite3_backup_step(). ^If the source -** database is modified by the using the same database connection as is used +** database is modified by using the same database connection as is used ** by the backup operation, then the backup database is automatically ** updated at the same time. ** @@ -9802,7 +9804,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** and may not be used following a call to sqlite3_backup_finish(). ** ** ^The value returned by sqlite3_backup_finish is [SQLITE_OK] if no -** sqlite3_backup_step() errors occurred, regardless or whether or not +** sqlite3_backup_step() errors occurred, regardless of whether or not ** sqlite3_backup_step() completed. ** ^If an out-of-memory condition or IO error occurred during any prior ** sqlite3_backup_step() call on the same [sqlite3_backup] object, then @@ -10872,7 +10874,7 @@ SQLITE_API void sqlite3_stmt_scanstatus_reset(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^If a write-transaction is open on [database connection] D when the -** [sqlite3_db_cacheflush(D)] interface invoked, any dirty +** [sqlite3_db_cacheflush(D)] interface is invoked, any dirty ** pages in the pager-cache that are not currently in use are written out ** to disk. A dirty page may be in use if a database cursor created by an ** active SQL statement is reading from it, or if it is page 1 of a database @@ -15443,8 +15445,8 @@ typedef INT16_TYPE LogEst; ** assuming n is a signed integer type. UMXV(n) is similar for unsigned ** integer types. */ -#define SMXV(n) ((((i64)1)<<(sizeof(n)-1))-1) -#define UMXV(n) ((((i64)1)<<(sizeof(n)))-1) +#define SMXV(n) ((((i64)1)<<(sizeof(n)*8-1))-1) +#define UMXV(n) ((((i64)1)<<(sizeof(n)*8))-1) /* ** Round up a number to the next larger multiple of 8. This is used @@ -19255,7 +19257,7 @@ struct AggInfo { ** from source tables rather than from accumulators */ u8 useSortingIdx; /* In direct mode, reference the sorting index rather ** than the source table */ - u16 nSortingColumn; /* Number of columns in the sorting index */ + u32 nSortingColumn; /* Number of columns in the sorting index */ int sortingIdx; /* Cursor number of the sorting index */ int sortingIdxPTab; /* Cursor number of pseudo-table */ int iFirstReg; /* First register in range for aCol[] and aFunc[] */ @@ -19264,8 +19266,8 @@ struct AggInfo { Table *pTab; /* Source table */ Expr *pCExpr; /* The original expression */ int iTable; /* Cursor number of the source table */ - i16 iColumn; /* Column number within the source table */ - i16 iSorterColumn; /* Column number in the sorting index */ + int iColumn; /* Column number within the source table */ + int iSorterColumn; /* Column number in the sorting index */ } *aCol; int nColumn; /* Number of used entries in aCol[] */ int nAccumulator; /* Number of columns that show through to the output. 
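The SMXV/UMXV change above multiplies sizeof(n) by 8 to convert a byte count into a bit count before shifting; that is what makes the new `assert( mxTerm <= SMXV(i16) )` checks (and the widening of AggInfo's i16 columns to int) meaningful. A standalone check with a 16-bit type:

```c
#include <assert.h>
#include <stdint.h>

typedef int64_t i64;
typedef int16_t i16;

/* Old macro shifted by a byte count; the fixed one shifts by bits. */
#define SMXV_OLD(n) ((((i64)1)<<(sizeof(n)-1))-1)
#define SMXV_NEW(n) ((((i64)1)<<(sizeof(n)*8-1))-1)

int main(void){
  assert( SMXV_OLD(i16)==1 );     /* (1<<(2-1))-1: far too small */
  assert( SMXV_NEW(i16)==32767 ); /* (1<<15)-1 == INT16_MAX */
  return 0;
}
```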
@@ -54967,7 +54969,9 @@ bitvec_set_rehash:
   }else{
     memcpy(aiValues, p->u.aHash, sizeof(p->u.aHash));
     memset(p->u.apSub, 0, sizeof(p->u.apSub));
-    p->iDivisor = (p->iSize + BITVEC_NPTR - 1)/BITVEC_NPTR;
+    p->iDivisor = p->iSize/BITVEC_NPTR;
+    if( (p->iSize%BITVEC_NPTR)!=0 ) p->iDivisor++;
+    if( p->iDivisor<BITVEC_NBIT ) p->iDivisor = BITVEC_NBIT;
     rc = sqlite3BitvecSet(p, i);
     for(j=0; j<BITVEC_NINT; j++){
       if( aiValues[j] ) rc |= sqlite3BitvecSet(p, aiValues[j]);
@@ ... @@ SQLITE_PRIVATE int sqlite3WalUndo(Wal *pWal, int (*xUndo)(void *, Pgno), void *pUndoCtx){
       if( iMax!=pWal->hdr.mxFrame ) walCleanupHash(pWal);
     }
     SEH_EXCEPT( rc = SQLITE_IOERR_IN_PAGE; )
+    pWal->iReCksum = 0;
   }
   return rc;
 }
@@ -69702,6 +69707,9 @@ SQLITE_PRIVATE int sqlite3WalSavepointUndo(Wal *pWal, u32 *aWalData){
       walCleanupHash(pWal);
     }
     SEH_EXCEPT( rc = SQLITE_IOERR_IN_PAGE; )
+    if( pWal->iReCksum>pWal->hdr.mxFrame ){
+      pWal->iReCksum = 0;
+    }
   }
   return rc;
@@ -117347,7 +117355,9 @@ static void findOrCreateAggInfoColumn(
 ){
   struct AggInfo_col *pCol;
   int k;
+  int mxTerm = pParse->db->aLimit[SQLITE_LIMIT_COLUMN];
+  assert( mxTerm <= SMXV(i16) );
   assert( pAggInfo->iFirstReg==0 );
   pCol = pAggInfo->aCol;
   for(k=0; k<pAggInfo->nColumn; k++, pCol++){
@@ -117365,6 +117375,10 @@
     assert( pParse->db->mallocFailed );
     return;
   }
+  if( k>mxTerm ){
+    sqlite3ErrorMsg(pParse, "more than %d aggregate terms", mxTerm);
+    k = mxTerm;
+  }
   pCol = &pAggInfo->aCol[k];
   assert( ExprUseYTab(pExpr) );
   pCol->pTab = pExpr->y.pTab;
@@ -117398,6 +117412,7 @@ fix_up_expr:
   if( pExpr->op==TK_COLUMN ){
     pExpr->op = TK_AGG_COLUMN;
   }
+  assert( k <= SMXV(pExpr->iAgg) );
   pExpr->iAgg = (i16)k;
 }
@@ -117482,13 +117497,19 @@ static int analyzeAggregate(Walker *pWalker, Expr *pExpr){
         ** function that is already in the pAggInfo structure
         */
         struct AggInfo_func *pItem = pAggInfo->aFunc;
+        int mxTerm = pParse->db->aLimit[SQLITE_LIMIT_COLUMN];
+        assert( mxTerm <= SMXV(i16) );
         for(i=0; i<pAggInfo->nFunc; i++, pItem++){
           if( NEVER(pItem->pFExpr==pExpr) ) break;
           if( sqlite3ExprCompare(0, pItem->pFExpr, pExpr, -1)==0 ){
             break;
           }
         }
-        if( i>=pAggInfo->nFunc ){
+        if( i>mxTerm ){
+          sqlite3ErrorMsg(pParse, "more than %d aggregate terms", mxTerm);
+          i = mxTerm;
+          assert( i<pAggInfo->nFunc );
+        }else if( i>=pAggInfo->nFunc ){
           /* pExpr is original.  Make a new entry in pAggInfo->aFunc[] */
           u8 enc = ENC(pParse->db);
@@ -117542,6 +117563,7 @@ static int analyzeAggregate(Walker *pWalker, Expr *pExpr){
       */
       assert( !ExprHasProperty(pExpr, EP_TokenOnly|EP_Reduced) );
       ExprSetVVAProperty(pExpr, EP_NoReduce);
+      assert( i <= SMXV(pExpr->iAgg) );
       pExpr->iAgg = (i16)i;
       pExpr->pAggInfo = pAggInfo;
       return WRC_Prune;
@@ -132000,7 +132022,7 @@ static void concatFuncCore(
   int nSep,
   const char *zSep
 ){
-  i64 j, k, n = 0;
+  i64 j, n = 0;
   int i;
   char *z;
   for(i=0; i<argc; i++){
@@ ... @@ static void concatFuncCore(
-    k = sqlite3_value_bytes(argv[i]);
-    if( k>0 ){
+    if( sqlite3_value_type(argv[i])!=SQLITE_NULL ){
+      int k = sqlite3_value_bytes(argv[i]);
       const char *v = (const char*)sqlite3_value_text(argv[i]);
       if( v!=0 ){
         if( j>0 && nSep>0 ){
@@ -163434,30 +163456,42 @@ static void exprAnalyzeOrTerm(
 ** 1.  The SQLITE_Transitive optimization must be enabled
 ** 2.  Must be either an == or an IS operator
 ** 3.  Not originating in the ON clause of an OUTER JOIN
-** 4.  The affinities of A and B must be compatible
-** 5a. Both operands use the same collating sequence OR
-** 5b. The overall collating sequence is BINARY
+** 4.  The operator is not IS or else the query does not contain RIGHT JOIN
+** 5.  The affinities of A and B must be compatible
+** 6a. Both operands use the same collating sequence OR
+** 6b. The overall collating sequence is BINARY
 ** If this routine returns TRUE, that means that the RHS can be substituted
 ** for the LHS anyplace else in the WHERE clause where the LHS column occurs.
** This is an optimization. No harm comes from returning 0. But if 1 is ** returned when it should not be, then incorrect answers might result. */ -static int termIsEquivalence(Parse *pParse, Expr *pExpr){ +static int termIsEquivalence(Parse *pParse, Expr *pExpr, SrcList *pSrc){ char aff1, aff2; CollSeq *pColl; - if( !OptimizationEnabled(pParse->db, SQLITE_Transitive) ) return 0; - if( pExpr->op!=TK_EQ && pExpr->op!=TK_IS ) return 0; - if( ExprHasProperty(pExpr, EP_OuterON) ) return 0; + if( !OptimizationEnabled(pParse->db, SQLITE_Transitive) ) return 0; /* (1) */ + if( pExpr->op!=TK_EQ && pExpr->op!=TK_IS ) return 0; /* (2) */ + if( ExprHasProperty(pExpr, EP_OuterON) ) return 0; /* (3) */ + assert( pSrc!=0 ); + if( pExpr->op==TK_IS + && pSrc->nSrc + && (pSrc->a[0].fg.jointype & JT_LTORJ)!=0 + ){ + return 0; /* (4) */ + } aff1 = sqlite3ExprAffinity(pExpr->pLeft); aff2 = sqlite3ExprAffinity(pExpr->pRight); if( aff1!=aff2 && (!sqlite3IsNumericAffinity(aff1) || !sqlite3IsNumericAffinity(aff2)) ){ - return 0; + return 0; /* (5) */ } pColl = sqlite3ExprCompareCollSeq(pParse, pExpr); - if( sqlite3IsBinary(pColl) ) return 1; - return sqlite3ExprCollSeqMatch(pParse, pExpr->pLeft, pExpr->pRight); + if( !sqlite3IsBinary(pColl) + && !sqlite3ExprCollSeqMatch(pParse, pExpr->pLeft, pExpr->pRight) + ){ + return 0; /* (6) */ + } + return 1; } /* @@ -163722,8 +163756,8 @@ static void exprAnalyze( if( op==TK_IS ) pNew->wtFlags |= TERM_IS; pTerm = &pWC->a[idxTerm]; pTerm->wtFlags |= TERM_COPIED; - - if( termIsEquivalence(pParse, pDup) ){ + assert( pWInfo->pTabList!=0 ); + if( termIsEquivalence(pParse, pDup, pWInfo->pTabList) ){ pTerm->eOperator |= WO_EQUIV; eExtraOp = WO_EQUIV; } @@ -184425,6 +184459,7 @@ SQLITE_API int sqlite3_setlk_timeout(sqlite3 *db, int ms, int flags){ #endif if( ms<-1 ) return SQLITE_RANGE; #ifdef SQLITE_ENABLE_SETLK_TIMEOUT + sqlite3_mutex_enter(db->mutex); db->setlkTimeout = ms; db->setlkFlags = flags; sqlite3BtreeEnterAll(db); @@ -184436,6 +184471,7 @@ SQLITE_API int sqlite3_setlk_timeout(sqlite3 *db, int ms, int flags){ } } sqlite3BtreeLeaveAll(db); + sqlite3_mutex_leave(db->mutex); #endif #if !defined(SQLITE_ENABLE_API_ARMOR) && !defined(SQLITE_ENABLE_SETLK_TIMEOUT) UNUSED_PARAMETER(db); @@ -257231,7 +257267,7 @@ static void fts5SourceIdFunc( ){ assert( nArg==0 ); UNUSED_PARAM2(nArg, apUnused); - sqlite3_result_text(pCtx, "fts5: 2025-06-06 14:52:32 b77dc5e0f596d2140d9ac682b2893ff65d3a4140aa86067a3efebe29dc914c95", -1, SQLITE_TRANSIENT); + sqlite3_result_text(pCtx, "fts5: 2025-06-28 14:00:48 2af157d77fb1304a74176eaee7fbc7c7e932d946bf25325e9c26c91db19e3079", -1, SQLITE_TRANSIENT); } /* @@ -258046,6 +258082,7 @@ static int fts5StorageDeleteFromIndex( for(iCol=1; rc==SQLITE_OK && iCol<=pConfig->nCol; iCol++){ if( pConfig->abUnindexed[iCol-1]==0 ){ sqlite3_value *pVal = 0; + sqlite3_value *pFree = 0; const char *pText = 0; int nText = 0; const char *pLoc = 0; @@ -258062,11 +258099,22 @@ static int fts5StorageDeleteFromIndex( if( pConfig->bLocale && sqlite3Fts5IsLocaleValue(pConfig, pVal) ){ rc = sqlite3Fts5DecodeLocaleValue(pVal, &pText, &nText, &pLoc, &nLoc); }else{ - pText = (const char*)sqlite3_value_text(pVal); - nText = sqlite3_value_bytes(pVal); - if( pConfig->bLocale && pSeek ){ - pLoc = (const char*)sqlite3_column_text(pSeek, iCol + pConfig->nCol); - nLoc = sqlite3_column_bytes(pSeek, iCol + pConfig->nCol); + if( sqlite3_value_type(pVal)!=SQLITE_TEXT ){ + /* Make a copy of the value to work with. 
This is because the call + ** to sqlite3_value_text() below forces the type of the value to + ** SQLITE_TEXT, and we may need to use it again later. */ + pFree = pVal = sqlite3_value_dup(pVal); + if( pVal==0 ){ + rc = SQLITE_NOMEM; + } + } + if( rc==SQLITE_OK ){ + pText = (const char*)sqlite3_value_text(pVal); + nText = sqlite3_value_bytes(pVal); + if( pConfig->bLocale && pSeek ){ + pLoc = (const char*)sqlite3_column_text(pSeek, iCol+pConfig->nCol); + nLoc = sqlite3_column_bytes(pSeek, iCol + pConfig->nCol); + } } } @@ -258082,6 +258130,7 @@ static int fts5StorageDeleteFromIndex( } sqlite3Fts5ClearLocale(pConfig); } + sqlite3_value_free(pFree); } } if( rc==SQLITE_OK && p->nTotalRow<1 ){ diff --git a/src/bun.js/bindings/sqlite/sqlite3_local.h b/src/bun.js/bindings/sqlite/sqlite3_local.h index abb9734770..658b278548 100644 --- a/src/bun.js/bindings/sqlite/sqlite3_local.h +++ b/src/bun.js/bindings/sqlite/sqlite3_local.h @@ -147,9 +147,9 @@ extern "C" { ** [sqlite3_libversion_number()], [sqlite3_sourceid()], ** [sqlite_version()] and [sqlite_source_id()]. */ -#define SQLITE_VERSION "3.50.1" -#define SQLITE_VERSION_NUMBER 3050001 -#define SQLITE_SOURCE_ID "2025-06-06 14:52:32 b77dc5e0f596d2140d9ac682b2893ff65d3a4140aa86067a3efebe29dc914c95" +#define SQLITE_VERSION "3.50.2" +#define SQLITE_VERSION_NUMBER 3050002 +#define SQLITE_SOURCE_ID "2025-06-28 14:00:48 2af157d77fb1304a74176eaee7fbc7c7e932d946bf25325e9c26c91db19e3079" /* ** CAPI3REF: Run-Time Library Version Numbers @@ -4080,7 +4080,7 @@ SQLITE_API sqlite3_file *sqlite3_database_file_object(const char*); ** ** The sqlite3_create_filename(D,J,W,N,P) allocates memory to hold a version of ** database filename D with corresponding journal file J and WAL file W and -** with N URI parameters key/values pairs in the array P. The result from +** an array P of N URI Key/Value pairs. The result from ** sqlite3_create_filename(D,J,W,N,P) is a pointer to a database filename that ** is safe to pass to routines like: **
          @@ -4761,7 +4761,7 @@ typedef struct sqlite3_context sqlite3_context; ** METHOD: sqlite3_stmt ** ** ^(In the SQL statement text input to [sqlite3_prepare_v2()] and its variants, -** literals may be replaced by a [parameter] that matches one of following +** literals may be replaced by a [parameter] that matches one of the following ** templates: ** **
            @@ -4806,7 +4806,7 @@ typedef struct sqlite3_context sqlite3_context; ** ** [[byte-order determination rules]] ^The byte-order of ** UTF16 input text is determined by the byte-order mark (BOM, U+FEFF) -** found in first character, which is removed, or in the absence of a BOM +** found in the first character, which is removed, or in the absence of a BOM ** the byte order is the native byte order of the host ** machine for sqlite3_bind_text16() or the byte order specified in ** the 6th parameter for sqlite3_bind_text64().)^ @@ -4826,7 +4826,7 @@ typedef struct sqlite3_context sqlite3_context; ** or sqlite3_bind_text16() or sqlite3_bind_text64() then ** that parameter must be the byte offset ** where the NUL terminator would occur assuming the string were NUL -** terminated. If any NUL characters occurs at byte offsets less than +** terminated. If any NUL characters occur at byte offsets less than ** the value of the fourth parameter then the resulting string value will ** contain embedded NULs. The result of expressions involving strings ** with embedded NULs is undefined. @@ -5038,7 +5038,7 @@ SQLITE_API const void *sqlite3_column_name16(sqlite3_stmt*, int N); ** METHOD: sqlite3_stmt ** ** ^These routines provide a means to determine the database, table, and -** table column that is the origin of a particular result column in +** table column that is the origin of a particular result column in a ** [SELECT] statement. ** ^The name of the database or table or column can be returned as ** either a UTF-8 or UTF-16 string. ^The _database_ routines return @@ -5607,8 +5607,8 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** ** For best security, the [SQLITE_DIRECTONLY] flag is recommended for ** all application-defined SQL functions that do not need to be -** used inside of triggers, view, CHECK constraints, or other elements of -** the database schema. This flags is especially recommended for SQL +** used inside of triggers, views, CHECK constraints, or other elements of +** the database schema. This flag is especially recommended for SQL ** functions that have side effects or reveal internal application state. ** Without this flag, an attacker might be able to modify the schema of ** a database file to include invocations of the function with parameters @@ -5639,7 +5639,7 @@ SQLITE_API int sqlite3_reset(sqlite3_stmt *pStmt); ** [user-defined window functions|available here]. ** ** ^(If the final parameter to sqlite3_create_function_v2() or -** sqlite3_create_window_function() is not NULL, then it is destructor for +** sqlite3_create_window_function() is not NULL, then it is the destructor for ** the application data pointer. The destructor is invoked when the function ** is deleted, either by being overloaded or when the database connection ** closes.)^ ^The destructor is also invoked if the call to @@ -6039,7 +6039,7 @@ SQLITE_API unsigned int sqlite3_value_subtype(sqlite3_value*); ** METHOD: sqlite3_value ** ** ^The sqlite3_value_dup(V) interface makes a copy of the [sqlite3_value] -** object D and returns a pointer to that copy. ^The [sqlite3_value] returned +** object V and returns a pointer to that copy. ^The [sqlite3_value] returned ** is a [protected sqlite3_value] object even if the input is not. ** ^The sqlite3_value_dup(V) interface returns NULL if V is NULL or if a ** memory allocation fails. ^If V is a [pointer value], then the result @@ -6077,7 +6077,7 @@ SQLITE_API void sqlite3_value_free(sqlite3_value*); ** allocation error occurs. 
** ** ^(The amount of space allocated by sqlite3_aggregate_context(C,N) is -** determined by the N parameter on first successful call. Changing the +** determined by the N parameter on the first successful call. Changing the ** value of N in any subsequent call to sqlite3_aggregate_context() within ** the same aggregate function instance will not resize the memory ** allocation.)^ Within the xFinal callback, it is customary to set @@ -6239,7 +6239,7 @@ SQLITE_API void sqlite3_set_auxdata(sqlite3_context*, int N, void*, void (*)(voi ** ** Security Warning: These interfaces should not be exposed in scripting ** languages or in other circumstances where it might be possible for an -** an attacker to invoke them. Any agent that can invoke these interfaces +** attacker to invoke them. Any agent that can invoke these interfaces ** can probably also take control of the process. ** ** Database connection client data is only available for SQLite @@ -6353,7 +6353,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** pointed to by the 2nd parameter are taken as the application-defined ** function result. If the 3rd parameter is non-negative, then it ** must be the byte offset into the string where the NUL terminator would -** appear if the string where NUL terminated. If any NUL characters occur +** appear if the string were NUL terminated. If any NUL characters occur ** in the string at a byte offset that is less than the value of the 3rd ** parameter, then the resulting string will contain embedded NULs and the ** result of expressions operating on strings with embedded NULs is undefined. @@ -6411,7 +6411,7 @@ typedef void (*sqlite3_destructor_type)(void*); ** string and preferably a string literal. The sqlite3_result_pointer() ** routine is part of the [pointer passing interface] added for SQLite 3.20.0. ** -** If these routines are called from within the different thread +** If these routines are called from within a different thread ** than the one containing the application-defined function that received ** the [sqlite3_context] pointer, the results are undefined. */ @@ -6817,7 +6817,7 @@ SQLITE_API sqlite3 *sqlite3_db_handle(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^The sqlite3_db_name(D,N) interface returns a pointer to the schema name -** for the N-th database on database connection D, or a NULL pointer of N is +** for the N-th database on database connection D, or a NULL pointer if N is ** out of range. An N value of 0 means the main database file. An N of 1 is ** the "temp" schema. Larger values of N correspond to various ATTACH-ed ** databases. @@ -6912,7 +6912,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
            The SQLITE_TXN_READ state means that the database is currently ** in a read transaction. Content has been read from the database file ** but nothing in the database file has changed. The transaction state -** will advanced to SQLITE_TXN_WRITE if any changes occur and there are +** will be advanced to SQLITE_TXN_WRITE if any changes occur and there are ** no other conflicting concurrent write transactions. The transaction ** state will revert to SQLITE_TXN_NONE following a [ROLLBACK] or ** [COMMIT].
            @@ -6921,7 +6921,7 @@ SQLITE_API int sqlite3_txn_state(sqlite3*,const char *zSchema); **
            The SQLITE_TXN_WRITE state means that the database is currently ** in a write transaction. Content has been written to the database file ** but has not yet committed. The transaction state will change to -** to SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT].
            +** SQLITE_TXN_NONE at the next [ROLLBACK] or [COMMIT]. */ #define SQLITE_TXN_NONE 0 #define SQLITE_TXN_READ 1 @@ -7202,7 +7202,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); ** CAPI3REF: Impose A Limit On Heap Size ** ** These interfaces impose limits on the amount of heap memory that will be -** by all database connections within a single process. +** used by all database connections within a single process. ** ** ^The sqlite3_soft_heap_limit64() interface sets and/or queries the ** soft limit on the amount of heap memory that may be allocated by SQLite. @@ -7260,7 +7260,7 @@ SQLITE_API int sqlite3_db_release_memory(sqlite3*); **
          )^ ** ** The circumstances under which SQLite will enforce the heap limits may -** changes in future releases of SQLite. +** change in future releases of SQLite. */ SQLITE_API sqlite3_int64 sqlite3_soft_heap_limit64(sqlite3_int64 N); SQLITE_API sqlite3_int64 sqlite3_hard_heap_limit64(sqlite3_int64 N); @@ -7375,8 +7375,8 @@ SQLITE_API int sqlite3_table_column_metadata( ** ^The entry point is zProc. ** ^(zProc may be 0, in which case SQLite will try to come up with an ** entry point name on its own. It first tries "sqlite3_extension_init". -** If that does not work, it constructs a name "sqlite3_X_init" where the -** X is consists of the lower-case equivalent of all ASCII alphabetic +** If that does not work, it constructs a name "sqlite3_X_init" where +** X consists of the lower-case equivalent of all ASCII alphabetic ** characters in the filename from the last "/" to the first following ** "." and omitting any initial "lib".)^ ** ^The sqlite3_load_extension() interface returns @@ -7447,7 +7447,7 @@ SQLITE_API int sqlite3_enable_load_extension(sqlite3 *db, int onoff); ** ^(Even though the function prototype shows that xEntryPoint() takes ** no arguments and returns void, SQLite invokes xEntryPoint() with three ** arguments and expects an integer result as if the signature of the -** entry point where as follows: +** entry point were as follows: ** **
           **    int xEntryPoint(
          @@ -7611,7 +7611,7 @@ struct sqlite3_module {
           ** virtual table and might not be checked again by the byte code.)^ ^(The
           ** aConstraintUsage[].omit flag is an optimization hint. When the omit flag
           ** is left in its default setting of false, the constraint will always be
          -** checked separately in byte code.  If the omit flag is change to true, then
          +** checked separately in byte code.  If the omit flag is changed to true, then
           ** the constraint may or may not be checked in byte code.  In other words,
           ** when the omit flag is true there is no guarantee that the constraint will
           ** not be checked again using byte code.)^
          @@ -7637,7 +7637,7 @@ struct sqlite3_module {
           ** The xBestIndex method may optionally populate the idxFlags field with a
           ** mask of SQLITE_INDEX_SCAN_* flags. One such flag is
           ** [SQLITE_INDEX_SCAN_HEX], which if set causes the [EXPLAIN QUERY PLAN]
          -** output to show the idxNum has hex instead of as decimal.  Another flag is
          +** output to show the idxNum as hex instead of as decimal.  Another flag is
           ** SQLITE_INDEX_SCAN_UNIQUE, which if set indicates that the query plan will
           ** return at most one row.
           **
          @@ -7778,7 +7778,7 @@ struct sqlite3_index_info {
           ** the implementation of the [virtual table module].   ^The fourth
           ** parameter is an arbitrary client data pointer that is passed through
           ** into the [xCreate] and [xConnect] methods of the virtual table module
          -** when a new virtual table is be being created or reinitialized.
          +** when a new virtual table is being created or reinitialized.
           **
           ** ^The sqlite3_create_module_v2() interface has a fifth parameter which
           ** is a pointer to a destructor for the pClientData.  ^SQLite will
          @@ -7943,7 +7943,7 @@ typedef struct sqlite3_blob sqlite3_blob;
           ** in *ppBlob. Otherwise an [error code] is returned and, unless the error
           ** code is SQLITE_MISUSE, *ppBlob is set to NULL.)^ ^This means that, provided
           ** the API is not misused, it is always safe to call [sqlite3_blob_close()]
          -** on *ppBlob after this function it returns.
          +** on *ppBlob after this function returns.
           **
           ** This function fails with SQLITE_ERROR if any of the following are true:
           ** 
            @@ -8063,7 +8063,7 @@ SQLITE_API int sqlite3_blob_close(sqlite3_blob *); ** ** ^Returns the size in bytes of the BLOB accessible via the ** successfully opened [BLOB handle] in its only argument. ^The -** incremental blob I/O routines can only read or overwriting existing +** incremental blob I/O routines can only read or overwrite existing ** blob content; they cannot change the size of a blob. ** ** This routine only works on a [BLOB handle] which has been created @@ -8213,7 +8213,7 @@ SQLITE_API int sqlite3_vfs_unregister(sqlite3_vfs*); ** ^The sqlite3_mutex_alloc() routine allocates a new ** mutex and returns a pointer to it. ^The sqlite3_mutex_alloc() ** routine returns NULL if it is unable to allocate the requested -** mutex. The argument to sqlite3_mutex_alloc() must one of these +** mutex. The argument to sqlite3_mutex_alloc() must be one of these ** integer constants: ** **
              @@ -8446,7 +8446,7 @@ SQLITE_API int sqlite3_mutex_notheld(sqlite3_mutex*); ** CAPI3REF: Retrieve the mutex for a database connection ** METHOD: sqlite3 ** -** ^This interface returns a pointer the [sqlite3_mutex] object that +** ^This interface returns a pointer to the [sqlite3_mutex] object that ** serializes access to the [database connection] given in the argument ** when the [threading mode] is Serialized. ** ^If the [threading mode] is Single-thread or Multi-thread then this @@ -8569,7 +8569,7 @@ SQLITE_API int sqlite3_test_control(int op, ...); ** CAPI3REF: SQL Keyword Checking ** ** These routines provide access to the set of SQL language keywords -** recognized by SQLite. Applications can uses these routines to determine +** recognized by SQLite. Applications can use these routines to determine ** whether or not a specific identifier needs to be escaped (for example, ** by enclosing in double-quotes) so as not to confuse the parser. ** @@ -8737,7 +8737,7 @@ SQLITE_API void sqlite3_str_reset(sqlite3_str*); ** content of the dynamic string under construction in X. The value ** returned by [sqlite3_str_value(X)] is managed by the sqlite3_str object X ** and might be freed or altered by any subsequent method on the same -** [sqlite3_str] object. Applications must not used the pointer returned +** [sqlite3_str] object. Applications must not use the pointer returned by ** [sqlite3_str_value(X)] after any subsequent method call on the same ** object. ^Applications may change the content of the string returned ** by [sqlite3_str_value(X)] as long as they do not write into any bytes @@ -8823,7 +8823,7 @@ SQLITE_API int sqlite3_status64( ** allocation which could not be satisfied by the [SQLITE_CONFIG_PAGECACHE] ** buffer and where forced to overflow to [sqlite3_malloc()]. The ** returned value includes allocations that overflowed because they -** where too large (they were larger than the "sz" parameter to +** were too large (they were larger than the "sz" parameter to ** [SQLITE_CONFIG_PAGECACHE]) and allocations that overflowed because ** no space was left in the page cache.)^ ** @@ -8907,28 +8907,29 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** [[SQLITE_DBSTATUS_LOOKASIDE_HIT]] ^(
              SQLITE_DBSTATUS_LOOKASIDE_HIT
              **
              This parameter returns the number of malloc attempts that were ** satisfied using lookaside memory. Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
              )^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE]] ** ^(
              SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE
              -**
              This parameter returns the number malloc attempts that might have +**
              This parameter returns the number of malloc attempts that might have ** been satisfied using lookaside memory but failed due to the amount of ** memory requested being larger than the lookaside slot size. ** Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
              )^ ** ** [[SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL]] ** ^(
              SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL
              -**
              This parameter returns the number malloc attempts that might have +**
              This parameter returns the number of malloc attempts that might have ** been satisfied using lookaside memory but failed due to all lookaside ** memory already being in use. ** Only the high-water value is meaningful; -** the current value is always zero.)^ +** the current value is always zero.
              )^ ** ** [[SQLITE_DBSTATUS_CACHE_USED]] ^(
              SQLITE_DBSTATUS_CACHE_USED
              **
              This parameter returns the approximate number of bytes of heap ** memory used by all pager caches associated with the database connection.)^ ** ^The highwater mark associated with SQLITE_DBSTATUS_CACHE_USED is always 0. +**
              ** ** [[SQLITE_DBSTATUS_CACHE_USED_SHARED]] ** ^(
              SQLITE_DBSTATUS_CACHE_USED_SHARED
              @@ -8937,10 +8938,10 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** memory used by that pager cache is divided evenly between the attached ** connections.)^ In other words, if none of the pager caches associated ** with the database connection are shared, this request returns the same -** value as DBSTATUS_CACHE_USED. Or, if one or more or the pager caches are +** value as DBSTATUS_CACHE_USED. Or, if one or more of the pager caches are ** shared, the value returned by this call will be smaller than that returned ** by DBSTATUS_CACHE_USED. ^The highwater mark associated with -** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. +** SQLITE_DBSTATUS_CACHE_USED_SHARED is always 0. ** ** [[SQLITE_DBSTATUS_SCHEMA_USED]] ^(
              SQLITE_DBSTATUS_SCHEMA_USED
              **
              This parameter returns the approximate number of bytes of heap @@ -8950,6 +8951,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** schema memory is shared with other database connections due to ** [shared cache mode] being enabled. ** ^The highwater mark associated with SQLITE_DBSTATUS_SCHEMA_USED is always 0. +**
              ** ** [[SQLITE_DBSTATUS_STMT_USED]] ^(
              SQLITE_DBSTATUS_STMT_USED
              **
              This parameter returns the approximate number of bytes of heap @@ -8986,7 +8988,7 @@ SQLITE_API int sqlite3_db_status(sqlite3*, int op, int *pCur, int *pHiwtr, int r ** been written to disk in the middle of a transaction due to the page ** cache overflowing. Transactions are more efficient if they are written ** to disk all at once. When pages spill mid-transaction, that introduces -** additional overhead. This parameter can be used help identify +** additional overhead. This parameter can be used to help identify ** inefficiencies that can be resolved by increasing the cache size. **
              ** @@ -9466,7 +9468,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** external process or via a database connection other than the one being ** used by the backup operation, then the backup will be automatically ** restarted by the next call to sqlite3_backup_step(). ^If the source -** database is modified by the using the same database connection as is used +** database is modified by using the same database connection as is used ** by the backup operation, then the backup database is automatically ** updated at the same time. ** @@ -9483,7 +9485,7 @@ typedef struct sqlite3_backup sqlite3_backup; ** and may not be used following a call to sqlite3_backup_finish(). ** ** ^The value returned by sqlite3_backup_finish is [SQLITE_OK] if no -** sqlite3_backup_step() errors occurred, regardless or whether or not +** sqlite3_backup_step() errors occurred, regardless of whether or not ** sqlite3_backup_step() completed. ** ^If an out-of-memory condition or IO error occurred during any prior ** sqlite3_backup_step() call on the same [sqlite3_backup] object, then @@ -10553,7 +10555,7 @@ SQLITE_API void sqlite3_stmt_scanstatus_reset(sqlite3_stmt*); ** METHOD: sqlite3 ** ** ^If a write-transaction is open on [database connection] D when the -** [sqlite3_db_cacheflush(D)] interface invoked, any dirty +** [sqlite3_db_cacheflush(D)] interface is invoked, any dirty ** pages in the pager-cache that are not currently in use are written out ** to disk. A dirty page may be in use if a database cursor created by an ** active SQL statement is reading from it, or if it is page 1 of a database diff --git a/src/bun.js/bindings/v8/V8FunctionCallbackInfo.cpp b/src/bun.js/bindings/v8/V8FunctionCallbackInfo.cpp index f819282b5b..f044d67ffd 100644 --- a/src/bun.js/bindings/v8/V8FunctionCallbackInfo.cpp +++ b/src/bun.js/bindings/v8/V8FunctionCallbackInfo.cpp @@ -9,11 +9,11 @@ == sizeof(void*) * real_v8::FunctionCallbackInfo::V8_NAME, \ "Position of `" #BUN_NAME "` in implicit arguments does not match V8"); -CHECK_IMPLICIT_ARG(holder, kHolderIndex) -CHECK_IMPLICIT_ARG(isolate, kIsolateIndex) CHECK_IMPLICIT_ARG(unused, kUnusedIndex) +CHECK_IMPLICIT_ARG(isolate, kIsolateIndex) +CHECK_IMPLICIT_ARG(context, kContextIndex) CHECK_IMPLICIT_ARG(return_value, kReturnValueIndex) -CHECK_IMPLICIT_ARG(data, kDataIndex) +CHECK_IMPLICIT_ARG(target, kTargetIndex) CHECK_IMPLICIT_ARG(new_target, kNewTargetIndex) ASSERT_V8_TYPE_LAYOUT_MATCHES(v8::FunctionCallbackInfo) diff --git a/src/bun.js/bindings/v8/V8FunctionCallbackInfo.h b/src/bun.js/bindings/v8/V8FunctionCallbackInfo.h index 7e9f6f9177..bfb9f97779 100644 --- a/src/bun.js/bindings/v8/V8FunctionCallbackInfo.h +++ b/src/bun.js/bindings/v8/V8FunctionCallbackInfo.h @@ -9,15 +9,13 @@ class Context; class Value; struct ImplicitArgs { - // v8-function-callback.h:168 - void* holder; - Isolate* isolate; - void* unused; - // overwritten by the callback - TaggedPointer return_value; - // holds the value passed for data in FunctionTemplate::New - TaggedPointer data; - void* new_target; + // v8-function-callback.h:149-154 + void* unused; // kUnusedIndex = 0 + Isolate* isolate; // kIsolateIndex = 1 + void* context; // kContextIndex = 2 + TaggedPointer return_value; // kReturnValueIndex = 3 + TaggedPointer target; // kTargetIndex = 4 + void* new_target; // kNewTargetIndex = 5 }; // T = return value diff --git a/src/bun.js/bindings/v8/V8Isolate.cpp b/src/bun.js/bindings/v8/V8Isolate.cpp index f6385c53b9..2f6928b49a 100644 --- a/src/bun.js/bindings/v8/V8Isolate.cpp +++ 
b/src/bun.js/bindings/v8/V8Isolate.cpp
@@ -5,14 +5,11 @@
 #include "real_v8.h"
 #include "v8_compatibility_assertions.h"
 
-static_assert(offsetof(v8::Isolate, m_roots) == real_v8::internal::Internals::kIsolateRootsOffset,
-    "Isolate roots array is at wrong offset");
+static_assert(offsetof(v8::Isolate, m_roots) == real_v8::internal::Internals::kIsolateRootsOffset, "Isolate roots array is at wrong offset");
 
-#define CHECK_ROOT_INDEX(NAME)                                                \
-    static_assert(v8::Isolate::NAME == real_v8::internal::Internals::NAME,    \
-        "Isolate root index " #NAME " does not match between Bun and V8");    \
-    static_assert(v8::Isolate::NAME < std::tuple_size_v,                      \
-        "Bun v8::Isolate roots array is too small for index " #NAME);
+#define CHECK_ROOT_INDEX(NAME)                                                \
+    static_assert(v8::Isolate::NAME == real_v8::internal::Internals::NAME, "Isolate root index " #NAME " does not match between Bun and V8"); \
+    static_assert(v8::Isolate::NAME < std::tuple_size_v, "Bun v8::Isolate roots array is too small for index " #NAME);
 
 CHECK_ROOT_INDEX(kUndefinedValueRootIndex)
 CHECK_ROOT_INDEX(kTheHoleValueRootIndex)
diff --git a/src/bun.js/bindings/v8/V8Isolate.h b/src/bun.js/bindings/v8/V8Isolate.h
index de603e0690..5069cbd3e9 100644
--- a/src/bun.js/bindings/v8/V8Isolate.h
+++ b/src/bun.js/bindings/v8/V8Isolate.h
@@ -50,7 +50,7 @@ public:
     shim::GlobalInternals* m_globalInternals;
     Zig::GlobalObject* m_globalObject;
-    uintptr_t m_padding[72];
+    uintptr_t m_padding[78];
     std::array m_roots;
 };
diff --git a/src/bun.js/bindings/v8/V8Object.cpp b/src/bun.js/bindings/v8/V8Object.cpp
index f6f0bdc096..0149532350 100644
--- a/src/bun.js/bindings/v8/V8Object.cpp
+++ b/src/bun.js/bindings/v8/V8Object.cpp
@@ -55,7 +55,7 @@ Maybe<bool> Object::Set(Local<Context> context, Local<Value> key, Local<Value> v
         scope.clearExceptionExceptTermination();
         return Nothing<bool>();
     }
-    if (scope.exception()) {
+    if (scope.exception()) [[unlikely]] {
         scope.clearException();
         return Nothing<bool>();
     }
diff --git a/src/bun.js/bindings/v8/V8String.cpp b/src/bun.js/bindings/v8/V8String.cpp
index fc792cc7e4..eca567867c 100644
--- a/src/bun.js/bindings/v8/V8String.cpp
+++ b/src/bun.js/bindings/v8/V8String.cpp
@@ -154,7 +154,7 @@ bool String::IsExternalOneByte() const
 }
 
 extern "C" size_t TextEncoder__encodeInto8(const LChar* stringPtr, size_t stringLen, void* ptr, size_t len);
-extern "C" size_t TextEncoder__encodeInto16(const UChar* stringPtr, size_t stringLen, void* ptr, size_t len);
+extern "C" size_t TextEncoder__encodeInto16(const char16_t* stringPtr, size_t stringLen, void* ptr, size_t len);
 
 int String::WriteUtf8(Isolate* isolate, char* buffer, int length, int* nchars_ref, int options) const
 {
@@ -175,7 +175,7 @@
         }
         if (read < string.length() && U16_IS_SURROGATE(string[read]) && written + 3 <= length) {
             // encode unpaired surrogate
-            UChar surrogate = string[read];
+            char16_t surrogate = string[read];
             buffer[written + 0] = 0xe0 | (surrogate >> 12);
             buffer[written + 1] = 0x80 | ((surrogate >> 6) & 0x3f);
             buffer[written + 2] = 0x80 | (surrogate & 0x3f);
diff --git a/src/bun.js/bindings/v8/shim/FunctionTemplate.cpp b/src/bun.js/bindings/v8/shim/FunctionTemplate.cpp
index d10063e301..affa454e3f 100644
--- a/src/bun.js/bindings/v8/shim/FunctionTemplate.cpp
+++ b/src/bun.js/bindings/v8/shim/FunctionTemplate.cpp
@@ -82,16 +82,17 @@ JSC::EncodedJSValue FunctionTemplate::functionCall(JSC::JSGlobalObject* globalOb
         args[i + 1] = argValue.tagged();
     }
 
-    Local<Data> data = hs.createLocal<Data>(vm, functionTemplate->m_data.get());
+    // In V8, the target
is the function being called + Local target = hs.createLocal(vm, callee); ImplicitArgs implicit_args = { - .holder = nullptr, - .isolate = isolate, .unused = nullptr, + .isolate = isolate, + // Context is always a reinterpret pointer to Zig::GlobalObject + .context = reinterpret_cast(globalObject), .return_value = TaggedPointer(), - // data may be an object - // put it in the handle scope so that it has a map ptr - .data = data.tagged(), + // target holds the Function being called, which contains the FunctionTemplate + .target = target.tagged(), .new_target = nullptr, }; diff --git a/src/bun.js/bindings/v8/shim/FunctionTemplate.h b/src/bun.js/bindings/v8/shim/FunctionTemplate.h index 29027bd3fc..cdc264efaa 100644 --- a/src/bun.js/bindings/v8/shim/FunctionTemplate.h +++ b/src/bun.js/bindings/v8/shim/FunctionTemplate.h @@ -7,6 +7,18 @@ namespace v8 { class FunctionTemplate; +template +class Local; +class Value; +class Data; + +class Isolate; + +namespace api_internal { +// Forward declaration - defined in v8_api_internal.cpp +Local GetFunctionTemplateData(Isolate* isolate, Local target); +} + namespace shim { class FunctionTemplate : public JSC::InternalFunction { @@ -35,6 +47,8 @@ public: static JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES functionCall(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame); + friend v8::Local api_internal::GetFunctionTemplateData(v8::Isolate* isolate, v8::Local target); + private: FunctionCallback m_callback; JSC::WriteBarrier m_data; diff --git a/src/bun.js/bindings/v8/shim/HandleScopeBuffer.h b/src/bun.js/bindings/v8/shim/HandleScopeBuffer.h index e51e59e9e8..f30c535c23 100644 --- a/src/bun.js/bindings/v8/shim/HandleScopeBuffer.h +++ b/src/bun.js/bindings/v8/shim/HandleScopeBuffer.h @@ -1,6 +1,7 @@ #pragma once #include "../v8.h" +#include "../V8Isolate.h" #include "TaggedPointer.h" #include "Map.h" #include "Handle.h" @@ -8,7 +9,6 @@ namespace v8 { -class Isolate; class EscapableHandleScopeBase; namespace shim { diff --git a/src/bun.js/bindings/v8/v8_api_internal.cpp b/src/bun.js/bindings/v8/v8_api_internal.cpp index 8f45b84073..10384fc017 100644 --- a/src/bun.js/bindings/v8/v8_api_internal.cpp +++ b/src/bun.js/bindings/v8/v8_api_internal.cpp @@ -1,7 +1,12 @@ #include "v8_api_internal.h" #include "V8Isolate.h" +#include "V8HandleScope.h" +#include "V8Data.h" +#include "V8Value.h" #include "shim/HandleScopeBuffer.h" #include "shim/GlobalInternals.h" +#include "shim/Function.h" +#include "shim/FunctionTemplate.h" namespace v8 { @@ -31,5 +36,19 @@ void DisposeGlobal(uintptr_t* location) (void)location; } +Local GetFunctionTemplateData(Isolate* isolate, Local target) +{ + // The target should be a Function that was created from a FunctionTemplate + // Use operator* to get the Data* from Local, then call localToObjectPointer + auto* function = target->localToObjectPointer(); + if (!function) return Local(); + + auto* functionTemplate = function->functionTemplate(); + if (!functionTemplate) return Local(); + + JSC::JSValue data = functionTemplate->m_data.get(); + return isolate->currentHandleScope()->createLocal(isolate->vm(), data); +} + } // namespace api_internal } // namespace v8 diff --git a/src/bun.js/bindings/v8/v8_api_internal.h b/src/bun.js/bindings/v8/v8_api_internal.h index df54422290..3d3a5a0716 100644 --- a/src/bun.js/bindings/v8/v8_api_internal.h +++ b/src/bun.js/bindings/v8/v8_api_internal.h @@ -4,12 +4,20 @@ #include "v8_internal.h" namespace v8 { + +class Isolate; +template +class Local; +class Value; +class Data; + namespace 
api_internal { BUN_EXPORT void ToLocalEmpty(); BUN_EXPORT void FromJustIsNothing(); BUN_EXPORT uintptr_t* GlobalizeReference(v8::internal::Isolate* isolate, uintptr_t address); BUN_EXPORT void DisposeGlobal(uintptr_t* location); +BUN_EXPORT Local GetFunctionTemplateData(Isolate* isolate, Local target); } // namespace api_internal } // namespace v8 diff --git a/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h b/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h index 66e29275fd..427954d53c 100644 --- a/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h +++ b/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h @@ -36,6 +36,7 @@ public: std::unique_ptr m_clientSubspaceForRequireResolveFunction; std::unique_ptr m_clientSubspaceForBundlerPlugin; std::unique_ptr m_clientSubspaceForNodeVMGlobalObject; + std::unique_ptr m_clientSubspaceForNodeVMSpecialSandbox; std::unique_ptr m_clientSubspaceForNodeVMScript; std::unique_ptr m_clientSubspaceForNodeVMSourceTextModule; std::unique_ptr m_clientSubspaceForNodeVMSyntheticModule; diff --git a/src/bun.js/bindings/webcore/DOMIsoSubspaces.h b/src/bun.js/bindings/webcore/DOMIsoSubspaces.h index 1de228dd13..12a275ca46 100644 --- a/src/bun.js/bindings/webcore/DOMIsoSubspaces.h +++ b/src/bun.js/bindings/webcore/DOMIsoSubspaces.h @@ -36,6 +36,7 @@ public: std::unique_ptr m_subspaceForRequireResolveFunction; std::unique_ptr m_subspaceForBundlerPlugin; std::unique_ptr m_subspaceForNodeVMGlobalObject; + std::unique_ptr m_subspaceForNodeVMSpecialSandbox; std::unique_ptr m_subspaceForNodeVMScript; std::unique_ptr m_subspaceForNodeVMSourceTextModule; std::unique_ptr m_subspaceForNodeVMSyntheticModule; diff --git a/src/bun.js/bindings/webcore/EventEmitter.cpp b/src/bun.js/bindings/webcore/EventEmitter.cpp index a34db09b4f..afdbedab4c 100644 --- a/src/bun.js/bindings/webcore/EventEmitter.cpp +++ b/src/bun.js/bindings/webcore/EventEmitter.cpp @@ -217,7 +217,7 @@ bool EventEmitter::innerInvokeEventListeners(const Identifier& eventType, Simple VM& vm = context.vm(); auto* thisObject = protectedThis->m_thisObject.get(); - JSC::JSValue thisValue = thisObject ? JSC::JSValue(thisObject) : JSC::jsUndefined(); + JSC::JSValue thisValue = thisObject ? 
thisObject : JSC::jsUndefined(); auto fired = false; for (auto& registeredListener : listeners) { diff --git a/src/bun.js/bindings/webcore/HTTPHeaderField.cpp b/src/bun.js/bindings/webcore/HTTPHeaderField.cpp index 189d1c3794..26d5b080df 100644 --- a/src/bun.js/bindings/webcore/HTTPHeaderField.cpp +++ b/src/bun.js/bindings/webcore/HTTPHeaderField.cpp @@ -30,11 +30,11 @@ namespace WebCore { namespace RFC7230 { -bool isTokenCharacter(UChar c) +bool isTokenCharacter(char16_t c) { return c < 0x80 && isTokenCharacter(static_cast(c)); } -bool isDelimiter(UChar c) +bool isDelimiter(char16_t c) { return c < 0x80 && isDelimiter(static_cast(c)); } @@ -59,28 +59,28 @@ bool isDelimiter(LChar c) || c == '}' || c == '"'; } -static bool isVisibleCharacter(UChar c) +static bool isVisibleCharacter(char16_t c) { return isTokenCharacter(c) || isDelimiter(c); } -bool isWhitespace(UChar c) +bool isWhitespace(char16_t c) { return c == ' ' || c == '\t'; } template -static bool isInRange(UChar c) +static bool isInRange(char16_t c) { return c >= min && c <= max; } -static bool isOBSText(UChar c) +static bool isOBSText(char16_t c) { return isInRange<0x80, 0xFF>(c); } -static bool isQuotedTextCharacter(UChar c) +static bool isQuotedTextCharacter(char16_t c) { return isWhitespace(c) || c == 0x21 @@ -89,14 +89,14 @@ static bool isQuotedTextCharacter(UChar c) || isOBSText(c); } -bool isQuotedPairSecondOctet(UChar c) +bool isQuotedPairSecondOctet(char16_t c) { return isWhitespace(c) || isVisibleCharacter(c) || isOBSText(c); } -bool isCommentText(UChar c) +bool isCommentText(char16_t c) { return isWhitespace(c) || isInRange<0x21, 0x27>(c) @@ -129,7 +129,7 @@ static bool isValidValue(StringView value) bool hadNonWhitespace = false; for (size_t i = 0; i < value.length(); ++i) { - UChar c = value[i]; + char16_t c = value[i]; switch (state) { case State::OptionalWhitespace: if (isWhitespace(c)) @@ -213,8 +213,8 @@ static bool isValidValue(StringView value) std::optional HTTPHeaderField::create(String&& unparsedName, String&& unparsedValue) { - auto trimmedName = StringView(unparsedName).trim(isTabOrSpace); - auto trimmedValue = StringView(unparsedValue).trim(isTabOrSpace); + auto trimmedName = StringView(unparsedName).trim(isTabOrSpace); + auto trimmedValue = StringView(unparsedValue).trim(isTabOrSpace); if (!RFC7230::isValidName(trimmedName) || !RFC7230::isValidValue(trimmedValue)) return std::nullopt; diff --git a/src/bun.js/bindings/webcore/HTTPHeaderField.h b/src/bun.js/bindings/webcore/HTTPHeaderField.h index 37a3624711..30f94fc67b 100644 --- a/src/bun.js/bindings/webcore/HTTPHeaderField.h +++ b/src/bun.js/bindings/webcore/HTTPHeaderField.h @@ -73,13 +73,13 @@ std::optional HTTPHeaderField::decode(Decoder& decoder) } namespace RFC7230 { -bool isTokenCharacter(UChar); -bool isWhitespace(UChar); +bool isTokenCharacter(char16_t); +bool isWhitespace(char16_t); bool isTokenCharacter(LChar); bool isWhitespace(LChar); -bool isCommentText(UChar); -bool isQuotedPairSecondOctet(UChar); -bool isDelimiter(UChar); +bool isCommentText(char16_t); +bool isQuotedPairSecondOctet(char16_t); +bool isDelimiter(char16_t); } // namespace RFC7230 } // namespace WebCore diff --git a/src/bun.js/bindings/webcore/HTTPHeaderNames.cpp b/src/bun.js/bindings/webcore/HTTPHeaderNames.cpp index 8b36ed87cb..428d1a6fb4 100644 --- a/src/bun.js/bindings/webcore/HTTPHeaderNames.cpp +++ b/src/bun.js/bindings/webcore/HTTPHeaderNames.cpp @@ -670,7 +670,7 @@ bool findHTTPHeaderName(const StringView stringView, HTTPHeaderName& headerName) LChar 
characters[maxHTTPHeaderNameLength]; const auto span = stringView.span16(); for (unsigned i = 0; i < length; ++i) { - UChar character = span.data()[i]; + char16_t character = span.data()[i]; if (!isASCII(character)) return false; diff --git a/src/bun.js/bindings/webcore/HTTPParsers.cpp b/src/bun.js/bindings/webcore/HTTPParsers.cpp index 6ea74c3bb3..c973a531bb 100644 --- a/src/bun.js/bindings/webcore/HTTPParsers.cpp +++ b/src/bun.js/bindings/webcore/HTTPParsers.cpp @@ -47,7 +47,7 @@ namespace WebCore { // True if characters which satisfy the predicate are present, incrementing // "pos" to the next character which does not satisfy the predicate. // Note: might return pos == str.length(). -static inline bool skipWhile(const String& str, unsigned& pos, const Function& predicate) +static inline bool skipWhile(const String& str, unsigned& pos, const Function& predicate) { const unsigned start = pos; const unsigned len = str.length(); @@ -60,7 +60,7 @@ static inline bool skipWhile(const String& str, unsigned& pos, const Function); + skipWhile(str, pos, isTabOrSpace); return pos < str.length(); } @@ -109,7 +109,7 @@ static inline bool skipValue(const String& str, unsigned& pos) bool isValidReasonPhrase(const String& value) { for (unsigned i = 0; i < value.length(); ++i) { - UChar c = value[i]; + char16_t c = value[i]; if (c == 0x7F || !isLatin1(c) || (c < 0x20 && c != '\t')) return false; } @@ -121,7 +121,7 @@ bool isValidHTTPHeaderValue(const StringView& value) { auto length = value.length(); if (length == 0) return true; - UChar c = value[0]; + char16_t c = value[0]; if (isTabOrSpace(c)) return false; c = value[length - 1]; @@ -152,7 +152,7 @@ bool isValidHTTPHeaderValue(const StringView& value) bool isValidAcceptHeaderValue(const StringView& value) { for (unsigned i = 0; i < value.length(); ++i) { - UChar c = value[i]; + char16_t c = value[i]; // First check for alphanumeric for performance reasons then allowlist four delimiter characters. if (isASCIIAlphanumeric(c) || c == ',' || c == '/' || c == ';' || c == '=') @@ -172,7 +172,7 @@ bool isValidAcceptHeaderValue(const StringView& value) static bool containsCORSUnsafeRequestHeaderBytes(const String& value) { for (unsigned i = 0; i < value.length(); ++i) { - UChar c = value[i]; + char16_t c = value[i]; // https://fetch.spec.whatwg.org/#cors-unsafe-request-header-byte if ((c < 0x20 && c != '\t') || (c == '"' || c == '(' || c == ')' || c == ':' || c == '<' || c == '>' || c == '?' || c == '@' || c == '[' || c == '\\' || c == ']' || c == 0x7B || c == '{' || c == '}' || c == 0x7F)) return true; @@ -186,7 +186,7 @@ static bool containsCORSUnsafeRequestHeaderBytes(const String& value) bool isValidLanguageHeaderValue(const StringView& value) { for (unsigned i = 0; i < value.length(); ++i) { - UChar c = value[i]; + char16_t c = value[i]; if (isASCIIAlphanumeric(c) || c == ' ' || c == '*' || c == ',' || c == '-' || c == '.' || c == ';' || c == '=') continue; return false; @@ -210,7 +210,7 @@ bool isValidHTTPToken(const StringView& value) return true; } - for (UChar c : value.codeUnits()) { + for (char16_t c : value.codeUnits()) { if (!RFC7230::isTokenCharacter(c)) return false; } @@ -220,7 +220,7 @@ bool isValidHTTPToken(const StringView& value) #if USE(GLIB) // True if the character at the given position satisifies a predicate, incrementing "pos" by one. 
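The hunks above migrate the RFC 7230 header validators from UChar to char16_t without changing their logic. As a reference for what isValidHTTPToken accepts, here is a minimal standalone sketch of token validation over char16_t code units; it is illustrative only, uses the standard library instead of WTF, and every name in it is hypothetical.

```cpp
// Minimal sketch of RFC 7230 token validation over char16_t code units,
// mirroring the checks isValidHTTPToken performs above. Standalone and
// hypothetical: plain standard library, no WTF::StringView.
#include <string>
#include <cstdio>

// A tchar is any ASCII letter/digit or one of the punctuation marks
// listed in RFC 7230 section 3.2.6.
static bool isTokenChar(char16_t c)
{
    if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9'))
        return true;
    switch (c) {
    case '!': case '#': case '$': case '%': case '&': case '\'':
    case '*': case '+': case '-': case '.': case '^': case '_':
    case '`': case '|': case '~':
        return true;
    default:
        return false;
    }
}

// A valid HTTP token is a non-empty run of tchars.
static bool isValidToken(const std::u16string& value)
{
    if (value.empty())
        return false;
    for (char16_t c : value) {
        if (!isTokenChar(c))
            return false;
    }
    return true;
}

int main()
{
    std::printf("%d\n", isValidToken(u"Content-Type")); // 1: all tchars
    std::printf("%d\n", isValidToken(u"Bad Header"));   // 0: space is not a tchar
}
```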
// Note: Might return pos == str.length() -static inline bool skipCharacter(const String& value, unsigned& pos, Function&& predicate) +static inline bool skipCharacter(const String& value, unsigned& pos, Function&& predicate) { if (pos < value.length() && predicate(value[pos])) { ++pos; @@ -231,9 +231,9 @@ static inline bool skipCharacter(const String& value, unsigned& pos, Function); + auto key = keyValuePair.left(valueStartPos).trim(isUnicodeCompatibleASCIIWhitespace); if (key.isEmpty() || key != "filename"_s) continue; - auto value = keyValuePair.substring(valueStartPos + 1).trim(isUnicodeCompatibleASCIIWhitespace); + auto value = keyValuePair.substring(valueStartPos + 1).trim(isUnicodeCompatibleASCIIWhitespace); // Remove quotes if there are any if (value.length() > 1 && value[0] == '\"') @@ -381,7 +381,7 @@ String extractMIMETypeFromMediaType(const String& mediaType) unsigned length = mediaType.length(); for (; position < length; ++position) { - UChar c = mediaType[position]; + char16_t c = mediaType[position]; if (!isTabOrSpace(c)) break; } @@ -393,7 +393,7 @@ String extractMIMETypeFromMediaType(const String& mediaType) unsigned typeEnd = position; for (; position < length; ++position) { - UChar c = mediaType[position]; + char16_t c = mediaType[position]; // While RFC 2616 does not allow it, other browsers allow multiple values in the HTTP media // type header field, Content-Type. In such cases, the media type string passed here may contain @@ -551,7 +551,7 @@ XSSProtectionDisposition parseXSSProtectionHeader(const String& header, String& ContentTypeOptionsDisposition parseContentTypeOptionsHeader(StringView header) { StringView leftToken = header.left(header.find(',')); - if (equalLettersIgnoringASCIICase(leftToken.trim(isASCIIWhitespaceWithoutFF), "nosniff"_s)) + if (equalLettersIgnoringASCIICase(leftToken.trim(isASCIIWhitespaceWithoutFF), "nosniff"_s)) return ContentTypeOptionsDisposition::Nosniff; return ContentTypeOptionsDisposition::None; } @@ -579,7 +579,7 @@ XFrameOptionsDisposition parseXFrameOptionsHeader(StringView header) return result; for (auto currentHeader : header.splitAllowingEmptyEntries(',')) { - currentHeader = currentHeader.trim(isUnicodeCompatibleASCIIWhitespace); + currentHeader = currentHeader.trim(isUnicodeCompatibleASCIIWhitespace); XFrameOptionsDisposition currentValue = XFrameOptionsDisposition::None; if (equalLettersIgnoringASCIICase(currentHeader, "deny"_s)) currentValue = XFrameOptionsDisposition::Deny; @@ -610,7 +610,7 @@ XFrameOptionsDisposition parseXFrameOptionsHeader(StringView header) // return result; // for (auto value : StringView(headerValue).split(',')) { -// auto trimmedValue = value.trim(isASCIIWhitespaceWithoutFF); +// auto trimmedValue = value.trim(isASCIIWhitespaceWithoutFF); // if (trimmedValue == "\"cache\""_s) // result.add(ClearSiteDataValue::Cache); // else if (trimmedValue == "\"cookies\""_s) @@ -632,7 +632,7 @@ bool parseRange(StringView range, RangeAllowWhitespace allowWhitespace, long lon rangeStart = rangeEnd = -1; // Only 0x20 and 0x09 matter as newlines are already gone by the time we parse a header value. - if (allowWhitespace == RangeAllowWhitespace::No && range.find(isTabOrSpace) != notFound) + if (allowWhitespace == RangeAllowWhitespace::No && range.find(isTabOrSpace) != notFound) return false; // The "bytes" unit identifier should be present. 
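For context on the parseRange hunk here: it validates the single-range `bytes=start-end` form of the Range header. The sketch below is a simplified standalone parser for that format under the same rules (optional start, optional suffix length, start must not exceed end); it is not WebCore's implementation, multi-range lists and overflow handling are omitted, and all names are hypothetical.

```cpp
// Simplified standalone parser for the single-range "bytes=start-end"
// form that parseRange validates. Not WebCore's implementation.
#include <optional>
#include <string>
#include <cstdio>

struct ByteRange {
    long long start = -1; // -1 mirrors "value not specified"
    long long end = -1;
};

static bool isAllDigits(const std::string& s)
{
    if (s.empty())
        return false;
    for (char c : s)
        if (c < '0' || c > '9')
            return false;
    return true;
}

static std::optional<ByteRange> parseSingleByteRange(const std::string& header)
{
    // The "bytes" unit identifier and '=' must be present. Case-sensitive
    // here for brevity; the real code matches "bytes" ASCII case-insensitively.
    const std::string prefix = "bytes=";
    if (header.compare(0, prefix.size(), prefix) != 0)
        return std::nullopt;

    const std::string spec = header.substr(prefix.size());
    const size_t dash = spec.find('-');
    if (dash == std::string::npos)
        return std::nullopt;

    const std::string first = spec.substr(0, dash);
    const std::string last = spec.substr(dash + 1);
    if (first.empty() && last.empty())
        return std::nullopt; // "bytes=-" specifies nothing

    ByteRange range;
    if (!first.empty()) {
        if (!isAllDigits(first))
            return std::nullopt;
        range.start = std::stoll(first);
    }
    // An empty first part makes "bytes=-N" a suffix range (last N bytes).
    if (!last.empty()) {
        if (!isAllDigits(last))
            return std::nullopt;
        range.end = std::stoll(last);
    }

    // When both endpoints exist, start must not exceed end.
    if (range.start >= 0 && range.end >= 0 && range.start > range.end)
        return std::nullopt;
    return range;
}

int main()
{
    if (auto r = parseSingleByteRange("bytes=0-499"))
        std::printf("start=%lld end=%lld\n", r->start, r->end); // start=0 end=499
}
```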
@@ -640,7 +640,7 @@ bool parseRange(StringView range, RangeAllowWhitespace allowWhitespace, long lon if (!startsWithLettersIgnoringASCIICase(range, "bytes"_s)) return false; - auto byteRange = range.substring(bytesLength).trim(isASCIIWhitespaceWithoutFF); + auto byteRange = range.substring(bytesLength).trim(isASCIIWhitespaceWithoutFF); if (!byteRange.startsWith('=')) return false; @@ -970,7 +970,7 @@ bool isSafeMethod(const String& method) CrossOriginResourcePolicy parseCrossOriginResourcePolicyHeader(StringView header) { - auto trimmedHeader = header.trim(isASCIIWhitespaceWithoutFF); + auto trimmedHeader = header.trim(isASCIIWhitespaceWithoutFF); if (trimmedHeader.isEmpty()) return CrossOriginResourcePolicy::None; diff --git a/src/bun.js/bindings/webcore/HTTPParsers.h b/src/bun.js/bindings/webcore/HTTPParsers.h index a128ee94e2..b12b1b284b 100644 --- a/src/bun.js/bindings/webcore/HTTPParsers.h +++ b/src/bun.js/bindings/webcore/HTTPParsers.h @@ -118,7 +118,7 @@ WEBCORE_EXPORT CrossOriginResourcePolicy parseCrossOriginResourcePolicyHeader(St // -1 could be set to one of the return parameters to indicate the value is not specified. WEBCORE_EXPORT bool parseRange(StringView, RangeAllowWhitespace, long long& rangeStart, long long& rangeEnd); -inline bool isHTTPSpace(UChar character) +inline bool isHTTPSpace(char16_t character) { return character <= ' ' && (character == ' ' || character == '\n' || character == '\t' || character == '\r'); } diff --git a/src/bun.js/bindings/webcore/HeaderFieldTokenizer.cpp b/src/bun.js/bindings/webcore/HeaderFieldTokenizer.cpp index 29eae31ad3..c0134438fd 100644 --- a/src/bun.js/bindings/webcore/HeaderFieldTokenizer.cpp +++ b/src/bun.js/bindings/webcore/HeaderFieldTokenizer.cpp @@ -38,7 +38,7 @@ HeaderFieldTokenizer::HeaderFieldTokenizer(const String& headerField) skipSpaces(); } -bool HeaderFieldTokenizer::consume(UChar c) +bool HeaderFieldTokenizer::consume(char16_t c) { ASSERT(!isTabOrSpace(c)); @@ -106,7 +106,7 @@ void HeaderFieldTokenizer::skipSpaces() ++m_index; } -void HeaderFieldTokenizer::consumeBeforeAnyCharMatch(const Vector& chars) +void HeaderFieldTokenizer::consumeBeforeAnyCharMatch(const Vector& chars) { ASSERT(chars.size() > 0U && chars.size() < 3U); diff --git a/src/bun.js/bindings/webcore/HeaderFieldTokenizer.h b/src/bun.js/bindings/webcore/HeaderFieldTokenizer.h index fe29f409ff..f05049c878 100644 --- a/src/bun.js/bindings/webcore/HeaderFieldTokenizer.h +++ b/src/bun.js/bindings/webcore/HeaderFieldTokenizer.h @@ -41,7 +41,7 @@ public: // string from the |header_field| input. Return |true| on success. Return // |false| if the separator character, the token or the quoted string is // missing or invalid. - bool consume(UChar); + bool consume(char16_t); String consumeToken(); String consumeTokenOrQuotedString(); @@ -49,7 +49,7 @@ public: // the Vector parameter are found. // Because we potentially have to iterate through the entire Vector for each // character of the base string, the Vector should be small (< 3 members). 
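The InternalWritableStream, JSCookie, JSCookieMap, JSWebSocket, and JSWorker hunks that follow all converge on one pattern: every `getIfPropertyExists` call is followed immediately by `RETURN_IF_EXCEPTION`, before the result is consumed, and exception checks on hot paths are annotated `[[unlikely]]`. The toy model below illustrates why the immediate check matters; its types are hypothetical stand-ins, not JSC's real API, and it assumes C++20 for the likelihood attribute.

```cpp
// Toy model of the exception-scope pattern these hunks converge on: each
// fallible lookup is checked for an exception immediately, instead of
// batching the check after a conditional that may already have consumed a
// stale result. All types are hypothetical stand-ins for JSC's API.
#include <optional>
#include <string>
#include <cstdio>

struct ThrowScope {
    bool pendingException = false;
    bool exception() const { return pendingException; }
};

// Stand-in for getIfPropertyExists: may "throw" (set the scope flag) and
// return nothing, e.g. when a getter on the options object throws.
static std::optional<int> getIfPropertyExists(ThrowScope& scope, const std::string& name)
{
    if (name == "bad") {
        scope.pendingException = true; // simulated JS exception from a getter
        return std::nullopt;
    }
    if (name == "smol")
        return 1;
    return std::nullopt; // property absent, no exception
}

// Immediately propagate: mirrors RETURN_IF_EXCEPTION(throwScope, {}).
#define RETURN_IF_EXCEPTION(scope, value) \
    do { if ((scope).exception()) [[unlikely]] return (value); } while (0)

static int readOptions(ThrowScope& scope)
{
    auto smol = getIfPropertyExists(scope, "smol");
    RETURN_IF_EXCEPTION(scope, -1); // check before *using* the result
    int mini = smol.value_or(0);

    auto bad = getIfPropertyExists(scope, "bad");
    RETURN_IF_EXCEPTION(scope, -1); // the exception surfaces here, not later
    (void)bad;
    return mini;
}

int main()
{
    ThrowScope scope;
    std::printf("%d\n", readOptions(scope)); // -1: the "bad" lookup threw
}
```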
- void consumeBeforeAnyCharMatch(const Vector&); + void consumeBeforeAnyCharMatch(const Vector&); bool isConsumed() const { return m_index >= m_input.length(); } diff --git a/src/bun.js/bindings/webcore/InternalWritableStream.cpp b/src/bun.js/bindings/webcore/InternalWritableStream.cpp index e630db19cb..e227ac6167 100644 --- a/src/bun.js/bindings/webcore/InternalWritableStream.cpp +++ b/src/bun.js/bindings/webcore/InternalWritableStream.cpp @@ -89,7 +89,7 @@ bool InternalWritableStream::locked() const ASSERT(!arguments.hasOverflowed()); auto result = invokeWritableStreamFunction(*globalObject, privateName, arguments); - if (scope.exception()) + if (scope.exception()) [[unlikely]] scope.clearException(); return result.hasException() ? false : result.returnValue().isTrue(); diff --git a/src/bun.js/bindings/webcore/JSBroadcastChannel.cpp b/src/bun.js/bindings/webcore/JSBroadcastChannel.cpp index 728bc13e30..55be2327e6 100644 --- a/src/bun.js/bindings/webcore/JSBroadcastChannel.cpp +++ b/src/bun.js/bindings/webcore/JSBroadcastChannel.cpp @@ -212,6 +212,7 @@ JSC_DEFINE_HOST_FUNCTION(jsBroadcastChannelPrototype_inspectCustom, (JSC::JSGlob inputObj->putDirect(vm, Identifier::fromString(vm, "active"_s), jsBoolean(!channel->isClosed()), 0); JSFunction* utilInspect = globalObject->utilInspectFunction(); + RETURN_IF_EXCEPTION(throwScope, {}); auto callData = JSC::getCallData(utilInspect); MarkedArgumentBuffer arguments; arguments.append(inputObj); diff --git a/src/bun.js/bindings/webcore/JSCookie.cpp b/src/bun.js/bindings/webcore/JSCookie.cpp index b1d25e9c50..f0873240f6 100644 --- a/src/bun.js/bindings/webcore/JSCookie.cpp +++ b/src/bun.js/bindings/webcore/JSCookie.cpp @@ -108,78 +108,94 @@ static std::optional cookieInitFromJS(JSC::VM& vm, JSGlobalObject* l if (auto* optionsObj = options.getObject()) { if (checkName) { - if (auto nameValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, vm.propertyNames->name)) { - name = convert(*lexicalGlobalObject, nameValue); - } + auto nameValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, vm.propertyNames->name); RETURN_IF_EXCEPTION(throwScope, std::nullopt); + if (nameValue) { + name = convert(*lexicalGlobalObject, nameValue); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + } if (name.isEmpty()) { throwVMTypeError(lexicalGlobalObject, throwScope, "name is required"_s); return std::nullopt; } - if (auto valueValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, vm.propertyNames->value)) { + auto valueValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, vm.propertyNames->value); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + if (valueValue) { value = convert(*lexicalGlobalObject, valueValue); } - RETURN_IF_EXCEPTION(throwScope, std::nullopt); } // domain - if (auto domainValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.domainPublicName())) { + auto domainValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.domainPublicName()); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + if (domainValue) { if (!domainValue.isUndefined() && !domainValue.isNull()) { domain = convert(*lexicalGlobalObject, domainValue); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); } } - RETURN_IF_EXCEPTION(throwScope, std::nullopt); // path - if (auto pathValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.pathPublicName())) { + auto pathValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.pathPublicName()); + RETURN_IF_EXCEPTION(throwScope, 
std::nullopt); + if (pathValue) { if (!pathValue.isUndefined() && !pathValue.isNull()) { path = convert(*lexicalGlobalObject, pathValue); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); } } - RETURN_IF_EXCEPTION(throwScope, std::nullopt); // expires - if (auto expiresValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.expiresPublicName())) { - expires = getExpiresValue(lexicalGlobalObject, throwScope, expiresValue); - } + auto expiresValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.expiresPublicName()); RETURN_IF_EXCEPTION(throwScope, std::nullopt); + if (expiresValue) { + expires = getExpiresValue(lexicalGlobalObject, throwScope, expiresValue); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + } // maxAge - if (auto maxAgeValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.maxAgePublicName())) { + auto maxAgeValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.maxAgePublicName()); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + if (maxAgeValue) { if (!maxAgeValue.isUndefined() && !maxAgeValue.isNull() && maxAgeValue.isNumber()) { maxAge = maxAgeValue.asNumber(); } } - RETURN_IF_EXCEPTION(throwScope, std::nullopt); // secure - if (auto secureValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.securePublicName())) { + auto secureValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.securePublicName()); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + if (secureValue) { if (!secureValue.isUndefined()) { secure = secureValue.toBoolean(lexicalGlobalObject); } } - RETURN_IF_EXCEPTION(throwScope, std::nullopt); // httpOnly - if (auto httpOnlyValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.httpOnlyPublicName())) { + auto httpOnlyValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.httpOnlyPublicName()); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + if (httpOnlyValue) { if (!httpOnlyValue.isUndefined()) { httpOnly = httpOnlyValue.toBoolean(lexicalGlobalObject); } } - RETURN_IF_EXCEPTION(throwScope, std::nullopt); // partitioned - if (auto partitionedValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.partitionedPublicName())) { + auto partitionedValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.partitionedPublicName()); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + if (partitionedValue) { if (!partitionedValue.isUndefined()) { partitioned = partitionedValue.toBoolean(lexicalGlobalObject); } } - RETURN_IF_EXCEPTION(throwScope, std::nullopt); // sameSite - if (auto sameSiteValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.sameSitePublicName())) { + auto sameSiteValue = optionsObj->getIfPropertyExists(lexicalGlobalObject, names.sameSitePublicName()); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); + if (sameSiteValue) { if (!sameSiteValue.isUndefined() && !sameSiteValue.isNull()) { String sameSiteStr = convert(*lexicalGlobalObject, sameSiteValue); @@ -191,9 +207,9 @@ static std::optional cookieInitFromJS(JSC::VM& vm, JSGlobalObject* l sameSite = CookieSameSite::None; else throwVMTypeError(lexicalGlobalObject, throwScope, "Invalid sameSite value. 
Must be 'strict', 'lax', or 'none'"_s); + RETURN_IF_EXCEPTION(throwScope, std::nullopt); } } - RETURN_IF_EXCEPTION(throwScope, std::nullopt); } } @@ -304,8 +320,8 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSCookieDOMConstructor:: auto cookie_exception = Cookie::parse(cookieString); if (cookie_exception.hasException()) { WebCore::propagateException(lexicalGlobalObject, throwScope, cookie_exception.releaseException()); + RELEASE_AND_RETURN(throwScope, {}); } - RETURN_IF_EXCEPTION(throwScope, {}); auto cookie = cookie_exception.releaseReturnValue(); auto* globalObject = castedThis->globalObject(); @@ -326,8 +342,8 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSCookieDOMConstructor:: auto cookie_exception = Cookie::create(*cookieInit); if (cookie_exception.hasException()) { WebCore::propagateException(lexicalGlobalObject, throwScope, cookie_exception.releaseException()); + RELEASE_AND_RETURN(throwScope, {}); } - RETURN_IF_EXCEPTION(throwScope, {}); auto cookie = cookie_exception.releaseReturnValue(); auto* globalObject = castedThis->globalObject(); RELEASE_AND_RETURN(throwScope, JSValue::encode(toJS(lexicalGlobalObject, globalObject, WTFMove(cookie)))); @@ -361,8 +377,8 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSCookieDOMConstructor:: auto cookie_exception = Cookie::create(cookieInit); if (cookie_exception.hasException()) { WebCore::propagateException(lexicalGlobalObject, throwScope, cookie_exception.releaseException()); + RELEASE_AND_RETURN(throwScope, {}); } - RETURN_IF_EXCEPTION(throwScope, {}); auto cookie = cookie_exception.releaseReturnValue(); auto* globalObject = castedThis->globalObject(); @@ -527,8 +543,8 @@ JSC_DEFINE_HOST_FUNCTION(jsCookieStaticFunctionParse, (JSGlobalObject * lexicalG auto cookie_exception = Cookie::create(CookieInit {}); if (cookie_exception.hasException()) { WebCore::propagateException(lexicalGlobalObject, throwScope, cookie_exception.releaseException()); + RELEASE_AND_RETURN(throwScope, {}); } - RETURN_IF_EXCEPTION(throwScope, {}); auto cookie = cookie_exception.releaseReturnValue(); return JSValue::encode(toJSNewlyCreated(lexicalGlobalObject, defaultGlobalObject(lexicalGlobalObject), WTFMove(cookie))); } @@ -541,8 +557,8 @@ JSC_DEFINE_HOST_FUNCTION(jsCookieStaticFunctionParse, (JSGlobalObject * lexicalG auto cookie_exception = Cookie::parse(cookieString); if (cookie_exception.hasException()) { WebCore::propagateException(lexicalGlobalObject, throwScope, cookie_exception.releaseException()); + RELEASE_AND_RETURN(throwScope, {}); } - RETURN_IF_EXCEPTION(throwScope, {}); auto cookie = cookie_exception.releaseReturnValue(); auto* globalObject = defaultGlobalObject(lexicalGlobalObject); @@ -581,8 +597,8 @@ JSC_DEFINE_HOST_FUNCTION(jsCookieStaticFunctionFrom, (JSGlobalObject * lexicalGl auto cookie_exception = Cookie::create(cookieInit); if (cookie_exception.hasException()) { WebCore::propagateException(lexicalGlobalObject, throwScope, cookie_exception.releaseException()); + RELEASE_AND_RETURN(throwScope, {}); } - RETURN_IF_EXCEPTION(throwScope, {}); auto cookie = cookie_exception.releaseReturnValue(); auto* globalObject = jsCast(lexicalGlobalObject); return JSValue::encode(toJSNewlyCreated(lexicalGlobalObject, globalObject, WTFMove(cookie))); diff --git a/src/bun.js/bindings/webcore/JSCookieMap.cpp b/src/bun.js/bindings/webcore/JSCookieMap.cpp index e225f9f8c8..0f1aacd447 100644 --- a/src/bun.js/bindings/webcore/JSCookieMap.cpp +++ b/src/bun.js/bindings/webcore/JSCookieMap.cpp @@ -176,8 +176,8 @@ template<> 
JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSCookieMapDOMConstructo auto result_exception = CookieMap::create(WTFMove(init)); if (result_exception.hasException()) { WebCore::propagateException(lexicalGlobalObject, throwScope, result_exception.releaseException()); + RELEASE_AND_RETURN(throwScope, {}); } - RETURN_IF_EXCEPTION(throwScope, {}); auto result = result_exception.releaseReturnValue(); RELEASE_AND_RETURN(throwScope, JSValue::encode(toJSNewlyCreated(lexicalGlobalObject, castedThis->globalObject(), WTFMove(result)))); @@ -407,8 +407,8 @@ static inline JSC::EncodedJSValue jsCookieMapPrototypeFunction_setBody(JSC::JSGl auto cookie_exception = Cookie::create(cookieInit); if (cookie_exception.hasException()) { WebCore::propagateException(lexicalGlobalObject, throwScope, cookie_exception.releaseException()); + RELEASE_AND_RETURN(throwScope, {}); } - RETURN_IF_EXCEPTION(throwScope, {}); auto cookie = cookie_exception.releaseReturnValue(); impl.set(WTFMove(cookie)); @@ -456,10 +456,12 @@ static inline JSC::EncodedJSValue jsCookieMapPrototypeFunction_deleteBody(JSC::J // Extract name if (nameValue.isUndefined()) nameValue = options->getIfPropertyExists(lexicalGlobalObject, PropertyName(vm.propertyNames->name)); + RETURN_IF_EXCEPTION(throwScope, {}); // Extract optional domain - if (auto domainValue = options->getIfPropertyExists(lexicalGlobalObject, names.domainPublicName())) { - RETURN_IF_EXCEPTION(throwScope, {}); + auto domainValue = options->getIfPropertyExists(lexicalGlobalObject, names.domainPublicName()); + RETURN_IF_EXCEPTION(throwScope, {}); + if (domainValue) { if (!domainValue.isUndefined() && !domainValue.isNull()) { deleteOptions.domain = convert(*lexicalGlobalObject, domainValue); @@ -468,9 +470,9 @@ static inline JSC::EncodedJSValue jsCookieMapPrototypeFunction_deleteBody(JSC::J } // Extract optional path - if (auto pathValue = options->getIfPropertyExists(lexicalGlobalObject, names.pathPublicName())) { - RETURN_IF_EXCEPTION(throwScope, {}); - + auto pathValue = options->getIfPropertyExists(lexicalGlobalObject, names.pathPublicName()); + RETURN_IF_EXCEPTION(throwScope, {}); + if (pathValue) { if (!pathValue.isUndefined() && !pathValue.isNull()) { deleteOptions.path = convert(*lexicalGlobalObject, pathValue); RETURN_IF_EXCEPTION(throwScope, {}); diff --git a/src/bun.js/bindings/webcore/JSDOMCastThisValue.h b/src/bun.js/bindings/webcore/JSDOMCastThisValue.h index d7028c4b5a..142cad7818 100644 --- a/src/bun.js/bindings/webcore/JSDOMCastThisValue.h +++ b/src/bun.js/bindings/webcore/JSDOMCastThisValue.h @@ -46,7 +46,7 @@ JSClass* castThisValue(JSC::JSGlobalObject& lexicalGlobalObject, JSC::JSValue th { auto& vm = JSC::getVM(&lexicalGlobalObject); if constexpr (std::is_base_of_v) - return toJSDOMGlobalObject(vm, thisValue.isUndefinedOrNull() ? JSC::JSValue(&lexicalGlobalObject) : thisValue); + return toJSDOMGlobalObject(vm, thisValue.isUndefinedOrNull() ? 
&lexicalGlobalObject : thisValue); else return JSC::jsDynamicCast(thisValue); } diff --git a/src/bun.js/bindings/webcore/JSDOMConvertPromise.h b/src/bun.js/bindings/webcore/JSDOMConvertPromise.h index c3ca6bb158..cf4a7ea4cd 100644 --- a/src/bun.js/bindings/webcore/JSDOMConvertPromise.h +++ b/src/bun.js/bindings/webcore/JSDOMConvertPromise.h @@ -48,7 +48,7 @@ template struct Converter> : DefaultConverterscriptExecutionContext(); // if (is(scriptExecutionContext)) { // auto* scriptController = downcast(*scriptExecutionContext).script(); diff --git a/src/bun.js/bindings/webcore/JSDOMPromise.cpp b/src/bun.js/bindings/webcore/JSDOMPromise.cpp index db1597552f..fbcf8cc54e 100644 --- a/src/bun.js/bindings/webcore/JSDOMPromise.cpp +++ b/src/bun.js/bindings/webcore/JSDOMPromise.cpp @@ -58,7 +58,7 @@ auto DOMPromise::whenPromiseIsSettled(JSDOMGlobalObject* globalObject, JSC::JSOb auto thenFunction = promise->get(&lexicalGlobalObject, privateName); EXCEPTION_ASSERT(!scope.exception() || vm.hasPendingTerminationException()); - if (scope.exception()) + if (scope.exception()) [[unlikely]] return IsCallbackRegistered::No; ASSERT(thenFunction.isCallable()); diff --git a/src/bun.js/bindings/webcore/JSEventEmitterCustom.cpp b/src/bun.js/bindings/webcore/JSEventEmitterCustom.cpp index ce367f441b..1bd1fc8a4e 100644 --- a/src/bun.js/bindings/webcore/JSEventEmitterCustom.cpp +++ b/src/bun.js/bindings/webcore/JSEventEmitterCustom.cpp @@ -64,6 +64,7 @@ JSEventEmitter* jsEventEmitterCastFast(VM& vm, JSC::JSGlobalObject* lexicalGloba return jsCast(asObject(_events)); } } + // TODO: properly propagate exception upwards (^ getIfPropertyExists) auto scope = DECLARE_CATCH_SCOPE(vm); auto* globalObject = reinterpret_cast(lexicalGlobalObject); @@ -75,7 +76,7 @@ JSEventEmitter* jsEventEmitterCastFast(VM& vm, JSC::JSGlobalObject* lexicalGloba thisObject->putDirect(vm, name, result, 0); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); return nullptr; } diff --git a/src/bun.js/bindings/webcore/JSEventTarget.cpp b/src/bun.js/bindings/webcore/JSEventTarget.cpp index 953bc19363..32ae670f65 100644 --- a/src/bun.js/bindings/webcore/JSEventTarget.cpp +++ b/src/bun.js/bindings/webcore/JSEventTarget.cpp @@ -233,6 +233,7 @@ static inline JSC::EncodedJSValue jsEventTargetPrototypeFunction_addEventListene warningMessage = "addEventListener called with undefined listener, which has no effect."_s; } auto errorInstance = JSC::ErrorInstance::create(vm, lexicalGlobalObject->errorStructure(JSC::ErrorType::Error), warningMessage, JSValue(), nullptr, RuntimeType::TypeNothing, JSC::ErrorType::Error); + RETURN_IF_EXCEPTION(throwScope, {}); errorInstance->putDirect(vm, vm.propertyNames->name, jsString(vm, String("AddEventListenerArgumentTypeWarning"_s))); JSObject& target = *castedThis; errorInstance->putDirect(vm, vm.propertyNames->target, &target); diff --git a/src/bun.js/bindings/webcore/JSEventTargetCustom.h b/src/bun.js/bindings/webcore/JSEventTargetCustom.h index 4de018f6f8..e719800691 100644 --- a/src/bun.js/bindings/webcore/JSEventTargetCustom.h +++ b/src/bun.js/bindings/webcore/JSEventTargetCustom.h @@ -64,7 +64,7 @@ public: auto throwScope = DECLARE_THROW_SCOPE(vm); auto thisValue = callFrame.thisValue().toThis(&lexicalGlobalObject, JSC::ECMAMode::strict()); - auto thisObject = jsEventTargetCast(vm, thisValue.isUndefinedOrNull() ? JSC::JSValue(&lexicalGlobalObject) : thisValue); + auto thisObject = jsEventTargetCast(vm, thisValue.isUndefinedOrNull() ? 
&lexicalGlobalObject : thisValue); if (!thisObject) [[unlikely]] { return throwThisTypeError(lexicalGlobalObject, throwScope, "EventTarget", operationName); } diff --git a/src/bun.js/bindings/webcore/JSEventTargetNode.cpp b/src/bun.js/bindings/webcore/JSEventTargetNode.cpp index de4588d552..fa4c3159c3 100644 --- a/src/bun.js/bindings/webcore/JSEventTargetNode.cpp +++ b/src/bun.js/bindings/webcore/JSEventTargetNode.cpp @@ -44,7 +44,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionNodeEventsGetEventListeners, (JSGlobalObject } } - return JSValue::encode(constructArray(globalObject, static_cast(nullptr), values)); + RELEASE_AND_RETURN(throwScope, JSValue::encode(constructArray(globalObject, static_cast(nullptr), values))); } } diff --git a/src/bun.js/bindings/webcore/JSMIMEParams.cpp b/src/bun.js/bindings/webcore/JSMIMEParams.cpp index 562589a711..f368a9145b 100644 --- a/src/bun.js/bindings/webcore/JSMIMEParams.cpp +++ b/src/bun.js/bindings/webcore/JSMIMEParams.cpp @@ -60,7 +60,7 @@ static int findFirstInvalidHTTPTokenChar(const StringView& view) // Checks if a character is valid within an HTTP quoted string value (excluding DQUOTE and backslash). // Equivalent to /[^\t\u0020-\u007E\u0080-\u00FF]/, but we handle quotes/backslash separately. -static inline bool isHTTPQuotedStringChar(UChar c) +static inline bool isHTTPQuotedStringChar(char16_t c) { return c == 0x09 || (c >= 0x20 && c <= 0x7E) || (c >= 0x80 && c <= 0xFF); } @@ -101,7 +101,7 @@ static size_t findEndBeginningWhitespace(const StringView& view) } else { const auto span = view.span16(); for (size_t i = 0; i < span.size(); ++i) { - UChar c = span[i]; + char16_t c = span[i]; if (c != '\t' && c != ' ' && c != '\r' && c != '\n') { return i; } @@ -125,7 +125,7 @@ static size_t findStartEndingWhitespace(const StringView& view) } else { const auto span = view.span16(); for (size_t i = span.size(); i > 0; --i) { - UChar c = span[i - 1]; + char16_t c = span[i - 1]; if (c != '\t' && c != ' ' && c != '\r' && c != '\n') { return i; } @@ -154,7 +154,7 @@ static String removeBackslashes(const StringView& view) } else { auto span = view.span16(); for (size_t i = 0; i < span.size(); ++i) { - UChar c = span[i]; + char16_t c = span[i]; if (c == '\\' && i + 1 < span.size()) { builder.append(span[++i]); } else { @@ -167,7 +167,7 @@ static String removeBackslashes(const StringView& view) static void escapeQuoteOrBackslash(const StringView& view, StringBuilder& builder) { - if (view.find([](UChar c) { return c == '"' || c == '\\'; }) == notFound) { + if (view.find([](char16_t c) { return c == '"' || c == '\\'; }) == notFound) { builder.append(view); return; } @@ -182,7 +182,7 @@ static void escapeQuoteOrBackslash(const StringView& view, StringBuilder& builde } } else { auto span = view.span16(); - for (UChar c : span) { + for (char16_t c : span) { if (c == '"' || c == '\\') { builder.append('\\'); } @@ -227,7 +227,7 @@ bool parseMIMEParamsString(JSGlobalObject* globalObject, JSMap* map, StringView // Find the end of the parameter name (next ';' or '=') size_t nameEnd = position; while (nameEnd < length) { - UChar c = input[nameEnd]; + char16_t c = input[nameEnd]; if (c == ';' || c == '=') break; nameEnd++; } @@ -249,7 +249,7 @@ bool parseMIMEParamsString(JSGlobalObject* globalObject, JSMap* map, StringView size_t valueStart = position; bool escaped = false; while (position < length) { - UChar c = input[position]; + char16_t c = input[position]; if (escaped) { escaped = false; } else if (c == '\\') { diff --git a/src/bun.js/bindings/webcore/JSMIMEType.cpp 
b/src/bun.js/bindings/webcore/JSMIMEType.cpp index 7e01208479..cb92824bd0 100644 --- a/src/bun.js/bindings/webcore/JSMIMEType.cpp +++ b/src/bun.js/bindings/webcore/JSMIMEType.cpp @@ -63,13 +63,13 @@ static int findFirstInvalidHTTPTokenChar(const StringView& view) // Checks if a character is valid within an HTTP quoted string value (excluding DQUOTE and backslash). // Equivalent to /[^\t\u0020-\u007E\u0080-\u00FF]/, but we handle quotes/backslash separately. -static inline bool isHTTPQuotedStringChar(UChar c) +static inline bool isHTTPQuotedStringChar(char16_t c) { return c == 0x09 || (c >= 0x20 && c <= 0x7E) || (c >= 0x80 && c <= 0xFF); } // Checks if a character is NOT a valid HTTP quoted string code point. -static inline bool isNotHTTPQuotedStringChar(UChar c) +static inline bool isNotHTTPQuotedStringChar(char16_t c) { return !isHTTPQuotedStringChar(c); } @@ -110,7 +110,7 @@ static size_t findEndBeginningWhitespace(const StringView& view) } else { const auto span = view.span16(); for (size_t i = 0; i < span.size(); ++i) { - UChar c = span[i]; + char16_t c = span[i]; if (c != '\t' && c != ' ' && c != '\r' && c != '\n') { return i; } @@ -134,7 +134,7 @@ static size_t findStartEndingWhitespace(const StringView& view) } else { const auto span = view.span16(); for (size_t i = span.size(); i > 0; --i) { - UChar c = span[i - 1]; + char16_t c = span[i - 1]; if (c != '\t' && c != ' ' && c != '\r' && c != '\n') { return i; } @@ -163,7 +163,7 @@ static String removeBackslashes(const StringView& view) } else { auto span = view.span16(); for (size_t i = 0; i < span.size(); ++i) { - UChar c = span[i]; + char16_t c = span[i]; if (c == '\\' && i + 1 < span.size()) { builder.append(span[++i]); } else { @@ -176,7 +176,7 @@ static String removeBackslashes(const StringView& view) static String escapeQuoteOrBackslash(const StringView& view) { - if (view.find([](UChar c) { return c == '"' || c == '\\'; }) == notFound) { + if (view.find([](char16_t c) { return c == '"' || c == '\\'; }) == notFound) { return view.toString(); } @@ -191,7 +191,7 @@ static String escapeQuoteOrBackslash(const StringView& view) } } else { auto span = view.span16(); - for (UChar c : span) { + for (char16_t c : span) { if (c == '"' || c == '\\') { builder.append('\\'); } @@ -560,6 +560,7 @@ JSC_DEFINE_HOST_FUNCTION(constructMIMEType, (JSGlobalObject * globalObject, Call auto* jsInputString = inputArg.toString(globalObject); RETURN_IF_EXCEPTION(scope, {}); auto inputString = jsInputString->view(globalObject); + RETURN_IF_EXCEPTION(scope, {}); // 2. 
Parse type and subtype String type, subtype; diff --git a/src/bun.js/bindings/webcore/JSReadableStream.cpp b/src/bun.js/bindings/webcore/JSReadableStream.cpp index 2079d2f5c6..1b5c4fc84d 100644 --- a/src/bun.js/bindings/webcore/JSReadableStream.cpp +++ b/src/bun.js/bindings/webcore/JSReadableStream.cpp @@ -40,6 +40,7 @@ #include #include "ZigGeneratedClasses.h" #include "JavaScriptCore/BuiltinNames.h" +#include "ZigGlobalObject.h" namespace WebCore { using namespace JSC; @@ -84,6 +85,57 @@ private: }; STATIC_ASSERT_ISO_SUBSPACE_SHARABLE(JSReadableStreamPrototype, JSReadableStreamPrototype::Base); +JSC_DEFINE_HOST_FUNCTION(jsReadableStreamProtoFuncText, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + JSReadableStream* thisObject = jsDynamicCast(callFrame->thisValue()); + if (!thisObject) [[unlikely]] { + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + throwThisTypeError(*globalObject, scope, "ReadableStream"_s, "text"_s); + return {}; + } + + return ZigGlobalObject__readableStreamToText(defaultGlobalObject(globalObject), JSValue::encode(thisObject)); +} + +JSC_DEFINE_HOST_FUNCTION(jsReadableStreamProtoFuncBytes, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + JSReadableStream* thisObject = jsDynamicCast(callFrame->thisValue()); + if (!thisObject) [[unlikely]] { + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + throwThisTypeError(*globalObject, scope, "ReadableStream"_s, "bytes"_s); + return {}; + } + + return ZigGlobalObject__readableStreamToBytes(defaultGlobalObject(globalObject), JSValue::encode(thisObject)); +} + +JSC_DEFINE_HOST_FUNCTION(jsReadableStreamProtoFuncJSON, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + JSReadableStream* thisObject = jsDynamicCast(callFrame->thisValue()); + if (!thisObject) [[unlikely]] { + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + throwThisTypeError(*globalObject, scope, "ReadableStream"_s, "json"_s); + return {}; + } + + return ZigGlobalObject__readableStreamToJSON(defaultGlobalObject(globalObject), JSValue::encode(thisObject)); +} + +JSC_DEFINE_HOST_FUNCTION(jsReadableStreamProtoFuncBlob, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + JSReadableStream* thisObject = jsDynamicCast(callFrame->thisValue()); + if (!thisObject) [[unlikely]] { + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + throwThisTypeError(*globalObject, scope, "ReadableStream"_s, "blob"_s); + return {}; + } + + return ZigGlobalObject__readableStreamToBlob(defaultGlobalObject(globalObject), JSValue::encode(thisObject)); +} using JSReadableStreamDOMConstructor = JSDOMBuiltinConstructor; template<> const ClassInfo JSReadableStreamDOMConstructor::s_info = { "ReadableStream"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSReadableStreamDOMConstructor) }; @@ -112,13 +164,16 @@ template<> FunctionExecutable* JSReadableStreamDOMConstructor::initializeExecuta static const HashTableValue JSReadableStreamPrototypeTableValues[] = { { "constructor"_s, static_cast(JSC::PropertyAttribute::DontEnum), NoIntrinsic, { HashTableValue::GetterSetterType, jsReadableStreamConstructor, 0 } }, - { "locked"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::Accessor | JSC::PropertyAttribute::Builtin), NoIntrinsic, { HashTableValue::BuiltinAccessorType, readableStreamLockedCodeGenerator, 0 } }, + { "blob"_s, static_cast(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, 
jsReadableStreamProtoFuncBlob, 0 } }, + { "bytes"_s, static_cast(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsReadableStreamProtoFuncBytes, 0 } }, { "cancel"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::Builtin), NoIntrinsic, { HashTableValue::BuiltinGeneratorType, readableStreamCancelCodeGenerator, 0 } }, { "getReader"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::Builtin), NoIntrinsic, { HashTableValue::BuiltinGeneratorType, readableStreamGetReaderCodeGenerator, 0 } }, - { "pipeTo"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::Builtin), NoIntrinsic, { HashTableValue::BuiltinGeneratorType, readableStreamPipeToCodeGenerator, 1 } }, + { "json"_s, static_cast(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsReadableStreamProtoFuncJSON, 0 } }, + { "locked"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::Accessor | JSC::PropertyAttribute::Builtin), NoIntrinsic, { HashTableValue::BuiltinAccessorType, readableStreamLockedCodeGenerator, 0 } }, { "pipeThrough"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::Builtin), NoIntrinsic, { HashTableValue::BuiltinGeneratorType, readableStreamPipeThroughCodeGenerator, 2 } }, + { "pipeTo"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::Builtin), NoIntrinsic, { HashTableValue::BuiltinGeneratorType, readableStreamPipeToCodeGenerator, 1 } }, { "tee"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::Builtin), NoIntrinsic, { HashTableValue::BuiltinGeneratorType, readableStreamTeeCodeGenerator, 0 } }, - + { "text"_s, static_cast(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsReadableStreamProtoFuncText, 0 } }, }; const ClassInfo JSReadableStreamPrototype::s_info = { "ReadableStream"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSReadableStreamPrototype) }; diff --git a/src/bun.js/bindings/webcore/JSTextEncoder.cpp b/src/bun.js/bindings/webcore/JSTextEncoder.cpp index aab8702b6c..4e61872dc9 100644 --- a/src/bun.js/bindings/webcore/JSTextEncoder.cpp +++ b/src/bun.js/bindings/webcore/JSTextEncoder.cpp @@ -69,9 +69,9 @@ using namespace JSC; using namespace JSC::DOMJIT; extern "C" JSC::EncodedJSValue TextEncoder__encode8(JSC::JSGlobalObject* global, const LChar* stringPtr, size_t stringLen); -extern "C" JSC::EncodedJSValue TextEncoder__encode16(JSC::JSGlobalObject* global, const UChar* stringPtr, size_t stringLen); +extern "C" JSC::EncodedJSValue TextEncoder__encode16(JSC::JSGlobalObject* global, const char16_t* stringPtr, size_t stringLen); extern "C" size_t TextEncoder__encodeInto8(const LChar* stringPtr, size_t stringLen, void* ptr, size_t len); -extern "C" size_t TextEncoder__encodeInto16(const UChar* stringPtr, size_t stringLen, void* ptr, size_t len); +extern "C" size_t TextEncoder__encodeInto16(const char16_t* stringPtr, size_t stringLen, void* ptr, size_t len); extern "C" JSC::EncodedJSValue TextEncoder__encodeRopeString(JSC::JSGlobalObject* lexicalGlobalObject, JSC::JSString* str); // extern "C" { diff --git a/src/bun.js/bindings/webcore/JSWebSocket.cpp b/src/bun.js/bindings/webcore/JSWebSocket.cpp index 8fc8c4985f..b6e032ab6a 100644 --- a/src/bun.js/bindings/webcore/JSWebSocket.cpp +++ b/src/bun.js/bindings/webcore/JSWebSocket.cpp @@ -213,30 +213,42 @@ static inline JSC::EncodedJSValue constructJSWebSocket3(JSGlobalObject* lexicalG auto 
headersInit = std::optional>, IDLRecord>>::ReturnType>(); if (JSC::JSObject* options = optionsObjectValue.getObject()) { const auto& builtinnames = WebCore::builtinNames(vm); - if (JSValue headersValue = options->getIfPropertyExists(globalObject, builtinnames.headersPublicName())) { + auto headersValue = options->getIfPropertyExists(globalObject, builtinnames.headersPublicName()); + RETURN_IF_EXCEPTION(throwScope, {}); + if (headersValue) { if (!headersValue.isUndefinedOrNull()) { headersInit = convert>, IDLRecord>>(*lexicalGlobalObject, headersValue); RETURN_IF_EXCEPTION(throwScope, {}); } } - if (JSValue protocolsValue = options->getIfPropertyExists(globalObject, PropertyName(Identifier::fromString(vm, "protocols"_s)))) { + auto protocolsValue = options->getIfPropertyExists(globalObject, PropertyName(Identifier::fromString(vm, "protocols"_s))); + RETURN_IF_EXCEPTION(throwScope, {}); + if (protocolsValue) { if (!protocolsValue.isUndefinedOrNull()) { protocols = convert>(*lexicalGlobalObject, protocolsValue); RETURN_IF_EXCEPTION(throwScope, {}); } - } else if (JSValue protocolValue = options->getIfPropertyExists(globalObject, PropertyName(Identifier::fromString(vm, "protocol"_s)))) { - if (!protocolValue.isUndefinedOrNull()) { - protocols = Vector { convert(*lexicalGlobalObject, protocolValue) }; - RETURN_IF_EXCEPTION(throwScope, {}); + } else { + auto protocolValue = options->getIfPropertyExists(globalObject, PropertyName(Identifier::fromString(vm, "protocol"_s))); + RETURN_IF_EXCEPTION(throwScope, {}); + if (protocolValue) { + if (!protocolValue.isUndefinedOrNull()) { + protocols = Vector { convert(*lexicalGlobalObject, protocolValue) }; + RETURN_IF_EXCEPTION(throwScope, {}); + } } } - if (JSValue tlsOptionsValue = options->getIfPropertyExists(globalObject, PropertyName(Identifier::fromString(vm, "tls"_s)))) { + auto tlsOptionsValue = options->getIfPropertyExists(globalObject, PropertyName(Identifier::fromString(vm, "tls"_s))); + RETURN_IF_EXCEPTION(throwScope, {}); + if (tlsOptionsValue) { if (!tlsOptionsValue.isUndefinedOrNull() && tlsOptionsValue.isObject()) { if (JSC::JSObject* tlsOptions = tlsOptionsValue.getObject()) { - if (JSValue rejectUnauthorizedValue = tlsOptions->getIfPropertyExists(globalObject, PropertyName(Identifier::fromString(vm, "rejectUnauthorized"_s)))) { + auto rejectUnauthorizedValue = tlsOptions->getIfPropertyExists(globalObject, PropertyName(Identifier::fromString(vm, "rejectUnauthorized"_s))); + RETURN_IF_EXCEPTION(throwScope, {}); + if (rejectUnauthorizedValue) { if (!rejectUnauthorizedValue.isUndefinedOrNull() && rejectUnauthorizedValue.isBoolean()) { rejectUnauthorized = rejectUnauthorizedValue.asBoolean() ? 1 : 0; } @@ -246,7 +258,6 @@ static inline JSC::EncodedJSValue constructJSWebSocket3(JSGlobalObject* lexicalG } } - RETURN_IF_EXCEPTION(throwScope, {}); auto object = (rejectUnauthorized == -1) ? WebSocket::create(*context, WTFMove(url), protocols, WTFMove(headersInit)) : WebSocket::create(*context, WTFMove(url), protocols, WTFMove(headersInit), rejectUnauthorized ? 
true : false); if constexpr (IsExceptionOr) diff --git a/src/bun.js/bindings/webcore/JSWorker.cpp b/src/bun.js/bindings/webcore/JSWorker.cpp index a4bc10d9e7..d7b52f90cd 100644 --- a/src/bun.js/bindings/webcore/JSWorker.cpp +++ b/src/bun.js/bindings/webcore/JSWorker.cpp @@ -153,30 +153,36 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSWorkerDOMConstructor:: Vector> transferList; if (JSObject* optionsObject = JSC::jsDynamicCast(argument1.value())) { - if (auto nameValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, vm.propertyNames->name)) { + auto nameValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, vm.propertyNames->name); + RETURN_IF_EXCEPTION(throwScope, {}); + if (nameValue) { if (nameValue.isString()) { options.name = nameValue.toWTFString(lexicalGlobalObject); RETURN_IF_EXCEPTION(throwScope, {}); } } - RETURN_IF_EXCEPTION(throwScope, {}); - if (auto miniModeValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "smol"_s))) { + auto miniModeValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "smol"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (miniModeValue) { options.mini = miniModeValue.toBoolean(lexicalGlobalObject); } - RETURN_IF_EXCEPTION(throwScope, {}); - if (auto ref = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "ref"_s))) { + auto ref = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "ref"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (ref) { options.unref = !ref.toBoolean(lexicalGlobalObject); } - RETURN_IF_EXCEPTION(throwScope, {}); - if (auto eval = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "eval"_s))) { + auto eval = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "eval"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (eval) { options.evalMode = eval.toBoolean(lexicalGlobalObject); } - RETURN_IF_EXCEPTION(throwScope, {}); - if (auto preloadModulesValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "preload"_s))) { + auto preloadModulesValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "preload"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (preloadModulesValue) { if (!preloadModulesValue.isUndefinedOrNull()) { if (preloadModulesValue.isString()) { auto str = preloadModulesValue.toWTFString(lexicalGlobalObject); @@ -201,13 +207,16 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSWorkerDOMConstructor:: } workerData = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "workerData"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); if (!workerData) { workerData = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "data"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); if (!workerData) workerData = jsUndefined(); } - RETURN_IF_EXCEPTION(throwScope, {}); - if (JSValue transferListValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "transferList"_s))) { + auto transferListValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "transferList"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (transferListValue) { if (transferListValue.isObject()) { JSC::JSObject* transferListObject = transferListValue.getObject(); if (auto* transferListArray = jsDynamicCast(transferListObject)) { @@ -217,6 +226,7 
@@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSWorkerDOMConstructor:: } return true; }); + RETURN_IF_EXCEPTION(throwScope, {}); } } } @@ -307,7 +317,7 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSWorkerDOMConstructor:: ExceptionOr> serialized = SerializedScriptValue::create(*lexicalGlobalObject, valueToTransfer, WTFMove(transferList), ports, SerializationForStorage::No, SerializationContext::WorkerPostMessage); if (serialized.hasException()) { WebCore::propagateException(*lexicalGlobalObject, throwScope, serialized.releaseException()); - return encodedJSValue(); + RELEASE_AND_RETURN(throwScope, {}); } Vector transferredPorts; @@ -316,7 +326,7 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSWorkerDOMConstructor:: auto disentangleResult = MessagePort::disentanglePorts(WTFMove(ports)); if (disentangleResult.hasException()) { WebCore::propagateException(*lexicalGlobalObject, throwScope, disentangleResult.releaseException()); - return encodedJSValue(); + RELEASE_AND_RETURN(throwScope, {}); } transferredPorts = disentangleResult.releaseReturnValue(); } @@ -574,7 +584,9 @@ static inline JSC::EncodedJSValue jsWorkerPrototypeFunction_postMessage2Body(JSC StructuredSerializeOptions options; if (optionsValue.isObject()) { JSObject* optionsObject = asObject(optionsValue); - if (auto transferListValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "transfer"_s))) { + auto transferListValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "transfer"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (transferListValue) { auto transferList = convert>(*lexicalGlobalObject, transferListValue); RETURN_IF_EXCEPTION(throwScope, {}); options.transfer = WTFMove(transferList); diff --git a/src/bun.js/bindings/webcore/RFC7230.cpp b/src/bun.js/bindings/webcore/RFC7230.cpp index b447a2f180..2c5b3fa7de 100644 --- a/src/bun.js/bindings/webcore/RFC7230.cpp +++ b/src/bun.js/bindings/webcore/RFC7230.cpp @@ -31,7 +31,7 @@ namespace RFC7230 { -bool isTokenCharacter(UChar c) +bool isTokenCharacter(char16_t c) { return isASCIIAlpha(c) || isASCIIDigit(c) || c == '!' 
|| c == '#' || c == '$' @@ -41,7 +41,7 @@ bool isTokenCharacter(UChar c) || c == '`' || c == '|' || c == '~'; } -bool isDelimiter(UChar c) +bool isDelimiter(char16_t c) { return c == '(' || c == ')' || c == ',' || c == '/' || c == ':' || c == ';' @@ -51,23 +51,23 @@ bool isDelimiter(UChar c) || c == '}' || c == '"'; } -static bool isVisibleCharacter(UChar c) +static bool isVisibleCharacter(char16_t c) { return isTokenCharacter(c) || isDelimiter(c); } template -static bool isInRange(UChar c) +static bool isInRange(char16_t c) { return c >= min && c <= max; } -static bool isOBSText(UChar c) +static bool isOBSText(char16_t c) { return isInRange<0x80, 0xFF>(c); } -static bool isQuotedTextCharacter(UChar c) +static bool isQuotedTextCharacter(char16_t c) { return isTabOrSpace(c) || c == 0x21 @@ -76,14 +76,14 @@ static bool isQuotedTextCharacter(UChar c) || isOBSText(c); } -bool isQuotedPairSecondOctet(UChar c) +bool isQuotedPairSecondOctet(char16_t c) { return isTabOrSpace(c) || isVisibleCharacter(c) || isOBSText(c); } -bool isCommentText(UChar c) +bool isCommentText(char16_t c) { return isTabOrSpace(c) || isInRange<0x21, 0x27>(c) @@ -116,7 +116,7 @@ bool isValidValue(StringView value) bool hadNonWhitespace = false; for (size_t i = 0; i < value.length(); ++i) { - UChar c = value[i]; + char16_t c = value[i]; switch (state) { case State::OptionalWhitespace: if (isTabOrSpace(c)) diff --git a/src/bun.js/bindings/webcore/RFC7230.h b/src/bun.js/bindings/webcore/RFC7230.h index 47ecff033c..213a0711e6 100644 --- a/src/bun.js/bindings/webcore/RFC7230.h +++ b/src/bun.js/bindings/webcore/RFC7230.h @@ -30,10 +30,10 @@ namespace RFC7230 { -bool isTokenCharacter(UChar); -bool isCommentText(UChar); -bool isQuotedPairSecondOctet(UChar); -bool isDelimiter(UChar); +bool isTokenCharacter(char16_t); +bool isCommentText(char16_t); +bool isQuotedPairSecondOctet(char16_t); +bool isDelimiter(char16_t); bool isValidName(StringView); bool isValidValue(StringView); diff --git a/src/bun.js/bindings/webcore/ReadableStream.cpp b/src/bun.js/bindings/webcore/ReadableStream.cpp index 6f267a0b35..90f1ebd756 100644 --- a/src/bun.js/bindings/webcore/ReadableStream.cpp +++ b/src/bun.js/bindings/webcore/ReadableStream.cpp @@ -107,7 +107,7 @@ static inline std::optional invokeReadableStreamFunction(JSC::JSGl auto callData = JSC::getCallData(function); auto result = call(&lexicalGlobalObject, function, callData, thisValue, arguments); EXCEPTION_ASSERT(!scope.exception() || vm.hasPendingTerminationException()); - if (scope.exception()) + if (scope.exception()) [[unlikely]] return {}; return result; } diff --git a/src/bun.js/bindings/webcore/SerializedScriptValue.cpp b/src/bun.js/bindings/webcore/SerializedScriptValue.cpp index 02d23e529d..681c16c4d9 100644 --- a/src/bun.js/bindings/webcore/SerializedScriptValue.cpp +++ b/src/bun.js/bindings/webcore/SerializedScriptValue.cpp @@ -2187,7 +2187,7 @@ private: unsigned length = str.length(); // Guard against overflow - if (length > (std::numeric_limits::max() - sizeof(uint32_t)) / sizeof(UChar)) { + if (length > (std::numeric_limits::max() - sizeof(uint32_t)) / sizeof(char16_t)) { fail(); return; } @@ -3338,7 +3338,7 @@ private: static bool readString(const uint8_t*& ptr, const uint8_t* end, String& str, unsigned length, bool is8Bit) { - if (length >= std::numeric_limits::max() / sizeof(UChar)) + if (length >= std::numeric_limits::max() / sizeof(char16_t)) return false; if (is8Bit) { @@ -3349,15 +3349,15 @@ private: return true; } - unsigned size = length * sizeof(UChar); + unsigned size = 
length * sizeof(char16_t); if ((end - ptr) < static_cast(size)) return false; #if ASSUME_LITTLE_ENDIAN - str = String({ reinterpret_cast(ptr), length }); - ptr += length * sizeof(UChar); + str = String({ reinterpret_cast(ptr), length }); + ptr += length * sizeof(char16_t); #else - std::span characters; + std::span characters; str = String::createUninitialized(length, characters); for (unsigned i = 0; i < length; ++i) { uint16_t c; @@ -3384,7 +3384,7 @@ private: static bool readIdentifier(JSC::VM& vm, const uint8_t*& ptr, const uint8_t* end, Identifier& str, unsigned length, bool is8Bit) { - if (length >= std::numeric_limits::max() / sizeof(UChar)) + if (length >= std::numeric_limits::max() / sizeof(char16_t)) return false; if (is8Bit) { @@ -3395,15 +3395,15 @@ private: return true; } - unsigned size = length * sizeof(UChar); + unsigned size = length * sizeof(char16_t); if ((end - ptr) < static_cast(size)) return false; #if ASSUME_LITTLE_ENDIAN - str = Identifier::fromString(vm, { reinterpret_cast(ptr), length }); - ptr += length * sizeof(UChar); + str = Identifier::fromString(vm, { reinterpret_cast(ptr), length }); + ptr += length * sizeof(char16_t); #else - std::span characters; + std::span characters; str = String::createUninitialized(length, characters); for (unsigned i = 0; i < length; ++i) { uint16_t c; @@ -5641,13 +5641,13 @@ static void maybeThrowExceptionIfSerializationFailed(JSGlobalObject& lexicalGlob break; case SerializationReturnCode::StackOverflowError: throwException(&lexicalGlobalObject, scope, createStackOverflowError(&lexicalGlobalObject)); - break; + RELEASE_AND_RETURN(scope, ); case SerializationReturnCode::ValidationError: throwTypeError(&lexicalGlobalObject, scope, "Unable to deserialize data."_s); - break; + RELEASE_AND_RETURN(scope, ); case SerializationReturnCode::DataCloneError: throwDataCloneError(lexicalGlobalObject, scope); - break; + RELEASE_AND_RETURN(scope, ); case SerializationReturnCode::ExistingExceptionError: case SerializationReturnCode::UnspecifiedError: break; @@ -5773,7 +5773,7 @@ ExceptionOr> SerializedScriptValue::create(JSGlobalOb if (arrayBuffer->isLocked()) { auto scope = DECLARE_THROW_SCOPE(vm); throwVMTypeError(&lexicalGlobalObject, scope, errorMessageForTransfer(arrayBuffer)); - return Exception { ExistingExceptionError }; + RELEASE_AND_RETURN(scope, Exception { ExistingExceptionError }); } arrayBuffers.append(WTFMove(arrayBuffer)); continue; @@ -5886,11 +5886,11 @@ ExceptionOr> SerializedScriptValue::create(JSGlobalOb // If we rethrew an exception just now, or we failed with a status code other than success, // we should exit right now. if (scope.exception() || code != SerializationReturnCode::SuccessfullyCompleted) [[unlikely]] - return exceptionForSerializationFailure(code); + RELEASE_AND_RETURN(scope, exceptionForSerializationFailure(code)); auto arrayBufferContentsArray = transferArrayBuffers(vm, arrayBuffers); if (arrayBufferContentsArray.hasException()) { - return arrayBufferContentsArray.releaseException(); + RELEASE_AND_RETURN(scope, arrayBufferContentsArray.releaseException()); } // auto backingStores = ImageBitmap::detachBitmaps(WTFMove(imageBitmaps)); @@ -5932,6 +5932,7 @@ ExceptionOr> SerializedScriptValue::create(JSGlobalOb // WTFMove(serializedVideoChunks), WTFMove(serializedVideoFrameData) // #endif // )); + scope.releaseAssertNoException(); return adoptRef(*new SerializedScriptValue(WTFMove(buffer), arrayBufferContentsArray.releaseReturnValue(), context == SerializationContext::WorkerPostMessage ? 
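
Several hunks above replace a bare `return`/`break` after throwing with `RELEASE_AND_RETURN(scope, ...)`, and the success path in `SerializedScriptValue::create` gains `scope.releaseAssertNoException()`. This is JSC throw-scope discipline: exception-scope verification (in checked builds) requires each path that can leave an exception pending to explicitly hand it off. A hedged sketch with a hypothetical function:

```cpp
// Sketch of the discipline: throwing paths release the scope so the pending
// exception is considered intentionally passed to the caller; success paths
// can pin the "no exception" claim.
static JSC::EncodedJSValue example(JSC::JSGlobalObject* globalObject, bool inputInvalid)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (inputInvalid) {
        throwTypeError(globalObject, scope, "bad input"_s);
        RELEASE_AND_RETURN(scope, {}); // exception handed off to the caller
    }
    scope.releaseAssertNoException(); // crashes if an exception leaked here
    return JSC::JSValue::encode(JSC::jsUndefined());
}
```
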
WTFMove(sharedBuffers) : nullptr #if ENABLE(OFFSCREEN_CANVAS_IN_WORKERS) , diff --git a/src/bun.js/bindings/webcore/WebSocket.cpp b/src/bun.js/bindings/webcore/WebSocket.cpp index 528fdfee0a..8cc3fd3276 100644 --- a/src/bun.js/bindings/webcore/WebSocket.cpp +++ b/src/bun.js/bindings/webcore/WebSocket.cpp @@ -31,6 +31,7 @@ #include "config.h" #include "WebSocket.h" +#include "WebSocketDeflate.h" #include "headers.h" // #include "Blob.h" #include "CloseEvent.h" @@ -71,6 +72,7 @@ #include "JSBuffer.h" #include "ErrorEvent.h" +#include "WebSocketDeflate.h" // #if USE(WEB_THREAD) // #include "WebCoreThreadRun.h" @@ -96,12 +98,12 @@ static size_t getFramingOverhead(size_t payloadSize) const size_t maxReasonSizeInBytes = 123; -static inline bool isValidProtocolCharacter(UChar character) +static inline bool isValidProtocolCharacter(char16_t character) { // Hybi-10 says "(Subprotocol string must consist of) characters in the range U+0021 to U+007E not including // separator characters as defined in [RFC2616]." - const UChar minimumProtocolCharacter = '!'; // U+0021. - const UChar maximumProtocolCharacter = '~'; // U+007E. + const char16_t minimumProtocolCharacter = '!'; // U+0021. + const char16_t maximumProtocolCharacter = '~'; // U+007E. return character >= minimumProtocolCharacter && character <= maximumProtocolCharacter && character != '"' && character != '(' && character != ')' && character != ',' && character != '/' && !(character >= ':' && character <= '@') // U+003A - U+0040 (':', ';', '<', '=', '>', '?', '@'). @@ -1258,22 +1260,44 @@ void WebSocket::didClose(unsigned unhandledBufferedAmount, unsigned short code, this->disablePendingActivity(); } -void WebSocket::didConnect(us_socket_t* socket, char* bufferedData, size_t bufferedDataSize) +void WebSocket::didConnect(us_socket_t* socket, char* bufferedData, size_t bufferedDataSize, const PerMessageDeflateParams* deflate_params) { this->m_upgradeClient = nullptr; + + // Set extensions if permessage-deflate was negotiated + if (deflate_params != nullptr) { + StringBuilder extensions; + extensions.append("permessage-deflate"_s); + if (deflate_params->server_no_context_takeover) { + extensions.append("; server_no_context_takeover"_s); + } + if (deflate_params->client_no_context_takeover) { + extensions.append("; client_no_context_takeover"_s); + } + if (deflate_params->server_max_window_bits != 15) { + extensions.append("; server_max_window_bits="_s); + extensions.append(String::number(deflate_params->server_max_window_bits)); + } + if (deflate_params->client_max_window_bits != 15) { + extensions.append("; client_max_window_bits="_s); + extensions.append(String::number(deflate_params->client_max_window_bits)); + } + this->m_extensions = extensions.toString(); + } + if (m_isSecure) { us_socket_context_t* ctx = (us_socket_context_t*)this->scriptExecutionContext()->connectedWebSocketContext(); - this->m_connectedWebSocket.clientSSL = Bun__WebSocketClientTLS__init(reinterpret_cast(this), socket, ctx, this->scriptExecutionContext()->jsGlobalObject(), reinterpret_cast(bufferedData), bufferedDataSize); + this->m_connectedWebSocket.clientSSL = Bun__WebSocketClientTLS__init(reinterpret_cast(this), socket, ctx, this->scriptExecutionContext()->jsGlobalObject(), reinterpret_cast(bufferedData), bufferedDataSize, deflate_params); this->m_connectedWebSocketKind = ConnectedWebSocketKind::ClientSSL; } else { us_socket_context_t* ctx = (us_socket_context_t*)this->scriptExecutionContext()->connectedWebSocketContext(); - this->m_connectedWebSocket.client = 
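
The new `didConnect` body reconstructs the `ws.extensions` string from the negotiated permessage-deflate parameters, omitting the window-bits entries when they equal 15, the RFC 7692 default. A standalone restatement of that assembly, as a sketch (plain `std::string` instead of `StringBuilder`; `Params`/`formatExtensions` are illustrative names):

```cpp
#include <cstdint>
#include <string>

struct Params {
    uint8_t server_max_window_bits, client_max_window_bits;
    bool server_no_context_takeover, client_no_context_takeover;
};

// Per RFC 7692: window bits of 15 are the default and are left implicit.
static std::string formatExtensions(const Params& p)
{
    std::string s = "permessage-deflate";
    if (p.server_no_context_takeover) s += "; server_no_context_takeover";
    if (p.client_no_context_takeover) s += "; client_no_context_takeover";
    if (p.server_max_window_bits != 15)
        s += "; server_max_window_bits=" + std::to_string(p.server_max_window_bits);
    if (p.client_max_window_bits != 15)
        s += "; client_max_window_bits=" + std::to_string(p.client_max_window_bits);
    return s;
}
// formatExtensions({11, 15, true, false}) ->
//   "permessage-deflate; server_no_context_takeover; server_max_window_bits=11"
```
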
Bun__WebSocketClient__init(reinterpret_cast(this), socket, ctx, this->scriptExecutionContext()->jsGlobalObject(), reinterpret_cast(bufferedData), bufferedDataSize); + this->m_connectedWebSocket.client = Bun__WebSocketClient__init(reinterpret_cast(this), socket, ctx, this->scriptExecutionContext()->jsGlobalObject(), reinterpret_cast(bufferedData), bufferedDataSize, deflate_params); this->m_connectedWebSocketKind = ConnectedWebSocketKind::Client; } this->didConnect(); } -void WebSocket::didFailWithErrorCode(int32_t code) +void WebSocket::didFailWithErrorCode(Bun::WebSocketErrorCode code) { // from new WebSocket() -> connect() @@ -1281,150 +1305,143 @@ void WebSocket::didFailWithErrorCode(int32_t code) return; this->m_upgradeClient = nullptr; + if (this->m_connectedWebSocketKind == ConnectedWebSocketKind::ClientSSL) { + this->m_connectedWebSocket.clientSSL = nullptr; + } else if (this->m_connectedWebSocketKind == ConnectedWebSocketKind::Client) { + this->m_connectedWebSocket.client = nullptr; + } this->m_connectedWebSocketKind = ConnectedWebSocketKind::None; - this->m_connectedWebSocket.client = nullptr; - switch (code) { - // cancel - case 0: { + + case Bun::WebSocketErrorCode::cancel: { + didReceiveClose(CleanStatus::NotClean, 1000, "Connection cancelled"_s); break; } - // invalid_response - case 1: { + case Bun::WebSocketErrorCode::invalid_response: { didReceiveClose(CleanStatus::NotClean, 1002, "Invalid response"_s); break; } - // expected_101_status_code - case 2: { + case Bun::WebSocketErrorCode::expected_101_status_code: { didReceiveClose(CleanStatus::NotClean, 1002, "Expected 101 status code"_s); break; } - // missing_upgrade_header - case 3: { + case Bun::WebSocketErrorCode::missing_upgrade_header: { didReceiveClose(CleanStatus::NotClean, 1002, "Missing upgrade header"_s); break; } - // missing_connection_header - case 4: { + case Bun::WebSocketErrorCode::missing_connection_header: { didReceiveClose(CleanStatus::NotClean, 1002, "Missing connection header"_s); break; } - // missing_websocket_accept_header - case 5: { + case Bun::WebSocketErrorCode::missing_websocket_accept_header: { didReceiveClose(CleanStatus::NotClean, 1002, "Missing websocket accept header"_s); break; } - // invalid_upgrade_header - case 6: { + case Bun::WebSocketErrorCode::invalid_upgrade_header: { didReceiveClose(CleanStatus::NotClean, 1002, "Invalid upgrade header"_s); break; } - // invalid_connection_header - case 7: { + case Bun::WebSocketErrorCode::invalid_connection_header: { didReceiveClose(CleanStatus::NotClean, 1002, "Invalid connection header"_s); break; } - // invalid_websocket_version - case 8: { + case Bun::WebSocketErrorCode::invalid_websocket_version: { didReceiveClose(CleanStatus::NotClean, 1002, "Invalid websocket version"_s); break; } - // mismatch_websocket_accept_header - case 9: { + case Bun::WebSocketErrorCode::mismatch_websocket_accept_header: { didReceiveClose(CleanStatus::NotClean, 1002, "Mismatch websocket accept header"_s); break; } - // missing_client_protocol - case 10: { + case Bun::WebSocketErrorCode::missing_client_protocol: { didReceiveClose(CleanStatus::Clean, 1002, "Missing client protocol"_s); break; } - // mismatch_client_protocol - case 11: { + case Bun::WebSocketErrorCode::mismatch_client_protocol: { didReceiveClose(CleanStatus::Clean, 1002, "Mismatch client protocol"_s); break; } - // timeout - case 12: { + case Bun::WebSocketErrorCode::timeout: { didReceiveClose(CleanStatus::Clean, 1013, "Timeout"_s); break; } - // closed - case 13: { + case Bun::WebSocketErrorCode::closed: 
{ didReceiveClose(CleanStatus::Clean, 1000, "Closed by client"_s); break; } - // failed_to_write - case 14: { + case Bun::WebSocketErrorCode::failed_to_write: { didReceiveClose(CleanStatus::NotClean, 1006, "Failed to write"_s); break; } - // failed_to_connect - case 15: { + case Bun::WebSocketErrorCode::failed_to_connect: { didReceiveClose(CleanStatus::NotClean, 1006, "Failed to connect"_s, true); break; } - // headers_too_large - case 16: { + case Bun::WebSocketErrorCode::headers_too_large: { didReceiveClose(CleanStatus::NotClean, 1007, "Headers too large"_s); break; } - // ended - case 17: { + case Bun::WebSocketErrorCode::ended: { didReceiveClose(CleanStatus::NotClean, 1006, "Connection ended"_s); break; } - // failed_to_allocate_memory - case 18: { + case Bun::WebSocketErrorCode::failed_to_allocate_memory: { didReceiveClose(CleanStatus::NotClean, 1001, "Failed to allocate memory"_s); break; } - // control_frame_is_fragmented - case 19: { + case Bun::WebSocketErrorCode::control_frame_is_fragmented: { didReceiveClose(CleanStatus::NotClean, 1002, "Protocol error - control frame is fragmented"_s); break; } - // invalid_control_frame - case 20: { + case Bun::WebSocketErrorCode::invalid_control_frame: { didReceiveClose(CleanStatus::NotClean, 1002, "Protocol error - invalid control frame"_s); break; } - // compression_unsupported - case 21: { + case Bun::WebSocketErrorCode::compression_unsupported: { didReceiveClose(CleanStatus::Clean, 1011, "Compression not implemented yet"_s); break; } - // unexpected_mask_from_server - case 22: { + case Bun::WebSocketErrorCode::unexpected_mask_from_server: { didReceiveClose(CleanStatus::NotClean, 1002, "Protocol error - unexpected mask from server"_s); break; } - // expected_control_frame - case 23: { + case Bun::WebSocketErrorCode::expected_control_frame: { didReceiveClose(CleanStatus::NotClean, 1002, "Protocol error - expected control frame"_s); break; } - // unsupported_control_frame - case 24: { + case Bun::WebSocketErrorCode::unsupported_control_frame: { didReceiveClose(CleanStatus::NotClean, 1002, "Protocol error - unsupported control frame"_s); break; } - // unexpected_opcode - case 25: { + case Bun::WebSocketErrorCode::unexpected_opcode: { didReceiveClose(CleanStatus::NotClean, 1002, "Protocol error - unexpected opcode"_s); break; } - // invalid_utf8 - case 26: { + case Bun::WebSocketErrorCode::invalid_utf8: { didReceiveClose(CleanStatus::NotClean, 1003, "Server sent invalid UTF8"_s); break; } - // tls_handshake_failed - case 27: { + case Bun::WebSocketErrorCode::tls_handshake_failed: { didReceiveClose(CleanStatus::NotClean, 1015, "TLS handshake failed"_s); break; } + case Bun::WebSocketErrorCode::message_too_big: { + didReceiveClose(CleanStatus::NotClean, 1009, "Message too big"_s); + break; + } + case Bun::WebSocketErrorCode::protocol_error: { + didReceiveClose(CleanStatus::NotClean, 1002, "Protocol error"_s); + break; + } + case Bun::WebSocketErrorCode::compression_failed: { + didReceiveClose(CleanStatus::NotClean, 1002, "Compression failed"_s); + break; + } + case Bun::WebSocketErrorCode::invalid_compressed_data: { + didReceiveClose(CleanStatus::NotClean, 1002, "Invalid compressed data"_s); + break; + } } m_state = CLOSED; @@ -1452,11 +1469,11 @@ void WebSocket::updateHasPendingActivity() } // namespace WebCore -extern "C" void WebSocket__didConnect(WebCore::WebSocket* webSocket, us_socket_t* socket, char* bufferedData, size_t len) +extern "C" void WebSocket__didConnect(WebCore::WebSocket* webSocket, us_socket_t* socket, char* bufferedData, 
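
The switch rewrite above trades bare integers (`case 14:` with a comment) for named `Bun::WebSocketErrorCode` cases; the body of each case is unchanged, still mapping the failure to an RFC 6455 close code and reason. A condensed illustration of that mapping (not the full switch, and it drops the `CleanStatus` argument the real code also passes):

```cpp
#include "WebSocketErrorCode.h"

struct CloseInfo { unsigned short code; const char* reason; };

static CloseInfo closeInfoFor(Bun::WebSocketErrorCode ec)
{
    using EC = Bun::WebSocketErrorCode;
    switch (ec) {
    case EC::invalid_utf8:          return { 1003, "Server sent invalid UTF8" };
    case EC::failed_to_connect:     return { 1006, "Failed to connect" };
    case EC::message_too_big:       return { 1009, "Message too big" };
    case EC::tls_handshake_failed:  return { 1015, "TLS handshake failed" };
    default:                        return { 1002, "Protocol error" };
    }
}
```
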
size_t len, const PerMessageDeflateParams* deflate_params) { - webSocket->didConnect(socket, bufferedData, len); + webSocket->didConnect(socket, bufferedData, len, deflate_params); } -extern "C" void WebSocket__didAbruptClose(WebCore::WebSocket* webSocket, int32_t errorCode) +extern "C" void WebSocket__didAbruptClose(WebCore::WebSocket* webSocket, Bun::WebSocketErrorCode errorCode) { webSocket->didFailWithErrorCode(errorCode); } diff --git a/src/bun.js/bindings/webcore/WebSocket.h b/src/bun.js/bindings/webcore/WebSocket.h index 19c3f93930..eecbcf4e4a 100644 --- a/src/bun.js/bindings/webcore/WebSocket.h +++ b/src/bun.js/bindings/webcore/WebSocket.h @@ -30,6 +30,7 @@ #pragma once +#include "WebSocketDeflate.h" #include "ContextDestructionObserver.h" #include "EventTarget.h" #include "ExceptionOr.h" @@ -37,6 +38,7 @@ #include #include #include "FetchHeaders.h" +#include "WebSocketErrorCode.h" namespace uWS { template @@ -127,8 +129,8 @@ public: void didConnect(); void disablePendingActivity(); void didClose(unsigned unhandledBufferedAmount, unsigned short code, const String& reason); - void didConnect(us_socket_t* socket, char* bufferedData, size_t bufferedDataSize); - void didFailWithErrorCode(int32_t code); + void didConnect(us_socket_t* socket, char* bufferedData, size_t bufferedDataSize, const PerMessageDeflateParams* deflate_params); + void didFailWithErrorCode(Bun::WebSocketErrorCode code); void didReceiveMessage(String&& message); void didReceiveData(const char* data, size_t length); diff --git a/src/bun.js/bindings/webcore/WebSocketDeflate.h b/src/bun.js/bindings/webcore/WebSocketDeflate.h new file mode 100644 index 0000000000..2ac79e8614 --- /dev/null +++ b/src/bun.js/bindings/webcore/WebSocketDeflate.h @@ -0,0 +1,11 @@ +#pragma once +#include +#include + +// This must match the layout of WebSocketDeflate.Params in WebSocketDeflate.zig +typedef struct { + uint8_t server_max_window_bits; + uint8_t client_max_window_bits; + uint8_t server_no_context_takeover; + uint8_t client_no_context_takeover; +} PerMessageDeflateParams; diff --git a/src/bun.js/bindings/webcore/WebSocketErrorCode.h b/src/bun.js/bindings/webcore/WebSocketErrorCode.h new file mode 100644 index 0000000000..93e642847b --- /dev/null +++ b/src/bun.js/bindings/webcore/WebSocketErrorCode.h @@ -0,0 +1,42 @@ +#pragma once + +#include + +namespace Bun { + +enum class WebSocketErrorCode : int32_t { + cancel = 1, + invalid_response = 2, + expected_101_status_code = 3, + missing_upgrade_header = 4, + missing_connection_header = 5, + missing_websocket_accept_header = 6, + invalid_upgrade_header = 7, + invalid_connection_header = 8, + invalid_websocket_version = 9, + mismatch_websocket_accept_header = 10, + missing_client_protocol = 11, + mismatch_client_protocol = 12, + timeout = 13, + closed = 14, + failed_to_write = 15, + failed_to_connect = 16, + headers_too_large = 17, + ended = 18, + failed_to_allocate_memory = 19, + control_frame_is_fragmented = 20, + invalid_control_frame = 21, + compression_unsupported = 22, + invalid_compressed_data = 23, + compression_failed = 24, + unexpected_mask_from_server = 25, + expected_control_frame = 26, + unsupported_control_frame = 27, + unexpected_opcode = 28, + invalid_utf8 = 29, + tls_handshake_failed = 30, + message_too_big = 31, + protocol_error = 32, +}; + +} diff --git a/src/bun.js/bindings/webcore/Worker.cpp b/src/bun.js/bindings/webcore/Worker.cpp index 13d4748d51..c2bd4deeef 100644 --- a/src/bun.js/bindings/webcore/Worker.cpp +++ b/src/bun.js/bindings/webcore/Worker.cpp @@ -417,9 
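
Two ABI contracts are introduced here: `PerMessageDeflateParams` must match the layout of `WebSocketDeflate.Params` in Zig (per the comment in `WebSocketDeflate.h`), and `WebSocketErrorCode : int32_t` must stay in sync with the Zig error enumeration so the `extern "C"` bridge can pass it directly. Since Zig extern structs follow the C ABI, four `uint8_t` fields should pack into 4 bytes with no padding; hypothetical compile-time guards could pin that down:

```cpp
#include <cstddef>
#include <cstdint>
#include "WebSocketDeflate.h"

// Assumption-labeled sketch: these static_asserts are not in the diff, but
// they express the layout contract the comment in WebSocketDeflate.h states.
static_assert(sizeof(PerMessageDeflateParams) == 4);
static_assert(offsetof(PerMessageDeflateParams, client_max_window_bits) == 1);
static_assert(offsetof(PerMessageDeflateParams, client_no_context_takeover) == 3);
```
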
+417,11 @@ bool Worker::dispatchErrorWithValue(Zig::GlobalObject* workerGlobalObject, JSVal ScriptExecutionContext::postTaskTo(ctx->identifier(), [protectedThis = Ref { *this }, serialized](ScriptExecutionContext& context) -> void { auto* globalObject = context.globalObject(); + auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); ErrorEvent::Init init; JSValue deserialized = serialized->deserialize(*globalObject, globalObject, SerializationErrorMode::NonThrowing); - if (!deserialized) return; + RETURN_IF_EXCEPTION(scope, ); init.error = deserialized; auto event = ErrorEvent::create(eventNames().errorEvent, init, EventIsTrusted::Yes); @@ -474,8 +476,13 @@ extern "C" void WebWorker__dispatchExit(Zig::GlobalObject* globalObject, Worker* vm.setHasTerminationRequest(); { - globalObject->esmRegistryMap()->clear(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); + auto* esmRegistryMap = globalObject->esmRegistryMap(); + scope.exception(); // TODO: handle or assert none? + esmRegistryMap->clear(globalObject); + scope.exception(); // TODO: handle or assert none? globalObject->requireMap()->clear(globalObject); + scope.exception(); // TODO: handle or assert none? vm.deleteAllCode(JSC::DeleteAllCodeEffort::PreventCollectionAndDeleteAllCode); gcUnprotect(globalObject); globalObject = nullptr; @@ -569,8 +576,9 @@ JSValue createNodeWorkerThreadsBinding(Zig::GlobalObject* globalObject) ASSERT(pair->canGetIndexQuickly(1u)); workerData = pair->getIndexQuickly(0); RETURN_IF_EXCEPTION(scope, {}); + auto environmentDataValue = pair->getIndexQuickly(1); // it might not be a Map if the parent had not set up environmentData yet - environmentData = jsDynamicCast(pair->getIndexQuickly(1)); + environmentData = environmentDataValue ? jsDynamicCast(environmentDataValue) : nullptr; RETURN_IF_EXCEPTION(scope, {}); // Main thread starts at 1 @@ -611,8 +619,6 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionPostMessage, if (!context) return JSValue::encode(jsUndefined()); - auto throwScope = DECLARE_THROW_SCOPE(vm); - JSC::JSValue value = callFrame->argument(0); JSC::JSValue options = callFrame->argument(1); @@ -638,15 +644,17 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionPostMessage, Vector> ports; ExceptionOr> serialized = SerializedScriptValue::create(*globalObject, value, WTFMove(transferList), ports, SerializationForStorage::No, SerializationContext::WorkerPostMessage); if (serialized.hasException()) { - WebCore::propagateException(*globalObject, throwScope, serialized.releaseException()); - return JSValue::encode(jsUndefined()); + WebCore::propagateException(*globalObject, scope, serialized.releaseException()); + RELEASE_AND_RETURN(scope, {}); } + scope.assertNoException(); ExceptionOr> disentangledPorts = MessagePort::disentanglePorts(WTFMove(ports)); if (disentangledPorts.hasException()) { - WebCore::propagateException(*globalObject, throwScope, serialized.releaseException()); - return JSValue::encode(jsUndefined()); + WebCore::propagateException(*globalObject, scope, serialized.releaseException()); + RELEASE_AND_RETURN(scope, {}); } + scope.assertNoException(); MessageWithMessagePorts messageWithMessagePorts { serialized.releaseReturnValue(), disentangledPorts.releaseReturnValue() }; diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig index 100aa7a6cc..d928e59b2f 100644 --- a/src/bun.js/event_loop.zig +++ b/src/bun.js/event_loop.zig @@ -70,20 +70,20 @@ pub fn exit(this: *EventLoop) void { defer this.debug.exit(); if (count == 1 and !this.virtual_machine.is_inside_deferred_task_queue) { - 
this.drainMicrotasksWithGlobal(this.global, this.virtual_machine.jsc); + this.drainMicrotasksWithGlobal(this.global, this.virtual_machine.jsc) catch {}; } this.entered_event_loop_count -= 1; } -pub fn exitMaybeDrainMicrotasks(this: *EventLoop, allow_drain_microtask: bool) void { +pub fn exitMaybeDrainMicrotasks(this: *EventLoop, allow_drain_microtask: bool) bun.JSExecutionTerminated!void { const count = this.entered_event_loop_count; log("exit() = {d}", .{count - 1}); defer this.debug.exit(); if (allow_drain_microtask and count == 1 and !this.virtual_machine.is_inside_deferred_task_queue) { - this.drainMicrotasksWithGlobal(this.global, this.virtual_machine.jsc); + try this.drainMicrotasksWithGlobal(this.global, this.virtual_machine.jsc); } this.entered_event_loop_count -= 1; @@ -107,11 +107,15 @@ pub fn tickWhilePaused(this: *EventLoop, done: *bool) void { } extern fn JSC__JSGlobalObject__drainMicrotasks(*JSC.JSGlobalObject) void; -pub fn drainMicrotasksWithGlobal(this: *EventLoop, globalObject: *JSC.JSGlobalObject, jsc_vm: *JSC.VM) void { +pub fn drainMicrotasksWithGlobal(this: *EventLoop, globalObject: *JSC.JSGlobalObject, jsc_vm: *JSC.VM) bun.JSExecutionTerminated!void { JSC.markBinding(@src()); + var scope: JSC.CatchScope = undefined; + scope.init(globalObject, @src()); + defer scope.deinit(); jsc_vm.releaseWeakRefs(); JSC__JSGlobalObject__drainMicrotasks(globalObject); + try scope.assertNoExceptionExceptTermination(); this.virtual_machine.is_inside_deferred_task_queue = true; this.deferred_tasks.run(); @@ -122,14 +126,14 @@ pub fn drainMicrotasksWithGlobal(this: *EventLoop, globalObject: *JSC.JSGlobalOb } } -pub fn drainMicrotasks(this: *EventLoop) void { - this.drainMicrotasksWithGlobal(this.global, this.virtual_machine.jsc); +pub fn drainMicrotasks(this: *EventLoop) bun.JSExecutionTerminated!void { + try this.drainMicrotasksWithGlobal(this.global, this.virtual_machine.jsc); } // should be called after exit() pub fn maybeDrainMicrotasks(this: *EventLoop) void { if (this.entered_event_loop_count == 0 and !this.virtual_machine.is_inside_deferred_task_queue) { - this.drainMicrotasksWithGlobal(this.global, this.virtual_machine.jsc); + this.drainMicrotasksWithGlobal(this.global, this.virtual_machine.jsc) catch {}; } } @@ -310,7 +314,7 @@ pub fn tickConcurrentWithCount(this: *EventLoop) usize { return this.tasks.count - start_count; } -pub inline fn usocketsLoop(this: *const EventLoop) *uws.Loop { +pub fn usocketsLoop(this: *const EventLoop) *uws.Loop { if (comptime Environment.isWindows) { return this.uws_loop.?; } @@ -444,6 +448,9 @@ pub fn processGCTimer(this: *EventLoop) void { pub fn tick(this: *EventLoop) void { JSC.markBinding(@src()); + var scope: JSC.CatchScope = undefined; + scope.init(this.global, @src()); + defer scope.deinit(); this.entered_event_loop_count += 1; this.debug.enter(); defer { @@ -462,7 +469,8 @@ pub fn tick(this: *EventLoop) void { while (this.tickWithCount(ctx) > 0) : (this.global.handleRejectedPromises()) { this.tickConcurrent(); } else { - this.drainMicrotasksWithGlobal(global, global_vm); + this.drainMicrotasksWithGlobal(global, global_vm) catch return; + if (scope.hasException()) return; this.tickConcurrent(); if (this.tasks.count > 0) continue; } @@ -545,6 +553,11 @@ pub fn ensureWaker(this: *EventLoop) void { // _ = actual.addPostHandler(*JSC.EventLoop, this, JSC.EventLoop.afterUSocketsTick); // _ = actual.addPreHandler(*JSC.VM, this.virtual_machine.jsc, JSC.VM.drainMicrotasks); } + if (comptime Environment.isWindows) { + if (this.uws_loop == null) { + 
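
The event-loop hunks change `drainMicrotasksWithGlobal` to return `bun.JSExecutionTerminated!void`: draining runs arbitrary JS, and if a termination request (e.g. from `worker.terminate()`) is pending afterwards, callers unwind the tick loop instead of continuing. The `catch {}` in `exit`/`maybeDrainMicrotasks` is the shutdown path, where there is nothing left to unwind. A sketch of the invariant the new `CatchScope` enforces, expressed from the C++ side (`drainAndVerify` is a hypothetical helper):

```cpp
// Ordinary microtask exceptions are reported inside the drain loop itself,
// so the only exception allowed to remain pending afterwards is termination.
static void drainAndVerify(JSC::JSGlobalObject* globalObject)
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_CATCH_SCOPE(vm);
    vm.drainMicrotasks();
    EXCEPTION_ASSERT(!scope.exception() || vm.hasPendingTerminationException());
}
```
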
this.uws_loop = bun.uws.Loop.get(); + } + } bun.uws.Loop.get().internal_loop_data.setParentEventLoop(bun.JSC.EventLoopHandle.init(this)); } diff --git a/src/bun.js/event_loop/JSCScheduler.zig b/src/bun.js/event_loop/JSCScheduler.zig index 57babb283f..09c4c72205 100644 --- a/src/bun.js/event_loop/JSCScheduler.zig +++ b/src/bun.js/event_loop/JSCScheduler.zig @@ -2,7 +2,14 @@ const JSCScheduler = @This(); pub const JSCDeferredWorkTask = opaque { extern fn Bun__runDeferredWork(task: *JSCScheduler.JSCDeferredWorkTask) void; - pub const run = Bun__runDeferredWork; + pub fn run(task: *JSCScheduler.JSCDeferredWorkTask) void { + const globalThis = bun.jsc.VirtualMachine.get().global; + var scope: bun.jsc.ExceptionValidationScope = undefined; + scope.init(globalThis, @src()); + defer scope.deinit(); + Bun__runDeferredWork(task); + scope.assertNoExceptionExceptTermination() catch return; + } }; export fn Bun__eventLoop__incrementRefConcurrently(jsc_vm: *VirtualMachine, delta: c_int) void { diff --git a/src/bun.js/event_loop/Task.zig b/src/bun.js/event_loop/Task.zig index c29a78de2a..4e43fe5c94 100644 --- a/src/bun.js/event_loop/Task.zig +++ b/src/bun.js/event_loop/Task.zig @@ -71,6 +71,7 @@ pub const Task = TaggedPointerUnion(.{ ShellGlobTask, ShellIOReaderAsyncDeinit, ShellIOWriterAsyncDeinit, + ShellIOWriter, ShellLsTask, ShellMkdirTask, ShellMvBatchedTask, @@ -80,6 +81,7 @@ pub const Task = TaggedPointerUnion(.{ ShellTouchTask, Stat, StatFS, + StreamPending, Symlink, ThreadSafeFunction, TimeoutObject, @@ -95,7 +97,7 @@ pub const Task = TaggedPointerUnion(.{ pub fn tickQueueWithCount(this: *EventLoop, virtual_machine: *VirtualMachine) u32 { var global = this.global; const global_vm = global.vm(); - var counter: usize = 0; + var counter: u32 = 0; if (comptime Environment.isDebug) { if (this.debug.js_call_count_outside_tick_queue > this.debug.drain_microtasks_count_outside_tick_queue) { @@ -144,6 +146,10 @@ pub fn tickQueueWithCount(this: *EventLoop, virtual_machine: *VirtualMachine) u3 var shell_ls_task: *ShellIOWriterAsyncDeinit = task.get(ShellIOWriterAsyncDeinit).?; shell_ls_task.runFromMainThread(); }, + @field(Task.Tag, @typeName(ShellIOWriter)) => { + var shell_io_writer: *ShellIOWriter = task.get(ShellIOWriter).?; + shell_io_writer.runFromMainThread(); + }, @field(Task.Tag, @typeName(ShellIOReaderAsyncDeinit)) => { var shell_ls_task: *ShellIOReaderAsyncDeinit = task.get(ShellIOReaderAsyncDeinit).?; shell_ls_task.runFromMainThread(); @@ -218,7 +224,7 @@ pub fn tickQueueWithCount(this: *EventLoop, virtual_machine: *VirtualMachine) u3 }, @field(Task.Tag, @typeName(bun.api.napi.napi_async_work)) => { const transform_task: *bun.api.napi.napi_async_work = task.get(bun.api.napi.napi_async_work).?; - transform_task.*.runFromJS(); + transform_task.runFromJS(virtual_machine, global); }, @field(Task.Tag, @typeName(ThreadSafeFunction)) => { var transform_task: *ThreadSafeFunction = task.as(ThreadSafeFunction); @@ -460,7 +466,7 @@ pub fn tickQueueWithCount(this: *EventLoop, virtual_machine: *VirtualMachine) u3 }, @field(Task.Tag, @typeName(RuntimeTranspilerStore)) => { var any: *RuntimeTranspilerStore = task.get(RuntimeTranspilerStore).?; - any.drain(); + any.drain() catch {}; }, @field(Task.Tag, @typeName(ServerAllConnectionsClosedTask)) => { var any: *ServerAllConnectionsClosedTask = task.get(ServerAllConnectionsClosedTask).?; @@ -484,17 +490,25 @@ pub fn tickQueueWithCount(this: *EventLoop, virtual_machine: *VirtualMachine) u3 var any: *FlushPendingFileSinkTask = task.get(FlushPendingFileSinkTask).?; 
any.runFromJSThread(); }, + @field(Task.Tag, @typeName(StreamPending)) => { + var any: *StreamPending = task.get(StreamPending).?; + any.runFromJSThread(); + }, - else => { + .@"shell.builtin.yes.YesTask", .@"bun.js.api.Timer.ImmediateObject", .@"bun.js.api.Timer.TimeoutObject" => { bun.Output.panic("Unexpected tag: {s}", .{@tagName(task.tag())}); }, + _ => { + // handle unnamed variants + bun.Output.panic("Unknown tag: {d}", .{@intFromEnum(task.tag())}); + }, } - this.drainMicrotasksWithGlobal(global, global_vm); + this.drainMicrotasksWithGlobal(global, global_vm) catch return counter; } this.tasks.head = if (this.tasks.count == 0) 0 else this.tasks.head; - return @as(u32, @truncate(counter)); + return counter; } const TaggedPointerUnion = bun.TaggedPointerUnion; @@ -564,6 +578,7 @@ const Unlink = AsyncFS.unlink; const NativeZlib = JSC.API.NativeZlib; const NativeBrotli = JSC.API.NativeBrotli; const NativeZstd = JSC.API.NativeZstd; +const StreamPending = JSC.WebCore.streams.Result.Pending; const ShellGlobTask = shell.interpret.Interpreter.Expansion.ShellGlobTask; const ShellRmTask = shell.Interpreter.Builtin.Rm.ShellRmTask; @@ -579,6 +594,7 @@ const ShellAsync = shell.Interpreter.Async; // const ShellIOReaderAsyncDeinit = shell.Interpreter.IOReader.AsyncDeinit; const ShellIOReaderAsyncDeinit = shell.Interpreter.AsyncDeinitReader; const ShellIOWriterAsyncDeinit = shell.Interpreter.AsyncDeinitWriter; +const ShellIOWriter = shell.Interpreter.IOWriter; const TimeoutObject = Timer.TimeoutObject; const ImmediateObject = Timer.ImmediateObject; const ProcessWaiterThreadTask = if (Environment.isPosix) bun.spawn.process.WaiterThread.ProcessQueue.ResultTask else opaque {}; diff --git a/src/bun.js/ipc.zig b/src/bun.js/ipc.zig index 3e43f8151f..c4a9518cbe 100644 --- a/src/bun.js/ipc.zig +++ b/src/bun.js/ipc.zig @@ -55,14 +55,12 @@ pub const IPCDecodeError = error{ NotEnoughBytes, /// Format could not be recognized. Report an error and close the socket. InvalidFormat, -} || bun.OOM; +} || bun.JSError; pub const IPCSerializationError = error{ /// Value could not be serialized. SerializationFailed, - /// Out of memory - OutOfMemory, -}; +} || bun.JSError; const advanced = struct { pub const header_length = @sizeOf(IPCMessageType) + @sizeOf(u32); @@ -109,11 +107,7 @@ const advanced = struct { } const message = data[header_length .. 
header_length + message_len]; - const deserialized = JSValue.deserialize(message, global); - - if (deserialized == .zero) { - return IPCDecodeError.InvalidFormat; - } + const deserialized = try JSValue.deserialize(message, global); return .{ .bytes_consumed = header_length + message_len, @@ -137,8 +131,7 @@ const advanced = struct { } pub fn serialize(writer: *bun.io.StreamBuffer, global: *JSC.JSGlobalObject, value: JSValue, is_internal: IsInternal) !usize { - const serialized = value.serialize(global, true) orelse - return IPCSerializationError.SerializationFailed; + const serialized = try value.serialize(global, true); defer serialized.deinit(); const size: u32 = @intCast(serialized.data.len); @@ -241,7 +234,7 @@ const json = struct { pub fn serialize(writer: *bun.io.StreamBuffer, global: *JSC.JSGlobalObject, value: JSValue, is_internal: IsInternal) !usize { var out: bun.String = undefined; - value.jsonStringify(global, 0, &out); + try value.jsonStringify(global, 0, &out); defer out.deref(); if (out.tag == .Dead) return IPCSerializationError.SerializationFailed; @@ -346,17 +339,17 @@ pub const CallbackList = union(enum) { const prev = self.callback; const arr = try JSC.JSValue.createEmptyArray(global, 2); arr.protect(); - arr.putIndex(global, 0, prev); // add the old callback to the array - arr.putIndex(global, 1, callback); // add the new callback to the array + try arr.putIndex(global, 0, prev); // add the old callback to the array + try arr.putIndex(global, 1, callback); // add the new callback to the array prev.unprotect(); // owned by the array now self.* = .{ .callback_array = arr }; }, .callback_array => |arr| { - arr.push(global, callback); + try arr.push(global, callback); }, } } - fn callNextTick(self: *@This(), global: *JSC.JSGlobalObject) void { + fn callNextTick(self: *@This(), global: *JSC.JSGlobalObject) bun.JSError!void { switch (self.*) { .ack_nack => {}, .none => {}, @@ -366,8 +359,8 @@ pub const CallbackList = union(enum) { self.* = .none; }, .callback_array => { - var iter = self.callback_array.arrayIterator(global); - while (iter.next()) |item| { + var iter = try self.callback_array.arrayIterator(global); + while (try iter.next()) |item| { item.callNextTick(global, .{.null}); } self.callback_array.unprotect(); @@ -399,8 +392,8 @@ pub const SendHandle = struct { /// Call the callback and deinit pub fn complete(self: *SendHandle, global: *JSC.JSGlobalObject) void { - self.callbacks.callNextTick(global); - self.deinit(); + defer self.deinit(); + self.callbacks.callNextTick(global) catch {}; // TODO: properly propagate exception upwards } pub fn deinit(self: *SendHandle) void { self.data.deinit(); @@ -840,7 +833,7 @@ pub const SendQueue = struct { if (fd) |fd_unwrapped| { this._onWriteComplete(socket.writeFd(data, fd_unwrapped)); } else { - this._onWriteComplete(socket.write(data, false)); + this._onWriteComplete(socket.write(data)); } }, }; @@ -981,8 +974,8 @@ pub fn doSend(ipc: ?*SendQueue, globalObject: *JSC.JSGlobalObject, callFrame: *J if (serialized_array.isUndefinedOrNull()) { handle = .js_undefined; } else { - const serialized_handle = serialized_array.getIndex(globalObject, 0); - const serialized_message = serialized_array.getIndex(globalObject, 1); + const serialized_handle = try serialized_array.getIndex(globalObject, 0); + const serialized_message = try serialized_array.getIndex(globalObject, 1); handle = serialized_handle; message = serialized_message; } @@ -1155,7 +1148,7 @@ fn onData2(send_queue: *SendQueue, all_data: []const u8) void { log("hit 
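
For context on the `advanced` decoder above: as I read the Zig, each frame is a fixed header (`@sizeOf(IPCMessageType)` plus a `u32` payload length) followed by a `SerializedScriptValue` payload handed to `JSValue.deserialize`; `NotEnoughBytes` tells the caller to buffer and retry. A parse sketch under those assumptions (field widths and host endianness are my reading, not a wire spec):

```cpp
#include <cstdint>
#include <cstring>
#include <optional>
#include <span>

struct Frame { uint8_t type; std::span<const uint8_t> payload; size_t consumed; };

// Sketch of the framing: [message-type byte][u32 payload length][payload].
static std::optional<Frame> tryDecode(std::span<const uint8_t> data)
{
    constexpr size_t headerLength = 1 + sizeof(uint32_t);
    if (data.size() < headerLength)
        return std::nullopt; // NotEnoughBytes: wait for more data
    uint32_t len;
    std::memcpy(&len, data.data() + 1, sizeof(len));
    if (data.size() < headerLength + len)
        return std::nullopt;
    return Frame { data[0], data.subspan(headerLength, len), headerLength + len };
}
```
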
NotEnoughBytes", .{}); return; }, - error.InvalidFormat => { + error.InvalidFormat, error.JSError => { send_queue.closeSocket(.failure, .user); return; }, @@ -1188,7 +1181,7 @@ fn onData2(send_queue: *SendQueue, all_data: []const u8) void { log("hit NotEnoughBytes2", .{}); return; }, - error.InvalidFormat => { + error.InvalidFormat, error.JSError => { send_queue.closeSocket(.failure, .user); return; }, @@ -1349,7 +1342,7 @@ pub const IPCHandlers = struct { log("hit NotEnoughBytes3", .{}); return; }, - error.InvalidFormat => { + error.InvalidFormat, error.JSError => { send_queue.closeSocket(.failure, .user); return; }, @@ -1382,15 +1375,11 @@ pub const IPCHandlers = struct { extern "C" fn IPCSerialize(globalObject: *JSC.JSGlobalObject, message: JSC.JSValue, handle: JSC.JSValue) JSC.JSValue; pub fn ipcSerialize(globalObject: *JSC.JSGlobalObject, message: JSC.JSValue, handle: JSC.JSValue) bun.JSError!JSC.JSValue { - const result = IPCSerialize(globalObject, message, handle); - if (result == .zero) return error.JSError; - return result; + return bun.jsc.fromJSHostCall(globalObject, @src(), IPCSerialize, .{ globalObject, message, handle }); } extern "C" fn IPCParse(globalObject: *JSC.JSGlobalObject, target: JSC.JSValue, serialized: JSC.JSValue, fd: JSC.JSValue) JSC.JSValue; pub fn ipcParse(globalObject: *JSC.JSGlobalObject, target: JSC.JSValue, serialized: JSC.JSValue, fd: JSC.JSValue) bun.JSError!JSC.JSValue { - const result = IPCParse(globalObject, target, serialized, fd); - if (result == .zero) return error.JSError; - return result; + return bun.jsc.fromJSHostCall(globalObject, @src(), IPCParse, .{ globalObject, target, serialized, fd }); } diff --git a/src/bun.js/jsc.zig b/src/bun.js/jsc.zig index 36b43620f8..ec091dafed 100644 --- a/src/bun.js/jsc.zig +++ b/src/bun.js/jsc.zig @@ -33,8 +33,9 @@ pub const JSHostFnZigWithContext = host_fn.JSHostFnZigWithContext; pub const JSHostFunctionTypeWithContext = host_fn.JSHostFunctionTypeWithContext; pub const toJSHostFn = host_fn.toJSHostFn; pub const toJSHostFnWithContext = host_fn.toJSHostFnWithContext; -pub const toJSHostValue = host_fn.toJSHostValue; -pub const fromJSHostValue = host_fn.fromJSHostValue; +pub const toJSHostCall = host_fn.toJSHostCall; +pub const fromJSHostCall = host_fn.fromJSHostCall; +pub const fromJSHostCallGeneric = host_fn.fromJSHostCallGeneric; pub const createCallback = host_fn.createCallback; // JSC Classes Bindings @@ -79,6 +80,8 @@ pub const Weak = @import("Weak.zig").Weak; pub const WeakRefType = @import("Weak.zig").WeakRefType; pub const Exception = @import("bindings/Exception.zig").Exception; pub const SourceProvider = @import("bindings/SourceProvider.zig").SourceProvider; +pub const CatchScope = @import("bindings/CatchScope.zig").CatchScope; +pub const ExceptionValidationScope = @import("bindings/CatchScope.zig").ExceptionValidationScope; // JavaScript-related pub const Errorable = @import("bindings/Errorable.zig").Errorable; diff --git a/src/bun.js/jsc/array_buffer.zig b/src/bun.js/jsc/array_buffer.zig index 8904402659..aafb6a04ea 100644 --- a/src/bun.js/jsc/array_buffer.zig +++ b/src/bun.js/jsc/array_buffer.zig @@ -55,7 +55,7 @@ pub const ArrayBuffer = extern struct { } }, .err => |err| { - return globalObject.throwValue(err.toJSC(globalObject)) catch .zero; + return globalObject.throwValue(err.toJS(globalObject)) catch .zero; }, } } @@ -72,7 +72,7 @@ pub const ArrayBuffer = extern struct { const stat = switch (bun.sys.fstat(fd)) { .err => |err| { fd.close(); - return 
globalObject.throwValue(err.toJSC(globalObject)); + return globalObject.throwValue(err.toJS(globalObject)); }, .result => |fstat| fstat, }; @@ -109,7 +109,7 @@ pub const ArrayBuffer = extern struct { return JSBuffer__fromMmap(globalObject, buf.ptr, buf.len); }, .err => |err| { - return globalObject.throwValue(err.toJSC(globalObject)); + return globalObject.throwValue(err.toJS(globalObject)); }, } } @@ -141,34 +141,32 @@ pub const ArrayBuffer = extern struct { return Stream{ .pos = 0, .buf = this.slice() }; } - // TODO: this can throw an error! should use JSError!JSValue - pub fn create(globalThis: *JSC.JSGlobalObject, bytes: []const u8, comptime kind: JSC.JSValue.JSType) JSC.JSValue { + pub fn create(globalThis: *JSC.JSGlobalObject, bytes: []const u8, comptime kind: JSC.JSValue.JSType) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); return switch (comptime kind) { - .Uint8Array => Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, false), - .ArrayBuffer => Bun__createArrayBufferForCopy(globalThis, bytes.ptr, bytes.len), + .Uint8Array => bun.jsc.fromJSHostCall(globalThis, @src(), Bun__createUint8ArrayForCopy, .{ globalThis, bytes.ptr, bytes.len, false }), + .ArrayBuffer => bun.jsc.fromJSHostCall(globalThis, @src(), Bun__createArrayBufferForCopy, .{ globalThis, bytes.ptr, bytes.len }), else => @compileError("Not implemented yet"), }; } - pub fn createEmpty(globalThis: *JSC.JSGlobalObject, comptime kind: JSC.JSValue.JSType) JSC.JSValue { + pub fn createEmpty(globalThis: *JSC.JSGlobalObject, comptime kind: JSC.JSValue.JSType) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); - return switch (comptime kind) { - .Uint8Array => Bun__createUint8ArrayForCopy(globalThis, null, 0, false), - .ArrayBuffer => Bun__createArrayBufferForCopy(globalThis, null, 0), + .Uint8Array => bun.jsc.fromJSHostCall(Bun__createUint8ArrayForCopy, .{ globalThis, null, 0, false }), + .ArrayBuffer => bun.jsc.fromJSHostCall(Bun__createArrayBufferForCopy, .{ globalThis, null, 0 }), else => @compileError("Not implemented yet"), }; } - pub fn createBuffer(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSC.JSValue { + pub fn createBuffer(globalThis: *JSC.JSGlobalObject, bytes: []const u8) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); - return Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, true); + return bun.jsc.fromJSHostCall(globalThis, @src(), Bun__createUint8ArrayForCopy, .{ globalThis, bytes.ptr, bytes.len, true }); } - pub fn createUint8Array(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSC.JSValue { + pub fn createUint8Array(globalThis: *JSC.JSGlobalObject, bytes: []const u8) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); - return Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, false); + return bun.jsc.fromJSHostCall(globalThis, @src(), Bun__createUint8ArrayForCopy, .{ globalThis, bytes.ptr, bytes.len, false }); } extern "c" fn Bun__allocUint8ArrayForCopy(*JSC.JSGlobalObject, usize, **anyopaque) JSC.JSValue; @@ -177,13 +175,10 @@ pub const ArrayBuffer = extern struct { pub fn alloc(global: *JSC.JSGlobalObject, comptime kind: JSC.JSValue.JSType, len: u32) JSError!struct { JSC.JSValue, []u8 } { var ptr: [*]u8 = undefined; const buf = switch (comptime kind) { - .Uint8Array => Bun__allocUint8ArrayForCopy(global, len, @ptrCast(&ptr)), - .ArrayBuffer => Bun__allocArrayBufferForCopy(global, len, @ptrCast(&ptr)), + .Uint8Array => try bun.jsc.fromJSHostCall(global, @src(), Bun__allocUint8ArrayForCopy, .{ global, len, @ptrCast(&ptr) }), + .ArrayBuffer => try 
bun.jsc.fromJSHostCall(global, @src(), Bun__allocArrayBufferForCopy, .{ global, len, @ptrCast(&ptr) }), else => @compileError("Not implemented yet"), }; - if (buf == .zero) { - return error.JSError; - } return .{ buf, ptr[0..len] }; } @@ -215,7 +210,7 @@ pub const ArrayBuffer = extern struct { return ArrayBuffer{ .offset = 0, .len = @as(u32, @intCast(bytes.len)), .byte_len = @as(u32, @intCast(bytes.len)), .typed_array_type = typed_array_type, .ptr = bytes.ptr }; } - pub fn toJSUnchecked(this: ArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) JSC.JSValue { + pub fn toJSUnchecked(this: ArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) bun.JSError!JSC.JSValue { // The reason for this is // JSC C API returns a detached arraybuffer @@ -258,7 +253,7 @@ pub const ArrayBuffer = extern struct { const log = Output.scoped(.ArrayBuffer, false); - pub fn toJS(this: ArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) JSC.JSValue { + pub fn toJS(this: ArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) bun.JSError!JSC.JSValue { if (this.value != .zero) { return this.value; } @@ -434,7 +429,7 @@ pub const ArrayBuffer = extern struct { } /// This clones bytes - pub fn toJS(this: BinaryType, bytes: []const u8, globalThis: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJS(this: BinaryType, bytes: []const u8, globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { switch (this) { .Buffer => return JSC.ArrayBuffer.createBuffer(globalThis, bytes), .ArrayBuffer => return JSC.ArrayBuffer.create(globalThis, bytes, .ArrayBuffer), @@ -442,7 +437,7 @@ pub const ArrayBuffer = extern struct { // These aren't documented, but they are supported .Uint16Array, .Uint32Array, .Int8Array, .Int16Array, .Int32Array, .Float16Array, .Float32Array, .Float64Array => { - const buffer = JSC.ArrayBuffer.create(globalThis, bytes, .ArrayBuffer); + const buffer = try JSC.ArrayBuffer.create(globalThis, bytes, .ArrayBuffer); return JSC.JSValue.c(JSC.C.JSObjectMakeTypedArrayWithArrayBuffer(globalThis, this.toTypedArrayType(), buffer.asObjectRef(), null)); }, } diff --git a/src/bun.js/jsc/host_fn.zig b/src/bun.js/jsc/host_fn.zig index a563783e63..3b8677944b 100644 --- a/src/bun.js/jsc/host_fn.zig +++ b/src/bun.js/jsc/host_fn.zig @@ -81,20 +81,73 @@ pub fn toJSHostSetterValue(globalThis: *JSGlobalObject, value: error{ OutOfMemor return true; } -pub fn toJSHostValue(globalThis: *JSGlobalObject, value: error{ OutOfMemory, JSError }!JSValue) JSValue { - const normal = value catch |err| switch (err) { +/// Convert the return value of a function returning an error union into a maybe-empty JSValue +pub fn toJSHostCall( + globalThis: *JSGlobalObject, + src: std.builtin.SourceLocation, + comptime function: anytype, + // This can't use std.meta.ArgsTuple because that will turn comptime function parameters into + // runtime tuple values + args: anytype, +) JSValue { + var scope: jsc.ExceptionValidationScope = undefined; + scope.init(globalThis, src); + defer scope.deinit(); + + const returned: error{ OutOfMemory, JSError }!JSValue = @call(.auto, function, args); + const normal = returned catch |err| switch (err) { error.JSError => .zero, error.OutOfMemory => globalThis.throwOutOfMemoryValue(), }; - if (Environment.allow_assert and Environment.is_canary) { - debugExceptionAssertion(globalThis, normal, toJSHostValue); - } + scope.assertExceptionPresenceMatches(normal == .zero); return normal; } -pub fn fromJSHostValue(value: JSValue) bun.JSError!JSValue { - if (value == .zero) 
return error.JSError; - return value; +/// Convert the return value of a function returning a maybe-empty JSValue into an error union. +/// The wrapped function must return an empty JSValue if and only if it has thrown an exception. +/// If your function does not follow this pattern (if it can return empty without an exception, or +/// throw an exception and return non-empty), either fix the function or write a custom wrapper with +/// CatchScope. +pub fn fromJSHostCall( + globalThis: *JSGlobalObject, + /// For attributing thrown exceptions + src: std.builtin.SourceLocation, + comptime function: anytype, + args: std.meta.ArgsTuple(@TypeOf(function)), +) bun.JSError!JSValue { + var scope: jsc.ExceptionValidationScope = undefined; + scope.init(globalThis, src); + defer scope.deinit(); + + const value = @call(.auto, function, args); + scope.assertExceptionPresenceMatches(value == .zero); + return if (value == .zero) error.JSError else value; +} + +pub fn fromJSHostCallGeneric( + globalThis: *JSGlobalObject, + /// For attributing thrown exceptions + src: std.builtin.SourceLocation, + comptime function: anytype, + args: std.meta.ArgsTuple(@TypeOf(function)), +) bun.JSError!@typeInfo(@TypeOf(function)).@"fn".return_type.? { + var scope: jsc.CatchScope = undefined; + scope.init(globalThis, src); + defer scope.deinit(); + + const result = @call(.auto, function, args); + // supporting JSValue would make it too easy to mix up this function with fromJSHostCall + // fromJSHostCall has the benefit of checking that the function is correctly returning an empty + // value if and only if it has thrown. + // fromJSHostCallGeneric is only for functions where the return value tells you nothing about + // whether an exception was thrown. + // + // alternatively, we could consider something like `comptime exception_sentinel: ?T` + // to generically support using a value of any type to signal exceptions (INT_MAX, infinity, + // nullptr...?) but it's unclear how often that would be useful + if (@TypeOf(result) == JSValue) @compileError("fromJSHostCallGeneric does not support JSValue"); + try scope.returnIfException(); + return result; } const ParsedHostFunctionErrorSet = struct { @@ -122,14 +175,7 @@ pub fn wrap1(comptime func: anytype) @"return": { const p = @typeInfo(@TypeOf(func)).@"fn".params; return struct { pub fn wrapped(arg0: p[0].type.?) callconv(.c) JSValue { - const value = func(arg0) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => arg0.throwOutOfMemoryValue(), - }; - if (Environment.allow_assert and Environment.is_canary) { - debugExceptionAssertion(arg0, value, func); - } - return value; + return toJSHostCall(arg0, @src(), func, .{arg0}); } }.wrapped; } @@ -141,14 +187,7 @@ pub fn wrap2(comptime func: anytype) @"return": { const p = @typeInfo(@TypeOf(func)).@"fn".params; return struct { pub fn wrapped(arg0: p[0].type.?, arg1: p[1].type.?) callconv(.c) JSValue { - const value = func(arg0, arg1) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => arg0.throwOutOfMemoryValue(), - }; - if (Environment.allow_assert and Environment.is_canary) { - debugExceptionAssertion(arg0, value, func); - } - return value; + return toJSHostCall(arg0, @src(), func, .{ arg0, arg1 }); } }.wrapped; } @@ -160,14 +199,7 @@ pub fn wrap3(comptime func: anytype) @"return": { const p = @typeInfo(@TypeOf(func)).@"fn".params; return struct { pub fn wrapped(arg0: p[0].type.?, arg1: p[1].type.?, arg2: p[2].type.?) 
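
`toJSHostCall`/`fromJSHostCall` collapse the five hand-rolled `wrapN` bodies and, via `ExceptionValidationScope`, assert the contract every Zig↔C++ crossing relies on: a host call returns an empty `JSValue` if and only if it left an exception on the VM. A sketch of a conforming host function from the C++ side (`exampleHostFn` is hypothetical):

```cpp
// Empty return <=> exception pending; both error and success paths uphold it.
JSC_DEFINE_HOST_FUNCTION(exampleHostFn,
    (JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame))
{
    auto& vm = JSC::getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (!callFrame->argument(0).isString())
        return throwVMTypeError(globalObject, scope, "expected a string"_s); // empty + exception
    return JSC::JSValue::encode(callFrame->argument(0)); // non-empty, no exception
}
```
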
callconv(.c) JSValue { - const value = func(arg0, arg1, arg2) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => arg0.throwOutOfMemoryValue(), - }; - if (Environment.allow_assert and Environment.is_canary) { - debugExceptionAssertion(arg0, value, func); - } - return value; + return toJSHostCall(arg0, @src(), func, .{ arg0, arg1, arg2 }); } }.wrapped; } @@ -179,14 +211,7 @@ pub fn wrap4(comptime func: anytype) @"return": { const p = @typeInfo(@TypeOf(func)).@"fn".params; return struct { pub fn wrapped(arg0: p[0].type.?, arg1: p[1].type.?, arg2: p[2].type.?, arg3: p[3].type.?) callconv(.c) JSValue { - const value = func(arg0, arg1, arg2, arg3) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => arg0.throwOutOfMemoryValue(), - }; - if (Environment.allow_assert and Environment.is_canary) { - debugExceptionAssertion(arg0, value, func); - } - return value; + return toJSHostCall(arg0, @src(), func, .{ arg0, arg1, arg2, arg3 }); } }.wrapped; } @@ -198,14 +223,7 @@ pub fn wrap5(comptime func: anytype) @"return": { const p = @typeInfo(@TypeOf(func)).@"fn".params; return struct { pub fn wrapped(arg0: p[0].type.?, arg1: p[1].type.?, arg2: p[2].type.?, arg3: p[3].type.?, arg4: p[4].type.?) callconv(.c) JSValue { - const value = func(arg0, arg1, arg2, arg3, arg4) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => arg0.throwOutOfMemoryValue(), - }; - if (Environment.allow_assert and Environment.is_canary) { - debugExceptionAssertion(arg0, value, func); - } - return value; + return toJSHostCall(arg0, @src(), func, .{ arg0, arg1, arg2, arg3, arg4 }); } }.wrapped; } @@ -469,7 +487,7 @@ pub fn DOMCall( arguments_ptr: [*]const jsc.JSValue, arguments_len: usize, ) callconv(jsc.conv) jsc.JSValue { - return jsc.toJSHostValue(globalObject, @field(Container, functionName)(globalObject, thisValue, arguments_ptr[0..arguments_len])); + return jsc.toJSHostCall(globalObject, @src(), @field(Container, functionName), .{ globalObject, thisValue, arguments_ptr[0..arguments_len] }); } pub const fastpath = @field(Container, functionName ++ "WithoutTypeChecks"); diff --git a/src/bun.js/modules/BunJSCModule.h b/src/bun.js/modules/BunJSCModule.h index 509cd96180..7465263605 100644 --- a/src/bun.js/modules/BunJSCModule.h +++ b/src/bun.js/modules/BunJSCModule.h @@ -584,8 +584,11 @@ JSC_DEFINE_HOST_FUNCTION(functionDrainMicrotasks, (JSGlobalObject * globalObject, CallFrame*)) { VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); vm.drainMicrotasks(); + RETURN_IF_EXCEPTION(scope, {}); Bun__drainMicrotasks(); + RETURN_IF_EXCEPTION(scope, {}); return JSValue::encode(jsUndefined()); } @@ -615,7 +618,7 @@ JSC_DEFINE_HOST_FUNCTION(functionSetTimeZone, (JSGlobalObject * globalObject, Ca return {}; } vm.dateCache.resetIfNecessarySlow(); - WTF::Vector buffer; + WTF::Vector buffer; WTF::getTimeZoneOverride(buffer); WTF::String timeZoneString(buffer.span()); return JSValue::encode(jsString(vm, timeZoneString)); @@ -767,7 +770,9 @@ JSC_DEFINE_HOST_FUNCTION(functionSerialize, bool asNodeBuffer = false; if (optionsObject.isObject()) { JSC::JSObject* options = optionsObject.getObject(); - if (JSC::JSValue binaryTypeValue = options->getIfPropertyExists(globalObject, JSC::Identifier::fromString(vm, "binaryType"_s))) { + auto binaryTypeValue = options->getIfPropertyExists(globalObject, JSC::Identifier::fromString(vm, "binaryType"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (binaryTypeValue) { if (!binaryTypeValue.isString()) { 
throwTypeError(globalObject, throwScope, "binaryType must be a string"_s); return {}; @@ -783,9 +788,8 @@ JSC_DEFINE_HOST_FUNCTION(functionSerialize, ExceptionOr> serialized = SerializedScriptValue::create(*globalObject, value, WTFMove(transferList), dummyPorts); if (serialized.hasException()) { - WebCore::propagateException(*globalObject, throwScope, - serialized.releaseException()); - return JSValue::encode(jsUndefined()); + WebCore::propagateException(*globalObject, throwScope, serialized.releaseException()); + RELEASE_AND_RETURN(throwScope, {}); } auto serializedValue = serialized.releaseReturnValue(); @@ -795,6 +799,7 @@ JSC_DEFINE_HOST_FUNCTION(functionSerialize, size_t byteLength = arrayBuffer->byteLength(); auto* subclassStructure = globalObject->JSBufferSubclassStructure(); JSC::JSUint8Array* uint8Array = JSC::JSUint8Array::create(lexicalGlobalObject, subclassStructure, WTFMove(arrayBuffer), 0, byteLength); + RETURN_IF_EXCEPTION(throwScope, {}); return JSValue::encode(uint8Array); } diff --git a/src/bun.js/modules/NodeBufferModule.h b/src/bun.js/modules/NodeBufferModule.h index eba221af55..8542ac4dcf 100644 --- a/src/bun.js/modules/NodeBufferModule.h +++ b/src/bun.js/modules/NodeBufferModule.h @@ -158,6 +158,7 @@ JSC_DEFINE_CUSTOM_SETTER(jsSetter_INSPECT_MAX_BYTES, (JSGlobalObject * lexicalGl DEFINE_NATIVE_MODULE(NodeBuffer) { INIT_NATIVE_MODULE(12); + auto scope = DECLARE_THROW_SCOPE(vm); put(JSC::Identifier::fromString(vm, "Buffer"_s), globalObject->JSBufferConstructor()); @@ -192,9 +193,11 @@ DEFINE_NATIVE_MODULE(NodeBuffer) JSC::Identifier atobI = JSC::Identifier::fromString(vm, "atob"_s); JSC::JSValue atobV = lexicalGlobalObject->get(globalObject, PropertyName(atobI)); + RETURN_IF_EXCEPTION(scope, ); JSC::Identifier btoaI = JSC::Identifier::fromString(vm, "btoa"_s); JSC::JSValue btoaV = lexicalGlobalObject->get(globalObject, PropertyName(btoaI)); + RETURN_IF_EXCEPTION(scope, ); put(atobI, atobV); put(btoaI, btoaV); diff --git a/src/bun.js/modules/NodeModuleModule.cpp b/src/bun.js/modules/NodeModuleModule.cpp index 20dce1e733..ffafec3306 100644 --- a/src/bun.js/modules/NodeModuleModule.cpp +++ b/src/bun.js/modules/NodeModuleModule.cpp @@ -20,21 +20,22 @@ #include "ErrorCode.h" #include "GeneratedNodeModuleModule.h" +#include "ZigGeneratedClasses.h" namespace Bun { using namespace JSC; +BUN_DECLARE_HOST_FUNCTION(Bun__JSSourceMap__find); + BUN_DECLARE_HOST_FUNCTION(Resolver__nodeModulePathsForJS); JSC_DECLARE_HOST_FUNCTION(jsFunctionDebugNoop); JSC_DECLARE_HOST_FUNCTION(jsFunctionFindPath); -JSC_DECLARE_HOST_FUNCTION(jsFunctionFindSourceMap); JSC_DECLARE_HOST_FUNCTION(jsFunctionIsBuiltinModule); JSC_DECLARE_HOST_FUNCTION(jsFunctionNodeModuleCreateRequire); JSC_DECLARE_HOST_FUNCTION(jsFunctionNodeModuleModuleConstructor); JSC_DECLARE_HOST_FUNCTION(jsFunctionResolveFileName); JSC_DECLARE_HOST_FUNCTION(jsFunctionResolveLookupPaths); -JSC_DECLARE_HOST_FUNCTION(jsFunctionSourceMap); JSC_DECLARE_HOST_FUNCTION(jsFunctionSyncBuiltinExports); JSC_DECLARE_HOST_FUNCTION(jsFunctionWrap); @@ -252,13 +253,13 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionNodeModuleCreateRequire, ERR::INVALID_ARG_VALUE(scope, globalObject, "filename"_s, argument, "must be a file URL object, file URL string, or absolute path string"_s); - RELEASE_AND_RETURN(scope, JSValue::encode({})); + RELEASE_AND_RETURN(scope, {}); } if (!url.protocolIsFile()) { ERR::INVALID_ARG_VALUE(scope, globalObject, "filename"_s, argument, "must be a file URL object, file URL string, or absolute path string"_s); - RELEASE_AND_RETURN(scope, 
JSValue::encode({})); + RELEASE_AND_RETURN(scope, {}); } val = url.fileSystemPath(); } @@ -287,13 +288,6 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionNodeModuleCreateRequire, scope, JSValue::encode(Bun::JSCommonJSModule::createBoundRequireFunction(vm, globalObject, val))); } -JSC_DEFINE_HOST_FUNCTION(jsFunctionFindSourceMap, - (JSGlobalObject * globalObject, - CallFrame* callFrame)) -{ - return JSValue::encode(jsUndefined()); -} - JSC_DEFINE_HOST_FUNCTION(jsFunctionSyncBuiltinExports, (JSGlobalObject * globalObject, CallFrame* callFrame)) @@ -301,41 +295,26 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionSyncBuiltinExports, return JSValue::encode(jsUndefined()); } -JSC_DEFINE_HOST_FUNCTION(jsFunctionSourceMap, (JSGlobalObject * globalObject, CallFrame* callFrame)) -{ - auto& vm = JSC::getVM(globalObject); - auto scope = DECLARE_THROW_SCOPE(vm); - throwException(globalObject, scope, - createError(globalObject, "Not implemented"_s)); - return {}; -} - JSC_DEFINE_HOST_FUNCTION(jsFunctionResolveFileName, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { auto& vm = JSC::getVM(globalObject); + auto scope = DECLARE_THROW_SCOPE(vm); switch (callFrame->argumentCount()) { case 0: { - auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); // not "requires" because "require" could be confusing - JSC::throwTypeError( - globalObject, scope, - "Module._resolveFilename needs 2+ arguments (a string)"_s); - scope.release(); - return JSC::JSValue::encode(JSC::JSValue {}); + JSC::throwTypeError(globalObject, scope, "Module._resolveFilename needs 2+ arguments (a string)"_s); + return {}; } default: { JSC::JSValue moduleName = callFrame->argument(0); JSC::JSValue fromValue = callFrame->argument(1); if (moduleName.isUndefinedOrNull()) { - auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); - JSC::throwTypeError(globalObject, scope, - "Module._resolveFilename expects a string"_s); - scope.release(); - return JSC::JSValue::encode(JSC::JSValue {}); + JSC::throwTypeError(globalObject, scope, "Module._resolveFilename expects a string"_s); + return {}; } if ( @@ -347,28 +326,23 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionResolveFileName, // weird thing. 
(fromValue.isObject()) { - if (auto idValue = fromValue.getObject()->getIfPropertyExists( - globalObject, builtinNames(vm).filenamePublicName())) { + auto idValue = fromValue.getObject()->getIfPropertyExists(globalObject, builtinNames(vm).filenamePublicName()); + RETURN_IF_EXCEPTION(scope, {}); + if (idValue) { if (idValue.isString()) { fromValue = idValue; } } } - auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); - auto result = Bun__resolveSync( - globalObject, - JSC::JSValue::encode(moduleName), JSValue::encode(fromValue), - false, - true); + auto result = Bun__resolveSync(globalObject, JSC::JSValue::encode(moduleName), JSValue::encode(fromValue), false, true); RETURN_IF_EXCEPTION(scope, {}); if (!JSC::JSValue::decode(result).isString()) { JSC::throwException(globalObject, scope, JSC::JSValue::decode(result)); - return JSC::JSValue::encode(JSC::JSValue {}); + return {}; } - scope.release(); return result; } } @@ -596,10 +570,10 @@ static JSValue getPathCacheObject(VM& vm, JSObject* moduleObject) static JSValue getSourceMapFunction(VM& vm, JSObject* moduleObject) { auto* globalObject = defaultGlobalObject(moduleObject->globalObject()); - JSFunction* sourceMapFunction = JSFunction::create( - vm, globalObject, 1, "SourceMap"_s, jsFunctionSourceMap, - ImplementationVisibility::Public, NoIntrinsic, jsFunctionSourceMap); - return sourceMapFunction; + auto* zigGlobalObject = jsCast(globalObject); + + // Return the actual SourceMap constructor from code generation + return zigGlobalObject->JSSourceMapConstructor(); } static JSValue getBuiltinModulesObject(VM& vm, JSObject* moduleObject) @@ -612,9 +586,7 @@ static JSValue getBuiltinModulesObject(VM& vm, JSObject* moduleObject) } auto* globalObject = defaultGlobalObject(moduleObject->globalObject()); - return JSC::constructArray( - globalObject, static_cast(nullptr), - JSC::ArgList(args)); + return JSC::constructArray(globalObject, static_cast(nullptr), JSC::ArgList(args)); } static JSValue getConstantsObject(VM& vm, JSObject* moduleObject) @@ -860,7 +832,7 @@ builtinModules getBuiltinModulesObject PropertyCallback constants getConstantsObject PropertyCallback createRequire jsFunctionNodeModuleCreateRequire Function 1 enableCompileCache jsFunctionEnableCompileCache Function 0 -findSourceMap jsFunctionFindSourceMap Function 0 +findSourceMap Bun__JSSourceMap__find Function 1 getCompileCacheDir jsFunctionGetCompileCacheDir Function 0 globalPaths getGlobalPathsObject PropertyCallback isBuiltin jsFunctionIsBuiltinModule Function 1 @@ -931,6 +903,82 @@ const JSC::ClassInfo JSModuleConstructor::s_info = { CREATE_METHOD_TABLE(JSModuleConstructor) }; +static JSC::Structure* createNodeModuleSourceMapEntryStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +{ + Structure* structure = globalObject->structureCache().emptyObjectStructureForPrototype(globalObject, globalObject->objectPrototype(), 6); + PropertyOffset offset; + + structure = Structure::addPropertyTransition(vm, structure, Identifier::fromString(vm, "generatedLine"), 0, offset); + RELEASE_ASSERT(offset == 0); + structure = Structure::addPropertyTransition(vm, structure, Identifier::fromString(vm, "generatedColumn"), 0, offset); + RELEASE_ASSERT(offset == 1); + structure = Structure::addPropertyTransition(vm, structure, Identifier::fromString(vm, "originalLine"), 0, offset); + RELEASE_ASSERT(offset == 2); + structure = Structure::addPropertyTransition(vm, structure, Identifier::fromString(vm, "originalColumn"), 0, offset); + RELEASE_ASSERT(offset == 3); + structure = 
Structure::addPropertyTransition(vm, structure, Identifier::fromString(vm, "originalSource"), 0, offset); + RELEASE_ASSERT(offset == 4); + structure = Structure::addPropertyTransition(vm, structure, vm.propertyNames->name, 0, offset); + RELEASE_ASSERT(offset == 5); + + return structure; +} + +extern "C" JSC::EncodedJSValue Bun__createNodeModuleSourceMapEntryObject( + JSC::JSGlobalObject* globalObject, + JSC::EncodedJSValue encodedGeneratedLine, + JSC::EncodedJSValue encodedGeneratedColumn, + JSC::EncodedJSValue encodedOriginalLine, + JSC::EncodedJSValue encodedOriginalColumn, + JSC::EncodedJSValue encodedOriginalSource, + JSC::EncodedJSValue encodedName) +{ + auto& vm = globalObject->vm(); + auto* zigGlobalObject = defaultGlobalObject(globalObject); + JSObject* object = JSC::constructEmptyObject(vm, zigGlobalObject->m_nodeModuleSourceMapEntryStructure.getInitializedOnMainThread(zigGlobalObject)); + object->putDirectOffset(vm, 0, JSC::JSValue::decode(encodedGeneratedLine)); + object->putDirectOffset(vm, 1, JSC::JSValue::decode(encodedGeneratedColumn)); + object->putDirectOffset(vm, 2, JSC::JSValue::decode(encodedOriginalLine)); + object->putDirectOffset(vm, 3, JSC::JSValue::decode(encodedOriginalColumn)); + object->putDirectOffset(vm, 4, JSC::JSValue::decode(encodedOriginalSource)); + object->putDirectOffset(vm, 5, JSC::JSValue::decode(encodedName)); + return JSValue::encode(object); +} + +static JSC::Structure* createNodeModuleSourceMapOriginStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +{ + Structure* structure = globalObject->structureCache().emptyObjectStructureForPrototype(globalObject, globalObject->objectPrototype(), 4); + PropertyOffset offset; + + structure = Structure::addPropertyTransition(vm, structure, vm.propertyNames->name, 0, offset); + RELEASE_ASSERT(offset == 0); + structure = Structure::addPropertyTransition(vm, structure, Identifier::fromString(vm, "line"), 0, offset); + RELEASE_ASSERT(offset == 1); + structure = Structure::addPropertyTransition(vm, structure, Identifier::fromString(vm, "column"), 0, offset); + RELEASE_ASSERT(offset == 2); + structure = Structure::addPropertyTransition(vm, structure, Identifier::fromString(vm, "fileName"), 0, offset); + RELEASE_ASSERT(offset == 3); + + return structure; +} + +extern "C" JSC::EncodedJSValue Bun__createNodeModuleSourceMapOriginObject( + JSC::JSGlobalObject* globalObject, + JSC::EncodedJSValue encodedName, + JSC::EncodedJSValue encodedLine, + JSC::EncodedJSValue encodedColumn, + JSC::EncodedJSValue encodedSource) +{ + auto& vm = globalObject->vm(); + auto* zigGlobalObject = defaultGlobalObject(globalObject); + JSObject* object = JSC::constructEmptyObject(vm, zigGlobalObject->m_nodeModuleSourceMapOriginStructure.getInitializedOnMainThread(zigGlobalObject)); + object->putDirectOffset(vm, 0, JSC::JSValue::decode(encodedName)); + object->putDirectOffset(vm, 1, JSC::JSValue::decode(encodedLine)); + object->putDirectOffset(vm, 2, JSC::JSValue::decode(encodedColumn)); + object->putDirectOffset(vm, 3, JSC::JSValue::decode(encodedSource)); + return JSValue::encode(object); +} + void addNodeModuleConstructorProperties(JSC::VM& vm, Zig::GlobalObject* globalObject) { @@ -941,6 +989,15 @@ void addNodeModuleConstructorProperties(JSC::VM& vm, init.set(moduleConstructor); }); + globalObject->m_nodeModuleSourceMapEntryStructure.initLater( + [](const Zig::GlobalObject::Initializer& init) { + init.set(createNodeModuleSourceMapEntryStructure(init.vm, init.owner)); + }); + 
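The hunks above define `extern "C"` helpers (`Bun__createNodeModuleSourceMapEntryObject`, `Bun__createNodeModuleSourceMapOriginObject`) whose cached structures pin each property to a fixed offset, so the Zig side of `findSourceMap` can materialize result objects without per-property puts. A rough sketch of what the Zig-side declaration and call could look like; the `@import("bun")` module path, the `Mapping` struct, and treating `JSC.JSValue` as ABI-compatible with `JSC::EncodedJSValue` are my assumptions, not something this diff shows:

```zig
const bun = @import("bun");
const JSC = bun.JSC;

// Mirrors the C++ definition above: six values laid down at the
// structure's fixed offsets 0..5.
extern fn Bun__createNodeModuleSourceMapEntryObject(
    globalObject: *JSC.JSGlobalObject,
    generatedLine: JSC.JSValue,
    generatedColumn: JSC.JSValue,
    originalLine: JSC.JSValue,
    originalColumn: JSC.JSValue,
    originalSource: JSC.JSValue,
    name: JSC.JSValue,
) JSC.JSValue;

// Hypothetical mapping record and wrapper, for illustration only.
const Mapping = struct {
    generated_line: i32,
    generated_column: i32,
    original_line: i32,
    original_column: i32,
};

fn createEntry(global: *JSC.JSGlobalObject, m: Mapping, source: JSC.JSValue, name: JSC.JSValue) JSC.JSValue {
    return Bun__createNodeModuleSourceMapEntryObject(
        global,
        JSC.JSValue.jsNumber(m.generated_line),
        JSC.JSValue.jsNumber(m.generated_column),
        JSC.JSValue.jsNumber(m.original_line),
        JSC.JSValue.jsNumber(m.original_column),
        source,
        name,
    );
}
```

The `RELEASE_ASSERT(offset == N)` checks in the structure builders are what make the blind `putDirectOffset(vm, N, ...)` stores safe.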
globalObject->m_nodeModuleSourceMapOriginStructure.initLater( + [](const Zig::GlobalObject::Initializer& init) { + init.set(createNodeModuleSourceMapOriginStructure(init.vm, init.owner)); + }); + globalObject->m_moduleRunMainFunction.initLater( [](const Zig::GlobalObject::Initializer& init) { JSFunction* runMainFunction = JSFunction::create( @@ -1016,8 +1073,7 @@ void generateNativeModule_NodeModule(JSC::JSGlobalObject* lexicalGlobalObject, Zig::GlobalObject* globalObject = defaultGlobalObject(lexicalGlobalObject); auto& vm = JSC::getVM(globalObject); auto catchScope = DECLARE_CATCH_SCOPE(vm); - auto* constructor = globalObject->m_nodeModuleConstructor.getInitializedOnMainThread( - globalObject); + auto* constructor = globalObject->m_nodeModuleConstructor.getInitializedOnMainThread(globalObject); if (constructor->hasNonReifiedStaticProperties()) { constructor->reifyAllStaticProperties(globalObject); if (catchScope.exception()) { @@ -1031,19 +1087,15 @@ void generateNativeModule_NodeModule(JSC::JSGlobalObject* lexicalGlobalObject, exportNames.append(vm.propertyNames->defaultKeyword); exportValues.append(constructor); - for (unsigned i = 0; i < Bun::countof(Bun::nodeModuleObjectTableValues); - ++i) { + for (unsigned i = 0; i < Bun::countof(Bun::nodeModuleObjectTableValues); ++i) { const auto& entry = Bun::nodeModuleObjectTableValues[i]; const auto& property = Identifier::fromString(vm, entry.m_key); - JSValue value = constructor->getIfPropertyExists(globalObject, property); + JSValue value = constructor->get(globalObject, property); if (catchScope.exception()) [[unlikely]] { value = {}; catchScope.clearException(); } - if (value.isEmpty()) [[unlikely]] { - value = JSC::jsUndefined(); - } exportNames.append(property); exportValues.append(value); diff --git a/src/bun.js/modules/NodeProcessModule.h b/src/bun.js/modules/NodeProcessModule.h index 605095387b..7022433828 100644 --- a/src/bun.js/modules/NodeProcessModule.h +++ b/src/bun.js/modules/NodeProcessModule.h @@ -15,13 +15,13 @@ DEFINE_NATIVE_MODULE(NodeProcess) Bun::Process* process = globalObject->processObject(); if (!process->staticPropertiesReified()) { process->reifyAllStaticProperties(globalObject); - if (scope.exception()) + if (scope.exception()) [[unlikely]] return; } PropertyNameArray properties(vm, PropertyNameMode::Strings, PrivateSymbolMode::Exclude); process->getPropertyNames(globalObject, properties, DontEnumPropertiesMode::Exclude); - if (scope.exception()) + if (scope.exception()) [[unlikely]] return; exportNames.append(vm.propertyNames->defaultKeyword); diff --git a/src/bun.js/modules/NodeUtilTypesModule.cpp b/src/bun.js/modules/NodeUtilTypesModule.cpp index e3217555a4..f89b9cc808 100644 --- a/src/bun.js/modules/NodeUtilTypesModule.cpp +++ b/src/bun.js/modules/NodeUtilTypesModule.cpp @@ -140,7 +140,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionIsError, } } - JSValue proto = object->getPrototype(vm, globalObject); + JSValue proto = object->getPrototype(globalObject); if (proto.isCell() && (proto.inherits() || proto.asCell()->type() == ErrorInstanceType || proto.inherits())) return JSValue::encode(jsBoolean(true)); } @@ -193,8 +193,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionIsAsyncFunction, return JSValue::encode(jsBoolean(true)); } - auto& vm = JSC::getVM(globalObject); - auto proto = function->getPrototype(vm, globalObject); + auto proto = function->getPrototype(globalObject); if (!proto.isCell()) { return JSValue::encode(jsBoolean(false)); } diff --git a/src/bun.js/modules/ObjectModule.cpp b/src/bun.js/modules/ObjectModule.cpp 
index fa37494a80..125cd79481 100644 --- a/src/bun.js/modules/ObjectModule.cpp +++ b/src/bun.js/modules/ObjectModule.cpp @@ -26,7 +26,7 @@ generateObjectModuleSourceCode(JSC::JSGlobalObject* globalObject, auto scope = DECLARE_CATCH_SCOPE(vm); JSValue value = object->get(globalObject, entry); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); value = jsUndefined(); } @@ -66,7 +66,7 @@ generateObjectModuleSourceCodeForJSON(JSC::JSGlobalObject* globalObject, auto scope = DECLARE_CATCH_SCOPE(vm); JSValue value = object->get(globalObject, entry); - if (scope.exception()) { + if (scope.exception()) [[unlikely]] { scope.clearException(); value = jsUndefined(); } diff --git a/src/bun.js/node.zig b/src/bun.js/node.zig index 0c12e67007..83cde3e878 100644 --- a/src/bun.js/node.zig +++ b/src/bun.js/node.zig @@ -193,7 +193,7 @@ pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { }; } - pub fn toJS(this: @This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJS(this: @This(), globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { return switch (this) { .result => |r| switch (ReturnType) { JSC.JSValue => r, @@ -215,14 +215,14 @@ pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { }, }, }, - .err => |e| e.toJSC(globalObject), + .err => |e| e.toJS(globalObject), }; } pub fn toArrayBuffer(this: @This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { return switch (this) { .result => |r| JSC.ArrayBuffer.fromBytes(r, .ArrayBuffer).toJS(globalObject, null), - .err => |e| e.toJSC(globalObject), + .err => |e| e.toJS(globalObject), }; } diff --git a/src/bun.js/node/Stat.zig b/src/bun.js/node/Stat.zig index a873f604a5..3a3e77ef4b 100644 --- a/src/bun.js/node/Stat.zig +++ b/src/bun.js/node/Stat.zig @@ -44,7 +44,7 @@ pub fn StatType(comptime big: bool) type { } } - pub fn toJS(this: *const @This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJS(this: *const @This(), globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { return statToJS(&this.value, globalObject); } @@ -56,7 +56,7 @@ pub fn StatType(comptime big: bool) type { return @intCast(@min(@max(value, 0), std.math.maxInt(i64))); } - fn statToJS(stat_: *const bun.Stat, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + fn statToJS(stat_: *const bun.Stat, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { const aTime = stat_.atime(); const mTime = stat_.mtime(); const cTime = stat_.ctime(); @@ -80,7 +80,7 @@ pub fn StatType(comptime big: bool) type { const birthtime_ns: u64 = if (big and !Environment.isLinux) toNanoseconds(stat_.birthtime()) else 0; if (big) { - return Bun__createJSBigIntStatsObject( + return bun.jsc.fromJSHostCall(globalObject, @src(), Bun__createJSBigIntStatsObject, .{ globalObject, dev, ino, @@ -100,7 +100,7 @@ pub fn StatType(comptime big: bool) type { mtime_ns, ctime_ns, birthtime_ns, - ); + }); } return Bun__createJSStatsObject( @@ -188,7 +188,7 @@ pub const Stats = union(enum) { } } - pub fn toJSNewlyCreated(this: *const Stats, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJSNewlyCreated(this: *const Stats, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { return switch (this.*) { .big => this.big.toJS(globalObject), .small => this.small.toJS(globalObject), diff --git a/src/bun.js/node/StatFS.zig b/src/bun.js/node/StatFS.zig index a8414a2ba4..8066f87b1d 100644 --- a/src/bun.js/node/StatFS.zig +++ b/src/bun.js/node/StatFS.zig @@ -15,13 +15,13 @@ pub fn StatFSType(comptime big: 
bool) type { const This = @This(); - pub fn toJS(this: *const This, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJS(this: *const This, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { return statfsToJS(this, globalObject); } - fn statfsToJS(this: *const This, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + fn statfsToJS(this: *const This, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { if (big) { - return Bun__createJSBigIntStatFSObject( + return bun.jsc.fromJSHostCall(globalObject, @src(), Bun__createJSBigIntStatFSObject, .{ globalObject, this._fstype, this._bsize, @@ -30,7 +30,7 @@ pub fn StatFSType(comptime big: bool) type { this._bavail, this._files, this._ffree, - ); + }); } return Bun__createJSStatFSObject( @@ -121,7 +121,7 @@ pub const StatFS = union(enum) { } } - pub fn toJSNewlyCreated(this: *const StatFS, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJSNewlyCreated(this: *const StatFS, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { return switch (this.*) { .big => |*big| big.toJS(globalObject), .small => |*small| small.toJS(globalObject), diff --git a/src/bun.js/node/dir_iterator.zig b/src/bun.js/node/dir_iterator.zig index c9ba26706b..d6a23f3fe1 100644 --- a/src/bun.js/node/dir_iterator.zig +++ b/src/bun.js/node/dir_iterator.zig @@ -18,6 +18,7 @@ const IteratorError = error{ AccessDenied, SystemResources } || posix.Unexpected const mem = std.mem; const strings = bun.strings; const Maybe = JSC.Maybe; +const FD = bun.FD; pub const IteratorResult = struct { name: PathString, @@ -50,7 +51,7 @@ pub const IteratorW = NewIterator(true); pub fn NewIterator(comptime use_windows_ospath: bool) type { return switch (builtin.os.tag) { .macos, .ios, .freebsd, .netbsd, .dragonfly, .openbsd, .solaris => struct { - dir: Dir, + dir: FD, seek: i64, buf: [8192]u8 align(@alignOf(std.posix.system.dirent)), index: usize, @@ -61,10 +62,6 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { pub const Error = IteratorError; - fn fd(self: *Self) posix.fd_t { - return self.dir.fd; - } - /// Memory such as file names referenced in this returned entry becomes invalid /// with subsequent calls to `next`, as well as when this `Dir` is deinitialized. pub const next = switch (builtin.os.tag) { @@ -94,7 +91,7 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { self.buf[self.buf.len - 4 ..][0..4].* = .{ 0, 0, 0, 0 }; const rc = posix.system.__getdirentries64( - self.dir.fd, + self.dir.cast(), &self.buf, self.buf.len, &self.seek, @@ -146,7 +143,7 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { } }, .linux => struct { - dir: Dir, + dir: FD, // The if guard is solely there to prevent compile errors from missing `linux.dirent64` // definition when compiling for other OSes. It doesn't do anything when compiling for Linux. buf: [8192]u8 align(if (builtin.os.tag != .linux) 1 else @alignOf(linux.dirent64)), @@ -158,16 +155,12 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { pub const Error = IteratorError; - fn fd(self: *Self) posix.fd_t { - return self.dir.fd; - } - /// Memory such as file names referenced in this returned entry becomes invalid /// with subsequent calls to `next`, as well as when this `Dir` is deinitialized. 
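These dir_iterator hunks swap the stored `std.fs.Dir` for bun's `FD` wrapper, delete the `fd()` accessor, and call `self.dir.cast()` wherever a raw descriptor is needed; the logging hunks further down also format the `FD` directly instead of going through `bun.FD.fromStdDir`. A minimal, runnable stand-in for that wrapper shape (the real `bun.FD` also distinguishes Windows handles from libuv descriptors, so this is only the POSIX-flavored idea):

```zig
const std = @import("std");

// Simplified stand-in for bun.FD: a typed wrapper around the OS descriptor.
const FD = struct {
    value: std.posix.fd_t,

    pub fn cast(self: FD) std.posix.fd_t {
        return self.value;
    }

    // Lets std.fmt print the FD directly, which is why the syslog calls in
    // the diff can drop the bun.FD.fromStdDir(...) conversion.
    pub fn format(self: FD, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
        try writer.print("fd({d})", .{self.value});
    }
};

pub fn main() !void {
    var dir = try std.fs.cwd().openDir(".", .{ .iterate = true });
    defer dir.close();
    const fd = FD{ .value = dir.fd };
    std.debug.print("iterating {} (raw {d})\n", .{ fd, fd.cast() });
}
```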
pub fn next(self: *Self) Result { start_over: while (true) { if (self.index >= self.end_index) { - const rc = linux.getdents64(self.dir.fd, &self.buf, self.buf.len); + const rc = linux.getdents64(self.dir.cast(), &self.buf, self.buf.len); if (Result.errnoSys(rc, .getdents64)) |err| return err; if (rc == 0) return .{ .result = null }; self.index = 0; @@ -208,7 +201,7 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { // While the official api docs guarantee FILE_BOTH_DIR_INFORMATION to be aligned properly // this may not always be the case (e.g. due to faulty VM/Sandboxing tools) const FILE_DIRECTORY_INFORMATION_PTR = *align(2) FILE_DIRECTORY_INFORMATION; - dir: Dir, + dir: FD, // This structure must be aligned on a LONGLONG (8-byte) boundary. // If a buffer contains two or more of these structures, the @@ -227,10 +220,6 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { const ResultT = if (use_windows_ospath) ResultW else Result; - fn fd(self: *Self) posix.fd_t { - return self.dir.fd; - } - /// Memory such as file names referenced in this returned entry becomes invalid /// with subsequent calls to `next`, as well as when this `Dir` is deinitialized. pub fn next(self: *Self) ResultT { @@ -244,7 +233,7 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { } const rc = w.ntdll.NtQueryDirectoryFile( - self.dir.fd, + self.dir.cast(), null, null, null, @@ -259,14 +248,14 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { self.first = false; if (io.Information == 0) { - bun.sys.syslog("NtQueryDirectoryFile({}) = 0", .{bun.FD.fromStdDir(self.dir)}); + bun.sys.syslog("NtQueryDirectoryFile({}) = 0", .{self.dir}); return .{ .result = null }; } self.index = 0; self.end_index = io.Information; // If the handle is not a directory, we'll get STATUS_INVALID_PARAMETER. 
if (rc == .INVALID_PARAMETER) { - bun.sys.syslog("NtQueryDirectoryFile({}) = {s}", .{ bun.FD.fromStdDir(self.dir), @tagName(rc) }); + bun.sys.syslog("NtQueryDirectoryFile({}) = {s}", .{ self.dir, @tagName(rc) }); return .{ .err = .{ .errno = @intFromEnum(bun.sys.SystemErrno.ENOTDIR), @@ -276,13 +265,13 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { } if (rc == .NO_MORE_FILES) { - bun.sys.syslog("NtQueryDirectoryFile({}) = {s}", .{ bun.FD.fromStdDir(self.dir), @tagName(rc) }); + bun.sys.syslog("NtQueryDirectoryFile({}) = {s}", .{ self.dir, @tagName(rc) }); self.end_index = self.index; return .{ .result = null }; } if (rc != .SUCCESS) { - bun.sys.syslog("NtQueryDirectoryFile({}) = {s}", .{ bun.FD.fromStdDir(self.dir), @tagName(rc) }); + bun.sys.syslog("NtQueryDirectoryFile({}) = {s}", .{ self.dir, @tagName(rc) }); if ((bun.windows.Win32Error.fromNTStatus(rc).toSystemErrno())) |errno| { return .{ @@ -301,7 +290,7 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { }; } - bun.sys.syslog("NtQueryDirectoryFile({}) = {d}", .{ bun.FD.fromStdDir(self.dir), self.end_index }); + bun.sys.syslog("NtQueryDirectoryFile({}) = {d}", .{ self.dir, self.end_index }); } const dir_info: FILE_DIRECTORY_INFORMATION_PTR = @ptrCast(@alignCast(&self.buf[self.index])); @@ -356,7 +345,7 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { } }, .wasi => struct { - dir: Dir, + dir: FD, buf: [8192]u8, // TODO align(@alignOf(os.wasi.dirent_t)), cookie: u64, index: usize, @@ -366,10 +355,6 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { pub const Error = IteratorError; - fn fd(self: *Self) posix.fd_t { - return self.dir.fd; - } - /// Memory such as file names referenced in this returned entry becomes invalid /// with subsequent calls to `next`, as well as when this `Dir` is deinitialized. pub fn next(self: *Self) Result { @@ -380,7 +365,7 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { start_over: while (true) { if (self.index >= self.end_index) { var bufused: usize = undefined; - switch (w.fd_readdir(self.fd, &self.buf, self.buf.len, self.cookie, &bufused)) { + switch (w.fd_readdir(self.dir.cast(), &self.buf, self.buf.len, self.cookie, &bufused)) { .SUCCESS => {}, .BADF => unreachable, // Dir is invalid or was opened without iteration ability .FAULT => unreachable, @@ -440,13 +425,9 @@ pub fn NewWrappedIterator(comptime path_type: PathType) type { return self.iter.next(); } - pub inline fn fd(self: *Self) posix.fd_t { - return self.iter.fd(); - } - pub const Error = IteratorError; - pub fn init(dir: Dir) Self { + pub fn init(dir: FD) Self { return Self{ .iter = switch (builtin.os.tag) { .macos, @@ -494,6 +475,6 @@ pub fn NewWrappedIterator(comptime path_type: PathType) type { pub const WrappedIterator = NewWrappedIterator(.u8); pub const WrappedIteratorW = NewWrappedIterator(.u16); -pub fn iterate(self: Dir, comptime path_type: PathType) NewWrappedIterator(path_type) { +pub fn iterate(self: FD, comptime path_type: PathType) NewWrappedIterator(path_type) { return NewWrappedIterator(path_type).init(self); } diff --git a/src/bun.js/node/node_assert.zig b/src/bun.js/node/node_assert.zig index 619c00997c..69dcaa0639 100644 --- a/src/bun.js/node/node_assert.zig +++ b/src/bun.js/node/node_assert.zig @@ -114,7 +114,7 @@ fn diffLines( fn diffListToJS(comptime T: type, global: *JSC.JSGlobalObject, diff_list: MyersDiff.DiffList(T)) bun.JSError!JSC.JSValue { var array = try JSC.JSValue.createEmptyArray(global, diff_list.items.len); for (diff_list.items, 0..) 
|*line, i| { - array.putIndex(global, @truncate(i), (try JSC.JSObject.createNullProto(line.*, global)).toJS()); + try array.putIndex(global, @truncate(i), (try JSC.JSObject.createNullProto(line.*, global)).toJS()); } return array; } diff --git a/src/bun.js/node/node_crypto_binding.zig b/src/bun.js/node/node_crypto_binding.zig index 4931d46ed1..df0f5313cf 100644 --- a/src/bun.js/node/node_crypto_binding.zig +++ b/src/bun.js/node/node_crypto_binding.zig @@ -48,7 +48,7 @@ fn ExternCryptoJob(comptime name: []const u8) type { } pub fn createAndSchedule(global: *JSGlobalObject, ctx: *Ctx, callback: JSValue) callconv(.c) void { - var job = create(global, ctx, callback); + var job = create(global, ctx, callback.withAsyncContextIfNeeded(global)); job.schedule(); } @@ -142,7 +142,7 @@ fn CryptoJob(comptime Ctx: type) type { }, .any_task = undefined, .ctx = ctx.*, - .callback = .create(callback, global), + .callback = .create(callback.withAsyncContextIfNeeded(global), global), }); errdefer bun.destroy(job); try job.ctx.init(global); @@ -266,6 +266,8 @@ const random = struct { const res = std.crypto.random.intRangeLessThan(i64, min, max); if (!callback.isUndefined()) { + callback = callback.withAsyncContextIfNeeded(global); + callback.callNextTick(global, [2]JSValue{ .js_undefined, JSValue.jsNumber(res) }); return .js_undefined; } @@ -513,7 +515,7 @@ fn getHashes(global: *JSGlobalObject, _: *JSC.CallFrame) JSError!JSValue { for (hashes.keys(), 0..) |hash, i| { const str = String.createUTF8ForJS(global, hash); - array.putIndex(global, @intCast(i), str); + try array.putIndex(global, @intCast(i), str); } return array; diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 3bbe2deca5..324a45e3de 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -269,7 +269,7 @@ pub const Async = struct { this.result = @field(NodeFS, "uv_" ++ @tagName(FunctionEnum))(&node_fs, this.args, @intFromEnum(req.result)); if (this.result == .err) { - this.result.err = this.result.err.clone(bun.default_allocator) catch bun.outOfMemory(); + this.result.err = this.result.err.clone(bun.default_allocator); std.mem.doNotOptimizeAway(&node_fs); } @@ -283,7 +283,7 @@ pub const Async = struct { this.result = @field(NodeFS, "uv_" ++ @tagName(FunctionEnum))(&node_fs, this.args, req, @intFromEnum(req.result)); if (this.result == .err) { - this.result.err = this.result.err.clone(bun.default_allocator) catch bun.outOfMemory(); + this.result.err = this.result.err.clone(bun.default_allocator); std.mem.doNotOptimizeAway(&node_fs); } @@ -296,7 +296,7 @@ pub const Async = struct { var promise_value = this.promise.value(); var promise = this.promise.get(); const result = switch (this.result) { - .err => |err| err.toJSC(globalObject), + .err => |err| err.toJS(globalObject), .result => |*res| brk: { break :brk globalObject.toJS(res) catch return promise.reject(globalObject, error.JSError); }, @@ -382,7 +382,7 @@ pub const Async = struct { this.result = function(&node_fs, this.args, .@"async"); if (this.result == .err) { - this.result.err = this.result.err.clone(bun.default_allocator) catch bun.outOfMemory(); + this.result.err = this.result.err.clone(bun.default_allocator); std.mem.doNotOptimizeAway(&node_fs); } @@ -395,7 +395,7 @@ pub const Async = struct { var promise_value = this.promise.value(); var promise = this.promise.get(); const result = switch (this.result) { - .err => |err| err.toJSC(globalObject), + .err => |err| err.toJS(globalObject), .result => |*res| brk: { break :brk 
globalObject.toJS(res) catch return promise.reject(globalObject, error.JSError); }, @@ -642,7 +642,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { this.result = result; if (this.result == .err) { - this.result.err = this.result.err.clone(bun.default_allocator) catch bun.outOfMemory(); + this.result.err = this.result.err.clone(bun.default_allocator); } if (this.evtloop == .js) { @@ -669,7 +669,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { var promise_value = this.promise.value(); var promise = this.promise.get(); const result = switch (this.result) { - .err => |err| err.toJSC(globalObject), + .err => |err| err.toJS(globalObject), .result => |*res| brk: { break :brk globalObject.toJS(res) catch return promise.reject(globalObject, error.JSError); }, @@ -859,8 +859,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { }, } - const dir = fd.stdDir(); - var iterator = DirIterator.iterate(dir, if (Environment.isWindows) .u16 else .u8); + var iterator = DirIterator.iterate(fd, if (Environment.isWindows) .u16 else .u8); var entry = iterator.next(); while (switch (entry) { .err => |err| { @@ -1220,7 +1219,7 @@ pub const AsyncReaddirRecursiveTask = struct { const success = this.pending_err == null; var promise_value = this.promise.value(); var promise = this.promise.get(); - const result = if (this.pending_err) |*err| err.toJSC(globalObject) else brk: { + const result = if (this.pending_err) |*err| err.toJS(globalObject) else brk: { const res = switch (this.result_list) { .with_file_types => |*res| Return.Readdir{ .with_file_types = res.moveToUnmanaged().items }, .buffers => |*res| Return.Readdir{ .buffers = res.moveToUnmanaged().items }, @@ -1577,7 +1576,7 @@ pub const Arguments = struct { }; errdefer path.deinit(); - const atime = JSC.Node.timeLikeFromJS(ctx, arguments.next() orelse { + const atime = try JSC.Node.timeLikeFromJS(ctx, arguments.next() orelse { return ctx.throwInvalidArguments("atime is required", .{}); }) orelse { return ctx.throwInvalidArguments("atime must be a number or a Date", .{}); @@ -1585,7 +1584,7 @@ pub const Arguments = struct { arguments.eat(); - const mtime = JSC.Node.timeLikeFromJS(ctx, arguments.next() orelse { + const mtime = try JSC.Node.timeLikeFromJS(ctx, arguments.next() orelse { return ctx.throwInvalidArguments("mtime is required", .{}); }) orelse { return ctx.throwInvalidArguments("mtime must be a number or a Date", .{}); @@ -2360,14 +2359,14 @@ pub const Arguments = struct { return throwInvalidFdError(ctx, fd_value); }; - const atime = JSC.Node.timeLikeFromJS(ctx, arguments.next() orelse { + const atime = try JSC.Node.timeLikeFromJS(ctx, arguments.next() orelse { return ctx.throwInvalidArguments("atime is required", .{}); }) orelse { return ctx.throwInvalidArguments("atime must be a number or a Date", .{}); }; arguments.eat(); - const mtime = JSC.Node.timeLikeFromJS(ctx, arguments.next() orelse { + const mtime = try JSC.Node.timeLikeFromJS(ctx, arguments.next() orelse { return ctx.throwInvalidArguments("mtime is required", .{}); }) orelse { return ctx.throwInvalidArguments("mtime must be a number or a Date", .{}); @@ -3085,7 +3084,7 @@ pub const Arguments = struct { if (arguments.next()) |arg| { arguments.eat(); if (arg.isNumber()) { - mode = arg.coerce(i32, ctx); + mode = try arg.coerce(i32, ctx); } } @@ -3148,7 +3147,7 @@ pub const StatOrNotFound = union(enum) { }; } - pub fn toJSNewlyCreated(this: *const StatOrNotFound, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJSNewlyCreated(this: *const StatOrNotFound, 
globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { return switch (this.*) { .stats => this.stats.toJSNewlyCreated(globalObject), .not_found => .js_undefined, @@ -3215,7 +3214,7 @@ const Return = struct { .bytesRead = JSC.ZigString.init("bytesRead"), .buffer = JSC.ZigString.init("buffer"), }; - pub fn toJS(this: *const ReadPromise, ctx: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJS(this: *const ReadPromise, ctx: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { defer if (!this.buffer_val.isEmptyOrUndefinedOrNull()) this.buffer_val.unprotect(); @@ -3238,7 +3237,7 @@ const Return = struct { }; // Excited for the issue that's like "cannot read file bigger than 2 GB" - pub fn toJS(this: *const WritePromise, globalObject: *JSC.JSGlobalObject) JSC.C.JSValueRef { + pub fn toJS(this: *const WritePromise, globalObject: *JSC.JSGlobalObject) bun.JSError!bun.jsc.JSValue { defer if (!this.buffer_val.isEmptyOrUndefinedOrNull()) this.buffer_val.unprotect(); @@ -3283,13 +3282,8 @@ const Return = struct { var array = try JSC.JSValue.createEmptyArray(globalObject, this.with_file_types.len); var previous_jsstring: ?*JSC.JSString = null; for (this.with_file_types, 0..) |*item, i| { - const res = item.toJSNewlyCreated(globalObject, &previous_jsstring); - if (res == .zero) return .zero; - array.putIndex( - globalObject, - @truncate(i), - res, - ); + const res = try item.toJSNewlyCreated(globalObject, &previous_jsstring); + try array.putIndex(globalObject, @truncate(i), res); } return array; }, @@ -3727,8 +3721,8 @@ pub const NodeFS = struct { } if (comptime Environment.isWindows) { - const dest_buf = bun.OSPathBufferPool.get(); - defer bun.OSPathBufferPool.put(dest_buf); + const dest_buf = bun.os_path_buffer_pool.get(); + defer bun.os_path_buffer_pool.put(dest_buf); const src = bun.strings.toKernel32Path(bun.reinterpretSlice(u16, &fs.sync_error_buf), args.src.slice()); const dest = bun.strings.toKernel32Path(dest_buf, args.dest.slice()); @@ -3918,8 +3912,8 @@ pub const NodeFS = struct { } pub fn mkdirRecursiveImpl(this: *NodeFS, args: Arguments.Mkdir, comptime Ctx: type, ctx: Ctx) Maybe(Return.Mkdir) { - const buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf); + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); const path = args.path.osPathKernel32(buf); return switch (args.always_return_none) { @@ -4430,7 +4424,6 @@ pub const NodeFS = struct { comptime ExpectedType: type, entries: *std.ArrayList(ExpectedType), ) Maybe(void) { - const dir = fd.stdDir(); const is_u16 = comptime Environment.isWindows and (ExpectedType == bun.String or ExpectedType == bun.JSC.Node.Dirent); var dirent_path: bun.String = bun.String.dead; @@ -4438,15 +4431,15 @@ pub const NodeFS = struct { dirent_path.deref(); } - var iterator = DirIterator.iterate(dir, comptime if (is_u16) .u16 else .u8); + var iterator = DirIterator.iterate(fd, comptime if (is_u16) .u16 else .u8); var entry = iterator.next(); const re_encoding_buffer: ?*bun.PathBuffer = if (is_u16 and args.encoding != .utf8) - bun.PathBufferPool.get() + bun.path_buffer_pool.get() else null; defer if (is_u16 and args.encoding != .utf8) - bun.PathBufferPool.put(re_encoding_buffer.?); + bun.path_buffer_pool.put(re_encoding_buffer.?); while (switch (entry) { .err => |err| { @@ -4578,7 +4571,7 @@ pub const NodeFS = struct { } } - var iterator = DirIterator.iterate(fd.stdDir(), .u8); + var iterator = DirIterator.iterate(fd, .u8); var entry = iterator.next(); var dirent_path_prev: bun.String = bun.String.empty; defer { @@ -4732,7 
+4725,7 @@ pub const NodeFS = struct { } } - var iterator = DirIterator.iterate(fd.stdDir(), .u8); + var iterator = DirIterator.iterate(fd, .u8); var entry = iterator.next(); var dirent_path_prev: bun.String = bun.String.dead; defer { @@ -5044,7 +5037,7 @@ pub const NodeFS = struct { if (this.vm) |vm| { // Attempt to create the buffer in JSC's heap. // This avoids creating a WastefulTypedArray. - const array_buffer = JSC.ArrayBuffer.createBuffer(vm.global, temporary_read_buffer); + const array_buffer = JSC.ArrayBuffer.createBuffer(vm.global, temporary_read_buffer) catch .zero; // TODO: properly propagate exception upwards array_buffer.ensureStillAlive(); return .{ .result = .{ @@ -5978,8 +5971,8 @@ pub const NodeFS = struct { pub fn osPathIntoSyncErrorBufOverlap(this: *NodeFS, slice: anytype) []const u8 { if (Environment.isWindows) { - const tmp = bun.OSPathBufferPool.get(); - defer bun.OSPathBufferPool.put(tmp); + const tmp = bun.os_path_buffer_pool.get(); + defer bun.os_path_buffer_pool.put(tmp); @memcpy(tmp[0..slice.len], slice); return bun.strings.fromWPath(&this.sync_error_buf, tmp[0..slice.len]); } @@ -6093,10 +6086,7 @@ pub const NodeFS = struct { .result => {}, } - var iterator = iterator: { - const dir = fd.stdDir(); - break :iterator DirIterator.iterate(dir, if (Environment.isWindows) .u16 else .u8); - }; + var iterator = DirIterator.iterate(fd, if (Environment.isWindows) .u16 else .u8); var entry = iterator.next(); while (switch (entry) { .err => |err| { @@ -6488,8 +6478,8 @@ pub const NodeFS = struct { .err => |err| return .{ .err = err }, .result => |src_fd| src_fd, }; - const wbuf = bun.OSPathBufferPool.get(); - defer bun.OSPathBufferPool.put(wbuf); + const wbuf = bun.os_path_buffer_pool.get(); + defer bun.os_path_buffer_pool.put(wbuf); const len = bun.windows.GetFinalPathNameByHandleW(handle.cast(), wbuf, wbuf.len, 0); if (len == 0) { return ret.errnoSysP(0, .copyfile, this.osPathIntoSyncErrorBuf(dest)) orelse dst_enoent_maybe; diff --git a/src/bun.js/node/node_fs_binding.zig b/src/bun.js/node/node_fs_binding.zig index 7d8a7f17b1..77488ca1b6 100644 --- a/src/bun.js/node/node_fs_binding.zig +++ b/src/bun.js/node/node_fs_binding.zig @@ -34,7 +34,7 @@ fn Bindings(comptime function_name: NodeFSFunctionEnum) type { var result = function(&this.node_fs, args, .sync); return switch (result) { - .err => |err| globalObject.throwValue(JSC.JSValue.c(err.toJS(globalObject))), + .err => |err| globalObject.throwValue(err.toJS(globalObject)), .result => |*res| globalObject.toJS(res), }; } @@ -232,7 +232,7 @@ pub fn createMemfdForTesting(globalObject: *JSC.JSGlobalObject, callFrame: *JSC. 
return JSC.JSValue.jsNumber(fd.cast()); }, .err => |err| { - return globalObject.throwValue(err.toJSC(globalObject)); + return globalObject.throwValue(err.toJS(globalObject)); }, } } diff --git a/src/bun.js/node/node_fs_stat_watcher.zig b/src/bun.js/node/node_fs_stat_watcher.zig index a58cb8aacf..6052c180be 100644 --- a/src/bun.js/node/node_fs_stat_watcher.zig +++ b/src/bun.js/node/node_fs_stat_watcher.zig @@ -18,7 +18,7 @@ const StatsBig = bun.JSC.Node.StatsBig; const log = bun.Output.scoped(.StatWatcher, false); -fn statToJSStats(globalThis: *JSC.JSGlobalObject, stats: *const bun.Stat, bigint: bool) JSC.JSValue { +fn statToJSStats(globalThis: *JSC.JSGlobalObject, stats: *const bun.Stat, bigint: bool) bun.JSError!JSC.JSValue { if (bigint) { return StatsBig.init(stats).toJS(globalThis); } else { @@ -279,7 +279,7 @@ pub const StatWatcher = struct { if (!interval_.isNumber() and !interval_.isAnyInt()) { return global.throwInvalidArguments("interval must be a number", .{}); } - interval = interval_.coerce(i32, global); + interval = try interval_.coerce(i32, global); } } } @@ -392,7 +392,7 @@ pub const StatWatcher = struct { return; } - const jsvalue = statToJSStats(this.globalThis, &this.last_stat, this.bigint); + const jsvalue = statToJSStats(this.globalThis, &this.last_stat, this.bigint) catch return; // TODO: properly propagate exception upwards this.last_jsvalue = .create(jsvalue, this.globalThis); this.scheduler.data.append(this); @@ -403,7 +403,7 @@ pub const StatWatcher = struct { return; } - const jsvalue = statToJSStats(this.globalThis, &this.last_stat, this.bigint); + const jsvalue = statToJSStats(this.globalThis, &this.last_stat, this.bigint) catch return; // TODO: properly propagate exception upwards this.last_jsvalue = .create(jsvalue, this.globalThis); _ = js.listenerGetCached(this.js_this).?.call( @@ -430,7 +430,17 @@ pub const StatWatcher = struct { .err => std.mem.zeroes(bun.Stat), }; - if (std.mem.eql(u8, std.mem.asBytes(&res), std.mem.asBytes(&this.last_stat))) return; + var compare = res; + const StatT = @TypeOf(compare); + if (@hasField(StatT, "st_atim")) { + compare.st_atim = this.last_stat.st_atim; + } else if (@hasField(StatT, "st_atimespec")) { + compare.st_atimespec = this.last_stat.st_atimespec; + } else if (@hasField(StatT, "atim")) { + compare.atim = this.last_stat.atim; + } + + if (std.mem.eql(u8, std.mem.asBytes(&compare), std.mem.asBytes(&this.last_stat))) return; this.last_stat = res; this.enqueueTaskConcurrent(JSC.ConcurrentTask.fromCallback(this, swapAndCallListenerOnMainThread)); @@ -439,7 +449,7 @@ pub const StatWatcher = struct { /// After a restat found the file changed, this calls the listener function. 
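The restat hunk above makes the watcher ignore access-time-only changes: before the byte-wise compare it copies the previous stat's atime field (whichever of `st_atim`, `st_atimespec`, or `atim` the platform's `bun.Stat` exposes, hence the `@hasField` chain) into the fresh result, so a bare atime bump can no longer fire the listener. The same normalize-then-compare idea on a toy stat struct, runnable as-is:

```zig
const std = @import("std");

const Timespec = extern struct { sec: i64, nsec: i64 };
const Stat = extern struct { size: u64, mtim: Timespec, atim: Timespec };

/// True when `fresh` differs from `last` in anything other than atime.
fn changedIgnoringAtime(last: Stat, fresh: Stat) bool {
    var compare = fresh;
    compare.atim = last.atim; // neutralize the field we want to ignore
    return !std.mem.eql(u8, std.mem.asBytes(&compare), std.mem.asBytes(&last));
}

pub fn main() void {
    const last = Stat{ .size = 10, .mtim = .{ .sec = 1, .nsec = 0 }, .atim = .{ .sec = 1, .nsec = 0 } };
    var fresh = last;
    fresh.atim.sec = 99; // a plain read bumped atime
    std.debug.print("atime only -> changed={}\n", .{changedIgnoringAtime(last, fresh)}); // false
    fresh.size = 11;
    std.debug.print("size too   -> changed={}\n", .{changedIgnoringAtime(last, fresh)}); // true
}
```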
pub fn swapAndCallListenerOnMainThread(this: *StatWatcher) void { const prev_jsvalue = this.last_jsvalue.swap(); - const current_jsvalue = statToJSStats(this.globalThis, &this.last_stat, this.bigint); + const current_jsvalue = statToJSStats(this.globalThis, &this.last_stat, this.bigint) catch return; // TODO: properly propagate exception upwards this.last_jsvalue.set(this.globalThis, current_jsvalue); _ = js.listenerGetCached(this.js_this).?.call( @@ -455,8 +465,8 @@ pub const StatWatcher = struct { pub fn init(args: Arguments) !*StatWatcher { log("init", .{}); - const buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf); + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); var slice = args.path.slice(); if (bun.strings.startsWith(slice, "file://")) { slice = slice[6..]; diff --git a/src/bun.js/node/node_fs_watcher.zig b/src/bun.js/node/node_fs_watcher.zig index b5df617bf7..80f2e87820 100644 --- a/src/bun.js/node/node_fs_watcher.zig +++ b/src/bun.js/node/node_fs_watcher.zig @@ -167,6 +167,7 @@ pub const FSWatcher = struct { pub fn dupe(event: Event) !Event { return switch (event) { inline .rename, .change => |path, t| @unionInit(Event, @tagName(t), try bun.default_allocator.dupe(u8, path)), + .@"error" => |err| .{ .@"error" = err.clone(bun.default_allocator) }, inline else => |value, t| @unionInit(Event, @tagName(t), value), }; } @@ -177,6 +178,7 @@ pub const FSWatcher = struct { else => bun.default_allocator.free(path.*), .windows => path.deinit(), }, + .@"error" => |*err| err.deinit(), else => {}, } } @@ -501,7 +503,7 @@ pub const FSWatcher = struct { const globalObject = this.globalThis; var args = [_]JSC.JSValue{ EventType.@"error".toJS(globalObject), - err.toJSC(globalObject), + err.toJS(globalObject), }; _ = listener.callWithGlobalThis( globalObject, @@ -527,7 +529,7 @@ pub const FSWatcher = struct { var filename: JSC.JSValue = .js_undefined; if (file_name.len > 0) { if (this.encoding == .buffer) - filename = JSC.ArrayBuffer.createBuffer(globalObject, file_name) + filename = JSC.ArrayBuffer.createBuffer(globalObject, file_name) catch return // TODO: properly propagate exception upwards else if (this.encoding == .utf8) { filename = JSC.ZigString.fromUTF8(file_name).toJS(globalObject); } else { @@ -641,33 +643,35 @@ pub const FSWatcher = struct { } pub fn init(args: Arguments) bun.JSC.Maybe(*FSWatcher) { - var buf: bun.PathBuffer = undefined; - var slice = args.path.slice(); - if (bun.strings.startsWith(slice, "file://")) { - slice = slice[6..]; - } + const joined_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(joined_buf); + const file_path: [:0]const u8 = brk: { + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); + var slice = args.path.slice(); + if (bun.strings.startsWith(slice, "file://")) { + slice = slice[6..]; + } - var parts = [_]string{ - slice, + const cwd = switch (bun.sys.getcwd(buf)) { + .result => |r| r, + .err => |err| return .{ .err = err }, + }; + buf[cwd.len] = std.fs.path.sep; + + const parts = &[_]string{ + cwd, + slice, + }; + + break :brk Path.joinAbsStringBufZ( + buf[0 .. cwd.len + 1], + joined_buf, + parts, + .auto, + ); }; - const cwd = switch (bun.sys.getcwd(&buf)) { - .result => |r| r, - .err => |err| return .{ .err = err }, - }; - buf[cwd.len] = std.fs.path.sep; - - var joined_buf: bun.PathBuffer = undefined; - const file_path = Path.joinAbsStringBuf( - buf[0 .. 
cwd.len + 1], - &joined_buf, - &parts, - .auto, - ); - - joined_buf[file_path.len] = 0; - const file_path_z = joined_buf[0..file_path.len :0]; - const vm = args.global_this.bunVM(); const ctx = bun.new(FSWatcher, .{ @@ -689,7 +693,7 @@ pub const FSWatcher = struct { ctx.current_task.ctx = ctx; ctx.path_watcher = if (args.signal == null or !args.signal.?.aborted()) - switch (PathWatcher.watch(vm, file_path_z, args.recursive, onPathUpdate, onUpdateEnd, bun.cast(*anyopaque, ctx))) { + switch (PathWatcher.watch(vm, file_path, args.recursive, onPathUpdate, onUpdateEnd, bun.cast(*anyopaque, ctx))) { .result => |r| r, .err => |err| { ctx.deinit(); @@ -702,7 +706,7 @@ pub const FSWatcher = struct { } else null; - ctx.initJS(args.listener); + ctx.initJS(args.listener.withAsyncContextIfNeeded(args.global_this)); return .{ .result = ctx }; } }; diff --git a/src/bun.js/node/node_http_binding.zig b/src/bun.js/node/node_http_binding.zig index 713201d0ea..71731ca19d 100644 --- a/src/bun.js/node/node_http_binding.zig +++ b/src/bun.js/node/node_http_binding.zig @@ -35,7 +35,7 @@ pub fn setMaxHTTPHeaderSize(globalThis: *JSC.JSGlobalObject, callframe: *JSC.Cal return globalThis.throwNotEnoughArguments("setMaxHTTPHeaderSize", 1, arguments.len); } const value = arguments[0]; - const num = value.coerceToInt64(globalThis); + const num = try value.coerceToInt64(globalThis); if (num <= 0) { return globalThis.throwInvalidArgumentTypeValue("maxHeaderSize", "non-negative integer", value); } diff --git a/src/bun.js/node/node_os.zig b/src/bun.js/node/node_os.zig index 2dedf0cc5d..2ab9917dce 100644 --- a/src/bun.js/node/node_os.zig +++ b/src/bun.js/node/node_os.zig @@ -105,7 +105,7 @@ fn cpusImplLinux(globalThis: *JSC.JSGlobalObject) !JSC.JSValue { // Actually create the JS object representing the CPU const cpu = JSC.JSValue.createEmptyObject(globalThis, 3); cpu.put(globalThis, JSC.ZigString.static("times"), times.toValue(globalThis)); - values.putIndex(globalThis, num_cpus, cpu); + try values.putIndex(globalThis, num_cpus, cpu); num_cpus += 1; } @@ -129,7 +129,7 @@ fn cpusImplLinux(globalThis: *JSC.JSGlobalObject) !JSC.JSValue { while (line_iter.next()) |line| { if (strings.hasPrefixComptime(line, key_processor)) { if (!has_model_name) { - const cpu = JSC.JSObject.getIndex(values, globalThis, cpu_index); + const cpu = try values.getIndex(globalThis, cpu_index); cpu.put(globalThis, JSC.ZigString.static("model"), JSC.ZigString.static("unknown").withEncoding().toJS(globalThis)); } // If this line starts a new processor, parse the index from the line @@ -140,26 +140,26 @@ fn cpusImplLinux(globalThis: *JSC.JSGlobalObject) !JSC.JSValue { } else if (strings.hasPrefixComptime(line, key_model_name)) { // If this is the model name, extract it and store on the current cpu const model_name = line[key_model_name.len..]; - const cpu = JSC.JSObject.getIndex(values, globalThis, cpu_index); + const cpu = try values.getIndex(globalThis, cpu_index); cpu.put(globalThis, JSC.ZigString.static("model"), JSC.ZigString.init(model_name).withEncoding().toJS(globalThis)); has_model_name = true; } } if (!has_model_name) { - const cpu = JSC.JSObject.getIndex(values, globalThis, cpu_index); + const cpu = try values.getIndex(globalThis, cpu_index); cpu.put(globalThis, JSC.ZigString.static("model"), JSC.ZigString.static("unknown").withEncoding().toJS(globalThis)); } } else |_| { // Initialize model name to "unknown" - var it = values.arrayIterator(globalThis); - while (it.next()) |cpu| { + var it = try values.arrayIterator(globalThis); + while (try 
it.next()) |cpu| { cpu.put(globalThis, JSC.ZigString.static("model"), JSC.ZigString.static("unknown").withEncoding().toJS(globalThis)); } } // Read /sys/devices/system/cpu/cpu{}/cpufreq/scaling_cur_freq to get current frequency (optional) for (0..num_cpus) |cpu_index| { - const cpu = JSC.JSObject.getIndex(values, globalThis, @truncate(cpu_index)); + const cpu = try values.getIndex(globalThis, @truncate(cpu_index)); var path_buf: [128]u8 = undefined; const path = try std.fmt.bufPrint(&path_buf, "/sys/devices/system/cpu/cpu{}/cpufreq/scaling_cur_freq", .{cpu_index}); @@ -251,7 +251,7 @@ fn cpusImplDarwin(globalThis: *JSC.JSGlobalObject) !JSC.JSValue { cpu.put(globalThis, JSC.ZigString.static("model"), model_name); cpu.put(globalThis, JSC.ZigString.static("times"), times.toValue(globalThis)); - values.putIndex(globalThis, cpu_index, cpu); + try values.putIndex(globalThis, cpu_index, cpu); } return values; } @@ -281,7 +281,7 @@ pub fn cpusImplWindows(globalThis: *JSC.JSGlobalObject) !JSC.JSValue { cpu.put(globalThis, JSC.ZigString.static("speed"), JSC.JSValue.jsNumber(cpu_info.speed)); cpu.put(globalThis, JSC.ZigString.static("times"), times.toValue(globalThis)); - values.putIndex(globalThis, @intCast(i), cpu); + try values.putIndex(globalThis, @intCast(i), cpu); } return values; @@ -318,7 +318,7 @@ pub fn homedir(global: *JSC.JSGlobalObject) !bun.String { var out: bun.PathBuffer = undefined; var size: usize = out.len; if (libuv.uv_os_homedir(&out, &size).toError(.uv_os_homedir)) |err| { - return global.throwValue(err.toJSC(global)); + return global.throwValue(err.toJS(global)); } return bun.String.createUTF8(out[0..size]); } else { @@ -372,7 +372,7 @@ pub fn homedir(global: *JSC.JSGlobalObject) !bun.String { return global.throwValue(bun.sys.Error.fromCode( @enumFromInt(ret), .uv_os_homedir, - ).toJSC(global)); + ).toJS(global)); } if (result == null) { @@ -380,7 +380,7 @@ pub fn homedir(global: *JSC.JSGlobalObject) !bun.String { return global.throwValue(bun.sys.Error.fromCode( .NOENT, .uv_os_homedir, - ).toJSC(global)); + ).toJS(global)); } return if (pw.pw_dir) |dir| @@ -632,15 +632,15 @@ fn networkInterfacesPosix(globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSVal } // Does this entry already exist? - if (ret.get_unsafe(globalThis, interface_name)) |array| { + if (try ret.get(globalThis, interface_name)) |array| { // Add this interface entry to the existing array - const next_index = @as(u32, @intCast(array.getLength(globalThis))); - array.putIndex(globalThis, next_index, interface); + const next_index: u32 = @intCast(try array.getLength(globalThis)); + try array.putIndex(globalThis, next_index, interface); } else { // Add it as an array with this interface as an element const member_name = JSC.ZigString.init(interface_name); var array = try JSC.JSValue.createEmptyArray(globalThis, 1); - array.putIndex(globalThis, 0, interface); + try array.putIndex(globalThis, 0, interface); ret.put(globalThis, &member_name, array); } } @@ -746,15 +746,15 @@ fn networkInterfacesWindows(globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSV // Does this entry already exist? 
const interface_name = bun.span(iface.name); - if (ret.get_unsafe(globalThis, interface_name)) |array| { + if (try ret.get(globalThis, interface_name)) |array| { // Add this interface entry to the existing array - const next_index = @as(u32, @intCast(array.getLength(globalThis))); - array.putIndex(globalThis, next_index, interface); + const next_index: u32 = @intCast(try array.getLength(globalThis)); + try array.putIndex(globalThis, next_index, interface); } else { // Add it as an array with this interface as an element const member_name = JSC.ZigString.init(interface_name); var array = try JSC.JSValue.createEmptyArray(globalThis, 1); - array.putIndex(globalThis, 0, interface); + try array.putIndex(globalThis, 0, interface); ret.put(globalThis, &member_name, array); } } diff --git a/src/bun.js/node/node_process.zig b/src/bun.js/node/node_process.zig index bb6c23d898..c17ca62d18 100644 --- a/src/bun.js/node/node_process.zig +++ b/src/bun.js/node/node_process.zig @@ -8,7 +8,8 @@ comptime { @export(&exit, .{ .name = "Bun__Process__exit" }); @export(&createArgv0, .{ .name = "Bun__Process__createArgv0" }); @export(&getExecPath, .{ .name = "Bun__Process__getExecPath" }); - @export(&createExecArgv, .{ .name = "Bun__Process__createExecArgv" }); + @export(&bun.jsc.host_fn.wrap1(createExecArgv), .{ .name = "Bun__Process__createExecArgv" }); + @export(&getEval, .{ .name = "Bun__Process__getEval" }); } var title_mutex = bun.Mutex{}; @@ -42,7 +43,7 @@ pub fn getExecPath(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { return JSC.ZigString.fromUTF8(out).toJS(globalObject); } -fn createExecArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { +fn createExecArgv(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { var sfb = std.heap.stackFallback(4096, globalObject.allocator()); const temp_alloc = sfb.get(); const vm = globalObject.bunVM(); @@ -50,15 +51,15 @@ fn createExecArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { if (vm.worker) |worker| { // was explicitly overridden for the worker? 
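The node_process.zig hunks here replace hand-rolled `callconv(.C)` shims with `bun.jsc.host_fn.wrap1`/`wrap2`, which adapt a Zig function returning `bun.JSError!JSValue` to the C host-function ABI. A toy version of such a comptime adapter; the stand-in types and the `has_exception` bookkeeping are illustrative only (the real wrapper deals with JSC's pending-exception state):

```zig
const std = @import("std");

const JSError = error{JSError};
const JSValue = i64; // stand-in; real code uses JSC.JSValue
const JSGlobalObject = struct { has_exception: bool = false };
const zero: JSValue = 0; // stand-in for JSValue.zero

/// Adapt `fn (*JSGlobalObject) JSError!JSValue` to the C host-function ABI.
fn wrap1(comptime f: fn (*JSGlobalObject) JSError!JSValue) fn (*JSGlobalObject) callconv(.c) JSValue {
    return struct {
        fn wrapped(global: *JSGlobalObject) callconv(.c) JSValue {
            return f(global) catch {
                // Illustrative: the real wrapper relies on an exception
                // already being pending on the global object.
                global.has_exception = true;
                return zero;
            };
        }
    }.wrapped;
}

fn getCwd_(global: *JSGlobalObject) JSError!JSValue {
    _ = global;
    return 42;
}

// Same shape as the diff's `pub const getCwd = JSC.host_fn.wrap1(getCwd_);`.
pub const getCwd = wrap1(getCwd_);

pub fn main() void {
    var global = JSGlobalObject{};
    std.debug.print("getCwd -> {d}\n", .{getCwd(&global)});
}
```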
if (worker.execArgv) |execArgv| { - const array = JSC.JSValue.createEmptyArray(globalObject, execArgv.len) catch return .zero; + const array = try JSC.JSValue.createEmptyArray(globalObject, execArgv.len); for (0..execArgv.len) |i| { - array.putIndex(globalObject, @intCast(i), bun.String.init(execArgv[i]).toJS(globalObject)); + try array.putIndex(globalObject, @intCast(i), bun.String.init(execArgv[i]).toJS(globalObject)); } return array; } } - var args = std.ArrayList(bun.String).initCapacity(temp_alloc, bun.argv.len - 1) catch bun.outOfMemory(); + var args = try std.ArrayList(bun.String).initCapacity(temp_alloc, bun.argv.len - 1); defer args.deinit(); defer for (args.items) |*arg| arg.deref(); @@ -71,7 +72,7 @@ fn createExecArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { defer prev = arg; if (arg.len >= 1 and arg[0] == '-') { - args.append(bun.String.createUTF8(arg)) catch bun.outOfMemory(); + try args.append(bun.String.createUTF8(arg)); continue; } @@ -106,7 +107,7 @@ fn createExecArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { }); if (prev) |p| if (map.has(p)) { - args.append(bun.String.createUTF8(arg)) catch @panic("OOM"); + try args.append(bun.String.createUTF8(arg)); continue; }; @@ -114,7 +115,7 @@ fn createExecArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { break; } - return bun.String.toJSArray(globalObject, args.items) catch .zero; + return bun.String.toJSArray(globalObject, args.items); } fn createArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { @@ -193,22 +194,26 @@ pub fn getExecArgv(global: *JSGlobalObject) callconv(.c) JSValue { return Bun__Process__getExecArgv(global); } -pub fn getCwd(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { - return JSC.toJSHostValue(globalObject, getCwd_(globalObject)); +pub fn getEval(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { + const vm = globalObject.bunVM(); + if (vm.module_loader.eval_source) |source| { + return JSC.ZigString.init(source.contents).toJS(globalObject); + } + return .js_undefined; } + +pub const getCwd = JSC.host_fn.wrap1(getCwd_); fn getCwd_(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { var buf: bun.PathBuffer = undefined; switch (bun.api.node.path.getCwd(&buf)) { .result => |r| return JSC.ZigString.init(r).withEncoding().toJS(globalObject), .err => |e| { - return globalObject.throwValue(e.toJSC(globalObject)); + return globalObject.throwValue(e.toJS(globalObject)); }, } } -pub fn setCwd(globalObject: *JSC.JSGlobalObject, to: *JSC.ZigString) callconv(.C) JSC.JSValue { - return JSC.toJSHostValue(globalObject, setCwd_(globalObject, to)); -} +pub const setCwd = JSC.host_fn.wrap2(setCwd_); fn setCwd_(globalObject: *JSC.JSGlobalObject, to: *JSC.ZigString) bun.JSError!JSC.JSValue { if (to.len == 0) { return globalObject.throwInvalidArguments("Expected path to be a non-empty string", .{}); @@ -228,7 +233,7 @@ fn setCwd_(globalObject: *JSC.JSGlobalObject, to: *JSC.ZigString) bun.JSError!JS .result => |r| r, .err => |err| { _ = Syscall.chdir(fs.top_level_dir, fs.top_level_dir); - return globalObject.throwValue(err.toJSC(globalObject)); + return globalObject.throwValue(err.toJS(globalObject)); }, }; @memcpy(fs.top_level_dir_buf[0..into_cwd_buf.len], into_cwd_buf); @@ -247,7 +252,7 @@ fn setCwd_(globalObject: *JSC.JSGlobalObject, to: *JSC.ZigString) bun.JSError!JS return str.transferToJS(globalObject); }, .err => |e| { - return globalObject.throwValue(e.toJSC(globalObject)); + return 
globalObject.throwValue(e.toJS(globalObject)); }, } } diff --git a/src/bun.js/node/node_util_binding.zig b/src/bun.js/node/node_util_binding.zig index bc580c85e8..78e0791b8d 100644 --- a/src/bun.js/node/node_util_binding.zig +++ b/src/bun.js/node/node_util_binding.zig @@ -101,6 +101,7 @@ pub fn internalErrorName(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFr if (err_int == -bun.sys.UV_E.SOCKTNOSUPPORT) return bun.String.static("ESOCKTNOSUPPORT").toJS(globalThis); if (err_int == -bun.sys.UV_E.NODATA) return bun.String.static("ENODATA").toJS(globalThis); if (err_int == -bun.sys.UV_E.UNATCH) return bun.String.static("EUNATCH").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOEXEC) return bun.String.static("ENOEXEC").toJS(globalThis); var fmtstring = bun.String.createFormat("Unknown system error {d}", .{err_int}) catch bun.outOfMemory(); return fmtstring.transferToJS(globalThis); diff --git a/src/bun.js/node/node_zlib_binding.zig b/src/bun.js/node/node_zlib_binding.zig index ff2dfc8e10..5093700de8 100644 --- a/src/bun.js/node/node_zlib_binding.zig +++ b/src/bun.js/node/node_zlib_binding.zig @@ -247,7 +247,7 @@ pub fn CompressionStream(comptime T: type) type { pub fn setOnError(_: *T, this_value: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { if (value.isFunction()) { - T.js.errorCallbackSetCached(this_value, globalObject, value); + T.js.errorCallbackSetCached(this_value, globalObject, value.withAsyncContextIfNeeded(globalObject)); } } diff --git a/src/bun.js/node/path.zig b/src/bun.js/node/path.zig index 59d67d9501..fe7fe3966d 100644 --- a/src/bun.js/node/path.zig +++ b/src/bun.js/node/path.zig @@ -2325,14 +2325,14 @@ pub fn relativeWindowsT(comptime T: type, from: []const T, to: []const T, buf: [ pub inline fn relativePosixJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, from: []const T, to: []const T, buf: []T, buf2: []T, buf3: []T) JSC.JSValue { return switch (relativePosixT(T, from, to, buf, buf2, buf3)) { .result => |r| bun.String.createUTF8ForJS(globalObject, r), - .err => |e| e.toJSC(globalObject), + .err => |e| e.toJS(globalObject), }; } pub inline fn relativeWindowsJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, from: []const T, to: []const T, buf: []T, buf2: []T, buf3: []T) JSC.JSValue { return switch (relativeWindowsT(T, from, to, buf, buf2, buf3)) { .result => |r| bun.String.createUTF8ForJS(globalObject, r), - .err => |e| e.toJSC(globalObject), + .err => |e| e.toJS(globalObject), }; } @@ -2779,14 +2779,14 @@ pub fn resolveWindowsT(comptime T: type, paths: []const []const T, buf: []T, buf pub inline fn resolvePosixJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, paths: []const []const T, buf: []T, buf2: []T) JSC.JSValue { return switch (resolvePosixT(T, paths, buf, buf2)) { .result => |r| bun.String.createUTF8ForJS(globalObject, r), - .err => |e| e.toJSC(globalObject), + .err => |e| e.toJS(globalObject), }; } pub inline fn resolveWindowsJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, paths: []const []const T, buf: []T, buf2: []T) JSC.JSValue { return switch (resolveWindowsT(T, paths, buf, buf2)) { .result => |r| bun.String.createUTF8ForJS(globalObject, r), - .err => |e| e.toJSC(globalObject), + .err => |e| e.toJS(globalObject), }; } @@ -2925,7 +2925,7 @@ pub fn toNamespacedPathWindowsT(comptime T: type, path: []const T, buf: []T, buf pub inline fn toNamespacedPathWindowsJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, path: []const T, buf: []T, buf2: []T) JSC.JSValue { return switch 
(toNamespacedPathWindowsT(T, path, buf, buf2)) { .result => |r| bun.String.createUTF8ForJS(globalObject, r), - .err => |e| e.toJSC(globalObject), + .err => |e| e.toJS(globalObject), }; } diff --git a/src/bun.js/node/path_watcher.zig b/src/bun.js/node/path_watcher.zig index 6a0f33144a..1f24866e27 100644 --- a/src/bun.js/node/path_watcher.zig +++ b/src/bun.js/node/path_watcher.zig @@ -497,7 +497,7 @@ pub const PathWatcherManager = struct { if (watcher.recursive and !watcher.isClosed()) { // this may trigger another thread, which is desired when available, to watch long trees switch (manager._addDirectory(watcher, child_path)) { - .err => |err| return .{ .err = err }, + .err => |err| return .{ .err = err.withPath(child_path.path) }, .result => {}, } } @@ -537,7 +537,7 @@ pub const PathWatcherManager = struct { fn _addDirectory(this: *PathWatcherManager, watcher: *PathWatcher, path: PathInfo) bun.JSC.Maybe(void) { const fd = path.fd; switch (this.main_watcher.addDirectory(fd, path.path, path.hash, false)) { - .err => |err| return .{ .err = err }, + .err => |err| return .{ .err = err.withPath(path.path) }, .result => {}, } @@ -878,7 +878,9 @@ pub const PathWatcher = struct { } this.needs_flush = true; - if (this.isClosed()) return; + if (this.isClosed()) { + return; + } this.callback(this.ctx, event, is_file); } diff --git a/src/bun.js/node/time_like.zig b/src/bun.js/node/time_like.zig index fbf722ec36..f9ba19cea5 100644 --- a/src/bun.js/node/time_like.zig +++ b/src/bun.js/node/time_like.zig @@ -7,7 +7,7 @@ pub const TimeLike = if (Environment.isWindows) f64 else std.posix.timespec; // Node.js docs: // > Values can be either numbers representing Unix epoch time in seconds, Dates, or a numeric string like '123456789.0'. // > If the value can not be converted to a number, or is NaN, Infinity, or -Infinity, an Error will be thrown. 
-pub fn fromJS(globalObject: *JSGlobalObject, value: JSValue) ?TimeLike { +pub fn fromJS(globalObject: *JSGlobalObject, value: JSValue) bun.JSError!?TimeLike { // Number is most common case if (value.isNumber()) { const seconds = value.asNumber(); @@ -26,7 +26,7 @@ pub fn fromJS(globalObject: *JSGlobalObject, value: JSValue) ?TimeLike { } }, .String => { - const seconds = value.coerceToDouble(globalObject); + const seconds = try value.toNumber(globalObject); if (std.math.isFinite(seconds)) { return fromSeconds(seconds); } diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 7df8e8d663..4ff5a6c332 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -400,7 +400,7 @@ pub const Encoding = enum(u8) { return globalObject.ERR(.INVALID_ARG_VALUE, "encoding '{}' is an invalid encoding", .{value.fmtString(globalObject)}).throw(); } - pub fn encodeWithSize(encoding: Encoding, globalObject: *JSC.JSGlobalObject, comptime size: usize, input: *const [size]u8) JSC.JSValue { + pub fn encodeWithSize(encoding: Encoding, globalObject: *JSC.JSGlobalObject, comptime size: usize, input: *const [size]u8) bun.JSError!JSC.JSValue { switch (encoding) { .base64 => { var buf: [std.base64.standard.Encoder.calcSize(size)]u8 = undefined; @@ -425,14 +425,14 @@ pub const Encoding = enum(u8) { inline else => |enc| { const res = JSC.WebCore.encoding.toStringComptime(input, globalObject, enc); if (res.isError()) { - return globalObject.throwValue(res) catch .zero; + return globalObject.throwValue(res); } return res; }, } } - pub fn encodeWithMaxSize(encoding: Encoding, globalObject: *JSC.JSGlobalObject, comptime max_size: usize, input: []const u8) JSC.JSValue { + pub fn encodeWithMaxSize(encoding: Encoding, globalObject: *JSC.JSGlobalObject, comptime max_size: usize, input: []const u8) bun.JSError!JSC.JSValue { switch (encoding) { .base64 => { var base64_buf: [std.base64.standard.Encoder.calcSize(max_size * 4)]u8 = undefined; @@ -459,7 +459,7 @@ pub const Encoding = enum(u8) { inline else => |enc| { const res = JSC.WebCore.encoding.toStringComptime(input, globalObject, enc); if (res.isError()) { - return globalObject.throwValue(res) catch .zero; + return globalObject.throwValue(res); } return res; @@ -569,8 +569,8 @@ pub const PathLike = union(enum) { if (std.fs.path.isAbsolute(sliced)) { if (sliced.len > 2 and bun.path.isDriveLetter(sliced[0]) and sliced[1] == ':' and bun.path.isSepAny(sliced[2])) { // Add the long path syntax. 
This affects most of node:fs - const drive_resolve_buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(drive_resolve_buf); + const drive_resolve_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(drive_resolve_buf); const rest = path_handler.PosixToWinNormalizer.resolveCWDWithExternalBufZ(drive_resolve_buf, sliced) catch @panic("Error while resolving path."); buf[0..4].* = bun.windows.long_path_prefix_u8; // When long path syntax is used, the entire string should be normalized @@ -619,8 +619,8 @@ pub const PathLike = union(enum) { pub fn osPathKernel32(this: PathLike, buf: *bun.PathBuffer) callconv(bun.callconv_inline) bun.OSPathSliceZ { if (comptime Environment.isWindows) { const s = this.slice(); - const b = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(b); + const b = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(b); if (s.len > 0 and bun.path.isSepAny(s[0])) { const resolve = path_handler.PosixToWinNormalizer.resolveCWDWithExternalBuf(buf, s) catch @panic("Error while resolving path."); const normal = path_handler.normalizeBuf(resolve, b, .windows); @@ -800,11 +800,11 @@ pub const VectorArrayBuffer = struct { var bufferlist = std.ArrayList(bun.PlatformIOVec).init(allocator); var i: usize = 0; - const len = val.getLength(globalObject); + const len = try val.getLength(globalObject); bufferlist.ensureTotalCapacityPrecise(len) catch bun.outOfMemory(); while (i < len) { - const element = val.getIndex(globalObject, @as(u32, @truncate(i))); + const element = try val.getIndex(globalObject, @as(u32, @truncate(i))); if (!element.isCell()) { return globalObject.throwInvalidArguments("Expected ArrayBufferView[]", .{}); @@ -1017,7 +1017,7 @@ pub const FileSystemFlags = enum(c_int) { if (!val.isInt32()) { return ctx.throwValue(ctx.ERR(.OUT_OF_RANGE, "The value of \"flags\" is out of range. It must be an integer. Received {d}", .{val.asNumber()}).toJS()); } - const number = val.coerce(i32, ctx); + const number = try val.coerce(i32, ctx); return @as(FileSystemFlags, @enumFromInt(@max(number, 0))); } @@ -1126,8 +1126,8 @@ pub const Dirent = struct { pub const getConstructor = Bun__JSDirentObjectConstructor; extern fn Bun__Dirent__toJS(*JSC.JSGlobalObject, i32, *bun.String, *bun.String, cached_previous_path_jsvalue: ?*?*JSC.JSString) JSC.JSValue; - pub fn toJS(this: *Dirent, globalObject: *JSC.JSGlobalObject, cached_previous_path_jsvalue: ?*?*JSC.JSString) JSC.JSValue { - return Bun__Dirent__toJS( + pub fn toJS(this: *Dirent, globalObject: *JSC.JSGlobalObject, cached_previous_path_jsvalue: ?*?*JSC.JSString) bun.JSError!JSC.JSValue { + return bun.jsc.fromJSHostCall(globalObject, @src(), Bun__Dirent__toJS, .{ globalObject, switch (this.kind) { .file => bun.windows.libuv.UV_DIRENT_FILE, @@ -1136,19 +1136,17 @@ pub const Dirent = struct { .directory => bun.windows.libuv.UV_DIRENT_DIR, // event_port is deliberate there. .event_port, .named_pipe => bun.windows.libuv.UV_DIRENT_FIFO, - .unix_domain_socket => bun.windows.libuv.UV_DIRENT_SOCKET, .sym_link => bun.windows.libuv.UV_DIRENT_LINK, - .whiteout, .door, .unknown => bun.windows.libuv.UV_DIRENT_UNKNOWN, }, &this.name, &this.path, cached_previous_path_jsvalue, - ); + }); } - pub fn toJSNewlyCreated(this: *Dirent, globalObject: *JSC.JSGlobalObject, previous_jsstring: ?*?*JSC.JSString) JSC.JSValue { + pub fn toJSNewlyCreated(this: *Dirent, globalObject: *JSC.JSGlobalObject, previous_jsstring: ?*?*JSC.JSString) bun.JSError!JSC.JSValue { // Shouldn't technically be necessary. 
defer this.deref(); return this.toJS(globalObject, previous_jsstring); diff --git a/src/bun.js/node/util/parse_args.zig b/src/bun.js/node/util/parse_args.zig index be9f380498..5bfa86afbc 100644 --- a/src/bun.js/node/util/parse_args.zig +++ b/src/bun.js/node/util/parse_args.zig @@ -24,7 +24,7 @@ const ArgsSlice = struct { start: u32, end: u32, - pub inline fn get(this: ArgsSlice, globalThis: *JSGlobalObject, i: u32) JSValue { + pub inline fn get(this: ArgsSlice, globalThis: *JSGlobalObject, i: u32) bun.JSError!JSValue { return this.array.getIndex(globalThis, this.start + i); } }; @@ -157,8 +157,8 @@ fn getDefaultArgs(globalThis: *JSGlobalObject) !ArgsSlice { const exec_argv = bun.api.node.process.getExecArgv(globalThis); const argv = bun.api.node.process.getArgv(globalThis); if (argv.isArray() and exec_argv.isArray()) { - var iter = exec_argv.arrayIterator(globalThis); - while (iter.next()) |item| { + var iter = try exec_argv.arrayIterator(globalThis); + while (try iter.next()) |item| { if (item.isString()) { const str = try item.toBunString(globalThis); defer str.deref(); @@ -166,12 +166,12 @@ fn getDefaultArgs(globalThis: *JSGlobalObject) !ArgsSlice { return .{ .array = argv, .start = 1, - .end = @intCast(argv.getLength(globalThis)), + .end = @intCast(try argv.getLength(globalThis)), }; } } } - return .{ .array = argv, .start = 2, .end = @intCast(argv.getLength(globalThis)) }; + return .{ .array = argv, .start = 2, .end = @intCast(try argv.getLength(globalThis)) }; } return .{ @@ -285,11 +285,11 @@ fn storeOption(globalThis: *JSGlobalObject, option_name: ValueRef, option_value: // values[long_option] starts out not present, // first value is added as new array [new_value], // subsequent values are pushed to existing array. - if (values.getOwn(globalThis, key)) |value_list| { - value_list.push(globalThis, new_value); + if (try values.getOwn(globalThis, key)) |value_list| { + try value_list.push(globalThis, new_value); } else { var value_list = try JSValue.createEmptyArray(globalThis, 1); - value_list.putIndex(globalThis, 0, new_value); + try value_list.putIndex(globalThis, 0, new_value); values.putMayBeIndex(globalThis, &key, value_list); } } else { @@ -316,10 +316,10 @@ fn parseOptionDefinitions(globalThis: *JSGlobalObject, options_obj: JSValue, opt try validators.validateObject(globalThis, obj, "options.{s}", .{option.long_name}, .{}); // type field is required - const option_type: JSValue = obj.getOwn(globalThis, "type") orelse .js_undefined; + const option_type: JSValue = try obj.getOwn(globalThis, "type") orelse .js_undefined; option.type = try validators.validateStringEnum(OptionValueType, globalThis, option_type, "options.{s}.type", .{option.long_name}); - if (obj.getOwn(globalThis, "short")) |short_option| { + if (try obj.getOwn(globalThis, "short")) |short_option| { try validators.validateString(globalThis, short_option, "options.{s}.short", .{option.long_name}); var short_option_str = try short_option.toBunString(globalThis); if (short_option_str.length() != 1) { @@ -329,13 +329,13 @@ fn parseOptionDefinitions(globalThis: *JSGlobalObject, options_obj: JSValue, opt option.short_name = short_option_str; } - if (obj.getOwn(globalThis, "multiple")) |multiple_value| { + if (try obj.getOwn(globalThis, "multiple")) |multiple_value| { if (!multiple_value.isUndefined()) { option.multiple = try validators.validateBoolean(globalThis, multiple_value, "options.{s}.multiple", .{option.long_name}); } } - if (obj.getOwn(globalThis, "default")) |default_value| { + if (try obj.getOwn(globalThis, 
"default")) |default_value| { if (!default_value.isUndefined()) { switch (option.type) { .string => { @@ -382,7 +382,7 @@ fn tokenizeArgs( const num_args: u32 = args.end - args.start; var index: u32 = 0; while (index < num_args) : (index += 1) { - const arg_ref: ValueRef = ValueRef{ .jsvalue = args.get(globalThis, index) }; + const arg_ref: ValueRef = ValueRef{ .jsvalue = try args.get(globalThis, index) }; const arg = arg_ref.asBunString(globalThis); const token_rawtype = classifyToken(arg, options); @@ -401,7 +401,7 @@ fn tokenizeArgs( while (index < num_args) : (index += 1) { try ctx.handleToken(.{ .positional = .{ .index = index, - .value = ValueRef{ .jsvalue = args.get(globalThis, index) }, + .value = ValueRef{ .jsvalue = try args.get(globalThis, index) }, } }); } break; // Finished processing args, leave while loop. @@ -417,7 +417,7 @@ fn tokenizeArgs( var has_inline_value = true; if (option_type == .string and index + 1 < num_args) { // e.g. '-f', "bar" - value = ValueRef{ .jsvalue = args.get(globalThis, index + 1) }; + value = ValueRef{ .jsvalue = try args.get(globalThis, index + 1) }; has_inline_value = false; log(" (lone_short_option consuming next token as value)", .{}); } @@ -451,7 +451,7 @@ fn tokenizeArgs( var has_inline_value = true; if (option_type == .string and index + 1 < num_args) { // e.g. '-f', "bar" - value = ValueRef{ .jsvalue = args.get(globalThis, index + 1) }; + value = ValueRef{ .jsvalue = try args.get(globalThis, index + 1) }; has_inline_value = false; log(" (short_option_group short option consuming next token as value)", .{}); } @@ -520,7 +520,7 @@ fn tokenizeArgs( var value: ?JSValue = null; if (option_type == .string and index + 1 < num_args and !negative) { // e.g. '--foo', "bar" - value = args.get(globalThis, index + 1); + value = try args.get(globalThis, index + 1); log(" (consuming next as value)", .{}); } @@ -602,7 +602,7 @@ const ParseArgsState = struct { return globalThis.throwValue(err); } const value = token.value.asJSValue(globalThis); - this.positionals.push(globalThis, value); + try this.positionals.push(globalThis, value); }, .@"option-terminator" => {}, } @@ -645,7 +645,7 @@ const ParseArgsState = struct { obj.put(globalThis, ZigString.static("index"), JSValue.jsNumber(token.index)); }, } - this.tokens.push(globalThis, obj); + try this.tokens.push(globalThis, obj); } } }; @@ -665,23 +665,23 @@ pub fn parseArgs(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSE const config = if (config_value.isUndefined()) null else config_value; // Phase 0.A: Get and validate type of input args - const config_args: JSValue = if (config) |c| c.getOwn(globalThis, "args") orelse .js_undefined else .js_undefined; + const config_args: JSValue = if (config) |c| try c.getOwn(globalThis, "args") orelse .js_undefined else .js_undefined; const args: ArgsSlice = if (!config_args.isUndefinedOrNull()) args: { try validators.validateArray(globalThis, config_args, "args", .{}, null); break :args .{ .array = config_args, .start = 0, - .end = @intCast(config_args.getLength(globalThis)), + .end = @intCast(try config_args.getLength(globalThis)), }; } else try getDefaultArgs(globalThis); // Phase 0.B: Parse and validate config - const config_strict: JSValue = (if (config) |c| c.getOwn(globalThis, "strict") else null) orelse JSValue.jsBoolean(true); - var config_allow_positionals: JSValue = if (config) |c| c.getOwn(globalThis, "allowPositionals") orelse JSC.jsBoolean(!config_strict.toBoolean()) else JSC.jsBoolean(!config_strict.toBoolean()); - const 
config_return_tokens: JSValue = (if (config) |c| c.getOwn(globalThis, "tokens") else null) orelse JSValue.jsBoolean(false); - const config_allow_negative: JSValue = if (config) |c| c.getOwn(globalThis, "allowNegative") orelse .false else .false; - const config_options: JSValue = if (config) |c| c.getOwn(globalThis, "options") orelse .js_undefined else .js_undefined; + const config_strict: JSValue = (if (config) |c| try c.getOwn(globalThis, "strict") else null) orelse JSValue.jsBoolean(true); + var config_allow_positionals: JSValue = if (config) |c| try c.getOwn(globalThis, "allowPositionals") orelse JSC.jsBoolean(!config_strict.toBoolean()) else JSC.jsBoolean(!config_strict.toBoolean()); + const config_return_tokens: JSValue = (if (config) |c| try c.getOwn(globalThis, "tokens") else null) orelse JSValue.jsBoolean(false); + const config_allow_negative: JSValue = if (config) |c| try c.getOwn(globalThis, "allowNegative") orelse .false else .false; + const config_options: JSValue = if (config) |c| try c.getOwn(globalThis, "options") orelse .js_undefined else .js_undefined; const strict = try validators.validateBoolean(globalThis, config_strict, "strict", .{}); @@ -739,7 +739,7 @@ pub fn parseArgs(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSE for (option_defs.items) |option| { if (option.default_value) |default_value| { if (!option.long_name.eqlComptime("__proto__")) { - if (state.values.getOwn(globalThis, option.long_name) == null) { + if (try state.values.getOwn(globalThis, option.long_name) == null) { log(" Setting \"{}\" to default value", .{option.long_name}); state.values.putMayBeIndex(globalThis, &option.long_name, default_value); } diff --git a/src/bun.js/node/util/validators.zig b/src/bun.js/node/util/validators.zig index 1f3f412f45..9fed7e7f68 100644 --- a/src/bun.js/node/util/validators.zig +++ b/src/bun.js/node/util/validators.zig @@ -244,8 +244,8 @@ pub fn validateArray(globalThis: *JSGlobalObject, value: JSValue, comptime name_ pub fn validateStringArray(globalThis: *JSGlobalObject, value: JSValue, comptime name_fmt: string, name_args: anytype) bun.JSError!usize { try validateArray(globalThis, value, name_fmt, name_args, null); var i: usize = 0; - var iter = value.arrayIterator(globalThis); - while (iter.next()) |item| { + var iter = try value.arrayIterator(globalThis); + while (try iter.next()) |item| { if (!item.isString()) { return throwErrInvalidArgType(globalThis, name_fmt ++ "[{d}]", name_args ++ .{i}, "string", value); } @@ -257,8 +257,8 @@ pub fn validateStringArray(globalThis: *JSGlobalObject, value: JSValue, comptime pub fn validateBooleanArray(globalThis: *JSGlobalObject, value: JSValue, comptime name_fmt: string, name_args: anytype) bun.JSError!usize { try validateArray(globalThis, value, name_fmt, name_args, null); var i: usize = 0; - var iter = value.arrayIterator(globalThis); - while (iter.next()) |item| { + var iter = try value.arrayIterator(globalThis); + while (try iter.next()) |item| { if (!item.isBoolean()) { return throwErrInvalidArgType(globalThis, name_fmt ++ "[{d}]", name_args ++ .{i}, "boolean", value); } diff --git a/src/bun.js/node/zlib/NativeBrotli.zig b/src/bun.js/node/zlib/NativeBrotli.zig index 71ea321d95..212007fb55 100644 --- a/src/bun.js/node/zlib/NativeBrotli.zig +++ b/src/bun.js/node/zlib/NativeBrotli.zig @@ -83,7 +83,7 @@ pub fn init(this: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.Cal this.write_result = writeResult; - js.writeCallbackSetCached(this_value, globalThis, writeCallback); + 
js.writeCallbackSetCached(this_value, globalThis, writeCallback.withAsyncContextIfNeeded(globalThis)); var err = this.stream.init(); if (err.isError()) { diff --git a/src/bun.js/node/zlib/NativeZlib.zig b/src/bun.js/node/zlib/NativeZlib.zig index 945136e59a..5aaf817b2b 100644 --- a/src/bun.js/node/zlib/NativeZlib.zig +++ b/src/bun.js/node/zlib/NativeZlib.zig @@ -84,7 +84,7 @@ pub fn init(this: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.Cal const dictionary = if (arguments[6].isUndefined()) null else arguments[6].asArrayBuffer(globalThis).?.byteSlice(); this.write_result = writeResult; - js.writeCallbackSetCached(this_value, globalThis, writeCallback); + js.writeCallbackSetCached(this_value, globalThis, writeCallback.withAsyncContextIfNeeded(globalThis)); // Keep the dictionary alive by keeping a reference to it in the JS object. if (dictionary != null) { diff --git a/src/bun.js/node/zlib/NativeZstd.zig b/src/bun.js/node/zlib/NativeZstd.zig index 0d9756bf81..95acc32749 100644 --- a/src/bun.js/node/zlib/NativeZstd.zig +++ b/src/bun.js/node/zlib/NativeZstd.zig @@ -83,7 +83,7 @@ pub fn init(this: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.Cal this.write_result = writeState.asU32().ptr; const write_js_callback = try validators.validateFunction(globalThis, "processCallback", processCallback_value); - js.writeCallbackSetCached(this_value, globalThis, write_js_callback); + js.writeCallbackSetCached(this_value, globalThis, write_js_callback.withAsyncContextIfNeeded(globalThis)); var pledged_src_size: u64 = std.math.maxInt(u64); if (pledgedSrcSize_value.isNumber()) { diff --git a/src/bun.js/rare_data.zig b/src/bun.js/rare_data.zig index 6db3cb8f52..d4adde9a4e 100644 --- a/src/bun.js/rare_data.zig +++ b/src/bun.js/rare_data.zig @@ -1,20 +1,4 @@ -const EditorContext = @import("../open.zig").EditorContext; -const ValkeyContext = @import("../valkey/valkey.zig").ValkeyContext; -const Blob = JSC.WebCore.Blob; -const default_allocator = bun.default_allocator; -const Output = bun.Output; -const RareData = @This(); -const Syscall = bun.sys; -const JSC = bun.JSC; -const std = @import("std"); -const BoringSSL = bun.BoringSSL.c; -const bun = @import("bun"); -const UUID = @import("./uuid.zig"); -const Async = bun.Async; -const StatWatcherScheduler = @import("./node/node_fs_stat_watcher.zig").StatWatcherScheduler; -const IPC = @import("./ipc.zig"); -const uws = bun.uws; -const api = bun.api; +websocket_deflate: ?*WebSocketDeflate.RareData = null, boring_ssl_engine: ?*BoringSSL.ENGINE = null, editor_context: EditorContext = EditorContext{}, stderr_store: ?*Blob.Store = null, @@ -503,5 +487,36 @@ pub fn deinit(this: *RareData) void { this.cleanup_hooks.clearAndFree(bun.default_allocator); + if (this.websocket_deflate) |deflate| { + this.websocket_deflate = null; + deflate.deinit(); + } + this.valkey_context.deinit(); } + +pub fn websocketDeflate(this: *RareData) *WebSocketDeflate.RareData { + return this.websocket_deflate orelse brk: { + this.websocket_deflate = bun.new(WebSocketDeflate.RareData, .{}); + break :brk this.websocket_deflate.?; + }; +} + +const EditorContext = @import("../open.zig").EditorContext; +const ValkeyContext = @import("../valkey/valkey.zig").ValkeyContext; +const Blob = JSC.WebCore.Blob; +const default_allocator = bun.default_allocator; +const Output = bun.Output; +const RareData = @This(); +const Syscall = bun.sys; +const JSC = bun.JSC; +const std = @import("std"); +const BoringSSL = bun.BoringSSL.c; +const bun = @import("bun"); +const UUID = 
@import("./uuid.zig"); +const Async = bun.Async; +const StatWatcherScheduler = @import("./node/node_fs_stat_watcher.zig").StatWatcherScheduler; +const IPC = @import("./ipc.zig"); +const uws = bun.uws; +const api = bun.api; +const WebSocketDeflate = @import("../http/websocket_client/WebSocketDeflate.zig"); diff --git a/src/bun.js/test/expect.zig b/src/bun.js/test/expect.zig index a04e1fd0cd..ae517e0717 100644 --- a/src/bun.js/test/expect.zig +++ b/src/bun.js/test/expect.zig @@ -513,7 +513,7 @@ pub const Expect = struct { const left = try this.getValue(globalThis, thisValue, "toBe", "expected"); const not = this.flags.not; - var pass = right.isSameValue(left, globalThis); + var pass = try right.isSameValue(left, globalThis); if (not) pass = !pass; if (pass) return .js_undefined; @@ -594,7 +594,7 @@ pub const Expect = struct { const not = this.flags.not; var pass = false; - const actual_length = value.getLengthIfPropertyExistsInternal(globalThis); + const actual_length = try value.getLengthIfPropertyExistsInternal(globalThis); if (actual_length == std.math.inf(f64)) { var fmt = JSC.ConsoleObject.Formatter{ .globalThis = globalThis, .quote_strings = true }; @@ -652,15 +652,15 @@ pub const Expect = struct { }; if (list_value.jsTypeLoose().isArrayLike()) { - var itr = list_value.arrayIterator(globalThis); - while (itr.next()) |item| { + var itr = try list_value.arrayIterator(globalThis); + while (try itr.next()) |item| { // Confusingly, jest-extended uses `deepEqual`, instead of `toBe` if (try item.jestDeepEquals(expected, globalThis)) { pass = true; break; } } - } else if (list_value.isIterable(globalThis)) { + } else if (try list_value.isIterable(globalThis)) { var expected_entry = ExpectedEntry{ .globalThis = globalThis, .expected = expected, @@ -736,9 +736,9 @@ pub const Expect = struct { }; if (value.jsTypeLoose().isArrayLike()) { - var itr = value.arrayIterator(globalThis); - while (itr.next()) |item| { - if (item.isSameValue(expected, globalThis)) { + var itr = try value.arrayIterator(globalThis); + while (try itr.next()) |item| { + if (try item.isSameValue(expected, globalThis)) { pass = true; break; } @@ -756,7 +756,7 @@ pub const Expect = struct { } else if (value_string.len == 0 and expected_string.len == 0) { // edge case two empty strings are true pass = true; } - } else if (value.isIterable(globalThis)) { + } else if (try value.isIterable(globalThis)) { var expected_entry = ExpectedEntry{ .globalThis = globalThis, .expected = expected, @@ -770,7 +770,7 @@ pub const Expect = struct { item: JSValue, ) callconv(.C) void { const entry = bun.cast(*ExpectedEntry, entry_.?); - if (item.isSameValue(entry.expected, entry.globalThis)) { + if (item.isSameValue(entry.expected, entry.globalThis) catch return) { entry.pass.* = true; // TODO(perf): break out of the `forEach` when a match is found } @@ -828,11 +828,7 @@ pub const Expect = struct { return globalThis.throwInvalidArguments("Expected value must be an object\nReceived: {}", .{value.toFmt(&formatter)}); } - var pass = value.hasOwnPropertyValue(globalThis, expected); - - if (globalThis.hasException()) { - return .zero; - } + var pass = try value.hasOwnPropertyValue(globalThis, expected); if (not) pass = !pass; if (pass) return thisValue; @@ -880,18 +876,18 @@ pub const Expect = struct { const not = this.flags.not; var pass = brk: { - const count = expected.getLength(globalThis); + const count = try expected.getLength(globalThis); // jest-extended checks for truthiness before calling hasOwnProperty // 
https://github.com/jest-community/jest-extended/blob/711fdcc54d68c2b2c1992c7cfbdf0d0bd6be0f4d/src/matchers/toContainKeys.js#L1-L6 - if (!value.coerce(bool, globalThis)) break :brk count == 0; + if (!value.toBoolean()) break :brk count == 0; var i: u32 = 0; while (i < count) : (i += 1) { - const key = expected.getIndex(globalThis, i); + const key = try expected.getIndex(globalThis, i); - if (!value.hasOwnPropertyValue(globalThis, key)) { + if (!try value.hasOwnPropertyValue(globalThis, key)) { break :brk false; } } @@ -899,10 +895,6 @@ pub const Expect = struct { break :brk true; }; - if (globalThis.hasException()) { - return .zero; - } - if (not) pass = !pass; if (pass) return thisValue; @@ -951,16 +943,16 @@ pub const Expect = struct { const not = this.flags.not; var pass = false; - const count = expected.getLength(globalObject); + const count = try expected.getLength(globalObject); - var keys = value.keys(globalObject); - if (keys.getLength(globalObject) == count) { - var itr = keys.arrayIterator(globalObject); + var keys = try value.keys(globalObject); + if (try keys.getLength(globalObject) == count) { + var itr = try keys.arrayIterator(globalObject); outer: { - while (itr.next()) |item| { + while (try itr.next()) |item| { var i: u32 = 0; while (i < count) : (i += 1) { - const key = expected.getIndex(globalObject, i); + const key = try expected.getIndex(globalObject, i); if (try item.jestDeepEquals(key, globalObject)) break; } else break :outer; } @@ -1016,23 +1008,19 @@ pub const Expect = struct { const not = this.flags.not; var pass = false; - const count = expected.getLength(globalThis); + const count = try expected.getLength(globalThis); var i: u32 = 0; while (i < count) : (i += 1) { - const key = expected.getIndex(globalThis, i); + const key = try expected.getIndex(globalThis, i); - if (value.hasOwnPropertyValue(globalThis, key)) { + if (try value.hasOwnPropertyValue(globalThis, key)) { pass = true; break; } } - if (globalThis.hasException()) { - return .zero; - } - if (not) pass = !pass; if (pass) return thisValue; @@ -1078,9 +1066,9 @@ pub const Expect = struct { var pass = false; if (!value.isUndefinedOrNull()) { - const values = value.values(globalObject); - var itr = values.arrayIterator(globalObject); - while (itr.next()) |item| { + const values = try value.values(globalObject); + var itr = try values.arrayIterator(globalObject); + while (try itr.next()) |item| { if (try item.jestDeepEquals(expected, globalObject)) { pass = true; break; @@ -1136,14 +1124,14 @@ pub const Expect = struct { var pass = true; if (!value.isUndefinedOrNull()) { - const values = value.values(globalObject); - var itr = expected.arrayIterator(globalObject); - const count = values.getLength(globalObject); + const values = try value.values(globalObject); + var itr = try expected.arrayIterator(globalObject); + const count = try values.getLength(globalObject); - while (itr.next()) |item| { + while (try itr.next()) |item| { var i: u32 = 0; while (i < count) : (i += 1) { - const key = values.getIndex(globalObject, i); + const key = try values.getIndex(globalObject, i); if (try key.jestDeepEquals(item, globalObject)) break; } else { pass = false; @@ -1200,16 +1188,16 @@ pub const Expect = struct { var pass = false; if (!value.isUndefinedOrNull()) { - var values = value.values(globalObject); - var itr = expected.arrayIterator(globalObject); - const count = values.getLength(globalObject); - const expectedLength = expected.getLength(globalObject); + var values = try value.values(globalObject); + var itr = try 
expected.arrayIterator(globalObject); + const count = try values.getLength(globalObject); + const expectedLength = try expected.getLength(globalObject); if (count == expectedLength) { - while (itr.next()) |item| { + while (try itr.next()) |item| { var i: u32 = 0; while (i < count) : (i += 1) { - const key = values.getIndex(globalObject, i); + const key = try values.getIndex(globalObject, i); if (try key.jestDeepEquals(item, globalObject)) { pass = true; break; @@ -1270,14 +1258,14 @@ pub const Expect = struct { var pass = false; if (!value.isUndefinedOrNull()) { - var values = value.values(globalObject); - var itr = expected.arrayIterator(globalObject); - const count = values.getLength(globalObject); + var values = try value.values(globalObject); + var itr = try expected.arrayIterator(globalObject); + const count = try values.getLength(globalObject); - outer: while (itr.next()) |item| { + outer: while (try itr.next()) |item| { var i: u32 = 0; while (i < count) : (i += 1) { - const key = values.getIndex(globalObject, i); + const key = try values.getIndex(globalObject, i); if (try key.jestDeepEquals(item, globalObject)) { pass = true; break :outer; @@ -1340,8 +1328,8 @@ pub const Expect = struct { const expected_type = expected.jsType(); if (value_type.isArrayLike()) { - var itr = value.arrayIterator(globalThis); - while (itr.next()) |item| { + var itr = try value.arrayIterator(globalThis); + while (try itr.next()) |item| { if (try item.jestDeepEquals(expected, globalThis)) { pass = true; break; @@ -1366,7 +1354,7 @@ pub const Expect = struct { else strings.indexOf(value_string.slice(), expected_string.slice()) != null; } - } else if (value.isIterable(globalThis)) { + } else if (try value.isIterable(globalThis)) { var expected_entry = ExpectedEntry{ .globalThis = globalThis, .expected = expected, @@ -1685,7 +1673,7 @@ pub const Expect = struct { const value: JSValue = try this.getValue(globalThis, thisValue, "toHaveProperty", "path, value"); - if (!expected_property_path.isString() and !expected_property_path.isIterable(globalThis)) { + if (!expected_property_path.isString() and !try expected_property_path.isIterable(globalThis)) { return globalThis.throw("Expected path must be a string or an array", .{}); } @@ -1697,7 +1685,7 @@ pub const Expect = struct { var received_property: JSValue = .zero; if (pass) { - received_property = value.getIfPropertyExistsFromPath(globalThis, expected_property_path); + received_property = try value.getIfPropertyExistsFromPath(globalThis, expected_property_path); pass = received_property != .zero; } @@ -2290,7 +2278,7 @@ pub const Expect = struct { if (globalThis.hasException()) return .zero; // no partial match for this case - if (!expected_message.isSameValue(received_message, globalThis)) return .js_undefined; + if (!try expected_message.isSameValue(received_message, globalThis)) return .js_undefined; return this.throw(globalThis, signature, "\n\nExpected message: not {any}\n", .{expected_message.toFmt(&formatter)}); } @@ -2397,7 +2385,7 @@ pub const Expect = struct { const signature = comptime getSignature("toThrow", "expected", false); if (_received_message) |received_message| { - if (received_message.isSameValue(expected_message, globalThis)) return .js_undefined; + if (try received_message.isSameValue(expected_message, globalThis)) return .js_undefined; } // error: message from received error does not match expected error message. 
@@ -2916,7 +2904,7 @@ pub const Expect = struct { const prop_matchers = _prop_matchers; - if (!value.jestDeepMatch(prop_matchers, globalThis, true)) { + if (!try value.jestDeepMatch(prop_matchers, globalThis, true)) { // TODO: print diff with properties from propertyMatchers const signature = comptime getSignature(fn_name, "propertyMatchers", false); const fmt = signature ++ "\n\nExpected propertyMatchers to match properties from received object" ++ @@ -2986,11 +2974,11 @@ pub const Expect = struct { var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalThis, .quote_strings = true }; defer formatter.deinit(); - const actual_length = value.getLengthIfPropertyExistsInternal(globalThis); + const actual_length = try value.getLengthIfPropertyExistsInternal(globalThis); if (actual_length == std.math.inf(f64)) { if (value.jsTypeLoose().isObject()) { - if (value.isIterable(globalThis)) { + if (try value.isIterable(globalThis)) { var any_properties_in_iterator = false; value.forEach(globalThis, &any_properties_in_iterator, struct { pub fn anythingInIterator( @@ -3060,7 +3048,7 @@ pub const Expect = struct { incrementExpectCallCounter(); const not = this.flags.not; - var pass = value.isObjectEmpty(globalThis); + var pass = try value.isObjectEmpty(globalThis); if (not) pass = !pass; if (pass) return thisValue; @@ -3153,7 +3141,7 @@ pub const Expect = struct { incrementExpectCallCounter(); const not = this.flags.not; - var pass = value.jsType().isArray() and @as(i32, @intCast(value.getLength(globalThis))) == size.toInt32(); + var pass = value.jsType().isArray() and @as(i32, @intCast(try value.getLength(globalThis))) == size.toInt32(); if (not) pass = !pass; if (pass) return .js_undefined; @@ -4198,7 +4186,8 @@ pub const Expect = struct { return globalThis.throw("Expected value must be a mock function: {}", .{value}); } - var pass = calls.getLength(globalThis) > 0; + const calls_length = try calls.getLength(globalThis); + var pass = calls_length > 0; const not = this.flags.not; if (not) pass = !pass; @@ -4207,11 +4196,11 @@ pub const Expect = struct { // handle failure if (not) { const signature = comptime getSignature("toHaveBeenCalled", "", true); - return this.throw(globalThis, signature, "\n\n" ++ "Expected number of calls: 0\n" ++ "Received number of calls: {any}\n", .{calls.getLength(globalThis)}); + return this.throw(globalThis, signature, "\n\n" ++ "Expected number of calls: 0\n" ++ "Received number of calls: {any}\n", .{calls_length}); } const signature = comptime getSignature("toHaveBeenCalled", "", false); - return this.throw(globalThis, signature, "\n\n" ++ "Expected number of calls: \\>= 1\n" ++ "Received number of calls: {any}\n", .{calls.getLength(globalThis)}); + return this.throw(globalThis, signature, "\n\n" ++ "Expected number of calls: \\>= 1\n" ++ "Received number of calls: {any}\n", .{calls_length}); } pub fn toHaveBeenCalledOnce(this: *Expect, globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSValue { @@ -4229,7 +4218,8 @@ pub const Expect = struct { return globalThis.throw("Expected value must be a mock function: {}", .{value}); } - var pass = @as(i32, @intCast(calls.getLength(globalThis))) == 1; + const calls_length = try calls.getLength(globalThis); + var pass = calls_length == 1; const not = this.flags.not; if (not) pass = !pass; @@ -4238,11 +4228,11 @@ pub const Expect = struct { // handle failure if (not) { const signature = comptime getSignature("toHaveBeenCalledOnce", "expected", true); - return this.throw(globalThis, signature, "\n\n" ++ 
"Expected number of calls: not 1\n" ++ "Received number of calls: {d}\n", .{calls.getLength(globalThis)}); + return this.throw(globalThis, signature, "\n\n" ++ "Expected number of calls: not 1\n" ++ "Received number of calls: {d}\n", .{calls_length}); } const signature = comptime getSignature("toHaveBeenCalledOnce", "expected", false); - return this.throw(globalThis, signature, "\n\n" ++ "Expected number of calls: 1\n" ++ "Received number of calls: {d}\n", .{calls.getLength(globalThis)}); + return this.throw(globalThis, signature, "\n\n" ++ "Expected number of calls: 1\n" ++ "Received number of calls: {d}\n", .{calls_length}); } pub fn toHaveBeenCalledTimes(this: *Expect, globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSValue { @@ -4266,9 +4256,9 @@ pub const Expect = struct { return globalThis.throwInvalidArguments("toHaveBeenCalledTimes() requires 1 non-negative integer argument", .{}); } - const times = arguments[0].coerce(i32, globalThis); + const times = try arguments[0].coerce(i32, globalThis); - var pass = @as(i32, @intCast(calls.getLength(globalThis))) == times; + var pass = @as(i32, @intCast(try calls.getLength(globalThis))) == times; const not = this.flags.not; if (not) pass = !pass; @@ -4320,7 +4310,7 @@ pub const Expect = struct { const property_matchers = args[0]; - var pass = received_object.jestDeepMatch(property_matchers, globalThis, true); + var pass = try received_object.jestDeepMatch(property_matchers, globalThis, true); if (not) pass = !pass; if (pass) return .js_undefined; @@ -4360,20 +4350,20 @@ pub const Expect = struct { var pass = false; - if (calls.getLength(globalThis) > 0) { - var itr = calls.arrayIterator(globalThis); - while (itr.next()) |callItem| { + if (try calls.getLength(globalThis) > 0) { + var itr = try calls.arrayIterator(globalThis); + while (try itr.next()) |callItem| { if (callItem == .zero or !callItem.jsType().isArray()) { return globalThis.throw("Expected value must be a mock function with calls: {}", .{value}); } - if (callItem.getLength(globalThis) != arguments.len) { + if (try callItem.getLength(globalThis) != arguments.len) { continue; } - var callItr = callItem.arrayIterator(globalThis); + var callItr = try callItem.arrayIterator(globalThis); var match = true; - while (callItr.next()) |callArg| { + while (try callItr.next()) |callArg| { if (!try callArg.jestDeepEquals(arguments[callItr.i - 1], globalThis)) { match = false; break; @@ -4417,23 +4407,23 @@ pub const Expect = struct { return globalThis.throw("Expected value must be a mock function: {}", .{value}); } - const totalCalls = @as(u32, @intCast(calls.getLength(globalThis))); + const totalCalls: u32 = @truncate(try calls.getLength(globalThis)); var lastCallValue: JSValue = .zero; var pass = totalCalls > 0; if (pass) { - lastCallValue = calls.getIndex(globalThis, totalCalls - 1); + lastCallValue = try calls.getIndex(globalThis, totalCalls - 1); - if (lastCallValue == .zero or !lastCallValue.jsType().isArray()) { + if (!lastCallValue.jsType().isArray()) { return globalThis.throw("Expected value must be a mock function with calls: {}", .{value}); } - if (lastCallValue.getLength(globalThis) != arguments.len) { + if (try lastCallValue.getLength(globalThis) != arguments.len) { pass = false; } else { - var itr = lastCallValue.arrayIterator(globalThis); - while (itr.next()) |callArg| { + var itr = try lastCallValue.arrayIterator(globalThis); + while (try itr.next()) |callArg| { if (!try callArg.jestDeepEquals(arguments[itr.i - 1], globalThis)) { pass = false; break; @@ -4476,28 
+4466,28 @@ pub const Expect = struct { return globalThis.throw("Expected value must be a mock function: {}", .{value}); } - const nthCallNum = if (arguments.len > 0 and arguments[0].isUInt32AsAnyInt()) arguments[0].coerce(i32, globalThis) else 0; + const nthCallNum = if (arguments.len > 0 and arguments[0].isUInt32AsAnyInt()) try arguments[0].coerce(i32, globalThis) else 0; if (nthCallNum < 1) { return globalThis.throwInvalidArguments("toHaveBeenNthCalledWith() requires a positive integer argument", .{}); } - const totalCalls = calls.getLength(globalThis); + const totalCalls = try calls.getLength(globalThis); var nthCallValue: JSValue = .zero; var pass = totalCalls >= nthCallNum; if (pass) { - nthCallValue = calls.getIndex(globalThis, @as(u32, @intCast(nthCallNum)) - 1); + nthCallValue = try calls.getIndex(globalThis, @as(u32, @intCast(nthCallNum)) - 1); - if (nthCallValue == .zero or !nthCallValue.jsType().isArray()) { + if (!nthCallValue.jsType().isArray()) { return globalThis.throw("Expected value must be a mock function with calls: {}", .{value}); } - if (nthCallValue.getLength(globalThis) != (arguments.len - 1)) { + if (try nthCallValue.getLength(globalThis) != (arguments.len - 1)) { pass = false; } else { - var itr = nthCallValue.arrayIterator(globalThis); - while (itr.next()) |callArg| { + var itr = try nthCallValue.arrayIterator(globalThis); + while (try itr.next()) |callArg| { if (!try callArg.jestDeepEquals(arguments[itr.i], globalThis)) { pass = false; break; @@ -4556,7 +4546,7 @@ pub const Expect = struct { return globalThis.throwInvalidArguments(name ++ "() requires 1 non-negative integer argument", .{}); } - break :brk arguments[0].coerce(i32, globalThis); + break :brk try arguments[0].coerce(i32, globalThis); }; var pass = false; @@ -4567,7 +4557,7 @@ pub const Expect = struct { index, ); - const total_count = returns.getLength(globalThis); + const total_count = try returns.getLength(globalThis); const return_status: ReturnStatus = brk: { // Returns is an array of: @@ -5411,7 +5401,7 @@ pub const ExpectCustomAsymmetricMatcher = struct { const args = callFrame.arguments(); const array = try JSValue.createEmptyArray(globalThis, args.len); for (args, 0..) 
|arg, i| { - array.putIndex(globalThis, @truncate(i), arg); + try array.putIndex(globalThis, @truncate(i), arg); } js.capturedArgsSetCached(instance_jsvalue, globalThis, array); array.ensureStillAlive(); @@ -5445,7 +5435,7 @@ pub const ExpectCustomAsymmetricMatcher = struct { captured_args.ensureStillAlive(); // prepare the args array as `[received, ...captured_args]` - const args_count = captured_args.getLength(globalThis); + const args_count = captured_args.getLength(globalThis) catch return false; var allocator = std.heap.stackFallback(8 * @sizeOf(JSValue), globalThis.allocator()); var matcher_args = std.ArrayList(JSValue).initCapacity(allocator.get(), args_count + 1) catch { globalThis.throwOutOfMemory() catch {}; @@ -5453,7 +5443,7 @@ return .zero; }; matcher_args.appendAssumeCapacity(received); for (0..args_count) |i| { - matcher_args.appendAssumeCapacity(captured_args.getIndex(globalThis, @truncate(i))); + matcher_args.appendAssumeCapacity(captured_args.getIndex(globalThis, @truncate(i)) catch return false); } return Expect.executeCustomMatcher(globalThis, matcher_name, matcher_fn, matcher_args.items, this.flags, true) catch false; @@ -5466,34 +5456,30 @@ return JSValue.jsBoolean(matched); } + fn maybeClear(comptime dontThrow: bool, globalThis: *JSGlobalObject, err: bun.JSError) bun.JSError!bool { + if (dontThrow) { + globalThis.clearException(); + return false; + } + return err; + } + /// Calls a custom implementation (if provided) to stringify this asymmetric matcher, and returns true if it was provided and it succeeded pub fn customPrint(_: *ExpectCustomAsymmetricMatcher, thisValue: JSValue, globalThis: *JSGlobalObject, writer: anytype, comptime dontThrow: bool) !bool { const matcher_fn: JSValue = js.matcherFnGetCached(thisValue) orelse return false; - if (matcher_fn.get_unsafe(globalThis, "toAsymmetricMatcher")) |fn_value| { + if (matcher_fn.get(globalThis, "toAsymmetricMatcher") catch |e| return maybeClear(dontThrow, globalThis, e)) |fn_value| { if (fn_value.jsType().isFunction()) { const captured_args: JSValue = js.capturedArgsGetCached(thisValue) orelse return false; var stack_fallback = std.heap.stackFallback(256, globalThis.allocator()); - const args_len = captured_args.getLength(globalThis); + const args_len = captured_args.getLength(globalThis) catch |e| return maybeClear(dontThrow, globalThis, e); var args = try std.ArrayList(JSValue).initCapacity(stack_fallback.get(), args_len); - var iter = captured_args.arrayIterator(globalThis); - while (iter.next()) |arg| { + var iter = captured_args.arrayIterator(globalThis) catch |e| return maybeClear(dontThrow, globalThis, e); + while (iter.next() catch |e| return maybeClear(dontThrow, globalThis, e)) |arg| { args.appendAssumeCapacity(arg); } - const result = matcher_fn.call(globalThis, thisValue, args.items) catch |err| { - if (dontThrow) { - globalThis.clearException(); - return false; - } - return err; - }; - try writer.print("{}", .{result.toBunString(globalThis) catch { - if (dontThrow) { - globalThis.clearException(); - return false; - } - return error.JSError; - }}); + const result = matcher_fn.call(globalThis, thisValue, args.items) catch |e| return maybeClear(dontThrow, globalThis, e); + try writer.print("{}", .{result.toBunString(globalThis) catch |e| return maybeClear(dontThrow, globalThis, e)}); } } return false; @@ -5659,7 +5645,7 @@ pub const ExpectMatcherUtils = struct { return globalThis.throw("matcherHint: options must be an 
object (or undefined)", .{}); } if (try options.get(globalThis, "isNot")) |val| { - is_not = val.coerce(bool, globalThis); + is_not = val.toBoolean(); } if (try options.get(globalThis, "comment")) |val| { comment = try val.toJSString(globalThis); diff --git a/src/bun.js/test/jest.zig b/src/bun.js/test/jest.zig index 95c5e6d358..8def3c0735 100644 --- a/src/bun.js/test/jest.zig +++ b/src/bun.js/test/jest.zig @@ -36,6 +36,7 @@ pub const Tag = enum(u3) { only, skip, todo, + skipped_because_label, }; const debug = Output.scoped(.jest, false); var max_test_id_for_debugger: u32 = 0; @@ -84,9 +85,24 @@ pub const TestRunner = struct { filter_buffer: MutableString, unhandled_errors_between_tests: u32 = 0, + summary: Summary = Summary{}, pub const Drainer = JSC.AnyTask.New(TestRunner, drain); + pub const Summary = struct { + pass: u32 = 0, + expectations: u32 = 0, + skip: u32 = 0, + todo: u32 = 0, + fail: u32 = 0, + files: u32 = 0, + skipped_because_label: u32 = 0, + + pub fn didLabelFilterOutAllTests(this: *const Summary) bool { + return this.skipped_because_label > 0 and (this.pass + this.skip + this.todo + this.fail + this.expectations) == 0; + } + }; + pub fn onTestTimeout(this: *TestRunner, now: *const bun.timespec, vm: *VirtualMachine) void { _ = vm; // autofix this.event_loop_timer.state = .FIRED; @@ -178,6 +194,7 @@ pub const TestRunner = struct { onTestPass: OnTestUpdate, onTestFail: OnTestUpdate, onTestSkip: OnTestUpdate, + onTestFilteredOut: OnTestUpdate, // when a test is filtered out by a label onTestTodo: OnTestUpdate, }; @@ -201,6 +218,11 @@ pub const TestRunner = struct { this.callback.onTestTodo(this.callback, test_id, file, label, 0, 0, parent); } + pub fn reportFilteredOut(this: *TestRunner, test_id: Test.ID, file: string, label: string, parent: ?*DescribeScope) void { + this.tests.items(.status)[test_id] = .skip; + this.callback.onTestFilteredOut(this.callback, test_id, file, label, 0, 0, parent); + } + pub fn addTestCount(this: *TestRunner, count: u32) u32 { this.tests.ensureUnusedCapacity(this.allocator, count) catch unreachable; const start = @as(Test.ID, @truncate(this.tests.len)); @@ -250,6 +272,7 @@ pub const TestRunner = struct { fail, skip, todo, + skipped_because_label, /// A test marked as `.failing()` actually passed fail_because_failing_test_passed, fail_because_todo_passed, @@ -279,7 +302,7 @@ pub const Jest = struct { return globalThis.throwInvalidArgumentType(name, "callback", "function"); } - if (function.getLength(globalThis) > 0) { + if (try function.getLength(globalThis) > 0) { return globalThis.throw("done() callback is not implemented in global hooks yet. 
Please make your function take no arguments", .{}); } @@ -499,7 +522,7 @@ pub const Jest = struct { return globalObject.throw("setTimeout() expects a number (milliseconds)", .{}); } - const timeout_ms: u32 = @intCast(@max(arguments[0].coerce(i32, globalObject), 0)); + const timeout_ms: u32 = @intCast(@max(try arguments[0].coerce(i32, globalObject), 0)); if (Jest.runner) |test_runner| { test_runner.default_timeout_override = timeout_ms; @@ -735,7 +758,7 @@ pub const TestScope = struct { switch (promise.status(vm.global.vm())) { .rejected => { if (!promise.isHandled(vm.global.vm()) and this.tag != .fail) { - _ = vm.unhandledRejection(vm.global, promise.result(vm.global.vm()), promise.asValue()); + vm.unhandledRejection(vm.global, promise.result(vm.global.vm()), promise.asValue()); } return switch (this.tag) { @@ -913,6 +936,7 @@ pub const DescribeScope = struct { pub const beforeAll = createCallback(.beforeAll); pub const beforeEach = createCallback(.beforeEach); + // TODO this should return JSError pub fn execCallback(this: *DescribeScope, globalObject: *JSGlobalObject, comptime hook: LifecycleHook) ?JSValue { var hooks = &@field(this, @tagName(hook) ++ "s"); defer { @@ -933,7 +957,7 @@ pub const DescribeScope = struct { } const vm = VirtualMachine.get(); - var result: JSValue = switch (cb.getLength(globalObject)) { + var result: JSValue = switch (cb.getLength(globalObject) catch |e| return globalObject.takeException(e)) { // TODO is this right? 0 => callJSFunctionForTestRunner(vm, globalObject, cb, &.{}), else => brk: { this.done = false; @@ -1095,7 +1119,7 @@ pub const DescribeScope = struct { switch (prom.status(globalObject.vm())) { .fulfilled => {}, else => { - _ = globalObject.bunVM().unhandledRejection(globalObject, prom.result(globalObject.vm()), prom.asValue()); + globalObject.bunVM().unhandledRejection(globalObject, prom.result(globalObject.vm()), prom.asValue()); return .js_undefined; }, } @@ -1611,6 +1635,7 @@ pub const TestRunnerTask = struct { ); }, .skip => Jest.runner.?.reportSkip(test_id, this.source_file_path, test_.label, describe), + .skipped_because_label => Jest.runner.?.reportFilteredOut(test_id, this.source_file_path, test_.label, describe), .todo => Jest.runner.?.reportTodo(test_id, this.source_file_path, test_.label, describe), .fail_because_todo_passed => |count| { Output.prettyErrorln(" ^ this test is marked as todo but passes. 
Remove `.todo` or check that test is correct.", .{}); @@ -1672,6 +1697,7 @@ pub const Result = union(TestRunner.Test.Status) { fail: u32, skip: void, todo: void, + skipped_because_label: void, fail_because_failing_test_passed: u32, fail_because_todo_passed: u32, fail_because_expected_has_assertions: void, @@ -1775,25 +1801,25 @@ inline fn createScope( var timeout_ms: u32 = std.math.maxInt(u32); if (options.isNumber()) { - timeout_ms = @as(u32, @intCast(@max(args[2].coerce(i32, globalThis), 0))); + timeout_ms = @as(u32, @intCast(@max(try args[2].coerce(i32, globalThis), 0))); } else if (options.isObject()) { if (try options.get(globalThis, "timeout")) |timeout| { if (!timeout.isNumber()) { return globalThis.throwPretty("{s} expects timeout to be a number", .{signature}); } - timeout_ms = @as(u32, @intCast(@max(timeout.coerce(i32, globalThis), 0))); + timeout_ms = @as(u32, @intCast(@max(try timeout.coerce(i32, globalThis), 0))); } if (try options.get(globalThis, "retry")) |retries| { if (!retries.isNumber()) { return globalThis.throwPretty("{s} expects retry to be a number", .{signature}); } - // TODO: retry_count = @intCast(u32, @max(retries.coerce(i32, globalThis), 0)); + // TODO: retry_count = @intCast(u32, @max(try retries.coerce(i32, globalThis), 0)); } if (try options.get(globalThis, "repeats")) |repeats| { if (!repeats.isNumber()) { return globalThis.throwPretty("{s} expects repeats to be a number", .{signature}); } - // TODO: repeat_count = @intCast(u32, @max(repeats.coerce(i32, globalThis), 0)); + // TODO: repeat_count = @intCast(u32, @max(try repeats.coerce(i32, globalThis), 0)); } } else if (!options.isEmptyOrUndefinedOrNull()) { return globalThis.throwPretty("{s} expects options to be a number or object", .{signature}); @@ -1814,15 +1840,21 @@ inline fn createScope( if (is_test) { if (!is_skip) { - if (Jest.runner.?.filter_regex) |regex| { - var buffer: bun.MutableString = Jest.runner.?.filter_buffer; - buffer.reset(); - appendParentLabel(&buffer, parent) catch @panic("Bun ran out of memory while filtering tests"); - buffer.append(label) catch unreachable; - const str = bun.String.fromBytes(buffer.slice()); - is_skip = !regex.matches(str); - if (is_skip) { - tag_to_use = .skip; + if (Jest.runner) |runner| { + if (runner.filter_regex) |regex| { + var buffer: bun.MutableString = runner.filter_buffer; + buffer.reset(); + appendParentLabel(&buffer, parent) catch @panic("Bun ran out of memory while filtering tests"); + buffer.append(label) catch unreachable; + const str = bun.String.fromBytes(buffer.slice()); + is_skip = !regex.matches(str); + if (is_skip) { + tag_to_use = .skipped_because_label; + if (comptime is_test) { + // These won't get counted for describe scopes, which means the process will not exit with 1. 
+ runner.summary.skipped_because_label += 1; + } + } } } } @@ -1834,7 +1866,7 @@ inline fn createScope( function.protect(); } - const func_params_length = function.getLength(globalThis); + const func_params_length = try function.getLength(globalThis); var arg_size: usize = 0; var has_callback = false; if (func_params_length > 0) { @@ -1904,7 +1936,7 @@ inline fn createIfScope( .pass => .{ Scope.skip, Scope.call }, .fail => @compileError("unreachable"), .only => @compileError("unreachable"), - .skip => .{ Scope.call, Scope.skip }, + .skipped_because_label, .skip => .{ Scope.call, Scope.skip }, .todo => .{ Scope.call, Scope.todo }, }; @@ -1962,7 +1994,7 @@ fn formatLabel(globalThis: *JSGlobalObject, label: string, function_args: []JSVa 'j', 'o' => { var str = bun.String.empty; defer str.deref(); - current_arg.jsonStringify(globalThis, 0, &str); + try current_arg.jsonStringify(globalThis, 0, &str); const owned_slice = str.toOwnedSlice(allocator) catch bun.outOfMemory(); defer allocator.free(owned_slice); list.appendSlice(allocator, owned_slice) catch bun.outOfMemory(); @@ -2022,25 +2054,25 @@ fn eachBind(globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSVa var timeout_ms: u32 = std.math.maxInt(u32); if (options.isNumber()) { - timeout_ms = @as(u32, @intCast(@max(args[2].coerce(i32, globalThis), 0))); + timeout_ms = @as(u32, @intCast(@max(try args[2].coerce(i32, globalThis), 0))); } else if (options.isObject()) { if (try options.get(globalThis, "timeout")) |timeout| { if (!timeout.isNumber()) { return globalThis.throwPretty("{s} expects timeout to be a number", .{signature}); } - timeout_ms = @as(u32, @intCast(@max(timeout.coerce(i32, globalThis), 0))); + timeout_ms = @as(u32, @intCast(@max(try timeout.coerce(i32, globalThis), 0))); } if (try options.get(globalThis, "retry")) |retries| { if (!retries.isNumber()) { return globalThis.throwPretty("{s} expects retry to be a number", .{signature}); } - // TODO: retry_count = @intCast(u32, @max(retries.coerce(i32, globalThis), 0)); + // TODO: retry_count = @intCast(u32, @max(try retries.coerce(i32, globalThis), 0)); } if (try options.get(globalThis, "repeats")) |repeats| { if (!repeats.isNumber()) { return globalThis.throwPretty("{s} expects repeats to be a number", .{signature}); } - // TODO: repeat_count = @intCast(u32, @max(repeats.coerce(i32, globalThis), 0)); + // TODO: repeat_count = @intCast(u32, @max(try repeats.coerce(i32, globalThis), 0)); } } else if (!options.isEmptyOrUndefinedOrNull()) { return globalThis.throwPretty("{s} expects options to be a number or object", .{signature}); @@ -2062,16 +2094,16 @@ fn eachBind(globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSVa return .js_undefined; } - var iter = array.arrayIterator(globalThis); + var iter = try array.arrayIterator(globalThis); var test_idx: usize = 0; - while (iter.next()) |item| { - const func_params_length = function.getLength(globalThis); + while (try iter.next()) |item| { + const func_params_length = try function.getLength(globalThis); const item_is_array = !item.isEmptyOrUndefinedOrNull() and item.jsType().isArray(); var arg_size: usize = 1; if (item_is_array) { - arg_size = item.getLength(globalThis); + arg_size = try item.getLength(globalThis); } // add room for callback function @@ -2085,8 +2117,8 @@ fn eachBind(globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSVa if (item_is_array) { // Spread array as args - var item_iter = item.arrayIterator(globalThis); - while (item_iter.next()) |array_item| { + var item_iter = try 
item.arrayIterator(globalThis); + while (try item_iter.next()) |array_item| { if (array_item == .zero) { allocator.free(function_args); break; @@ -2129,6 +2161,11 @@ fn eachBind(globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSVa buffer.append(formattedLabel) catch unreachable; const str = bun.String.fromBytes(buffer.slice()); is_skip = !regex.matches(str); + if (is_skip) { + if (each_data.is_test) { + Jest.runner.?.summary.skipped_because_label += 1; + } + } } if (is_skip) { @@ -2206,7 +2243,7 @@ fn callJSFunctionForTestRunner(vm: *JSC.VirtualMachine, globalObject: *JSGlobalO vm.eventLoop().enter(); defer vm.eventLoop().exit(); - globalObject.clearTerminationException(); + globalObject.clearTerminationException(); // TODO this is sus return function.call(globalObject, .js_undefined, args) catch |err| globalObject.takeException(err); } diff --git a/src/bun.js/test/pretty_format.zig b/src/bun.js/test/pretty_format.zig index daa4c316f1..b544956606 100644 --- a/src/bun.js/test/pretty_format.zig +++ b/src/bun.js/test/pretty_format.zig @@ -107,7 +107,7 @@ pub const JestPrettyFormat = struct { .globalThis = global, .quote_strings = options.quote_strings, }; - const tag = JestPrettyFormat.Formatter.Tag.get(vals[0], global); + const tag = try JestPrettyFormat.Formatter.Tag.get(vals[0], global); var unbuffered_writer = if (comptime Writer != RawWriter) writer.context.unbuffered_writer.context.writer() @@ -197,7 +197,7 @@ pub const JestPrettyFormat = struct { } any = true; - tag = JestPrettyFormat.Formatter.Tag.get(this_value, global); + tag = try JestPrettyFormat.Formatter.Tag.get(this_value, global); if (tag.tag == .String and fmt.remaining_values.len > 0) { tag.tag = .StringPossiblyFormatted; } @@ -219,7 +219,7 @@ pub const JestPrettyFormat = struct { _ = writer.write(" ") catch 0; } any = true; - tag = JestPrettyFormat.Formatter.Tag.get(this_value, global); + tag = try JestPrettyFormat.Formatter.Tag.get(this_value, global); if (tag.tag == .String and fmt.remaining_values.len > 0) { tag.tag = .StringPossiblyFormatted; } @@ -358,12 +358,12 @@ pub const JestPrettyFormat = struct { cell: JSValue.JSType = .Cell, }; - pub fn get(value: JSValue, globalThis: *JSGlobalObject) Result { - switch (@intFromEnum(value)) { - 0, 0xa => return Result{ + pub fn get(value: JSValue, globalThis: *JSGlobalObject) bun.JSError!Result { + switch (value) { + .zero, .js_undefined => return Result{ .tag = .Undefined, }, - 0x2 => return Result{ + .null => return Result{ .tag = .Null, }, else => {}, @@ -439,11 +439,11 @@ pub const JestPrettyFormat = struct { // Is this a react element? 
if (js_type.isObject() and js_type != .ProxyObject) { - if (value.getOwnTruthy(globalThis, "$$typeof")) |typeof_symbol| { + if (try value.getOwnTruthy(globalThis, "$$typeof")) |typeof_symbol| { var reactElement = ZigString.init("react.element"); var react_fragment = ZigString.init("react.fragment"); - if (JSValue.isSameValue(typeof_symbol, JSValue.symbolFor(globalThis, &reactElement), globalThis) or JSValue.isSameValue(typeof_symbol, JSValue.symbolFor(globalThis, &react_fragment), globalThis)) { + if (try typeof_symbol.isSameValue(.symbolFor(globalThis, &reactElement), globalThis) or try typeof_symbol.isSameValue(.symbolFor(globalThis, &react_fragment), globalThis)) { return .{ .tag = .JSX, .cell = js_type }; } } @@ -576,7 +576,7 @@ pub const JestPrettyFormat = struct { Tag.Integer => this.printAs(Tag.Integer, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors) catch return, // undefined is overloaded to mean the '%o" field - Tag.Undefined => this.format(Tag.get(next_value, globalThis), Writer, writer_, next_value, globalThis, enable_ansi_colors) catch return, + Tag.Undefined => this.format(Tag.get(next_value, globalThis) catch return, Writer, writer_, next_value, globalThis, enable_ansi_colors) catch return, else => unreachable, } @@ -677,10 +677,10 @@ pub const JestPrettyFormat = struct { pub fn forEach(_: *JSC.VM, globalObject: *JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void { var this: *@This() = bun.cast(*@This(), ctx orelse return); if (this.formatter.failed) return; - const key = JSC.JSObject.getIndex(nextValue, globalObject, 0); - const value = JSC.JSObject.getIndex(nextValue, globalObject, 1); + const key = JSC.JSObject.getIndex(nextValue, globalObject, 0) catch return; + const value = JSC.JSObject.getIndex(nextValue, globalObject, 1) catch return; this.formatter.writeIndent(Writer, this.writer) catch return; - const key_tag = Tag.get(key, globalObject); + const key_tag = Tag.get(key, globalObject) catch return; this.formatter.format( key_tag, @@ -691,7 +691,7 @@ pub const JestPrettyFormat = struct { enable_ansi_colors, ) catch return; this.writer.writeAll(" => ") catch return; - const value_tag = Tag.get(value, globalObject); + const value_tag = Tag.get(value, globalObject) catch return; this.formatter.format( value_tag, Writer, @@ -714,7 +714,7 @@ pub const JestPrettyFormat = struct { var this: *@This() = bun.cast(*@This(), ctx orelse return); if (this.formatter.failed) return; this.formatter.writeIndent(Writer, this.writer) catch return; - const key_tag = Tag.get(nextValue, globalObject); + const key_tag = Tag.get(nextValue, globalObject) catch return; this.formatter.format( key_tag, Writer, @@ -794,7 +794,7 @@ pub const JestPrettyFormat = struct { .failed = false, }; - const tag = Tag.get(value, globalThis); + const tag = Tag.get(value, globalThis) catch return; if (tag.cell.isHidden()) return; if (ctx.i == 0) { @@ -1137,7 +1137,7 @@ pub const JestPrettyFormat = struct { } }, .Array => { - const len = @as(u32, @truncate(value.getLength(this.globalThis))); + const len: u32 = @truncate(try value.getLength(this.globalThis)); if (len == 0) { writer.writeAll("[]"); this.addForNewLine(2); @@ -1163,7 +1163,7 @@ pub const JestPrettyFormat = struct { { const element = JSValue.fromRef(CAPI.JSObjectGetPropertyAtIndex(this.globalThis, ref, 0, null)); - const tag = Tag.get(element, this.globalThis); + const tag = try Tag.get(element, this.globalThis); was_good_time = was_good_time or !tag.tag.isPrimitive() or this.goodTimeForANewLine(); @@ -1194,7 
+1194,7 @@ pub const JestPrettyFormat = struct { this.writeIndent(Writer, writer_) catch unreachable; const element = JSValue.fromRef(CAPI.JSObjectGetPropertyAtIndex(this.globalThis, ref, i, null)); - const tag = Tag.get(element, this.globalThis); + const tag = try Tag.get(element, this.globalThis); try this.format(tag, Writer, writer_, element, this.globalThis, enable_ansi_colors); @@ -1260,7 +1260,7 @@ pub const JestPrettyFormat = struct { }; return; } else if (value.as(JSC.DOMFormData) != null) { - const toJSONFunction = value.get_unsafe(this.globalThis, "toJSON").?; + const toJSONFunction = (try value.get(this.globalThis, "toJSON")).?; this.addForNewLine("FormData (entries) ".len); writer.writeAll(comptime Output.prettyFmt("FormData (entries) ", enable_ansi_colors)); @@ -1341,7 +1341,7 @@ pub const JestPrettyFormat = struct { writer.writeAll(comptime Output.prettyFmt("" ++ fmt ++ "", enable_ansi_colors)); }, .Map => { - const length_value = value.get_unsafe(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0); + const length_value = try value.get(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0); const length = length_value.toInt32(); const prev_quote_strings = this.quote_strings; @@ -1369,7 +1369,7 @@ pub const JestPrettyFormat = struct { writer.writeAll("\n"); }, .Set => { - const length_value = value.get_unsafe(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0); + const length_value = try value.get(this.globalThis, "size") orelse JSC.JSValue.jsNumberFromInt32(0); const length = length_value.toInt32(); const prev_quote_strings = this.quote_strings; @@ -1402,7 +1402,7 @@ pub const JestPrettyFormat = struct { var str = bun.String.empty; defer str.deref(); - value.jsonStringify(this.globalThis, this.indent, &str); + try value.jsonStringify(this.globalThis, this.indent, &str); this.addForNewLine(str.length()); if (jsType == .JSDate) { // in the code for printing dates, it never exceeds this amount @@ -1421,7 +1421,7 @@ pub const JestPrettyFormat = struct { }, .Event => { const event_type_value: JSValue = brk: { - const value_: JSValue = value.get_unsafe(this.globalThis, "type") orelse break :brk .js_undefined; + const value_: JSValue = try value.get(this.globalThis, "type") orelse break :brk .js_undefined; if (value_.isString()) { break :brk value_; } @@ -1465,7 +1465,7 @@ pub const JestPrettyFormat = struct { .{}, ); - const tag = Tag.get(message_value, this.globalThis); + const tag = try Tag.get(message_value, this.globalThis); try this.format(tag, Writer, writer_, message_value, this.globalThis, enable_ansi_colors); writer.writeAll(", \n"); } @@ -1479,7 +1479,7 @@ pub const JestPrettyFormat = struct { .{}, ); const data: JSValue = (try value.fastGet(this.globalThis, .data)) orelse .js_undefined; - const tag = Tag.get(data, this.globalThis); + const tag = try Tag.get(data, this.globalThis); if (tag.cell.isStringLike()) { try this.format(tag, Writer, writer_, data, this.globalThis, enable_ansi_colors); @@ -1496,7 +1496,7 @@ pub const JestPrettyFormat = struct { .{}, ); - const tag = Tag.get(data, this.globalThis); + const tag = try Tag.get(data, this.globalThis); try this.format(tag, Writer, writer_, data, this.globalThis, enable_ansi_colors); writer.writeAll("\n"); } @@ -1521,8 +1521,8 @@ pub const JestPrettyFormat = struct { defer if (tag_name_slice.isAllocated()) tag_name_slice.deinit(); - if (value.get_unsafe(this.globalThis, "type")) |type_value| { - const _tag = Tag.get(type_value, this.globalThis); + if (try value.get(this.globalThis, 
"type")) |type_value| { + const _tag = try Tag.get(type_value, this.globalThis); if (_tag.cell == .Symbol) {} else if (_tag.cell.isStringLike()) { try type_value.toZigString(&tag_name_str, this.globalThis); @@ -1551,7 +1551,7 @@ pub const JestPrettyFormat = struct { writer.writeAll(tag_name_slice.slice()); if (enable_ansi_colors) writer.writeAll(comptime Output.prettyFmt("", enable_ansi_colors)); - if (value.get_unsafe(this.globalThis, "key")) |key_value| { + if (try value.get(this.globalThis, "key")) |key_value| { if (!key_value.isUndefinedOrNull()) { if (needs_space) writer.writeAll(" key=") @@ -1562,13 +1562,13 @@ pub const JestPrettyFormat = struct { this.quote_strings = true; defer this.quote_strings = old_quote_strings; - try this.format(Tag.get(key_value, this.globalThis), Writer, writer_, key_value, this.globalThis, enable_ansi_colors); + try this.format(try Tag.get(key_value, this.globalThis), Writer, writer_, key_value, this.globalThis, enable_ansi_colors); needs_space = true; } } - if (value.get_unsafe(this.globalThis, "props")) |props| { + if (try value.get(this.globalThis, "props")) |props| { const prev_quote_strings = this.quote_strings; defer this.quote_strings = prev_quote_strings; this.quote_strings = true; @@ -1581,7 +1581,7 @@ pub const JestPrettyFormat = struct { }).init(this.globalThis, props_obj); defer props_iter.deinit(); - const children_prop = props.get_unsafe(this.globalThis, "children"); + const children_prop = try props.get(this.globalThis, "children"); if (props_iter.len > 0) { { this.indent += 1; @@ -1593,7 +1593,7 @@ pub const JestPrettyFormat = struct { continue; const property_value = props_iter.value; - const tag = Tag.get(property_value, this.globalThis); + const tag = try Tag.get(property_value, this.globalThis); if (tag.cell.isHidden()) continue; @@ -1639,7 +1639,7 @@ pub const JestPrettyFormat = struct { } if (children_prop) |children| { - const tag = Tag.get(children, this.globalThis); + const tag = try Tag.get(children, this.globalThis); const print_children = switch (tag.tag) { .String, .JSX, .Array => true, @@ -1674,14 +1674,14 @@ pub const JestPrettyFormat = struct { this.indent += 1; this.writeIndent(Writer, writer_) catch unreachable; defer this.indent -|= 1; - try this.format(Tag.get(children, this.globalThis), Writer, writer_, children, this.globalThis, enable_ansi_colors); + try this.format(try Tag.get(children, this.globalThis), Writer, writer_, children, this.globalThis, enable_ansi_colors); } writer.writeAll("\n"); this.writeIndent(Writer, writer_) catch unreachable; }, .Array => { - const length = children.getLength(this.globalThis); + const length = try children.getLength(this.globalThis); if (length == 0) break :print_children; writer.writeAll(">\n"); @@ -1696,8 +1696,8 @@ pub const JestPrettyFormat = struct { var j: usize = 0; while (j < length) : (j += 1) { - const child = JSC.JSObject.getIndex(children, this.globalThis, @as(u32, @intCast(j))); - try this.format(Tag.get(child, this.globalThis), Writer, writer_, child, this.globalThis, enable_ansi_colors); + const child = try JSC.JSObject.getIndex(children, this.globalThis, @as(u32, @intCast(j))); + try this.format(try Tag.get(child, this.globalThis), Writer, writer_, child, this.globalThis, enable_ansi_colors); if (j + 1 < length) { writer.writeAll("\n"); this.writeIndent(Writer, writer_) catch unreachable; @@ -1764,7 +1764,7 @@ pub const JestPrettyFormat = struct { .parent = value, }; - value.forEachPropertyOrdered(this.globalThis, &iter, Iterator.forEach); + try 
value.forEachPropertyOrdered(this.globalThis, &iter, Iterator.forEach); if (iter.i == 0) { var object_name = ZigString.Empty; diff --git a/src/bun.js/test/snapshot.zig b/src/bun.js/test/snapshot.zig index 897db792bd..bdb21bf784 100644 --- a/src/bun.js/test/snapshot.zig +++ b/src/bun.js/test/snapshot.zig @@ -13,7 +13,7 @@ const VirtualMachine = JSC.VirtualMachine; const Expect = @import("./expect.zig").Expect; pub const Snapshots = struct { - const file_header = "// Bun Snapshot v1, https://goo.gl/fbAQLP\n"; + const file_header = "// Bun Snapshot v1, https://bun.sh/docs/test/snapshots\n"; const snapshots_dir_name = "__snapshots__" ++ [_]u8{std.fs.path.sep}; pub const ValuesHashMap = std.HashMap(usize, string, bun.IdentityContext(usize), std.hash_map.default_max_load_percentage); diff --git a/src/bun.js/uuid.zig b/src/bun.js/uuid.zig index 3c24955d01..5f803dd4f1 100644 --- a/src/bun.js/uuid.zig +++ b/src/bun.js/uuid.zig @@ -205,3 +205,81 @@ pub const UUID7 = struct { return self.toUUID().format(layout, options, writer); } }; + +/// UUID v5 implementation using SHA-1 hashing +/// This is a name-based UUID that uses SHA-1 for hashing +pub const UUID5 = struct { + bytes: [16]u8, + + pub const namespaces = struct { + pub const dns: *const [16]u8 = &.{ 0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8 }; + pub const url: *const [16]u8 = &.{ 0x6b, 0xa7, 0xb8, 0x11, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8 }; + pub const oid: *const [16]u8 = &.{ 0x6b, 0xa7, 0xb8, 0x12, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8 }; + pub const x500: *const [16]u8 = &.{ 0x6b, 0xa7, 0xb8, 0x14, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8 }; + + pub fn get(namespace: []const u8) ?*const [16]u8 { + if (bun.strings.eqlCaseInsensitiveASCII(namespace, "dns", true)) { + return dns; + } else if (bun.strings.eqlCaseInsensitiveASCII(namespace, "url", true)) { + return url; + } else if (bun.strings.eqlCaseInsensitiveASCII(namespace, "oid", true)) { + return oid; + } else if (bun.strings.eqlCaseInsensitiveASCII(namespace, "x500", true)) { + return x500; + } + + return null; + } + }; + + /// Generate a UUID v5 from a namespace UUID and name data + pub fn init(namespace: *const [16]u8, name: []const u8) UUID5 { + const hash = brk: { + var sha1_hasher = bun.sha.SHA1.init(); + defer sha1_hasher.deinit(); + + sha1_hasher.update(namespace); + sha1_hasher.update(name); + + var hash: [20]u8 = undefined; + sha1_hasher.final(&hash); + + break :brk hash; + }; + + // Take first 16 bytes of the hash + var bytes: [16]u8 = hash[0..16].*; + + // Set version to 5 (bits 12-15 of time_hi_and_version) + bytes[6] = (bytes[6] & 0x0F) | 0x50; + + // Set variant bits (bits 6-7 of clock_seq_hi_and_reserved) + bytes[8] = (bytes[8] & 0x3F) | 0x80; + + return UUID5{ + .bytes = bytes, + }; + } + + pub fn toBytes(self: UUID5) [16]u8 { + return self.bytes; + } + + pub fn print(self: UUID5, buf: *[36]u8) void { + return printBytes(&self.toBytes(), buf); + } + + pub fn toUUID(self: UUID5) UUID { + const bytes: [16]u8 = self.toBytes(); + return .{ .bytes = bytes }; + } + + pub fn format( + self: UUID5, + comptime layout: []const u8, + options: fmt.FormatOptions, + writer: anytype, + ) !void { + return self.toUUID().format(layout, options, writer); + } +}; diff --git a/src/bun.js/virtual_machine_exports.zig b/src/bun.js/virtual_machine_exports.zig index 6122e926cc..1a1448012b 100644 --- a/src/bun.js/virtual_machine_exports.zig +++ 
b/src/bun.js/virtual_machine_exports.zig @@ -12,6 +12,7 @@ pub export fn Bun__getVM() *JSC.VirtualMachine { return JSC.VirtualMachine.get(); } +/// Caller must check for termination exception pub export fn Bun__drainMicrotasks() void { JSC.VirtualMachine.get().eventLoop().tick(); } @@ -85,11 +86,9 @@ pub export fn Bun__queueTaskWithTimeout(global: *JSGlobalObject, task: *JSC.CppT pub export fn Bun__reportUnhandledError(globalObject: *JSGlobalObject, value: JSValue) callconv(.C) JSValue { JSC.markBinding(@src()); - // This JSGlobalObject might not be the main script execution context - // See the crash in https://github.com/oven-sh/bun/issues/9778 - const vm = JSC.VirtualMachine.get(); - if (!value.isTerminationException(vm.jsc)) { - _ = vm.uncaughtException(globalObject, value, false); + + if (!value.isTerminationException()) { + _ = globalObject.bunVM().uncaughtException(globalObject, value, false); } return .js_undefined; } @@ -115,7 +114,7 @@ pub export fn Bun__handleRejectedPromise(global: *JSGlobalObject, promise: *JSC. if (result == .zero) return; - _ = jsc_vm.unhandledRejection(global, result, promise.toJS()); + jsc_vm.unhandledRejection(global, result, promise.toJS()); jsc_vm.autoGarbageCollect(); } @@ -173,6 +172,14 @@ export fn Bun__getVerboseFetchValue() i32 { }; } +const BakeSourceProvider = bun.sourcemap.BakeSourceProvider; +export fn Bun__addBakeSourceProviderSourceMap(vm: *VirtualMachine, opaque_source_provider: *anyopaque, specifier: *bun.String) void { + var sfb = std.heap.stackFallback(4096, bun.default_allocator); + const slice = specifier.toUTF8(sfb.get()); + defer slice.deinit(); + vm.source_mappings.putBakeSourceProvider(@as(*BakeSourceProvider, @ptrCast(opaque_source_provider)), slice.slice()); +} + export fn Bun__addSourceProviderSourceMap(vm: *VirtualMachine, opaque_source_provider: *anyopaque, specifier: *bun.String) void { var sfb = std.heap.stackFallback(4096, bun.default_allocator); const slice = specifier.toUTF8(sfb.get()); @@ -197,7 +204,7 @@ pub fn Bun__setSyntheticAllocationLimitForTesting(globalObject: *JSGlobalObject, return globalObject.throwInvalidArguments("setSyntheticAllocationLimitForTesting expects a number", .{}); } - const limit: usize = @intCast(@max(args[0].coerceToInt64(globalObject), 1024 * 1024)); + const limit: usize = @intCast(@max(try args[0].coerceToInt64(globalObject), 1024 * 1024)); const prev = VirtualMachine.synthetic_allocation_limit; VirtualMachine.synthetic_allocation_limit = limit; VirtualMachine.string_allocation_limit = limit; diff --git a/src/bun.js/webcore.zig b/src/bun.js/webcore.zig index 930957ef5a..75bc187633 100644 --- a/src/bun.js/webcore.zig +++ b/src/bun.js/webcore.zig @@ -26,6 +26,8 @@ pub const encoding = @import("webcore/encoding.zig"); pub const ReadableStream = @import("webcore/ReadableStream.zig"); pub const Blob = @import("webcore/Blob.zig"); pub const S3Stat = @import("webcore/S3Stat.zig").S3Stat; +pub const ResumableFetchSink = @import("webcore/ResumableSink.zig").ResumableFetchSink; +pub const ResumableS3UploadSink = @import("webcore/ResumableSink.zig").ResumableS3UploadSink; pub const S3Client = @import("webcore/S3Client.zig").S3Client; pub const Request = @import("webcore/Request.zig"); pub const Body = @import("webcore/Body.zig"); @@ -69,6 +71,10 @@ pub const Pipe = struct { ctx: ?*anyopaque = null, onPipe: ?Function = null, + pub inline fn isEmpty(this: *const Pipe) bool { + return this.ctx == null and this.onPipe == null; + } + pub const Function = *const fn ( ctx: *anyopaque, stream: streams.Result, 
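For context: the new `Pipe.isEmpty` helper above is what lets a native consumer claim a `ByteStream` for direct piping only when no other pipe handler is attached. A minimal sketch of the pattern, mirroring how `ResumableSink.initExactRefs` uses it later in this diff (`MySink`, `onStreamPipe`, and `tryClaim` are illustrative names, not part of the change):

```zig
// Hypothetical consumer claiming a ByteStream for native piping.
fn tryClaim(sink: *MySink, byte_stream: *bun.webcore.ByteStream) bool {
    if (!byte_stream.pipe.isEmpty()) {
        // Another consumer is already piping this stream; fall back to the
        // JS reader path instead of overwriting its callbacks.
        return false;
    }
    // Same wiring as ResumableSink: route future chunks to onStreamPipe.
    byte_stream.pipe = JSC.WebCore.Pipe.Wrap(MySink, onStreamPipe).init(sink);
    return true;
}
```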
diff --git a/src/bun.js/webcore/ArrayBufferSink.zig b/src/bun.js/webcore/ArrayBufferSink.zig index 8f9f3a209c..387861a51c 100644 --- a/src/bun.js/webcore/ArrayBufferSink.zig +++ b/src/bun.js/webcore/ArrayBufferSink.zig @@ -45,8 +45,8 @@ pub fn flush(_: *ArrayBufferSink) JSC.Maybe(void) { pub fn flushFromJS(this: *ArrayBufferSink, globalThis: *JSGlobalObject, wait: bool) JSC.Maybe(JSValue) { if (this.streaming) { const value: JSValue = switch (this.as_uint8array) { - true => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .Uint8Array), - false => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .ArrayBuffer), + true => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .Uint8Array) catch .zero, // TODO: properly propagate exception upwards + false => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .ArrayBuffer) catch .zero, // TODO: properly propagate exception upwards }; this.bytes.len = 0; if (wait) {} diff --git a/src/bun.js/webcore/Blob.zig b/src/bun.js/webcore/Blob.zig index 8f55b58b6a..7935aeb5c6 100644 --- a/src/bun.js/webcore/Blob.zig +++ b/src/bun.js/webcore/Blob.zig @@ -105,7 +105,7 @@ pub fn doReadFromS3(this: *Blob, comptime Function: anytype, global: *JSGlobalOb const WrappedFn = struct { pub fn wrapped(b: *Blob, g: *JSGlobalObject, by: []u8) JSC.JSValue { - return JSC.toJSHostValue(g, Function(b, g, by, .clone)); + return JSC.toJSHostCall(g, @src(), Function, .{ b, g, by, .clone }); } }; return S3BlobDownloadTask.init(global, this, WrappedFn.wrapped); @@ -248,7 +248,7 @@ const FormDataContext = struct { switch (res) { .err => |err| { - globalThis.throwValue(err.toJSC(globalThis)) catch {}; + globalThis.throwValue(err.toJS(globalThis)) catch {}; this.failed = true; }, .result => |result| { @@ -813,11 +813,7 @@ pub noinline fn mkdirIfNotExists(this: anytype, err: bun.sys.Error, path_string: /// Returns an encoded `*JSPromise` that resolves if the file /// - doesn't exist and is created /// - exists and is truncated -fn writeFileWithEmptySourceToDestination( - ctx: *JSC.JSGlobalObject, - destination_blob: *Blob, - options: WriteFileOptions, -) JSC.JSValue { +fn writeFileWithEmptySourceToDestination(ctx: *JSC.JSGlobalObject, destination_blob: *Blob, options: WriteFileOptions) bun.JSError!JSC.JSValue { // SAFETY: null-checked by caller const destination_store = destination_blob.store.?; defer destination_blob.detach(); @@ -894,7 +890,7 @@ fn writeFileWithEmptySourceToDestination( } result.err = result.err.withPathLike(file.pathlike); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, result.toJS(ctx)); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, try result.toJS(ctx)); } }, .s3 => |*s3| { @@ -958,12 +954,7 @@ fn writeFileWithEmptySourceToDestination( return JSC.JSPromise.resolvedPromiseValue(ctx, JSC.JSValue.jsNumber(0)); } -pub fn writeFileWithSourceDestination( - ctx: *JSC.JSGlobalObject, - source_blob: *Blob, - destination_blob: *Blob, - options: WriteFileOptions, -) JSC.JSValue { +pub fn writeFileWithSourceDestination(ctx: *JSC.JSGlobalObject, source_blob: *Blob, destination_blob: *Blob, options: WriteFileOptions) bun.JSError!JSC.JSValue { const destination_store = destination_blob.store orelse Output.panic("Destination blob is detached", .{}); const destination_type = std.meta.activeTag(destination_store.data); @@ -1250,7 +1241,7 @@ pub fn writeFileInternal(globalThis: *JSC.JSGlobalObject, path_or_blob_: *PathOr bun.isRegularFile(path_or_blob.blob.store.?.data.file.mode)))) { if 
(data.isString()) { - const len = data.getLength(globalThis); + const len = try data.getLength(globalThis); if (len < 256 * 1024) { const str = try data.toBunString(globalThis); @@ -1565,7 +1556,7 @@ fn writeStringToFileFast( return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( globalThis, - err.withPath(pathlike.path.slice()).toJSC(globalThis), + err.withPath(pathlike.path.slice()).toJS(globalThis), ); }, } @@ -1606,11 +1597,11 @@ fn writeStringToFileFast( return .zero; } if (comptime !needs_open) { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJS(globalThis)); } return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( globalThis, - err.withPath(pathlike.path.slice()).toJSC(globalThis), + err.withPath(pathlike.path.slice()).toJS(globalThis), ); }, } @@ -1652,7 +1643,7 @@ fn writeBytesToFileFast( return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( globalThis, - err.withPath(pathlike.path.slice()).toJSC(globalThis), + err.withPath(pathlike.path.slice()).toJS(globalThis), ); }, } @@ -1685,12 +1676,12 @@ fn writeBytesToFileFast( if (comptime !needs_open) { return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( globalThis, - err.toJSC(globalThis), + err.toJS(globalThis), ); } return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( globalThis, - err.withPath(pathlike.path.slice()).toJSC(globalThis), + err.withPath(pathlike.path.slice()).toJS(globalThis), ); }, } @@ -1796,7 +1787,7 @@ pub fn JSDOMFile__construct_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.Ca if (try options.getTruthy(globalThis, "lastModified")) |last_modified| { set_last_modified = true; - blob.last_modified = last_modified.coerce(f64, globalThis); + blob.last_modified = try last_modified.coerce(f64, globalThis); } } } @@ -1895,7 +1886,7 @@ pub fn constructBunFile( } } if (try opts.getTruthy(globalObject, "lastModified")) |last_modified| { - blob.last_modified = last_modified.coerce(f64, globalObject); + blob.last_modified = try last_modified.coerce(f64, globalObject); } } } @@ -1918,6 +1909,7 @@ pub fn findOrCreateFileFromPath(path_or_fd: *JSC.Node.PathOrFileDescriptor, glob } } } + const path: JSC.Node.PathOrFileDescriptor = brk: { switch (path_or_fd.*) { .path => { @@ -2031,7 +2023,7 @@ pub fn toStreamWithOffset( fn lifetimeWrap(comptime Fn: anytype, comptime lifetime: JSC.WebCore.Lifetime) fn (*Blob, *JSC.JSGlobalObject) JSC.JSValue { return struct { fn wrap(this: *Blob, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - return JSC.toJSHostValue(globalObject, Fn(this, globalObject, lifetime)); + return JSC.toJSHostCall(globalObject, @src(), Fn, .{ this, globalObject, lifetime }); } }.wrap; } @@ -2425,7 +2417,7 @@ pub fn pipeReadableStreamToBlob(this: *Blob, globalThis: *JSC.JSGlobalObject, re break :brk result; }, .err => |err| { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.withPath(path).toJSC(globalThis)); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.withPath(path).toJS(globalThis)); }, } unreachable; @@ -2458,7 +2450,7 @@ pub fn pipeReadableStreamToBlob(this: *Blob, globalThis: *JSC.JSGlobalObject, re switch (sink.writer.startSync(fd, false)) { .err => |err| { sink.deref(); - return 
JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJS(globalThis)); }, else => {}, } @@ -2466,7 +2458,7 @@ pub fn pipeReadableStreamToBlob(this: *Blob, globalThis: *JSC.JSGlobalObject, re switch (sink.writer.start(fd, true)) { .err => |err| { sink.deref(); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJS(globalThis)); }, else => {}, } @@ -2501,7 +2493,7 @@ pub fn pipeReadableStreamToBlob(this: *Blob, globalThis: *JSC.JSGlobalObject, re switch (sink.start(stream_start)) { .err => |err| { sink.deref(); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJS(globalThis)); }, else => {}, } @@ -2668,7 +2660,7 @@ pub fn getWriter( break :brk result; }, .err => |err| { - return globalThis.throwValue(err.withPath(pathlike.path.slice()).toJSC(globalThis)); + return globalThis.throwValue(err.withPath(pathlike.path.slice()).toJS(globalThis)); }, } @compileError(unreachable); @@ -2701,7 +2693,7 @@ pub fn getWriter( switch (sink.writer.startSync(fd, false)) { .err => |err| { sink.deref(); - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, else => {}, } @@ -2709,7 +2701,7 @@ pub fn getWriter( switch (sink.writer.start(fd, true)) { .err => |err| { sink.deref(); - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, else => {}, } @@ -2749,7 +2741,7 @@ pub fn getWriter( switch (sink.start(stream_start)) { .err => |err| { sink.deref(); - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, else => {}, } @@ -3781,13 +3773,13 @@ fn fromJSWithoutDeferGC( var fail_if_top_value_is_not_typed_array_like = false; switch (current.jsTypeLoose()) { .Array, .DerivedArray => { - var top_iter = JSC.JSArrayIterator.init(current, global); + var top_iter = try JSC.JSArrayIterator.init(current, global); might_only_be_one_thing = top_iter.len == 1; if (top_iter.len == 0) { return Blob{ .globalThis = global }; } if (might_only_be_one_thing) { - top_value = top_iter.next().?; + top_value = (try top_iter.next()).?; } }, else => { @@ -3894,10 +3886,10 @@ fn fromJSWithoutDeferGC( }, .Array, .DerivedArray => { - var iter = JSC.JSArrayIterator.init(current, global); + var iter = try JSC.JSArrayIterator.init(current, global); try stack.ensureUnusedCapacity(iter.len); var any_arrays = false; - while (iter.next()) |item| { + while (try iter.next()) |item| { if (item.isUndefinedOrNull()) continue; // When it's a string or ArrayBuffer inside an array, we can avoid the extra push/pop diff --git a/src/bun.js/webcore/Body.zig b/src/bun.js/webcore/Body.zig index 4684c06575..2a8b087f7c 100644 --- a/src/bun.js/webcore/Body.zig +++ b/src/bun.js/webcore/Body.zig @@ -1153,7 +1153,7 @@ pub fn Mixin(comptime Type: type) type { fn lifetimeWrap(comptime Fn: anytype, comptime lifetime: JSC.WebCore.Lifetime) fn (*AnyBlob, *JSC.JSGlobalObject) JSC.JSValue { return struct { fn wrap(this: *AnyBlob, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - return JSC.toJSHostValue(globalObject, Fn(this, globalObject, 
lifetime)); + return JSC.toJSHostCall(globalObject, @src(), Fn, .{ this, globalObject, lifetime }); } }.wrap; } diff --git a/src/bun.js/webcore/ByteStream.zig b/src/bun.js/webcore/ByteStream.zig index e9094c59a7..e54a02592f 100644 --- a/src/bun.js/webcore/ByteStream.zig +++ b/src/bun.js/webcore/ByteStream.zig @@ -374,7 +374,12 @@ pub fn deinit(this: *@This()) void { this.pending_buffer = &.{}; this.pending.result.deinit(); this.pending.result = .{ .done = {} }; - this.pending.run(); + if (this.pending.state == .pending and this.pending.future == .promise) { + // We must never run JavaScript inside of a GC finalizer. + this.pending.runOnNextTick(); + } else { + this.pending.run(); + } } if (this.buffer_action) |*action| { action.deinit(); diff --git a/src/bun.js/webcore/Crypto.zig b/src/bun.js/webcore/Crypto.zig index 2c0577cd06..4524509f53 100644 --- a/src/bun.js/webcore/Crypto.zig +++ b/src/bun.js/webcore/Crypto.zig @@ -163,6 +163,93 @@ pub fn Bun__randomUUIDv7_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallF return encoding.encodeWithMaxSize(globalThis, 32, &uuid.bytes); } +comptime { + const Bun__randomUUIDv5 = JSC.toJSHostFn(Bun__randomUUIDv5_); + @export(&Bun__randomUUIDv5, .{ .name = "Bun__randomUUIDv5" }); +} + +pub fn Bun__randomUUIDv5_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const arguments: []const JSC.JSValue = callframe.argumentsUndef(3).slice(); + + if (arguments.len == 0 or arguments[0].isUndefinedOrNull()) { + return globalThis.ERR(.INVALID_ARG_TYPE, "The \"name\" argument must be specified", .{}).throw(); + } + + if (arguments.len < 2 or arguments[1].isUndefinedOrNull()) { + return globalThis.ERR(.INVALID_ARG_TYPE, "The \"namespace\" argument must be specified", .{}).throw(); + } + + const encoding: JSC.Node.Encoding = brk: { + if (arguments.len > 2 and !arguments[2].isUndefined()) { + if (arguments[2].isString()) { + break :brk try JSC.Node.Encoding.fromJS(arguments[2], globalThis) orelse { + return globalThis.ERR(.UNKNOWN_ENCODING, "Encoding must be one of base64, base64url, hex, or buffer", .{}).throw(); + }; + } + } + + break :brk JSC.Node.Encoding.hex; + }; + + const name_value = arguments[0]; + const namespace_value = arguments[1]; + + const name = brk: { + if (name_value.isString()) { + const name_str = try name_value.toBunString(globalThis); + defer name_str.deref(); + const result = name_str.toUTF8(bun.default_allocator); + + break :brk result; + } else if (name_value.asArrayBuffer(globalThis)) |array_buffer| { + break :brk JSC.ZigString.Slice.fromUTF8NeverFree(array_buffer.byteSlice()); + } else { + return globalThis.ERR(.INVALID_ARG_TYPE, "The \"name\" argument must be of type string or BufferSource", .{}).throw(); + } + }; + defer name.deinit(); + + const namespace = brk: { + if (namespace_value.isString()) { + const namespace_str = try namespace_value.toBunString(globalThis); + defer namespace_str.deref(); + const namespace_slice = namespace_str.toUTF8(bun.default_allocator); + defer namespace_slice.deinit(); + + if (namespace_slice.slice().len != 36) { + if (UUID5.namespaces.get(namespace_slice.slice())) |namespace| { + break :brk namespace.*; + } + + return globalThis.ERR(.INVALID_ARG_VALUE, "Invalid UUID format for namespace", .{}).throw(); + } + + const parsed_uuid = UUID.parse(namespace_slice.slice()) catch { + return globalThis.ERR(.INVALID_ARG_VALUE, "Invalid UUID format for namespace", .{}).throw(); + }; + break :brk parsed_uuid.bytes; + } else if (namespace_value.asArrayBuffer(globalThis)) 
|*array_buffer| { + const slice = array_buffer.byteSlice(); + if (slice.len != 16) { + return globalThis.ERR(.INVALID_ARG_VALUE, "Namespace must be exactly 16 bytes", .{}).throw(); + } + break :brk slice[0..16].*; + } + + return globalThis.ERR(.INVALID_ARG_TYPE, "The \"namespace\" argument must be a string or buffer", .{}).throw(); + }; + + const uuid = UUID5.init(&namespace, name.slice()); + + if (encoding == .hex) { + var str, var bytes = bun.String.createUninitialized(.latin1, 36); + uuid.print(bytes[0..36]); + return str.transferToJS(globalThis); + } + + return encoding.encodeWithMaxSize(globalThis, 32, &uuid.bytes); +} + pub fn randomUUIDWithoutTypeChecks( _: *Crypto, globalThis: *JSC.JSGlobalObject, @@ -193,6 +280,8 @@ pub export fn CryptoObject__create(globalThis: *JSC.JSGlobalObject) JSC.JSValue } const UUID7 = @import("../uuid.zig").UUID7; +const UUID = @import("../uuid.zig"); +const UUID5 = @import("../uuid.zig").UUID5; const std = @import("std"); const bun = @import("bun"); diff --git a/src/bun.js/webcore/FileReader.zig b/src/bun.js/webcore/FileReader.zig index bf4bbe747b..74fac23c40 100644 --- a/src/bun.js/webcore/FileReader.zig +++ b/src/bun.js/webcore/FileReader.zig @@ -613,13 +613,10 @@ pub fn onReaderDone(this: *FileReader) void { globalThis, .js_undefined, &.{ - JSC.ArrayBuffer.fromBytes( - buffered.items, - .Uint8Array, - ).toJS( - globalThis, - null, - ), + JSC.ArrayBuffer.fromBytes(buffered.items, .Uint8Array).toJS(globalThis, null) catch |err| { + this.pending.result = .{ .err = .{ .WeakJSValue = globalThis.takeException(err) } }; + return; + }, }, ); } diff --git a/src/bun.js/webcore/FileSink.zig b/src/bun.js/webcore/FileSink.zig index 8230ac6bb5..1c7c02d82a 100644 --- a/src/bun.js/webcore/FileSink.zig +++ b/src/bun.js/webcore/FileSink.zig @@ -24,6 +24,9 @@ fd: bun.FileDescriptor = bun.invalid_fd, auto_flusher: webcore.AutoFlusher = .{}, run_pending_later: FlushPendingTask = .{}, +/// Currently, only used when `stdin` in `Bun.spawn` is a ReadableStream. 
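+/// Set in assignToStream; released in onAttachedProcessExit, finalize, and deinit.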
+readable_stream: JSC.WebCore.ReadableStream.Strong = .{}, + const log = Output.scoped(.FileSink, false); pub const RefCount = bun.ptr.RefCount(FileSink, "ref_count", deinit, .{}); @@ -72,13 +75,31 @@ comptime { @export(&Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio, .{ .name = "Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio" }); } -pub fn onAttachedProcessExit(this: *FileSink) void { +pub fn onAttachedProcessExit(this: *FileSink, status: *const bun.spawn.Status) void { log("onAttachedProcessExit()", .{}); this.done = true; + var readable_stream = this.readable_stream; + this.readable_stream = .{}; + if (readable_stream.has()) { + if (this.event_loop_handle.globalObject()) |global| { + if (readable_stream.get(global)) |*stream| { + if (!status.isOK()) { + const event_loop = global.bunVM().eventLoop(); + event_loop.enter(); + defer event_loop.exit(); + stream.cancel(global); + } else { + stream.done(global); + } + } + } + // Clean up the readable stream reference + readable_stream.deinit(); + } + this.writer.close(); this.pending.result = .{ .err = .fromCode(.PIPE, .write) }; - this.runPending(); if (this.must_be_kept_alive_until_eof) { @@ -181,6 +202,14 @@ pub fn onReady(this: *FileSink) void { pub fn onClose(this: *FileSink) void { log("onClose()", .{}); + if (this.readable_stream.has()) { + if (this.event_loop_handle.globalObject()) |global| { + if (this.readable_stream.get(global)) |stream| { + stream.done(global); + } + } + } + this.signal.close(null); } @@ -225,80 +254,40 @@ pub fn create( } pub fn setup(this: *FileSink, options: *const FileSink.Options) JSC.Maybe(void) { - // TODO: this should be concurrent. - var isatty = false; - var is_nonblocking = false; - const fd = switch (switch (options.input_path) { - .path => |path| brk: { - is_nonblocking = true; - break :brk bun.sys.openA(path.slice(), options.flags(), options.mode); - }, - .fd => |fd_| brk: { - const duped = bun.sys.dupWithFlags(fd_, 0); + if (this.readable_stream.has()) { + // Already started. + return .{ .result = {} }; + } - break :brk duped; + const result = bun.io.openForWriting( + bun.FileDescriptor.cwd(), + options.input_path, + options.flags(), + options.mode, + &this.pollable, + &this.is_socket, + this.force_sync, + &this.nonblocking, + *FileSink, + this, + struct { + fn onForceSyncOrIsaTTY(fs: *FileSink) void { + if (comptime bun.Environment.isPosix) { + fs.force_sync = true; + fs.writer.force_sync = true; + } + } + }.onForceSyncOrIsaTTY, + bun.sys.isPollable, + ); + + const fd = switch (result) { + .err => |err| { + return .{ .err = err }; }, - }) { - .err => |err| return .{ .err = err }, .result => |fd| fd, }; - if (comptime Environment.isPosix) { - switch (bun.sys.fstat(fd)) { - .err => |err| { - fd.close(); - return .{ .err = err }; - }, - .result => |stat| { - this.pollable = bun.sys.isPollable(stat.mode); - if (!this.pollable) { - isatty = std.posix.isatty(fd.native()); - } - - if (isatty) { - this.pollable = true; - } - - this.fd = fd; - this.is_socket = std.posix.S.ISSOCK(stat.mode); - - if (this.force_sync or isatty) { - // Prevents interleaved or dropped stdout/stderr output for terminals. - // As noted in the following reference, local TTYs tend to be quite fast and - // this behavior has become expected due historical functionality on OS X, - // even though it was originally intended to change in v1.0.2 (Libuv 1.2.1). 
- // Ref: https://github.com/nodejs/node/pull/1771#issuecomment-119351671 - _ = bun.sys.updateNonblocking(fd, false); - is_nonblocking = false; - this.force_sync = true; - this.writer.force_sync = true; - } else if (!is_nonblocking) { - const flags = switch (bun.sys.getFcntlFlags(fd)) { - .result => |flags| flags, - .err => |err| { - fd.close(); - return .{ .err = err }; - }, - }; - is_nonblocking = (flags & @as(@TypeOf(flags), bun.O.NONBLOCK)) != 0; - - if (!is_nonblocking) { - if (bun.sys.setNonblocking(fd) == .result) { - is_nonblocking = true; - } - } - } - - this.nonblocking = is_nonblocking and this.pollable; - }, - } - } else if (comptime Environment.isWindows) { - this.pollable = (bun.windows.GetFileType(fd.cast()) & bun.windows.FILE_TYPE_PIPE) != 0 and !this.force_sync; - this.fd = fd; - } else { - @compileError("TODO: implement for this platform"); - } - if (comptime Environment.isWindows) { if (this.force_sync) { switch (this.writer.startSync( @@ -459,6 +448,7 @@ pub fn flushFromJS(this: *FileSink, globalThis: *JSGlobalObject, wait: bool) JSC } pub fn finalize(this: *FileSink) void { + this.readable_stream.deinit(); this.pending.deinit(); this.deref(); } @@ -540,6 +530,7 @@ pub fn end(this: *FileSink, _: ?bun.sys.Error) JSC.Maybe(void) { fn deinit(this: *FileSink) void { this.pending.deinit(); this.writer.deinit(); + this.readable_stream.deinit(); if (this.event_loop_handle.globalObject()) |global| { webcore.AutoFlusher.unregisterDeferredMicrotaskWithType(@This(), this, global.bunVM()); } @@ -656,6 +647,98 @@ pub const FlushPendingTask = struct { } }; +/// Does not ref or unref. +fn handleResolveStream(this: *FileSink, globalThis: *JSC.JSGlobalObject) void { + if (this.readable_stream.get(globalThis)) |*stream| { + stream.done(globalThis); + } + + if (!this.done) { + this.writer.close(); + } +} + +/// Does not ref or unref. 
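+/// Aborts and releases the attached readable stream, then closes the writer
+/// unless the sink is already done.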
+fn handleRejectStream(this: *FileSink, globalThis: *JSC.JSGlobalObject, _: JSC.JSValue) void { + if (this.readable_stream.get(globalThis)) |*stream| { + stream.abort(globalThis); + this.readable_stream.deinit(); + } + + if (!this.done) { + this.writer.close(); + } +} + +fn onResolveStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + log("onResolveStream", .{}); + var args = callframe.arguments(); + var this: *@This() = args[args.len - 1].asPromisePtr(@This()); + defer this.deref(); + this.handleResolveStream(globalThis); + return .js_undefined; +} +fn onRejectStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + log("onRejectStream", .{}); + const args = callframe.arguments(); + var this = args[args.len - 1].asPromisePtr(@This()); + const err = args[0]; + defer this.deref(); + + this.handleRejectStream(globalThis, err); + return .js_undefined; +} + +pub fn assignToStream(this: *FileSink, stream: *JSC.WebCore.ReadableStream, globalThis: *JSGlobalObject) JSC.JSValue { + var signal = &this.signal; + signal.* = JSC.WebCore.FileSink.JSSink.SinkSignal.init(JSValue.zero); + this.ref(); + defer this.deref(); + + // explicitly set it to a dead pointer + // we use this memory address to disable signals being sent + signal.clear(); + + this.readable_stream = .init(stream.*, globalThis); + const promise_result = JSC.WebCore.FileSink.JSSink.assignToStream(globalThis, stream.value, this, @as(**anyopaque, @ptrCast(&signal.ptr))); + + if (promise_result.toError()) |err| { + this.readable_stream.deinit(); + this.readable_stream = .{}; + return err; + } + + if (!promise_result.isEmptyOrUndefinedOrNull()) { + if (promise_result.asAnyPromise()) |promise| { + switch (promise.status(globalThis.vm())) { + .pending => { + this.writer.enableKeepingProcessAlive(this.event_loop_handle); + this.ref(); + promise_result.then(globalThis, this, onResolveStream, onRejectStream); + }, + .fulfilled => { + // These don't ref(). + this.handleResolveStream(globalThis); + }, + .rejected => { + // These don't ref(). 
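+                        // The promise already settled, so no callback will fire later and
+                        // no extra ref is needed (unlike the .pending branch above).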
+ this.handleRejectStream(globalThis, promise.result(globalThis.vm())); + }, + } + } + } + + return promise_result; +} + +comptime { + const export_prefix = "Bun__FileSink"; + if (bun.Environment.export_cpp_apis) { + @export(&JSC.toJSHostFn(onResolveStream), .{ .name = export_prefix ++ "__onResolveStream" }); + @export(&JSC.toJSHostFn(onRejectStream), .{ .name = export_prefix ++ "__onRejectStream" }); + } +} + const std = @import("std"); const bun = @import("bun"); const uv = bun.windows.libuv; diff --git a/src/bun.js/webcore/ReadableStream.zig b/src/bun.js/webcore/ReadableStream.zig index 7913cc8a2c..df32ec91d2 100644 --- a/src/bun.js/webcore/ReadableStream.zig +++ b/src/bun.js/webcore/ReadableStream.zig @@ -571,7 +571,7 @@ pub fn NewSource( } return switch (this.context.setRawMode(flag == .true)) { .result => .js_undefined, - .err => |e| e.toJSC(global), + .err => |e| e.toJS(global), }; } @@ -636,7 +636,7 @@ pub fn NewSource( .ready => return JSValue.jsNumber(16384), .chunk_size => |size| return JSValue.jsNumber(size), .err => |err| { - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); }, else => |rc| { return rc.toJS(globalThis); @@ -653,7 +653,7 @@ pub fn NewSource( switch (result) { .err => |err| { if (err == .Error) { - return globalThis.throwValue(err.Error.toJSC(globalThis)); + return globalThis.throwValue(err.Error.toJS(globalThis)); } else { const js_err = err.JSValue; js_err.ensureStillAlive(); @@ -662,7 +662,7 @@ pub fn NewSource( } }, .pending => { - const out = result.toJS(globalThis); + const out = try result.toJS(globalThis); js.pendingPromiseSetCached(this_jsvalue, globalThis, out); return out; }, diff --git a/src/bun.js/webcore/Request.zig b/src/bun.js/webcore/Request.zig index 4728da2b98..efcd5ab1eb 100644 --- a/src/bun.js/webcore/Request.zig +++ b/src/bun.js/webcore/Request.zig @@ -941,7 +941,7 @@ const bun = @import("bun"); const MimeType = bun.http.MimeType; const JSC = bun.JSC; -const Method = @import("../../http/method.zig").Method; +const Method = @import("../../http/Method.zig").Method; const FetchHeaders = bun.webcore.FetchHeaders; const AbortSignal = JSC.WebCore.AbortSignal; const Output = bun.Output; diff --git a/src/bun.js/webcore/Response.zig b/src/bun.js/webcore/Response.zig index 6767cc8e10..e33fe52ea1 100644 --- a/src/bun.js/webcore/Response.zig +++ b/src/bun.js/webcore/Response.zig @@ -388,7 +388,7 @@ pub fn constructJSON( var str = bun.String.empty; // calling JSON.stringify on an empty string adds extra quotes // so this is correct - json_value.jsonStringify(globalThis, 0, &str); + try json_value.jsonStringify(globalThis, 0, &str); if (globalThis.hasException()) { return .zero; @@ -658,7 +658,7 @@ pub const Init = struct { } if (try response_init.fastGet(globalThis, .status)) |status_value| { - const number = status_value.coerceToInt64(globalThis); + const number = try status_value.coerceToInt64(globalThis); if ((200 <= number and number < 600) or number == 101) { result.status_code = @as(u16, @truncate(@as(u32, @intCast(number)))); } else { @@ -727,7 +727,7 @@ const MimeType = bun.http.MimeType; const http = bun.http; const JSC = bun.JSC; -const Method = @import("../../http/method.zig").Method; +const Method = @import("../../http/Method.zig").Method; const FetchHeaders = bun.webcore.FetchHeaders; const Output = bun.Output; const string = bun.string; diff --git a/src/bun.js/webcore/ResumableSink.zig b/src/bun.js/webcore/ResumableSink.zig new file mode 100644 index 0000000000..27dc0b33a2 --- 
/dev/null +++ b/src/bun.js/webcore/ResumableSink.zig @@ -0,0 +1,362 @@
+/// ResumableSink provides a simplified way of reading a stream into a native writable interface, allowing the stream to be paused and resumed without the use of promises.
+/// Returning false from `onWrite` pauses the stream; calling .drain() resumes consumption.
+/// onEnd is always called when the stream finishes or errors.
+/// Calling `cancel` cancels the stream; onEnd is then called with the reason passed to cancel.
+/// Unlike JSSink, this is not intended to be exposed to users the way FileSink or HTTPRequestSink are.
+pub fn ResumableSink(
+    comptime js: type,
+    comptime Context: type,
+    comptime onWrite: fn (context: *Context, chunk: []const u8) bool,
+    comptime onEnd: fn (context: *Context, err: ?JSC.JSValue) void,
+) type {
+    return struct {
+        const log = bun.Output.scoped(.ResumableSink, false);
+        pub const toJS = js.toJS;
+        pub const fromJS = js.fromJS;
+        pub const fromJSDirect = js.fromJSDirect;
+
+        pub const new = bun.TrivialNew(@This());
+        const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{});
+        pub const ref = RefCount.ref;
+        pub const deref = RefCount.deref;
+        const setCancel = js.oncancelSetCached;
+        const getCancel = js.oncancelGetCached;
+        const setDrain = js.ondrainSetCached;
+        const getDrain = js.ondrainGetCached;
+        const setStream = js.streamSetCached;
+        const getStream = js.streamGetCached;
+        ref_count: RefCount,
+        self: JSC.Strong.Optional = JSC.Strong.Optional.empty,
+        // We can have a detached self and still hold a strong reference to the stream
+        stream: JSC.WebCore.ReadableStream.Strong = .{},
+        globalThis: *JSC.JSGlobalObject,
+        context: *Context,
+        highWaterMark: i64 = 16384,
+        status: Status = .started,
+
+        const Status = enum(u8) {
+            started,
+            piped,
+            paused,
+            done,
+        };
+
+        pub fn constructor(globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!*@This() {
+            return globalThis.throwInvalidArguments("ResumableSink is not constructable", .{});
+        }
+
+        pub fn init(globalThis: *JSC.JSGlobalObject, stream: JSC.WebCore.ReadableStream, context: *Context) *@This() {
+            return initExactRefs(globalThis, stream, context, 1);
+        }
+
+        pub fn initExactRefs(globalThis: *JSC.JSGlobalObject, stream: JSC.WebCore.ReadableStream, context: *Context, ref_count: u32) *@This() {
+            const this = @This().new(.{
+                .globalThis = globalThis,
+                .context = context,
+                .ref_count = RefCount.initExactRefs(ref_count),
+            });
+            if (stream.isLocked(globalThis) or stream.isDisturbed(globalThis)) {
+                var err = JSC.SystemError{
+                    .code = bun.String.static(@tagName(JSC.Node.ErrorCode.ERR_STREAM_CANNOT_PIPE)),
+                    .message = bun.String.static("Stream already used, please create a new one"),
+                };
+                const err_instance = err.toErrorInstance(globalThis);
+                err_instance.ensureStillAlive();
+                this.status = .done;
+                onEnd(this.context, err_instance);
+                this.deref();
+                return this;
+            }
+            if (stream.ptr == .Bytes) {
+                const byte_stream: *bun.webcore.ByteStream = stream.ptr.Bytes;
+                // If the pipe is empty, we can pipe directly
+                if (byte_stream.pipe.isEmpty()) {
+                    // Equivalent to onStart: grab the highWaterMark
+                    this.highWaterMark = if (byte_stream.highWaterMark < std.math.maxInt(i64))
+                        @intCast(byte_stream.highWaterMark)
+                    else
+                        std.math.maxInt(i64);
+
+                    if (byte_stream.has_received_last_chunk) {
+                        this.status = .done;
+                        const err = brk_err: {
+                            const pending = byte_stream.pending.result;
+                            if (pending == .err) {
+                                const js_err, const was_strong = pending.err.toJSWeak(this.globalThis);
+                                js_err.ensureStillAlive();
+                                if (was_strong == .Strong)
+                                    js_err.unprotect();
+                                break :brk_err js_err;
+                            }
+                            break :brk_err null;
+                        };
+
+                        const bytes = byte_stream.drain().listManaged(bun.default_allocator);
+                        defer bytes.deinit();
+                        log("onWrite {}", .{bytes.items.len});
+                        _ = onWrite(this.context, bytes.items);
+                        onEnd(this.context, err);
+                        this.deref();
+                        return this;
+                    }
+                    // We can pipe, but we want to drain as much as possible first
+                    const bytes = byte_stream.drain().listManaged(bun.default_allocator);
+                    defer bytes.deinit();
+                    // Write and see whether we can still pipe or whether we have backpressure
+                    if (bytes.items.len > 0) {
+                        log("onWrite {}", .{bytes.items.len});
+                        // We ignore the return value here because we don't want to pause the stream;
+                        // pausing would just buffer in the pipe, and we'd rather buffer in one place
+                        _ = onWrite(this.context, bytes.items);
+                    }
+                    this.status = .piped;
+                    byte_stream.pipe = JSC.WebCore.Pipe.Wrap(@This(), onStreamPipe).init(this);
+                    this.ref(); // one ref for the pipe
+
+                    // We only need the stream; we don't need to touch the JS side yet
+                    this.stream = JSC.WebCore.ReadableStream.Strong.init(stream, this.globalThis);
+                    return this;
+                }
+            }
+            // Otherwise, take the JS-side route
+            const self = this.toJS(globalThis);
+            self.ensureStillAlive();
+            const js_stream = stream.toJS();
+            js_stream.ensureStillAlive();
+            _ = Bun__assignStreamIntoResumableSink(globalThis, js_stream, self);
+            this.self = JSC.Strong.Optional.create(self, globalThis);
+            setStream(self, globalThis, js_stream);
+            return this;
+        }
+
+        pub fn jsSetHandlers(_: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame, this_value: JSC.JSValue) bun.JSError!JSC.JSValue {
+            JSC.markBinding(@src());
+            const args = callframe.arguments();
+
+            if (args.len < 2) {
+                return globalThis.throwInvalidArguments("ResumableSink.setHandlers requires at least 2 arguments", .{});
+            }
+
+            const ondrain = args.ptr[0];
+            const oncancel = args.ptr[1];
+
+            if (ondrain.isCallable()) {
+                setDrain(this_value, globalThis, ondrain);
+            }
+            if (oncancel.isCallable()) {
+                setCancel(this_value, globalThis, oncancel);
+            }
+            return .js_undefined;
+        }
+
+        pub fn jsStart(this: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue {
+            JSC.markBinding(@src());
+            const args = callframe.arguments();
+            if (args.len > 0 and args[0].isObject()) {
+                if (try args[0].getOptionalInt(globalThis, "highWaterMark", i64)) |highWaterMark| {
+                    this.highWaterMark = highWaterMark;
+                }
+            }
+
+            return .js_undefined;
+        }
+
+        pub fn jsWrite(this: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue {
+            JSC.markBinding(@src());
+            const args = callframe.arguments();
+            // Ignore any call if detached
+            if (!this.self.has() or this.status == .done) return .js_undefined;
+
+            if (args.len < 1) {
+                return globalThis.throwInvalidArguments("ResumableSink.write requires at least 1 argument", .{});
+            }
+
+            const buffer = args[0];
+            buffer.ensureStillAlive();
+            if (try JSC.Node.StringOrBuffer.fromJS(globalThis, bun.default_allocator, buffer)) |sb| {
+                defer sb.deinit();
+                const bytes = sb.slice();
+                log("jsWrite {}", .{bytes.len});
+                const should_continue = onWrite(this.context, bytes);
+                if (!should_continue) {
+                    log("paused", .{});
+                    this.status = .paused;
+                }
+                return JSC.jsBoolean(should_continue);
+            }
+
+            return globalThis.throwInvalidArguments("ResumableSink.write requires a string or buffer", .{});
+        }
+
+        pub fn jsEnd(this: *@This(), _: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue {
+            JSC.markBinding(@src());
+            const args = callframe.arguments();
+            // Ignore any call if detached
+            if (!this.self.has() or this.status == .done) return .js_undefined;
+            this.detachJS();
+            log("jsEnd {}", .{args.len});
+            this.status = .done;
+
+            onEnd(this.context, if (args.len > 0) args[0] else null);
+            return .js_undefined;
+        }
+
+        pub fn drain(this: *@This()) void {
+            log("drain", .{});
+            if (this.status != .paused) {
+                return;
+            }
+            if (this.self.get()) |js_this| {
+                const globalObject = this.globalThis;
+                const vm = globalObject.bunVM();
+                vm.eventLoop().enter();
+                defer vm.eventLoop().exit();
+                if (getDrain(js_this)) |ondrain| {
+                    if (ondrain.isCallable()) {
+                        this.status = .started;
+                        _ = ondrain.call(globalObject, .js_undefined, &.{.js_undefined}) catch |err| {
+                            // Should never happen
+                            bun.debugAssert(false);
+                            _ = globalObject.takeError(err);
+                        };
+                    }
+                }
+            }
+        }
+
+        pub fn cancel(this: *@This(), reason: JSC.JSValue) void {
+            if (this.status == .piped) {
+                reason.ensureStillAlive();
+                this.endPipe(reason);
+                return;
+            }
+            if (this.self.get()) |js_this| {
+                this.status = .done;
+                js_this.ensureStillAlive();
+
+                const globalObject = this.globalThis;
+                const vm = globalObject.bunVM();
+                vm.eventLoop().enter();
+                defer vm.eventLoop().exit();
+
+                if (getCancel(js_this)) |oncancel| {
+                    oncancel.ensureStillAlive();
+                    // Detach first so that if cancel calls end, it becomes a no-op
+                    this.detachJS();
+                    // Call onEnd to notify the native side that the stream errored
+                    onEnd(this.context, reason);
+                    if (oncancel.isCallable()) {
+                        _ = oncancel.call(globalObject, .js_undefined, &.{ .js_undefined, reason }) catch |err| {
+                            // Should never happen
+                            bun.debugAssert(false);
+                            _ = globalObject.takeError(err);
+                        };
+                    }
+                } else {
+                    // Should never happen, but call onEnd anyway to notify the native side that the stream errored
+                    this.detachJS();
+                    onEnd(this.context, reason);
+                }
+            }
+        }
+
+        fn detachJS(this: *@This()) void {
+            if (this.self.trySwap()) |js_this| {
+                setDrain(js_this, this.globalThis, .zero);
+                setCancel(js_this, this.globalThis, .zero);
+                setStream(js_this, this.globalThis, .zero);
+                this.self.deinit();
+                this.self = JSC.Strong.Optional.empty;
+            }
+        }
+        pub fn deinit(this: *@This()) void {
+            this.detachJS();
+            this.stream.deinit();
+            bun.destroy(this);
+        }
+
+        pub fn finalize(this: *@This()) void {
+            this.deref();
+        }
+
+        fn onStreamPipe(
+            this: *@This(),
+            stream: bun.webcore.streams.Result,
+            allocator: std.mem.Allocator,
+        ) void {
+            const stream_needs_deinit = stream == .owned or stream == .owned_and_done;
+
+            defer {
+                if (stream_needs_deinit) {
+                    if (stream == .owned_and_done) {
+                        stream.owned_and_done.listManaged(allocator).deinit();
+                    } else {
+                        stream.owned.listManaged(allocator).deinit();
+                    }
+                }
+            }
+            const chunk = stream.slice();
+            log("onWrite {}", .{chunk.len});
+            const stopStream = !onWrite(this.context, chunk);
+            const is_done = stream.isDone();
+
+            if (is_done) {
+                const err: ?JSC.JSValue = brk_err: {
+                    if (stream == .err) {
+                        const js_err, const was_strong = stream.err.toJSWeak(this.globalThis);
+                        js_err.ensureStillAlive();
+                        if (was_strong == .Strong)
+                            js_err.unprotect();
+                        break :brk_err js_err;
+                    }
+                    break :brk_err null;
+                };
+                this.endPipe(err);
+            } else if (stopStream) {
+                // It makes no sense to pause the stream here;
+                // it will be buffered in the pipe anyway
+            }
+        }
+
+        fn endPipe(this: *@This(), err: ?JSC.JSValue) void {
+            log("endPipe", .{});
+            if (this.status != .piped) return;
+            this.status = .done;
+            if
(this.stream.get(this.globalThis)) |stream_| { + if (stream_.ptr == .Bytes) { + stream_.ptr.Bytes.pipe = .{}; + } + if (err != null) { + stream_.cancel(this.globalThis); + } else { + stream_.done(this.globalThis); + } + var stream = this.stream; + this.stream = .{}; + stream.deinit(); + } + // We ref when we attach the stream so we deref when we detach the stream + this.deref(); + + onEnd(this.context, err); + if (this.self.has()) { + // JS owns the stream, so we need to detach the JS and let finalize handle the deref + // this should not happen but lets handle it anyways + this.detachJS(); + } else { + // no js attached, so we can just deref + this.deref(); + } + } + }; +} + +pub const ResumableFetchSink = ResumableSink(JSC.Codegen.JSResumableFetchSink, FetchTasklet, FetchTasklet.writeRequestData, FetchTasklet.writeEndRequest); +const S3UploadStreamWrapper = @import("../../s3/client.zig").S3UploadStreamWrapper; +pub const ResumableS3UploadSink = ResumableSink(JSC.Codegen.JSResumableS3UploadSink, S3UploadStreamWrapper, S3UploadStreamWrapper.writeRequestData, S3UploadStreamWrapper.writeEndRequest); +const std = @import("std"); +const bun = @import("bun"); +const FetchTasklet = @import("./fetch.zig").FetchTasklet; + +const JSC = bun.JSC; +extern fn Bun__assignStreamIntoResumableSink(globalThis: *JSC.JSGlobalObject, stream: JSC.JSValue, sink: JSC.JSValue) JSC.JSValue; diff --git a/src/bun.js/webcore/Sink.zig b/src/bun.js/webcore/Sink.zig index e46bed8c48..00105bec52 100644 --- a/src/bun.js/webcore/Sink.zig +++ b/src/bun.js/webcore/Sink.zig @@ -79,7 +79,7 @@ pub const UTF8Fallback = struct { if (stack_size >= str.len * 2) { var buf: [stack_size]u8 = undefined; - const copied = bun.strings.copyUTF16IntoUTF8(&buf, []const u16, str, true); + const copied = bun.strings.copyUTF16IntoUTF8Impl(&buf, []const u16, str, true); bun.assert(copied.written <= stack_size); bun.assert(copied.read <= stack_size); if (input.isDone()) { @@ -471,7 +471,7 @@ pub fn JSSink(comptime SinkType: type, comptime abi_name: []const u8) type { } } - return this.sink.end(null).toJS(globalThis); + return this.sink.end(null).toJS(globalThis) catch .zero; // TODO: properly propagate exception upwards } pub fn flush(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { @@ -496,7 +496,7 @@ pub fn JSSink(comptime SinkType: type, comptime abi_name: []const u8) type { const maybe_value: JSC.Maybe(JSValue) = this.sink.flushFromJS(globalThis, wait); return switch (maybe_value) { .result => |value| value, - .err => |err| return globalThis.throwValue(err.toJSC(globalThis)), + .err => |err| return globalThis.throwValue(err.toJS(globalThis)), }; } @@ -568,7 +568,7 @@ pub fn JSSink(comptime SinkType: type, comptime abi_name: []const u8) type { } } - return this.sink.endFromJS(globalThis).toJS(globalThis); + return this.sink.endFromJS(globalThis).toJS(globalThis) catch .zero; // TODO: properly propagate exception upwards } pub fn updateRef(ptr: *anyopaque, value: bool) callconv(.C) void { diff --git a/src/bun.js/webcore/TextDecoder.zig b/src/bun.js/webcore/TextDecoder.zig index 78903e2828..4dbc8dd798 100644 --- a/src/bun.js/webcore/TextDecoder.zig +++ b/src/bun.js/webcore/TextDecoder.zig @@ -171,11 +171,7 @@ pub fn decode(this: *TextDecoder, globalThis: *JSC.JSGlobalObject, callframe: *J const stream = stream: { if (arguments.len > 1 and arguments[1].isObject()) { if (try arguments[1].fastGet(globalThis, .stream)) |stream_value| { - const stream_bool = stream_value.coerce(bool, globalThis); - if 
(globalThis.hasException()) { - return .zero; - } - break :stream stream_bool; + break :stream stream_value.toBoolean(); } } diff --git a/src/bun.js/webcore/TextEncoder.zig b/src/bun.js/webcore/TextEncoder.zig index e6845f3b88..e7f867e3cf 100644 --- a/src/bun.js/webcore/TextEncoder.zig +++ b/src/bun.js/webcore/TextEncoder.zig @@ -16,7 +16,7 @@ pub export fn TextEncoder__encode8( if (slice.len <= buf.len / 2) { const result = strings.copyLatin1IntoUTF8(&buf, []const u8, slice); - const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, result.written); + const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, result.written) catch return .zero; bun.assert(result.written <= buf.len); bun.assert(result.read == slice.len); const array_buffer = uint8array.asArrayBuffer(globalThis) orelse return .zero; @@ -28,7 +28,7 @@ pub export fn TextEncoder__encode8( return globalThis.throwOutOfMemoryValue(); }; bun.assert(bytes.len >= slice.len); - return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJSUnchecked(globalThis, null); + return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJSUnchecked(globalThis, null) catch .zero; } } @@ -51,15 +51,15 @@ pub export fn TextEncoder__encode16( // max utf16 -> utf8 length if (slice.len <= buf.len / 4) { - const result = strings.copyUTF16IntoUTF8(&buf, @TypeOf(slice), slice, true); + const result = strings.copyUTF16IntoUTF8(&buf, @TypeOf(slice), slice); if (result.read == 0 or result.written == 0) { - const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, 3); + const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, 3) catch return .zero; const array_buffer = uint8array.asArrayBuffer(globalThis).?; const replacement_char = [_]u8{ 239, 191, 189 }; @memcpy(array_buffer.slice()[0..replacement_char.len], &replacement_char); return uint8array; } - const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, result.written); + const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, result.written) catch return .zero; bun.assert(result.written <= buf.len); bun.assert(result.read == slice.len); const array_buffer = uint8array.asArrayBuffer(globalThis).?; @@ -74,7 +74,7 @@ pub export fn TextEncoder__encode16( ) catch { return globalThis.toInvalidArguments("Out of memory", .{}); }; - return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJSUnchecked(globalThis, null); + return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJSUnchecked(globalThis, null) catch .zero; } } @@ -97,15 +97,15 @@ pub export fn c( // max utf16 -> utf8 length if (slice.len <= buf.len / 4) { - const result = strings.copyUTF16IntoUTF8(&buf, @TypeOf(slice), slice, true); + const result = strings.copyUTF16IntoUTF8(&buf, @TypeOf(slice), slice); if (result.read == 0 or result.written == 0) { - const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, 3); + const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, 3) catch return .zero; const array_buffer = uint8array.asArrayBuffer(globalThis).?; const replacement_char = [_]u8{ 239, 191, 189 }; @memcpy(array_buffer.slice()[0..replacement_char.len], &replacement_char); return uint8array; } - const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, result.written); + const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, result.written) catch return .zero; bun.assert(result.written <= buf.len); bun.assert(result.read == slice.len); const array_buffer = uint8array.asArrayBuffer(globalThis).?; @@ -120,7 +120,7 @@ 
pub export fn c( ) catch { return globalThis.throwOutOfMemoryValue(); }; - return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJSUnchecked(globalThis, null); + return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJSUnchecked(globalThis, null) catch .zero; } } @@ -186,7 +186,7 @@ pub export fn TextEncoder__encodeRopeString( const length = rope_str.length(); var array: JSValue = .zero; if (length > stack_buf.len / 2) { - array = JSC.JSValue.createUninitializedUint8Array(globalThis, length); + array = JSC.JSValue.createUninitializedUint8Array(globalThis, length) catch return .zero; array.ensureStillAlive(); buf_to_use = array.asArrayBuffer(globalThis).?.slice(); } @@ -204,7 +204,7 @@ pub export fn TextEncoder__encodeRopeString( } if (array == .zero) { - array = JSC.JSValue.createUninitializedUint8Array(globalThis, length); + array = JSC.JSValue.createUninitializedUint8Array(globalThis, length) catch return .zero; array.ensureStillAlive(); @memcpy(array.asArrayBuffer(globalThis).?.ptr[0..length], buf_to_use[0..length]); } @@ -220,7 +220,7 @@ pub export fn TextEncoder__encodeInto16( ) u64 { const output = buf_ptr[0..buf_len]; const input = input_ptr[0..input_len]; - var result: strings.EncodeIntoResult = strings.copyUTF16IntoUTF8(output, []const u16, input, false); + var result: strings.EncodeIntoResult = strings.copyUTF16IntoUTF8(output, []const u16, input); if (output.len >= 3 and (result.read == 0 or result.written == 0)) { const replacement_char = [_]u8{ 239, 191, 189 }; @memcpy(buf_ptr[0..replacement_char.len], &replacement_char); diff --git a/src/bun.js/webcore/blob/copy_file.zig b/src/bun.js/webcore/blob/copy_file.zig index 4396dc8d94..7c35e7e369 100644 --- a/src/bun.js/webcore/blob/copy_file.zig +++ b/src/bun.js/webcore/blob/copy_file.zig @@ -63,7 +63,7 @@ pub const CopyFile = struct { pub fn reject(this: *CopyFile, promise: *JSC.JSPromise) void { const globalThis = this.globalThis; - var system_error: SystemError = this.system_error orelse SystemError{}; + var system_error: SystemError = this.system_error orelse SystemError{ .message = .empty }; if (this.source_file_store.pathlike == .path and system_error.path.isEmpty()) { system_error.path = bun.String.createUTF8(this.source_file_store.pathlike.path.slice()); } @@ -997,7 +997,7 @@ pub const CopyFileWindows = struct { pub fn throw(this: *CopyFileWindows, err: bun.sys.Error) void { const globalThis = this.event_loop.global; const promise = this.promise.swap(); - const err_instance = err.toJSC(globalThis); + const err_instance = err.toJS(globalThis); var event_loop = this.event_loop; event_loop.enter(); diff --git a/src/bun.js/webcore/blob/read_file.zig b/src/bun.js/webcore/blob/read_file.zig index 13cc393747..4ce7514532 100644 --- a/src/bun.js/webcore/blob/read_file.zig +++ b/src/bun.js/webcore/blob/read_file.zig @@ -36,7 +36,7 @@ pub fn NewReadFileHandler(comptime Function: anytype) type { blob.size = @min(@as(SizeType, @truncate(bytes.len)), blob.size); const WrappedFn = struct { pub fn wrapped(b: *Blob, g: *JSGlobalObject, by: []u8) JSC.JSValue { - return JSC.toJSHostValue(g, Function(b, g, by, .temporary)); + return JSC.toJSHostCall(g, @src(), Function, .{ b, g, by, .temporary }); } }; diff --git a/src/bun.js/webcore/blob/write_file.zig b/src/bun.js/webcore/blob/write_file.zig index fac6648cd3..11302547fb 100644 --- a/src/bun.js/webcore/blob/write_file.zig +++ b/src/bun.js/webcore/blob/write_file.zig @@ -693,7 +693,13 @@ pub const WriteFileWaitFromLockedValueTask = struct { => { var blob = value.use(); // TODO: this should be one 
promise not two! - const new_promise = Blob.writeFileWithSourceDestination(globalThis, &blob, &file_blob, .{ .mkdirp_if_not_exists = this.mkdirp_if_not_exists }); + const new_promise = Blob.writeFileWithSourceDestination(globalThis, &blob, &file_blob, .{ .mkdirp_if_not_exists = this.mkdirp_if_not_exists }) catch |err| { + file_blob.detach(); + this.promise.deinit(); + bun.destroy(this); + promise.reject(globalThis, err); + return; + }; if (new_promise.asAnyPromise()) |p| { switch (p.unwrap(globalThis.vm(), .mark_handled)) { // Fulfill the new promise using the pending promise diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig index a1fdca0158..8eda33a62f 100644 --- a/src/bun.js/webcore/encoding.zig +++ b/src/bun.js/webcore/encoding.zig @@ -78,7 +78,7 @@ export fn Bun__encoding__toString(input: [*]const u8, len: usize, globalObject: } // pub fn writeUTF16AsUTF8(utf16: [*]const u16, len: usize, to: [*]u8, to_len: usize) callconv(.C) i32 { -// return @intCast(i32, strings.copyUTF16IntoUTF8(to[0..to_len], []const u16, utf16[0..len], true).written); +// return @intCast(i32, strings.copyUTF16IntoUTF8(to[0..to_len], []const u16, utf16[0..len]).written); // } pub fn toString(input: []const u8, globalObject: *JSGlobalObject, encoding: Encoding) JSValue { return switch (encoding) { @@ -357,7 +357,12 @@ pub fn writeU16(input: [*]const u16, len: usize, to: [*]u8, to_len: usize, compt switch (comptime encoding) { .utf8 => { - return strings.copyUTF16IntoUTF8(to[0..to_len], []const u16, input[0..len], allow_partial_write).written; + return strings.copyUTF16IntoUTF8Impl( + to[0..to_len], + []const u16, + input[0..len], + allow_partial_write, + ).written; }, .latin1, .ascii, .buffer => { const out = @min(len, to_len); diff --git a/src/bun.js/webcore/fetch.zig b/src/bun.js/webcore/fetch.zig index 3c5177bd79..deb177df2a 100644 --- a/src/bun.js/webcore/fetch.zig +++ b/src/bun.js/webcore/fetch.zig @@ -61,16 +61,17 @@ pub const fetch_type_error_strings: JSTypeErrorEnum = brk: { }; pub const FetchTasklet = struct { - pub const FetchTaskletStream = JSC.WebCore.NetworkSink; + pub const ResumableSink = JSC.WebCore.ResumableFetchSink; const log = Output.scoped(.FetchTasklet, false); - sink: ?*FetchTaskletStream.JSSink = null, + sink: ?*ResumableSink = null, http: ?*http.AsyncHTTP = null, result: http.HTTPClientResult = .{}, metadata: ?http.HTTPResponseMetadata = null, javascript_vm: *VirtualMachine = undefined, global_this: *JSGlobalObject = undefined, request_body: HTTPRequestBody = undefined, + request_body_streaming_buffer: ?*http.ThreadSafeStreamBuffer = null, /// buffer being used by AsyncHTTP response_buffer: MutableString = undefined, @@ -148,7 +149,7 @@ pub const FetchTasklet = struct { pub const HTTPRequestBody = union(enum) { AnyBlob: AnyBlob, - Sendfile: http.Sendfile, + Sendfile: http.SendFile, ReadableStream: JSC.WebCore.ReadableStream.Strong, pub const Empty: HTTPRequestBody = .{ .AnyBlob = .{ .Blob = .{} } }; @@ -242,19 +243,19 @@ pub const FetchTasklet = struct { } fn clearSink(this: *FetchTasklet) void { - if (this.sink) |wrapper| { + if (this.sink) |sink| { this.sink = null; - - wrapper.sink.done = true; - wrapper.sink.ended = true; - wrapper.sink.finalize(); - wrapper.detach(); - wrapper.sink.finalizeAndDestroy(); + sink.deref(); + } + if (this.request_body_streaming_buffer) |buffer| { + this.request_body_streaming_buffer = null; + buffer.clearDrainCallback(); + buffer.deref(); } } fn clearData(this: *FetchTasklet) void { - log("clearData", .{}); + log("clearData ", 
.{}); const allocator = this.memory_reporter.allocator(); if (this.url_proxy_buffer.len > 0) { allocator.free(this.url_proxy_buffer); @@ -339,136 +340,18 @@ pub const FetchTasklet = struct { return null; } - pub fn onResolveRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - var args = callframe.arguments_old(2); - var this: *@This() = args.ptr[args.len - 1].asPromisePtr(@This()); - defer this.deref(); - if (this.request_body == .ReadableStream) { - var readable_stream_ref = this.request_body.ReadableStream; - this.request_body.ReadableStream = .{}; - defer readable_stream_ref.deinit(); - if (readable_stream_ref.get(globalThis)) |stream| { - stream.done(globalThis); - this.clearSink(); - } - } - - return .js_undefined; - } - - pub fn onRejectRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const args = callframe.arguments_old(2); - var this = args.ptr[args.len - 1].asPromisePtr(@This()); - defer this.deref(); - const err = args.ptr[0]; - if (this.request_body == .ReadableStream) { - var readable_stream_ref = this.request_body.ReadableStream; - this.request_body.ReadableStream = .{}; - defer readable_stream_ref.deinit(); - if (readable_stream_ref.get(globalThis)) |stream| { - stream.cancel(globalThis); - this.clearSink(); - } - } - - this.abortListener(err); - return .js_undefined; - } - comptime { - const jsonResolveRequestStream = JSC.toJSHostFn(onResolveRequestStream); - @export(&jsonResolveRequestStream, .{ .name = "Bun__FetchTasklet__onResolveRequestStream" }); - const jsonRejectRequestStream = JSC.toJSHostFn(onRejectRequestStream); - @export(&jsonRejectRequestStream, .{ .name = "Bun__FetchTasklet__onRejectRequestStream" }); - } - pub fn startRequestStream(this: *FetchTasklet) void { this.is_waiting_request_stream_start = false; bun.assert(this.request_body == .ReadableStream); if (this.request_body.ReadableStream.get(this.global_this)) |stream| { - this.ref(); // lets only unref when sink is done - const globalThis = this.global_this; - var response_stream = FetchTaskletStream.new(.{ - .task = .{ .fetch = this }, - .buffer = .{}, - .globalThis = globalThis, - }).toSink(); - var signal = &response_stream.sink.signal; - this.sink = response_stream; - - signal.* = FetchTaskletStream.JSSink.SinkSignal.init(JSValue.zero); - - // explicitly set it to a dead pointer - // we use this memory address to disable signals being sent - signal.clear(); - bun.assert(signal.isDead()); - - // We are already corked! 
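// (assignToStream below hands the JS ReadableStream over to the native sink: it yields a Promise when the body is consumed through a ReadableStreamDefaultReader, or an error value otherwise, and the pending/fulfilled/rejected branches that follow handle each promise state.)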
- const assignment_result: JSValue = FetchTaskletStream.JSSink.assignToStream( - globalThis, - stream.value, - response_stream, - @as(**anyopaque, @ptrCast(&signal.ptr)), - ); - - assignment_result.ensureStillAlive(); - - // assert that it was updated - bun.assert(!signal.isDead()); - - if (assignment_result.toError()) |err_value| { - response_stream.detach(); - this.sink = null; - response_stream.sink.finalizeAndDestroy(); - return this.abortListener(err_value); - } - - if (!assignment_result.isEmptyOrUndefinedOrNull()) { - assignment_result.ensureStillAlive(); - // it returns a Promise when it goes through ReadableStreamDefaultReader - if (assignment_result.asAnyPromise()) |promise| { - switch (promise.status(globalThis.vm())) { - .pending => { - this.ref(); - assignment_result.then( - globalThis, - this, - onResolveRequestStream, - onRejectRequestStream, - ); - }, - .fulfilled => { - var readable_stream_ref = this.request_body.ReadableStream; - this.request_body.ReadableStream = .{}; - defer { - stream.done(globalThis); - this.clearSink(); - readable_stream_ref.deinit(); - } - }, - .rejected => { - var readable_stream_ref = this.request_body.ReadableStream; - this.request_body.ReadableStream = .{}; - defer { - stream.cancel(globalThis); - this.clearSink(); - readable_stream_ref.deinit(); - } - - this.abortListener(promise.result(globalThis.vm())); - }, - } - return; - } else { - // if is not a promise we treat it as Error - response_stream.detach(); - this.sink = null; - response_stream.sink.finalizeAndDestroy(); - return this.abortListener(assignment_result); - } - } + this.ref(); // let's only unref when the sink is done + // +1 because the task refs the sink + const sink = ResumableSink.initExactRefs(globalThis, stream, this, 2); + this.sink = sink; } } + pub fn onBodyReceived(this: *FetchTasklet) void { const success = this.result.isSuccess(); const globalThis = this.global_this; @@ -484,17 +367,28 @@ pub const FetchTasklet = struct { var err = this.onReject(); var need_deinit = true; defer if (need_deinit) err.deinit(); + var js_err = JSValue.zero; // if we are streaming, update with the error if (this.readable_stream_ref.get(globalThis)) |readable| { if (readable.ptr == .Bytes) { + js_err = err.toJS(globalThis); + js_err.ensureStillAlive(); readable.ptr.Bytes.onData( .{ - .err = .{ .JSValue = err.toJS(globalThis) }, + .err = .{ .JSValue = js_err }, }, bun.default_allocator, ); } } + if (this.sink) |sink| { + if (js_err == .zero) { + js_err = err.toJS(globalThis); + js_err.ensureStillAlive(); + } + sink.cancel(js_err); + return; + } // if we are buffering, resolve the promise if (this.getCurrentResponse()) |response| { response.body.value.toErrorInstance(err, globalThis); @@ -710,7 +604,10 @@ pub const FetchTasklet = struct { false => brk: { // in this case we want a JSC.Strong.Optional so we just convert it var value = this.onReject(); - _ = value.toJS(globalThis); + const err = value.toJS(globalThis); + if (this.sink) |sink| { + sink.cancel(err); + } break :brk value.JSValue; }, }; @@ -1232,9 +1129,12 @@ pub const FetchTasklet = struct { fetch_tasklet.http.?.client.flags.is_streaming_request_body = isStream; fetch_tasklet.is_waiting_request_stream_start = isStream; if (isStream) { + const buffer = http.ThreadSafeStreamBuffer.new(.{}); + buffer.setDrainCallback(FetchTasklet, FetchTasklet.onWriteRequestDataDrain, fetch_tasklet); + fetch_tasklet.request_body_streaming_buffer = buffer; fetch_tasklet.http.?.request_body = .{ .stream = .{ - .buffer = .{}, + .buffer = buffer, .ended = false, },
}; @@ -1266,17 +1166,74 @@ reason.ensureStillAlive(); this.abort_reason.set(this.global_this, reason); this.abortTask(); - if (this.sink) |wrapper| { - wrapper.sink.abort(); + if (this.sink) |sink| { + sink.cancel(reason); return; } } - pub fn sendRequestData(this: *FetchTasklet, data: []const u8, ended: bool) void { - if (this.http) |http_| { - http.http_thread.scheduleRequestWrite(http_, data, ended); - } else if (data.len != 3) { - bun.default_allocator.free(data); + /// This is ALWAYS called from the http thread and we cannot touch the buffer here because it is locked + pub fn onWriteRequestDataDrain(this: *FetchTasklet) void { + // ref until the main thread callback is called + this.ref(); + this.javascript_vm.eventLoop().enqueueTaskConcurrent(JSC.ConcurrentTask.fromCallback(this, FetchTasklet.resumeRequestDataStream)); + } + + /// This is ALWAYS called from the main thread + pub fn resumeRequestDataStream(this: *FetchTasklet) void { + // deref when done because we ref inside onWriteRequestDataDrain + defer this.deref(); + if (this.sink) |sink| { + sink.drain(); + } + } + + pub fn writeRequestData(this: *FetchTasklet, data: []const u8) bool { + log("writeRequestData {}", .{data.len}); + if (this.request_body_streaming_buffer) |buffer| { + const highWaterMark = if (this.sink) |sink| sink.highWaterMark else 16384; + const stream_buffer = buffer.acquire(); + var needs_schedule = false; + defer if (needs_schedule) { + // wake up the http thread to write the data + http.http_thread.scheduleRequestWrite(this.http.?, .data); + }; + defer buffer.release(); + + // we don't have backpressure, so schedule the data to be written + // if we do have backpressure, onWritable will drain the buffer + needs_schedule = stream_buffer.isEmpty(); + // 16 is the max length of a 64-bit number in hex, plus 2 for the \r\n + var formatted_size_buffer: [18]u8 = undefined; + const formatted_size = std.fmt.bufPrint(formatted_size_buffer[0..], "{x}\r\n", .{data.len}) catch bun.outOfMemory(); + stream_buffer.ensureUnusedCapacity(formatted_size.len + data.len + 2) catch bun.outOfMemory(); + stream_buffer.writeAssumeCapacity(formatted_size); + stream_buffer.writeAssumeCapacity(data); + stream_buffer.writeAssumeCapacity("\r\n"); + // e.g. a 5-byte chunk "hello" lands in the buffer as "5\r\nhello\r\n" + + // pause the stream if we hit the high water mark + return stream_buffer.size() >= highWaterMark; + } + return false; + } + + pub fn writeEndRequest(this: *FetchTasklet, err: ?JSC.JSValue) void { + log("writeEndRequest hasError? {}", .{err != null}); + this.clearSink(); + defer this.deref(); + if (err) |jsError| { + if (this.signal_store.aborted.load(.monotonic) or this.abort_reason.has()) { + return; + } + if (!jsError.isUndefinedOrNull()) { + this.abort_reason.set(this.global_this, jsError); + } + this.abortTask(); + } else { + if (this.http) |http_| { + // just tell it to write the end of the chunked encoding, aka 0\r\n\r\n + http.http_thread.scheduleRequestWrite(http_, .endChunked); + } } } @@ -1853,7 +1810,7 @@ pub fn Bun__fetch_( inline for (0..2) |i| { if (objects_to_try[i] != .zero) { if (try objects_to_try[i].get(globalThis, "unix")) |socket_path| { - if (socket_path.isString() and socket_path.getLength(ctx) > 0) { + if (socket_path.isString() and try socket_path.getLength(ctx) > 0) { if (socket_path.toSliceCloneWithAllocator(globalThis, allocator)) |slice| { break :extract_unix_socket_path slice; } @@ -2001,7 +1958,7 @@ pub fn Bun__fetch_( inline for (0..2) |i| { if (objects_to_try[i] != .zero) { if (try objects_to_try[i].get(globalThis, "proxy")) |proxy_arg| { - if (proxy_arg.isString() and proxy_arg.getLength(ctx) > 0) { + if (proxy_arg.isString() and try proxy_arg.getLength(ctx) > 0) { var href = try JSC.URL.hrefFromJS(proxy_arg, globalThis); if (href.tag == .Dead) { const err = ctx.toTypeError(.INVALID_ARG_VALUE, "fetch() proxy URL is invalid", .{}); @@ -2406,14 +2363,14 @@ pub fn Bun__fetch_( const opened_fd = switch (opened_fd_res) { .err => |err| { - const rejected_value = JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + const rejected_value = JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJS(globalThis)); is_error = true; return rejected_value; }, .result => |fd| fd, }; - if (proxy == null and bun.http.Sendfile.isEligible(url)) { + if (proxy == null and bun.http.SendFile.isEligible(url)) { use_sendfile: { const stat: bun.Stat = switch (bun.sys.fstat(opened_fd)) { .result => |result| result, @@ -2478,7 +2435,7 @@ pub fn Bun__fetch_( switch (res) { .err => |err| { is_error = true; - const rejected_value = JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + const rejected_value = JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJS(globalThis)); body.detach(); return rejected_value; @@ -2748,7 +2705,7 @@ const Blob = JSC.WebCore.Blob; const Response = JSC.WebCore.Response; const Request = JSC.WebCore.Request; const Headers = bun.http.Headers; -const Method = @import("../../http/method.zig").Method; +const Method = @import("../../http/Method.zig").Method; const Body = JSC.WebCore.Body; const Async = bun.Async; const SSLConfig = @import("../api/server.zig").ServerConfig.SSLConfig; diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 43f8e8f508..a500f6dcb5 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -29,7 +29,7 @@ pub const Start = union(Tag) { done, }; - pub fn toJS(this: Start, globalThis: *JSGlobalObject) JSC.JSValue { + pub fn toJS(this: Start, globalThis: *JSGlobalObject) bun.JSError!JSC.JSValue { switch (this) { .empty, .ready => { return .js_undefined; @@ -38,7 +38,7 @@ pub const Start = union(Tag) { return JSC.JSValue.jsNumber(@as(Blob.SizeType, @intCast(chunk))); }, .err => |err| { - return globalThis.throwValue(err.toJSC(globalThis)) catch .zero; + return globalThis.throwValue(err.toJS(globalThis)); }, .owned_and_done => |list| { return
JSC.ArrayBuffer.fromBytes(list.slice(), .Uint8Array).toJS(globalThis, null); @@ -81,7 +81,7 @@ pub const Start = union(Tag) { var chunk_size: Blob.SizeType = 0; var empty = true; - if (value.getOwn(globalThis, "asUint8Array")) |val| { + if (try value.getOwn(globalThis, "asUint8Array")) |val| { if (val.isBoolean()) { as_uint8array = val.toBoolean(); empty = false; @@ -234,7 +234,7 @@ pub const Result = union(Tag) { pub fn toJSWeak(this: *const @This(), globalObject: *JSC.JSGlobalObject) struct { JSC.JSValue, WasStrong } { return switch (this.*) { .Error => |err| { - return .{ err.toJSC(globalObject), WasStrong.Weak }; + return .{ err.toJS(globalObject), WasStrong.Weak }; }, .JSValue => .{ this.JSValue, WasStrong.Strong }, .WeakJSValue => .{ this.WeakJSValue, WasStrong.Weak }, @@ -381,7 +381,7 @@ pub const Result = union(Tag) { defer promise.toJS().unprotect(); switch (result) { .err => |err| { - promise.reject(globalThis, err.toJSC(globalThis)); + promise.reject(globalThis, err.toJS(globalThis)); }, .done => { promise.resolve(globalThis, JSValue.jsBoolean(false)); @@ -394,7 +394,7 @@ pub const Result = union(Tag) { pub fn toJS(this: Writable, globalThis: *JSGlobalObject) JSValue { return switch (this) { - .err => |err| JSC.JSPromise.rejectedPromise(globalThis, JSValue.c(err.toJS(globalThis))).toJS(), + .err => |err| JSC.JSPromise.rejectedPromise(globalThis, err.toJS(globalThis)).toJS(), .owned => |len| JSC.JSValue.jsNumber(len), .owned_and_done => |len| JSC.JSValue.jsNumber(len), @@ -439,6 +439,25 @@ pub const Result = union(Tag) { return prom; } + pub fn runOnNextTick(this: *Pending) void { + if (this.state != .pending) return; + const vm = JSC.VirtualMachine.get(); + if (vm.isShuttingDown()) { + return; + } + + const clone = bun.create(bun.default_allocator, Pending, this.*); + this.state = .none; + this.result = .{ .done = {} }; + vm.eventLoop().enqueueTask(JSC.Task.init(clone)); + } + + pub fn runFromJSThread(this: *Pending) void { + this.run(); + + bun.destroy(this); + } + pub const Future = union(enum) { promise: struct { promise: *JSPromise, @@ -524,7 +543,11 @@ pub const Result = union(Tag) { promise.resolve(globalThis, JSValue.jsBoolean(false)); }, else => { - const value = result.toJS(globalThis); + const value = result.toJS(globalThis) catch |err| { + result.* = .{ .temporary = .{} }; + promise.reject(globalThis, err); + return; + }; value.ensureStillAlive(); result.* = .{ .temporary = .{} }; @@ -533,7 +556,7 @@ pub const Result = union(Tag) { } } - pub fn toJS(this: *const Result, globalThis: *JSGlobalObject) JSValue { + pub fn toJS(this: *const Result, globalThis: *JSGlobalObject) bun.JSError!JSValue { if (JSC.VirtualMachine.get().isShuttingDown()) { var that = this.*; that.deinit(); @@ -548,14 +571,14 @@ pub const Result = union(Tag) { return JSC.ArrayBuffer.fromBytes(list.slice(), .Uint8Array).toJS(globalThis, null); }, .temporary => |temp| { - var array = JSC.JSValue.createUninitializedUint8Array(globalThis, temp.len); + var array = try JSC.JSValue.createUninitializedUint8Array(globalThis, temp.len); var slice_ = array.asArrayBuffer(globalThis).?.slice(); const temp_slice = temp.slice(); @memcpy(slice_[0..temp_slice.len], temp_slice); return array; }, .temporary_and_done => |temp| { - var array = JSC.JSValue.createUninitializedUint8Array(globalThis, temp.len); + var array = try JSC.JSValue.createUninitializedUint8Array(globalThis, temp.len); var slice_ = array.asArrayBuffer(globalThis).?.slice(); const temp_slice = temp.slice(); @memcpy(slice_[0..temp_slice.len], 
temp_slice); @@ -1304,68 +1327,32 @@ pub const NetworkSink = struct { pub const new = bun.TrivialNew(@This()); pub const deinit = bun.TrivialDeinit(@This()); - task: ?HTTPWritableStream = null, + task: ?*bun.S3.MultiPartUpload = null, signal: Signal = .{}, globalThis: *JSGlobalObject = undefined, highWaterMark: Blob.SizeType = 2048, - buffer: bun.io.StreamBuffer, + flushPromise: JSC.JSPromise.Strong = .{}, + endPromise: JSC.JSPromise.Strong = .{}, ended: bool = false, done: bool = false, cancel: bool = false, - encoded: bool = true, - endPromise: JSC.JSPromise.Strong = .{}, - - auto_flusher: AutoFlusher = AutoFlusher{}, - - const HTTPWritableStream = union(enum) { - fetch: *JSC.WebCore.Fetch.FetchTasklet, - s3_upload: *bun.S3.MultiPartUpload, - }; + const log = bun.Output.scoped(.NetworkSink, false); fn getHighWaterMark(this: *@This()) Blob.SizeType { if (this.task) |task| { - return switch (task) { - .s3_upload => |s3| @truncate(s3.partSizeInBytes()), - else => this.highWaterMark, - }; + return task.partSizeInBytes(); } return this.highWaterMark; } - fn unregisterAutoFlusher(this: *@This()) void { - if (this.auto_flusher.registered) - AutoFlusher.unregisterDeferredMicrotaskWithTypeUnchecked(@This(), this, this.globalThis.bunVM()); - } - - fn registerAutoFlusher(this: *@This()) void { - if (!this.auto_flusher.registered) - AutoFlusher.registerDeferredMicrotaskWithTypeUnchecked(@This(), this, this.globalThis.bunVM()); - } pub fn path(this: *@This()) ?[]const u8 { if (this.task) |task| { - return switch (task) { - .s3_upload => |s3| s3.path, - else => null, - }; + return task.path; } return null; } - pub fn onAutoFlush(this: *@This()) bool { - if (this.done) { - this.auto_flusher.registered = false; - return false; - } - - _ = this.internalFlush() catch 0; - if (this.buffer.isEmpty()) { - this.auto_flusher.registered = false; - return false; - } - return true; - } - pub fn start(this: *@This(), stream_start: Start) JSC.Maybe(void) { if (this.ended) { return .{ .result = {} }; @@ -1394,84 +1381,47 @@ pub const NetworkSink = struct { return @ptrCast(this); } pub fn finalize(this: *@This()) void { - this.unregisterAutoFlusher(); - - var buffer = this.buffer; - this.buffer = .{}; - buffer.deinit(); - this.detachWritable(); } fn detachWritable(this: *@This()) void { if (this.task) |task| { this.task = null; - switch (task) { - inline .fetch, .s3_upload => |writable| { - writable.deref(); - }, - } + task.deref(); } } - fn sendRequestData(writable: HTTPWritableStream, data: []const u8, is_last: bool) void { - switch (writable) { - inline .fetch, .s3_upload => |task| task.sendRequestData(data, is_last), + pub fn onWritable(task: *bun.S3.MultiPartUpload, this: *@This(), flushed: u64) void { + log("onWritable flushed: {d} state: {s}", .{ flushed, @tagName(task.state) }); + if (this.flushPromise.hasValue()) { + this.flushPromise.resolve(this.globalThis, JSC.JSValue.jsNumber(flushed)); } } - pub fn send(this: *@This(), data: []const u8, is_last: bool) !void { - if (this.done) return; - - if (this.task) |task| { - if (is_last) this.done = true; - if (this.encoded) { - if (data.len == 0) { - sendRequestData(task, bun.http.end_of_chunked_http1_1_encoding_response_body, true); - return; - } - - // chunk encoding is really simple - if (is_last) { - const chunk = std.fmt.allocPrint(bun.default_allocator, "{x}\r\n{s}\r\n0\r\n\r\n", .{ data.len, data }) catch return error.OOM; - sendRequestData(task, chunk, true); - } else { - const chunk = std.fmt.allocPrint(bun.default_allocator, "{x}\r\n{s}\r\n", .{ 
data.len, data }) catch return error.OOM; - sendRequestData(task, chunk, false); - } - } else { - sendRequestData(task, data, is_last); - } - } - } - - pub fn internalFlush(this: *@This()) !usize { - if (this.done) return 0; - var flushed: usize = 0; - // we need to respect the max len for the chunk - while (this.buffer.isNotEmpty()) { - const bytes = this.buffer.slice(); - const len: u32 = @min(bytes.len, std.math.maxInt(u32)); - try this.send(bytes, this.buffer.list.items.len - (this.buffer.cursor + len) == 0 and this.ended); - flushed += len; - this.buffer.cursor = len; - if (this.buffer.isEmpty()) { - this.buffer.reset(); - } - } - if (this.ended and !this.done) { - try this.send("", true); - this.finalize(); - } - return flushed; - } - - pub fn flush(this: *@This()) JSC.Maybe(void) { - _ = this.internalFlush() catch 0; + pub fn flush(_: *@This()) JSC.Maybe(void) { return .{ .result = {} }; } + pub fn flushFromJS(this: *@This(), globalThis: *JSGlobalObject, _: bool) JSC.Maybe(JSValue) { - return .{ .result = JSC.JSPromise.resolvedPromiseValue(globalThis, JSValue.jsNumber(this.internalFlush() catch 0)) }; + // still waiting for more data to be flushed + if (this.flushPromise.hasValue()) { + return .{ .result = this.flushPromise.value() }; + } + + // nothing to do here + if (this.done) { + return .{ .result = JSC.JSPromise.resolvedPromiseValue(globalThis, JSValue.jsNumber(0)) }; + } + // flush more + if (this.task) |task| { + if (!task.isQueueEmpty()) { + // we have something queued, we need to wait for the next flush + this.flushPromise = JSC.JSPromise.Strong.init(globalThis); + return .{ .result = this.flushPromise.value() }; + } + } + // we are done flushing, no backpressure + return .{ .result = JSC.JSPromise.resolvedPromiseValue(globalThis, JSValue.jsNumber(0)) }; } pub fn finalizeAndDestroy(this: *@This()) void { this.finalize(); @@ -1493,28 -1443,11 @@ pub const NetworkSink = struct { const bytes = data.slice(); const len = @as(Blob.SizeType, @truncate(bytes.len)); - if (this.buffer.size() == 0 and len >= this.getHighWaterMark()) { - // fast path: - // - large-ish chunk - this.send(bytes, false) catch { - return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; - }; - return .{ .owned = len }; - } else if (this.buffer.size() + len >= this.getHighWaterMark()) { - _ = this.buffer.write(bytes) catch { - return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; - }; - _ = this.internalFlush() catch { - return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; - }; - return .{ .owned = len }; - } else { - // queue the data wait until highWaterMark is reached or the auto flusher kicks in - this.buffer.write(bytes) catch { + if (this.task) |task| { + _ = task.writeBytes(bytes, false) catch { return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; }; } - this.registerAutoFlusher(); return .{ .owned = len }; } @@ -1527,47 +1460,11 @@ pub const NetworkSink = struct { const bytes = data.slice(); const len = @as(Blob.SizeType, @truncate(bytes.len)); - if (this.buffer.size() == 0 and len >= this.getHighWaterMark()) { - // common case - if (strings.isAllASCII(bytes)) { - // fast path: - // - large-ish chunk - this.send(bytes, false) catch { - return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; - }; - return .{ .owned = len }; - } - - const check_ascii = false; - this.buffer.writeLatin1(bytes, check_ascii) catch { - return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; - }; - - _ = this.internalFlush() catch { - return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; - }; -
return .{ .owned = len }; - } else if (this.buffer.size() + len >= this.getHighWaterMark()) { - // kinda fast path: - // - combined chunk is large enough to flush automatically - - const check_ascii = true; - this.buffer.writeLatin1(bytes, check_ascii) catch { - return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; - }; - _ = this.internalFlush() catch { - return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; - }; - return .{ .owned = len }; - } else { - const check_ascii = true; - this.buffer.writeLatin1(bytes, check_ascii) catch { + if (this.task) |task| { + _ = task.writeLatin1(bytes, false) catch { return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; }; } - - this.registerAutoFlusher(); - return .{ .owned = len }; + } pub fn writeUTF16(this: *@This(), data: Result) Result.Writable { @@ -1575,21 +1472,14 @@ pub const NetworkSink = struct { return .{ .owned = 0 }; } const bytes = data.slice(); - // we must always buffer UTF-16 - // we assume the case of all-ascii UTF-16 string is pretty uncommon - this.buffer.writeUTF16(@alignCast(std.mem.bytesAsSlice(u16, bytes))) catch { - return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; - }; - - const readable = this.buffer.slice(); - if (readable.len >= this.getHighWaterMark()) { - _ = this.internalFlush() catch { + if (this.task) |task| { + // we must always buffer UTF-16 + // we assume the case of an all-ASCII UTF-16 string is pretty uncommon + _ = task.writeUTF16(bytes, false) catch { return .{ .err = Syscall.Error.fromCode(.NOMEM, .write) }; }; - return .{ .owned = @as(Blob.SizeType, @intCast(bytes.len)) }; } - this.registerAutoFlusher(); return .{ .owned = @as(Blob.SizeType, @intCast(bytes.len)) }; } @@ -1601,26 +1491,33 @@ pub const NetworkSink = struct { // send EOF this.ended = true; // flush everything and send EOF - _ = this.internalFlush() catch 0; + if (this.task) |task| { + _ = task.writeBytes("", true) catch bun.outOfMemory(); + } this.signal.close(err); return .{ .result = {} }; } pub fn endFromJS(this: *@This(), _: *JSGlobalObject) JSC.Maybe(JSValue) { - if (!this.ended) { - if (this.done) { + _ = this.end(null); + if (this.endPromise.hasValue()) { + // we are already waiting for the end + return .{ .result = this.endPromise.value() }; + } + if (this.task) |task| { + // we need to wait for the task to end + this.endPromise = JSC.JSPromise.Strong.init(this.globalThis); + const value = this.endPromise.value(); + if (!this.ended) { this.ended = true; + // we need to send EOF + _ = task.writeBytes("", true) catch bun.outOfMemory(); this.signal.close(null); - this.finalize(); - } else { - _ = this.end(null); } + return .{ .result = value }; } - const promise = this.endPromise.valueOrEmpty(); - if (promise.isEmptyOrUndefinedOrNull()) { - return .{ .result = JSC.JSValue.jsNumber(0) }; - } - return .{ .result = promise }; + // task already detached + return .{ .result = JSC.JSValue.jsNumber(0) }; } pub fn toJS(this: *@This(), globalThis: *JSGlobalObject) JSValue { return JSSink.createObject(globalThis, this, 0); @@ -1628,7 +1525,11 @@ pub const NetworkSink = struct { pub fn memoryCost(this: *const @This()) usize { // Since this is a JSSink, the NewJSSink function does @sizeOf(JSSink) which includes @sizeOf(ArrayBufferSink).
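// (Buffering is delegated to the S3 MultiPartUpload task, so the sink no longer holds bytes of its own; the cost reported here is whatever the task still has queued, or 0 once the task is detached.)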
- return this.buffer.memoryCost(); + if (this.task) |task| { + //TODO: we could do better here + return task.buffered.memoryCost(); + } + return 0; } pub const name = "NetworkSink"; diff --git a/src/bun.zig b/src/bun.zig index 5715dd301a..a39e0cb9b9 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -123,6 +123,15 @@ pub const JSError = error{ OutOfMemory, }; +pub const JSExecutionTerminated = error{ + /// JavaScript execution has been terminated. + /// This condition is indicated by throwing an exception, so most code should still handle it + /// with JSError. If you expect that you will not throw any errors other than the termination + /// exception, you can catch JSError, assert that the exception is the termination exception, + /// and return error.JSExecutionTerminated. + JSExecutionTerminated, +}; + pub const JSOOM = OOM || JSError; pub const detectCI = @import("ci_info.zig").detectCI; @@ -245,13 +254,22 @@ pub const stringZ = StringTypes.stringZ; pub const string = StringTypes.string; pub const CodePoint = StringTypes.CodePoint; -pub const MAX_PATH_BYTES: usize = if (Environment.isWasm) 1024 else std.fs.max_path_bytes; -pub const PathBuffer = [MAX_PATH_BYTES]u8; -pub const WPathBuffer = [std.os.windows.PATH_MAX_WIDE]u16; -pub const OSPathChar = if (Environment.isWindows) u16 else u8; -pub const OSPathSliceZ = [:0]const OSPathChar; -pub const OSPathSlice = []const OSPathChar; -pub const OSPathBuffer = if (Environment.isWindows) WPathBuffer else PathBuffer; +pub const paths = @import("./paths.zig"); +pub const MAX_PATH_BYTES = paths.MAX_PATH_BYTES; +pub const PathBuffer = paths.PathBuffer; +pub const PATH_MAX_WIDE = paths.PATH_MAX_WIDE; +pub const WPathBuffer = paths.WPathBuffer; +pub const OSPathChar = paths.OSPathChar; +pub const OSPathSliceZ = paths.OSPathSliceZ; +pub const OSPathSlice = paths.OSPathSlice; +pub const OSPathBuffer = paths.OSPathBuffer; +pub const Path = paths.Path; +pub const AbsPath = paths.AbsPath; +pub const RelPath = paths.RelPath; +pub const EnvPath = paths.EnvPath; +pub const path_buffer_pool = paths.path_buffer_pool; +pub const w_path_buffer_pool = paths.w_path_buffer_pool; +pub const os_path_buffer_pool = paths.os_path_buffer_pool; pub inline fn cast(comptime To: type, value: anytype) To { if (@typeInfo(@TypeOf(value)) == .int) { @@ -743,14 +761,18 @@ pub fn openDirA(dir: std.fs.Dir, path_: []const u8) !std.fs.Dir { } } -pub fn openDirForIteration(dir: std.fs.Dir, path_: []const u8) !std.fs.Dir { +pub fn openDirForIteration(dir: FD, path_: []const u8) sys.Maybe(FD) { if (comptime Environment.isWindows) { - const res = try sys.openDirAtWindowsA(.fromStdDir(dir), path_, .{ .iterable = true, .can_rename_or_delete = false, .read_only = true }).unwrap(); - return res.stdDir(); - } else { - const fd = try sys.openatA(.fromStdDir(dir), path_, O.DIRECTORY | O.CLOEXEC | O.RDONLY, 0).unwrap(); - return fd.stdDir(); + return sys.openDirAtWindowsA(dir, path_, .{ .iterable = true, .can_rename_or_delete = false, .read_only = true }); } + return sys.openatA(dir, path_, O.DIRECTORY | O.CLOEXEC | O.RDONLY, 0); +} + +pub fn openDirForIterationOSPath(dir: FD, path_: []const OSPathChar) sys.Maybe(FD) { + if (comptime Environment.isWindows) { + return sys.openDirAtWindows(dir, path_, .{ .iterable = true, .can_rename_or_delete = false, .read_only = true }); + } + return sys.openatA(dir, path_, O.DIRECTORY | O.CLOEXEC | O.RDONLY, 0); } pub fn openDirAbsolute(path_: []const u8) !std.fs.Dir { @@ -1265,7 +1287,7 @@ pub fn getFdPath(fd: FileDescriptor, buf: *bun.PathBuffer) ![]u8 { if 
(comptime Environment.isWindows) { var wide_buf: WPathBuffer = undefined; const wide_slice = try windows.GetFinalPathNameByHandle(fd.native(), .{}, wide_buf[0..]); - const res = strings.copyUTF16IntoUTF8(buf[0..], @TypeOf(wide_slice), wide_slice, true); + const res = strings.copyUTF16IntoUTF8(buf[0..], @TypeOf(wide_slice), wide_slice); return buf[0..res.written]; } @@ -1913,6 +1935,7 @@ pub const StandaloneModuleGraph = @import("./StandaloneModuleGraph.zig").Standal const _string = @import("./string.zig"); pub const strings = @import("string_immutable.zig"); pub const String = _string.String; +pub const ZigString = JSC.ZigString; pub const StringJoiner = _string.StringJoiner; pub const SliceWithUnderlyingString = _string.SliceWithUnderlyingString; pub const PathString = _string.PathString; @@ -2702,8 +2725,8 @@ pub fn exitThread() noreturn { pub fn deleteAllPoolsForThreadExit() void { const pools_to_delete = .{ JSC.WebCore.ByteListPool, - bun.WPathBufferPool, - bun.PathBufferPool, + bun.w_path_buffer_pool, + bun.path_buffer_pool, bun.JSC.ConsoleObject.Formatter.Visited.Pool, bun.js_parser.StringVoidMap.Pool, }; @@ -2756,7 +2779,7 @@ pub fn errnoToZigErr(err: anytype) anyerror { pub const brotli = @import("./brotli.zig"); -pub fn iterateDir(dir: std.fs.Dir) DirIterator.Iterator { +pub fn iterateDir(dir: FD) DirIterator.Iterator { return DirIterator.iterate(dir, .u8).iter; } @@ -2975,7 +2998,7 @@ noinline fn assertionFailureAtLocation(src: std.builtin.SourceLocation) noreturn @compileError(std.fmt.comptimePrint("assertion failure")); } else { @branchHint(.cold); - Output.panic(assertion_failure_msg ++ "at {s}:{d}:{d}", .{ src.file, src.line, src.column }); + Output.panic(assertion_failure_msg ++ " at {s}:{d}:{d}", .{ src.file, src.line, src.column }); } } @@ -3632,6 +3655,15 @@ pub inline fn clear(val: anytype, allocator: std.mem.Allocator) void { } } +pub inline fn move(val: anytype) switch (@typeInfo(@TypeOf(val))) { + .pointer => |p| p.child, + else => @compileError("unexpected move type"), +} { + const tmp = val.*; + @constCast(val).* = undefined; + return tmp; +} + pub inline fn wrappingNegation(val: anytype) @TypeOf(val) { return 0 -% val; } @@ -3702,45 +3734,6 @@ pub noinline fn throwStackOverflow() StackOverflow!void { } const StackOverflow = error{StackOverflow}; -// This pool exists because on Windows, each path buffer costs 64 KB. -// This makes the stack memory usage very unpredictable, which means we can't really know how much stack space we have left. -// This pool is a workaround to make the stack memory usage more predictable. -// We keep up to 4 path buffers alive per thread at a time. -pub fn PathBufferPoolT(comptime T: type) type { - return struct { - const Pool = ObjectPool(PathBuf, null, true, 4); - pub const PathBuf = struct { - bytes: T, - - pub fn deinit(this: *PathBuf) void { - var node: *Pool.Node = @alignCast(@fieldParentPtr("data", this)); - node.release(); - } - }; - - pub fn get() *T { - // use a threadlocal allocator so mimalloc deletes it on thread deinit. - return &Pool.get(bun.threadlocalAllocator()).data.bytes; - } - - pub fn put(buffer: *T) void { - var path_buf: *PathBuf = @alignCast(@fieldParentPtr("bytes", buffer)); - path_buf.deinit(); - } - - pub fn deleteAll() void { - Pool.deleteAll(); - } - }; -} - -pub const PathBufferPool = PathBufferPoolT(bun.PathBuffer); -pub const WPathBufferPool = if (Environment.isWindows) PathBufferPoolT(bun.WPathBuffer) else struct { - // So it can be used in code that deletes all the pools. 
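// A minimal, std-only sketch of the pooling pattern described in the comment above:
// a per-thread free list of large fixed-size buffers, recycled via @fieldParentPtr.
// The names below are illustrative only; the real pool is built on bun.ObjectPool,
// caps retained buffers at 4 per thread, and frees them when the thread exits.
//
//     const std = @import("std");
//     const Node = struct { next: ?*Node = null, bytes: [64 * 1024]u8 = undefined };
//     threadlocal var free_list: ?*Node = null;
//
//     fn get(allocator: std.mem.Allocator) !*[64 * 1024]u8 {
//         if (free_list) |node| {
//             free_list = node.next;
//             return &node.bytes;
//         }
//         const node = try allocator.create(Node);
//         node.* = .{};
//         return &node.bytes;
//     }
//
//     fn put(buffer: *[64 * 1024]u8) void {
//         const node: *Node = @alignCast(@fieldParentPtr("bytes", buffer));
//         node.next = free_list;
//         free_list = node;
//     }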
- pub fn deleteAll() void {} -}; -pub const OSPathBufferPool = if (Environment.isWindows) WPathBufferPool else PathBufferPool; - pub const S3 = @import("./s3/client.zig"); pub const ptr = @import("ptr.zig"); @@ -3762,13 +3755,12 @@ pub const highway = @import("./highway.zig"); pub const MemoryReportingAllocator = @import("allocators/MemoryReportingAllocator.zig"); -pub fn move(dest: []u8, src: []const u8) void { - if (comptime Environment.allow_assert) { - if (src.len != dest.len) { - bun.Output.panic("Move: src.len != dest.len, {d} != {d}", .{ src.len, dest.len }); - } - } - _ = bun.c.memmove(dest.ptr, src.ptr, src.len); -} - pub const mach_port = if (Environment.isMac) std.c.mach_port_t else u32; + +pub fn contains(item: anytype, list: *const std.ArrayListUnmanaged(@TypeOf(item))) bool { + const T = @TypeOf(item); + return switch (T) { + u8 => strings.containsChar(list.items, item), + else => std.mem.indexOfScalar(T, list.items, item) != null, + }; +} diff --git a/src/bundler/Chunk.zig b/src/bundler/Chunk.zig index f9aace07f7..fa58ce93b5 100644 --- a/src/bundler/Chunk.zig +++ b/src/bundler/Chunk.zig @@ -27,6 +27,7 @@ pub const Chunk = struct { is_executable: bool = false, has_html_chunk: bool = false, + is_browser_chunk_from_server_build: bool = false, output_source_map: sourcemap.SourceMapPieces, diff --git a/src/bundler/HTMLImportManifest.zig b/src/bundler/HTMLImportManifest.zig index 58a199cdea..a187d1f5d9 100644 --- a/src/bundler/HTMLImportManifest.zig +++ b/src/bundler/HTMLImportManifest.zig @@ -128,13 +128,26 @@ pub fn write(index: u32, graph: *const Graph, linker_graph: *const LinkerGraph, try writer.writeAll("{"); + const inject_compiler_filesystem_prefix = bv2.transpiler.options.compile; + // Use the server-side public path here. + const public_path = bv2.transpiler.options.public_path; + var temp_buffer = std.ArrayList(u8).init(bun.default_allocator); + defer temp_buffer.deinit(); + for (chunks) |*ch| { if (ch.entry_point.source_index == browser_source_index and ch.entry_point.is_entry_point) { entry_point_bits.set(ch.entry_point.entry_point_id); if (ch.content == .html) { try writer.writeAll("\"index\":"); - try bun.js_printer.writeJSONString(ch.final_rel_path, @TypeOf(writer), writer, .utf8); + if (inject_compiler_filesystem_prefix) { + temp_buffer.clearRetainingCapacity(); + try temp_buffer.appendSlice(public_path); + try temp_buffer.appendSlice(bun.strings.removeLeadingDotSlash(ch.final_rel_path)); + try bun.js_printer.writeJSONString(temp_buffer.items, @TypeOf(writer), writer, .utf8); + } else { + try bun.js_printer.writeJSONString(ch.final_rel_path, @TypeOf(writer), writer, .utf8); + } try writer.writeAll(","); } } @@ -167,13 +180,20 @@ pub fn write(index: u32, graph: *const Graph, linker_graph: *const LinkerGraph, .posix, false, ); - if (path_for_key.len > 2 and strings.eqlComptime(path_for_key[0..2], "./")) { - path_for_key = path_for_key[2..]; - } + + path_for_key = bun.strings.removeLeadingDotSlash(path_for_key); break :brk path_for_key; }, - ch.final_rel_path, + brk: { + if (inject_compiler_filesystem_prefix) { + temp_buffer.clearRetainingCapacity(); + try temp_buffer.appendSlice(public_path); + try temp_buffer.appendSlice(bun.strings.removeLeadingDotSlash(ch.final_rel_path)); + break :brk temp_buffer.items; + } + break :brk ch.final_rel_path; + }, ch.isolated_hash, ch.content.loader(), if (ch.entry_point.is_entry_point) @@ -203,14 +223,20 @@ pub fn write(index: u32, graph: *const Graph, linker_graph: *const LinkerGraph, .posix, false, ); - if (path_for_key.len > 2 
and strings.eqlComptime(path_for_key[0..2], "./")) { - path_for_key = path_for_key[2..]; - } + path_for_key = bun.strings.removeLeadingDotSlash(path_for_key); try writeEntryItem( writer, path_for_key, - output_file.dest_path, + brk: { + if (inject_compiler_filesystem_prefix) { + temp_buffer.clearRetainingCapacity(); + try temp_buffer.appendSlice(public_path); + try temp_buffer.appendSlice(bun.strings.removeLeadingDotSlash(output_file.dest_path)); + break :brk temp_buffer.items; + } + break :brk output_file.dest_path; + }, output_file.hash, output_file.loader, output_file.output_kind, diff --git a/src/bundler/LinkerContext.zig b/src/bundler/LinkerContext.zig index 89f238947d..3389628624 100644 --- a/src/bundler/LinkerContext.zig +++ b/src/bundler/LinkerContext.zig @@ -841,13 +841,18 @@ pub const LinkerContext = struct { // any import to be considered different if the import's output path has changed. hasher.write(chunk.template.data); + const public_path = if (chunk.is_browser_chunk_from_server_build) + @as(*bundler.BundleV2, @fieldParentPtr("linker", c)).transpilerForTarget(.browser).options.public_path + else + c.options.public_path; + // Also hash the public path. If provided, this is used whenever files // reference each other such as cross-chunk imports, asset file references, // and source map comments. We always include the hash in all chunks instead // of trying to figure out which chunks will include the public path for // simplicity and for robustness to code changes in the future. - if (c.options.public_path.len > 0) { - hasher.write(c.options.public_path); + if (public_path.len > 0) { + hasher.write(public_path); } // Include the generated output content in the hash. This excludes the @@ -890,13 +895,13 @@ pub const LinkerContext = struct { pub fn validateTLA( c: *LinkerContext, source_index: Index.Int, - tla_keywords: []Logger.Range, + tla_keywords: []const Logger.Range, tla_checks: []js_ast.TlaCheck, - input_files: []Logger.Source, - import_records: []ImportRecord, + input_files: []const Logger.Source, + import_records: []const ImportRecord, meta_flags: []JSMeta.Flags, - ast_import_records: []bun.BabyList(ImportRecord), - ) js_ast.TlaCheck { + ast_import_records: []const bun.BabyList(ImportRecord), + ) bun.OOM!js_ast.TlaCheck { var result_tla_check: *js_ast.TlaCheck = &tla_checks[source_index]; if (result_tla_check.depth == 0) { @@ -907,7 +912,15 @@ pub const LinkerContext = struct { for (import_records, 0..) 
|record, import_record_index| { if (Index.isValid(record.source_index) and (record.kind == .require or record.kind == .stmt)) { - const parent = c.validateTLA(record.source_index.get(), tla_keywords, tla_checks, input_files, import_records, meta_flags, ast_import_records); + const parent = try c.validateTLA( + record.source_index.get(), + tla_keywords, + tla_checks, + input_files, + ast_import_records[record.source_index.get()].slice(), + meta_flags, + ast_import_records, + ); if (Index.isInvalid(Index.init(parent.parent))) { continue; } @@ -944,31 +957,31 @@ pub const LinkerContext = struct { } if (!Index.isValid(Index.init(parent_tla_check.parent))) { - notes.append(Logger.Data{ + try notes.append(Logger.Data{ .text = "unexpected invalid index", - }) catch bun.outOfMemory(); + }); break; } other_source_index = parent_tla_check.parent; - notes.append(Logger.Data{ - .text = std.fmt.allocPrint(c.allocator, "The file {s} imports the file {s} here:", .{ + try notes.append(Logger.Data{ + .text = try std.fmt.allocPrint(c.allocator, "The file {s} imports the file {s} here:", .{ input_files[parent_source_index].path.pretty, input_files[other_source_index].path.pretty, - }) catch bun.outOfMemory(), + }), .location = .initOrNull(&input_files[parent_source_index], ast_import_records[parent_source_index].slice()[tla_checks[parent_source_index].import_record_index].range), - }) catch bun.outOfMemory(); + }); } const source: *const Logger.Source = &input_files[source_index]; const imported_pretty_path = source.path.pretty; const text: string = if (strings.eql(imported_pretty_path, tla_pretty_path)) - std.fmt.allocPrint(c.allocator, "This require call is not allowed because the imported file \"{s}\" contains a top-level await", .{imported_pretty_path}) catch bun.outOfMemory() + try std.fmt.allocPrint(c.allocator, "This require call is not allowed because the imported file \"{s}\" contains a top-level await", .{imported_pretty_path}) else - std.fmt.allocPrint(c.allocator, "This require call is not allowed because the transitive dependency \"{s}\" contains a top-level await", .{tla_pretty_path}) catch bun.outOfMemory(); + try std.fmt.allocPrint(c.allocator, "This require call is not allowed because the transitive dependency \"{s}\" contains a top-level await", .{tla_pretty_path}); - c.log.addRangeErrorWithNotes(source, record.range, text, notes.items) catch bun.outOfMemory(); + try c.log.addRangeErrorWithNotes(source, record.range, text, notes.items); } } } diff --git a/src/bundler/ParseTask.zig b/src/bundler/ParseTask.zig index c42df528f3..b12b1c8ef4 100644 --- a/src/bundler/ParseTask.zig +++ b/src/bundler/ParseTask.zig @@ -34,10 +34,6 @@ emit_decorator_metadata: bool = false, ctx: *BundleV2, package_version: string = "", is_entry_point: bool = false, -/// This is set when the file is an entrypoint, and it has an onLoad plugin. -/// In this case we want to defer adding this to additional_files until after -/// the onLoad plugin has finished. -defer_copy_for_bundling: bool = false, const ParseTaskStage = union(enum) { needs_source_code: void, diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 9c7c076fdc..8844b3207d 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -136,6 +136,9 @@ pub const BundleV2 = struct { /// Set true by DevServer. Currently every usage of the transpiler (Bun.build /// and `bun build` cli) runs at the top of an event loop. When this is /// true, a callback is executed after all work is complete. 
+ /// + /// You can find which callbacks are run by looking at the + /// `finishFromBakeDevServer(...)` function. asynchronous: bool = false, thread_lock: bun.DebugThreadLock, @@ -179,27 +182,23 @@ const this_transpiler = this.transpiler; const client_transpiler = try allocator.create(Transpiler); - const defines = this_transpiler.options.transform_options.define; client_transpiler.* = this_transpiler.*; client_transpiler.options = this_transpiler.options; client_transpiler.options.target = .browser; client_transpiler.options.main_fields = options.Target.DefaultMainFields.get(options.Target.browser); client_transpiler.options.conditions = try options.ESMConditions.init(allocator, options.Target.browser.defaultConditions()); - client_transpiler.options.define = try options.Define.init( - allocator, - if (defines) |user_defines| - try options.Define.Data.fromInput(try options.stringHashMapFromArrays( - options.defines.RawDefines, - allocator, - user_defines.keys, - user_defines.values, - ), this_transpiler.options.transform_options.drop, this_transpiler.log, allocator) - else - null, - null, - this_transpiler.options.define.drop_debugger, - ); + + // We need to make sure it has [hash] in the names so we don't get conflicts. + if (this_transpiler.options.compile) { + client_transpiler.options.asset_naming = bun.options.PathTemplate.asset.data; + client_transpiler.options.chunk_naming = bun.options.PathTemplate.chunk.data; + client_transpiler.options.entry_naming = "./[name]-[hash].[ext]"; + + // Avoid setting a public path for --compile since all the assets + // will be served relative to the server root. + client_transpiler.options.public_path = ""; + } client_transpiler.setLog(this_transpiler.log); client_transpiler.setAllocator(allocator); @@ -207,8 +206,10 @@ client_transpiler.macro_context = js_ast.Macro.MacroContext.init(client_transpiler); const CacheSet = @import("../cache.zig"); client_transpiler.resolver.caches = CacheSet.Set.init(allocator); - client_transpiler.resolver.opts = client_transpiler.options; + try client_transpiler.configureDefines(); + client_transpiler.resolver.opts = client_transpiler.options; + client_transpiler.resolver.env_loader = client_transpiler.env; this.client_transpiler = client_transpiler; return client_transpiler; } @@ -1525,8 +1526,12 @@ pub const BundleV2 = struct { else PathTemplate.asset; - if (this.transpiler.options.asset_naming.len > 0) - template.data = this.transpiler.options.asset_naming; + const target = targets[index]; + const asset_naming = this.transpilerForTarget(target).options.asset_naming; + if (asset_naming.len > 0) { + template.data = asset_naming; + } + const source = &sources[index]; const output_path = brk: { @@ -1546,7 +1551,7 @@ pub const BundleV2 = struct { } if (template.needs(.target)) { - template.placeholder.target = @tagName(targets[index]); + template.placeholder.target = @tagName(target); } break :brk std.fmt.allocPrint(bun.default_allocator, "{}", .{template}) catch bun.outOfMemory(); }; @@ -1750,7 +1755,7 @@ pub const BundleV2 = struct { if (!transpiler.options.production) { try transpiler.options.conditions.appendSlice(&.{"development"}); } - + transpiler.resolver.env_loader = transpiler.env; transpiler.resolver.opts = transpiler.options; } @@ -1862,7 +1867,7 @@ pub const BundleV2 = struct { to_assign_on_sourcemap = result; } - output_files_js.putIndex(globalThis, @as(u32, @intCast(i)), result); + output_files_js.putIndex(globalThis, @as(u32, @intCast(i)),
result) catch return; // TODO: properly propagate exception upwards } root_obj.put(globalThis, JSC.ZigString.static("outputs"), output_files_js); @@ -1959,7 +1964,8 @@ pub const BundleV2 = struct { this.decrementScanCounter(); }, .success => |code| { - const should_copy_for_bundling = load.parse_task.defer_copy_for_bundling and code.loader.shouldCopyForBundling(); + // When a plugin returns a file loader, we always need to populate additional_files + const should_copy_for_bundling = code.loader.shouldCopyForBundling(); if (should_copy_for_bundling) { const source_index = load.source_index; var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; @@ -2614,9 +2620,6 @@ pub const BundleV2 = struct { pub fn enqueueOnLoadPluginIfNeededImpl(this: *BundleV2, parse: *ParseTask) bool { if (this.plugins) |plugins| { if (plugins.hasAnyMatches(&parse.path, true)) { - if (parse.is_entry_point and parse.loader != null and parse.loader.?.shouldCopyForBundling()) { - parse.defer_copy_for_bundling = true; - } // This is where onLoad plugins are enqueued debug("enqueue onLoad: {s}:{s}", .{ parse.path.namespace, @@ -2950,8 +2953,8 @@ pub const BundleV2 = struct { ) catch bun.outOfMemory(); } } else { - const buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf); + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); const specifier_to_use = if (loader == .html and bun.strings.hasPrefix(import_record.path.text, bun.fs.FileSystem.instance.top_level_dir)) brk: { const specifier_to_use = import_record.path.text[bun.fs.FileSystem.instance.top_level_dir.len..]; if (Environment.isWindows) { diff --git a/src/bundler/linker_context/README.md b/src/bundler/linker_context/README.md index 673f611d20..61ef4b0008 100644 --- a/src/bundler/linker_context/README.md +++ b/src/bundler/linker_context/README.md @@ -557,13 +557,11 @@ For production builds, the function uses frequency-based minification for optima _Algorithm steps_: 1. **Character Frequency Analysis**: - - Analyzes character usage across all files in the chunk - Builds frequency map to generate shortest possible identifiers - Common patterns get shorter names (e.g., `a`, `b`, `c` for most used symbols) 2. **Symbol Usage Counting**: - - Counts how often each symbol is used throughout the chunk - Prioritizes frequently-used symbols for shortest names - Includes special handling for `exports` and `module` references @@ -879,7 +877,6 @@ When bundling modules, Bun often needs to wrap module code in runtime functions 1. **Module Wrapper Management**: Determines which statements can be placed inside wrapper functions vs. which must remain at the top level 2. 
**Import/Export Statement Processing**: Transforms import/export syntax based on output format and bundling context - - Converts `export * from 'path'` to import statements when needed - Strips export keywords when bundling (since internal modules don't need exports) - Handles re-export runtime function calls diff --git a/src/bundler/linker_context/computeChunks.zig b/src/bundler/linker_context/computeChunks.zig index 2bd7b79d2f..d2f5abeef0 100644 --- a/src/bundler/linker_context/computeChunks.zig +++ b/src/bundler/linker_context/computeChunks.zig @@ -25,8 +25,11 @@ pub noinline fn computeChunks( const css_chunking = this.options.css_chunking; var html_chunks = bun.StringArrayHashMap(Chunk).init(temp_allocator); const loaders = this.parse_graph.input_files.items(.loader); + const ast_targets = this.graph.ast.items(.target); const code_splitting = this.graph.code_splitting; + const could_be_browser_target_from_server_build = this.options.target.isServerSide() and this.parse_graph.html_imports.html_source_indices.len > 0; + const has_server_html_imports = this.parse_graph.html_imports.server_source_indices.len > 0; // Create chunks for entry points for (entry_source_indices, 0..) |source_index, entry_id_| { @@ -61,6 +64,7 @@ pub noinline fn computeChunks( .entry_bits = entry_bits.*, .content = .html, .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), + .is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index] == .browser, }; } } @@ -95,6 +99,7 @@ pub noinline fn computeChunks( }, .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), .has_html_chunk = has_html_chunk, + .is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index] == .browser, }; } @@ -116,6 +121,7 @@ pub noinline fn computeChunks( }, .has_html_chunk = has_html_chunk, .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), + .is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index] == .browser, }; { @@ -129,7 +135,8 @@ pub noinline fn computeChunks( if (css_source_indices.len > 0) { const order = this.findImportedFilesInCSSOrder(temp_allocator, css_source_indices.slice()); - const hash_to_use = if (!css_chunking) + const use_content_based_key = css_chunking or has_server_html_imports; + const hash_to_use = if (!use_content_based_key) bun.hash(try temp_allocator.dupe(u8, entry_bits.bytes(this.graph.entry_points.len))) else brk: { var hasher = std.hash.Wyhash.init(5); @@ -168,6 +175,7 @@ pub noinline fn computeChunks( .files_with_parts_in_chunk = css_files_with_parts_in_chunk, .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), .has_html_chunk = has_html_chunk, + .is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index] == .browser, }; } } @@ -200,6 +208,7 @@ pub noinline fn computeChunks( var js_chunk_entry = try js_chunks.getOrPut(js_chunk_key); if (!js_chunk_entry.found_existing) { + const is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index.get()] == .browser; js_chunk_entry.value_ptr.* = .{ .entry_bits = entry_bits.*, .entry_point = .{ @@ -209,6 +218,7 @@ pub noinline fn computeChunks( .javascript = .{}, }, .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), + .is_browser_chunk_from_server_build = is_browser_chunk_from_server_build, }; } @@ -287,6 +297,12 @@ pub noinline fn computeChunks( // 
to look up the path for this chunk to use with the import. for (chunks, 0..) |*chunk, chunk_id| { if (chunk.entry_point.is_entry_point) { + // JS entry points that import CSS files generate two chunks, a JS chunk + // and a CSS chunk. Don't link the CSS chunk to the JS file since the CSS + // chunk is secondary (the JS chunk is primary). + if (chunk.content == .css and css_asts[chunk.entry_point.source_index] == null) { + continue; + } entry_point_chunk_indices[chunk.entry_point.source_index] = @intCast(chunk_id); } } @@ -305,6 +321,7 @@ pub noinline fn computeChunks( const kinds = this.graph.files.items(.entry_point_kind); const output_paths = this.graph.entry_points.items(.output_path); + const bv2: *bundler.BundleV2 = @fieldParentPtr("linker", this); for (chunks, 0..) |*chunk, chunk_id| { // Assign a unique key to each chunk. This key encodes the index directly so // we can easily recover it later without needing to look it up in a map. The @@ -317,21 +334,27 @@ pub noinline fn computeChunks( (chunk.content == .html or (kinds[chunk.entry_point.source_index] == .user_specified and !chunk.has_html_chunk))) { // Use fileWithTarget template if there are HTML imports and user hasn't manually set naming - if (this.parse_graph.html_imports.server_source_indices.len > 0 and this.resolver.opts.entry_naming.len == 0) { + if (has_server_html_imports and bv2.transpiler.options.entry_naming.len == 0) { chunk.template = PathTemplate.fileWithTarget; } else { chunk.template = PathTemplate.file; - if (this.resolver.opts.entry_naming.len > 0) - chunk.template.data = this.resolver.opts.entry_naming; + if (chunk.is_browser_chunk_from_server_build) { + chunk.template.data = bv2.transpilerForTarget(.browser).options.entry_naming; + } else { + chunk.template.data = bv2.transpiler.options.entry_naming; + } } } else { - if (this.parse_graph.html_imports.server_source_indices.len > 0 and this.resolver.opts.chunk_naming.len == 0) { + if (has_server_html_imports and bv2.transpiler.options.chunk_naming.len == 0) { chunk.template = PathTemplate.chunkWithTarget; } else { chunk.template = PathTemplate.chunk; + if (chunk.is_browser_chunk_from_server_build) { + chunk.template.data = bv2.transpilerForTarget(.browser).options.chunk_naming; + } else { + chunk.template.data = bv2.transpiler.options.chunk_naming; + } } - if (this.resolver.opts.chunk_naming.len > 0) - chunk.template.data = this.resolver.opts.chunk_naming; } const pathname = Fs.PathName.init(output_paths[chunk.entry_point.entry_point_id].slice()); @@ -340,7 +363,6 @@ pub noinline fn computeChunks( if (chunk.template.needs(.target)) { // Determine the target from the AST of the entry point source - const ast_targets = this.graph.ast.items(.target); const chunk_target = ast_targets[chunk.entry_point.source_index]; chunk.template.placeholder.target = switch (chunk_target) { .browser => "browser", diff --git a/src/bundler/linker_context/computeCrossChunkDependencies.zig b/src/bundler/linker_context/computeCrossChunkDependencies.zig index 7e3114a607..2135c98a1c 100644 --- a/src/bundler/linker_context/computeCrossChunkDependencies.zig +++ b/src/bundler/linker_context/computeCrossChunkDependencies.zig @@ -89,7 +89,7 @@ const CrossChunkDependencies = struct { const wrapper_ref = deps.wrapper_refs[source_index]; const _chunks = deps.chunks; - for (parts) |part| { + for (parts) |*part| { if (!part.is_live) continue; diff --git a/src/bundler/linker_context/generateChunksInParallel.zig b/src/bundler/linker_context/generateChunksInParallel.zig index 23a5ba8210..e09ce74aea 
100644 --- a/src/bundler/linker_context/generateChunksInParallel.zig +++ b/src/bundler/linker_context/generateChunksInParallel.zig @@ -340,6 +340,8 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_ return error.MultipleOutputFilesWithoutOutputDir; } + const bundler = @as(*bun.bundle_v2.BundleV2, @fieldParentPtr("linker", c)); + if (root_path.len > 0) { try c.writeOutputFilesToDisk(root_path, chunks, &output_files); } else { @@ -347,11 +349,16 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_ for (chunks) |*chunk| { var display_size: usize = 0; + const public_path = if (chunk.is_browser_chunk_from_server_build) + bundler.transpilerForTarget(.browser).options.public_path + else + c.options.public_path; + const _code_result = chunk.intermediate_output.code( null, c.parse_graph, &c.graph, - c.resolver.opts.public_path, + public_path, chunk, chunks, &display_size, @@ -376,8 +383,8 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_ bun.copy(u8, source_map_final_rel_path[chunk.final_rel_path.len..], ".map"); if (tag == .linked) { - const a, const b = if (c.options.public_path.len > 0) - cheapPrefixNormalizer(c.options.public_path, source_map_final_rel_path) + const a, const b = if (public_path.len > 0) + cheapPrefixNormalizer(public_path, source_map_final_rel_path) else .{ "", std.fs.path.basename(source_map_final_rel_path) }; @@ -471,7 +478,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_ .data = .{ .buffer = .{ .data = bytecode, .allocator = cached_bytecode.allocator() }, }, - .side = null, + .side = .server, .entry_point_index = null, .is_executable = false, }); @@ -522,7 +529,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_ .is_executable = chunk.is_executable, .source_map_index = source_map_index, .bytecode_index = bytecode_index, - .side = if (chunk.content == .css) + .side = if (chunk.content == .css or chunk.is_browser_chunk_from_server_build) .client else switch (c.graph.ast.items(.target)[chunk.entry_point.source_index]) { .browser => .client, diff --git a/src/bundler/linker_context/scanImportsAndExports.zig b/src/bundler/linker_context/scanImportsAndExports.zig index c510cdde3d..2d53e3fabe 100644 --- a/src/bundler/linker_context/scanImportsAndExports.zig +++ b/src/bundler/linker_context/scanImportsAndExports.zig @@ -80,7 +80,7 @@ pub fn scanImportsAndExports(this: *LinkerContext) !void { continue; } - _ = this.validateTLA(id, tla_keywords, tla_checks, input_files, import_records, flags, import_records_list); + _ = try this.validateTLA(id, tla_keywords, tla_checks, input_files, import_records, flags, import_records_list); for (import_records) |record| { if (!record.source_index.isValid()) { diff --git a/src/bundler/linker_context/writeOutputFilesToDisk.zig b/src/bundler/linker_context/writeOutputFilesToDisk.zig index 745649726b..d3f5622f5c 100644 --- a/src/bundler/linker_context/writeOutputFilesToDisk.zig +++ b/src/bundler/linker_context/writeOutputFilesToDisk.zig @@ -39,6 +39,7 @@ pub fn writeOutputFilesToDisk( const code_with_inline_source_map_allocator = max_heap_allocator_inline_source_map.init(bun.default_allocator); var pathbuf: bun.PathBuffer = undefined; + const bv2: *bundler.BundleV2 = @fieldParentPtr("linker", c); for (chunks) |*chunk| { const trace2 = bun.perf.trace("Bundler.writeChunkToDisk"); @@ -59,11 +60,16 @@ pub fn writeOutputFilesToDisk( } } var display_size: usize = 0; + const public_path = if 
(chunk.is_browser_chunk_from_server_build) + bv2.transpilerForTarget(.browser).options.public_path + else + c.resolver.opts.public_path; + var code_result = chunk.intermediate_output.code( code_allocator, c.parse_graph, &c.graph, - c.resolver.opts.public_path, + public_path, chunk, chunks, &display_size, @@ -89,8 +95,8 @@ pub fn writeOutputFilesToDisk( }) catch @panic("Failed to allocate memory for external source map path"); if (tag == .linked) { - const a, const b = if (c.options.public_path.len > 0) - cheapPrefixNormalizer(c.options.public_path, source_map_final_rel_path) + const a, const b = if (public_path.len > 0) + cheapPrefixNormalizer(public_path, source_map_final_rel_path) else .{ "", std.fs.path.basename(source_map_final_rel_path) }; diff --git a/src/cli.zig b/src/cli.zig index f95921e6e8..96c3f2d4b1 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -222,8 +222,8 @@ pub const HelpCommand = struct { ; const cli_helptext_footer = \\ - \\Learn more about Bun: https://bun.sh/docs - \\Join our Discord community: https://bun.sh/discord + \\Learn more about Bun: https://bun.com/docs + \\Join our Discord community: https://bun.com/discord \\ ; @@ -357,6 +357,7 @@ pub const Command = struct { only: bool = false, bail: u32 = 0, coverage: TestCommand.CodeCoverageOptions = .{}, + test_filter_pattern: ?[]const u8 = null, test_filter_regex: ?*RegularExpression = null, file_reporter: ?TestCommand.FileReporter = null, @@ -1362,10 +1363,10 @@ pub const Command = struct { \\ Bundle code to be run in Bun (reduces server startup time) \\ bun build --target=bun --outfile=server.js ./server.ts \\ - \\ Creating a standalone executable (see https://bun.sh/docs/bundler/executables) + \\ Creating a standalone executable (see https://bun.com/docs/bundler/executables) \\ bun build --compile --outfile=my-app ./cli.ts \\ - \\A full list of flags is available at https://bun.sh/docs/bundler + \\A full list of flags is available at https://bun.com/docs/bundler \\ ; @@ -1393,7 +1394,7 @@ pub const Command = struct { \\ Run all test files, only including tests whose names includes "baz" \\ bun test --test-name-pattern baz \\ - \\Full documentation is available at https://bun.sh/docs/cli/test + \\Full documentation is available at https://bun.com/docs/cli/test \\ ; @@ -1433,7 +1434,7 @@ pub const Command = struct { \\ • GitHub: Downloads repository contents as template \\ • Local: Uses templates from $HOME/.bun-create/<name> or ./.bun-create/<name> \\ - \\Learn more: https://bun.sh/docs/cli/bun-create + \\Learn more: https://bun.com/docs/cli/bun-create \\ ; @@ -1458,7 +1459,7 @@ pub const Command = struct { \\ {s} \\ bun upgrade --{s} \\ - \\Full documentation is available at https://bun.sh/docs/installation#upgrading + \\Full documentation is available at https://bun.com/docs/installation#upgrading \\ ; @@ -1537,7 +1538,7 @@ pub const Command = struct { \\ bun info react dependencies \\ bun info react versions \\ - \\Full documentation is available at https://bun.sh/docs/cli/info + \\Full documentation is available at https://bun.com/docs/cli/info \\ ; diff --git a/src/cli/Arguments.zig b/src/cli/Arguments.zig index d29edde371..55eb512c7f 100644 --- a/src/cli/Arguments.zig +++ b/src/cli/Arguments.zig @@ -82,6 +82,7 @@ pub const runtime_params_ = [_]ParamType{ clap.parseParam("--smol Use less memory, but run garbage collection more often") catch unreachable, clap.parseParam("-r, --preload <STR>... Import a module before other modules are loaded") catch unreachable, clap.parseParam("--require <STR>... 
Alias of --preload, for Node.js compatibility") catch unreachable, + clap.parseParam("--import <STR>... Alias of --preload, for Node.js compatibility") catch unreachable, clap.parseParam("--inspect <STR>? Activate Bun's debugger") catch unreachable, clap.parseParam("--inspect-wait <STR>? Activate Bun's debugger, wait for a connection before executing") catch unreachable, clap.parseParam("--inspect-brk <STR>? Activate Bun's debugger, set breakpoint on first line of code and wait") catch unreachable, @@ -462,6 +463,7 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C } } if (args.option("--test-name-pattern")) |namePattern| { + ctx.test_options.test_filter_pattern = namePattern; const regex = RegularExpression.init(bun.String.fromBytes(namePattern), RegularExpression.Flags.none) catch { Output.prettyErrorln( "error: --test-name-pattern expects a valid regular expression but received {}", @@ -542,13 +544,23 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C // runtime commands if (cmd == .AutoCommand or cmd == .RunCommand or cmd == .TestCommand or cmd == .RunAsNodeCommand) { - var preloads = args.options("--preload"); - if (preloads.len == 0) { - if (bun.getenvZ("BUN_INSPECT_PRELOAD")) |preload| { - preloads = bun.default_allocator.dupe([]const u8, &.{preload}) catch unreachable; + { + const preloads = args.options("--preload"); + const preloads2 = args.options("--require"); + const preloads3 = args.options("--import"); + const preload4 = bun.getenvZ("BUN_INSPECT_PRELOAD"); + + const total_preloads = ctx.preloads.len + preloads.len + preloads2.len + preloads3.len + (if (preload4 != null) @as(usize, 1) else @as(usize, 0)); + if (total_preloads > 0) { + var all = std.ArrayList(string).initCapacity(ctx.allocator, total_preloads) catch unreachable; + if (ctx.preloads.len > 0) all.appendSliceAssumeCapacity(ctx.preloads); + if (preloads.len > 0) all.appendSliceAssumeCapacity(preloads); + if (preloads2.len > 0) all.appendSliceAssumeCapacity(preloads2); + if (preloads3.len > 0) all.appendSliceAssumeCapacity(preloads3); + if (preload4) |p| all.appendAssumeCapacity(p); + ctx.preloads = all.items; + } } - const preloads2 = args.options("--require"); if (args.flag("--hot")) { ctx.debug.hot_reload = .hot; @@ -645,25 +657,6 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C } } - if (ctx.preloads.len > 0 and (preloads.len > 0 or preloads2.len > 0)) { - var all = std.ArrayList(string).initCapacity(ctx.allocator, ctx.preloads.len + preloads.len + preloads2.len) catch unreachable; - all.appendSliceAssumeCapacity(ctx.preloads); - all.appendSliceAssumeCapacity(preloads); - all.appendSliceAssumeCapacity(preloads2); - ctx.preloads = all.items; - } else if (preloads.len > 0) { - if (preloads2.len > 0) { - var all = std.ArrayList(string).initCapacity(ctx.allocator, preloads.len + preloads2.len) catch unreachable; - all.appendSliceAssumeCapacity(preloads); - all.appendSliceAssumeCapacity(preloads2); - ctx.preloads = all.items; - } else { - ctx.preloads = preloads; - } - } else if (preloads2.len > 0) { - ctx.preloads = preloads2; - } - if (args.option("--print")) |script| { ctx.runtime_options.eval.script = script; ctx.runtime_options.eval.eval_and_print = true; diff --git a/src/cli/add_command.zig b/src/cli/add_command.zig index f16b6cca1e..407daaf12b 100644 --- a/src/cli/add_command.zig +++ b/src/cli/add_command.zig @@ -1,8 +1,13 @@ -const Command = @import("../cli.zig").Command; -const PackageManager = 
@import("../install/install.zig").PackageManager; - pub const AddCommand = struct { pub fn exec(ctx: Command.Context) !void { - try PackageManager.add(ctx); + try updatePackageJSONAndInstallCatchError(ctx, .add); } }; + +// @sortImports + +const bun = @import("bun"); +const Command = bun.CLI.Command; + +const PackageManager = bun.install.PackageManager; +const updatePackageJSONAndInstallCatchError = PackageManager.updatePackageJSONAndInstallCatchError; diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig index 225f50af0a..4258efc82d 100644 --- a/src/cli/build_command.zig +++ b/src/cli/build_command.zig @@ -113,6 +113,7 @@ pub const BuildCommand = struct { } this_transpiler.options.bytecode = ctx.bundler_options.bytecode; + var was_renamed_from_index = false; if (ctx.bundler_options.compile) { if (ctx.bundler_options.code_splitting) { @@ -140,6 +141,7 @@ pub const BuildCommand = struct { if (strings.eqlComptime(outfile, "index")) { outfile = std.fs.path.basename(std.fs.path.dirname(this_transpiler.options.entry_points[0]) orelse "index"); + was_renamed_from_index = !strings.eqlComptime(outfile, "index"); } if (strings.eqlComptime(outfile, "bun")) { @@ -223,6 +225,7 @@ pub const BuildCommand = struct { } this_transpiler.resolver.opts = this_transpiler.options; + this_transpiler.resolver.env_loader = this_transpiler.env; this_transpiler.options.jsx.development = !this_transpiler.options.production; this_transpiler.resolver.opts.jsx.development = this_transpiler.options.jsx.development; @@ -266,7 +269,9 @@ pub const BuildCommand = struct { try bun.bake.addImportMetaDefines(allocator, client_transpiler.options.define, .development, .client); this_transpiler.resolver.opts = this_transpiler.options; + this_transpiler.resolver.env_loader = this_transpiler.env; client_transpiler.resolver.opts = client_transpiler.options; + client_transpiler.resolver.env_loader = client_transpiler.env; } // var env_loader = this_transpiler.env; @@ -353,7 +358,20 @@ pub const BuildCommand = struct { if (output_dir.len == 0 and outfile.len > 0 and will_be_one_file) { output_dir = std.fs.path.dirname(outfile) orelse "."; - output_files[0].dest_path = std.fs.path.basename(outfile); + if (ctx.bundler_options.compile) { + // If the first output file happens to be a client-side chunk imported server-side + // then don't rename it to something else, since an HTML + // import manifest might depend on the file path being the + // one we think it should be. + for (output_files) |*f| { + if (f.output_kind == .@"entry-point" and (f.side orelse .server) == .server) { + f.dest_path = std.fs.path.basename(outfile); + break; + } + } + } else { + output_files[0].dest_path = std.fs.path.basename(outfile); + } } if (!ctx.bundler_options.compile) { @@ -416,6 +434,11 @@ pub const BuildCommand = struct { if (compile_target.os == .windows and !strings.hasSuffixComptime(outfile, ".exe")) { outfile = try std.fmt.allocPrint(allocator, "{s}.exe", .{outfile}); + } else if (was_renamed_from_index and !bun.strings.eqlComptime(outfile, "index")) { + // If we're going to fail due to EISDIR, we should instead pick a different name. 
+ if (bun.sys.directoryExistsAt(bun.FD.fromStdDir(root_dir), outfile).asValue() orelse false) { + outfile = "index"; + } } try bun.StandaloneModuleGraph.toExecutable( diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index 08faf854f9..ab8aa095f5 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -196,8 +196,7 @@ pub const BunxCommand = struct { if (bin_prop.expr.asString(transpiler.allocator)) |dir_name| { const bin_dir = try bun.sys.openatA(dir_fd, dir_name, bun.O.RDONLY | bun.O.DIRECTORY, 0).unwrap(); defer bin_dir.close(); - const dir = std.fs.Dir{ .fd = bin_dir.cast() }; - var iterator = bun.DirIterator.iterate(dir, .u8); + var iterator = bun.DirIterator.iterate(bin_dir, .u8); var entry = iterator.next(); while (true) : (entry = iterator.next()) { const current = switch (entry) { diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index 119fa520b2..38c158716f 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -484,7 +484,7 @@ pub const CreateCommand = struct { const destination_dir = destination_dir__; const Walker = @import("../walker_skippable.zig"); - var walker_ = try Walker.walk(template_dir, ctx.allocator, skip_files, skip_dirs); + var walker_ = try Walker.walk(.fromStdDir(template_dir), ctx.allocator, skip_files, skip_dirs); defer walker_.deinit(); const FileCopier = struct { @@ -498,7 +498,7 @@ pub const CreateCommand = struct { src_base_len: if (Environment.isWindows) usize else void, src_buf: if (Environment.isWindows) *bun.WPathBuffer else void, ) !void { - while (try walker.next()) |entry| { + while (try walker.next().unwrap()) |entry| { if (comptime Environment.isWindows) { if (entry.kind != .file and entry.kind != .directory) continue; @@ -561,7 +561,7 @@ pub const CreateCommand = struct { defer outfile.close(); defer node_.completeOne(); - const infile = bun.FD.fromStdFile(try entry.dir.openFile(entry.basename, .{ .mode = .read_only })); + const infile = try entry.dir.openat(entry.basename, bun.O.RDONLY, 0).unwrap(); defer infile.close(); // Assumption: you only really care about making sure something that was executable is still executable @@ -1541,7 +1541,7 @@ pub const CreateCommand = struct { Output.pretty( \\ - \\Come hang out in bun's Discord: https://bun.sh/discord + \\Come hang out in bun's Discord: https://bun.com/discord \\ , .{}); diff --git a/src/cli/discord_command.zig b/src/cli/discord_command.zig index d6e4146097..8558805130 100644 --- a/src/cli/discord_command.zig +++ b/src/cli/discord_command.zig @@ -4,7 +4,7 @@ const std = @import("std"); const open = @import("../open.zig"); pub const DiscordCommand = struct { - const discord_url = "https://bun.sh/discord"; + const discord_url = "https://bun.com/discord"; pub fn exec(_: std.mem.Allocator) !void { open.openURL(discord_url); } diff --git a/src/cli/init/README.default.md b/src/cli/init/README.default.md index 9dbda4bb7d..7b1502d140 100644 --- a/src/cli/init/README.default.md +++ b/src/cli/init/README.default.md @@ -12,4 +12,4 @@ To run: bun run {[entryPoint]s} ``` -This project was created using `bun init` in bun v{[bunVersion]s}. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime. +This project was created using `bun init` in bun v{[bunVersion]s}. [Bun](https://bun.com) is a fast all-in-one JavaScript runtime. 
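Several of the CLI changes in this patch (bunx_command above; init_command, pack_command, and create_command below) migrate directory iteration from taking a `std.fs.Dir` to taking Bun's own file-descriptor type, converting at the call site with `.fromStdDir(...)` and surfacing errors through `.unwrap()`. A minimal sketch of the new pattern, built only from the calls that appear in this diff and assuming it compiles inside Bun's tree where the `bun` module is importable:

```zig
const std = @import("std");
const bun = @import("bun");

fn listFiles() !void {
    // Open the directory the std way, then hand the iterator Bun's FD wrapper.
    var dir = try std.fs.cwd().openDir(".", .{ .iterate = true });
    defer dir.close();

    var it = bun.DirIterator.iterate(.fromStdDir(dir), .u8);
    while (try it.next().unwrap()) |entry| {
        if (entry.kind != .file) continue;
        // entry.name is a short-string wrapper; .slice() yields []const u8.
        std.debug.print("{s}\n", .{entry.name.slice()});
    }
}
```

Presumably the point of funneling every call site through the `bun.FD` wrapper is to keep POSIX and Windows descriptor handling in one place rather than relying on `std.fs.Dir` semantics.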
diff --git a/src/cli/init/README2.default.md b/src/cli/init/README2.default.md index 207f6af124..f34b4d717f 100644 --- a/src/cli/init/README2.default.md +++ b/src/cli/init/README2.default.md @@ -18,4 +18,4 @@ To run for production: bun start ``` -This project was created using `bun init` in bun v{[bunVersion]s}. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime. +This project was created using `bun init` in bun v{[bunVersion]s}. [Bun](https://bun.com) is a fast all-in-one JavaScript runtime. diff --git a/src/cli/init_command.zig b/src/cli/init_command.zig index 8ba5a7fc50..51cb2b146d 100644 --- a/src/cli/init_command.zig +++ b/src/cli/init_command.zig @@ -531,7 +531,7 @@ pub const InitCommand = struct { // Find any source file var dir = std.fs.cwd().openDir(".", .{ .iterate = true }) catch break :infer; defer dir.close(); - var it = bun.DirIterator.iterate(dir, .u8); + var it = bun.DirIterator.iterate(.fromStdDir(dir), .u8); while (try it.next().unwrap()) |file| { if (file.kind != .file) continue; const loader = bun.options.Loader.fromString(std.fs.path.extension(file.name.slice())) orelse @@ -1021,8 +1021,8 @@ const Template = enum { return false; } - const pathbuffer = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(pathbuffer); + const pathbuffer = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(pathbuffer); return bun.which(pathbuffer, bun.getenvZ("PATH") orelse return false, bun.fs.FileSystem.instance.top_level_dir, "claude") != null; } @@ -1097,8 +1097,8 @@ const Template = enum { if (Environment.isWindows) { if (bun.getenvZAnyCase("USER")) |user| { - const pathbuf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(pathbuf); + const pathbuf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(pathbuf); const path = std.fmt.bufPrintZ(pathbuf, "C:\\Users\\{s}\\AppData\\Local\\Programs\\Cursor\\Cursor.exe", .{user}) catch { return false; }; diff --git a/src/cli/install.sh b/src/cli/install.sh index f32e073258..e6f8634399 100644 --- a/src/cli/install.sh +++ b/src/cli/install.sh @@ -5,7 +5,7 @@ platform=$(uname -ms) if [[ ${OS:-} = Windows_NT ]]; then if [[ $platform != MINGW64* ]]; then - powershell -c "irm bun.sh/install.ps1|iex" + powershell -c "irm bun.com/install.ps1|iex" exit $? fi fi diff --git a/src/cli/install_command.zig b/src/cli/install_command.zig index aff3fcad86..589535bb45 100644 --- a/src/cli/install_command.zig +++ b/src/cli/install_command.zig @@ -1,10 +1,6 @@ -const Command = @import("../cli.zig").Command; -const bun = @import("bun"); -const PackageManager = @import("../install/install.zig").PackageManager; - pub const InstallCommand = struct { pub fn exec(ctx: Command.Context) !void { - PackageManager.install(ctx) catch |err| switch (err) { + install(ctx) catch |err| switch (err) { error.InstallFailed, error.InvalidPackageJSON, => { @@ -16,3 +12,97 @@ pub const InstallCommand = struct { }; } }; + +fn install(ctx: Command.Context) !void { + var cli = try CommandLineArguments.parse(ctx.allocator, .install); + + // The way this works: + // 1. Run the bundler on source files + // 2. Rewrite positional arguments to act identically to the developer + // typing in the dependency names + // 3. Run the install command + if (cli.analyze) { + const Analyzer = struct { + ctx: Command.Context, + cli: *CommandLineArguments, + pub fn onAnalyze(this: *@This(), result: *bun.bundle_v2.BundleV2.DependenciesScanner.Result) anyerror!void { + // TODO: add separate argument that makes it so positionals[1..] 
is not done and instead the positionals are passed + var positionals = bun.default_allocator.alloc(string, result.dependencies.keys().len + 1) catch bun.outOfMemory(); + positionals[0] = "install"; + bun.copy(string, positionals[1..], result.dependencies.keys()); + this.cli.positionals = positionals; + + try installWithCLI(this.ctx, this.cli.*); + + Global.exit(0); + } + }; + var analyzer = Analyzer{ + .ctx = ctx, + .cli = &cli, + }; + + var fetcher = bun.bundle_v2.BundleV2.DependenciesScanner{ + .ctx = &analyzer, + .entry_points = cli.positionals[1..], + .onFetch = @ptrCast(&Analyzer.onAnalyze), + }; + + try bun.CLI.BuildCommand.exec(bun.CLI.Command.get(), &fetcher); + return; + } + + return installWithCLI(ctx, cli); +} + +fn installWithCLI(ctx: Command.Context, cli: CommandLineArguments) !void { + const subcommand: Subcommand = if (cli.positionals.len > 1) .add else .install; + + // TODO(dylan-conway): print `bun install ` or `bun add ` before logs from `init`. + // and cleanup install/add subcommand usage + var manager, const original_cwd = try PackageManager.init(ctx, cli, .install); + + // switch to `bun add ` + if (subcommand == .add) { + manager.subcommand = .add; + if (manager.options.shouldPrintCommandName()) { + Output.prettyln("bun add v" ++ Global.package_json_version_with_sha ++ "\n", .{}); + Output.flush(); + } + return manager.updatePackageJSONAndInstallWithManager(ctx, original_cwd); + } + + if (manager.options.shouldPrintCommandName()) { + Output.prettyln("bun install v" ++ Global.package_json_version_with_sha ++ "\n", .{}); + Output.flush(); + } + + const package_json_contents = manager.root_package_json_file.readToEndAlloc(ctx.allocator, std.math.maxInt(usize)) catch |err| { + if (manager.options.log_level != .silent) { + Output.prettyErrorln("{s} reading package.json :(", .{@errorName(err)}); + Output.flush(); + } + return; + }; + + try manager.installWithManager(ctx, package_json_contents, original_cwd); + + if (manager.any_failed_to_install) { + Global.exit(1); + } +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Global = bun.Global; +const Output = bun.Output; +const default_allocator = bun.default_allocator; +const string = bun.string; +const Command = bun.CLI.Command; + +const PackageManager = bun.install.PackageManager; +const CommandLineArguments = PackageManager.CommandLineArguments; +const Subcommand = PackageManager.Subcommand; diff --git a/src/cli/link_command.zig b/src/cli/link_command.zig index 2dca979991..c66462721b 100644 --- a/src/cli/link_command.zig +++ b/src/cli/link_command.zig @@ -1,8 +1,216 @@ -const Command = @import("../cli.zig").Command; -const PackageManager = @import("../install/install.zig").PackageManager; - pub const LinkCommand = struct { pub fn exec(ctx: Command.Context) !void { - try PackageManager.link(ctx); + try link(ctx); } }; + +fn link(ctx: Command.Context) !void { + const cli = try CommandLineArguments.parse(ctx.allocator, .link); + var manager, const original_cwd = PackageManager.init(ctx, cli, .link) catch |err| brk: { + if (err == error.MissingPackageJSON) { + try attemptToCreatePackageJSON(); + break :brk try PackageManager.init(ctx, cli, .link); + } + + return err; + }; + defer ctx.allocator.free(original_cwd); + + if (manager.options.shouldPrintCommandName()) { + Output.prettyln("bun link v" ++ Global.package_json_version_with_sha ++ "\n", .{}); + Output.flush(); + } + + if (manager.options.positionals.len == 1) { + // bun link + + var lockfile: Lockfile = undefined; + var name: 
string = ""; + var package = Lockfile.Package{}; + + // Step 1. parse the nearest package.json file + { + const package_json_source = &(bun.sys.File.toSource(manager.original_package_json_path, ctx.allocator, .{}).unwrap() catch |err| { + Output.errGeneric("failed to read \"{s}\" for linking: {s}", .{ manager.original_package_json_path, @errorName(err) }); + Global.crash(); + }); + lockfile.initEmpty(ctx.allocator); + + var resolver: void = {}; + try package.parse(&lockfile, manager, ctx.allocator, manager.log, package_json_source, void, &resolver, Features.folder); + name = lockfile.str(&package.name); + if (name.len == 0) { + if (manager.options.log_level != .silent) { + Output.prettyErrorln("error: package.json missing \"name\" in \"{s}\"", .{package_json_source.path.text}); + } + Global.crash(); + } else if (!strings.isNPMPackageName(name)) { + if (manager.options.log_level != .silent) { + Output.prettyErrorln("error: invalid package.json name \"{s}\" in \"{any}\"", .{ + name, + package_json_source.path.text, + }); + } + Global.crash(); + } + } + + // Step 2. Setup the global directory + var node_modules: std.fs.Dir = brk: { + Bin.Linker.ensureUmask(); + var explicit_global_dir: string = ""; + if (ctx.install) |install_| { + explicit_global_dir = install_.global_dir orelse explicit_global_dir; + } + manager.global_dir = try Options.openGlobalDir(explicit_global_dir); + + try manager.setupGlobalDir(ctx); + + break :brk manager.global_dir.?.makeOpenPath("node_modules", .{}) catch |err| { + if (manager.options.log_level != .silent) + Output.prettyErrorln("error: failed to create node_modules in global dir due to error {s}", .{@errorName(err)}); + Global.crash(); + }; + }; + + // Step 3a. symlink to the node_modules folder + { + // delete it if it exists + node_modules.deleteTree(name) catch {}; + + // create scope if specified + if (name[0] == '@') { + if (strings.indexOfChar(name, '/')) |i| { + node_modules.makeDir(name[0..i]) catch |err| brk: { + if (err == error.PathAlreadyExists) break :brk; + if (manager.options.log_level != .silent) + Output.prettyErrorln("error: failed to create scope in global dir due to error {s}", .{@errorName(err)}); + Global.crash(); + }; + } + } + + if (comptime Environment.isWindows) { + // create the junction + const top_level = Fs.FileSystem.instance.topLevelDirWithoutTrailingSlash(); + var link_path_buf: bun.PathBuffer = undefined; + @memcpy( + link_path_buf[0..top_level.len], + top_level, + ); + link_path_buf[top_level.len] = 0; + const link_path = link_path_buf[0..top_level.len :0]; + const global_path = manager.globalLinkDirPath(); + const dest_path = Path.joinAbsStringZ(global_path, &.{name}, .windows); + switch (bun.sys.sys_uv.symlinkUV( + link_path, + dest_path, + bun.windows.libuv.UV_FS_SYMLINK_JUNCTION, + )) { + .err => |err| { + Output.prettyErrorln("error: failed to create junction to node_modules in global dir due to error {}", .{err}); + Global.crash(); + }, + .result => {}, + } + } else { + // create the symlink + node_modules.symLink(Fs.FileSystem.instance.topLevelDirWithoutTrailingSlash(), name, .{ .is_directory = true }) catch |err| { + if (manager.options.log_level != .silent) + Output.prettyErrorln("error: failed to create symlink to node_modules in global dir due to error {s}", .{@errorName(err)}); + Global.crash(); + }; + } + } + + // Step 3b. 
Link any global bins + if (package.bin.tag != .none) { + var link_target_buf: bun.PathBuffer = undefined; + var link_dest_buf: bun.PathBuffer = undefined; + var link_rel_buf: bun.PathBuffer = undefined; + + var node_modules_path = bun.AbsPath(.{}).initFdPath(.fromStdDir(node_modules)) catch |err| { + if (manager.options.log_level != .silent) { + Output.err(err, "failed to link binary", .{}); + } + Global.crash(); + }; + defer node_modules_path.deinit(); + + var bin_linker = Bin.Linker{ + .bin = package.bin, + .node_modules_path = &node_modules_path, + .global_bin_path = manager.options.bin_path, + + // .destination_dir_subpath = destination_dir_subpath, + .package_name = strings.StringOrTinyString.init(name), + .string_buf = lockfile.buffers.string_bytes.items, + .extern_string_buf = lockfile.buffers.extern_strings.items, + .seen = null, + .abs_target_buf = &link_target_buf, + .abs_dest_buf = &link_dest_buf, + .rel_buf = &link_rel_buf, + }; + bin_linker.link(true); + + if (bin_linker.err) |err| { + if (manager.options.log_level != .silent) + Output.prettyErrorln("error: failed to link bin due to error {s}", .{@errorName(err)}); + Global.crash(); + } + } + + Output.flush(); + + // Done + if (manager.options.log_level != .silent) + Output.prettyln( + \\Success! Registered "{[name]s}" + \\ + \\To use {[name]s} in a project, run: + \\ bun link {[name]s} + \\ + \\Or add it in dependencies in your package.json file: + \\ "{[name]s}": "link:{[name]s}" + \\ + , + .{ + .name = name, + }, + ); + + Output.flush(); + Global.exit(0); + } else { + // bun link lodash + try manager.updatePackageJSONAndInstallWithManager(ctx, original_cwd); + } +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const Global = bun.Global; +const Output = bun.Output; +const Path = bun.path; +const string = bun.string; +const strings = bun.strings; +const Command = bun.CLI.Command; +const File = bun.sys.File; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const Bin = bun.install.Bin; +const Features = bun.install.Features; + +const Lockfile = bun.install.Lockfile; +const Package = Lockfile.Package; + +const PackageManager = bun.install.PackageManager; +const CommandLineArguments = PackageManager.CommandLineArguments; +const Options = PackageManager.Options; +const attemptToCreatePackageJSON = PackageManager.attemptToCreatePackageJSON; diff --git a/src/cli/pack_command.zig b/src/cli/pack_command.zig index 3d699e4565..056c4a2a2b 100644 --- a/src/cli/pack_command.zig +++ b/src/cli/pack_command.zig @@ -297,7 +297,7 @@ pub const PackCommand = struct { } } - var dir_iter = DirIterator.iterate(dir, .u8); + var dir_iter = DirIterator.iterate(.fromStdDir(dir), .u8); while (dir_iter.next().unwrap() catch null) |entry| { if (entry.kind != .file and entry.kind != .directory) continue; @@ -451,7 +451,7 @@ pub const PackCommand = struct { } } - var iter = DirIterator.iterate(dir, .u8); + var iter = DirIterator.iterate(.fromStdDir(dir), .u8); while (iter.next().unwrap() catch null) |entry| { if (entry.kind != .file and entry.kind != .directory) continue; @@ -565,7 +565,7 @@ pub const PackCommand = struct { var additional_bundled_deps: std.ArrayListUnmanaged(DirInfo) = .{}; defer additional_bundled_deps.deinit(ctx.allocator); - var iter = DirIterator.iterate(dir, .u8); + var iter = DirIterator.iterate(.fromStdDir(dir), .u8); while (iter.next().unwrap() catch null) |entry| { if (entry.kind != .directory) continue; @@ -579,7 +579,7 @@ pub const PackCommand = 
struct { }; defer scoped_dir.close(); - var scoped_iter = DirIterator.iterate(scoped_dir, .u8); + var scoped_iter = DirIterator.iterate(.fromStdDir(scoped_dir), .u8); while (scoped_iter.next().unwrap() catch null) |sub_entry| { const entry_name = try entrySubpath(ctx.allocator, _entry_name, sub_entry.name.slice()); @@ -689,7 +689,7 @@ pub const PackCommand = struct { var dir, const dir_subpath, const dir_depth = dir_info; defer dir.close(); - var iter = DirIterator.iterate(dir, .u8); + var iter = DirIterator.iterate(.fromStdDir(dir), .u8); while (iter.next().unwrap() catch null) |entry| { if (entry.kind != .file and entry.kind != .directory) continue; @@ -849,7 +849,7 @@ pub const PackCommand = struct { } } - var dir_iter = DirIterator.iterate(dir, .u8); + var dir_iter = DirIterator.iterate(.fromStdDir(dir), .u8); while (dir_iter.next().unwrap() catch null) |entry| { if (entry.kind != .file and entry.kind != .directory) continue; @@ -2674,7 +2674,7 @@ pub const bindings = struct { if (entry.contents) |contents| { obj.put(global, "contents", contents.toJS(global)); } - entries.putIndex(global, @intCast(i), obj); + try entries.putIndex(global, @intCast(i), obj); } const result = JSValue.createEmptyObject(global, 2); diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index 4ef5083653..9d22dc4c1c 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -22,6 +22,7 @@ const Environment = bun.Environment; pub const PackCommand = @import("./pack_command.zig").PackCommand; const Npm = Install.Npm; const PmViewCommand = @import("./pm_view_command.zig"); +const PmVersionCommand = @import("./pm_version_command.zig").PmVersionCommand; const File = bun.sys.File; const ByName = struct { @@ -127,6 +128,8 @@ pub const PackageManagerCommand = struct { \\ --all list the entire dependency tree according to the current lockfile \\ bun pm whoami print the current npm username \\ bun pm view name[@version] view package metadata from the registry (use `bun info` instead) + \\ bun pm version [increment] bump the version in package.json and create a git tag + \\ increment patch, minor, major, prepatch, preminor, premajor, prerelease, from-git, or a specific version \\ bun pm hash generate & print the hash of the current lockfile \\ bun pm hash-string print the string used to hash the lockfile \\ bun pm hash-print print the hash stored in the current lockfile @@ -138,7 +141,7 @@ pub const PackageManagerCommand = struct { \\ --all trust all untrusted dependencies \\ bun pm default-trusted print the default trusted dependencies list \\ - \\Learn more about these at https://bun.sh/docs/cli/pm. + \\Learn more about these at https://bun.com/docs/cli/pm. \\ ; @@ -428,6 +431,9 @@ pub const PackageManagerCommand = struct { lockfile.saveToDisk(&load_lockfile, &pm.options); Global.exit(0); + } else if (strings.eqlComptime(subcommand, "version")) { + try PmVersionCommand.exec(ctx, pm, pm.options.positionals, cwd); + Global.exit(0); } printHelp(); diff --git a/src/cli/patch_command.zig b/src/cli/patch_command.zig index bc211fef24..9fb9f635f9 100644 --- a/src/cli/patch_command.zig +++ b/src/cli/patch_command.zig @@ -1,8 +1,18 @@ -const Command = @import("../cli.zig").Command; -const PackageManager = @import("../install/install.zig").PackageManager; +//! parse dependency of positional arg string (may include name@version for example) +//! get the precise version from the lockfile (there may be multiple) +//! 
copy the contents into a temp folder pub const PatchCommand = struct { pub fn exec(ctx: Command.Context) !void { - try PackageManager.patch(ctx); + try updatePackageJSONAndInstallCatchError(ctx, .patch); } }; + +// @sortImports + +const bun = @import("bun"); +const string = bun.string; +const Command = bun.CLI.Command; + +const PackageManager = bun.install.PackageManager; +const updatePackageJSONAndInstallCatchError = PackageManager.updatePackageJSONAndInstallCatchError; diff --git a/src/cli/patch_commit_command.zig b/src/cli/patch_commit_command.zig index fd8490e65c..2260656d3d 100644 --- a/src/cli/patch_commit_command.zig +++ b/src/cli/patch_commit_command.zig @@ -1,8 +1,13 @@ -const Command = @import("../cli.zig").Command; -const PackageManager = @import("../install/install.zig").PackageManager; - pub const PatchCommitCommand = struct { pub fn exec(ctx: Command.Context) !void { - try PackageManager.patchCommit(ctx); + try updatePackageJSONAndInstallCatchError(ctx, .@"patch-commit"); } }; + +// @sortImports + +const bun = @import("bun"); +const Command = bun.CLI.Command; + +const PackageManager = bun.install.PackageManager; +const updatePackageJSONAndInstallCatchError = PackageManager.updatePackageJSONAndInstallCatchError; diff --git a/src/cli/pm_trusted_command.zig b/src/cli/pm_trusted_command.zig index 6edfc0fdec..5a207d1c83 100644 --- a/src/cli/pm_trusted_command.zig +++ b/src/cli/pm_trusted_command.zig @@ -11,7 +11,6 @@ const String = bun.Semver.String; const PackageManager = Install.PackageManager; const PackageManagerCommand = @import("./package_manager_command.zig").PackageManagerCommand; const Lockfile = Install.Lockfile; -const Fs = @import("../fs.zig"); const Global = bun.Global; const DependencyID = Install.DependencyID; const ArrayIdentityContext = bun.ArrayIdentityContext; @@ -73,22 +72,14 @@ pub const UntrustedCommand = struct { var tree_iterator = Lockfile.Tree.Iterator(.node_modules).init(pm.lockfile); - const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir); - var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{}; - defer abs_node_modules_path.deinit(ctx.allocator); - try abs_node_modules_path.appendSlice(ctx.allocator, top_level_without_trailing_slash); - try abs_node_modules_path.append(ctx.allocator, std.fs.path.sep); + var node_modules_path: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); + defer node_modules_path.deinit(); while (tree_iterator.next(null)) |node_modules| { - // + 1 because we want to keep the path separator - abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1; - try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path); + const node_modules_path_save = node_modules_path.save(); + defer node_modules_path_save.restore(); - var node_modules_dir = bun.openDir(std.fs.cwd(), node_modules.relative_path) catch |err| { - if (err == error.ENOENT) continue; - return err; - }; - defer node_modules_dir.close(); + node_modules_path.append(node_modules.relative_path); for (node_modules.dependencies) |dep_id| { if (untrusted_dep_ids.contains(dep_id)) { @@ -97,12 +88,15 @@ pub const UntrustedCommand = struct { const package_id = pm.lockfile.buffers.resolutions.items[dep_id]; const resolution = &resolutions[package_id]; var package_scripts = scripts[package_id]; - var not_lazy: PackageManager.PackageInstaller.LazyPackageDestinationDir = .{ .dir = node_modules_dir }; + + const folder_name_save = node_modules_path.save(); + defer folder_name_save.restore(); + 
node_modules_path.append(alias); + const maybe_scripts_list = package_scripts.getList( pm.log, pm.lockfile, - ¬_lazy, - abs_node_modules_path.items, + &node_modules_path, alias, resolution, ) catch |err| { @@ -148,7 +142,7 @@ pub const UntrustedCommand = struct { \\ \\This means all packages with scripts are in "trustedDependencies" or none of your dependencies have scripts. \\ - \\For more information, visit https://bun.sh/docs/install/lifecycle#trusteddependencies + \\For more information, visit https://bun.com/docs/install/lifecycle#trusteddependencies \\ , .{}); } @@ -227,11 +221,8 @@ pub const TrustCommand = struct { // in the correct order as they would during a normal install var tree_iter = Lockfile.Tree.Iterator(.node_modules).init(pm.lockfile); - const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir); - var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{}; - defer abs_node_modules_path.deinit(ctx.allocator); - try abs_node_modules_path.appendSlice(ctx.allocator, top_level_without_trailing_slash); - try abs_node_modules_path.append(ctx.allocator, std.fs.path.sep); + var node_modules_path: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); + defer node_modules_path.deinit(); var package_names_to_add: bun.StringArrayHashMapUnmanaged(void) = .{}; var scripts_at_depth: std.AutoArrayHashMapUnmanaged(usize, std.ArrayListUnmanaged(struct { @@ -243,8 +234,9 @@ pub const TrustCommand = struct { var scripts_count: usize = 0; while (tree_iter.next(null)) |node_modules| { - abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1; - try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path); + const node_modules_path_save = node_modules_path.save(); + defer node_modules_path_save.restore(); + node_modules_path.append(node_modules.relative_path); var node_modules_dir = bun.openDir(std.fs.cwd(), node_modules.relative_path) catch |err| { if (err == error.ENOENT) continue; @@ -262,12 +254,15 @@ pub const TrustCommand = struct { } const resolution = &resolutions[package_id]; var package_scripts = scripts[package_id]; - var not_lazy = PackageManager.PackageInstaller.LazyPackageDestinationDir{ .dir = node_modules_dir }; + + var folder_save = node_modules_path.save(); + defer folder_save.restore(); + node_modules_path.append(alias); + const maybe_scripts_list = package_scripts.getList( pm.log, pm.lockfile, - ¬_lazy, - abs_node_modules_path.items, + &node_modules_path, alias, resolution, ) catch |err| { @@ -344,6 +339,7 @@ pub const TrustCommand = struct { info.scripts_list, optional, output_in_foreground, + null, ); if (pm.options.log_level.showProgress()) { diff --git a/src/cli/pm_version_command.zig b/src/cli/pm_version_command.zig new file mode 100644 index 0000000000..1653319471 --- /dev/null +++ b/src/cli/pm_version_command.zig @@ -0,0 +1,644 @@ +const std = @import("std"); +const bun = @import("bun"); +const Global = bun.Global; +const Output = bun.Output; +const strings = bun.strings; +const string = bun.string; +const Command = bun.CLI.Command; +const PackageManager = bun.install.PackageManager; +const Semver = bun.Semver; +const logger = bun.logger; +const JSON = bun.JSON; +const RunCommand = bun.RunCommand; +const Environment = bun.Environment; +const JSPrinter = bun.js_printer; + +pub const PmVersionCommand = struct { + const VersionType = enum { + patch, + minor, + major, + prepatch, + preminor, + premajor, + prerelease, + specific, + from_git, + + pub fn fromString(str: []const u8) 
?VersionType { + if (strings.eqlComptime(str, "patch")) return .patch; + if (strings.eqlComptime(str, "minor")) return .minor; + if (strings.eqlComptime(str, "major")) return .major; + if (strings.eqlComptime(str, "prepatch")) return .prepatch; + if (strings.eqlComptime(str, "preminor")) return .preminor; + if (strings.eqlComptime(str, "premajor")) return .premajor; + if (strings.eqlComptime(str, "prerelease")) return .prerelease; + if (strings.eqlComptime(str, "from-git")) return .from_git; + return null; + } + }; + + pub fn exec(ctx: Command.Context, pm: *PackageManager, positionals: []const string, original_cwd: []const u8) !void { + const package_json_dir = try findPackageDir(ctx.allocator, original_cwd); + + if (positionals.len <= 1) { + try showHelp(ctx, pm, package_json_dir); + return; + } + + const version_type, const new_version = parseVersionArgument(positionals[1]); + + try verifyGit(package_json_dir, pm); + + var path_buf: bun.PathBuffer = undefined; + const package_json_path = bun.path.joinAbsStringBufZ(package_json_dir, &path_buf, &.{"package.json"}, .auto); + + const package_json_contents = bun.sys.File.readFrom(bun.FD.cwd(), package_json_path, ctx.allocator).unwrap() catch |err| { + Output.errGeneric("Failed to read package.json: {s}", .{@errorName(err)}); + Global.exit(1); + }; + defer ctx.allocator.free(package_json_contents); + + const package_json_source = logger.Source.initPathString(package_json_path, package_json_contents); + const json_result = JSON.parsePackageJSONUTF8WithOpts( + &package_json_source, + ctx.log, + ctx.allocator, + .{ + .is_json = true, + .allow_comments = true, + .allow_trailing_commas = true, + .guess_indentation = true, + }, + ) catch |err| { + Output.errGeneric("Failed to parse package.json: {s}", .{@errorName(err)}); + Global.exit(1); + }; + + var json = json_result.root; + + if (json.data != .e_object) { + Output.errGeneric("Failed to parse package.json: root must be an object", .{}); + Global.exit(1); + } + + const scripts = if (pm.options.do.run_scripts) json.asProperty("scripts") else null; + const scripts_obj = if (scripts) |s| if (s.expr.data == .e_object) s.expr else null else null; + + if (scripts_obj) |s| { + if (s.get("preversion")) |script| { + if (script.asString(ctx.allocator)) |script_command| { + try RunCommand.runPackageScriptForeground( + ctx, + ctx.allocator, + script_command, + "preversion", + package_json_dir, + pm.env, + &.{}, + pm.options.log_level == .silent, + ctx.debug.use_system_shell, + ); + } + } + } + + const current_version = brk_version: { + if (json.asProperty("version")) |v| { + switch (v.expr.data) { + .e_string => |s| { + break :brk_version s.data; + }, + else => {}, + } + } + break :brk_version null; + }; + + const new_version_str = try calculateNewVersion(ctx.allocator, current_version orelse "0.0.0", version_type, new_version, pm.options.preid, package_json_dir); + defer ctx.allocator.free(new_version_str); + + if (current_version) |version| { + if (!pm.options.allow_same_version and strings.eql(version, new_version_str)) { + Output.errGeneric("Version not changed", .{}); + Global.exit(1); + } + } + + { + try json.data.e_object.putString(ctx.allocator, "version", new_version_str); + + var buffer_writer = JSPrinter.BufferWriter.init(ctx.allocator); + buffer_writer.append_newline = package_json_contents.len > 0 and package_json_contents[package_json_contents.len - 1] == '\n'; + var package_json_writer = JSPrinter.BufferPrinter.init(buffer_writer); + + _ = JSPrinter.printJSON( + @TypeOf(&package_json_writer), 
+ &package_json_writer, + json, + &package_json_source, + .{ + .indent = json_result.indentation, + .mangled_props = null, + }, + ) catch |err| { + Output.errGeneric("Failed to save package.json: {s}", .{@errorName(err)}); + Global.exit(1); + }; + + std.fs.cwd().writeFile(.{ + .sub_path = package_json_path, + .data = package_json_writer.ctx.writtenWithoutTrailingZero(), + }) catch |err| { + Output.errGeneric("Failed to write package.json: {s}", .{@errorName(err)}); + Global.exit(1); + }; + } + + if (scripts_obj) |s| { + if (s.get("version")) |script| { + if (script.asString(ctx.allocator)) |script_command| { + try RunCommand.runPackageScriptForeground( + ctx, + ctx.allocator, + script_command, + "version", + package_json_dir, + pm.env, + &.{}, + pm.options.log_level == .silent, + ctx.debug.use_system_shell, + ); + } + } + } + + if (pm.options.git_tag_version) { + try gitCommitAndTag(ctx.allocator, new_version_str, pm.options.message, package_json_dir); + } + + if (scripts_obj) |s| { + if (s.get("postversion")) |script| { + if (script.asString(ctx.allocator)) |script_command| { + try RunCommand.runPackageScriptForeground( + ctx, + ctx.allocator, + script_command, + "postversion", + package_json_dir, + pm.env, + &.{}, + pm.options.log_level == .silent, + ctx.debug.use_system_shell, + ); + } + } + } + + Output.println("v{s}", .{new_version_str}); + Output.flush(); + } + + fn findPackageDir(allocator: std.mem.Allocator, start_dir: []const u8) bun.OOM![]const u8 { + var path_buf: bun.PathBuffer = undefined; + var current_dir = start_dir; + + while (true) { + const package_json_path_z = bun.path.joinAbsStringBufZ(current_dir, &path_buf, &.{"package.json"}, .auto); + if (bun.FD.cwd().existsAt(package_json_path_z)) { + return try allocator.dupe(u8, current_dir); + } + + const parent = bun.path.dirname(current_dir, .auto); + if (strings.eql(parent, current_dir)) { + break; + } + current_dir = parent; + } + + return try allocator.dupe(u8, start_dir); + } + + fn verifyGit(cwd: []const u8, pm: *PackageManager) !void { + if (!pm.options.git_tag_version) return; + + var path_buf: bun.PathBuffer = undefined; + const git_dir_path = bun.path.joinAbsStringBuf(cwd, &path_buf, &.{".git"}, .auto); + if (!bun.FD.cwd().directoryExistsAt(git_dir_path).isTrue()) { + pm.options.git_tag_version = false; + return; + } + + if (!pm.options.force and !try isGitClean(cwd)) { + Output.errGeneric("Git working directory not clean.", .{}); + Global.exit(1); + } + } + + fn parseVersionArgument(arg: []const u8) struct { VersionType, ?[]const u8 } { + if (VersionType.fromString(arg)) |vtype| { + return .{ vtype, null }; + } + + const version = Semver.Version.parse(Semver.SlicedString.init(arg, arg)); + if (version.valid) { + return .{ .specific, arg }; + } + + Output.errGeneric("Invalid version argument: \"{s}\"", .{arg}); + Output.note("Valid options: patch, minor, major, prepatch, preminor, premajor, prerelease, from-git, or a specific semver version", .{}); + Global.exit(1); + } + + fn getCurrentVersion(ctx: Command.Context, cwd: []const u8) ?[]const u8 { + var path_buf: bun.PathBuffer = undefined; + const package_json_path = bun.path.joinAbsStringBufZ(cwd, &path_buf, &.{"package.json"}, .auto); + + const package_json_contents = bun.sys.File.readFrom(bun.FD.cwd(), package_json_path, ctx.allocator).unwrap() catch { + return null; + }; + + const package_json_source = logger.Source.initPathString(package_json_path, package_json_contents); + const json = JSON.parsePackageJSONUTF8(&package_json_source, ctx.log, ctx.allocator) 
catch { + return null; + }; + + if (json.asProperty("version")) |v| { + switch (v.expr.data) { + .e_string => |s| { + return s.data; + }, + else => {}, + } + } + + return null; + } + + fn showHelp(ctx: Command.Context, pm: *PackageManager, cwd: []const u8) bun.OOM!void { + const _current_version = getCurrentVersion(ctx, cwd); + const current_version = _current_version orelse "1.0.0"; + + Output.prettyln("bun pm version v" ++ Global.package_json_version_with_sha ++ "", .{}); + if (_current_version) |version| { + Output.prettyln("Current package version: v{s}", .{version}); + } + + const patch_version = try calculateNewVersion(ctx.allocator, current_version, .patch, null, pm.options.preid, cwd); + const minor_version = try calculateNewVersion(ctx.allocator, current_version, .minor, null, pm.options.preid, cwd); + const major_version = try calculateNewVersion(ctx.allocator, current_version, .major, null, pm.options.preid, cwd); + const prerelease_version = try calculateNewVersion(ctx.allocator, current_version, .prerelease, null, pm.options.preid, cwd); + defer ctx.allocator.free(patch_version); + defer ctx.allocator.free(minor_version); + defer ctx.allocator.free(major_version); + defer ctx.allocator.free(prerelease_version); + + const increment_help_text = + \\ + \\Increment: + \\ patch {s} → {s} + \\ minor {s} → {s} + \\ major {s} → {s} + \\ prerelease {s} → {s} + \\ + ; + Output.pretty(increment_help_text, .{ + current_version, patch_version, + current_version, minor_version, + current_version, major_version, + current_version, prerelease_version, + }); + + if (strings.indexOfChar(current_version, '-') != null or pm.options.preid.len > 0) { + const prepatch_version = try calculateNewVersion(ctx.allocator, current_version, .prepatch, null, pm.options.preid, cwd); + const preminor_version = try calculateNewVersion(ctx.allocator, current_version, .preminor, null, pm.options.preid, cwd); + const premajor_version = try calculateNewVersion(ctx.allocator, current_version, .premajor, null, pm.options.preid, cwd); + defer ctx.allocator.free(prepatch_version); + defer ctx.allocator.free(preminor_version); + defer ctx.allocator.free(premajor_version); + + const prerelease_help_text = + \\ prepatch {s} → {s} + \\ preminor {s} → {s} + \\ premajor {s} → {s} + \\ + ; + Output.pretty(prerelease_help_text, .{ + current_version, prepatch_version, + current_version, preminor_version, + current_version, premajor_version, + }); + } + + const beta_prerelease_version = try calculateNewVersion(ctx.allocator, current_version, .prerelease, null, "beta", cwd); + defer ctx.allocator.free(beta_prerelease_version); + + const set_specific_version_help_text = + \\ from-git Use version from latest git tag + \\ 1.2.3 Set specific version + \\ + \\Options: + \\ --no-git-tag-version Skip git operations + \\ --allow-same-version Prevents throwing an error if the version is the same + \\ --message=<message>, -m Custom commit message, use %s for version substitution + \\ --preid=<preid> Prerelease identifier (e.g. beta → {s}) + \\ --force, -f Bypass dirty git history check + \\ + \\Examples: + \\ $ bun pm version patch + \\ $ bun pm version 1.2.3 --no-git-tag-version + \\ $ bun pm version prerelease --preid beta --message "Release beta: %s" + \\ + \\More info: https://bun.com/docs/cli/pm#version + \\ + ; + Output.pretty(set_specific_version_help_text, .{beta_prerelease_version}); + Output.flush(); + } + + fn calculateNewVersion(allocator: std.mem.Allocator, current_str: []const u8, version_type: VersionType, specific_version: ?[]const u8, preid: 
[]const u8, cwd: []const u8) bun.OOM![]const u8 { + if (version_type == .specific) { + return try allocator.dupe(u8, specific_version.?); + } + + if (version_type == .from_git) { + return try getVersionFromGit(allocator, cwd); + } + + const current = Semver.Version.parse(Semver.SlicedString.init(current_str, current_str)); + if (!current.valid) { + Output.errGeneric("Current version \"{s}\" is not a valid semver", .{current_str}); + Global.exit(1); + } + + const prerelease_id: []const u8 = if (preid.len > 0) + try allocator.dupe(u8, preid) + else if (!current.version.tag.hasPre()) + try allocator.dupe(u8, "") + else blk: { + const current_prerelease = current.version.tag.pre.slice(current_str); + + if (strings.indexOfChar(current_prerelease, '.')) |dot_index| { + break :blk try allocator.dupe(u8, current_prerelease[0..dot_index]); + } + + break :blk if (std.fmt.parseInt(u32, current_prerelease, 10)) |_| + try allocator.dupe(u8, "") + else |_| + try allocator.dupe(u8, current_prerelease); + }; + defer allocator.free(prerelease_id); + + return try incrementVersion(allocator, current_str, current, version_type, prerelease_id); + } + + fn incrementVersion(allocator: std.mem.Allocator, current_str: []const u8, current: Semver.Version.ParseResult, version_type: VersionType, preid: []const u8) bun.OOM![]const u8 { + var new_version = current.version.min(); + + switch (version_type) { + .patch => { + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}", .{ new_version.major, new_version.minor, new_version.patch + 1 }); + }, + .minor => { + return try std.fmt.allocPrint(allocator, "{d}.{d}.0", .{ new_version.major, new_version.minor + 1 }); + }, + .major => { + return try std.fmt.allocPrint(allocator, "{d}.0.0", .{new_version.major + 1}); + }, + .prepatch => { + if (preid.len > 0) { + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}-{s}.0", .{ new_version.major, new_version.minor, new_version.patch + 1, preid }); + } else { + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}-0", .{ new_version.major, new_version.minor, new_version.patch + 1 }); + } + }, + .preminor => { + if (preid.len > 0) { + return try std.fmt.allocPrint(allocator, "{d}.{d}.0-{s}.0", .{ new_version.major, new_version.minor + 1, preid }); + } else { + return try std.fmt.allocPrint(allocator, "{d}.{d}.0-0", .{ new_version.major, new_version.minor + 1 }); + } + }, + .premajor => { + if (preid.len > 0) { + return try std.fmt.allocPrint(allocator, "{d}.0.0-{s}.0", .{ new_version.major + 1, preid }); + } else { + return try std.fmt.allocPrint(allocator, "{d}.0.0-0", .{new_version.major + 1}); + } + }, + .prerelease => { + if (current.version.tag.hasPre()) { + const current_prerelease = current.version.tag.pre.slice(current_str); + const identifier = if (preid.len > 0) preid else current_prerelease; + + if (strings.lastIndexOfChar(current_prerelease, '.')) |dot_index| { + const number_str = current_prerelease[dot_index + 1 ..]; + const next_num = std.fmt.parseInt(u32, number_str, 10) catch 0; + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}-{s}.{d}", .{ new_version.major, new_version.minor, new_version.patch, identifier, next_num + 1 }); + } else { + const num = std.fmt.parseInt(u32, current_prerelease, 10) catch null; + if (num) |n| { + if (preid.len > 0) { + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}-{s}.{d}", .{ new_version.major, new_version.minor, new_version.patch, preid, n + 1 }); + } else { + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}-{d}", .{ new_version.major, new_version.minor, 
new_version.patch, n + 1 }); + } + } else { + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}-{s}.1", .{ new_version.major, new_version.minor, new_version.patch, identifier }); + } + } + } else { + new_version.patch += 1; + if (preid.len > 0) { + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}-{s}.0", .{ new_version.major, new_version.minor, new_version.patch, preid }); + } else { + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}-0", .{ new_version.major, new_version.minor, new_version.patch }); + } + } + }, + else => {}, + } + return try std.fmt.allocPrint(allocator, "{d}.{d}.{d}", .{ new_version.major, new_version.minor, new_version.patch }); + } + + fn isGitClean(cwd: []const u8) bun.OOM!bool { + var path_buf: bun.PathBuffer = undefined; + const git_path = bun.which(&path_buf, bun.getenvZ("PATH") orelse "", cwd, "git") orelse { + Output.errGeneric("git must be installed to use `bun pm version --git-tag-version`", .{}); + Global.exit(1); + }; + + const proc = bun.spawnSync(&.{ + .argv = &.{ git_path, "status", "--porcelain" }, + .stdout = .buffer, + .stderr = .ignore, + .stdin = .ignore, + .cwd = cwd, + .envp = null, + .windows = if (Environment.isWindows) .{ + .loop = bun.JSC.EventLoopHandle.init(bun.JSC.MiniEventLoop.initGlobal(null)), + }, + }) catch |err| { + Output.errGeneric("Failed to spawn git process: {s}", .{@errorName(err)}); + Global.exit(1); + }; + + switch (proc) { + .err => |err| { + Output.err(err, "Failed to spawn git process", .{}); + Global.exit(1); + }, + .result => |result| { + return result.isOK() and result.stdout.items.len == 0; + }, + } + } + + fn getVersionFromGit(allocator: std.mem.Allocator, cwd: []const u8) bun.OOM![]const u8 { + var path_buf: bun.PathBuffer = undefined; + const git_path = bun.which(&path_buf, bun.getenvZ("PATH") orelse "", cwd, "git") orelse { + Output.errGeneric("git must be installed to use `bun pm version from-git`", .{}); + Global.exit(1); + }; + + const proc = bun.spawnSync(&.{ + .argv = &.{ git_path, "describe", "--tags", "--abbrev=0" }, + .stdout = .buffer, + .stderr = .buffer, + .stdin = .ignore, + .cwd = cwd, + .envp = null, + .windows = if (Environment.isWindows) .{ + .loop = bun.JSC.EventLoopHandle.init(bun.JSC.MiniEventLoop.initGlobal(null)), + }, + }) catch |err| { + Output.err(err, "Failed to spawn git process", .{}); + Global.exit(1); + }; + + switch (proc) { + .err => |err| { + Output.err(err, "Git command failed unexpectedly", .{}); + Global.exit(1); + }, + .result => |result| { + if (!result.isOK()) { + if (result.stderr.items.len > 0) { + Output.errGeneric("Git error: {s}", .{strings.trim(result.stderr.items, " \n\r\t")}); + } else { + Output.errGeneric("No git tags found", .{}); + } + Global.exit(1); + } + + var version_str = strings.trim(result.stdout.items, " \n\r\t"); + if (strings.startsWith(version_str, "v")) { + version_str = version_str[1..]; + } + + return try allocator.dupe(u8, version_str); + }, + } + } + + fn gitCommitAndTag(allocator: std.mem.Allocator, version: []const u8, custom_message: ?[]const u8, cwd: []const u8) bun.OOM!void { + var path_buf: bun.PathBuffer = undefined; + const git_path = bun.which(&path_buf, bun.getenvZ("PATH") orelse "", cwd, "git") orelse { + Output.errGeneric("git must be installed to use `bun pm version --git-tag-version`", .{}); + Global.exit(1); + }; + + const stage_proc = bun.spawnSync(&.{ + .argv = &.{ git_path, "add", "package.json" }, + .cwd = cwd, + .stdout = .buffer, + .stderr = .buffer, + .stdin = .ignore, + .envp = null, + .windows = if 
(Environment.isWindows) .{ + .loop = bun.JSC.EventLoopHandle.init(bun.JSC.MiniEventLoop.initGlobal(null)), + }, + }) catch |err| { + Output.errGeneric("Git add failed: {s}", .{@errorName(err)}); + Global.exit(1); + }; + + switch (stage_proc) { + .err => |err| { + Output.err(err, "Git add failed unexpectedly", .{}); + Global.exit(1); + }, + .result => |result| { + if (!result.isOK()) { + Output.errGeneric("Git add failed with exit code {d}", .{result.status.exited.code}); + Global.exit(1); + } + }, + } + + const commit_message = if (custom_message) |msg| + try std.mem.replaceOwned(u8, allocator, msg, "%s", version) + else + try std.fmt.allocPrint(allocator, "v{s}", .{version}); + defer allocator.free(commit_message); + + const commit_proc = bun.spawnSync(&.{ + .argv = &.{ git_path, "commit", "-m", commit_message }, + .cwd = cwd, + .stdout = .buffer, + .stderr = .buffer, + .stdin = .ignore, + .envp = null, + .windows = if (Environment.isWindows) .{ + .loop = bun.JSC.EventLoopHandle.init(bun.JSC.MiniEventLoop.initGlobal(null)), + }, + }) catch |err| { + Output.errGeneric("Git commit failed: {s}", .{@errorName(err)}); + Global.exit(1); + }; + + switch (commit_proc) { + .err => |err| { + Output.err(err, "Git commit failed unexpectedly", .{}); + Global.exit(1); + }, + .result => |result| { + if (!result.isOK()) { + Output.errGeneric("Git commit failed", .{}); + Global.exit(1); + } + }, + } + + const tag_name = try std.fmt.allocPrint(allocator, "v{s}", .{version}); + defer allocator.free(tag_name); + + const tag_proc = bun.spawnSync(&.{ + .argv = &.{ git_path, "tag", "-a", tag_name, "-m", tag_name }, + .cwd = cwd, + .stdout = .buffer, + .stderr = .buffer, + .stdin = .ignore, + .envp = null, + .windows = if (Environment.isWindows) .{ + .loop = bun.JSC.EventLoopHandle.init(bun.JSC.MiniEventLoop.initGlobal(null)), + }, + }) catch |err| { + Output.errGeneric("Git tag failed: {s}", .{@errorName(err)}); + Global.exit(1); + }; + + switch (tag_proc) { + .err => |err| { + Output.err(err, "Git tag failed unexpectedly", .{}); + Global.exit(1); + }, + .result => |result| { + if (!result.isOK()) { + Output.errGeneric("Git tag failed", .{}); + Global.exit(1); + } + }, + } + } +}; diff --git a/src/cli/publish_command.zig b/src/cli/publish_command.zig index 8aea179caf..c4257c5226 100644 --- a/src/cli/publish_command.zig +++ b/src/cli/publish_command.zig @@ -1149,7 +1149,7 @@ pub const PublishCommand = struct { var dir, const dir_subpath, const close_dir = dir_info; defer if (close_dir) dir.close(); - var iter = bun.DirIterator.iterate(dir, .u8); + var iter = bun.DirIterator.iterate(.fromStdDir(dir), .u8); while (iter.next().unwrap() catch null) |entry| { const name, const subpath = name_and_subpath: { const name = entry.name.slice(); @@ -1246,7 +1246,7 @@ pub const PublishCommand = struct { if (ci_name != null) " ci/" else "", ci_name orelse "", }); - // headers.count("user-agent", "npm/10.8.3 node/v22.6.0 darwin arm64 workspaces/false"); + // headers.count("user-agent", "npm/10.8.3 node/v24.3.0 darwin arm64 workspaces/false"); headers.count("user-agent", print_buf.items); print_buf.clearRetainingCapacity(); @@ -1295,7 +1295,7 @@ pub const PublishCommand = struct { if (ci_name != null) " ci/" else "", ci_name orelse "", }); - // headers.append("user-agent", "npm/10.8.3 node/v22.6.0 darwin arm64 workspaces/false"); + // headers.append("user-agent", "npm/10.8.3 node/v24.3.0 darwin arm64 workspaces/false"); headers.append("user-agent", print_buf.items); print_buf.clearRetainingCapacity(); diff --git 
a/src/cli/remove_command.zig b/src/cli/remove_command.zig index eec924d8d2..d85dc27b4a 100644 --- a/src/cli/remove_command.zig +++ b/src/cli/remove_command.zig @@ -1,8 +1,13 @@ -const Command = @import("../cli.zig").Command; -const PackageManager = @import("../install/install.zig").PackageManager; - pub const RemoveCommand = struct { pub fn exec(ctx: Command.Context) !void { - try PackageManager.remove(ctx); + try updatePackageJSONAndInstallCatchError(ctx, .remove); } }; + +// @sortImports + +const bun = @import("bun"); +const Command = bun.CLI.Command; + +const PackageManager = bun.install.PackageManager; +const updatePackageJSONAndInstallCatchError = PackageManager.updatePackageJSONAndInstallCatchError; diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 4ff919dbe3..94821e2209 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -84,26 +84,24 @@ pub const RunCommand = struct { /// Find the "best" shell to use /// Cached to only run once pub fn findShell(PATH: string, cwd: string) ?stringZ { - const bufs = struct { - pub var shell_buf_once: bun.PathBuffer = undefined; - pub var found_shell: [:0]const u8 = ""; + const Once = struct { + var shell_buf: bun.PathBuffer = undefined; + pub var once = bun.once(struct { + pub fn run(PATH_: string, cwd_: string) ?stringZ { + if (findShellImpl(PATH_, cwd_)) |found| { + if (found.len < shell_buf.len) { + @memcpy(shell_buf[0..found.len], found); + shell_buf[found.len] = 0; + return shell_buf[0..found.len :0]; + } + } + + return null; + } + }.run); }; - if (bufs.found_shell.len > 0) { - return bufs.found_shell; - } - if (findShellImpl(PATH, cwd)) |found| { - if (found.len < bufs.shell_buf_once.len) { - @memcpy(bufs.shell_buf_once[0..found.len], found); - bufs.shell_buf_once[found.len] = 0; - bufs.found_shell = bufs.shell_buf_once[0..found.len :0]; - return bufs.found_shell; - } - - return found; - } - - return null; + return Once.once.call(.{ PATH, cwd }); } const BUN_BIN_NAME = if (Environment.isDebug) "bun-debug" else "bun"; @@ -1199,7 +1197,7 @@ pub const RunCommand = struct { \\ bun run dev \\ bun run lint \\ - \\Full documentation is available at https://bun.sh/docs/cli/run + \\Full documentation is available at https://bun.com/docs/cli/run \\ ; diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index 9896d18894..d2e73e588d 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -398,7 +398,7 @@ pub const JunitReporter = struct { \\ , .{}); }, - .skip => { + .skipped_because_label, .skip => { this.testcases_metrics.skipped += 1; try this.contents.appendSlice(bun.default_allocator, ">\n <skipped />\n </testcase>\n"); }, @@ -462,7 +462,6 @@ pub const CommandLineReporter = struct { jest: TestRunner, callback: TestRunner.Callback, last_dot: u32 = 0, - summary: Summary = Summary{}, prev_file: u64 = 0, repeat_count: u32 = 1, @@ -476,15 +475,6 @@ junit: *JunitReporter, }; - pub const Summary = struct { - pass: u32 = 0, - expectations: u32 = 0, - skip: u32 = 0, - todo: u32 = 0, - fail: u32 = 0, - files: u32 = 0, - }; - const DotColorMap = std.EnumMap(TestRunner.Test.Status, string); const dots: DotColorMap = brk: { var map: DotColorMap = DotColorMap.init(.{}); @@ -607,6 +597,10 @@ } } + pub inline fn summary(this: *CommandLineReporter) *TestRunner.Summary { + return &this.jest.summary; + } + pub fn handleTestPass(cb: *TestRunner.Callback, id: Test.ID, file: string, label: string, expectations: u32, elapsed_ns: u64, parent: 
?*jest.DescribeScope) void { const writer_ = Output.errorWriter(); var buffered_writer = std.io.bufferedWriter(writer_); @@ -620,8 +614,8 @@ pub const CommandLineReporter = struct { printTestLine(.pass, label, elapsed_ns, parent, expectations, false, writer, file, this.file_reporter); this.jest.tests.items(.status)[id] = TestRunner.Test.Status.pass; - this.summary.pass += 1; - this.summary.expectations += expectations; + this.summary().pass += 1; + this.summary().expectations += expectations; } pub fn handleTestFail(cb: *TestRunner.Callback, id: Test.ID, file: string, label: string, expectations: u32, elapsed_ns: u64, parent: ?*jest.DescribeScope) void { @@ -646,11 +640,11 @@ pub const CommandLineReporter = struct { Output.flush(); // this.updateDots(); - this.summary.fail += 1; - this.summary.expectations += expectations; + this.summary().fail += 1; + this.summary().expectations += expectations; this.jest.tests.items(.status)[id] = TestRunner.Test.Status.fail; - if (this.jest.bail == this.summary.fail) { + if (this.jest.bail == this.summary().fail) { this.printSummary(); Output.prettyError("\nBailed out after {d} failure{s}\n", .{ this.jest.bail, if (this.jest.bail == 1) "" else "s" }); Global.exit(1); @@ -676,8 +670,18 @@ pub const CommandLineReporter = struct { } // this.updateDots(); - this.summary.skip += 1; - this.summary.expectations += expectations; + this.summary().skip += 1; + this.summary().expectations += expectations; + this.jest.tests.items(.status)[id] = TestRunner.Test.Status.skip; + } + + pub fn handleTestFilteredOut(cb: *TestRunner.Callback, id: Test.ID, _: string, _: string, expectations: u32, _: u64, _: ?*jest.DescribeScope) void { + var this: *CommandLineReporter = @fieldParentPtr("callback", cb); + + // this.updateDots(); + this.summary().skipped_because_label += 1; + this.summary().skip += 1; + this.summary().expectations += expectations; this.jest.tests.items(.status)[id] = TestRunner.Test.Status.skip; } @@ -698,16 +702,23 @@ pub const CommandLineReporter = struct { Output.flush(); // this.updateDots(); - this.summary.todo += 1; - this.summary.expectations += expectations; + this.summary().todo += 1; + this.summary().expectations += expectations; this.jest.tests.items(.status)[id] = TestRunner.Test.Status.todo; } pub fn printSummary(this: *CommandLineReporter) void { - const tests = this.summary.fail + this.summary.pass + this.summary.skip + this.summary.todo; - const files = this.summary.files; + const summary_ = this.summary(); + const tests = summary_.fail + summary_.pass + summary_.skip + summary_.todo; + const files = summary_.files; + + Output.prettyError("Ran {d} test{s} across {d} file{s}. ", .{ + tests, + if (tests == 1) "" else "s", + files, + if (files == 1) "" else "s", + }); - Output.prettyError("Ran {d} tests across {d} files. 
", .{ tests, files }); Output.printStartEnd(bun.start_time, std.time.nanoTimestamp()); } @@ -1063,6 +1074,7 @@ pub const TestCommand = struct { .onTestFail = CommandLineReporter.handleTestFail, .onTestSkip = CommandLineReporter.handleTestSkip, .onTestTodo = CommandLineReporter.handleTestTodo, + .onTestFilteredOut = CommandLineReporter.handleTestFilteredOut, }; reporter.repeat_count = @max(ctx.test_options.repeat_count, 1); reporter.jest.callback = &reporter.callback; @@ -1225,33 +1237,33 @@ pub const TestCommand = struct { const write_snapshots_success = try jest.Jest.runner.?.snapshots.writeInlineSnapshots(); try jest.Jest.runner.?.snapshots.writeSnapshotFile(); var coverage_options = ctx.test_options.coverage; - if (reporter.summary.pass > 20) { - if (reporter.summary.skip > 0) { - Output.prettyError("\n{d} tests skipped:\n", .{reporter.summary.skip}); + if (reporter.summary().pass > 20) { + if (reporter.summary().skip > 0) { + Output.prettyError("\n{d} tests skipped:\n", .{reporter.summary().skip}); Output.flush(); var error_writer = Output.errorWriter(); error_writer.writeAll(reporter.skips_to_repeat_buf.items) catch unreachable; } - if (reporter.summary.todo > 0) { - if (reporter.summary.skip > 0) { + if (reporter.summary().todo > 0) { + if (reporter.summary().skip > 0) { Output.prettyError("\n", .{}); } - Output.prettyError("\n{d} tests todo:\n", .{reporter.summary.todo}); + Output.prettyError("\n{d} tests todo:\n", .{reporter.summary().todo}); Output.flush(); var error_writer = Output.errorWriter(); error_writer.writeAll(reporter.todos_to_repeat_buf.items) catch unreachable; } - if (reporter.summary.fail > 0) { - if (reporter.summary.skip > 0 or reporter.summary.todo > 0) { + if (reporter.summary().fail > 0) { + if (reporter.summary().skip > 0 or reporter.summary().todo > 0) { Output.prettyError("\n", .{}); } - Output.prettyError("\n{d} tests failed:\n", .{reporter.summary.fail}); + Output.prettyError("\n{d} tests failed:\n", .{reporter.summary().fail}); Output.flush(); var error_writer = Output.errorWriter(); @@ -1261,7 +1273,11 @@ pub const TestCommand = struct { Output.flush(); + var failed_to_find_any_tests = false; + if (test_files.len == 0) { + failed_to_find_any_tests = true; + if (ctx.positionals.len == 0) { Output.prettyErrorln( \\No tests found! 
@@ -1304,7 +1320,7 @@ pub const TestCommand = struct { } Output.prettyError( \\ - \\Learn more about the test runner: https://bun.sh/docs/cli/test + \\Learn more about the test runner: https://bun.com/docs/cli/test , .{}); } else { Output.prettyError("\n", .{}); @@ -1321,76 +1337,92 @@ pub const TestCommand = struct { } } - if (reporter.summary.pass > 0) { - Output.prettyError("", .{}); - } + const summary = reporter.summary(); + const did_label_filter_out_all_tests = summary.didLabelFilterOutAllTests() and reporter.jest.unhandled_errors_between_tests == 0; - Output.prettyError(" {d:5>} pass\n", .{reporter.summary.pass}); - - if (reporter.summary.skip > 0) { - Output.prettyError(" {d:5>} skip\n", .{reporter.summary.skip}); - } - - if (reporter.summary.todo > 0) { - Output.prettyError(" {d:5>} todo\n", .{reporter.summary.todo}); - } - - if (reporter.summary.fail > 0) { - Output.prettyError("", .{}); - } else { - Output.prettyError("", .{}); - } - - Output.prettyError(" {d:5>} fail\n", .{reporter.summary.fail}); - if (reporter.jest.unhandled_errors_between_tests > 0) { - Output.prettyError(" {d:5>} error{s}\n", .{ reporter.jest.unhandled_errors_between_tests, if (reporter.jest.unhandled_errors_between_tests > 1) "s" else "" }); - } - - var print_expect_calls = reporter.summary.expectations > 0; - if (reporter.jest.snapshots.total > 0) { - const passed = reporter.jest.snapshots.passed; - const failed = reporter.jest.snapshots.failed; - const added = reporter.jest.snapshots.added; - - var first = true; - if (print_expect_calls and added == 0 and failed == 0) { - print_expect_calls = false; - Output.prettyError(" {d:5>} snapshots, {d:5>} expect() calls", .{ reporter.jest.snapshots.total, reporter.summary.expectations }); - } else { - Output.prettyError(" snapshots: ", .{}); - - if (passed > 0) { - Output.prettyError("{d} passed", .{passed}); - first = false; - } - - if (added > 0) { - if (first) { - first = false; - Output.prettyError("+{d} added", .{added}); - } else { - Output.prettyError(", {d} added", .{added}); - } - } - - if (failed > 0) { - if (first) { - first = false; - Output.prettyError("{d} failed", .{failed}); - } else { - Output.prettyError(", {d} failed", .{failed}); - } - } + if (!did_label_filter_out_all_tests) { + if (summary.pass > 0) { + Output.prettyError("", .{}); } - Output.prettyError("\n", .{}); - } + Output.prettyError(" {d:5>} pass\n", .{summary.pass}); - if (print_expect_calls) { - Output.prettyError(" {d:5>} expect() calls\n", .{reporter.summary.expectations}); - } + if (summary.skip > 0) { + Output.prettyError(" {d:5>} skip\n", .{summary.skip}); + } else if (summary.skipped_because_label > 0) { + Output.prettyError(" {d:5>} filtered out\n", .{summary.skipped_because_label}); + } - reporter.printSummary(); + if (summary.todo > 0) { + Output.prettyError(" {d:5>} todo\n", .{summary.todo}); + } + + if (summary.fail > 0) { + Output.prettyError("", .{}); + } else { + Output.prettyError("", .{}); + } + + Output.prettyError(" {d:5>} fail\n", .{summary.fail}); + if (reporter.jest.unhandled_errors_between_tests > 0) { + Output.prettyError(" {d:5>} error{s}\n", .{ reporter.jest.unhandled_errors_between_tests, if (reporter.jest.unhandled_errors_between_tests > 1) "s" else "" }); + } + + var print_expect_calls = reporter.summary().expectations > 0; + if (reporter.jest.snapshots.total > 0) { + const passed = reporter.jest.snapshots.passed; + const failed = reporter.jest.snapshots.failed; + const added = reporter.jest.snapshots.added; + + var first = true; + if 
(print_expect_calls and added == 0 and failed == 0) { + print_expect_calls = false; + Output.prettyError(" {d:5>} snapshots, {d:5>} expect() calls", .{ reporter.jest.snapshots.total, reporter.summary().expectations }); + } else { + Output.prettyError(" snapshots: ", .{}); + + if (passed > 0) { + Output.prettyError("{d} passed", .{passed}); + first = false; + } + + if (added > 0) { + if (first) { + first = false; + Output.prettyError("+{d} added", .{added}); + } else { + Output.prettyError(", {d} added", .{added}); + } + } + + if (failed > 0) { + if (first) { + first = false; + Output.prettyError("{d} failed", .{failed}); + } else { + Output.prettyError(", {d} failed", .{failed}); + } + } + } + + Output.prettyError("\n", .{}); + } + + if (print_expect_calls) { + Output.prettyError(" {d:5>} expect() calls\n", .{reporter.summary().expectations}); + } + + reporter.printSummary(); + } else { + Output.prettyError("error: regex {} matched 0 tests. Searched {d} file{s} (skipping {d} test{s}) ", .{ + bun.fmt.quote(ctx.test_options.test_filter_pattern.?), + summary.files, + if (summary.files == 1) "" else "s", + summary.skipped_because_label, + if (summary.skipped_because_label == 1) "" else "s", + }); + Output.printStartEnd(ctx.start_time, std.time.nanoTimestamp()); + } } Output.prettyError("\n", .{}); @@ -1410,8 +1442,9 @@ pub const TestCommand = struct { if (vm.hot_reload == .watch) { vm.runWithAPILock(JSC.VirtualMachine, vm, runEventLoopForWatch); } + const summary = reporter.summary(); - if (reporter.summary.fail > 0 or (coverage_options.enabled and coverage_options.fractions.failing and coverage_options.fail_on_low_coverage) or !write_snapshots_success) { + if (failed_to_find_any_tests or summary.didLabelFilterOutAllTests() or summary.fail > 0 or (coverage_options.enabled and coverage_options.fractions.failing and coverage_options.fail_on_low_coverage) or !write_snapshots_success) { Global.exit(1); } else if (reporter.jest.unhandled_errors_between_tests > 0) { Global.exit(reporter.jest.unhandled_errors_between_tests); @@ -1523,14 +1556,14 @@ pub const TestCommand = struct { Output.flush(); var promise = try vm.loadEntryPointForTestRunner(file_path); - reporter.summary.files += 1; + reporter.summary().files += 1; switch (promise.status(vm.global.vm())) { .rejected => { - _ = vm.unhandledRejection(vm.global, promise.result(vm.global.vm()), promise.asValue()); - reporter.summary.fail += 1; + vm.unhandledRejection(vm.global, promise.result(vm.global.vm()), promise.asValue()); + reporter.summary().fail += 1; - if (reporter.jest.bail == reporter.summary.fail) { + if (reporter.jest.bail == reporter.summary().fail) { reporter.printSummary(); Output.prettyError("\nBailed out after {d} failure{s}\n", .{ reporter.jest.bail, if (reporter.jest.bail == 1) "" else "s" }); diff --git a/src/cli/unlink_command.zig b/src/cli/unlink_command.zig index 91e97547e1..a06648e331 100644 --- a/src/cli/unlink_command.zig +++ b/src/cli/unlink_command.zig @@ -1,8 +1,157 @@ -const Command = @import("../cli.zig").Command; -const PackageManager = @import("../install/install.zig").PackageManager; - pub const UnlinkCommand = struct { pub fn exec(ctx: Command.Context) !void { - try PackageManager.unlink(ctx); + try unlink(ctx); } }; + +fn unlink(ctx: Command.Context) !void { + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .unlink); + var manager, const original_cwd = PackageManager.init(ctx, cli, .unlink) catch |err| brk: { + if (err == error.MissingPackageJSON) { + try attemptToCreatePackageJSON(); + 
break :brk try PackageManager.init(ctx, cli, .unlink); + } + + return err; + }; + defer ctx.allocator.free(original_cwd); + + if (manager.options.shouldPrintCommandName()) { + Output.prettyln("bun unlink v" ++ Global.package_json_version_with_sha ++ "\n", .{}); + Output.flush(); + } + + if (manager.options.positionals.len == 1) { + // bun unlink + + var lockfile: Lockfile = undefined; + var name: string = ""; + var package = Lockfile.Package{}; + + // Step 1. parse the nearest package.json file + { + const package_json_source = &(bun.sys.File.toSource(manager.original_package_json_path, ctx.allocator, .{}).unwrap() catch |err| { + Output.errGeneric("failed to read \"{s}\" for unlinking: {s}", .{ manager.original_package_json_path, @errorName(err) }); + Global.crash(); + }); + lockfile.initEmpty(ctx.allocator); + + var resolver: void = {}; + try package.parse(&lockfile, manager, ctx.allocator, manager.log, package_json_source, void, &resolver, Features.folder); + name = lockfile.str(&package.name); + if (name.len == 0) { + if (manager.options.log_level != .silent) { + Output.prettyErrorln("error: package.json missing \"name\" in \"{s}\"", .{package_json_source.path.text}); + } + Global.crash(); + } else if (!strings.isNPMPackageName(name)) { + if (manager.options.log_level != .silent) { + Output.prettyErrorln("error: invalid package.json name \"{s}\" in \"{s}\"", .{ + name, + package_json_source.path.text, + }); + } + Global.crash(); + } + } + + switch (Syscall.lstat(Path.joinAbsStringZ(manager.globalLinkDirPath(), &.{name}, .auto))) { + .result => |stat| { + if (!bun.S.ISLNK(@intCast(stat.mode))) { + Output.prettyErrorln("success: package \"{s}\" is not globally linked, so there's nothing to do.", .{name}); + Global.exit(0); + } + }, + .err => { + Output.prettyErrorln("success: package \"{s}\" is not globally linked, so there's nothing to do.", .{name}); + Global.exit(0); + }, + } + + // Step 2. Setup the global directory + var node_modules: std.fs.Dir = brk: { + Bin.Linker.ensureUmask(); + var explicit_global_dir: string = ""; + if (ctx.install) |install_| { + explicit_global_dir = install_.global_dir orelse explicit_global_dir; + } + manager.global_dir = try Options.openGlobalDir(explicit_global_dir); + + try manager.setupGlobalDir(ctx); + + break :brk manager.global_dir.?.makeOpenPath("node_modules", .{}) catch |err| { + if (manager.options.log_level != .silent) + Output.prettyErrorln("error: failed to create node_modules in global dir due to error {s}", .{@errorName(err)}); + Global.crash(); + }; + }; + + // Step 3b. 
Link any global bins + if (package.bin.tag != .none) { + var link_target_buf: bun.PathBuffer = undefined; + var link_dest_buf: bun.PathBuffer = undefined; + var link_rel_buf: bun.PathBuffer = undefined; + + var node_modules_path = bun.AbsPath(.{}).initFdPath(.fromStdDir(node_modules)) catch |err| { + if (manager.options.log_level != .silent) { + Output.err(err, "failed to link binary", .{}); + } + Global.crash(); + }; + defer node_modules_path.deinit(); + + var bin_linker = Bin.Linker{ + .bin = package.bin, + .node_modules_path = &node_modules_path, + .global_bin_path = manager.options.bin_path, + .package_name = strings.StringOrTinyString.init(name), + .string_buf = lockfile.buffers.string_bytes.items, + .extern_string_buf = lockfile.buffers.extern_strings.items, + .seen = null, + .abs_target_buf = &link_target_buf, + .abs_dest_buf = &link_dest_buf, + .rel_buf = &link_rel_buf, + }; + bin_linker.unlink(true); + } + + // delete it if it exists + node_modules.deleteTree(name) catch |err| { + if (manager.options.log_level != .silent) + Output.prettyErrorln("error: failed to unlink package in global dir due to error {s}", .{@errorName(err)}); + Global.crash(); + }; + + Output.prettyln("success: unlinked package \"{s}\"", .{name}); + Global.exit(0); + } else { + Output.prettyln("error: bun unlink {{packageName}} not implemented yet", .{}); + Global.crash(); + } +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Global = bun.Global; +const Output = bun.Output; +const Path = bun.path; +const string = bun.string; +const strings = bun.strings; +const Command = bun.CLI.Command; + +const Bin = bun.install.Bin; +const Features = bun.install.Features; + +const Lockfile = bun.install.Lockfile; +const Package = Lockfile.Package; + +const PackageManager = bun.install.PackageManager; +const CommandLineArguments = PackageManager.CommandLineArguments; +const Options = PackageManager.Options; +const attemptToCreatePackageJSON = PackageManager.attemptToCreatePackageJSON; + +const Syscall = bun.sys; +const File = bun.sys.File; diff --git a/src/cli/update_command.zig b/src/cli/update_command.zig index bc2bfa76c8..711c1b65c6 100644 --- a/src/cli/update_command.zig +++ b/src/cli/update_command.zig @@ -1,8 +1,13 @@ -const Command = @import("../cli.zig").Command; -const PackageManager = @import("../install/install.zig").PackageManager; - pub const UpdateCommand = struct { pub fn exec(ctx: Command.Context) !void { - try PackageManager.update(ctx); + try updatePackageJSONAndInstallCatchError(ctx, .update); } }; + +// @sortImports + +const bun = @import("bun"); +const Command = bun.CLI.Command; + +const PackageManager = bun.install.PackageManager; +const updatePackageJSONAndInstallCatchError = PackageManager.updatePackageJSONAndInstallCatchError; diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index 701c111158..5bdaf41071 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -338,8 +338,8 @@ pub const UpgradeCommand = struct { const profile_exe_subpath = Version.profile_folder_name ++ std.fs.path.sep_str ++ "bun-profile" ++ exe_suffix; const manual_upgrade_command = switch (Environment.os) { - .linux, .mac => "curl -fsSL https://bun.sh/install | bash", - .windows => "powershell -c 'irm bun.sh/install.ps1|iex'", + .linux, .mac => "curl -fsSL https://bun.com/install | bash", + .windows => "powershell -c 'irm bun.com/install.ps1|iex'", else => "(TODO: Install script for " ++ Environment.os.displayString() ++ ")", }; @@ -889,7 +889,7 @@ 
pub const UpgradeCommand = struct { \\ \\What's new in Bun v{s}: \\ - \\ https://bun.sh/blog/release-notes/{s} + \\ https://bun.com/blog/release-notes/{s} \\ \\Report any bugs: \\ diff --git a/src/codegen/bindgen-lib.ts b/src/codegen/bindgen-lib.ts index ec9a6915d7..d7133b7b69 100644 --- a/src/codegen/bindgen-lib.ts +++ b/src/codegen/bindgen-lib.ts @@ -1,7 +1,7 @@ /** * This is the public API for `bind.ts` files * It is aliased as `import {} from 'bindgen'` - * @see https://bun.sh/docs/project/bindgen + * @see https://bun.com/docs/project/bindgen */ import { diff --git a/src/codegen/bindgen.ts b/src/codegen/bindgen.ts index 271be57437..193c949eeb 100644 --- a/src/codegen/bindgen.ts +++ b/src/codegen/bindgen.ts @@ -1363,7 +1363,7 @@ for (const [filename, { functions, typedefs }] of files) { switch (returnStrategy.type) { case "jsvalue": - zigInternal.add(`return JSC.toJSHostValue(${globalObjectArg}, `); + zigInternal.add(`return JSC.toJSHostCall(${globalObjectArg}, @src(), `); break; case "basic-out-param": zigInternal.add(`out.* = @as(bun.JSError!${returnStrategy.abiType}, `); @@ -1373,7 +1373,12 @@ for (const [filename, { functions, typedefs }] of files) { break; } - zigInternal.line(`${zid("import_" + namespaceVar)}.${fn.zigPrefix}${fn.name + vari.suffix}(`); + zigInternal.add(`${zid("import_" + namespaceVar)}.${fn.zigPrefix}${fn.name + vari.suffix}`); + if (returnStrategy.type === "jsvalue") { + zigInternal.line(", .{"); + } else { + zigInternal.line("("); + } zigInternal.indent(); for (const arg of vari.args) { const argName = arg.zigMappedName!; @@ -1421,7 +1426,7 @@ for (const [filename, { functions, typedefs }] of files) { zigInternal.dedent(); switch (returnStrategy.type) { case "jsvalue": - zigInternal.line(`));`); + zigInternal.line(`});`); break; case "basic-out-param": case "void": diff --git a/src/codegen/class-definitions.ts b/src/codegen/class-definitions.ts index a4736ee949..ab123f9d5f 100644 --- a/src/codegen/class-definitions.ts +++ b/src/codegen/class-definitions.ts @@ -92,9 +92,21 @@ export class ClassDefinition { */ name: string; /** - * Class constructor is newable. + * Class constructor is newable. Called before the JSValue corresponding to + * the object is created. Throwing an exception prevents the object from being + * created. */ construct?: boolean; + + /** + * Class constructor needs `this` value. + * + * Makes the code generator call the Zig constructor function **after** the + * JSValue is instantiated. Only use this if you must, as it probably isn't + * good for GC since it means if the constructor throws the GC will have to + * clean up the object that never reached JS. + */ + constructNeedsThis?: boolean; /** * Class constructor is callable. In JS, ES6 class constructors are not * callable. @@ -168,10 +180,6 @@ export class ClassDefinition { final?: boolean; - // Do not try to track the `this` value in the constructor automatically. - // That is a memory leak. - wantsThis?: never; - /** * Class has an `estimatedSize` function that reports external allocations to GC. * Called from any thread. 
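The `constructNeedsThis` flag documented above changes the calling convention that generate-classes.ts emits (see the next hunks): the JS wrapper object is created first and handed to the native constructor. A hypothetical opt-in, assuming the usual `export default [ define(...) ]` shape these definition files use; the class name and members are made up:

```ts
import { define } from "./class-definitions";

export default [
  define({
    name: "MyHandle",
    construct: true,
    // Create the JS object first, then call the Zig constructor with it as a
    // trailing `thisValue` argument. Per the doc comment above, avoid this
    // unless the constructor really needs `this`: if the constructor throws,
    // the GC has to clean up an object that never reached JS.
    constructNeedsThis: true,
    finalize: true,
    klass: {},
    proto: {
      close: { fn: "close", length: 0 },
    },
  }),
];
```

On the Zig side, the generated binding then expects a three-argument `MyHandle.constructor(globalObject, callFrame, thisValue)` instead of the two-argument form, as the generateZig hunk below shows.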
diff --git a/src/codegen/generate-classes.ts index 4ae300fa0b..08b824b674 100644 --- a/src/codegen/generate-classes.ts +++ b/src/codegen/generate-classes.ts @@ -406,10 +406,17 @@ function generatePrototype(typeName, obj) { var staticPrototypeValues = ""; if (obj.construct) { - externs += ` + if (obj.constructNeedsThis) { + externs += ` +extern JSC_CALLCONV void* JSC_HOST_CALL_ATTRIBUTES ${classSymbolName(typeName, "construct")}(JSC::JSGlobalObject*, JSC::CallFrame*, JSC::EncodedJSValue); +JSC_DECLARE_CUSTOM_GETTER(js${typeName}Constructor); +`; + } else { + externs += ` extern JSC_CALLCONV void* JSC_HOST_CALL_ATTRIBUTES ${classSymbolName(typeName, "construct")}(JSC::JSGlobalObject*, JSC::CallFrame*); JSC_DECLARE_CUSTOM_GETTER(js${typeName}Constructor); `; + } } if (obj.structuredClone) { @@ -622,7 +629,8 @@ function generateConstructorImpl(typeName, obj: ClassDefinition) { externs += `extern JSC_CALLCONV size_t ${symbolName(typeName, "estimatedSize")}(void* ptr);` + "\n"; } - return ` + return ( + ` ${renderStaticDecls(classSymbolName, typeName, fields, obj.supportsObjectCreate || false)} ${hashTable} @@ -635,14 +643,14 @@ void ${name}::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, ${prototypeName( } ${name}::${name}(JSC::VM& vm, JSC::Structure* structure) : Base(vm, structure, ${ - obj.call ? classSymbolName(typeName, "call") : "call" - }, construct) { + obj.call ? classSymbolName(typeName, "call") : "call" + }, construct) { } ${name}* ${name}::create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, ${prototypeName( - typeName, - )}* prototype) { + typeName, + )}* prototype) { ${name}* ptr = new (NotNull, JSC::allocateCell<${name}>(vm)) ${name}(vm, structure); ptr->finishCreation(vm, globalObject, prototype); return ptr; @@ -653,6 +661,10 @@ JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES ${name}::call(JSC::JSGlobalObject* Zig::GlobalObject *globalObject = reinterpret_cast<Zig::GlobalObject*>(lexicalGlobalObject); JSC::VM &vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); + +${ + !obj.constructNeedsThis + ? ` void* ptr = ${classSymbolName(typeName, "construct")}(globalObject, callFrame); if (!ptr || scope.exception()) [[unlikely]] { @@ -661,6 +673,21 @@ JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES ${name}::call(JSC::JSGlobalObject* Structure* structure = globalObject->${className(typeName)}Structure(); ${className(typeName)}* instance = ${className(typeName)}::create(vm, globalObject, structure, ptr); +` + : ` + Structure* structure = globalObject->${className(typeName)}Structure(); + ${className(typeName)}* instance = ${className(typeName)}::create(vm, globalObject, structure, nullptr); + + void* ptr = ${classSymbolName(typeName, "construct")}(globalObject, callFrame, JSValue::encode(instance)); + if (scope.exception()) [[unlikely]] { + ASSERT_WITH_MESSAGE(!ptr, "Memory leak detected: new ${typeName}() allocated memory without checking for exceptions."); + return JSValue::encode(JSC::jsUndefined()); + } + + instance->m_ctx = ptr; +` +} + RETURN_IF_EXCEPTION(scope, {}); ${ obj.estimatedSize @@ -694,7 +721,10 @@ JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES ${name}::construct(JSC::JSGlobalObj functionGlobalObject->${className(typeName)}Structure() ); } - + +` + + (!obj.constructNeedsThis + ? 
` void* ptr = ${classSymbolName(typeName, "construct")}(globalObject, callFrame); if (scope.exception()) [[unlikely]] { @@ -704,6 +734,19 @@ JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES ${name}::construct(JSC::JSGlobalObj ASSERT_WITH_MESSAGE(ptr, "Incorrect exception handling: new ${typeName} returned a null pointer, indicating an exception - but did not throw an exception."); ${className(typeName)}* instance = ${className(typeName)}::create(vm, globalObject, structure, ptr); +` + : ` + ${className(typeName)}* instance = ${className(typeName)}::create(vm, globalObject, structure, nullptr); + + void* ptr = ${classSymbolName(typeName, "construct")}(globalObject, callFrame, JSValue::encode(instance)); + if (scope.exception()) [[unlikely]] { + ASSERT_WITH_MESSAGE(!ptr, "Memory leak detected: new ${typeName}() allocated memory without checking for exceptions."); + return JSValue::encode(JSC::jsUndefined()); + } + + instance->m_ctx = ptr; + `) + + ` ${ obj.estimatedSize ? ` @@ -728,7 +771,8 @@ ${ } - `; + ` + ); } function renderCachedFieldsHeader(typeName, klass, proto, values) { @@ -1788,6 +1832,7 @@ function generateZig( proto = {}, own = {}, construct, + constructNeedsThis = false, finalize, noConstructor = false, overridesToJS = false, @@ -1913,7 +1958,21 @@ const JavaScriptCoreBindings = struct { if (construct && !noConstructor) { exports.set("construct", classSymbolName(typeName, "construct")); - output += ` + if (constructNeedsThis) { + output += ` + pub fn ${classSymbolName(typeName, "construct")}(globalObject: *jsc.JSGlobalObject, callFrame: *jsc.CallFrame, thisValue: jsc.JSValue) callconv(jsc.conv) ?*anyopaque { + if (comptime Environment.enable_logs) log_zig_constructor("${typeName}", callFrame); + return @as(*${typeName}, ${typeName}.constructor(globalObject, callFrame, thisValue) catch |err| switch (err) { + error.JSError => return null, + error.OutOfMemory => { + globalObject.throwOutOfMemory() catch {}; + return null; + }, + }); + } + `; + } else { + output += ` pub fn ${classSymbolName(typeName, "construct")}(globalObject: *jsc.JSGlobalObject, callFrame: *jsc.CallFrame) callconv(jsc.conv) ?*anyopaque { if (comptime Environment.enable_logs) log_zig_constructor("${typeName}", callFrame); return @as(*${typeName}, ${typeName}.constructor(globalObject, callFrame) catch |err| switch (err) { @@ -1925,6 +1984,7 @@ const JavaScriptCoreBindings = struct { }); } `; + } } if (call) { @@ -1958,7 +2018,7 @@ const JavaScriptCoreBindings = struct { if (comptime Environment.enable_logs) log_zig_getter("${typeName}", "${name}"); return switch (@typeInfo(@typeInfo(@TypeOf(${typeName}.${getter})).@"fn".return_type.?)) { .error_union => { - return @call(.always_inline, jsc.toJSHostValue, .{globalObject, @call(.always_inline, ${typeName}.${getter}, .{this, ${thisValue ? "thisValue," : ""} globalObject})}); + return @call(.always_inline, jsc.toJSHostCall, .{globalObject, @src(), ${typeName}.${getter}, .{this, ${thisValue ? "thisValue," : ""} globalObject}}); }, else => @call(.always_inline, ${typeName}.${getter}, .{this, ${thisValue ? "thisValue," : ""} globalObject}), }; @@ -2002,7 +2062,7 @@ const JavaScriptCoreBindings = struct { output += ` pub fn ${names.fn}(thisValue: *${typeName}, globalObject: *jsc.JSGlobalObject, callFrame: *jsc.CallFrame${proto[name].passThis ? 
", js_this_value: jsc.JSValue" : ""}) callconv(jsc.conv) jsc.JSValue { if (comptime Environment.enable_logs) log_zig_method("${typeName}", "${name}", callFrame); - return @call(.always_inline, jsc.toJSHostValue, .{globalObject, @call(.always_inline, ${typeName}.${fn}, .{thisValue, globalObject, callFrame${proto[name].passThis ? ", js_this_value" : ""}})}); + return @call(.always_inline, jsc.toJSHostCall, .{globalObject, @src(), ${typeName}.${fn}, .{thisValue, globalObject, callFrame${proto[name].passThis ? ", js_this_value" : ""}}}); } `; } @@ -2020,7 +2080,7 @@ const JavaScriptCoreBindings = struct { if (comptime Environment.enable_logs) log_zig_class_getter("${typeName}", "${name}"); return switch (@typeInfo(@typeInfo(@TypeOf(${typeName}.${getter})).@"fn".return_type.?)) { .error_union => { - return @call(.always_inline, jsc.toJSHostValue, .{globalObject, @call(.always_inline, ${typeName}.${getter}, .{globalObject, ${thisValue ? "thisValue," : ""} propertyName})}); + return @call(.always_inline, jsc.toJSHostCall, .{globalObject, @src(), ${typeName}.${getter}, .{globalObject, ${thisValue ? "thisValue," : ""} propertyName}}); }, else => { return @call(.always_inline, ${typeName}.${getter}, .{globalObject, ${thisValue ? "thisValue," : ""} propertyName}); @@ -2087,7 +2147,7 @@ const JavaScriptCoreBindings = struct { output += ` pub fn ${symbolName(typeName, "onStructuredCloneDeserialize")}(globalObject: *jsc.JSGlobalObject, ptr: [*]u8, end: [*]u8) callconv(jsc.conv) jsc.JSValue { if (comptime Environment.enable_logs) log_zig_structured_clone_deserialize("${typeName}"); - return @call(.always_inline, jsc.toJSHostValue, .{ globalObject, @call(.always_inline, ${typeName}.onStructuredCloneDeserialize, .{globalObject, ptr, end}) }); + return @call(.always_inline, jsc.toJSHostCall, .{ globalObject, @src(), ${typeName}.onStructuredCloneDeserialize, .{globalObject, ptr, end} }); } `; } else { @@ -2415,19 +2475,35 @@ pub const WriteBytesFn = *const fn(*anyopaque, ptr: [*]const u8, len: u32) callc `; const classes: ClassDefinition[] = []; -for (const file of files) { - const result = require(path.resolve(file)); - if (!(result?.default?.length ?? 0)) continue; - console.log("Found", result.default.length, "classes from", file); - for (let { name, proto = {}, klass = {} } of result.default) { - let protoProps = Object.keys(proto).length ? `${Object.keys(proto).length} fields` : ""; - let klassProps = Object.keys(klass).length ? `${Object.keys(klass).length} class fields` : ""; - let props = [protoProps, klassProps].filter(Boolean).join(", "); - if (props.length) props = ` (${props})`; - console.log(` - ${name}` + props); +{ + let errors = []; + for (const file of files) { + const filepath = path.resolve(file); + const result = require(filepath); + if (!(result?.default?.length ?? 0)) { + errors.push( + new TypeError( + `Missing classes in "${path.relative(process.cwd(), filepath)}". Expected \`export default [ define(...) ] satisfies Array\` but got ${Bun.inspect(result).slice(0, 100) + "..."} `, + ), + ); + continue; + } + + console.log("Found", result.default.length, "classes from", file); + for (let { name, proto = {}, klass = {} } of result.default) { + let protoProps = Object.keys(proto).length ? `${Object.keys(proto).length} fields` : ""; + let klassProps = Object.keys(klass).length ? 
`${Object.keys(klass).length} class fields` : ""; + let props = [protoProps, klassProps].filter(Boolean).join(", "); + if (props.length) props = ` (${props})`; + console.log(` - ${name}` + props); + } + + classes.push(...result.default); } - classes.push(...result.default); + if (errors.length) { + throw new AggregateError(errors, "Failed to generate classes"); + } } classes.sort((a, b) => (a.name < b.name ? -1 : 1)); diff --git a/src/codegen/generate-js2native.ts b/src/codegen/generate-js2native.ts index 5116e1f390..84c0aa7065 100644 --- a/src/codegen/generate-js2native.ts +++ b/src/codegen/generate-js2native.ts @@ -215,7 +215,7 @@ export function getJS2NativeZig(gs2NativeZigPath: string) { .filter(x => x.type === "zig") .flatMap(call => [ `export fn ${symbol(call)}_workaround(global: *JSC.JSGlobalObject) callconv(JSC.conv) JSC.JSValue {`, - ` return JSC.toJSHostValue(global, @import(${JSON.stringify(path.relative(path.dirname(gs2NativeZigPath), call.filename))}).${call.symbol}(global));`, + ` return JSC.toJSHostCall(global, @src(), @import(${JSON.stringify(path.relative(path.dirname(gs2NativeZigPath), call.filename))}).${call.symbol}, .{global});`, "}", ]), ...wrapperCalls diff --git a/src/codegen/generate-jssink.ts b/src/codegen/generate-jssink.ts index 6727da7bcf..845e8ed854 100644 --- a/src/codegen/generate-jssink.ts +++ b/src/codegen/generate-jssink.ts @@ -162,8 +162,8 @@ function header() { static size_t memoryCost(void* sinkPtr); void* m_sinkPtr; - mutable WriteBarrier<JSC::Unknown> m_onPull; - mutable WriteBarrier<JSC::Unknown> m_onClose; + mutable WriteBarrier<JSC::JSObject> m_onPull; + mutable WriteBarrier<JSC::JSObject> m_onClose; mutable JSC::Weak<JSC::JSObject> m_weakReadableStream; uintptr_t m_onDestroy { 0 }; @@ -825,8 +825,16 @@ DEFINE_VISIT_CHILDREN(${className}); void ${controller}::start(JSC::JSGlobalObject *globalObject, JSC::JSValue readableStream, JSC::JSValue onPull, JSC::JSValue onClose) { this->m_weakReadableStream = JSC::Weak<JSC::JSObject>(readableStream.getObject()); - this->m_onPull.set(globalObject->vm(), this, onPull); - this->m_onClose.set(globalObject->vm(), this, onClose); + if (onPull) { + if (auto* object = onPull.getObject()) { + this->m_onPull.set(globalObject->vm(), this, object); + } + } + if (onClose) { + if (auto* object = onClose.getObject()) { + this->m_onClose.set(globalObject->vm(), this, object); + } + } } void ${className}::destroy(JSCell* cell) diff --git a/src/codegen/generate-node-errors.ts b/src/codegen/generate-node-errors.ts index aafbe8c4e1..bb41dc1389 100644 --- a/src/codegen/generate-node-errors.ts +++ b/src/codegen/generate-node-errors.ts @@ -124,6 +124,7 @@ zig += ` extern fn Bun__createErrorWithCode(globalThis: *JSC.JSGlobalObject, code: Error, message: *bun.String) JSC.JSValue; /// Creates an Error object with the given error code. + /// If an error is thrown while creating the Error object, returns that error instead. /// Derefs the message string. 
pub fn toJS(this: Error, globalThis: *JSC.JSGlobalObject, message: *bun.String) JSC.JSValue { defer message.deref(); diff --git a/src/compile_target.zig b/src/compile_target.zig index 58540642fe..8298b35551 100644 --- a/src/compile_target.zig +++ b/src/compile_target.zig @@ -393,7 +393,7 @@ pub fn from(input_: []const u8) CompileTarget { Output.errGeneric( \\Unsupported target {} in "bun{s}" \\To see the supported targets: - \\ https://bun.sh/docs/bundler/executables + \\ https://bun.com/docs/bundler/executables , .{ bun.fmt.quote(token), diff --git a/src/crash_handler.zig b/src/crash_handler.zig index edbd263bfb..78f7595d2c 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -1828,7 +1828,7 @@ pub const js_bindings = struct { const list = bun.Analytics.packed_features_list; const array = try JSValue.createEmptyArray(global, list.len); for (list, 0..) |feature, i| { - array.putIndex(global, @intCast(i), bun.String.static(feature).toJS(global)); + try array.putIndex(global, @intCast(i), bun.String.static(feature).toJS(global)); } obj.put(global, JSC.ZigString.static("features"), array); obj.put(global, JSC.ZigString.static("version"), bun.String.init(Global.package_json_version).toJS(global)); diff --git a/src/create/projects/react-shadcn-spa/REPLACE_ME_WITH_YOUR_APP_FILE_NAME.html b/src/create/projects/react-shadcn-spa/REPLACE_ME_WITH_YOUR_APP_FILE_NAME.html index 547b02d9d7..750d8a0dc1 100644 --- a/src/create/projects/react-shadcn-spa/REPLACE_ME_WITH_YOUR_APP_FILE_NAME.html +++ b/src/create/projects/react-shadcn-spa/REPLACE_ME_WITH_YOUR_APP_FILE_NAME.html @@ -5,7 +5,7 @@ REPLACE_ME_WITH_YOUR_APP_BASE_NAME | Powered by Bun - +
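Further down, the color_js.zig hunks thread `try` through the JS array accessors (`getLength`, `getIndex`, `putIndex`, `coerce`) without changing the clamping those call sites rely on. A quick sketch of that behavior from the JS side; the `"[rgb]"`/`"[rgba]"` format names come straight from the enum cases in the diff, but treat the literal results as expectations rather than verified output:

```ts
// Channel values outside 0..255 are clamped, not rejected, per the hunk's
// comment: "CSS spec says to clamp values to their valid range".
console.log(Bun.color([300, -20, 71], "[rgb]"));      // expected: [255, 0, 71]
console.log(Bun.color([255, 99, 71, 300], "[rgba]")); // expected: [255, 99, 71, 255]
```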
              diff --git a/src/csrf.zig b/src/csrf.zig index cfdef4b424..7a9c3abd76 100644 --- a/src/csrf.zig +++ b/src/csrf.zig @@ -230,7 +230,7 @@ pub fn csrf__generate_impl(globalObject: *JSC.JSGlobalObject, callframe: *JSC.Ca if (jsSecret.isEmptyOrUndefinedOrNull()) { return globalObject.throwInvalidArguments("Secret is required", .{}); } - if (!jsSecret.isString() or jsSecret.getLength(globalObject) == 0) { + if (!jsSecret.isString() or try jsSecret.getLength(globalObject) == 0) { return globalObject.throwInvalidArguments("Secret must be a non-empty string", .{}); } secret = try jsSecret.toSlice(globalObject, bun.default_allocator); @@ -316,7 +316,7 @@ pub fn csrf__verify_impl(globalObject: *JSC.JSGlobalObject, call_frame: *JSC.Cal if (jsToken.isUndefinedOrNull()) { return globalObject.throwInvalidArguments("Token is required", .{}); } - if (!jsToken.isString() or jsToken.getLength(globalObject) == 0) { + if (!jsToken.isString() or try jsToken.getLength(globalObject) == 0) { return globalObject.throwInvalidArguments("Token must be a non-empty string", .{}); } const token = try jsToken.toSlice(globalObject, bun.default_allocator); diff --git a/src/css/css_internals.zig b/src/css/css_internals.zig index 47bd0e5833..76d35336f6 100644 --- a/src/css/css_internals.zig +++ b/src/css/css_internals.zig @@ -158,8 +158,8 @@ fn parserOptionsFromJS(globalThis: *JSC.JSGlobalObject, allocator: Allocator, op _ = allocator; // autofix if (try jsobj.getTruthy(globalThis, "flags")) |val| { if (val.isArray()) { - var iter = val.arrayIterator(globalThis); - while (iter.next()) |item| { + var iter = try val.arrayIterator(globalThis); + while (try iter.next()) |item| { const bunstr = try item.toBunString(globalThis); defer bunstr.deref(); const str = bunstr.toUTF8(bun.default_allocator); diff --git a/src/css/values/color_js.zig b/src/css/values/color_js.zig index 4ac9e2c647..5c76c3bacc 100644 --- a/src/css/values/color_js.zig +++ b/src/css/values/color_js.zig @@ -60,7 +60,7 @@ fn colorIntFromJS(globalThis: *JSC.JSGlobalObject, input: JSC.JSValue, comptime } // CSS spec says to clamp values to their valid range so we'll respect that here - return std.math.clamp(input.coerce(i32, globalThis), 0, 255); + return std.math.clamp(try input.coerce(i32, globalThis), 0, 255); } // https://github.com/tmux/tmux/blob/dae2868d1227b95fd076fb4a5efa6256c7245943/colour.c#L44-L55 @@ -183,18 +183,18 @@ pub fn jsFunctionColor(globalThis: *JSC.JSGlobalObject, callFrame: *JSC.CallFram break :brk .{ .result = css.CssColor{ .rgba = .{ .alpha = rgba.alpha, .red = rgba.red, .green = rgba.green, .blue = rgba.blue } } }; } else if (args[0].jsType().isArrayLike()) { - switch (args[0].getLength(globalThis)) { + switch (try args[0].getLength(globalThis)) { 3 => { - const r = try colorIntFromJS(globalThis, args[0].getIndex(globalThis, 0), "[0]"); - const g = try colorIntFromJS(globalThis, args[0].getIndex(globalThis, 1), "[1]"); - const b = try colorIntFromJS(globalThis, args[0].getIndex(globalThis, 2), "[2]"); + const r = try colorIntFromJS(globalThis, try args[0].getIndex(globalThis, 0), "[0]"); + const g = try colorIntFromJS(globalThis, try args[0].getIndex(globalThis, 1), "[1]"); + const b = try colorIntFromJS(globalThis, try args[0].getIndex(globalThis, 2), "[2]"); break :brk .{ .result = css.CssColor{ .rgba = .{ .alpha = 255, .red = @intCast(r), .green = @intCast(g), .blue = @intCast(b) } } }; }, 4 => { - const r = try colorIntFromJS(globalThis, args[0].getIndex(globalThis, 0), "[0]"); - const g = try colorIntFromJS(globalThis, 
args[0].getIndex(globalThis, 1), "[1]"); - const b = try colorIntFromJS(globalThis, args[0].getIndex(globalThis, 2), "[2]"); - const a = try colorIntFromJS(globalThis, args[0].getIndex(globalThis, 3), "[3]"); + const r = try colorIntFromJS(globalThis, try args[0].getIndex(globalThis, 0), "[0]"); + const g = try colorIntFromJS(globalThis, try args[0].getIndex(globalThis, 1), "[1]"); + const b = try colorIntFromJS(globalThis, try args[0].getIndex(globalThis, 2), "[2]"); + const a = try colorIntFromJS(globalThis, try args[0].getIndex(globalThis, 3), "[3]"); break :brk .{ .result = css.CssColor{ .rgba = .{ .alpha = @intCast(a), .red = @intCast(r), .green = @intCast(g), .blue = @intCast(b) } } }; }, else => { @@ -304,17 +304,17 @@ pub fn jsFunctionColor(globalThis: *JSC.JSGlobalObject, callFrame: *JSC.CallFram }, .@"[rgb]" => { const object = try JSC.JSValue.createEmptyArray(globalThis, 3); - object.putIndex(globalThis, 0, JSC.JSValue.jsNumber(rgba.red)); - object.putIndex(globalThis, 1, JSC.JSValue.jsNumber(rgba.green)); - object.putIndex(globalThis, 2, JSC.JSValue.jsNumber(rgba.blue)); + try object.putIndex(globalThis, 0, JSC.JSValue.jsNumber(rgba.red)); + try object.putIndex(globalThis, 1, JSC.JSValue.jsNumber(rgba.green)); + try object.putIndex(globalThis, 2, JSC.JSValue.jsNumber(rgba.blue)); return object; }, .@"[rgba]" => { const object = try JSC.JSValue.createEmptyArray(globalThis, 4); - object.putIndex(globalThis, 0, JSC.JSValue.jsNumber(rgba.red)); - object.putIndex(globalThis, 1, JSC.JSValue.jsNumber(rgba.green)); - object.putIndex(globalThis, 2, JSC.JSValue.jsNumber(rgba.blue)); - object.putIndex(globalThis, 3, JSC.JSValue.jsNumber(rgba.alpha)); + try object.putIndex(globalThis, 0, JSC.JSValue.jsNumber(rgba.red)); + try object.putIndex(globalThis, 1, JSC.JSValue.jsNumber(rgba.green)); + try object.putIndex(globalThis, 2, JSC.JSValue.jsNumber(rgba.blue)); + try object.putIndex(globalThis, 3, JSC.JSValue.jsNumber(rgba.alpha)); return object; }, .number => { diff --git a/src/deps/c_ares.zig b/src/deps/c_ares.zig index 9dccf83abf..c40fd271d4 100644 --- a/src/deps/c_ares.zig +++ b/src/deps/c_ares.zig @@ -227,7 +227,7 @@ pub const struct_hostent = extern struct { while (this.h_aliases.?[count]) |alias| { const alias_len = bun.len(alias); const alias_slice = alias[0..alias_len]; - array.putIndex(globalThis, count, JSC.ZigString.fromUTF8(alias_slice).toJS(globalThis)); + try array.putIndex(globalThis, count, JSC.ZigString.fromUTF8(alias_slice).toJS(globalThis)); count += 1; } @@ -332,8 +332,8 @@ pub const hostent_with_ttls = struct { bun.dns.addressToJS(&std.net.Address.initIp4(addr[0..4].*, 0), globalThis)) catch return globalThis.throwOutOfMemoryValue(); const ttl: ?c_int = if (count < this.ttls.len) this.ttls[count] else null; - const resultObject = JSC.JSValue.createObject2(globalThis, &addressKey, &ttlKey, addrString, if (ttl) |val| JSC.jsNumber(val) else .js_undefined); - array.putIndex(globalThis, count, resultObject); + const resultObject = try JSC.JSValue.createObject2(globalThis, &addressKey, &ttlKey, addrString, if (ttl) |val| JSC.jsNumber(val) else .js_undefined); + try array.putIndex(globalThis, count, resultObject); } return array; @@ -437,17 +437,17 @@ pub const struct_nameinfo = extern struct { if (this.node != null) { const node_len = bun.len(this.node); const node_slice = this.node[0..node_len]; - array.putIndex(globalThis, 0, JSC.ZigString.fromUTF8(node_slice).toJS(globalThis)); + try array.putIndex(globalThis, 0, JSC.ZigString.fromUTF8(node_slice).toJS(globalThis)); } 
else { - array.putIndex(globalThis, 0, .js_undefined); + try array.putIndex(globalThis, 0, .js_undefined); } if (this.service != null) { const service_len = bun.len(this.service); const service_slice = this.service[0..service_len]; - array.putIndex(globalThis, 1, JSC.ZigString.fromUTF8(service_slice).toJS(globalThis)); + try array.putIndex(globalThis, 1, JSC.ZigString.fromUTF8(service_slice).toJS(globalThis)); } else { - array.putIndex(globalThis, 1, .js_undefined); + try array.putIndex(globalThis, 1, .js_undefined); } return array; @@ -516,7 +516,7 @@ pub const AddrInfo = extern struct { var j: u32 = 0; var current: ?*AddrInfo_node = addr_info.node; while (current) |this_node| : (current = this_node.next) { - array.putIndex( + try array.putIndex( globalThis, j, GetAddrInfo.Result.toJS( @@ -887,7 +887,7 @@ pub const struct_ares_caa_reply = extern struct { var i: u32 = 0; while (caa != null) { var node = caa.?; - array.putIndex(globalThis, i, node.toJS(globalThis, allocator)); + try array.putIndex(globalThis, i, node.toJS(globalThis, allocator)); caa = node.next; i += 1; } @@ -965,7 +965,7 @@ pub const struct_ares_srv_reply = extern struct { var i: u32 = 0; while (srv != null) { var node = srv.?; - array.putIndex(globalThis, i, node.toJS(globalThis, allocator)); + try array.putIndex(globalThis, i, node.toJS(globalThis, allocator)); srv = node.next; i += 1; } @@ -1048,7 +1048,7 @@ pub const struct_ares_mx_reply = extern struct { var i: u32 = 0; while (mx != null) { var node = mx.?; - array.putIndex(globalThis, i, node.toJS(globalThis, allocator)); + try array.putIndex(globalThis, i, node.toJS(globalThis, allocator)); mx = node.next; i += 1; } @@ -1122,7 +1122,7 @@ pub const struct_ares_txt_reply = extern struct { var i: u32 = 0; while (txt != null) { var node = txt.?; - array.putIndex(globalThis, i, try node.toJS(globalThis, allocator)); + try array.putIndex(globalThis, i, try node.toJS(globalThis, allocator)); txt = node.next; i += 1; } @@ -1133,7 +1133,7 @@ pub const struct_ares_txt_reply = extern struct { pub fn toJS(this: *struct_ares_txt_reply, globalThis: *JSC.JSGlobalObject, _: std.mem.Allocator) bun.JSError!JSC.JSValue { const array = try JSC.JSValue.createEmptyArray(globalThis, 1); const value = this.txt[0..this.length]; - array.putIndex(globalThis, 0, JSC.ZigString.fromUTF8(value).toJS(globalThis)); + try array.putIndex(globalThis, 0, JSC.ZigString.fromUTF8(value).toJS(globalThis)); return array; } @@ -1150,7 +1150,7 @@ pub const struct_ares_txt_reply = extern struct { var i: u32 = 0; while (txt != null) : (txt = txt.?.next) { var node = txt.?; - array.putIndex(globalThis, i, JSC.ZigString.fromUTF8(node.txt[0..node.length]).toJS(globalThis)); + try array.putIndex(globalThis, i, JSC.ZigString.fromUTF8(node.txt[0..node.length]).toJS(globalThis)); i += 1; } @@ -1224,7 +1224,7 @@ pub const struct_ares_naptr_reply = extern struct { var i: u32 = 0; while (naptr != null) { var node = naptr.?; - array.putIndex(globalThis, i, node.toJS(globalThis, allocator)); + try array.putIndex(globalThis, i, node.toJS(globalThis, allocator)); naptr = node.next; i += 1; } @@ -1407,7 +1407,7 @@ pub const struct_any_reply = struct { } transformed.put(globalThis, "type", bun.String.ascii(&upper).toJS(globalThis)); - array.putIndex(globalThis, i.*, transformed); + try array.putIndex(globalThis, i.*, transformed); i.* += 1; } @@ -1418,8 +1418,8 @@ pub const struct_any_reply = struct { reply.toJSResponse(allocator, globalThis, lookup_name); if (response.isArray()) { - var iterator = 
response.arrayIterator(globalThis); - while (iterator.next()) |item| { + var iterator = try response.arrayIterator(globalThis); + while (try iterator.next()) |item| { try append(globalThis, array, i, item, lookup_name); } } else { @@ -1697,7 +1697,10 @@ pub const Error = enum(i32) { const system_error = JSC.SystemError{ .errno = @intFromEnum(this.errno), .code = bun.String.static(this.errno.code()), - .message = if (this.hostname) |hostname| bun.String.createFormat("{s} {s} {s}", .{ this.syscall, this.errno.code()[4..], hostname }) catch bun.outOfMemory() else bun.String.empty, + .message = if (this.hostname) |hostname| + bun.String.createFormat("{s} {s} {s}", .{ this.syscall, this.errno.code()[4..], hostname }) catch bun.outOfMemory() + else + bun.String.createFormat("{s} {s}", .{ this.syscall, this.errno.code()[4..] }) catch bun.outOfMemory(), .syscall = bun.String.createUTF8(this.syscall), .hostname = this.hostname orelse bun.String.empty, }; @@ -1745,31 +1748,23 @@ pub const Error = enum(i32) { return Deferred.init(this, syscall, host_string, promise.*); } - pub fn toJS(this: Error, globalThis: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJSWithSyscall(this: Error, globalThis: *JSC.JSGlobalObject, comptime syscall: [:0]const u8) JSC.JSValue { const instance = (JSC.SystemError{ .errno = @intFromEnum(this), - .code = bun.String.static(this.code()), + .code = bun.String.static(this.code()[4..]), + .syscall = bun.String.static(syscall), + .message = bun.String.createFormat("{s} {s}", .{ syscall, this.code()[4..] }) catch bun.outOfMemory(), }).toErrorInstance(globalThis); instance.put(globalThis, "name", bun.String.static("DNSException").toJS(globalThis)); return instance; } - pub fn toJSWithSyscall(this: Error, globalThis: *JSC.JSGlobalObject, comptime syscall: []const u8) JSC.JSValue { + pub fn toJSWithSyscallAndHostname(this: Error, globalThis: *JSC.JSGlobalObject, comptime syscall: [:0]const u8, hostname: []const u8) JSC.JSValue { const instance = (JSC.SystemError{ .errno = @intFromEnum(this), - .code = bun.String.static(this.code()), - .syscall = bun.String.static((syscall ++ "\x00")[0..syscall.len :0]), - }).toErrorInstance(globalThis); - instance.put(globalThis, "name", bun.String.static("DNSException").toJS(globalThis)); - return instance; - } - - pub fn toJSWithSyscallAndHostname(this: Error, globalThis: *JSC.JSGlobalObject, comptime syscall: []const u8, hostname: []const u8) JSC.JSValue { - const instance = (JSC.SystemError{ - .errno = @intFromEnum(this), - .code = bun.String.static(this.code()), + .code = bun.String.static(this.code()[4..]), .message = bun.String.createFormat("{s} {s} {s}", .{ syscall, this.code()[4..], hostname }) catch bun.outOfMemory(), - .syscall = bun.String.static((syscall ++ "\x00")[0..syscall.len :0]), + .syscall = bun.String.static(syscall), .hostname = bun.String.createUTF8(hostname), }).toErrorInstance(globalThis); instance.put(globalThis, "name", bun.String.static("DNSException").toJS(globalThis)); diff --git a/src/deps/libuv.zig b/src/deps/libuv.zig index f1955a827d..0fe7d29eb0 100644 --- a/src/deps/libuv.zig +++ b/src/deps/libuv.zig @@ -887,6 +887,7 @@ pub const UV_EILSEQ: c_int = -4027; pub const UV_ESOCKTNOSUPPORT: c_int = -4025; pub const UV_ENODATA: c_int = -4024; pub const UV_EUNATCH: c_int = -4023; +pub const UV_ENOEXEC: c_int = -4022; pub const UV_ERRNO_MAX: c_int = -4096; pub const uv_errno_t = c_int; pub const UV_UNKNOWN_HANDLE: c_int = 0; diff --git a/src/deps/uws/Response.zig b/src/deps/uws/Response.zig index db9a64add3..01ad887ddc 
100644 --- a/src/deps/uws/Response.zig +++ b/src/deps/uws/Response.zig @@ -316,6 +316,20 @@ pub const AnyResponse = union(enum) { SSL: *uws.NewApp(true).Response, TCP: *uws.NewApp(false).Response, + pub fn assertSSL(this: AnyResponse) *uws.NewApp(true).Response { + return switch (this) { + .SSL => |resp| resp, + .TCP => bun.Output.panic("Expected SSL response, got TCP response", .{}), + }; + } + + pub fn assertNoSSL(this: AnyResponse) *uws.NewApp(false).Response { + return switch (this) { + .SSL => bun.Output.panic("Expected TCP response, got SSL response", .{}), + .TCP => |resp| resp, + }; + } + pub fn markNeedsMore(this: AnyResponse) void { return switch (this) { inline else => |resp| resp.markNeedsMore(), diff --git a/src/deps/uws/UpgradedDuplex.zig b/src/deps/uws/UpgradedDuplex.zig index 31a0896828..ce57557890 100644 --- a/src/deps/uws/UpgradedDuplex.zig +++ b/src/deps/uws/UpgradedDuplex.zig @@ -96,7 +96,10 @@ fn callWriteOrEnd(this: *UpgradedDuplex, data: ?[]const u8, msg_more: bool) void const globalThis = this.global.?; const writeOrEnd = if (msg_more) duplex.getFunction(globalThis, "write") catch return orelse return else duplex.getFunction(globalThis, "end") catch return orelse return; if (data) |data_| { - const buffer = JSC.ArrayBuffer.BinaryType.toJS(.Buffer, data_, globalThis); + const buffer = JSC.ArrayBuffer.BinaryType.toJS(.Buffer, data_, globalThis) catch |err| { + this.handlers.onError(this.handlers.ctx, globalThis.takeException(err)); + return; + }; buffer.ensureStillAlive(); _ = writeOrEnd.call(globalThis, duplex, &.{buffer}) catch |err| { @@ -278,7 +281,7 @@ pub fn getJSHandlers(this: *UpgradedDuplex, globalThis: *JSC.JSGlobalObject) bun this.onDataCallback = .create(dataCallback, globalThis); break :brk dataCallback; }; - array.putIndex(globalThis, 0, callback); + try array.putIndex(globalThis, 0, callback); } { @@ -298,7 +301,7 @@ pub fn getJSHandlers(this: *UpgradedDuplex, globalThis: *JSC.JSGlobalObject) bun this.onEndCallback = .create(endCallback, globalThis); break :brk endCallback; }; - array.putIndex(globalThis, 1, callback); + try array.putIndex(globalThis, 1, callback); } { @@ -317,7 +320,7 @@ pub fn getJSHandlers(this: *UpgradedDuplex, globalThis: *JSC.JSGlobalObject) bun this.onWritableCallback = .create(writableCallback, globalThis); break :brk writableCallback; }; - array.putIndex(globalThis, 2, callback); + try array.putIndex(globalThis, 2, callback); } { @@ -336,7 +339,7 @@ pub fn getJSHandlers(this: *UpgradedDuplex, globalThis: *JSC.JSGlobalObject) bun this.onCloseCallback = .create(closeCallback, globalThis); break :brk closeCallback; }; - array.putIndex(globalThis, 3, callback); + try array.putIndex(globalThis, 3, callback); } return array; @@ -355,15 +358,15 @@ pub fn startTLS(this: *UpgradedDuplex, ssl_options: JSC.API.ServerConfig.SSLConf this.wrapper.?.start(); } -pub fn encodeAndWrite(this: *UpgradedDuplex, data: []const u8, is_end: bool) i32 { - log("encodeAndWrite (len: {} - is_end: {})", .{ data.len, is_end }); +pub fn encodeAndWrite(this: *UpgradedDuplex, data: []const u8) i32 { + log("encodeAndWrite (len: {})", .{data.len}); if (this.wrapper) |*wrapper| { return @as(i32, @intCast(wrapper.writeData(data) catch 0)); } return 0; } -pub fn rawWrite(this: *UpgradedDuplex, encoded_data: []const u8, _: bool) i32 { +pub fn rawWrite(this: *UpgradedDuplex, encoded_data: []const u8) i32 { this.internalWrite(encoded_data); return @intCast(encoded_data.len); } diff --git a/src/deps/uws/WindowsNamedPipe.zig b/src/deps/uws/WindowsNamedPipe.zig index 
21939374d8..0b65cd0c93 100644 --- a/src/deps/uws/WindowsNamedPipe.zig +++ b/src/deps/uws/WindowsNamedPipe.zig @@ -459,8 +459,8 @@ pub fn isTLS(this: *WindowsNamedPipe) bool { return this.flags.is_ssl; } -pub fn encodeAndWrite(this: *WindowsNamedPipe, data: []const u8, is_end: bool) i32 { - log("encodeAndWrite (len: {} - is_end: {})", .{ data.len, is_end }); +pub fn encodeAndWrite(this: *WindowsNamedPipe, data: []const u8) i32 { + log("encodeAndWrite (len: {})", .{data.len}); if (this.wrapper) |*wrapper| { return @as(i32, @intCast(wrapper.writeData(data) catch 0)); } else { @@ -469,7 +469,7 @@ pub fn encodeAndWrite(this: *WindowsNamedPipe, data: []const u8, is_end: bool) i return @intCast(data.len); } -pub fn rawWrite(this: *WindowsNamedPipe, encoded_data: []const u8, _: bool) i32 { +pub fn rawWrite(this: *WindowsNamedPipe, encoded_data: []const u8) i32 { this.internalWrite(encoded_data); return @intCast(encoded_data.len); } diff --git a/src/deps/uws/socket.zig b/src/deps/uws/socket.zig index b2f8d7c22b..7d382c8887 100644 --- a/src/deps/uws/socket.zig +++ b/src/deps/uws/socket.zig @@ -317,29 +317,29 @@ pub fn NewSocketHandler(comptime is_ssl: bool) type { } } - pub fn write(this: ThisSocket, data: []const u8, msg_more: bool) i32 { + pub fn write(this: ThisSocket, data: []const u8) i32 { return switch (this.socket) { - .upgradedDuplex => |socket| socket.encodeAndWrite(data, msg_more), - .pipe => |pipe| if (comptime Environment.isWindows) pipe.encodeAndWrite(data, msg_more) else 0, - .connected => |socket| socket.write(is_ssl, data, msg_more), + .upgradedDuplex => |socket| socket.encodeAndWrite(data), + .pipe => |pipe| if (comptime Environment.isWindows) pipe.encodeAndWrite(data) else 0, + .connected => |socket| socket.write(is_ssl, data), .connecting, .detached => 0, }; } pub fn writeFd(this: ThisSocket, data: []const u8, file_descriptor: bun.FileDescriptor) i32 { return switch (this.socket) { - .upgradedDuplex, .pipe => this.write(data, false), + .upgradedDuplex, .pipe => this.write(data), .connected => |socket| socket.writeFd(data, file_descriptor), .connecting, .detached => 0, }; } - pub fn rawWrite(this: ThisSocket, data: []const u8, msg_more: bool) i32 { + pub fn rawWrite(this: ThisSocket, data: []const u8) i32 { return switch (this.socket) { - .connected => |socket| socket.rawWrite(is_ssl, data, msg_more), + .connected => |socket| socket.rawWrite(is_ssl, data), .connecting, .detached => 0, - .upgradedDuplex => |socket| socket.rawWrite(data, msg_more), - .pipe => |pipe| if (comptime Environment.isWindows) pipe.rawWrite(data, msg_more) else 0, + .upgradedDuplex => |socket| socket.rawWrite(data), + .pipe => |pipe| if (comptime Environment.isWindows) pipe.rawWrite(data) else 0, }; } @@ -1136,10 +1136,10 @@ pub const AnySocket = union(enum) { } } - pub fn write(this: AnySocket, data: []const u8, msg_more: bool) i32 { + pub fn write(this: AnySocket, data: []const u8) i32 { return switch (this) { - .SocketTCP => |sock| sock.write(data, msg_more), - .SocketTLS => |sock| sock.write(data, msg_more), + .SocketTCP => |sock| sock.write(data), + .SocketTLS => |sock| sock.write(data), }; } diff --git a/src/deps/uws/us_socket_t.zig b/src/deps/uws/us_socket_t.zig index 2c8d3fa328..3d530ad562 100644 --- a/src/deps/uws/us_socket_t.zig +++ b/src/deps/uws/us_socket_t.zig @@ -132,8 +132,8 @@ pub const us_socket_t = opaque { return c.us_socket_context(@intFromBool(ssl), this).?; } - pub fn write(this: *us_socket_t, ssl: bool, data: []const u8, msg_more: bool) i32 { - const rc = 
c.us_socket_write(@intFromBool(ssl), this, data.ptr, @intCast(data.len), @intFromBool(msg_more)); + pub fn write(this: *us_socket_t, ssl: bool, data: []const u8) i32 { + const rc = c.us_socket_write(@intFromBool(ssl), this, data.ptr, @intCast(data.len)); debug("us_socket_write({d}, {d}) = {d}", .{ @intFromPtr(this), data.len, rc }); return rc; } @@ -151,9 +151,9 @@ pub const us_socket_t = opaque { return rc; } - pub fn rawWrite(this: *us_socket_t, ssl: bool, data: []const u8, msg_more: bool) i32 { + pub fn rawWrite(this: *us_socket_t, ssl: bool, data: []const u8) i32 { debug("us_socket_raw_write({d}, {d})", .{ @intFromPtr(this), data.len }); - return c.us_socket_raw_write(@intFromBool(ssl), this, data.ptr, @intCast(data.len), @intFromBool(msg_more)); + return c.us_socket_raw_write(@intFromBool(ssl), this, data.ptr, @intCast(data.len)); } pub fn flush(this: *us_socket_t, ssl: bool) void { @@ -204,10 +204,10 @@ pub const c = struct { pub extern fn us_socket_ext(ssl: i32, s: ?*us_socket_t) ?*anyopaque; // nullish to be safe pub extern fn us_socket_context(ssl: i32, s: ?*us_socket_t) ?*SocketContext; - pub extern fn us_socket_write(ssl: i32, s: ?*us_socket_t, data: [*c]const u8, length: i32, msg_more: i32) i32; + pub extern fn us_socket_write(ssl: i32, s: ?*us_socket_t, data: [*c]const u8, length: i32) i32; pub extern fn us_socket_ipc_write_fd(s: ?*us_socket_t, data: [*c]const u8, length: i32, fd: i32) i32; pub extern fn us_socket_write2(ssl: i32, *us_socket_t, header: ?[*]const u8, len: usize, payload: ?[*]const u8, usize) i32; - pub extern fn us_socket_raw_write(ssl: i32, s: ?*us_socket_t, data: [*c]const u8, length: i32, msg_more: i32) i32; + pub extern fn us_socket_raw_write(ssl: i32, s: ?*us_socket_t, data: [*c]const u8, length: i32) i32; pub extern fn us_socket_flush(ssl: i32, s: ?*us_socket_t) void; // if a TLS socket calls this, it will start SSL instance and call open event will also do TLS handshake if required diff --git a/src/dns.zig b/src/dns.zig index f43455d59b..77ed67ccb8 100644 --- a/src/dns.zig +++ b/src/dns.zig @@ -108,7 +108,7 @@ pub const GetAddrInfo = struct { if (!flags.isNumber()) return error.InvalidFlags; - options.flags = flags.coerce(std.c.AI, globalObject); + options.flags = try flags.coerce(std.c.AI, globalObject); // hints & ~(AI_ADDRCONFIG | AI_ALL | AI_V4MAPPED)) !== 0 const filter = ~@as(u32, @bitCast(std.c.AI{ .ALL = true, .ADDRCONFIG = true, .V4MAPPED = true })); @@ -146,7 +146,7 @@ pub const GetAddrInfo = struct { return .unspecified; if (value.isNumber()) { - return switch (value.coerce(i32, globalObject)) { + return switch (try value.coerce(i32, globalObject)) { 0 => .unspecified, 4 => .inet, 6 => .inet6, @@ -337,7 +337,7 @@ pub const GetAddrInfo = struct { var i: u32 = 0; const items: []const Result = list.items; for (items) |item| { - array.putIndex(globalThis, i, item.toJS(globalThis)); + try array.putIndex(globalThis, i, item.toJS(globalThis)); i += 1; } break :brk array; @@ -454,7 +454,7 @@ pub fn addrInfoToJSArray(addr_info: *std.c.addrinfo, globalThis: *JSC.JSGlobalOb var j: u32 = 0; var current: ?*std.c.addrinfo = addr_info; while (current) |this_node| : (current = current.?.next) { - array.putIndex( + try array.putIndex( globalThis, j, GetAddrInfo.Result.toJS( diff --git a/src/env.zig b/src/env.zig index d877c5c52c..bb84ac155d 100644 --- a/src/env.zig +++ b/src/env.zig @@ -26,12 +26,19 @@ pub const isX86 = @import("builtin").target.cpu.arch.isX86(); pub const isX64 = @import("builtin").target.cpu.arch == .x86_64; pub const isMusl = 
builtin.target.abi.isMusl();
 pub const allow_assert = isDebug or isTest or std.builtin.Mode.ReleaseSafe == @import("builtin").mode;
+pub const ci_assert = isDebug or isTest or enable_asan or (std.builtin.Mode.ReleaseSafe == @import("builtin").mode and is_canary);
 pub const show_crash_trace = isDebug or isTest or enable_asan;
 
 /// All calls to `@export` should be gated behind this check, so that code
 /// generators that compile Zig code know not to reference and compile a ton of
 /// unused code.
 pub const export_cpp_apis = if (build_options.override_no_export_cpp_apis) false else (@import("builtin").output_mode == .Obj or isTest);
 
+/// Whether or not to enable allocation tracking when the `AllocationScope`
+/// allocator is used.
+pub const enableAllocScopes = brk: {
+    break :brk isDebug or enable_asan;
+};
+
 pub const build_options = @import("build_options");
 
 /// Set if compiling with `-Dno_llvm`
diff --git a/src/env_loader.zig b/src/env_loader.zig
index dd6cfbd78a..78ce9e1cfe 100644
--- a/src/env_loader.zig
+++ b/src/env_loader.zig
@@ -85,9 +85,7 @@ pub const Loader = struct {
         this.get("bamboo.buildKey")) != null;
     }
 
-    pub fn loadTracy(this: *const Loader) void {
-        _ = this; // autofix
-    }
+    pub fn loadTracy(_: *const Loader) void {}
 
     pub fn getS3Credentials(this: *Loader) s3.S3Credentials {
         if (this.aws_credentials) |credentials| {
@@ -100,6 +98,7 @@
         var endpoint: []const u8 = "";
         var bucket: []const u8 = "";
         var session_token: []const u8 = "";
+        var insecure_http: bool = false;
 
         if (this.get("S3_ACCESS_KEY_ID")) |access_key| {
             accessKeyId = access_key;
@@ -118,9 +117,13 @@
             region = region_;
         }
         if (this.get("S3_ENDPOINT")) |endpoint_| {
-            endpoint = bun.URL.parse(endpoint_).hostWithPath();
+            const url = bun.URL.parse(endpoint_);
+            endpoint = url.hostWithPath();
+            insecure_http = url.isHTTP();
         } else if (this.get("AWS_ENDPOINT")) |endpoint_| {
-            endpoint = bun.URL.parse(endpoint_).hostWithPath();
+            const url = bun.URL.parse(endpoint_);
+            endpoint = url.hostWithPath();
+            insecure_http = url.isHTTP();
         }
         if (this.get("S3_BUCKET")) |bucket_| {
             bucket = bucket_;
@@ -140,6 +143,7 @@
             .endpoint = endpoint,
             .bucket = bucket,
             .sessionToken = session_token,
+            .insecure_http = insecure_http,
         };
 
         return this.aws_credentials.?;
diff --git a/src/errno/darwin_errno.zig b/src/errno/darwin_errno.zig
index b254f4a542..f7fb51d659 100644
--- a/src/errno/darwin_errno.zig
+++ b/src/errno/darwin_errno.zig
@@ -193,6 +193,7 @@ pub const UV_E = struct {
     pub const SOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT);
     pub const NODATA: i32 = @intFromEnum(SystemErrno.ENODATA);
     pub const UNATCH: i32 = -bun.windows.libuv.UV_EUNATCH;
+    pub const NOEXEC: i32 = @intFromEnum(SystemErrno.ENOEXEC);
 };
 pub fn getErrno(rc: anytype) E {
     if (rc == -1) {
diff --git a/src/errno/linux_errno.zig b/src/errno/linux_errno.zig
index 22ab85ce21..d4379a88c7 100644
--- a/src/errno/linux_errno.zig
+++ b/src/errno/linux_errno.zig
@@ -222,6 +222,7 @@ pub const UV_E = struct {
     pub const SOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT);
     pub const NODATA: i32 = @intFromEnum(SystemErrno.ENODATA);
     pub const UNATCH: i32 = @intFromEnum(SystemErrno.EUNATCH);
+    pub const NOEXEC: i32 = @intFromEnum(SystemErrno.ENOEXEC);
 };
 pub fn getErrno(rc: anytype) E {
     const Type = @TypeOf(rc);
diff --git a/src/errno/windows_errno.zig b/src/errno/windows_errno.zig
index 106785702b..b486f10a2c 100644
--- a/src/errno/windows_errno.zig
+++ b/src/errno/windows_errno.zig
@@ -222,6 +222,7 @@ pub const E = enum(u16) {
     UV_ESOCKTNOSUPPORT = -uv.UV_ESOCKTNOSUPPORT,
     UV_ERRNO_MAX = -uv.UV_ERRNO_MAX,
     UV_EUNATCH = -uv.UV_EUNATCH,
+    UV_ENOEXEC = -uv.UV_ENOEXEC,
 };
 
 pub const S = struct {
@@ -524,6 +525,7 @@ pub const SystemErrno = enum(u16) {
     UV_ESOCKTNOSUPPORT = -uv.UV_ESOCKTNOSUPPORT,
     UV_ERRNO_MAX = -uv.UV_ERRNO_MAX,
     UV_EUNATCH = -uv.UV_EUNATCH,
+    UV_ENOEXEC = -uv.UV_ENOEXEC,
 
     pub const max = 137;
@@ -1162,6 +1164,7 @@ pub const UV_E = struct {
     pub const SOCKTNOSUPPORT = -uv.UV_ESOCKTNOSUPPORT;
    pub const NODATA = -uv.UV_ENODATA;
     pub const UNATCH = -uv.UV_EUNATCH;
+    pub const NOEXEC = -uv.UV_ENOEXEC;
 };
 const std = @import("std");
 const bun = @import("bun");
diff --git a/src/fd.zig b/src/fd.zig
index afdf886a2e..d2a9f8383c 100644
--- a/src/fd.zig
+++ b/src/fd.zig
@@ -538,6 +538,36 @@ pub const FD = packed struct(backing_int) {
         return @enumFromInt(@as(backing_int, @bitCast(fd)));
     }
 
+    pub fn makePath(dir: FD, comptime T: type, subpath: []const T) !void {
+        return switch (T) {
+            u8 => bun.makePath(dir.stdDir(), subpath),
+            u16 => bun.makePathW(dir.stdDir(), subpath),
+            else => @compileError("unexpected type"),
+        };
+    }
+
+    pub fn makeOpenPath(dir: FD, comptime T: type, subpath: []const T) !FD {
+        return switch (T) {
+            u8 => {
+                if (comptime Environment.isWindows) {
+                    return bun.sys.openDirAtWindowsA(dir, subpath, .{ .can_rename_or_delete = false, .create = true, .read_only = false }).unwrap();
+                }
+
+                return FD.fromStdDir(try dir.stdDir().makeOpenPath(subpath, .{ .iterate = true, .access_sub_paths = true }));
+            },
+            u16 => {
+                if (comptime !Environment.isWindows) @compileError("unexpected type");
+                return bun.sys.openDirAtWindows(dir, subpath, .{ .can_rename_or_delete = false, .create = true, .read_only = false }).unwrap();
+            },
+            else => @compileError("unexpected type"),
+        };
+    }
+
+    // TODO: make our own version of deleteTree
+    pub fn deleteTree(dir: FD, subpath: []const u8) !void {
+        try dir.stdDir().deleteTree(subpath);
+    }
+
     // The following functions are from bun.sys but with the 'f' prefix dropped
     // where it is relevant. These functions all take FD as the first argument,
     // so that makes them Zig methods, even when declared in a separate file.
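The new `FD.makePath`/`makeOpenPath`/`deleteTree` helpers above fold the `bun.sys` and `std.fs` directory plumbing behind the file-descriptor type, selecting the Windows wide-path branch at comptime. A minimal usage sketch; the call site and the `"node_modules/.cache"` path are hypothetical, and it assumes the usual `bun` module import inside the Bun tree:

```zig
const bun = @import("bun");

// Hypothetical helper: ensure a cache directory exists under an
// already-open project root and hand back an open descriptor for it.
fn ensureCacheDir(project_root: bun.FD) !bun.FD {
    // makeOpenPath creates missing parents, then opens the leaf directory;
    // on Windows the openDirAtWindows* branches are chosen at comptime.
    return try project_root.makeOpenPath(u8, "node_modules/.cache");
}
```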
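Most of the mechanical churn earlier in this diff (csrf.zig, css_internals.zig, color_js.zig, c_ares.zig, dns.zig) is one migration: `JSValue` accessors such as `getLength`, `getIndex`, `putIndex`, `coerce`, and `arrayIterator`/`next` now return `bun.JSError!T`, so a pending JavaScript exception propagates as a Zig error instead of being silently dropped. A sketch of the resulting call-site pattern; the function itself is hypothetical and assumes the tree's JSC bindings:

```zig
const bun = @import("bun");
const JSC = bun.JSC;

// Sum a JS array of numbers. Every accessor can now observe a thrown
// exception (e.g. from a Proxy trap or valueOf), so each call gets `try`.
fn sumArray(globalThis: *JSC.JSGlobalObject, value: JSC.JSValue) bun.JSError!i32 {
    var total: i32 = 0;
    var iter = try value.arrayIterator(globalThis); // may throw
    while (try iter.next()) |item| { // iteration itself may throw
        total +%= try item.coerce(i32, globalThis); // coercion can run user code
    }
    return total;
}
```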
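The c_ares.zig error changes give every `DNSException` a Node-style message and strip the internal "DNS_" prefix from the exposed `code` via `this.code()[4..]`. A self-contained check of the resulting message shape, using illustrative values and plain `std` only:

```zig
const std = @import("std");

test "DNS error message format" {
    const syscall = "getaddrinfo";
    const code = "DNS_ENOTFOUND"; // internal code; [4..] drops the "DNS_" prefix
    const hostname = "example.com";
    const msg = try std.fmt.allocPrint(std.testing.allocator, "{s} {s} {s}", .{ syscall, code[4..], hostname });
    defer std.testing.allocator.free(msg);
    // Without a hostname, toJSWithSyscall produces just "getaddrinfo ENOTFOUND".
    try std.testing.expectEqualStrings("getaddrinfo ENOTFOUND example.com", msg);
}
```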
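The env_loader.zig change above still keeps only the host and path of `S3_ENDPOINT`/`AWS_ENDPOINT`, but now records whether the URL used plain HTTP so the credential object can opt into insecure transport explicitly. An illustrative sketch; `bun.URL` is internal to the tree, and the exact output values shown are assumptions about typical input:

```zig
// For S3_ENDPOINT=http://localhost:9000 the parse step above yields roughly:
const url = bun.URL.parse("http://localhost:9000");
const endpoint = url.hostWithPath(); // assumed "localhost:9000" — scheme stripped
const insecure_http = url.isHTTP(); // true — stored on S3Credentials.insecure_http
```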
diff --git a/src/feature_flags.zig b/src/feature_flags.zig index cb01ba3fac..a18d3c4add 100644 --- a/src/feature_flags.zig +++ b/src/feature_flags.zig @@ -15,8 +15,8 @@ pub const RuntimeFeatureFlag = enum { BUN_ENABLE_EXPERIMENTAL_SHELL_BUILTINS, BUN_FEATURE_FLAG_DISABLE_ADDRCONFIG, BUN_FEATURE_FLAG_DISABLE_ASYNC_TRANSPILER, - BUN_FEATURE_FLAG_DISABLE_DNS_CACHE, BUN_FEATURE_FLAG_DISABLE_DNS_CACHE_LIBINFO, + BUN_FEATURE_FLAG_DISABLE_DNS_CACHE, BUN_FEATURE_FLAG_DISABLE_INSTALL_INDEX, BUN_FEATURE_FLAG_DISABLE_IO_POOL, BUN_FEATURE_FLAG_DISABLE_IPV4, @@ -28,6 +28,7 @@ pub const RuntimeFeatureFlag = enum { BUN_FEATURE_FLAG_DISABLE_UV_FS_COPYFILE, BUN_FEATURE_FLAG_EXPERIMENTAL_BAKE, BUN_FEATURE_FLAG_FORCE_IO_POOL, + BUN_FEATURE_FLAG_FORCE_WINDOWS_JUNCTIONS, BUN_FEATURE_FLAG_LAST_MODIFIED_PRETEND_304, BUN_FEATURE_FLAG_NO_LIBDEFLATE, BUN_INSTRUMENTS, diff --git a/src/fmt.zig b/src/fmt.zig index a3540ad842..2ecd2ce4d0 100644 --- a/src/fmt.zig +++ b/src/fmt.zig @@ -282,7 +282,7 @@ pub fn formatUTF16Type(comptime Slice: type, slice_: Slice, writer: anytype) !vo var slice = slice_; while (slice.len > 0) { - const result = strings.copyUTF16IntoUTF8(chunk, Slice, slice, true); + const result = strings.copyUTF16IntoUTF8(chunk, Slice, slice); if (result.read == 0 or result.written == 0) break; try writer.writeAll(chunk[0..result.written]); @@ -308,7 +308,7 @@ pub fn formatUTF16TypeWithPathOptions(comptime Slice: type, slice_: Slice, write var slice = slice_; while (slice.len > 0) { - const result = strings.copyUTF16IntoUTF8(chunk, Slice, slice, true); + const result = strings.copyUTF16IntoUTF8(chunk, Slice, slice); if (result.read == 0 or result.written == 0) break; diff --git a/src/fs.zig b/src/fs.zig index c3a8533a24..a4d5b822ca 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -624,7 +624,7 @@ pub const FileSystem = struct { var existing = this.entries.atIndex(index) orelse return null; if (existing.* == .entries) { if (existing.entries.generation < generation) { - var handle = bun.openDirForIteration(std.fs.cwd(), existing.entries.dir) catch |err| { + var handle = bun.openDirForIteration(FD.cwd(), existing.entries.dir).unwrap() catch |err| { existing.entries.data.clearAndFree(bun.fs_allocator); return this.readDirectoryError(existing.entries.dir, err) catch unreachable; @@ -636,7 +636,7 @@ pub const FileSystem = struct { &existing.entries.data, existing.entries.dir, generation, - handle, + handle.stdDir(), void, void{}, @@ -982,7 +982,7 @@ pub const FileSystem = struct { ) !DirEntry { _ = fs; - var iter = bun.iterateDir(handle); + var iter = bun.iterateDir(.fromStdDir(handle)); var dir = DirEntry.init(_dir, generation); const allocator = bun.fs_allocator; errdefer dir.deinit(allocator); @@ -1382,10 +1382,10 @@ pub const FileSystem = struct { if (comptime bun.Environment.isWindows) { var file = bun.sys.getFileAttributes(absolute_path_c) orelse return error.FileNotFound; var depth: usize = 0; - const buf2: *bun.PathBuffer = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf2); - const buf3: *bun.PathBuffer = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(buf3); + const buf2: *bun.PathBuffer = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf2); + const buf3: *bun.PathBuffer = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf3); var current_buf: *bun.PathBuffer = buf2; var other_buf: *bun.PathBuffer = &outpath; diff --git a/src/glob/GlobWalker.zig b/src/glob/GlobWalker.zig index 2fb0b13f2d..b69fe5aabe 100644 --- a/src/glob/GlobWalker.zig +++ 
b/src/glob/GlobWalker.zig @@ -153,7 +153,7 @@ pub const SyscallAccessor = struct { } pub inline fn iterate(dir: Handle) DirIter { - return .{ .value = DirIterator.WrappedIterator.init(dir.value.stdDir()) }; + return .{ .value = DirIterator.WrappedIterator.init(dir.value) }; } }; diff --git a/src/hive_array.zig b/src/hive_array.zig index 042c4d2387..c1881a5c4b 100644 --- a/src/hive_array.zig +++ b/src/hive_array.zig @@ -3,6 +3,7 @@ const bun = @import("bun"); const assert = bun.assert; const mem = std.mem; const testing = std.testing; +const OOM = bun.OOM; /// An array that efficiently tracks which elements are in use. /// The pointers are intended to be stable @@ -114,7 +115,7 @@ pub fn HiveArray(comptime T: type, comptime capacity: u16) type { return self.allocator.create(T) catch bun.outOfMemory(); } - pub fn tryGet(self: *This) !*T { + pub fn tryGet(self: *This) OOM!*T { if (comptime capacity > 0) { if (self.hive.get()) |value| { return value; diff --git a/src/http.zig b/src/http.zig index 3084a08d67..7c9659e0eb 100644 --- a/src/http.zig +++ b/src/http.zig @@ -1,55 +1,12 @@ -const bun = @import("bun"); -const picohttp = bun.picohttp; -const JSC = bun.JSC; -const string = bun.string; -const Output = bun.Output; -const Global = bun.Global; -const Environment = bun.Environment; -const strings = bun.strings; -const MutableString = bun.MutableString; -const FeatureFlags = bun.FeatureFlags; -const stringZ = bun.stringZ; - -const Loc = bun.logger.Loc; -const Log = bun.logger.Log; -const DotEnv = @import("./env_loader.zig"); -const std = @import("std"); -const URL = @import("./url.zig").URL; -const PercentEncoding = @import("./url.zig").PercentEncoding; -pub const Method = @import("./http/method.zig").Method; -const Api = @import("./api/schema.zig").Api; -const HTTPClient = @This(); -const Zlib = @import("./zlib.zig"); -const Brotli = bun.brotli; -const zstd = bun.zstd; -const StringBuilder = bun.StringBuilder; -const ThreadPool = bun.ThreadPool; -const posix = std.posix; -const SOCK = posix.SOCK; -const Arena = @import("./allocators/mimalloc_arena.zig").Arena; -const BoringSSL = bun.BoringSSL.c; -const Progress = bun.Progress; -const SSLConfig = @import("./bun.js/api/server.zig").ServerConfig.SSLConfig; -const SSLWrapper = @import("./bun.js/api/bun/ssl_wrapper.zig").SSLWrapper; -const Blob = bun.webcore.Blob; -const FetchHeaders = bun.webcore.FetchHeaders; -const uws = bun.uws; -pub const MimeType = @import("./http/mime_type.zig"); -pub const URLPath = @import("./http/url_path.zig"); // This becomes Arena.allocator pub var default_allocator: std.mem.Allocator = undefined; -var default_arena: Arena = undefined; +pub var default_arena: Arena = undefined; pub var http_thread: HTTPThread = undefined; -const HiveArray = @import("./hive_array.zig").HiveArray; -const Batch = bun.ThreadPool.Batch; -const TaggedPointerUnion = @import("./ptr.zig").TaggedPointerUnion; -const DeadSocket = opaque {}; -var dead_socket = @as(*DeadSocket, @ptrFromInt(1)); + //TODO: this needs to be freed when Worker Threads are implemented -var socket_async_http_abort_tracker = std.AutoArrayHashMap(u32, uws.InternalSocket).init(bun.default_allocator); -var async_http_id_monotonic: std.atomic.Value(u32) = std.atomic.Value(u32).init(0); +pub var socket_async_http_abort_tracker = std.AutoArrayHashMap(u32, uws.InternalSocket).init(bun.default_allocator); +pub var async_http_id_monotonic: std.atomic.Value(u32) = std.atomic.Value(u32).init(0); const MAX_REDIRECT_URL_LENGTH = 128 * 1024; -var custom_ssl_context_map = 
std.AutoArrayHashMap(*SSLConfig, *NewHTTPContext(true)).init(bun.default_allocator); pub var max_http_header_size: usize = 16 * 1024; comptime { @@ -68,1481 +25,9 @@ var shared_response_headers_buf: [256]picohttp.Header = undefined; pub const end_of_chunked_http1_1_encoding_response_body = "0\r\n\r\n"; -pub const Signals = struct { - header_progress: ?*std.atomic.Value(bool) = null, - body_streaming: ?*std.atomic.Value(bool) = null, - aborted: ?*std.atomic.Value(bool) = null, - cert_errors: ?*std.atomic.Value(bool) = null, - - pub fn isEmpty(this: *const Signals) bool { - return this.aborted == null and this.body_streaming == null and this.header_progress == null and this.cert_errors == null; - } - - pub const Store = struct { - header_progress: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), - body_streaming: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), - aborted: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), - cert_errors: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), - - pub fn to(this: *Store) Signals { - return .{ - .header_progress = &this.header_progress, - .body_streaming = &this.body_streaming, - .aborted = &this.aborted, - .cert_errors = &this.cert_errors, - }; - } - }; - - pub fn get(this: Signals, comptime field: std.meta.FieldEnum(Signals)) bool { - var ptr: *std.atomic.Value(bool) = @field(this, @tagName(field)) orelse return false; - return ptr.load(.monotonic); - } -}; - -pub const FetchRedirect = enum(u8) { - follow, - manual, - @"error", - - pub const Map = bun.ComptimeStringMap(FetchRedirect, .{ - .{ "follow", .follow }, - .{ "manual", .manual }, - .{ "error", .@"error" }, - }); -}; - -pub const HTTPRequestBody = union(enum) { - bytes: []const u8, - sendfile: Sendfile, - stream: struct { - buffer: bun.io.StreamBuffer, - ended: bool, - has_backpressure: bool = false, - - pub fn hasEnded(this: *@This()) bool { - return this.ended and this.buffer.isEmpty(); - } - }, - - pub fn isStream(this: *const HTTPRequestBody) bool { - return this.* == .stream; - } - - pub fn deinit(this: *HTTPRequestBody) void { - switch (this.*) { - .sendfile, .bytes => {}, - .stream => |*stream| stream.buffer.deinit(), - } - } - pub fn len(this: *const HTTPRequestBody) usize { - return switch (this.*) { - .bytes => this.bytes.len, - .sendfile => this.sendfile.content_size, - // unknow amounts - .stream => std.math.maxInt(usize), - }; - } -}; - -pub const Sendfile = struct { - fd: bun.FileDescriptor, - remain: usize = 0, - offset: usize = 0, - content_size: usize = 0, - - pub fn isEligible(url: bun.URL) bool { - if (comptime Environment.isWindows or !FeatureFlags.streaming_file_uploads_for_http_client) { - return false; - } - return url.isHTTP() and url.href.len > 0; - } - - pub fn write( - this: *Sendfile, - socket: NewHTTPContext(false).HTTPSocket, - ) Status { - const adjusted_count_temporary = @min(@as(u64, this.remain), @as(u63, std.math.maxInt(u63))); - // TODO we should not need this int cast; improve the return type of `@min` - const adjusted_count = @as(u63, @intCast(adjusted_count_temporary)); - - if (Environment.isLinux) { - var signed_offset = @as(i64, @intCast(this.offset)); - const begin = this.offset; - const val = - // this does the syscall directly, without libc - std.os.linux.sendfile(socket.fd().cast(), this.fd.cast(), &signed_offset, this.remain); - this.offset = @as(u64, @intCast(signed_offset)); - - const errcode = bun.sys.getErrno(val); - - this.remain -|= @as(u64, @intCast(this.offset -| begin)); - - if (errcode != 
.SUCCESS or this.remain == 0 or val == 0) { - if (errcode == .SUCCESS) { - return .{ .done = {} }; - } - - return .{ .err = bun.errnoToZigErr(errcode) }; - } - } else if (Environment.isPosix) { - var sbytes: std.posix.off_t = adjusted_count; - const signed_offset = @as(i64, @bitCast(@as(u64, this.offset))); - const errcode = bun.sys.getErrno(std.c.sendfile( - this.fd.cast(), - socket.fd().cast(), - signed_offset, - &sbytes, - null, - 0, - )); - const wrote = @as(u64, @intCast(sbytes)); - this.offset +|= wrote; - this.remain -|= wrote; - if (errcode != .AGAIN or this.remain == 0 or sbytes == 0) { - if (errcode == .SUCCESS) { - return .{ .done = {} }; - } - - return .{ .err = bun.errnoToZigErr(errcode) }; - } - } - - return .{ .again = {} }; - } - - pub const Status = union(enum) { - done: void, - err: anyerror, - again: void, - }; -}; - -const ProxyTunnel = struct { - const RefCount = bun.ptr.RefCount(@This(), "ref_count", ProxyTunnel.deinit, .{}); - pub const ref = ProxyTunnel.RefCount.ref; - pub const deref = ProxyTunnel.RefCount.deref; - - wrapper: ?ProxyTunnelWrapper = null, - shutdown_err: anyerror = error.ConnectionClosed, - // active socket is the socket that is currently being used - socket: union(enum) { - tcp: NewHTTPContext(false).HTTPSocket, - ssl: NewHTTPContext(true).HTTPSocket, - none: void, - } = .{ .none = {} }, - write_buffer: bun.io.StreamBuffer = .{}, - ref_count: RefCount, - - const ProxyTunnelWrapper = SSLWrapper(*HTTPClient); - - fn onOpen(this: *HTTPClient) void { - log("ProxyTunnel onOpen", .{}); - this.state.response_stage = .proxy_handshake; - this.state.request_stage = .proxy_handshake; - if (this.proxy_tunnel) |proxy| { - proxy.ref(); - defer proxy.deref(); - if (proxy.wrapper) |*wrapper| { - var ssl_ptr = wrapper.ssl orelse return; - const _hostname = this.hostname orelse this.url.hostname; - - var hostname: [:0]const u8 = ""; - var hostname_needs_free = false; - if (!strings.isIPAddress(_hostname)) { - if (_hostname.len < temp_hostname.len) { - @memcpy(temp_hostname[0.._hostname.len], _hostname); - temp_hostname[_hostname.len] = 0; - hostname = temp_hostname[0.._hostname.len :0]; - } else { - hostname = bun.default_allocator.dupeZ(u8, _hostname) catch unreachable; - hostname_needs_free = true; - } - } - - defer if (hostname_needs_free) bun.default_allocator.free(hostname); - ssl_ptr.configureHTTPClient(hostname); - } - } - } - - fn onData(this: *HTTPClient, decoded_data: []const u8) void { - if (decoded_data.len == 0) return; - log("ProxyTunnel onData decoded {}", .{decoded_data.len}); - if (this.proxy_tunnel) |proxy| { - proxy.ref(); - defer proxy.deref(); - switch (this.state.response_stage) { - .body => { - log("ProxyTunnel onData body", .{}); - if (decoded_data.len == 0) return; - const report_progress = this.handleResponseBody(decoded_data, false) catch |err| { - proxy.close(err); - return; - }; - - if (report_progress) { - switch (proxy.socket) { - .ssl => |socket| { - this.progressUpdate(true, &http_thread.https_context, socket); - }, - .tcp => |socket| { - this.progressUpdate(false, &http_thread.http_context, socket); - }, - .none => {}, - } - return; - } - }, - .body_chunk => { - log("ProxyTunnel onData body_chunk", .{}); - if (decoded_data.len == 0) return; - const report_progress = this.handleResponseBodyChunkedEncoding(decoded_data) catch |err| { - proxy.close(err); - return; - }; - - if (report_progress) { - switch (proxy.socket) { - .ssl => |socket| { - this.progressUpdate(true, &http_thread.https_context, socket); - }, - .tcp => |socket| { - 
this.progressUpdate(false, &http_thread.http_context, socket); - }, - .none => {}, - } - return; - } - }, - .proxy_headers => { - log("ProxyTunnel onData proxy_headers", .{}); - switch (proxy.socket) { - .ssl => |socket| { - this.handleOnDataHeaders(true, decoded_data, &http_thread.https_context, socket); - }, - .tcp => |socket| { - this.handleOnDataHeaders(false, decoded_data, &http_thread.http_context, socket); - }, - .none => {}, - } - }, - else => { - log("ProxyTunnel onData unexpected data", .{}); - this.state.pending_response = null; - proxy.close(error.UnexpectedData); - }, - } - } - } - - fn onHandshake(this: *HTTPClient, handshake_success: bool, ssl_error: uws.us_bun_verify_error_t) void { - if (this.proxy_tunnel) |proxy| { - log("ProxyTunnel onHandshake", .{}); - proxy.ref(); - defer proxy.deref(); - this.state.response_stage = .proxy_headers; - this.state.request_stage = .proxy_headers; - this.state.request_sent_len = 0; - const handshake_error = HTTPCertError{ - .error_no = ssl_error.error_no, - .code = if (ssl_error.code == null) "" else ssl_error.code[0..bun.len(ssl_error.code) :0], - .reason = if (ssl_error.code == null) "" else ssl_error.reason[0..bun.len(ssl_error.reason) :0], - }; - if (handshake_success) { - log("ProxyTunnel onHandshake success", .{}); - // handshake completed but we may have ssl errors - this.flags.did_have_handshaking_error = handshake_error.error_no != 0; - if (this.flags.reject_unauthorized) { - // only reject the connection if reject_unauthorized == true - if (this.flags.did_have_handshaking_error) { - proxy.close(BoringSSL.getCertErrorFromNo(handshake_error.error_no)); - return; - } - - // if checkServerIdentity returns false, we dont call open this means that the connection was rejected - bun.assert(proxy.wrapper != null); - const ssl_ptr = proxy.wrapper.?.ssl orelse return; - - switch (proxy.socket) { - .ssl => |socket| { - if (!this.checkServerIdentity(true, socket, handshake_error, ssl_ptr, false)) { - log("ProxyTunnel onHandshake checkServerIdentity failed", .{}); - this.flags.did_have_handshaking_error = true; - - this.unregisterAbortTracker(); - return; - } - }, - .tcp => |socket| { - if (!this.checkServerIdentity(false, socket, handshake_error, ssl_ptr, false)) { - log("ProxyTunnel onHandshake checkServerIdentity failed", .{}); - this.flags.did_have_handshaking_error = true; - this.unregisterAbortTracker(); - return; - } - }, - .none => {}, - } - } - - switch (proxy.socket) { - .ssl => |socket| { - this.onWritable(true, true, socket); - }, - .tcp => |socket| { - this.onWritable(true, false, socket); - }, - .none => {}, - } - } else { - log("ProxyTunnel onHandshake failed", .{}); - // if we are here is because server rejected us, and the error_no is the cause of this - // if we set reject_unauthorized == false this means the server requires custom CA aka NODE_EXTRA_CA_CERTS - if (this.flags.did_have_handshaking_error and handshake_error.error_no != 0) { - proxy.close(BoringSSL.getCertErrorFromNo(handshake_error.error_no)); - return; - } - // if handshake_success it self is false, this means that the connection was rejected - proxy.close(error.ConnectionRefused); - return; - } - } - } - - pub fn write(this: *HTTPClient, encoded_data: []const u8) void { - if (this.proxy_tunnel) |proxy| { - const written = switch (proxy.socket) { - .ssl => |socket| socket.write(encoded_data, false), - .tcp => |socket| socket.write(encoded_data, false), - .none => 0, - }; - const pending = encoded_data[@intCast(written)..]; - if (pending.len > 0) { - // lets 
flush when we are truly writable - proxy.write_buffer.write(pending) catch bun.outOfMemory(); - } - } - } - - fn onClose(this: *HTTPClient) void { - log("ProxyTunnel onClose {s}", .{if (this.proxy_tunnel == null) "tunnel is detached" else "tunnel exists"}); - if (this.proxy_tunnel) |proxy| { - proxy.ref(); - // defer the proxy deref the proxy tunnel may still be in use after triggering the close callback - defer http_thread.scheduleProxyDeref(proxy); - const err = proxy.shutdown_err; - switch (proxy.socket) { - .ssl => |socket| { - this.closeAndFail(err, true, socket); - }, - .tcp => |socket| { - this.closeAndFail(err, false, socket); - }, - .none => {}, - } - proxy.detachSocket(); - } - } - - fn start(this: *HTTPClient, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, ssl_options: JSC.API.ServerConfig.SSLConfig, start_payload: []const u8) void { - const proxy_tunnel = bun.new(ProxyTunnel, .{ - .ref_count = .init(), - }); - - var custom_options = ssl_options; - // we always request the cert so we can verify it and also we manually abort the connection if the hostname doesn't match - custom_options.reject_unauthorized = 0; - custom_options.request_cert = 1; - proxy_tunnel.wrapper = SSLWrapper(*HTTPClient).init(custom_options, true, .{ - .onOpen = ProxyTunnel.onOpen, - .onData = ProxyTunnel.onData, - .onHandshake = ProxyTunnel.onHandshake, - .onClose = ProxyTunnel.onClose, - .write = ProxyTunnel.write, - .ctx = this, - }) catch |err| { - if (err == error.OutOfMemory) { - bun.outOfMemory(); - } - - // invalid TLS Options - proxy_tunnel.detachAndDeref(); - this.closeAndFail(error.ConnectionRefused, is_ssl, socket); - return; - }; - this.proxy_tunnel = proxy_tunnel; - if (is_ssl) { - proxy_tunnel.socket = .{ .ssl = socket }; - } else { - proxy_tunnel.socket = .{ .tcp = socket }; - } - if (start_payload.len > 0) { - log("proxy tunnel start with payload", .{}); - proxy_tunnel.wrapper.?.startWithPayload(start_payload); - } else { - log("proxy tunnel start", .{}); - proxy_tunnel.wrapper.?.start(); - } - } - - pub fn close(this: *ProxyTunnel, err: anyerror) void { - this.shutdown_err = err; - this.shutdown(); - } - - pub fn shutdown(this: *ProxyTunnel) void { - if (this.wrapper) |*wrapper| { - // fast shutdown the connection - _ = wrapper.shutdown(true); - } - } - - pub fn onWritable(this: *ProxyTunnel, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket) void { - log("ProxyTunnel onWritable", .{}); - this.ref(); - defer this.deref(); - defer if (this.wrapper) |*wrapper| { - // Cycle to through the SSL state machine - _ = wrapper.flush(); - }; - - const encoded_data = this.write_buffer.slice(); - if (encoded_data.len == 0) { - return; - } - const written = socket.write(encoded_data, true); - if (written == encoded_data.len) { - this.write_buffer.reset(); - } else { - this.write_buffer.cursor += @intCast(written); - } - } - - pub fn receiveData(this: *ProxyTunnel, buf: []const u8) void { - this.ref(); - defer this.deref(); - if (this.wrapper) |*wrapper| { - wrapper.receiveData(buf); - } - } - - pub fn writeData(this: *ProxyTunnel, buf: []const u8) !usize { - if (this.wrapper) |*wrapper| { - return try wrapper.writeData(buf); - } - return error.ConnectionClosed; - } - - pub fn detachSocket(this: *ProxyTunnel) void { - this.socket = .{ .none = {} }; - } - - pub fn detachAndDeref(this: *ProxyTunnel) void { - this.detachSocket(); - this.deref(); - } - - fn deinit(this: *ProxyTunnel) void { - this.socket = .{ .none = {} }; - if (this.wrapper) |*wrapper| { - wrapper.deinit(); 
- this.wrapper = null; - } - this.write_buffer.deinit(); - bun.destroy(this); - } -}; - -pub const HTTPCertError = struct { - error_no: i32 = 0, - code: [:0]const u8 = "", - reason: [:0]const u8 = "", -}; - -pub const InitError = error{ - FailedToOpenSocket, - LoadCAFile, - InvalidCAFile, - InvalidCA, -}; - -fn NewHTTPContext(comptime ssl: bool) type { - return struct { - const pool_size = 64; - const PooledSocket = struct { - http_socket: HTTPSocket, - hostname_buf: [MAX_KEEPALIVE_HOSTNAME]u8 = undefined, - hostname_len: u8 = 0, - port: u16 = 0, - /// If you set `rejectUnauthorized` to `false`, the connection fails to verify, - did_have_handshaking_error_while_reject_unauthorized_is_false: bool = false, - }; - - pub fn markSocketAsDead(socket: HTTPSocket) void { - if (socket.ext(**anyopaque)) |ctx| { - ctx.* = bun.cast(**anyopaque, ActiveSocket.init(&dead_socket).ptr()); - } - } - - fn terminateSocket(socket: HTTPSocket) void { - markSocketAsDead(socket); - socket.close(.failure); - } - - fn closeSocket(socket: HTTPSocket) void { - markSocketAsDead(socket); - socket.close(.normal); - } - - fn getTagged(ptr: *anyopaque) ActiveSocket { - return ActiveSocket.from(bun.cast(**anyopaque, ptr).*); - } - - pub fn getTaggedFromSocket(socket: HTTPSocket) ActiveSocket { - if (socket.ext(anyopaque)) |ctx| { - return getTagged(ctx); - } - return ActiveSocket.init(&dead_socket); - } - - pub const PooledSocketHiveAllocator = bun.HiveArray(PooledSocket, pool_size); - - pending_sockets: PooledSocketHiveAllocator, - us_socket_context: *uws.SocketContext, - - const Context = @This(); - pub const HTTPSocket = uws.NewSocketHandler(ssl); - - pub fn context() *@This() { - if (comptime ssl) { - return &http_thread.https_context; - } else { - return &http_thread.http_context; - } - } - - const ActiveSocket = TaggedPointerUnion(.{ - *DeadSocket, - HTTPClient, - PooledSocket, - }); - const ssl_int = @as(c_int, @intFromBool(ssl)); - - const MAX_KEEPALIVE_HOSTNAME = 128; - - pub fn sslCtx(this: *@This()) *BoringSSL.SSL_CTX { - if (comptime !ssl) { - unreachable; - } - - return @as(*BoringSSL.SSL_CTX, @ptrCast(this.us_socket_context.getNativeHandle(true))); - } - - pub fn deinit(this: *@This()) void { - this.us_socket_context.deinit(ssl); - bun.default_allocator.destroy(this); - } - - pub fn initWithClientConfig(this: *@This(), client: *HTTPClient) InitError!void { - if (!comptime ssl) { - @compileError("ssl only"); - } - var opts = client.tls_props.?.asUSockets(); - opts.request_cert = 1; - opts.reject_unauthorized = 0; - try this.initWithOpts(&opts); - } - - fn initWithOpts(this: *@This(), opts: *const uws.SocketContext.BunSocketContextOptions) InitError!void { - if (!comptime ssl) { - @compileError("ssl only"); - } - - var err: uws.create_bun_socket_error_t = .none; - const socket = uws.SocketContext.createSSLContext(http_thread.loop.loop, @sizeOf(usize), opts.*, &err); - if (socket == null) { - return switch (err) { - .load_ca_file => error.LoadCAFile, - .invalid_ca_file => error.InvalidCAFile, - .invalid_ca => error.InvalidCA, - else => error.FailedToOpenSocket, - }; - } - this.us_socket_context = socket.?; - this.sslCtx().setup(); - - HTTPSocket.configure( - this.us_socket_context, - false, - anyopaque, - Handler, - ); - } - - pub fn initWithThreadOpts(this: *@This(), init_opts: *const HTTPThread.InitOpts) InitError!void { - if (!comptime ssl) { - @compileError("ssl only"); - } - var opts: uws.SocketContext.BunSocketContextOptions = .{ - .ca = if (init_opts.ca.len > 0) @ptrCast(init_opts.ca) else null, - 
.ca_count = @intCast(init_opts.ca.len), - .ca_file_name = if (init_opts.abs_ca_file_name.len > 0) init_opts.abs_ca_file_name else null, - .request_cert = 1, - }; - - try this.initWithOpts(&opts); - } - - pub fn init(this: *@This()) void { - if (comptime ssl) { - const opts: uws.SocketContext.BunSocketContextOptions = .{ - // we request the cert so we load root certs and can verify it - .request_cert = 1, - // we manually abort the connection if the hostname doesn't match - .reject_unauthorized = 0, - }; - var err: uws.create_bun_socket_error_t = .none; - this.us_socket_context = uws.SocketContext.createSSLContext(http_thread.loop.loop, @sizeOf(usize), opts, &err).?; - - this.sslCtx().setup(); - } else { - this.us_socket_context = uws.SocketContext.createNoSSLContext(http_thread.loop.loop, @sizeOf(usize)).?; - } - - HTTPSocket.configure( - this.us_socket_context, - false, - anyopaque, - Handler, - ); - } - - /// Attempt to keep the socket alive by reusing it for another request. - /// If no space is available, close the socket. - /// - /// If `did_have_handshaking_error_while_reject_unauthorized_is_false` - /// is set, then we can only reuse the socket for HTTP Keep Alive if - /// `reject_unauthorized` is set to `false`. - pub fn releaseSocket(this: *@This(), socket: HTTPSocket, did_have_handshaking_error_while_reject_unauthorized_is_false: bool, hostname: []const u8, port: u16) void { - // log("releaseSocket(0x{})", .{bun.fmt.hexIntUpper(@intFromPtr(socket.socket))}); - - if (comptime Environment.allow_assert) { - assert(!socket.isClosed()); - assert(!socket.isShutdown()); - assert(socket.isEstablished()); - } - assert(hostname.len > 0); - assert(port > 0); - - if (hostname.len <= MAX_KEEPALIVE_HOSTNAME and !socket.isClosedOrHasError() and socket.isEstablished()) { - if (this.pending_sockets.get()) |pending| { - if (socket.ext(**anyopaque)) |ctx| { - ctx.* = bun.cast(**anyopaque, ActiveSocket.init(pending).ptr()); - } - socket.flush(); - socket.timeout(0); - socket.setTimeoutMinutes(5); - - pending.http_socket = socket; - pending.did_have_handshaking_error_while_reject_unauthorized_is_false = did_have_handshaking_error_while_reject_unauthorized_is_false; - @memcpy(pending.hostname_buf[0..hostname.len], hostname); - pending.hostname_len = @as(u8, @truncate(hostname.len)); - pending.port = port; - - log("Keep-Alive release {s}:{d}", .{ - hostname, - port, - }); - return; - } - } - log("close socket", .{}); - closeSocket(socket); - } - - pub const Handler = struct { - pub fn onOpen( - ptr: *anyopaque, - socket: HTTPSocket, - ) void { - const active = getTagged(ptr); - if (active.get(HTTPClient)) |client| { - if (client.onOpen(comptime ssl, socket)) |_| { - return; - } else |_| { - log("Unable to open socket", .{}); - terminateSocket(socket); - return; - } - } - - if (active.get(PooledSocket)) |pooled| { - addMemoryBackToPool(pooled); - return; - } - - log("Unexpected open on unknown socket", .{}); - terminateSocket(socket); - } - pub fn onHandshake( - ptr: *anyopaque, - socket: HTTPSocket, - success: i32, - ssl_error: uws.us_bun_verify_error_t, - ) void { - const handshake_success = if (success == 1) true else false; - - const handshake_error = HTTPCertError{ - .error_no = ssl_error.error_no, - .code = if (ssl_error.code == null) "" else ssl_error.code[0..bun.len(ssl_error.code) :0], - .reason = if (ssl_error.code == null) "" else ssl_error.reason[0..bun.len(ssl_error.reason) :0], - }; - - const active = getTagged(ptr); - if (active.get(HTTPClient)) |client| { - // handshake completed but we 
may have ssl errors - client.flags.did_have_handshaking_error = handshake_error.error_no != 0; - if (handshake_success) { - if (client.flags.reject_unauthorized) { - // only reject the connection if reject_unauthorized == true - if (client.flags.did_have_handshaking_error) { - client.closeAndFail(BoringSSL.getCertErrorFromNo(handshake_error.error_no), comptime ssl, socket); - return; - } - - // if checkServerIdentity returns false, we dont call open this means that the connection was rejected - const ssl_ptr = @as(*BoringSSL.SSL, @ptrCast(socket.getNativeHandle())); - if (!client.checkServerIdentity(comptime ssl, socket, handshake_error, ssl_ptr, true)) { - client.flags.did_have_handshaking_error = true; - client.unregisterAbortTracker(); - if (!socket.isClosed()) terminateSocket(socket); - return; - } - } - - return client.firstCall(comptime ssl, socket); - } else { - // if we are here is because server rejected us, and the error_no is the cause of this - // if we set reject_unauthorized == false this means the server requires custom CA aka NODE_EXTRA_CA_CERTS - if (client.flags.did_have_handshaking_error) { - client.closeAndFail(BoringSSL.getCertErrorFromNo(handshake_error.error_no), comptime ssl, socket); - return; - } - // if handshake_success it self is false, this means that the connection was rejected - client.closeAndFail(error.ConnectionRefused, comptime ssl, socket); - return; - } - } - - if (socket.isClosed()) { - markSocketAsDead(socket); - if (active.get(PooledSocket)) |pooled| { - addMemoryBackToPool(pooled); - } - - return; - } - - if (handshake_success) { - if (active.is(PooledSocket)) { - // Allow pooled sockets to be reused if the handshake was successful. - socket.setTimeout(0); - socket.setTimeoutMinutes(5); - return; - } - } - - if (active.get(PooledSocket)) |pooled| { - addMemoryBackToPool(pooled); - } - - terminateSocket(socket); - } - pub fn onClose( - ptr: *anyopaque, - socket: HTTPSocket, - _: c_int, - _: ?*anyopaque, - ) void { - const tagged = getTagged(ptr); - markSocketAsDead(socket); - - if (tagged.get(HTTPClient)) |client| { - return client.onClose(comptime ssl, socket); - } - - if (tagged.get(PooledSocket)) |pooled| { - addMemoryBackToPool(pooled); - } - - return; - } - - fn addMemoryBackToPool(pooled: *PooledSocket) void { - assert(context().pending_sockets.put(pooled)); - } - - pub fn onData( - ptr: *anyopaque, - socket: HTTPSocket, - buf: []const u8, - ) void { - const tagged = getTagged(ptr); - if (tagged.get(HTTPClient)) |client| { - return client.onData( - comptime ssl, - buf, - if (comptime ssl) &http_thread.https_context else &http_thread.http_context, - socket, - ); - } else if (tagged.is(PooledSocket)) { - // trailing zero is fine to ignore - if (strings.eqlComptime(buf, end_of_chunked_http1_1_encoding_response_body)) { - return; - } - - log("Unexpected data on socket", .{}); - - return; - } - log("Unexpected data on unknown socket", .{}); - terminateSocket(socket); - } - pub fn onWritable( - ptr: *anyopaque, - socket: HTTPSocket, - ) void { - const tagged = getTagged(ptr); - if (tagged.get(HTTPClient)) |client| { - return client.onWritable( - false, - comptime ssl, - socket, - ); - } else if (tagged.is(PooledSocket)) { - // it's a keep-alive socket - } else { - // don't know what this is, let's close it - log("Unexpected writable on socket", .{}); - terminateSocket(socket); - } - } - pub fn onLongTimeout( - ptr: *anyopaque, - socket: HTTPSocket, - ) void { - const tagged = getTagged(ptr); - if (tagged.get(HTTPClient)) |client| { - return 
client.onTimeout(comptime ssl, socket); - } else if (tagged.get(PooledSocket)) |pooled| { - // If a socket has been sitting around for 5 minutes - // Let's close it and remove it from the pool. - addMemoryBackToPool(pooled); - } - - terminateSocket(socket); - } - pub fn onConnectError( - ptr: *anyopaque, - socket: HTTPSocket, - _: c_int, - ) void { - const tagged = getTagged(ptr); - markSocketAsDead(socket); - if (tagged.get(HTTPClient)) |client| { - client.onConnectError(); - } else if (tagged.get(PooledSocket)) |pooled| { - addMemoryBackToPool(pooled); - } - // us_connecting_socket_close is always called internally by uSockets - } - pub fn onEnd( - _: *anyopaque, - socket: HTTPSocket, - ) void { - // TCP fin must be closed, but we must keep the original tagged - // pointer so that their onClose callback is called. - // - // Three possible states: - // 1. HTTP Keep-Alive socket: it must be removed from the pool - // 2. HTTP Client socket: it might need to be retried - // 3. Dead socket: it is already marked as dead - socket.close(.failure); - } - }; - - fn existingSocket(this: *@This(), reject_unauthorized: bool, hostname: []const u8, port: u16) ?HTTPSocket { - if (hostname.len > MAX_KEEPALIVE_HOSTNAME) - return null; - - var iter = this.pending_sockets.used.iterator(.{ .kind = .set }); - - while (iter.next()) |pending_socket_index| { - var socket = this.pending_sockets.at(@as(u16, @intCast(pending_socket_index))); - if (socket.port != port) { - continue; - } - - if (socket.did_have_handshaking_error_while_reject_unauthorized_is_false and reject_unauthorized) { - continue; - } - - if (strings.eqlLong(socket.hostname_buf[0..socket.hostname_len], hostname, true)) { - const http_socket = socket.http_socket; - assert(context().pending_sockets.put(socket)); - - if (http_socket.isClosed()) { - markSocketAsDead(http_socket); - continue; - } - - if (http_socket.isShutdown() or http_socket.getError() != 0) { - terminateSocket(http_socket); - continue; - } - - log("+ Keep-Alive reuse {s}:{d}", .{ hostname, port }); - return http_socket; - } - } - - return null; - } - - pub fn connectSocket(this: *@This(), client: *HTTPClient, socket_path: []const u8) !HTTPSocket { - client.connected_url = if (client.http_proxy) |proxy| proxy else client.url; - const socket = try HTTPSocket.connectUnixAnon( - socket_path, - this.us_socket_context, - ActiveSocket.init(client).ptr(), - false, // dont allow half-open sockets - ); - client.allow_retry = false; - return socket; - } - - pub fn connect(this: *@This(), client: *HTTPClient, hostname_: []const u8, port: u16) !HTTPSocket { - const hostname = if (FeatureFlags.hardcode_localhost_to_127_0_0_1 and strings.eqlComptime(hostname_, "localhost")) - "127.0.0.1" - else - hostname_; - - client.connected_url = if (client.http_proxy) |proxy| proxy else client.url; - client.connected_url.hostname = hostname; - - if (client.isKeepAlivePossible()) { - if (this.existingSocket(client.flags.reject_unauthorized, hostname, port)) |sock| { - if (sock.ext(**anyopaque)) |ctx| { - ctx.* = bun.cast(**anyopaque, ActiveSocket.init(client).ptr()); - } - client.allow_retry = true; - try client.onOpen(comptime ssl, sock); - if (comptime ssl) { - client.firstCall(comptime ssl, sock); - } - return sock; - } - } - - const socket = try HTTPSocket.connectAnon( - hostname, - port, - this.us_socket_context, - ActiveSocket.init(client).ptr(), - false, - ); - client.allow_retry = false; - return socket; - } - }; -} - -const UnboundedQueue = @import("./bun.js/unbounded_queue.zig").UnboundedQueue; 
-const Queue = UnboundedQueue(AsyncHTTP, .next); - -pub const HTTPThread = struct { - loop: *JSC.MiniEventLoop, - http_context: NewHTTPContext(false), - https_context: NewHTTPContext(true), - - queued_tasks: Queue = Queue{}, - - queued_shutdowns: std.ArrayListUnmanaged(ShutdownMessage) = std.ArrayListUnmanaged(ShutdownMessage){}, - queued_writes: std.ArrayListUnmanaged(WriteMessage) = std.ArrayListUnmanaged(WriteMessage){}, - - queued_shutdowns_lock: bun.Mutex = .{}, - queued_writes_lock: bun.Mutex = .{}, - - queued_proxy_deref: std.ArrayListUnmanaged(*ProxyTunnel) = std.ArrayListUnmanaged(*ProxyTunnel){}, - - has_awoken: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), - timer: std.time.Timer, - lazy_libdeflater: ?*LibdeflateState = null, - lazy_request_body_buffer: ?*HeapRequestBodyBuffer = null, - - pub const HeapRequestBodyBuffer = struct { - buffer: [512 * 1024]u8 = undefined, - fixed_buffer_allocator: std.heap.FixedBufferAllocator, - - pub const new = bun.TrivialNew(@This()); - pub const deinit = bun.TrivialDeinit(@This()); - - pub fn init() *@This() { - var this = HeapRequestBodyBuffer.new(.{ - .fixed_buffer_allocator = undefined, - }); - this.fixed_buffer_allocator = std.heap.FixedBufferAllocator.init(&this.buffer); - return this; - } - - pub fn put(this: *@This()) void { - if (http_thread.lazy_request_body_buffer == null) { - // This case hypothetically should never happen - this.fixed_buffer_allocator.reset(); - http_thread.lazy_request_body_buffer = this; - } else { - this.deinit(); - } - } - }; - - pub const RequestBodyBuffer = union(enum) { - heap: *HeapRequestBodyBuffer, - stack: std.heap.StackFallbackAllocator(request_body_send_stack_buffer_size), - - pub fn deinit(this: *@This()) void { - switch (this.*) { - .heap => |heap| heap.put(), - .stack => {}, - } - } - - pub fn allocatedSlice(this: *@This()) []u8 { - return switch (this.*) { - .heap => |heap| &heap.buffer, - .stack => |*stack| &stack.buffer, - }; - } - - pub fn allocator(this: *@This()) std.mem.Allocator { - return switch (this.*) { - .heap => |heap| heap.fixed_buffer_allocator.allocator(), - .stack => |*stack| stack.get(), - }; - } - - pub fn toArrayList(this: *@This()) std.ArrayList(u8) { - var arraylist = std.ArrayList(u8).fromOwnedSlice(this.allocator(), this.allocatedSlice()); - arraylist.items.len = 0; - return arraylist; - } - }; - - const threadlog = Output.scoped(.HTTPThread, true); - const WriteMessage = struct { - data: []const u8, - async_http_id: u32, - flags: packed struct(u8) { - is_tls: bool, - ended: bool, - _: u6 = 0, - }, - }; - const ShutdownMessage = struct { - async_http_id: u32, - is_tls: bool, - }; - - pub const LibdeflateState = struct { - decompressor: *bun.libdeflate.Decompressor = undefined, - shared_buffer: [512 * 1024]u8 = undefined, - - pub const new = bun.TrivialNew(@This()); - }; - - const request_body_send_stack_buffer_size = 32 * 1024; - - pub inline fn getRequestBodySendBuffer(this: *@This(), estimated_size: usize) RequestBodyBuffer { - if (estimated_size >= request_body_send_stack_buffer_size) { - if (this.lazy_request_body_buffer == null) { - log("Allocating HeapRequestBodyBuffer due to {d} bytes request body", .{estimated_size}); - return .{ - .heap = HeapRequestBodyBuffer.init(), - }; - } - - return .{ .heap = bun.take(&this.lazy_request_body_buffer).? 
}; - } - return .{ - .stack = std.heap.stackFallback(request_body_send_stack_buffer_size, bun.default_allocator), - }; - } - - pub fn deflater(this: *@This()) *LibdeflateState { - if (this.lazy_libdeflater == null) { - this.lazy_libdeflater = LibdeflateState.new(.{ - .decompressor = bun.libdeflate.Decompressor.alloc() orelse bun.outOfMemory(), - }); - } - - return this.lazy_libdeflater.?; - } - - fn onInitErrorNoop(err: InitError, opts: InitOpts) noreturn { - switch (err) { - error.LoadCAFile => { - if (!bun.sys.existsZ(opts.abs_ca_file_name)) { - Output.err("HTTPThread", "failed to find CA file: '{s}'", .{opts.abs_ca_file_name}); - } else { - Output.err("HTTPThread", "failed to load CA file: '{s}'", .{opts.abs_ca_file_name}); - } - }, - error.InvalidCAFile => { - Output.err("HTTPThread", "the CA file is invalid: '{s}'", .{opts.abs_ca_file_name}); - }, - error.InvalidCA => { - Output.err("HTTPThread", "the provided CA is invalid", .{}); - }, - error.FailedToOpenSocket => { - Output.errGeneric("failed to start HTTP client thread", .{}); - }, - } - Global.crash(); - } - - pub const InitOpts = struct { - ca: []stringZ = &.{}, - abs_ca_file_name: stringZ = &.{}, - for_install: bool = false, - - onInitError: *const fn (err: InitError, opts: InitOpts) noreturn = &onInitErrorNoop, - }; - - fn initOnce(opts: *const InitOpts) void { - http_thread = .{ - .loop = undefined, - .http_context = .{ - .us_socket_context = undefined, - .pending_sockets = NewHTTPContext(false).PooledSocketHiveAllocator.empty, - }, - .https_context = .{ - .us_socket_context = undefined, - .pending_sockets = NewHTTPContext(true).PooledSocketHiveAllocator.empty, - }, - .timer = std.time.Timer.start() catch unreachable, - }; - bun.libdeflate.load(); - const thread = std.Thread.spawn( - .{ - .stack_size = bun.default_thread_stack_size, - }, - onStart, - .{opts.*}, - ) catch |err| Output.panic("Failed to start HTTP Client thread: {s}", .{@errorName(err)}); - thread.detach(); - } - var init_once = bun.once(initOnce); - - pub fn init(opts: *const InitOpts) void { - init_once.call(.{opts}); - } - - pub fn onStart(opts: InitOpts) void { - Output.Source.configureNamedThread("HTTP Client"); - default_arena = Arena.init() catch unreachable; - default_allocator = default_arena.allocator(); - - const loop = bun.JSC.MiniEventLoop.initGlobal(null); - - if (Environment.isWindows) { - _ = std.process.getenvW(comptime bun.strings.w("SystemRoot")) orelse { - bun.Output.errGeneric("The %SystemRoot% environment variable is not set. 
Bun needs this set in order for network requests to work.", .{}); - Global.crash(); - }; - } - - http_thread.loop = loop; - http_thread.http_context.init(); - http_thread.https_context.initWithThreadOpts(&opts) catch |err| opts.onInitError(err, opts); - http_thread.has_awoken.store(true, .monotonic); - http_thread.processEvents(); - } - - pub fn connect(this: *@This(), client: *HTTPClient, comptime is_ssl: bool) !NewHTTPContext(is_ssl).HTTPSocket { - if (client.unix_socket_path.length() > 0) { - return try this.context(is_ssl).connectSocket(client, client.unix_socket_path.slice()); - } - - if (comptime is_ssl) { - const needs_own_context = client.tls_props != null and client.tls_props.?.requires_custom_request_ctx; - if (needs_own_context) { - var requested_config = client.tls_props.?; - for (custom_ssl_context_map.keys()) |other_config| { - if (requested_config.isSame(other_config)) { - // we free the caller's config since we have an existing one - if (requested_config != client.tls_props) { - requested_config.deinit(); - bun.default_allocator.destroy(requested_config); - } - client.tls_props = other_config; - if (client.http_proxy) |url| { - return try custom_ssl_context_map.get(other_config).?.connect(client, url.hostname, url.getPortAuto()); - } else { - return try custom_ssl_context_map.get(other_config).?.connect(client, client.url.hostname, client.url.getPortAuto()); - } - } - } - // we need the config, so don't free it - var custom_context = try bun.default_allocator.create(NewHTTPContext(is_ssl)); - custom_context.initWithClientConfig(client) catch |err| { - client.tls_props = null; - - requested_config.deinit(); - bun.default_allocator.destroy(requested_config); - bun.default_allocator.destroy(custom_context); - - // TODO: these error names reach JS; figure out how they should be handled - return switch (err) { - error.FailedToOpenSocket => |e| e, - error.InvalidCA => error.FailedToOpenSocket, - error.InvalidCAFile => error.FailedToOpenSocket, - error.LoadCAFile => error.FailedToOpenSocket, - }; - }; - try custom_ssl_context_map.put(requested_config, custom_context); - // We might deinit the socket context, so we disable keepalive to make sure we don't - // free it while in use.
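- // (A pooled keep-alive socket could otherwise outlive the custom SSL context it was created with.)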
- client.flags.disable_keepalive = true; - if (client.http_proxy) |url| { - // https://github.com/oven-sh/bun/issues/11343 - if (url.protocol.len == 0 or strings.eqlComptime(url.protocol, "https") or strings.eqlComptime(url.protocol, "http")) { - return try this.context(is_ssl).connect(client, url.hostname, url.getPortAuto()); - } - return error.UnsupportedProxyProtocol; - } - return try custom_context.connect(client, client.url.hostname, client.url.getPortAuto()); - } - } - if (client.http_proxy) |url| { - if (url.href.len > 0) { - // https://github.com/oven-sh/bun/issues/11343 - if (url.protocol.len == 0 or strings.eqlComptime(url.protocol, "https") or strings.eqlComptime(url.protocol, "http")) { - return try this.context(is_ssl).connect(client, url.hostname, url.getPortAuto()); - } - return error.UnsupportedProxyProtocol; - } - } - return try this.context(is_ssl).connect(client, client.url.hostname, client.url.getPortAuto()); - } - - pub fn context(this: *@This(), comptime is_ssl: bool) *NewHTTPContext(is_ssl) { - return if (is_ssl) &this.https_context else &this.http_context; - } - - fn drainEvents(this: *@This()) void { - { - this.queued_shutdowns_lock.lock(); - defer this.queued_shutdowns_lock.unlock(); - for (this.queued_shutdowns.items) |http| { - if (socket_async_http_abort_tracker.fetchSwapRemove(http.async_http_id)) |socket_ptr| { - if (http.is_tls) { - const socket = uws.SocketTLS.fromAny(socket_ptr.value); - // do a fast shutdown here since we are aborting and we dont want to wait for the close_notify from the other side - socket.close(.failure); - } else { - const socket = uws.SocketTCP.fromAny(socket_ptr.value); - socket.close(.failure); - } - } - } - this.queued_shutdowns.clearRetainingCapacity(); - } - { - this.queued_writes_lock.lock(); - defer this.queued_writes_lock.unlock(); - for (this.queued_writes.items) |write| { - const ended = write.flags.ended; - defer if (!strings.eqlComptime(write.data, end_of_chunked_http1_1_encoding_response_body) and write.data.len > 0) { - // "0\r\n\r\n" is always a static so no need to free - bun.default_allocator.free(write.data); - }; - if (socket_async_http_abort_tracker.get(write.async_http_id)) |socket_ptr| { - if (write.flags.is_tls) { - const socket = uws.SocketTLS.fromAny(socket_ptr); - if (socket.isClosed() or socket.isShutdown()) { - continue; - } - const tagged = NewHTTPContext(true).getTaggedFromSocket(socket); - if (tagged.get(HTTPClient)) |client| { - if (client.state.original_request_body == .stream) { - var stream = &client.state.original_request_body.stream; - if (write.data.len > 0) { - stream.buffer.write(write.data) catch {}; - } - stream.ended = ended; - if (!stream.has_backpressure) { - client.onWritable( - false, - true, - socket, - ); - } - } - } - } else { - const socket = uws.SocketTCP.fromAny(socket_ptr); - if (socket.isClosed() or socket.isShutdown()) { - continue; - } - const tagged = NewHTTPContext(false).getTaggedFromSocket(socket); - if (tagged.get(HTTPClient)) |client| { - if (client.state.original_request_body == .stream) { - var stream = &client.state.original_request_body.stream; - if (write.data.len > 0) { - stream.buffer.write(write.data) catch {}; - } - stream.ended = ended; - if (!stream.has_backpressure) { - client.onWritable( - false, - false, - socket, - ); - } - } - } - } - } - } - this.queued_writes.clearRetainingCapacity(); - } - - while (this.queued_proxy_deref.pop()) |http| { - http.deref(); - } - - var count: usize = 0; - var active = AsyncHTTP.active_requests_count.load(.monotonic); - 
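The queues drained above are filled from other threads: scheduleShutdown and scheduleRequestWrite (below) append a message while holding a mutex, then wake the HTTP thread's loop so the next tick drains everything. A compressed sketch of that producer/consumer shape, using illustrative names rather than Bun's actual types:

```zig
const std = @import("std");

const ShutdownMessage = struct { async_http_id: u32 };

const CrossThreadQueue = struct {
    lock: std.Thread.Mutex = .{},
    items: std.ArrayListUnmanaged(ShutdownMessage) = .{},
    consumer_started: std.atomic.Value(bool) = std.atomic.Value(bool).init(false),

    // Called from any thread: enqueue under the lock, then wake the consumer.
    fn schedule(self: *CrossThreadQueue, gpa: std.mem.Allocator, msg: ShutdownMessage) !void {
        {
            self.lock.lock();
            defer self.lock.unlock();
            try self.items.append(gpa, msg);
        }
        if (self.consumer_started.load(.monotonic)) {
            // the real code calls loop.wakeup() here; elided in this sketch
        }
    }

    // Called only from the consumer thread, once per event-loop tick.
    fn drain(self: *CrossThreadQueue, handle: *const fn (ShutdownMessage) void) void {
        self.lock.lock();
        defer self.lock.unlock();
        for (self.items.items) |msg| handle(msg);
        self.items.clearRetainingCapacity();
    }
};
```

Note that the wakeup happens after the lock is released, so the consumer is never woken only to immediately block on the producer's mutex.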
const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); - if (active >= max) return; - defer { - if (comptime Environment.allow_assert) { - if (count > 0) - log("Processed {d} tasks\n", .{count}); - } - } - - while (this.queued_tasks.pop()) |http| { - var cloned = ThreadlocalAsyncHTTP.new(.{ - .async_http = http.*, - }); - cloned.async_http.real = http; - cloned.async_http.onStart(); - if (comptime Environment.allow_assert) { - count += 1; - } - - active += 1; - if (active >= max) break; - } - } - - fn processEvents(this: *@This()) noreturn { - if (comptime Environment.isPosix) { - this.loop.loop.num_polls = @max(2, this.loop.loop.num_polls); - } else if (comptime Environment.isWindows) { - this.loop.loop.inc(); - } else { - @compileError("TODO:"); - } - - while (true) { - this.drainEvents(); - - var start_time: i128 = 0; - if (comptime Environment.isDebug) { - start_time = std.time.nanoTimestamp(); - } - Output.flush(); - - this.loop.loop.inc(); - this.loop.loop.tick(); - this.loop.loop.dec(); - - // this.loop.run(); - if (comptime Environment.isDebug) { - const end = std.time.nanoTimestamp(); - threadlog("Waited {any}\n", .{std.fmt.fmtDurationSigned(@as(i64, @truncate(end - start_time)))}); - Output.flush(); - } - } - } - - pub fn scheduleShutdown(this: *@This(), http: *AsyncHTTP) void { - { - this.queued_shutdowns_lock.lock(); - defer this.queued_shutdowns_lock.unlock(); - this.queued_shutdowns.append(bun.default_allocator, .{ - .async_http_id = http.async_http_id, - .is_tls = http.client.isHTTPS(), - }) catch bun.outOfMemory(); - } - if (this.has_awoken.load(.monotonic)) - this.loop.loop.wakeup(); - } - - pub fn scheduleRequestWrite(this: *@This(), http: *AsyncHTTP, data: []const u8, ended: bool) void { - { - this.queued_writes_lock.lock(); - defer this.queued_writes_lock.unlock(); - this.queued_writes.append(bun.default_allocator, .{ - .async_http_id = http.async_http_id, - .data = data, - .flags = .{ - .is_tls = http.client.isHTTPS(), - .ended = ended, - }, - }) catch bun.outOfMemory(); - } - if (this.has_awoken.load(.monotonic)) - this.loop.loop.wakeup(); - } - - pub fn scheduleProxyDeref(this: *@This(), proxy: *ProxyTunnel) void { - // this is always called on the http thread - { - this.queued_proxy_deref.append(bun.default_allocator, proxy) catch bun.outOfMemory(); - } - if (this.has_awoken.load(.monotonic)) - this.loop.loop.wakeup(); - } - - pub fn wakeup(this: *@This()) void { - if (this.has_awoken.load(.monotonic)) - this.loop.loop.wakeup(); - } - - pub fn schedule(this: *@This(), batch: Batch) void { - if (batch.len == 0) - return; - - { - var batch_ = batch; - while (batch_.pop()) |task| { - const http: *AsyncHTTP = @fieldParentPtr("task", task); - this.queued_tasks.push(http); - } - } - - if (this.has_awoken.load(.monotonic)) - this.loop.loop.wakeup(); - } -}; - const log = Output.scoped(.fetch, false); -var temp_hostname: [8192]u8 = undefined; +pub var temp_hostname: [8192]u8 = undefined; pub fn checkServerIdentity( client: *HTTPClient, @@ -1612,7 +97,7 @@ pub fn checkServerIdentity( return true; } -fn registerAbortTracker( +pub fn registerAbortTracker( client: *HTTPClient, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, @@ -1622,7 +107,7 @@ fn registerAbortTracker( } } -fn unregisterAbortTracker( +pub fn unregisterAbortTracker( client: *HTTPClient, ) void { if (client.signals.aborted != null) { @@ -1896,363 +381,6 @@ fn writeRequest( _ = writer.write("\r\n") catch 0; } -pub const HTTPStage = enum { - pending, - headers, - body, - body_chunk, - 
fail, - done, - proxy_handshake, - proxy_headers, - proxy_body, -}; - -pub const CertificateInfo = struct { - cert: []const u8, - cert_error: HTTPCertError, - hostname: []const u8, - pub fn deinit(this: *const CertificateInfo, allocator: std.mem.Allocator) void { - allocator.free(this.cert); - allocator.free(this.cert_error.code); - allocator.free(this.cert_error.reason); - allocator.free(this.hostname); - } -}; - -const Decompressor = union(enum) { - zlib: *Zlib.ZlibReaderArrayList, - brotli: *Brotli.BrotliReaderArrayList, - zstd: *zstd.ZstdReaderArrayList, - none: void, - - pub fn deinit(this: *Decompressor) void { - switch (this.*) { - inline .brotli, .zlib, .zstd => |that| { - that.deinit(); - this.* = .{ .none = {} }; - }, - .none => {}, - } - } - - pub fn updateBuffers(this: *Decompressor, encoding: Encoding, buffer: []const u8, body_out_str: *MutableString) !void { - if (!encoding.isCompressed()) { - return; - } - - if (this.* == .none) { - switch (encoding) { - .gzip, .deflate => { - this.* = .{ - .zlib = try Zlib.ZlibReaderArrayList.initWithOptionsAndListAllocator( - buffer, - &body_out_str.list, - body_out_str.allocator, - default_allocator, - .{ - // zlib.MAX_WBITS = 15 - // to (de-)compress deflate format, use wbits = -zlib.MAX_WBITS - // to (de-)compress deflate format with headers we use wbits = 0 (we can detect the first byte using 120) - // to (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16 - .windowBits = if (encoding == Encoding.gzip) Zlib.MAX_WBITS | 16 else (if (buffer.len > 1 and buffer[0] == 120) 0 else -Zlib.MAX_WBITS), - }, - ), - }; - return; - }, - .brotli => { - this.* = .{ - .brotli = try Brotli.BrotliReaderArrayList.newWithOptions( - buffer, - &body_out_str.list, - body_out_str.allocator, - .{}, - ), - }; - return; - }, - .zstd => { - this.* = .{ - .zstd = try zstd.ZstdReaderArrayList.initWithListAllocator( - buffer, - &body_out_str.list, - body_out_str.allocator, - default_allocator, - ), - }; - return; - }, - else => @panic("Invalid encoding. This code should not be reachable"), - } - } - - switch (this.*) { - .zlib => |reader| { - assert(reader.zlib.avail_in == 0); - reader.zlib.next_in = buffer.ptr; - reader.zlib.avail_in = @as(u32, @truncate(buffer.len)); - - const initial = body_out_str.list.items.len; - body_out_str.list.expandToCapacity(); - if (body_out_str.list.capacity == initial) { - try body_out_str.list.ensureUnusedCapacity(body_out_str.allocator, 4096); - body_out_str.list.expandToCapacity(); - } - reader.list = body_out_str.list; - reader.zlib.next_out = @ptrCast(&body_out_str.list.items[initial]); - reader.zlib.avail_out = @as(u32, @truncate(body_out_str.list.capacity - initial)); - // we reset the total out so we can track how much we decompressed this time - reader.zlib.total_out = @truncate(initial); - }, - .brotli => |reader| { - reader.input = buffer; - reader.total_in = 0; - - const initial = body_out_str.list.items.len; - reader.list = body_out_str.list; - reader.total_out = @truncate(initial); - }, - .zstd => |reader| { - reader.input = buffer; - reader.total_in = 0; - - const initial = body_out_str.list.items.len; - reader.list = body_out_str.list; - reader.total_out = @truncate(initial); - }, - else => @panic("Invalid encoding. 
This code should not be reachable"), - } - } - - pub fn readAll(this: *Decompressor, is_done: bool) !void { - switch (this.*) { - .zlib => |zlib| try zlib.readAll(), - .brotli => |brotli| try brotli.readAll(is_done), - .zstd => |reader| try reader.readAll(is_done), - .none => {}, - } - } -}; - -// TODO: reduce the size of this struct -// Many of these fields can be moved to a packed struct and use less space -pub const InternalState = struct { - response_message_buffer: MutableString = undefined, - /// pending response is the temporary storage for the response headers, url and status code - /// this uses shared_response_headers_buf to store the headers - /// this will be turned null once the metadata is cloned - pending_response: ?picohttp.Response = null, - - /// This is the cloned metadata containing the response headers, url and status code after the .headers phase are received - /// will be turned null once returned to the user (the ownership is transferred to the user) - /// this can happen after await fetch(...) and the body can continue streaming when this is already null - /// the user will receive only chunks of the body stored in body_out_str - cloned_metadata: ?HTTPResponseMetadata = null, - flags: InternalStateFlags = InternalStateFlags{}, - - transfer_encoding: Encoding = Encoding.identity, - encoding: Encoding = Encoding.identity, - content_encoding_i: u8 = std.math.maxInt(u8), - chunked_decoder: picohttp.phr_chunked_decoder = .{}, - decompressor: Decompressor = .{ .none = {} }, - stage: Stage = Stage.pending, - /// This is owned by the user and should not be freed here - body_out_str: ?*MutableString = null, - compressed_body: MutableString = undefined, - content_length: ?usize = null, - total_body_received: usize = 0, - request_body: []const u8 = "", - original_request_body: HTTPRequestBody = .{ .bytes = "" }, - request_sent_len: usize = 0, - fail: ?anyerror = null, - request_stage: HTTPStage = .pending, - response_stage: HTTPStage = .pending, - certificate_info: ?CertificateInfo = null, - - pub const InternalStateFlags = packed struct(u8) { - allow_keepalive: bool = true, - received_last_chunk: bool = false, - did_set_content_encoding: bool = false, - is_redirect_pending: bool = false, - is_libdeflate_fast_path_disabled: bool = false, - resend_request_body_on_redirect: bool = false, - _padding: u2 = 0, - }; - - pub fn init(body: HTTPRequestBody, body_out_str: *MutableString) InternalState { - return .{ - .original_request_body = body, - .request_body = if (body == .bytes) body.bytes else "", - .compressed_body = MutableString{ .allocator = default_allocator, .list = .{} }, - .response_message_buffer = MutableString{ .allocator = default_allocator, .list = .{} }, - .body_out_str = body_out_str, - .stage = Stage.pending, - .pending_response = null, - }; - } - - pub fn isChunkedEncoding(this: *InternalState) bool { - return this.transfer_encoding == Encoding.chunked; - } - - pub fn reset(this: *InternalState, allocator: std.mem.Allocator) void { - this.compressed_body.deinit(); - this.response_message_buffer.deinit(); - - const body_msg = this.body_out_str; - if (body_msg) |body| body.reset(); - this.decompressor.deinit(); - - // just in case we check and free to avoid leaks - if (this.cloned_metadata != null) { - this.cloned_metadata.?.deinit(allocator); - this.cloned_metadata = null; - } - - // if exists we own this info - if (this.certificate_info) |info| { - this.certificate_info = null; - info.deinit(bun.default_allocator); - } - - this.original_request_body.deinit(); - 
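- // Reset in place: overwrite the whole struct with defaults, carrying over only body_out_str, which is owned by the user.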
this.* = .{ - .body_out_str = body_msg, - .compressed_body = MutableString{ .allocator = default_allocator, .list = .{} }, - .response_message_buffer = MutableString{ .allocator = default_allocator, .list = .{} }, - .original_request_body = .{ .bytes = "" }, - .request_body = "", - .certificate_info = null, - .flags = .{}, - .total_body_received = 0, - }; - } - - pub fn getBodyBuffer(this: *InternalState) *MutableString { - if (this.encoding.isCompressed()) { - return &this.compressed_body; - } - - return this.body_out_str.?; - } - - fn isDone(this: *InternalState) bool { - if (this.isChunkedEncoding()) { - return this.flags.received_last_chunk; - } - - if (this.content_length) |content_length| { - return this.total_body_received >= content_length; - } - - // For Content-Type: text/event-stream, we are only done when the connection closes, ends, or times out - return this.flags.received_last_chunk; - } - - fn decompressBytes(this: *InternalState, buffer: []const u8, body_out_str: *MutableString, is_final_chunk: bool) !void { - defer this.compressed_body.reset(); - var gzip_timer: std.time.Timer = undefined; - - if (extremely_verbose) - gzip_timer = std.time.Timer.start() catch @panic("Timer failure"); - - var still_needs_to_decompress = true; - - if (FeatureFlags.isLibdeflateEnabled()) { - // Fast-path: use libdeflate - if (is_final_chunk and !this.flags.is_libdeflate_fast_path_disabled and this.encoding.canUseLibDeflate() and this.isDone()) libdeflate: { - this.flags.is_libdeflate_fast_path_disabled = true; - - log("Decompressing {d} bytes with libdeflate\n", .{buffer.len}); - var deflater = http_thread.deflater(); - - // gzip stores the size of the uncompressed data in the last 4 bytes of the stream - // But it's only valid if the stream is less than 4 GiB, since it's 4 bytes. - // If we know that the stream is going to be larger than our - // pre-allocated buffer, then let's dynamically allocate the exact - // size. - if (this.encoding == Encoding.gzip and buffer.len > 16 and buffer.len < 1024 * 1024 * 1024) { - const estimated_size: u32 = @bitCast(buffer[buffer.len - 4 ..][0..4].*); - // Since this is arbitrary input from the internet, let's set an upper bound of 32 MB for the allocation size.
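- // For example, a trailer of 00 00 10 00 decodes little-endian to 0x00100000, i.e. an expected 1 MiB of uncompressed output.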
- if (estimated_size > deflater.shared_buffer.len and estimated_size < 32 * 1024 * 1024) { - try body_out_str.list.ensureTotalCapacityPrecise(body_out_str.allocator, estimated_size); - const result = deflater.decompressor.decompress(buffer, body_out_str.list.allocatedSlice(), .gzip); - - if (result.status == .success) { - body_out_str.list.items.len = result.written; - still_needs_to_decompress = false; - } - - break :libdeflate; - } - } - - const result = deflater.decompressor.decompress(buffer, &deflater.shared_buffer, switch (this.encoding) { - .gzip => .gzip, - .deflate => .deflate, - else => unreachable, - }); - - if (result.status == .success) { - try body_out_str.list.ensureTotalCapacityPrecise(body_out_str.allocator, result.written); - body_out_str.list.appendSliceAssumeCapacity(deflater.shared_buffer[0..result.written]); - still_needs_to_decompress = false; - } - } - } - - // Slow path, or brotli: use the .decompressor - if (still_needs_to_decompress) { - log("Decompressing {d} bytes\n", .{buffer.len}); - if (body_out_str.list.capacity == 0) { - const min = @min(@ceil(@as(f64, @floatFromInt(buffer.len)) * 1.5), @as(f64, 1024 * 1024 * 2)); - try body_out_str.growBy(@max(@as(usize, @intFromFloat(min)), 32)); - } - - try this.decompressor.updateBuffers(this.encoding, buffer, body_out_str); - - this.decompressor.readAll(this.isDone()) catch |err| { - if (this.isDone() or error.ShortRead != err) { - Output.prettyErrorln("Decompression error: {s}", .{bun.asByteSlice(@errorName(err))}); - Output.flush(); - return err; - } - }; - } - - if (extremely_verbose) - this.gzip_elapsed = gzip_timer.read(); - } - - fn decompress(this: *InternalState, buffer: MutableString, body_out_str: *MutableString, is_final_chunk: bool) !void { - try this.decompressBytes(buffer.list.items, body_out_str, is_final_chunk); - } - - pub fn processBodyBuffer(this: *InternalState, buffer: MutableString, is_final_chunk: bool) !bool { - if (this.flags.is_redirect_pending) return false; - - var body_out_str = this.body_out_str.?; - - switch (this.encoding) { - Encoding.brotli, Encoding.gzip, Encoding.deflate, Encoding.zstd => { - try this.decompress(buffer, body_out_str, is_final_chunk); - }, - else => { - if (!body_out_str.owns(buffer.list.items)) { - body_out_str.append(buffer.list.items) catch |err| { - Output.prettyErrorln("Failed to append to body buffer: {s}", .{bun.asByteSlice(@errorName(err))}); - Output.flush(); - return err; - }; - } - }, - } - - return this.body_out_str.?.list.items.len > 0; - } -}; - const default_redirect_count = 127; pub const HTTPVerboseLevel = enum { @@ -2343,13 +471,6 @@ pub fn isKeepAlivePossible(this: *HTTPClient) bool { return false; } -const Stage = enum(u8) { - pending, - connect, - done, - fail, -}; - // lowercase hash header names so that we can be sure pub fn hashHeaderName(name: string) u64 { var hasher = std.hash.Wyhash.init(0); @@ -2386,29 +507,6 @@ const authorization_header_hash = hashHeaderConst("Authorization"); const proxy_authorization_header_hash = hashHeaderConst("Proxy-Authorization"); const cookie_header_hash = hashHeaderConst("Cookie"); -pub const Encoding = enum { - identity, - gzip, - deflate, - brotli, - zstd, - chunked, - - pub fn canUseLibDeflate(this: Encoding) bool { - return switch (this) { - .gzip, .deflate => true, - else => false, - }; - } - - pub fn isCompressed(this: Encoding) bool { - return switch (this) { - .brotli, .gzip, .deflate, .zstd => true, - else => false, - }; - } -}; - const host_header_name = "Host"; const content_length_header_name = 
"Content-Length"; const chunked_encoded_header = picohttp.Header{ .name = "Transfer-Encoding", .value = "chunked" }; @@ -2432,499 +530,7 @@ pub fn headerStr(this: *const HTTPClient, ptr: Api.StringPointer) string { return this.header_buf[ptr.offset..][0..ptr.length]; } -pub const HeaderBuilder = @import("./http/header_builder.zig"); - -const HTTPCallbackPair = .{ *AsyncHTTP, HTTPClientResult }; -pub const HTTPChannel = @import("./sync.zig").Channel(HTTPCallbackPair, .{ .Static = 1000 }); -// 32 pointers much cheaper than 1000 pointers -const SingleHTTPChannel = struct { - const SingleHTTPCHannel_ = @import("./sync.zig").Channel(HTTPClientResult, .{ .Static = 8 }); - channel: SingleHTTPCHannel_, - pub fn reset(_: *@This()) void {} - pub fn init() SingleHTTPChannel { - return SingleHTTPChannel{ .channel = SingleHTTPCHannel_.init() }; - } -}; - -pub const HTTPChannelContext = struct { - http: AsyncHTTP = undefined, - channel: *HTTPChannel, - - pub fn callback(data: HTTPCallbackPair) void { - var this: *HTTPChannelContext = @fieldParentPtr("http", data.@"0"); - this.channel.writeItem(data) catch unreachable; - } -}; - -pub const AsyncHTTP = struct { - request: ?picohttp.Request = null, - response: ?picohttp.Response = null, - request_headers: Headers.Entry.List = .empty, - response_headers: Headers.Entry.List = .empty, - response_buffer: *MutableString, - request_body: HTTPRequestBody = .{ .bytes = "" }, - allocator: std.mem.Allocator, - request_header_buf: string = "", - method: Method = Method.GET, - url: URL, - http_proxy: ?URL = null, - real: ?*AsyncHTTP = null, - next: ?*AsyncHTTP = null, - - task: ThreadPool.Task = ThreadPool.Task{ .callback = &startAsyncHTTP }, - result_callback: HTTPClientResult.Callback = undefined, - - redirected: bool = false, - - response_encoding: Encoding = Encoding.identity, - verbose: HTTPVerboseLevel = .none, - - client: HTTPClient = undefined, - waitingDeffered: bool = false, - finalized: bool = false, - err: ?anyerror = null, - async_http_id: u32 = 0, - - state: AtomicState = AtomicState.init(State.pending), - elapsed: u64 = 0, - gzip_elapsed: u64 = 0, - - signals: Signals = .{}, - - pub var active_requests_count = std.atomic.Value(usize).init(0); - pub var max_simultaneous_requests = std.atomic.Value(usize).init(256); - - pub fn loadEnv(allocator: std.mem.Allocator, logger: *Log, env: *DotEnv.Loader) void { - if (env.get("BUN_CONFIG_MAX_HTTP_REQUESTS")) |max_http_requests| { - const max = std.fmt.parseInt(u16, max_http_requests, 10) catch { - logger.addErrorFmt( - null, - Loc.Empty, - allocator, - "BUN_CONFIG_MAX_HTTP_REQUESTS value \"{s}\" is not a valid integer between 1 and 65535", - .{max_http_requests}, - ) catch unreachable; - return; - }; - if (max == 0) { - logger.addWarningFmt( - null, - Loc.Empty, - allocator, - "BUN_CONFIG_MAX_HTTP_REQUESTS value must be a number between 1 and 65535", - .{}, - ) catch unreachable; - return; - } - AsyncHTTP.max_simultaneous_requests.store(max, .monotonic); - } - } - - pub fn signalHeaderProgress(this: *AsyncHTTP) void { - var progress = this.signals.header_progress orelse return; - progress.store(true, .release); - } - - pub fn enableBodyStreaming(this: *AsyncHTTP) void { - var stream = this.signals.body_streaming orelse return; - stream.store(true, .release); - } - - pub fn clearData(this: *AsyncHTTP) void { - this.response_headers.deinit(this.allocator); - this.response_headers = .{}; - this.request = null; - this.response = null; - this.client.unix_socket_path.deinit(); - this.client.unix_socket_path = 
JSC.ZigString.Slice.empty; - } - - pub const State = enum(u32) { - pending = 0, - scheduled = 1, - sending = 2, - success = 3, - fail = 4, - }; - const AtomicState = std.atomic.Value(State); - - pub const Options = struct { - http_proxy: ?URL = null, - hostname: ?[]u8 = null, - signals: ?Signals = null, - unix_socket_path: ?JSC.ZigString.Slice = null, - disable_timeout: ?bool = null, - verbose: ?HTTPVerboseLevel = null, - disable_keepalive: ?bool = null, - disable_decompression: ?bool = null, - reject_unauthorized: ?bool = null, - tls_props: ?*SSLConfig = null, - }; - - const Preconnect = struct { - async_http: AsyncHTTP, - response_buffer: MutableString, - url: bun.URL, - is_url_owned: bool, - - pub const new = bun.TrivialNew(@This()); - - pub fn onResult(this: *Preconnect, _: *AsyncHTTP, _: HTTPClientResult) void { - this.response_buffer.deinit(); - this.async_http.clearData(); - this.async_http.client.deinit(); - if (this.is_url_owned) { - bun.default_allocator.free(this.url.href); - } - - bun.destroy(this); - } - }; - - pub fn preconnect( - url: URL, - is_url_owned: bool, - ) void { - if (!FeatureFlags.is_fetch_preconnect_supported) { - if (is_url_owned) { - bun.default_allocator.free(url.href); - } - - return; - } - - var this = Preconnect.new(.{ - .async_http = undefined, - .response_buffer = MutableString{ .allocator = default_allocator, .list = .{} }, - .url = url, - .is_url_owned = is_url_owned, - }); - - this.async_http = AsyncHTTP.init(bun.default_allocator, .GET, url, .{}, "", &this.response_buffer, "", HTTPClientResult.Callback.New(*Preconnect, Preconnect.onResult).init(this), .manual, .{}); - this.async_http.client.flags.is_preconnect_only = true; - - http_thread.schedule(Batch.from(&this.async_http.task)); - } - - pub fn init( - allocator: std.mem.Allocator, - method: Method, - url: URL, - headers: Headers.Entry.List, - headers_buf: string, - response_buffer: *MutableString, - request_body: []const u8, - callback: HTTPClientResult.Callback, - redirect_type: FetchRedirect, - options: Options, - ) AsyncHTTP { - var this = AsyncHTTP{ - .allocator = allocator, - .url = url, - .method = method, - .request_headers = headers, - .request_header_buf = headers_buf, - .request_body = .{ .bytes = request_body }, - .response_buffer = response_buffer, - .result_callback = callback, - .http_proxy = options.http_proxy, - .signals = options.signals orelse .{}, - .async_http_id = if (options.signals != null and options.signals.?.aborted != null) async_http_id_monotonic.fetchAdd(1, .monotonic) else 0, - }; - - this.client = .{ - .allocator = allocator, - .method = method, - .url = url, - .header_entries = headers, - .header_buf = headers_buf, - .hostname = options.hostname, - .signals = options.signals orelse this.signals, - .async_http_id = this.async_http_id, - .http_proxy = this.http_proxy, - .redirect_type = redirect_type, - }; - if (options.unix_socket_path) |val| { - assert(this.client.unix_socket_path.length() == 0); - this.client.unix_socket_path = val; - } - if (options.disable_timeout) |val| { - this.client.flags.disable_timeout = val; - } - if (options.verbose) |val| { - this.client.verbose = val; - } - if (options.disable_decompression) |val| { - this.client.flags.disable_decompression = val; - } - if (options.disable_keepalive) |val| { - this.client.flags.disable_keepalive = val; - } - if (options.reject_unauthorized) |val| { - this.client.flags.reject_unauthorized = val; - } - if (options.tls_props) |val| { - this.client.tls_props = val; - } - - if (options.http_proxy) |proxy| { 
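- // The block below builds the Proxy-Authorization value: percent-decode the username (and password, if present), join them as "username:password", base64-encode with the URL-safe alphabet, and prefix the result with "Basic ".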
- // Username between 0 and 4096 chars - if (proxy.username.len > 0 and proxy.username.len < 4096) { - // Password between 0 and 4096 chars - if (proxy.password.len > 0 and proxy.password.len < 4096) { - // decode password - var password_buffer = std.mem.zeroes([4096]u8); - var password_stream = std.io.fixedBufferStream(&password_buffer); - const password_writer = password_stream.writer(); - const PassWriter = @TypeOf(password_writer); - const password_len = PercentEncoding.decode(PassWriter, password_writer, proxy.password) catch { - // Invalid proxy authorization - return this; - }; - const password = password_buffer[0..password_len]; - - // Decode username - var username_buffer = std.mem.zeroes([4096]u8); - var username_stream = std.io.fixedBufferStream(&username_buffer); - const username_writer = username_stream.writer(); - const UserWriter = @TypeOf(username_writer); - const username_len = PercentEncoding.decode(UserWriter, username_writer, proxy.username) catch { - // Invalid proxy authorization - return this; - }; - const username = username_buffer[0..username_len]; - - // concat user and password - const auth = std.fmt.allocPrint(allocator, "{s}:{s}", .{ username, password }) catch unreachable; - defer allocator.free(auth); - const size = std.base64.standard.Encoder.calcSize(auth.len); - var buf = this.allocator.alloc(u8, size + "Basic ".len) catch unreachable; - const encoded = std.base64.url_safe.Encoder.encode(buf["Basic ".len..], auth); - buf[0.."Basic ".len].* = "Basic ".*; - this.client.proxy_authorization = buf[0 .. "Basic ".len + encoded.len]; - } else { - //Decode username - var username_buffer = std.mem.zeroes([4096]u8); - var username_stream = std.io.fixedBufferStream(&username_buffer); - const username_writer = username_stream.writer(); - const UserWriter = @TypeOf(username_writer); - const username_len = PercentEncoding.decode(UserWriter, username_writer, proxy.username) catch { - // Invalid proxy authorization - return this; - }; - const username = username_buffer[0..username_len]; - - // only use user - const size = std.base64.standard.Encoder.calcSize(username_len); - var buf = allocator.alloc(u8, size + "Basic ".len) catch unreachable; - const encoded = std.base64.url_safe.Encoder.encode(buf["Basic ".len..], username); - buf[0.."Basic ".len].* = "Basic ".*; - this.client.proxy_authorization = buf[0 .. 
"Basic ".len + encoded.len]; - } - } - } - return this; - } - - pub fn initSync( - allocator: std.mem.Allocator, - method: Method, - url: URL, - headers: Headers.Entry.List, - headers_buf: string, - response_buffer: *MutableString, - request_body: []const u8, - http_proxy: ?URL, - hostname: ?[]u8, - redirect_type: FetchRedirect, - ) AsyncHTTP { - return @This().init( - allocator, - method, - url, - headers, - headers_buf, - response_buffer, - request_body, - undefined, - redirect_type, - .{ - .http_proxy = http_proxy, - .hostname = hostname, - }, - ); - } - - fn reset(this: *AsyncHTTP) !void { - const aborted = this.client.aborted; - this.client = try HTTPClient.init(this.allocator, this.method, this.client.url, this.client.header_entries, this.client.header_buf, aborted); - this.client.http_proxy = this.http_proxy; - - if (this.http_proxy) |proxy| { - //TODO: need to understand how is possible to reuse Proxy with TSL, so disable keepalive if url is HTTPS - this.client.flags.disable_keepalive = this.url.isHTTPS(); - // Username between 0 and 4096 chars - if (proxy.username.len > 0 and proxy.username.len < 4096) { - // Password between 0 and 4096 chars - if (proxy.password.len > 0 and proxy.password.len < 4096) { - // decode password - var password_buffer = std.mem.zeroes([4096]u8); - var password_stream = std.io.fixedBufferStream(&password_buffer); - const password_writer = password_stream.writer(); - const PassWriter = @TypeOf(password_writer); - const password_len = PercentEncoding.decode(PassWriter, password_writer, proxy.password) catch { - // Invalid proxy authorization - return this; - }; - const password = password_buffer[0..password_len]; - - // Decode username - var username_buffer = std.mem.zeroes([4096]u8); - var username_stream = std.io.fixedBufferStream(&username_buffer); - const username_writer = username_stream.writer(); - const UserWriter = @TypeOf(username_writer); - const username_len = PercentEncoding.decode(UserWriter, username_writer, proxy.username) catch { - // Invalid proxy authorization - return this; - }; - - const username = username_buffer[0..username_len]; - - // concat user and password - const auth = std.fmt.allocPrint(this.allocator, "{s}:{s}", .{ username, password }) catch unreachable; - defer this.allocator.free(auth); - const size = std.base64.standard.Encoder.calcSize(auth.len); - var buf = this.allocator.alloc(u8, size + "Basic ".len) catch unreachable; - const encoded = std.base64.url_safe.Encoder.encode(buf["Basic ".len..], auth); - buf[0.."Basic ".len].* = "Basic ".*; - this.client.proxy_authorization = buf[0 .. "Basic ".len + encoded.len]; - } else { - //Decode username - var username_buffer = std.mem.zeroes([4096]u8); - var username_stream = std.io.fixedBufferStream(&username_buffer); - const username_writer = username_stream.writer(); - const UserWriter = @TypeOf(username_writer); - const username_len = PercentEncoding.decode(UserWriter, username_writer, proxy.username) catch { - // Invalid proxy authorization - return this; - }; - const username = username_buffer[0..username_len]; - - // only use user - const size = std.base64.standard.Encoder.calcSize(username_len); - var buf = this.allocator.alloc(u8, size + "Basic ".len) catch unreachable; - const encoded = std.base64.url_safe.Encoder.encode(buf["Basic ".len..], username); - buf[0.."Basic ".len].* = "Basic ".*; - this.client.proxy_authorization = buf[0 .. 
"Basic ".len + encoded.len]; - } - } - } - } - - pub fn schedule(this: *AsyncHTTP, _: std.mem.Allocator, batch: *ThreadPool.Batch) void { - this.state.store(.scheduled, .monotonic); - batch.push(ThreadPool.Batch.from(&this.task)); - } - - fn sendSyncCallback(this: *SingleHTTPChannel, async_http: *AsyncHTTP, result: HTTPClientResult) void { - async_http.real.?.* = async_http.*; - async_http.real.?.response_buffer = async_http.response_buffer; - this.channel.writeItem(result) catch unreachable; - } - - pub fn sendSync(this: *AsyncHTTP) anyerror!picohttp.Response { - HTTPThread.init(&.{}); - - var ctx = try bun.default_allocator.create(SingleHTTPChannel); - ctx.* = SingleHTTPChannel.init(); - this.result_callback = HTTPClientResult.Callback.New( - *SingleHTTPChannel, - sendSyncCallback, - ).init(ctx); - - var batch = bun.ThreadPool.Batch{}; - this.schedule(bun.default_allocator, &batch); - http_thread.schedule(batch); - - const result = ctx.channel.readItem() catch unreachable; - if (result.fail) |err| { - return err; - } - assert(result.metadata != null); - return result.metadata.?.response; - } - - pub fn onAsyncHTTPCallback(this: *AsyncHTTP, async_http: *AsyncHTTP, result: HTTPClientResult) void { - assert(this.real != null); - - var callback = this.result_callback; - this.elapsed = http_thread.timer.read() -| this.elapsed; - - // TODO: this condition seems wrong: if we started with a non-default value, we might - // report a redirect even if none happened - this.redirected = this.client.flags.redirected; - if (result.isSuccess()) { - this.err = null; - if (result.metadata) |metadata| { - this.response = metadata.response; - } - this.state.store(.success, .monotonic); - } else { - this.err = result.fail; - this.response = null; - this.state.store(State.fail, .monotonic); - } - - if (comptime Environment.enable_logs) { - if (socket_async_http_abort_tracker.count() > 0) { - log("socket_async_http_abort_tracker count: {d}", .{socket_async_http_abort_tracker.count()}); - } - } - - if (socket_async_http_abort_tracker.capacity() > 10_000 and socket_async_http_abort_tracker.count() < 100) { - socket_async_http_abort_tracker.shrinkAndFree(socket_async_http_abort_tracker.count()); - } - - if (result.has_more) { - callback.function(callback.ctx, async_http, result); - } else { - { - this.client.deinit(); - var threadlocal_http: *ThreadlocalAsyncHTTP = @fieldParentPtr("async_http", async_http); - defer threadlocal_http.deinit(); - log("onAsyncHTTPCallback: {any}", .{std.fmt.fmtDuration(this.elapsed)}); - callback.function(callback.ctx, async_http, result); - } - - const active_requests = AsyncHTTP.active_requests_count.fetchSub(1, .monotonic); - assert(active_requests > 0); - } - - if (!http_thread.queued_tasks.isEmpty() and AsyncHTTP.active_requests_count.load(.monotonic) < AsyncHTTP.max_simultaneous_requests.load(.monotonic)) { - http_thread.loop.loop.wakeup(); - } - } - - pub fn startAsyncHTTP(task: *Task) void { - var this: *AsyncHTTP = @fieldParentPtr("task", task); - this.onStart(); - } - - pub fn onStart(this: *AsyncHTTP) void { - _ = active_requests_count.fetchAdd(1, .monotonic); - this.err = null; - this.state.store(.sending, .monotonic); - this.client.result_callback = HTTPClientResult.Callback.New(*AsyncHTTP, onAsyncHTTPCallback).init( - this, - ); - - this.elapsed = http_thread.timer.read(); - if (this.response_buffer.list.capacity == 0) { - this.response_buffer.allocator = default_allocator; - } - this.client.start(this.request_body, this.response_buffer); - } -}; +pub const 
HeaderBuilder = @import("./http/HeaderBuilder.zig"); pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request { var header_count: usize = 0; @@ -3192,8 +798,6 @@ fn start_(this: *HTTPClient, comptime is_ssl: bool) void { } } -const Task = ThreadPool.Task; - pub const HTTPResponseMetadata = struct { url: []const u8 = "", owned_buf: []u8 = "", @@ -3316,10 +920,7 @@ noinline fn sendInitialRequestPayload(this: *HTTPClient, comptime is_first_call: assert(!socket.isShutdown()); assert(!socket.isClosed()); } - const amount = socket.write( - to_send, - false, - ); + const amount = try writeToSocket(is_ssl, socket, to_send); if (comptime is_first_call) { if (amount == 0) { // don't worry about it @@ -3331,11 +932,7 @@ } } - if (amount < 0) { - return error.WriteFailed; - } - - this.state.request_sent_len += @as(usize, @intCast(amount)); + this.state.request_sent_len += amount; const has_sent_headers = this.state.request_sent_len >= headers_len; if (has_sent_headers and this.verbose != .none) { @@ -3358,6 +955,102 @@ }; } +pub fn flushStream(this: *HTTPClient, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket) void { + // flush the stream without adding any new data + this.writeToStream(is_ssl, socket, ""); +} + +/// Write data to the socket (just an error wrapper to easily handle the amount written and errors) +fn writeToSocket(comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, data: []const u8) !usize { + const amount = socket.write(data); + if (amount < 0) { + return error.WriteFailed; + } + return @intCast(amount); +} + +/// Write data to the socket and buffer the unwritten data if there is backpressure +fn writeToSocketWithBufferFallback(comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, buffer: *bun.io.StreamBuffer, data: []const u8) !usize { + const amount = try writeToSocket(is_ssl, socket, data); + if (amount < data.len) { + buffer.write(data[@intCast(amount)..]) catch bun.outOfMemory(); + } + return amount; +} + +/// Write buffered data to the socket, returning true if there is backpressure +fn writeToStreamUsingBuffer(this: *HTTPClient, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, buffer: *bun.io.StreamBuffer, data: []const u8) !bool { + if (buffer.isNotEmpty()) { + const to_send = buffer.slice(); + const amount = try writeToSocket(is_ssl, socket, to_send); + this.state.request_sent_len += amount; + buffer.cursor += amount; + if (amount < to_send.len) { + // we could not send all pending data so we need to buffer the extra data + if (data.len > 0) { + buffer.write(data) catch bun.outOfMemory(); + } + // failed to send everything so we have backpressure + return true; + } + if (buffer.isEmpty()) { + buffer.reset(); + } + } + // ok, we flushed all pending data, so backpressure is cleared + if (data.len > 0) { + // no backpressure; everything pending was sent, so we can just try to send the new data + const sent = try writeToSocketWithBufferFallback(is_ssl, socket, buffer, data); + this.state.request_sent_len += sent; + // if we didn't send all the data we have backpressure + return sent < data.len; + } + // no data to send so we are done + return false; +} + +pub fn writeToStream(this: *HTTPClient, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, data: []const u8) void { + log("writeToStream", .{}); + var stream =
&this.state.original_request_body.stream; + const stream_buffer = stream.buffer orelse return; + const buffer = stream_buffer.acquire(); + const wasEmpty = buffer.isEmpty() and data.len == 0; + if (wasEmpty and stream.ended) { + // nothing is buffered and the stream is done so we just release and detach + stream_buffer.release(); + stream.detach(); + return; + } + + // to simplify things, the buffer contains the raw data; we just need to flush it to the socket + const has_backpressure = writeToStreamUsingBuffer(this, is_ssl, socket, buffer, data) catch |err| { + // we got some critical error so we need to fail and close the connection + stream_buffer.release(); + stream.detach(); + this.closeAndFail(err, is_ssl, socket); + return; + }; + + if (has_backpressure) { + // we have backpressure so just release the buffer and wait for onWritable + stream_buffer.release(); + } else { + if (stream.ended) { + // done sending everything so we can release the buffer and detach the stream + this.state.request_stage = .done; + stream_buffer.release(); + stream.detach(); + } else { + // only report drain if we sent everything and previously had something to send + if (!wasEmpty) { + stream_buffer.reportDrain(); + } + // release the buffer so the main thread can use it to send more data + stream_buffer.release(); + } + } +} + pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket) void { if (this.signals.get(.aborted)) { this.closeAndAbort(is_ssl, socket); @@ -3431,14 +1124,13 @@ pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_s switch (this.state.original_request_body) { .bytes => { const to_send = this.state.request_body; - const amount = socket.write(to_send, true); - if (amount < 0) { - this.closeAndFail(error.WriteFailed, is_ssl, socket); + const sent = writeToSocket(is_ssl, socket, to_send) catch |err| { + this.closeAndFail(err, is_ssl, socket); return; - } + }; - this.state.request_sent_len += @as(usize, @intCast(amount)); - this.state.request_body = this.state.request_body[@as(usize, @intCast(amount))..]; + this.state.request_sent_len += sent; + this.state.request_body = this.state.request_body[sent..]; if (this.state.request_body.len == 0) { this.state.request_stage = .done; @@ -3446,30 +1138,8 @@ pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_s } }, .stream => { - var stream = &this.state.original_request_body.stream; - stream.has_backpressure = false; - // to simplify things, the buffer contains the raw data; we just need to flush it to the socket - if (stream.buffer.isNotEmpty()) { - const to_send = stream.buffer.slice(); - const amount = socket.write(to_send, true); - if (amount < 0) { - this.closeAndFail(error.WriteFailed, is_ssl, socket); - return; - } - this.state.request_sent_len += @as(usize, @intCast(amount)); - stream.buffer.cursor += @intCast(amount); - if (amount < to_send.len) { - stream.has_backpressure = true; - } - if (stream.buffer.isEmpty()) { - stream.buffer.reset(); - } - } - if (stream.hasEnded()) { - this.state.request_stage = .done; - stream.buffer.deinit(); - return; - } + // flush without adding any new data + this.flushStream(is_ssl, socket); }, .sendfile => |*sendfile| { if (comptime is_ssl) { @@ -3500,10 +1170,10 @@ pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_s this.setTimeout(socket, 5); const to_send = this.state.request_body; - const amount = proxy.writeData(to_send) catch return; // just
wait and retry when onWritable! if closed internally will call proxy.onClose + const sent = proxy.writeData(to_send) catch return; // just wait and retry when onWritable! if closed internally will call proxy.onClose - this.state.request_sent_len += @as(usize, @intCast(amount)); - this.state.request_body = this.state.request_body[@as(usize, @intCast(amount))..]; + this.state.request_sent_len += sent; + this.state.request_body = this.state.request_body[sent..]; if (this.state.request_body.len == 0) { this.state.request_stage = .done; @@ -3511,25 +1181,7 @@ pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_s } }, .stream => { - var stream = &this.state.original_request_body.stream; - stream.has_backpressure = false; - this.setTimeout(socket, 5); - - // to simplify things here the buffer contains the raw data we just need to flush to the socket it - if (stream.buffer.isNotEmpty()) { - const to_send = stream.buffer.slice(); - const amount = proxy.writeData(to_send) catch return; // just wait and retry when onWritable! if closed internally will call proxy.onClose - this.state.request_sent_len += amount; - stream.buffer.cursor += @truncate(amount); - if (amount < to_send.len) { - stream.has_backpressure = true; - } - } - if (stream.hasEnded()) { - this.state.request_stage = .done; - stream.buffer.deinit(); - return; - } + this.flushStream(is_ssl, socket); }, .sendfile => { @panic("sendfile is only supported without SSL. This code should never have been reached!"); @@ -4763,186 +2415,53 @@ pub fn handleResponseMetadata( const assert = bun.assert; // Exists for heap stats reasons. -const ThreadlocalAsyncHTTP = struct { +pub const ThreadlocalAsyncHTTP = struct { pub const new = bun.TrivialNew(@This()); pub const deinit = bun.TrivialDeinit(@This()); async_http: AsyncHTTP, }; -pub const Headers = struct { - pub const Entry = struct { - name: Api.StringPointer, - value: Api.StringPointer, +const bun = @import("bun"); +const picohttp = bun.picohttp; +const JSC = bun.JSC; +const string = bun.string; +const Output = bun.Output; +const Global = bun.Global; +const Environment = bun.Environment; +const strings = bun.strings; +const MutableString = bun.MutableString; +const FeatureFlags = bun.FeatureFlags; - pub const List = bun.MultiArrayList(Entry); - }; +const std = @import("std"); +const URL = @import("./url.zig").URL; - entries: Entry.List = .{}, - buf: std.ArrayListUnmanaged(u8) = .{}, - allocator: std.mem.Allocator, - - pub fn memoryCost(this: *const Headers) usize { - return this.buf.items.len + this.entries.memoryCost(); - } - - pub fn clone(this: *Headers) !Headers { - return Headers{ - .entries = try this.entries.clone(this.allocator), - .buf = try this.buf.clone(this.allocator), - .allocator = this.allocator, - }; - } - - pub fn get(this: *const Headers, name: []const u8) ?[]const u8 { - const entries = this.entries.slice(); - const names = entries.items(.name); - const values = entries.items(.value); - for (names, 0..) 
|name_ptr, i| { - if (bun.strings.eqlCaseInsensitiveASCII(this.asStr(name_ptr), name, true)) { - return this.asStr(values[i]); - } - } - - return null; - } - - pub fn append(this: *Headers, name: []const u8, value: []const u8) !void { - var offset: u32 = @truncate(this.buf.items.len); - try this.buf.ensureUnusedCapacity(this.allocator, name.len + value.len); - const name_ptr = Api.StringPointer{ - .offset = offset, - .length = @truncate(name.len), - }; - this.buf.appendSliceAssumeCapacity(name); - offset = @truncate(this.buf.items.len); - this.buf.appendSliceAssumeCapacity(value); - - const value_ptr = Api.StringPointer{ - .offset = offset, - .length = @truncate(value.len), - }; - try this.entries.append(this.allocator, .{ - .name = name_ptr, - .value = value_ptr, - }); - } - - pub fn deinit(this: *Headers) void { - this.entries.deinit(this.allocator); - this.buf.clearAndFree(this.allocator); - } - pub fn getContentType(this: *const Headers) ?[]const u8 { - if (this.entries.len == 0 or this.buf.items.len == 0) { - return null; - } - const header_entries = this.entries.slice(); - const header_names = header_entries.items(.name); - const header_values = header_entries.items(.value); - - for (header_names, 0..header_names.len) |name, i| { - if (bun.strings.eqlCaseInsensitiveASCII(this.asStr(name), "content-type", true)) { - return this.asStr(header_values[i]); - } - } - return null; - } - pub fn asStr(this: *const Headers, ptr: Api.StringPointer) []const u8 { - return if (ptr.offset + ptr.length <= this.buf.items.len) - this.buf.items[ptr.offset..][0..ptr.length] - else - ""; - } - - pub const Options = struct { - body: ?*const Blob.Any = null, - }; - - pub fn fromPicoHttpHeaders(headers: []const picohttp.Header, allocator: std.mem.Allocator) !Headers { - const header_count = headers.len; - var result = Headers{ - .entries = .{}, - .buf = .{}, - .allocator = allocator, - }; - - var buf_len: usize = 0; - for (headers) |header| { - buf_len += header.name.len + header.value.len; - } - result.entries.ensureTotalCapacity(allocator, header_count) catch bun.outOfMemory(); - result.entries.len = headers.len; - result.buf.ensureTotalCapacityPrecise(allocator, buf_len) catch bun.outOfMemory(); - result.buf.items.len = buf_len; - var offset: u32 = 0; - for (headers, 0..headers.len) |header, i| { - const name_offset = offset; - bun.copy(u8, result.buf.items[offset..][0..header.name.len], header.name); - offset += @truncate(header.name.len); - const value_offset = offset; - bun.copy(u8, result.buf.items[offset..][0..header.value.len], header.value); - offset += @truncate(header.value.len); - - result.entries.set(i, .{ - .name = .{ - .offset = name_offset, - .length = @truncate(header.name.len), - }, - .value = .{ - .offset = value_offset, - .length = @truncate(header.value.len), - }, - }); - } - return result; - } - - pub fn from(fetch_headers_ref: ?*FetchHeaders, allocator: std.mem.Allocator, options: Options) !Headers { - var header_count: u32 = 0; - var buf_len: u32 = 0; - if (fetch_headers_ref) |headers_ref| - headers_ref.count(&header_count, &buf_len); - var headers = Headers{ - .entries = .{}, - .buf = .{}, - .allocator = allocator, - }; - const buf_len_before_content_type = buf_len; - const needs_content_type = brk: { - if (options.body) |body| { - if (body.hasContentTypeFromUser() and (fetch_headers_ref == null or !fetch_headers_ref.?.fastHas(.ContentType))) { - header_count += 1; - buf_len += @as(u32, @truncate(body.contentType().len + "Content-Type".len)); - break :brk true; - } - } - break :brk 
false; - }; - headers.entries.ensureTotalCapacity(allocator, header_count) catch bun.outOfMemory(); - headers.entries.len = header_count; - headers.buf.ensureTotalCapacityPrecise(allocator, buf_len) catch bun.outOfMemory(); - headers.buf.items.len = buf_len; - var sliced = headers.entries.slice(); - var names = sliced.items(.name); - var values = sliced.items(.value); - if (fetch_headers_ref) |headers_ref| - headers_ref.copyTo(names.ptr, values.ptr, headers.buf.items.ptr); - - // TODO: maybe we should send Content-Type header first instead of last? - if (needs_content_type) { - bun.copy(u8, headers.buf.items[buf_len_before_content_type..], "Content-Type"); - names[header_count - 1] = .{ - .offset = buf_len_before_content_type, - .length = "Content-Type".len, - }; - - bun.copy(u8, headers.buf.items[buf_len_before_content_type + "Content-Type".len ..], options.body.?.contentType()); - values[header_count - 1] = .{ - .offset = buf_len_before_content_type + @as(u32, "Content-Type".len), - .length = @as(u32, @truncate(options.body.?.contentType().len)), - }; - } - - return headers; - } -}; +pub const Method = @import("./http/Method.zig").Method; +const Api = @import("./api/schema.zig").Api; +const HTTPClient = @This(); +const StringBuilder = bun.StringBuilder; +const posix = std.posix; +const SOCK = posix.SOCK; +const Arena = @import("./allocators/mimalloc_arena.zig").Arena; +const BoringSSL = bun.BoringSSL.c; +const Progress = bun.Progress; +const SSLConfig = @import("./bun.js/api/server.zig").ServerConfig.SSLConfig; +const uws = bun.uws; +const HTTPCertError = @import("./http/HTTPCertError.zig"); +const ProxyTunnel = @import("./http/ProxyTunnel.zig"); +pub const Headers = @import("./http/Headers.zig"); +pub const MimeType = @import("./http/MimeType.zig"); +pub const URLPath = @import("./http/URLPath.zig"); +pub const Encoding = @import("./http/Encoding.zig").Encoding; +pub const Decompressor = @import("./http/Decompressor.zig").Decompressor; +pub const Signals = @import("./http/Signals.zig"); +pub const ThreadSafeStreamBuffer = @import("./http/ThreadSafeStreamBuffer.zig"); +pub const HTTPThread = @import("./http/HTTPThread.zig"); +pub const NewHTTPContext = @import("./http/HTTPContext.zig").NewHTTPContext; +pub const AsyncHTTP = @import("./http/AsyncHTTP.zig"); +pub const InternalState = @import("./http/InternalState.zig"); +pub const CertificateInfo = @import("./http/CertificateInfo.zig"); +pub const FetchRedirect = @import("./http/FetchRedirect.zig").FetchRedirect; +pub const InitError = @import("./http/InitError.zig").InitError; +pub const HTTPRequestBody = @import("./http/HTTPRequestBody.zig").HTTPRequestBody; +pub const SendFile = @import("./http/SendFile.zig"); diff --git a/src/http/AsyncHTTP.zig b/src/http/AsyncHTTP.zig new file mode 100644 index 0000000000..c9d9f3800d --- /dev/null +++ b/src/http/AsyncHTTP.zig @@ -0,0 +1,523 @@ +const AsyncHTTP = @This(); + +request: ?picohttp.Request = null, +response: ?picohttp.Response = null, +request_headers: Headers.Entry.List = .empty, +response_headers: Headers.Entry.List = .empty, +response_buffer: *MutableString, +request_body: HTTPRequestBody = .{ .bytes = "" }, +allocator: std.mem.Allocator, +request_header_buf: string = "", +method: Method = Method.GET, +url: URL, +http_proxy: ?URL = null, +real: ?*AsyncHTTP = null, +next: ?*AsyncHTTP = null, + +task: ThreadPool.Task = ThreadPool.Task{ .callback = &startAsyncHTTP }, +result_callback: HTTPClientResult.Callback = undefined, + +redirected: bool = false, + +response_encoding: Encoding = 
Encoding.identity, +verbose: HTTPVerboseLevel = .none, + +client: HTTPClient = undefined, +waitingDeffered: bool = false, +finalized: bool = false, +err: ?anyerror = null, +async_http_id: u32 = 0, + +state: AtomicState = AtomicState.init(State.pending), +elapsed: u64 = 0, +gzip_elapsed: u64 = 0, + +signals: Signals = .{}, + +pub var active_requests_count = std.atomic.Value(usize).init(0); +pub var max_simultaneous_requests = std.atomic.Value(usize).init(256); + +pub fn loadEnv(allocator: std.mem.Allocator, logger: *Log, env: *DotEnv.Loader) void { + if (env.get("BUN_CONFIG_MAX_HTTP_REQUESTS")) |max_http_requests| { + const max = std.fmt.parseInt(u16, max_http_requests, 10) catch { + logger.addErrorFmt( + null, + Loc.Empty, + allocator, + "BUN_CONFIG_MAX_HTTP_REQUESTS value \"{s}\" is not a valid integer between 1 and 65535", + .{max_http_requests}, + ) catch unreachable; + return; + }; + if (max == 0) { + logger.addWarningFmt( + null, + Loc.Empty, + allocator, + "BUN_CONFIG_MAX_HTTP_REQUESTS value must be a number between 1 and 65535", + .{}, + ) catch unreachable; + return; + } + AsyncHTTP.max_simultaneous_requests.store(max, .monotonic); + } +} + +pub fn signalHeaderProgress(this: *AsyncHTTP) void { + var progress = this.signals.header_progress orelse return; + progress.store(true, .release); +} + +pub fn enableBodyStreaming(this: *AsyncHTTP) void { + var stream = this.signals.body_streaming orelse return; + stream.store(true, .release); +} + +pub fn clearData(this: *AsyncHTTP) void { + this.response_headers.deinit(this.allocator); + this.response_headers = .{}; + this.request = null; + this.response = null; + this.client.unix_socket_path.deinit(); + this.client.unix_socket_path = JSC.ZigString.Slice.empty; +} + +pub const State = enum(u32) { + pending = 0, + scheduled = 1, + sending = 2, + success = 3, + fail = 4, +}; +const AtomicState = std.atomic.Value(State); + +pub const Options = struct { + http_proxy: ?URL = null, + hostname: ?[]u8 = null, + signals: ?Signals = null, + unix_socket_path: ?JSC.ZigString.Slice = null, + disable_timeout: ?bool = null, + verbose: ?HTTPVerboseLevel = null, + disable_keepalive: ?bool = null, + disable_decompression: ?bool = null, + reject_unauthorized: ?bool = null, + tls_props: ?*SSLConfig = null, +}; + +const Preconnect = struct { + async_http: AsyncHTTP, + response_buffer: MutableString, + url: bun.URL, + is_url_owned: bool, + + pub const new = bun.TrivialNew(@This()); + + pub fn onResult(this: *Preconnect, _: *AsyncHTTP, _: HTTPClientResult) void { + this.response_buffer.deinit(); + this.async_http.clearData(); + this.async_http.client.deinit(); + if (this.is_url_owned) { + bun.default_allocator.free(this.url.href); + } + + bun.destroy(this); + } +}; + +pub fn preconnect( + url: URL, + is_url_owned: bool, +) void { + if (!FeatureFlags.is_fetch_preconnect_supported) { + if (is_url_owned) { + bun.default_allocator.free(url.href); + } + + return; + } + + var this = Preconnect.new(.{ + .async_http = undefined, + .response_buffer = MutableString{ .allocator = bun.http.default_allocator, .list = .{} }, + .url = url, + .is_url_owned = is_url_owned, + }); + + this.async_http = AsyncHTTP.init(bun.default_allocator, .GET, url, .{}, "", &this.response_buffer, "", HTTPClientResult.Callback.New(*Preconnect, Preconnect.onResult).init(this), .manual, .{}); + this.async_http.client.flags.is_preconnect_only = true; + + bun.http.http_thread.schedule(Batch.from(&this.async_http.task)); +} + +pub fn init( + allocator: std.mem.Allocator, + method: Method, + url: URL, + 
headers: Headers.Entry.List, + headers_buf: string, + response_buffer: *MutableString, + request_body: []const u8, + callback: HTTPClientResult.Callback, + redirect_type: FetchRedirect, + options: Options, +) AsyncHTTP { + var this = AsyncHTTP{ + .allocator = allocator, + .url = url, + .method = method, + .request_headers = headers, + .request_header_buf = headers_buf, + .request_body = .{ .bytes = request_body }, + .response_buffer = response_buffer, + .result_callback = callback, + .http_proxy = options.http_proxy, + .signals = options.signals orelse .{}, + .async_http_id = if (options.signals != null and options.signals.?.aborted != null) bun.http.async_http_id_monotonic.fetchAdd(1, .monotonic) else 0, + }; + + this.client = .{ + .allocator = allocator, + .method = method, + .url = url, + .header_entries = headers, + .header_buf = headers_buf, + .hostname = options.hostname, + .signals = options.signals orelse this.signals, + .async_http_id = this.async_http_id, + .http_proxy = this.http_proxy, + .redirect_type = redirect_type, + }; + if (options.unix_socket_path) |val| { + assert(this.client.unix_socket_path.length() == 0); + this.client.unix_socket_path = val; + } + if (options.disable_timeout) |val| { + this.client.flags.disable_timeout = val; + } + if (options.verbose) |val| { + this.client.verbose = val; + } + if (options.disable_decompression) |val| { + this.client.flags.disable_decompression = val; + } + if (options.disable_keepalive) |val| { + this.client.flags.disable_keepalive = val; + } + if (options.reject_unauthorized) |val| { + this.client.flags.reject_unauthorized = val; + } + if (options.tls_props) |val| { + this.client.tls_props = val; + } + + if (options.http_proxy) |proxy| { + // Username between 0 and 4096 chars + if (proxy.username.len > 0 and proxy.username.len < 4096) { + // Password between 0 and 4096 chars + if (proxy.password.len > 0 and proxy.password.len < 4096) { + // decode password + var password_buffer = std.mem.zeroes([4096]u8); + var password_stream = std.io.fixedBufferStream(&password_buffer); + const password_writer = password_stream.writer(); + const PassWriter = @TypeOf(password_writer); + const password_len = PercentEncoding.decode(PassWriter, password_writer, proxy.password) catch { + // Invalid proxy authorization + return this; + }; + const password = password_buffer[0..password_len]; + + // Decode username + var username_buffer = std.mem.zeroes([4096]u8); + var username_stream = std.io.fixedBufferStream(&username_buffer); + const username_writer = username_stream.writer(); + const UserWriter = @TypeOf(username_writer); + const username_len = PercentEncoding.decode(UserWriter, username_writer, proxy.username) catch { + // Invalid proxy authorization + return this; + }; + const username = username_buffer[0..username_len]; + + // concat user and password + const auth = std.fmt.allocPrint(allocator, "{s}:{s}", .{ username, password }) catch unreachable; + defer allocator.free(auth); + const size = std.base64.standard.Encoder.calcSize(auth.len); + var buf = this.allocator.alloc(u8, size + "Basic ".len) catch unreachable; + const encoded = std.base64.url_safe.Encoder.encode(buf["Basic ".len..], auth); + buf[0.."Basic ".len].* = "Basic ".*; + this.client.proxy_authorization = buf[0 .. 
"Basic ".len + encoded.len]; + } else { + //Decode username + var username_buffer = std.mem.zeroes([4096]u8); + var username_stream = std.io.fixedBufferStream(&username_buffer); + const username_writer = username_stream.writer(); + const UserWriter = @TypeOf(username_writer); + const username_len = PercentEncoding.decode(UserWriter, username_writer, proxy.username) catch { + // Invalid proxy authorization + return this; + }; + const username = username_buffer[0..username_len]; + + // only use user + const size = std.base64.standard.Encoder.calcSize(username_len); + var buf = allocator.alloc(u8, size + "Basic ".len) catch unreachable; + const encoded = std.base64.url_safe.Encoder.encode(buf["Basic ".len..], username); + buf[0.."Basic ".len].* = "Basic ".*; + this.client.proxy_authorization = buf[0 .. "Basic ".len + encoded.len]; + } + } + } + return this; +} + +pub fn initSync( + allocator: std.mem.Allocator, + method: Method, + url: URL, + headers: Headers.Entry.List, + headers_buf: string, + response_buffer: *MutableString, + request_body: []const u8, + http_proxy: ?URL, + hostname: ?[]u8, + redirect_type: FetchRedirect, +) AsyncHTTP { + return @This().init( + allocator, + method, + url, + headers, + headers_buf, + response_buffer, + request_body, + undefined, + redirect_type, + .{ + .http_proxy = http_proxy, + .hostname = hostname, + }, + ); +} + +fn reset(this: *AsyncHTTP) !void { + const aborted = this.client.aborted; + this.client = try HTTPClient.init(this.allocator, this.method, this.client.url, this.client.header_entries, this.client.header_buf, aborted); + this.client.http_proxy = this.http_proxy; + + if (this.http_proxy) |proxy| { + //TODO: need to understand how is possible to reuse Proxy with TSL, so disable keepalive if url is HTTPS + this.client.flags.disable_keepalive = this.url.isHTTPS(); + // Username between 0 and 4096 chars + if (proxy.username.len > 0 and proxy.username.len < 4096) { + // Password between 0 and 4096 chars + if (proxy.password.len > 0 and proxy.password.len < 4096) { + // decode password + var password_buffer = std.mem.zeroes([4096]u8); + var password_stream = std.io.fixedBufferStream(&password_buffer); + const password_writer = password_stream.writer(); + const PassWriter = @TypeOf(password_writer); + const password_len = PercentEncoding.decode(PassWriter, password_writer, proxy.password) catch { + // Invalid proxy authorization + return this; + }; + const password = password_buffer[0..password_len]; + + // Decode username + var username_buffer = std.mem.zeroes([4096]u8); + var username_stream = std.io.fixedBufferStream(&username_buffer); + const username_writer = username_stream.writer(); + const UserWriter = @TypeOf(username_writer); + const username_len = PercentEncoding.decode(UserWriter, username_writer, proxy.username) catch { + // Invalid proxy authorization + return this; + }; + + const username = username_buffer[0..username_len]; + + // concat user and password + const auth = std.fmt.allocPrint(this.allocator, "{s}:{s}", .{ username, password }) catch unreachable; + defer this.allocator.free(auth); + const size = std.base64.standard.Encoder.calcSize(auth.len); + var buf = this.allocator.alloc(u8, size + "Basic ".len) catch unreachable; + const encoded = std.base64.url_safe.Encoder.encode(buf["Basic ".len..], auth); + buf[0.."Basic ".len].* = "Basic ".*; + this.client.proxy_authorization = buf[0 .. 
"Basic ".len + encoded.len]; + } else { + //Decode username + var username_buffer = std.mem.zeroes([4096]u8); + var username_stream = std.io.fixedBufferStream(&username_buffer); + const username_writer = username_stream.writer(); + const UserWriter = @TypeOf(username_writer); + const username_len = PercentEncoding.decode(UserWriter, username_writer, proxy.username) catch { + // Invalid proxy authorization + return this; + }; + const username = username_buffer[0..username_len]; + + // only use user + const size = std.base64.standard.Encoder.calcSize(username_len); + var buf = this.allocator.alloc(u8, size + "Basic ".len) catch unreachable; + const encoded = std.base64.url_safe.Encoder.encode(buf["Basic ".len..], username); + buf[0.."Basic ".len].* = "Basic ".*; + this.client.proxy_authorization = buf[0 .. "Basic ".len + encoded.len]; + } + } + } +} + +pub fn schedule(this: *AsyncHTTP, _: std.mem.Allocator, batch: *ThreadPool.Batch) void { + this.state.store(.scheduled, .monotonic); + batch.push(ThreadPool.Batch.from(&this.task)); +} + +fn sendSyncCallback(this: *SingleHTTPChannel, async_http: *AsyncHTTP, result: HTTPClientResult) void { + async_http.real.?.* = async_http.*; + async_http.real.?.response_buffer = async_http.response_buffer; + this.channel.writeItem(result) catch unreachable; +} + +pub fn sendSync(this: *AsyncHTTP) anyerror!picohttp.Response { + HTTPThread.init(&.{}); + + var ctx = try bun.default_allocator.create(SingleHTTPChannel); + ctx.* = SingleHTTPChannel.init(); + this.result_callback = HTTPClientResult.Callback.New( + *SingleHTTPChannel, + sendSyncCallback, + ).init(ctx); + + var batch = bun.ThreadPool.Batch{}; + this.schedule(bun.default_allocator, &batch); + bun.http.http_thread.schedule(batch); + + const result = ctx.channel.readItem() catch unreachable; + if (result.fail) |err| { + return err; + } + assert(result.metadata != null); + return result.metadata.?.response; +} + +pub fn onAsyncHTTPCallback(this: *AsyncHTTP, async_http: *AsyncHTTP, result: HTTPClientResult) void { + assert(this.real != null); + + var callback = this.result_callback; + this.elapsed = bun.http.http_thread.timer.read() -| this.elapsed; + + // TODO: this condition seems wrong: if we started with a non-default value, we might + // report a redirect even if none happened + this.redirected = this.client.flags.redirected; + if (result.isSuccess()) { + this.err = null; + if (result.metadata) |metadata| { + this.response = metadata.response; + } + this.state.store(.success, .monotonic); + } else { + this.err = result.fail; + this.response = null; + this.state.store(State.fail, .monotonic); + } + + if (comptime Environment.enable_logs) { + if (bun.http.socket_async_http_abort_tracker.count() > 0) { + log("bun.http.socket_async_http_abort_tracker count: {d}", .{bun.http.socket_async_http_abort_tracker.count()}); + } + } + + if (bun.http.socket_async_http_abort_tracker.capacity() > 10_000 and bun.http.socket_async_http_abort_tracker.count() < 100) { + bun.http.socket_async_http_abort_tracker.shrinkAndFree(bun.http.socket_async_http_abort_tracker.count()); + } + + if (result.has_more) { + callback.function(callback.ctx, async_http, result); + } else { + { + this.client.deinit(); + var threadlocal_http: *bun.http.ThreadlocalAsyncHTTP = @fieldParentPtr("async_http", async_http); + defer threadlocal_http.deinit(); + log("onAsyncHTTPCallback: {any}", .{std.fmt.fmtDuration(this.elapsed)}); + callback.function(callback.ctx, async_http, result); + } + + const active_requests = 
AsyncHTTP.active_requests_count.fetchSub(1, .monotonic); + assert(active_requests > 0); + } + + if (!bun.http.http_thread.queued_tasks.isEmpty() and AsyncHTTP.active_requests_count.load(.monotonic) < AsyncHTTP.max_simultaneous_requests.load(.monotonic)) { + bun.http.http_thread.loop.loop.wakeup(); + } +} + +pub fn startAsyncHTTP(task: *Task) void { + var this: *AsyncHTTP = @fieldParentPtr("task", task); + this.onStart(); +} + +pub fn onStart(this: *AsyncHTTP) void { + _ = active_requests_count.fetchAdd(1, .monotonic); + this.err = null; + this.state.store(.sending, .monotonic); + this.client.result_callback = HTTPClientResult.Callback.New(*AsyncHTTP, onAsyncHTTPCallback).init( + this, + ); + + this.elapsed = bun.http.http_thread.timer.read(); + if (this.response_buffer.list.capacity == 0) { + this.response_buffer.allocator = bun.http.default_allocator; + } + this.client.start(this.request_body, this.response_buffer); +} + +const std = @import("std"); +const bun = @import("bun"); +const assert = bun.assert; +const picohttp = bun.picohttp; +const string = bun.string; +const Environment = bun.Environment; +const FeatureFlags = bun.FeatureFlags; +const JSC = bun.JSC; +const Loc = bun.logger.Loc; +const Log = bun.logger.Log; + +const HTTPClient = bun.http; +const Method = HTTPClient.Method; +const HTTPClientResult = HTTPClient.HTTPClientResult; +const HTTPVerboseLevel = HTTPClient.HTTPVerboseLevel; +const HTTPRequestBody = HTTPClient.HTTPRequestBody; +const FetchRedirect = HTTPClient.FetchRedirect; +const Signals = HTTPClient.Signals; +const Encoding = @import("./Encoding.zig").Encoding; +const URL = @import("../url.zig").URL; +const PercentEncoding = @import("../url.zig").PercentEncoding; +const MutableString = bun.MutableString; +const Headers = @import("./Headers.zig"); +const HTTPThread = @import("./HTTPThread.zig"); +const DotEnv = @import("../env_loader.zig"); +const log = bun.Output.scoped(.AsyncHTTP, false); +const ThreadPool = bun.ThreadPool; +const Task = ThreadPool.Task; +const Batch = bun.ThreadPool.Batch; +const SSLConfig = @import("../bun.js/api/server.zig").ServerConfig.SSLConfig; + +const HTTPCallbackPair = .{ *AsyncHTTP, HTTPClientResult }; +const Channel = @import("../sync.zig").Channel; +pub const HTTPChannel = Channel(HTTPCallbackPair, .{ .Static = 1000 }); +// 32 pointers much cheaper than 1000 pointers +const SingleHTTPChannel = struct { + const SingleHTTPCHannel_ = Channel(HTTPClientResult, .{ .Static = 8 }); + channel: SingleHTTPCHannel_, + pub fn reset(_: *@This()) void {} + pub fn init() SingleHTTPChannel { + return SingleHTTPChannel{ .channel = SingleHTTPCHannel_.init() }; + } +}; + +pub const HTTPChannelContext = struct { + http: AsyncHTTP = undefined, + channel: *HTTPChannel, + + pub fn callback(data: HTTPCallbackPair) void { + var this: *HTTPChannelContext = @fieldParentPtr("http", data.@"0"); + this.channel.writeItem(data) catch unreachable; + } +}; diff --git a/src/http/CertificateInfo.zig b/src/http/CertificateInfo.zig new file mode 100644 index 0000000000..7adb777755 --- /dev/null +++ b/src/http/CertificateInfo.zig @@ -0,0 +1,14 @@ +const CertificateInfo = @This(); + +cert: []const u8, +cert_error: HTTPCertError, +hostname: []const u8, +pub fn deinit(this: *const CertificateInfo, allocator: std.mem.Allocator) void { + allocator.free(this.cert); + allocator.free(this.cert_error.code); + allocator.free(this.cert_error.reason); + allocator.free(this.hostname); +} + +const std = @import("std"); +const HTTPCertError = @import("./HTTPCertError.zig"); diff --git 
a/src/http/Decompressor.zig b/src/http/Decompressor.zig new file mode 100644 index 0000000000..d6d20939ec --- /dev/null +++ b/src/http/Decompressor.zig @@ -0,0 +1,120 @@ +pub const Decompressor = union(enum) { + zlib: *Zlib.ZlibReaderArrayList, + brotli: *Brotli.BrotliReaderArrayList, + zstd: *zstd.ZstdReaderArrayList, + none: void, + + pub fn deinit(this: *Decompressor) void { + switch (this.*) { + inline .brotli, .zlib, .zstd => |that| { + that.deinit(); + this.* = .{ .none = {} }; + }, + .none => {}, + } + } + + pub fn updateBuffers(this: *Decompressor, encoding: Encoding, buffer: []const u8, body_out_str: *MutableString) !void { + if (!encoding.isCompressed()) { + return; + } + + if (this.* == .none) { + switch (encoding) { + .gzip, .deflate => { + this.* = .{ + .zlib = try Zlib.ZlibReaderArrayList.initWithOptionsAndListAllocator( + buffer, + &body_out_str.list, + body_out_str.allocator, + bun.http.default_allocator, + .{ + // zlib.MAX_WBITS = 15 + // to (de-)compress deflate format, use wbits = -zlib.MAX_WBITS + // to (de-)compress deflate format with headers we use wbits = 0 (we can detect the first byte using 120) + // to (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16 + .windowBits = if (encoding == Encoding.gzip) Zlib.MAX_WBITS | 16 else (if (buffer.len > 1 and buffer[0] == 120) 0 else -Zlib.MAX_WBITS), + }, + ), + }; + return; + }, + .brotli => { + this.* = .{ + .brotli = try Brotli.BrotliReaderArrayList.newWithOptions( + buffer, + &body_out_str.list, + body_out_str.allocator, + .{}, + ), + }; + return; + }, + .zstd => { + this.* = .{ + .zstd = try zstd.ZstdReaderArrayList.initWithListAllocator( + buffer, + &body_out_str.list, + body_out_str.allocator, + bun.http.default_allocator, + ), + }; + return; + }, + else => @panic("Invalid encoding. This code should not be reachable"), + } + } + + switch (this.*) { + .zlib => |reader| { + bun.assert(reader.zlib.avail_in == 0); + reader.zlib.next_in = buffer.ptr; + reader.zlib.avail_in = @as(u32, @truncate(buffer.len)); + + const initial = body_out_str.list.items.len; + body_out_str.list.expandToCapacity(); + if (body_out_str.list.capacity == initial) { + try body_out_str.list.ensureUnusedCapacity(body_out_str.allocator, 4096); + body_out_str.list.expandToCapacity(); + } + reader.list = body_out_str.list; + reader.zlib.next_out = @ptrCast(&body_out_str.list.items[initial]); + reader.zlib.avail_out = @as(u32, @truncate(body_out_str.list.capacity - initial)); + // we reset the total out so we can track how much we decompressed this time + reader.zlib.total_out = @truncate(initial); + }, + .brotli => |reader| { + reader.input = buffer; + reader.total_in = 0; + + const initial = body_out_str.list.items.len; + reader.list = body_out_str.list; + reader.total_out = @truncate(initial); + }, + .zstd => |reader| { + reader.input = buffer; + reader.total_in = 0; + + const initial = body_out_str.list.items.len; + reader.list = body_out_str.list; + reader.total_out = @truncate(initial); + }, + else => @panic("Invalid encoding. 
This code should not be reachable"), + } + } + + pub fn readAll(this: *Decompressor, is_done: bool) !void { + switch (this.*) { + .zlib => |zlib| try zlib.readAll(), + .brotli => |brotli| try brotli.readAll(is_done), + .zstd => |reader| try reader.readAll(is_done), + .none => {}, + } + } +}; +const bun = @import("bun"); +const MutableString = bun.MutableString; +const Zlib = @import("../zlib.zig"); +const Brotli = bun.brotli; +const zstd = bun.zstd; +const Encoding = @import("./Encoding.zig").Encoding; diff --git a/src/http/Encoding.zig b/src/http/Encoding.zig new file mode 100644 index 0000000000..5a4b046bd0 --- /dev/null +++ b/src/http/Encoding.zig @@ -0,0 +1,22 @@ +pub const Encoding = enum { + identity, + gzip, + deflate, + brotli, + zstd, + chunked, + + pub fn canUseLibDeflate(this: Encoding) bool { + return switch (this) { + .gzip, .deflate => true, + else => false, + }; + } + + pub fn isCompressed(this: Encoding) bool { + return switch (this) { + .brotli, .gzip, .deflate, .zstd => true, + else => false, + }; + } +}; diff --git a/src/http/FetchRedirect.zig b/src/http/FetchRedirect.zig new file mode 100644 index 0000000000..9c0f34121b --- /dev/null +++ b/src/http/FetchRedirect.zig @@ -0,0 +1,13 @@ +pub const FetchRedirect = enum(u8) { + follow, + manual, + @"error", + + pub const Map = bun.ComptimeStringMap(FetchRedirect, .{ + .{ "follow", .follow }, + .{ "manual", .manual }, + .{ "error", .@"error" }, + }); +}; + +const bun = @import("bun"); diff --git a/src/http/HTTPCertError.zig b/src/http/HTTPCertError.zig new file mode 100644 index 0000000000..8112703440 --- /dev/null +++ b/src/http/HTTPCertError.zig @@ -0,0 +1,3 @@ +error_no: i32 = 0, +code: [:0]const u8 = "", +reason: [:0]const u8 = "", diff --git a/src/http/HTTPContext.zig b/src/http/HTTPContext.zig new file mode 100644 index 0000000000..aae2e0e5cb --- /dev/null +++ b/src/http/HTTPContext.zig @@ -0,0 +1,506 @@ +pub fn NewHTTPContext(comptime ssl: bool) type { + return struct { + const pool_size = 64; + const PooledSocket = struct { + http_socket: HTTPSocket, + hostname_buf: [MAX_KEEPALIVE_HOSTNAME]u8 = undefined, + hostname_len: u8 = 0, + port: u16 = 0, + /// Set when `rejectUnauthorized` was `false` and the connection failed to verify; such a + /// socket may only be reused by requests that also disable certificate verification. + did_have_handshaking_error_while_reject_unauthorized_is_false: bool = false, + }; + + pub fn markSocketAsDead(socket: HTTPSocket) void { + if (socket.ext(**anyopaque)) |ctx| { + ctx.* = bun.cast(**anyopaque, ActiveSocket.init(&dead_socket).ptr()); + } + } + + pub fn terminateSocket(socket: HTTPSocket) void { + markSocketAsDead(socket); + socket.close(.failure); + } + + pub fn closeSocket(socket: HTTPSocket) void { + markSocketAsDead(socket); + socket.close(.normal); + } + + fn getTagged(ptr: *anyopaque) ActiveSocket { + return ActiveSocket.from(bun.cast(**anyopaque, ptr).*); + } + + pub fn getTaggedFromSocket(socket: HTTPSocket) ActiveSocket { + if (socket.ext(anyopaque)) |ctx| { + return getTagged(ctx); + } + return ActiveSocket.init(&dead_socket); + } + + pub const PooledSocketHiveAllocator = bun.HiveArray(PooledSocket, pool_size); + + pending_sockets: PooledSocketHiveAllocator, + us_socket_context: *uws.SocketContext, + + const Context = @This(); + pub const HTTPSocket = uws.NewSocketHandler(ssl); + + pub fn context() *@This() { + if (comptime ssl) { + return &bun.http.http_thread.https_context; + } else { + return &bun.http.http_thread.http_context; + } + } + + const ActiveSocket = TaggedPointerUnion(.{ + *DeadSocket, + HTTPClient, + PooledSocket, + }); + const ssl_int = @as(c_int, 
@intFromBool(ssl)); + + const MAX_KEEPALIVE_HOSTNAME = 128; + + pub fn sslCtx(this: *@This()) *BoringSSL.SSL_CTX { + if (comptime !ssl) { + unreachable; + } + + return @as(*BoringSSL.SSL_CTX, @ptrCast(this.us_socket_context.getNativeHandle(true))); + } + + pub fn deinit(this: *@This()) void { + this.us_socket_context.deinit(ssl); + bun.default_allocator.destroy(this); + } + + pub fn initWithClientConfig(this: *@This(), client: *HTTPClient) InitError!void { + if (!comptime ssl) { + @compileError("ssl only"); + } + var opts = client.tls_props.?.asUSockets(); + opts.request_cert = 1; + opts.reject_unauthorized = 0; + try this.initWithOpts(&opts); + } + + fn initWithOpts(this: *@This(), opts: *const uws.SocketContext.BunSocketContextOptions) InitError!void { + if (!comptime ssl) { + @compileError("ssl only"); + } + + var err: uws.create_bun_socket_error_t = .none; + const socket = uws.SocketContext.createSSLContext(bun.http.http_thread.loop.loop, @sizeOf(usize), opts.*, &err); + if (socket == null) { + return switch (err) { + .load_ca_file => error.LoadCAFile, + .invalid_ca_file => error.InvalidCAFile, + .invalid_ca => error.InvalidCA, + else => error.FailedToOpenSocket, + }; + } + this.us_socket_context = socket.?; + this.sslCtx().setup(); + + HTTPSocket.configure( + this.us_socket_context, + false, + anyopaque, + Handler, + ); + } + + pub fn initWithThreadOpts(this: *@This(), init_opts: *const HTTPThread.InitOpts) InitError!void { + if (!comptime ssl) { + @compileError("ssl only"); + } + var opts: uws.SocketContext.BunSocketContextOptions = .{ + .ca = if (init_opts.ca.len > 0) @ptrCast(init_opts.ca) else null, + .ca_count = @intCast(init_opts.ca.len), + .ca_file_name = if (init_opts.abs_ca_file_name.len > 0) init_opts.abs_ca_file_name else null, + .request_cert = 1, + }; + + try this.initWithOpts(&opts); + } + + pub fn init(this: *@This()) void { + if (comptime ssl) { + const opts: uws.SocketContext.BunSocketContextOptions = .{ + // we request the cert so we load root certs and can verify it + .request_cert = 1, + // we manually abort the connection if the hostname doesn't match + .reject_unauthorized = 0, + }; + var err: uws.create_bun_socket_error_t = .none; + this.us_socket_context = uws.SocketContext.createSSLContext(bun.http.http_thread.loop.loop, @sizeOf(usize), opts, &err).?; + + this.sslCtx().setup(); + } else { + this.us_socket_context = uws.SocketContext.createNoSSLContext(bun.http.http_thread.loop.loop, @sizeOf(usize)).?; + } + + HTTPSocket.configure( + this.us_socket_context, + false, + anyopaque, + Handler, + ); + } + + /// Attempt to keep the socket alive by reusing it for another request. + /// If no space is available, close the socket. + /// + /// If `did_have_handshaking_error_while_reject_unauthorized_is_false` + /// is set, then we can only reuse the socket for HTTP Keep Alive if + /// `reject_unauthorized` is set to `false`. 
+ pub fn releaseSocket(this: *@This(), socket: HTTPSocket, did_have_handshaking_error_while_reject_unauthorized_is_false: bool, hostname: []const u8, port: u16) void { + // log("releaseSocket(0x{})", .{bun.fmt.hexIntUpper(@intFromPtr(socket.socket))}); + + if (comptime Environment.allow_assert) { + assert(!socket.isClosed()); + assert(!socket.isShutdown()); + assert(socket.isEstablished()); + } + assert(hostname.len > 0); + assert(port > 0); + + if (hostname.len <= MAX_KEEPALIVE_HOSTNAME and !socket.isClosedOrHasError() and socket.isEstablished()) { + if (this.pending_sockets.get()) |pending| { + if (socket.ext(**anyopaque)) |ctx| { + ctx.* = bun.cast(**anyopaque, ActiveSocket.init(pending).ptr()); + } + socket.flush(); + socket.timeout(0); + socket.setTimeoutMinutes(5); + + pending.http_socket = socket; + pending.did_have_handshaking_error_while_reject_unauthorized_is_false = did_have_handshaking_error_while_reject_unauthorized_is_false; + @memcpy(pending.hostname_buf[0..hostname.len], hostname); + pending.hostname_len = @as(u8, @truncate(hostname.len)); + pending.port = port; + + log("Keep-Alive release {s}:{d}", .{ + hostname, + port, + }); + return; + } + } + log("close socket", .{}); + closeSocket(socket); + } + + pub const Handler = struct { + pub fn onOpen( + ptr: *anyopaque, + socket: HTTPSocket, + ) void { + const active = getTagged(ptr); + if (active.get(HTTPClient)) |client| { + if (client.onOpen(comptime ssl, socket)) |_| { + return; + } else |_| { + log("Unable to open socket", .{}); + terminateSocket(socket); + return; + } + } + + if (active.get(PooledSocket)) |pooled| { + addMemoryBackToPool(pooled); + return; + } + + log("Unexpected open on unknown socket", .{}); + terminateSocket(socket); + } + pub fn onHandshake( + ptr: *anyopaque, + socket: HTTPSocket, + success: i32, + ssl_error: uws.us_bun_verify_error_t, + ) void { + const handshake_success = if (success == 1) true else false; + + const handshake_error = HTTPCertError{ + .error_no = ssl_error.error_no, + .code = if (ssl_error.code == null) "" else ssl_error.code[0..bun.len(ssl_error.code) :0], + .reason = if (ssl_error.code == null) "" else ssl_error.reason[0..bun.len(ssl_error.reason) :0], + }; + + const active = getTagged(ptr); + if (active.get(HTTPClient)) |client| { + // handshake completed but we may have ssl errors + client.flags.did_have_handshaking_error = handshake_error.error_no != 0; + if (handshake_success) { + if (client.flags.reject_unauthorized) { + // only reject the connection if reject_unauthorized == true + if (client.flags.did_have_handshaking_error) { + client.closeAndFail(BoringSSL.getCertErrorFromNo(handshake_error.error_no), comptime ssl, socket); + return; + } + + // if checkServerIdentity returns false, we don't call open; this means the connection was rejected + const ssl_ptr = @as(*BoringSSL.SSL, @ptrCast(socket.getNativeHandle())); + if (!client.checkServerIdentity(comptime ssl, socket, handshake_error, ssl_ptr, true)) { + client.flags.did_have_handshaking_error = true; + client.unregisterAbortTracker(); + if (!socket.isClosed()) terminateSocket(socket); + return; + } + } + + return client.firstCall(comptime ssl, socket); + } else { + // if we are here, it is because the server rejected us, and error_no is the cause + // if we set reject_unauthorized == false, this means the server requires a custom CA (aka NODE_EXTRA_CA_CERTS) + if (client.flags.did_have_handshaking_error) { + client.closeAndFail(BoringSSL.getCertErrorFromNo(handshake_error.error_no), comptime ssl, socket); + return; + 
} + // if handshake_success itself is false, the connection was rejected + client.closeAndFail(error.ConnectionRefused, comptime ssl, socket); + return; + } + } + + if (socket.isClosed()) { + markSocketAsDead(socket); + if (active.get(PooledSocket)) |pooled| { + addMemoryBackToPool(pooled); + } + + return; + } + + if (handshake_success) { + if (active.is(PooledSocket)) { + // Allow pooled sockets to be reused if the handshake was successful. + socket.setTimeout(0); + socket.setTimeoutMinutes(5); + return; + } + } + + if (active.get(PooledSocket)) |pooled| { + addMemoryBackToPool(pooled); + } + + terminateSocket(socket); + } + pub fn onClose( + ptr: *anyopaque, + socket: HTTPSocket, + _: c_int, + _: ?*anyopaque, + ) void { + const tagged = getTagged(ptr); + markSocketAsDead(socket); + + if (tagged.get(HTTPClient)) |client| { + return client.onClose(comptime ssl, socket); + } + + if (tagged.get(PooledSocket)) |pooled| { + addMemoryBackToPool(pooled); + } + + return; + } + + fn addMemoryBackToPool(pooled: *PooledSocket) void { + assert(context().pending_sockets.put(pooled)); + } + + pub fn onData( + ptr: *anyopaque, + socket: HTTPSocket, + buf: []const u8, + ) void { + const tagged = getTagged(ptr); + if (tagged.get(HTTPClient)) |client| { + return client.onData( + comptime ssl, + buf, + if (comptime ssl) &bun.http.http_thread.https_context else &bun.http.http_thread.http_context, + socket, + ); + } else if (tagged.is(PooledSocket)) { + // trailing zero is fine to ignore + if (strings.eqlComptime(buf, bun.http.end_of_chunked_http1_1_encoding_response_body)) { + return; + } + + log("Unexpected data on socket", .{}); + + return; + } + log("Unexpected data on unknown socket", .{}); + terminateSocket(socket); + } + pub fn onWritable( + ptr: *anyopaque, + socket: HTTPSocket, + ) void { + const tagged = getTagged(ptr); + if (tagged.get(HTTPClient)) |client| { + return client.onWritable( + false, + comptime ssl, + socket, + ); + } else if (tagged.is(PooledSocket)) { + // it's a keep-alive socket + } else { + // don't know what this is, let's close it + log("Unexpected writable on socket", .{}); + terminateSocket(socket); + } + } + pub fn onLongTimeout( + ptr: *anyopaque, + socket: HTTPSocket, + ) void { + const tagged = getTagged(ptr); + if (tagged.get(HTTPClient)) |client| { + return client.onTimeout(comptime ssl, socket); + } else if (tagged.get(PooledSocket)) |pooled| { + // If a socket has been sitting around for 5 minutes, + // close it and remove it from the pool. + addMemoryBackToPool(pooled); + } + + terminateSocket(socket); + } + pub fn onConnectError( + ptr: *anyopaque, + socket: HTTPSocket, + _: c_int, + ) void { + const tagged = getTagged(ptr); + markSocketAsDead(socket); + if (tagged.get(HTTPClient)) |client| { + client.onConnectError(); + } else if (tagged.get(PooledSocket)) |pooled| { + addMemoryBackToPool(pooled); + } + // us_connecting_socket_close is always called internally by uSockets + } + pub fn onEnd( + _: *anyopaque, + socket: HTTPSocket, + ) void { + // A TCP FIN means the socket must be closed, but we must keep the original tagged + // pointer so that its onClose callback is called. + // + // Three possible states: + // 1. HTTP Keep-Alive socket: it must be removed from the pool + // 2. HTTP Client socket: it might need to be retried + // 3. 
Dead socket: it is already marked as dead + socket.close(.failure); + } + }; + + fn existingSocket(this: *@This(), reject_unauthorized: bool, hostname: []const u8, port: u16) ?HTTPSocket { + if (hostname.len > MAX_KEEPALIVE_HOSTNAME) + return null; + + var iter = this.pending_sockets.used.iterator(.{ .kind = .set }); + + while (iter.next()) |pending_socket_index| { + var socket = this.pending_sockets.at(@as(u16, @intCast(pending_socket_index))); + if (socket.port != port) { + continue; + } + + if (socket.did_have_handshaking_error_while_reject_unauthorized_is_false and reject_unauthorized) { + continue; + } + + if (strings.eqlLong(socket.hostname_buf[0..socket.hostname_len], hostname, true)) { + const http_socket = socket.http_socket; + assert(context().pending_sockets.put(socket)); + + if (http_socket.isClosed()) { + markSocketAsDead(http_socket); + continue; + } + + if (http_socket.isShutdown() or http_socket.getError() != 0) { + terminateSocket(http_socket); + continue; + } + + log("+ Keep-Alive reuse {s}:{d}", .{ hostname, port }); + return http_socket; + } + } + + return null; + } + + pub fn connectSocket(this: *@This(), client: *HTTPClient, socket_path: []const u8) !HTTPSocket { + client.connected_url = if (client.http_proxy) |proxy| proxy else client.url; + const socket = try HTTPSocket.connectUnixAnon( + socket_path, + this.us_socket_context, + ActiveSocket.init(client).ptr(), + false, // dont allow half-open sockets + ); + client.allow_retry = false; + return socket; + } + + pub fn connect(this: *@This(), client: *HTTPClient, hostname_: []const u8, port: u16) !HTTPSocket { + const hostname = if (FeatureFlags.hardcode_localhost_to_127_0_0_1 and strings.eqlComptime(hostname_, "localhost")) + "127.0.0.1" + else + hostname_; + + client.connected_url = if (client.http_proxy) |proxy| proxy else client.url; + client.connected_url.hostname = hostname; + + if (client.isKeepAlivePossible()) { + if (this.existingSocket(client.flags.reject_unauthorized, hostname, port)) |sock| { + if (sock.ext(**anyopaque)) |ctx| { + ctx.* = bun.cast(**anyopaque, ActiveSocket.init(client).ptr()); + } + client.allow_retry = true; + try client.onOpen(comptime ssl, sock); + if (comptime ssl) { + client.firstCall(comptime ssl, sock); + } + return sock; + } + } + + const socket = try HTTPSocket.connectAnon( + hostname, + port, + this.us_socket_context, + ActiveSocket.init(client).ptr(), + false, + ); + client.allow_retry = false; + return socket; + } + }; +} +const bun = @import("bun"); +const uws = bun.uws; +const BoringSSL = bun.BoringSSL.c; +const strings = bun.strings; +const Environment = bun.Environment; +const FeatureFlags = bun.FeatureFlags; +const assert = bun.assert; +const HTTPThread = @import("./HTTPThread.zig"); +const HTTPCertError = @import("./HTTPCertError.zig"); +const HTTPClient = bun.http; +const InitError = HTTPClient.InitError; +const TaggedPointerUnion = @import("../ptr.zig").TaggedPointerUnion; + +const DeadSocket = opaque {}; +var dead_socket = @as(*DeadSocket, @ptrFromInt(1)); +const log = bun.Output.scoped(.HTTPContext, true); diff --git a/src/http/HTTPRequestBody.zig b/src/http/HTTPRequestBody.zig new file mode 100644 index 0000000000..bb5e56db12 --- /dev/null +++ b/src/http/HTTPRequestBody.zig @@ -0,0 +1,37 @@ +pub const HTTPRequestBody = union(enum) { + bytes: []const u8, + sendfile: SendFile, + stream: struct { + buffer: ?*ThreadSafeStreamBuffer, + ended: bool, + + pub fn detach(this: *@This()) void { + if (this.buffer) |buffer| { + this.buffer = null; + buffer.deref(); + } + } + }, + 
+ pub fn isStream(this: *const HTTPRequestBody) bool { + return this.* == .stream; + } + + pub fn deinit(this: *HTTPRequestBody) void { + switch (this.*) { + .sendfile, .bytes => {}, + .stream => |*stream| stream.detach(), + } + } + pub fn len(this: *const HTTPRequestBody) usize { + return switch (this.*) { + .bytes => this.bytes.len, + .sendfile => this.sendfile.content_size, + // unknown amount + .stream => std.math.maxInt(usize), + }; + } +}; +const std = @import("std"); +const SendFile = @import("./SendFile.zig"); +const ThreadSafeStreamBuffer = @import("./ThreadSafeStreamBuffer.zig"); diff --git a/src/http/HTTPThread.zig b/src/http/HTTPThread.zig new file mode 100644 index 0000000000..23e1a088e6 --- /dev/null +++ b/src/http/HTTPThread.zig @@ -0,0 +1,481 @@ +var custom_ssl_context_map = std.AutoArrayHashMap(*SSLConfig, *NewHTTPContext(true)).init(bun.default_allocator); +const HTTPThread = @This(); + +loop: *JSC.MiniEventLoop, +http_context: NewHTTPContext(false), +https_context: NewHTTPContext(true), + +queued_tasks: Queue = Queue{}, + +queued_shutdowns: std.ArrayListUnmanaged(ShutdownMessage) = std.ArrayListUnmanaged(ShutdownMessage){}, +queued_writes: std.ArrayListUnmanaged(WriteMessage) = std.ArrayListUnmanaged(WriteMessage){}, + +queued_shutdowns_lock: bun.Mutex = .{}, +queued_writes_lock: bun.Mutex = .{}, + +queued_proxy_deref: std.ArrayListUnmanaged(*ProxyTunnel) = std.ArrayListUnmanaged(*ProxyTunnel){}, + +has_awoken: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), +timer: std.time.Timer, +lazy_libdeflater: ?*LibdeflateState = null, +lazy_request_body_buffer: ?*HeapRequestBodyBuffer = null, + +pub const HeapRequestBodyBuffer = struct { + buffer: [512 * 1024]u8 = undefined, + fixed_buffer_allocator: std.heap.FixedBufferAllocator, + + pub const new = bun.TrivialNew(@This()); + pub const deinit = bun.TrivialDeinit(@This()); + + pub fn init() *@This() { + var this = HeapRequestBodyBuffer.new(.{ + .fixed_buffer_allocator = undefined, + }); + this.fixed_buffer_allocator = std.heap.FixedBufferAllocator.init(&this.buffer); + return this; + } + + pub fn put(this: *@This()) void { + if (bun.http.http_thread.lazy_request_body_buffer == null) { + // This case hypothetically should never happen + this.fixed_buffer_allocator.reset(); + bun.http.http_thread.lazy_request_body_buffer = this; + } else { + this.deinit(); + } + } +}; + +pub const RequestBodyBuffer = union(enum) { + heap: *HeapRequestBodyBuffer, + stack: std.heap.StackFallbackAllocator(request_body_send_stack_buffer_size), + + pub fn deinit(this: *@This()) void { + switch (this.*) { + .heap => |heap| heap.put(), + .stack => {}, + } + } + + pub fn allocatedSlice(this: *@This()) []u8 { + return switch (this.*) { + .heap => |heap| &heap.buffer, + .stack => |*stack| &stack.buffer, + }; + } + + pub fn allocator(this: *@This()) std.mem.Allocator { + return switch (this.*) { + .heap => |heap| heap.fixed_buffer_allocator.allocator(), + .stack => |*stack| stack.get(), + }; + } + + pub fn toArrayList(this: *@This()) std.ArrayList(u8) { + var arraylist = std.ArrayList(u8).fromOwnedSlice(this.allocator(), this.allocatedSlice()); + arraylist.items.len = 0; + return arraylist; + } +}; + +const threadlog = Output.scoped(.HTTPThread, true); +const WriteMessage = struct { + async_http_id: u32, + flags: packed struct(u8) { + is_tls: bool, + type: Type, + _: u5 = 0, + }, + + pub const Type = enum(u2) { + data = 0, + end = 1, + endChunked = 2, + }; +}; +const ShutdownMessage = struct { + async_http_id: u32, + is_tls: bool, +}; + +pub 
const LibdeflateState = struct { + decompressor: *bun.libdeflate.Decompressor = undefined, + shared_buffer: [512 * 1024]u8 = undefined, + + pub const new = bun.TrivialNew(@This()); +}; + +const request_body_send_stack_buffer_size = 32 * 1024; + +pub inline fn getRequestBodySendBuffer(this: *@This(), estimated_size: usize) RequestBodyBuffer { + if (estimated_size >= request_body_send_stack_buffer_size) { + if (this.lazy_request_body_buffer == null) { + log("Allocating HeapRequestBodyBuffer due to {d} bytes request body", .{estimated_size}); + return .{ + .heap = HeapRequestBodyBuffer.init(), + }; + } + + return .{ .heap = bun.take(&this.lazy_request_body_buffer).? }; + } + return .{ + .stack = std.heap.stackFallback(request_body_send_stack_buffer_size, bun.default_allocator), + }; +} + +pub fn deflater(this: *@This()) *LibdeflateState { + if (this.lazy_libdeflater == null) { + this.lazy_libdeflater = LibdeflateState.new(.{ + .decompressor = bun.libdeflate.Decompressor.alloc() orelse bun.outOfMemory(), + }); + } + + return this.lazy_libdeflater.?; +} + +fn onInitErrorNoop(err: InitError, opts: InitOpts) noreturn { + switch (err) { + error.LoadCAFile => { + if (!bun.sys.existsZ(opts.abs_ca_file_name)) { + Output.err("HTTPThread", "failed to find CA file: '{s}'", .{opts.abs_ca_file_name}); + } else { + Output.err("HTTPThread", "failed to load CA file: '{s}'", .{opts.abs_ca_file_name}); + } + }, + error.InvalidCAFile => { + Output.err("HTTPThread", "the CA file is invalid: '{s}'", .{opts.abs_ca_file_name}); + }, + error.InvalidCA => { + Output.err("HTTPThread", "the provided CA is invalid", .{}); + }, + error.FailedToOpenSocket => { + Output.errGeneric("failed to start HTTP client thread", .{}); + }, + } + Global.crash(); +} + +pub const InitOpts = struct { + ca: []stringZ = &.{}, + abs_ca_file_name: stringZ = &.{}, + for_install: bool = false, + + onInitError: *const fn (err: InitError, opts: InitOpts) noreturn = &onInitErrorNoop, +}; + +fn initOnce(opts: *const InitOpts) void { + bun.http.http_thread = .{ + .loop = undefined, + .http_context = .{ + .us_socket_context = undefined, + .pending_sockets = NewHTTPContext(false).PooledSocketHiveAllocator.empty, + }, + .https_context = .{ + .us_socket_context = undefined, + .pending_sockets = NewHTTPContext(true).PooledSocketHiveAllocator.empty, + }, + .timer = std.time.Timer.start() catch unreachable, + }; + bun.libdeflate.load(); + const thread = std.Thread.spawn( + .{ + .stack_size = bun.default_thread_stack_size, + }, + onStart, + .{opts.*}, + ) catch |err| Output.panic("Failed to start HTTP Client thread: {s}", .{@errorName(err)}); + thread.detach(); +} +var init_once = bun.once(initOnce); + +pub fn init(opts: *const InitOpts) void { + init_once.call(.{opts}); +} + +pub fn onStart(opts: InitOpts) void { + Output.Source.configureNamedThread("HTTP Client"); + bun.http.default_arena = Arena.init() catch unreachable; + bun.http.default_allocator = bun.http.default_arena.allocator(); + + const loop = bun.JSC.MiniEventLoop.initGlobal(null); + + if (Environment.isWindows) { + _ = std.process.getenvW(comptime bun.strings.w("SystemRoot")) orelse { + bun.Output.errGeneric("The %SystemRoot% environment variable is not set. 
Bun needs this set in order for network requests to work.", .{}); + Global.crash(); + }; + } + + bun.http.http_thread.loop = loop; + bun.http.http_thread.http_context.init(); + bun.http.http_thread.https_context.initWithThreadOpts(&opts) catch |err| opts.onInitError(err, opts); + bun.http.http_thread.has_awoken.store(true, .monotonic); + bun.http.http_thread.processEvents(); +} + +pub fn connect(this: *@This(), client: *HTTPClient, comptime is_ssl: bool) !NewHTTPContext(is_ssl).HTTPSocket { + if (client.unix_socket_path.length() > 0) { + return try this.context(is_ssl).connectSocket(client, client.unix_socket_path.slice()); + } + + if (comptime is_ssl) { + const needs_own_context = client.tls_props != null and client.tls_props.?.requires_custom_request_ctx; + if (needs_own_context) { + var requested_config = client.tls_props.?; + for (custom_ssl_context_map.keys()) |other_config| { + if (requested_config.isSame(other_config)) { + // we free the caller's config since we have an existing one + if (requested_config != client.tls_props) { + requested_config.deinit(); + bun.default_allocator.destroy(requested_config); + } + client.tls_props = other_config; + if (client.http_proxy) |url| { + return try custom_ssl_context_map.get(other_config).?.connect(client, url.hostname, url.getPortAuto()); + } else { + return try custom_ssl_context_map.get(other_config).?.connect(client, client.url.hostname, client.url.getPortAuto()); + } + } + } + // we need the config, so don't free it + var custom_context = try bun.default_allocator.create(NewHTTPContext(is_ssl)); + custom_context.initWithClientConfig(client) catch |err| { + client.tls_props = null; + + requested_config.deinit(); + bun.default_allocator.destroy(requested_config); + bun.default_allocator.destroy(custom_context); + + // TODO: these error names reach JS; figure out how they should be handled + return switch (err) { + error.FailedToOpenSocket => |e| e, + error.InvalidCA => error.FailedToOpenSocket, + error.InvalidCAFile => error.FailedToOpenSocket, + error.LoadCAFile => error.FailedToOpenSocket, + }; + }; + try custom_ssl_context_map.put(requested_config, custom_context); + // We might deinit the socket context, so we disable keepalive to make sure we don't + // free it while in use. 
+ client.flags.disable_keepalive = true; + if (client.http_proxy) |url| { + // https://github.com/oven-sh/bun/issues/11343 + if (url.protocol.len == 0 or strings.eqlComptime(url.protocol, "https") or strings.eqlComptime(url.protocol, "http")) { + return try this.context(is_ssl).connect(client, url.hostname, url.getPortAuto()); + } + return error.UnsupportedProxyProtocol; + } + return try custom_context.connect(client, client.url.hostname, client.url.getPortAuto()); + } + } + if (client.http_proxy) |url| { + if (url.href.len > 0) { + // https://github.com/oven-sh/bun/issues/11343 + if (url.protocol.len == 0 or strings.eqlComptime(url.protocol, "https") or strings.eqlComptime(url.protocol, "http")) { + return try this.context(is_ssl).connect(client, url.hostname, url.getPortAuto()); + } + return error.UnsupportedProxyProtocol; + } + } + return try this.context(is_ssl).connect(client, client.url.hostname, client.url.getPortAuto()); +} + +pub fn context(this: *@This(), comptime is_ssl: bool) *NewHTTPContext(is_ssl) { + return if (is_ssl) &this.https_context else &this.http_context; +} + +fn drainEvents(this: *@This()) void { + { + this.queued_shutdowns_lock.lock(); + defer this.queued_shutdowns_lock.unlock(); + for (this.queued_shutdowns.items) |http| { + if (bun.http.socket_async_http_abort_tracker.fetchSwapRemove(http.async_http_id)) |socket_ptr| { + if (http.is_tls) { + const socket = uws.SocketTLS.fromAny(socket_ptr.value); + // do a fast shutdown here since we are aborting and we dont want to wait for the close_notify from the other side + socket.close(.failure); + } else { + const socket = uws.SocketTCP.fromAny(socket_ptr.value); + socket.close(.failure); + } + } + } + this.queued_shutdowns.clearRetainingCapacity(); + } + { + this.queued_writes_lock.lock(); + defer this.queued_writes_lock.unlock(); + for (this.queued_writes.items) |write| { + const flags = write.flags; + const messageType = flags.type; + const ended = messageType == .end or messageType == .endChunked; + + if (bun.http.socket_async_http_abort_tracker.get(write.async_http_id)) |socket_ptr| { + switch (flags.is_tls) { + inline true, false => |is_tls| { + const socket = uws.NewSocketHandler(is_tls).fromAny(socket_ptr); + if (socket.isClosed() or socket.isShutdown()) { + continue; + } + const tagged = NewHTTPContext(is_tls).getTaggedFromSocket(socket); + if (tagged.get(HTTPClient)) |client| { + if (client.state.original_request_body == .stream) { + var stream = &client.state.original_request_body.stream; + stream.ended = ended; + if (messageType == .endChunked) { + // only send the 0-length chunk if the request body is chunked + client.writeToStream(is_tls, socket, bun.http.end_of_chunked_http1_1_encoding_response_body); + } else { + client.flushStream(is_tls, socket); + } + } + } + }, + } + } + } + this.queued_writes.clearRetainingCapacity(); + } + + while (this.queued_proxy_deref.pop()) |http| { + http.deref(); + } + + var count: usize = 0; + var active = AsyncHTTP.active_requests_count.load(.monotonic); + const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); + if (active >= max) return; + defer { + if (comptime Environment.allow_assert) { + if (count > 0) + log("Processed {d} tasks\n", .{count}); + } + } + + while (this.queued_tasks.pop()) |http| { + var cloned = bun.http.ThreadlocalAsyncHTTP.new(.{ + .async_http = http.*, + }); + cloned.async_http.real = http; + cloned.async_http.onStart(); + if (comptime Environment.allow_assert) { + count += 1; + } + + active += 1; + if (active >= max) break; + } +} + +fn 
processEvents(this: *@This()) noreturn { + if (comptime Environment.isPosix) { + this.loop.loop.num_polls = @max(2, this.loop.loop.num_polls); + } else if (comptime Environment.isWindows) { + this.loop.loop.inc(); + } else { + @compileError("TODO:"); + } + + while (true) { + this.drainEvents(); + + var start_time: i128 = 0; + if (comptime Environment.isDebug) { + start_time = std.time.nanoTimestamp(); + } + Output.flush(); + + this.loop.loop.inc(); + this.loop.loop.tick(); + this.loop.loop.dec(); + + // this.loop.run(); + if (comptime Environment.isDebug) { + const end = std.time.nanoTimestamp(); + threadlog("Waited {any}\n", .{std.fmt.fmtDurationSigned(@as(i64, @truncate(end - start_time)))}); + Output.flush(); + } + } +} + +pub fn scheduleShutdown(this: *@This(), http: *AsyncHTTP) void { + { + this.queued_shutdowns_lock.lock(); + defer this.queued_shutdowns_lock.unlock(); + this.queued_shutdowns.append(bun.default_allocator, .{ + .async_http_id = http.async_http_id, + .is_tls = http.client.isHTTPS(), + }) catch bun.outOfMemory(); + } + if (this.has_awoken.load(.monotonic)) + this.loop.loop.wakeup(); +} + +pub fn scheduleRequestWrite(this: *@This(), http: *AsyncHTTP, messageType: WriteMessage.Type) void { + { + this.queued_writes_lock.lock(); + defer this.queued_writes_lock.unlock(); + this.queued_writes.append(bun.default_allocator, .{ + .async_http_id = http.async_http_id, + .flags = .{ + .is_tls = http.client.isHTTPS(), + .type = messageType, + }, + }) catch bun.outOfMemory(); + } + if (this.has_awoken.load(.monotonic)) + this.loop.loop.wakeup(); +} + +pub fn scheduleProxyDeref(this: *@This(), proxy: *ProxyTunnel) void { + // this is always called on the http thread + { + this.queued_proxy_deref.append(bun.default_allocator, proxy) catch bun.outOfMemory(); + } + if (this.has_awoken.load(.monotonic)) + this.loop.loop.wakeup(); +} + +pub fn wakeup(this: *@This()) void { + if (this.has_awoken.load(.monotonic)) + this.loop.loop.wakeup(); +} + +pub fn schedule(this: *@This(), batch: Batch) void { + if (batch.len == 0) + return; + + { + var batch_ = batch; + while (batch_.pop()) |task| { + const http: *AsyncHTTP = @fieldParentPtr("task", task); + this.queued_tasks.push(http); + } + } + + if (this.has_awoken.load(.monotonic)) + this.loop.loop.wakeup(); +} + +const std = @import("std"); + +const bun = @import("bun"); +const Output = bun.Output; +const Environment = bun.Environment; +const Global = bun.Global; +const uws = bun.uws; +const strings = bun.strings; +const stringZ = bun.stringZ; +const JSC = bun.JSC; +const NewHTTPContext = bun.http.NewHTTPContext; +const UnboundedQueue = @import("../bun.js/unbounded_queue.zig").UnboundedQueue; +const AsyncHTTP = bun.http.AsyncHTTP; +pub const Queue = UnboundedQueue(AsyncHTTP, .next); + +const HTTPClient = bun.http; +const ProxyTunnel = @import("./ProxyTunnel.zig"); +const InitError = HTTPClient.InitError; +const Batch = bun.ThreadPool.Batch; +const Arena = @import("../allocators/mimalloc_arena.zig").Arena; +const SSLConfig = @import("../bun.js/api/server.zig").ServerConfig.SSLConfig; +const log = Output.scoped(.HTTPThread, false); diff --git a/src/http/header_builder.zig b/src/http/HeaderBuilder.zig similarity index 100% rename from src/http/header_builder.zig rename to src/http/HeaderBuilder.zig diff --git a/src/http/Headers.zig b/src/http/Headers.zig new file mode 100644 index 0000000000..fc6d6072bd --- /dev/null +++ b/src/http/Headers.zig @@ -0,0 +1,182 @@ +const Headers = @This(); +pub const Entry = struct { + name: Api.StringPointer, + value: 
Api.StringPointer, + + pub const List = bun.MultiArrayList(Entry); +}; + +entries: Entry.List = .{}, +buf: std.ArrayListUnmanaged(u8) = .{}, +allocator: std.mem.Allocator, + +pub fn memoryCost(this: *const Headers) usize { + return this.buf.items.len + this.entries.memoryCost(); +} + +pub fn clone(this: *Headers) !Headers { + return Headers{ + .entries = try this.entries.clone(this.allocator), + .buf = try this.buf.clone(this.allocator), + .allocator = this.allocator, + }; +} + +pub fn get(this: *const Headers, name: []const u8) ?[]const u8 { + const entries = this.entries.slice(); + const names = entries.items(.name); + const values = entries.items(.value); + for (names, 0..) |name_ptr, i| { + if (bun.strings.eqlCaseInsensitiveASCII(this.asStr(name_ptr), name, true)) { + return this.asStr(values[i]); + } + } + + return null; +} + +pub fn append(this: *Headers, name: []const u8, value: []const u8) !void { + var offset: u32 = @truncate(this.buf.items.len); + try this.buf.ensureUnusedCapacity(this.allocator, name.len + value.len); + const name_ptr = Api.StringPointer{ + .offset = offset, + .length = @truncate(name.len), + }; + this.buf.appendSliceAssumeCapacity(name); + offset = @truncate(this.buf.items.len); + this.buf.appendSliceAssumeCapacity(value); + + const value_ptr = Api.StringPointer{ + .offset = offset, + .length = @truncate(value.len), + }; + try this.entries.append(this.allocator, .{ + .name = name_ptr, + .value = value_ptr, + }); +} + +pub fn deinit(this: *Headers) void { + this.entries.deinit(this.allocator); + this.buf.clearAndFree(this.allocator); +} +pub fn getContentType(this: *const Headers) ?[]const u8 { + if (this.entries.len == 0 or this.buf.items.len == 0) { + return null; + } + const header_entries = this.entries.slice(); + const header_names = header_entries.items(.name); + const header_values = header_entries.items(.value); + + for (header_names, 0..header_names.len) |name, i| { + if (bun.strings.eqlCaseInsensitiveASCII(this.asStr(name), "content-type", true)) { + return this.asStr(header_values[i]); + } + } + return null; +} +pub fn asStr(this: *const Headers, ptr: Api.StringPointer) []const u8 { + return if (ptr.offset + ptr.length <= this.buf.items.len) + this.buf.items[ptr.offset..][0..ptr.length] + else + ""; +} + +pub const Options = struct { + body: ?*const Blob.Any = null, +}; + +pub fn fromPicoHttpHeaders(headers: []const picohttp.Header, allocator: std.mem.Allocator) !Headers { + const header_count = headers.len; + var result = Headers{ + .entries = .{}, + .buf = .{}, + .allocator = allocator, + }; + + var buf_len: usize = 0; + for (headers) |header| { + buf_len += header.name.len + header.value.len; + } + result.entries.ensureTotalCapacity(allocator, header_count) catch bun.outOfMemory(); + result.entries.len = headers.len; + result.buf.ensureTotalCapacityPrecise(allocator, buf_len) catch bun.outOfMemory(); + result.buf.items.len = buf_len; + var offset: u32 = 0; + for (headers, 0..headers.len) |header, i| { + const name_offset = offset; + bun.copy(u8, result.buf.items[offset..][0..header.name.len], header.name); + offset += @truncate(header.name.len); + const value_offset = offset; + bun.copy(u8, result.buf.items[offset..][0..header.value.len], header.value); + offset += @truncate(header.value.len); + + result.entries.set(i, .{ + .name = .{ + .offset = name_offset, + .length = @truncate(header.name.len), + }, + .value = .{ + .offset = value_offset, + .length = @truncate(header.value.len), + }, + }); + } + return result; +} + +pub fn 
from(fetch_headers_ref: ?*FetchHeaders, allocator: std.mem.Allocator, options: Options) !Headers { + var header_count: u32 = 0; + var buf_len: u32 = 0; + if (fetch_headers_ref) |headers_ref| + headers_ref.count(&header_count, &buf_len); + var headers = Headers{ + .entries = .{}, + .buf = .{}, + .allocator = allocator, + }; + const buf_len_before_content_type = buf_len; + const needs_content_type = brk: { + if (options.body) |body| { + if (body.hasContentTypeFromUser() and (fetch_headers_ref == null or !fetch_headers_ref.?.fastHas(.ContentType))) { + header_count += 1; + buf_len += @as(u32, @truncate(body.contentType().len + "Content-Type".len)); + break :brk true; + } + } + break :brk false; + }; + headers.entries.ensureTotalCapacity(allocator, header_count) catch bun.outOfMemory(); + headers.entries.len = header_count; + headers.buf.ensureTotalCapacityPrecise(allocator, buf_len) catch bun.outOfMemory(); + headers.buf.items.len = buf_len; + var sliced = headers.entries.slice(); + var names = sliced.items(.name); + var values = sliced.items(.value); + if (fetch_headers_ref) |headers_ref| + headers_ref.copyTo(names.ptr, values.ptr, headers.buf.items.ptr); + + // TODO: maybe we should send the Content-Type header first instead of last? + if (needs_content_type) { + bun.copy(u8, headers.buf.items[buf_len_before_content_type..], "Content-Type"); + names[header_count - 1] = .{ + .offset = buf_len_before_content_type, + .length = "Content-Type".len, + }; + + bun.copy(u8, headers.buf.items[buf_len_before_content_type + "Content-Type".len ..], options.body.?.contentType()); + values[header_count - 1] = .{ + .offset = buf_len_before_content_type + @as(u32, "Content-Type".len), + .length = @as(u32, @truncate(options.body.?.contentType().len)), + }; + } + + return headers; +} + +const Api = @import("../api/schema.zig").Api; +const std = @import("std"); +const bun = @import("bun"); +const picohttp = bun.picohttp; +const Blob = bun.webcore.Blob; +const FetchHeaders = bun.webcore.FetchHeaders; diff --git a/src/http/InitError.zig b/src/http/InitError.zig new file mode 100644 index 0000000000..4ef73a3064 --- /dev/null +++ b/src/http/InitError.zig @@ -0,0 +1,6 @@ +pub const InitError = error{ + FailedToOpenSocket, + LoadCAFile, + InvalidCAFile, + InvalidCA, +}; diff --git a/src/http/InternalState.zig b/src/http/InternalState.zig new file mode 100644 index 0000000000..f044e9da1c --- /dev/null +++ b/src/http/InternalState.zig @@ -0,0 +1,250 @@ +const InternalState = @This(); +// TODO: reduce the size of this struct +// Many of these fields can be moved to a packed struct and use less space + +response_message_buffer: MutableString = undefined, +/// pending_response is the temporary storage for the response headers, url and status code +/// this uses shared_response_headers_buf to store the headers +/// this is set to null once the metadata is cloned +pending_response: ?picohttp.Response = null, + +/// This is the cloned metadata containing the response headers, url and status code, captured once the .headers phase is received +/// it is set to null once returned to the user (ownership is transferred to the user) +/// this can happen after await fetch(...) and the body can continue streaming when this is already null
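+/// e.g. (illustrative, not part of this PR): `const res = await fetch(url)` resolves once the metadata is cloned here, while `res.body` can keep streaming chunks into body_out_str after cloned_metadata is already null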
+/// the user will receive only chunks of the body stored in body_out_str +cloned_metadata: ?HTTPResponseMetadata = null, +flags: InternalStateFlags = InternalStateFlags{}, + +transfer_encoding: Encoding = Encoding.identity, +encoding: Encoding = Encoding.identity, +content_encoding_i: u8 = std.math.maxInt(u8), +chunked_decoder: picohttp.phr_chunked_decoder = .{}, +decompressor: Decompressor = .{ .none = {} }, +stage: Stage = Stage.pending, +/// This is owned by the user and should not be freed here +body_out_str: ?*MutableString = null, +compressed_body: MutableString = undefined, +content_length: ?usize = null, +total_body_received: usize = 0, +request_body: []const u8 = "", +original_request_body: HTTPRequestBody = .{ .bytes = "" }, +request_sent_len: usize = 0, +fail: ?anyerror = null, +request_stage: HTTPStage = .pending, +response_stage: HTTPStage = .pending, +certificate_info: ?CertificateInfo = null, + +pub const InternalStateFlags = packed struct(u8) { + allow_keepalive: bool = true, + received_last_chunk: bool = false, + did_set_content_encoding: bool = false, + is_redirect_pending: bool = false, + is_libdeflate_fast_path_disabled: bool = false, + resend_request_body_on_redirect: bool = false, + _padding: u2 = 0, +}; + +pub fn init(body: HTTPRequestBody, body_out_str: *MutableString) InternalState { + return .{ + .original_request_body = body, + .request_body = if (body == .bytes) body.bytes else "", + .compressed_body = MutableString{ .allocator = bun.http.default_allocator, .list = .{} }, + .response_message_buffer = MutableString{ .allocator = bun.http.default_allocator, .list = .{} }, + .body_out_str = body_out_str, + .stage = Stage.pending, + .pending_response = null, + }; +} + +pub fn isChunkedEncoding(this: *InternalState) bool { + return this.transfer_encoding == Encoding.chunked; } + +pub fn reset(this: *InternalState, allocator: std.mem.Allocator) void { + this.compressed_body.deinit(); + this.response_message_buffer.deinit(); + + const body_msg = this.body_out_str; + if (body_msg) |body| body.reset(); + this.decompressor.deinit(); + + // just in case, check and free to avoid leaks + if (this.cloned_metadata != null) { + this.cloned_metadata.?.deinit(allocator); + this.cloned_metadata = null; + } + + // if this exists, we own it and must free it + if (this.certificate_info) |info| { + this.certificate_info = null; + info.deinit(bun.default_allocator); + } + + this.original_request_body.deinit(); + this.* = .{ + .body_out_str = body_msg, + .compressed_body = MutableString{ .allocator = bun.http.default_allocator, .list = .{} }, + .response_message_buffer = MutableString{ .allocator = bun.http.default_allocator, .list = .{} }, + .original_request_body = .{ .bytes = "" }, + .request_body = "", + .certificate_info = null, + .flags = .{}, + .total_body_received = 0, + }; +} + +pub fn getBodyBuffer(this: *InternalState) *MutableString { + if (this.encoding.isCompressed()) { + return &this.compressed_body; + } + + return this.body_out_str.?; +} + +pub fn isDone(this: *InternalState) bool { + if (this.isChunkedEncoding()) { + return this.flags.received_last_chunk; + } + + if (this.content_length) |content_length| { + return this.total_body_received >= content_length; + } + + // e.g. Content-Type: text/event-stream: we are only done when the connection closes, ends, or times out + return this.flags.received_last_chunk; +} +
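+// Worked example (illustrative, not part of this PR): with `Content-Length: 10`, isDone() is true once +// total_body_received >= 10; with `Transfer-Encoding: chunked`, only once the zero-length terminating chunk +// sets received_last_chunk; with neither, only when the connection closes, ends, or times out. +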
+pub fn decompressBytes(this: *InternalState, buffer: []const u8, body_out_str: *MutableString, is_final_chunk: bool) !void { + defer this.compressed_body.reset(); + var gzip_timer: std.time.Timer = undefined; + + if (bun.http.extremely_verbose) + gzip_timer = std.time.Timer.start() catch @panic("Timer failure"); + + var still_needs_to_decompress = true; + + if (FeatureFlags.isLibdeflateEnabled()) { + // Fast path: use libdeflate + if (is_final_chunk and !this.flags.is_libdeflate_fast_path_disabled and this.encoding.canUseLibDeflate() and this.isDone()) libdeflate: { + this.flags.is_libdeflate_fast_path_disabled = true; + + log("Decompressing {d} bytes with libdeflate\n", .{buffer.len}); + var deflater = bun.http.http_thread.deflater(); + + // gzip stores the size of the uncompressed data in the last 4 bytes of the stream. + // But since it's only 4 bytes, it's only valid if the uncompressed stream is less than 4 GiB. + // If we know that the stream is going to be larger than our + // pre-allocated buffer, then let's dynamically allocate the exact + // size. + if (this.encoding == Encoding.gzip and buffer.len > 16 and buffer.len < 1024 * 1024 * 1024) { + const estimated_size: u32 = @bitCast(buffer[buffer.len - 4 ..][0..4].*); + // Since this is arbitrary input from the internet, let's set an upper bound of 32 MB for the allocation size. + if (estimated_size > deflater.shared_buffer.len and estimated_size < 32 * 1024 * 1024) { + try body_out_str.list.ensureTotalCapacityPrecise(body_out_str.allocator, estimated_size); + const result = deflater.decompressor.decompress(buffer, body_out_str.list.allocatedSlice(), .gzip); + + if (result.status == .success) { + body_out_str.list.items.len = result.written; + still_needs_to_decompress = false; + } + + break :libdeflate; + } + } + + const result = deflater.decompressor.decompress(buffer, &deflater.shared_buffer, switch (this.encoding) { + .gzip => .gzip, + .deflate => .deflate, + else => unreachable, + }); + + if (result.status == .success) { + try body_out_str.list.ensureTotalCapacityPrecise(body_out_str.allocator, result.written); + body_out_str.list.appendSliceAssumeCapacity(deflater.shared_buffer[0..result.written]); + still_needs_to_decompress = false; + } + } + } + + // Slow path, or brotli: use the .decompressor + if (still_needs_to_decompress) { + log("Decompressing {d} bytes\n", .{buffer.len}); + if (body_out_str.list.capacity == 0) { + const min = @min(@ceil(@as(f64, @floatFromInt(buffer.len)) * 1.5), @as(f64, 1024 * 1024 * 2)); + try body_out_str.growBy(@max(@as(usize, @intFromFloat(min)), 32)); + } + + try this.decompressor.updateBuffers(this.encoding, buffer, body_out_str); + + this.decompressor.readAll(this.isDone()) catch |err| { + if (this.isDone() or error.ShortRead != err) { + Output.prettyErrorln("Decompression error: {s}", .{bun.asByteSlice(@errorName(err))}); + Output.flush(); + return err; + } + }; + } + + if (bun.http.extremely_verbose) + this.gzip_elapsed = gzip_timer.read(); +} + +pub fn decompress(this: *InternalState, buffer: MutableString, body_out_str: *MutableString, is_final_chunk: bool) !void { + try this.decompressBytes(buffer.list.items, body_out_str, is_final_chunk); +} + +pub fn processBodyBuffer(this: *InternalState, buffer: MutableString, is_final_chunk: bool) !bool { + if (this.flags.is_redirect_pending) return false; + + var body_out_str = this.body_out_str.?; + + switch (this.encoding) { + Encoding.brotli, Encoding.gzip, Encoding.deflate, Encoding.zstd => { + try this.decompress(buffer, body_out_str, is_final_chunk); + }, + else => { + if (!body_out_str.owns(buffer.list.items)) { + body_out_str.append(buffer.list.items) catch |err| {
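+ // append can only fail on allocation failure here; log and propagate so the request fails instead of silently truncating the body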
Output.prettyErrorln("Failed to append to body buffer: {s}", .{bun.asByteSlice(@errorName(err))}); + Output.flush(); + return err; + }; + } + }, + } + + return this.body_out_str.?.list.items.len > 0; +} + +const std = @import("std"); +const bun = @import("bun"); +const MutableString = bun.MutableString; +const picohttp = bun.picohttp; +const Output = bun.Output; +const FeatureFlags = bun.FeatureFlags; +const HTTPClient = bun.http; +const HTTPResponseMetadata = HTTPClient.HTTPResponseMetadata; +const CertificateInfo = HTTPClient.CertificateInfo; +const Encoding = HTTPClient.Encoding; +const Decompressor = HTTPClient.Decompressor; +const HTTPRequestBody = HTTPClient.HTTPRequestBody; +const log = Output.scoped(.HTTPInternalState, true); + +const HTTPStage = enum { + pending, + headers, + body, + body_chunk, + fail, + done, + proxy_handshake, + proxy_headers, + proxy_body, +}; + +const Stage = enum(u8) { + pending, + connect, + done, + fail, +}; diff --git a/src/http/method.zig b/src/http/Method.zig similarity index 100% rename from src/http/method.zig rename to src/http/Method.zig diff --git a/src/http/mime_type.zig b/src/http/MimeType.zig similarity index 100% rename from src/http/mime_type.zig rename to src/http/MimeType.zig diff --git a/src/http/ProxyTunnel.zig b/src/http/ProxyTunnel.zig new file mode 100644 index 0000000000..fdf0adb2a8 --- /dev/null +++ b/src/http/ProxyTunnel.zig @@ -0,0 +1,345 @@ +const ProxyTunnel = @This(); +const RefCount = bun.ptr.RefCount(@This(), "ref_count", ProxyTunnel.deinit, .{}); +pub const ref = ProxyTunnel.RefCount.ref; +pub const deref = ProxyTunnel.RefCount.deref; + +wrapper: ?ProxyTunnelWrapper = null, +shutdown_err: anyerror = error.ConnectionClosed, +// active socket is the socket that is currently being used +socket: union(enum) { + tcp: NewHTTPContext(false).HTTPSocket, + ssl: NewHTTPContext(true).HTTPSocket, + none: void, +} = .{ .none = {} }, +write_buffer: bun.io.StreamBuffer = .{}, +ref_count: RefCount, + +const ProxyTunnelWrapper = SSLWrapper(*HTTPClient); + +fn onOpen(this: *HTTPClient) void { + log("ProxyTunnel onOpen", .{}); + this.state.response_stage = .proxy_handshake; + this.state.request_stage = .proxy_handshake; + if (this.proxy_tunnel) |proxy| { + proxy.ref(); + defer proxy.deref(); + if (proxy.wrapper) |*wrapper| { + var ssl_ptr = wrapper.ssl orelse return; + const _hostname = this.hostname orelse this.url.hostname; + + var hostname: [:0]const u8 = ""; + var hostname_needs_free = false; + if (!strings.isIPAddress(_hostname)) { + if (_hostname.len < bun.http.temp_hostname.len) { + @memcpy(bun.http.temp_hostname[0.._hostname.len], _hostname); + bun.http.temp_hostname[_hostname.len] = 0; + hostname = bun.http.temp_hostname[0.._hostname.len :0]; + } else { + hostname = bun.default_allocator.dupeZ(u8, _hostname) catch unreachable; + hostname_needs_free = true; + } + } + + defer if (hostname_needs_free) bun.default_allocator.free(hostname); + ssl_ptr.configureHTTPClient(hostname); + } + } +} + +fn onData(this: *HTTPClient, decoded_data: []const u8) void { + if (decoded_data.len == 0) return; + log("ProxyTunnel onData decoded {}", .{decoded_data.len}); + if (this.proxy_tunnel) |proxy| { + proxy.ref(); + defer proxy.deref(); + switch (this.state.response_stage) { + .body => { + log("ProxyTunnel onData body", .{}); + if (decoded_data.len == 0) return; + const report_progress = this.handleResponseBody(decoded_data, false) catch |err| { + proxy.close(err); + return; + }; + + if (report_progress) { + switch (proxy.socket) { + .ssl => |socket| { + 
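+ // report decoded tunnel bytes against the context that owns the underlying socket (TLS context here, plain TCP below)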
this.progressUpdate(true, &bun.http.http_thread.https_context, socket); + }, + .tcp => |socket| { + this.progressUpdate(false, &bun.http.http_thread.http_context, socket); + }, + .none => {}, + } + return; + } + }, + .body_chunk => { + log("ProxyTunnel onData body_chunk", .{}); + if (decoded_data.len == 0) return; + const report_progress = this.handleResponseBodyChunkedEncoding(decoded_data) catch |err| { + proxy.close(err); + return; + }; + + if (report_progress) { + switch (proxy.socket) { + .ssl => |socket| { + this.progressUpdate(true, &bun.http.http_thread.https_context, socket); + }, + .tcp => |socket| { + this.progressUpdate(false, &bun.http.http_thread.http_context, socket); + }, + .none => {}, + } + return; + } + }, + .proxy_headers => { + log("ProxyTunnel onData proxy_headers", .{}); + switch (proxy.socket) { + .ssl => |socket| { + this.handleOnDataHeaders(true, decoded_data, &bun.http.http_thread.https_context, socket); + }, + .tcp => |socket| { + this.handleOnDataHeaders(false, decoded_data, &bun.http.http_thread.http_context, socket); + }, + .none => {}, + } + }, + else => { + log("ProxyTunnel onData unexpected data", .{}); + this.state.pending_response = null; + proxy.close(error.UnexpectedData); + }, + } + } +} + +fn onHandshake(this: *HTTPClient, handshake_success: bool, ssl_error: uws.us_bun_verify_error_t) void { + if (this.proxy_tunnel) |proxy| { + log("ProxyTunnel onHandshake", .{}); + proxy.ref(); + defer proxy.deref(); + this.state.response_stage = .proxy_headers; + this.state.request_stage = .proxy_headers; + this.state.request_sent_len = 0; + const handshake_error = HTTPCertError{ + .error_no = ssl_error.error_no, + .code = if (ssl_error.code == null) "" else ssl_error.code[0..bun.len(ssl_error.code) :0], + .reason = if (ssl_error.code == null) "" else ssl_error.reason[0..bun.len(ssl_error.reason) :0], + }; + if (handshake_success) { + log("ProxyTunnel onHandshake success", .{}); + // the handshake completed, but we may still have SSL errors + this.flags.did_have_handshaking_error = handshake_error.error_no != 0; + if (this.flags.reject_unauthorized) { + // only reject the connection if reject_unauthorized == true + if (this.flags.did_have_handshaking_error) { + proxy.close(BoringSSL.getCertErrorFromNo(handshake_error.error_no)); + return; + } + + // if checkServerIdentity returns false we don't call open; this means the connection was rejected + bun.assert(proxy.wrapper != null); + const ssl_ptr = proxy.wrapper.?.ssl orelse return; + + switch (proxy.socket) { + .ssl => |socket| { + if (!this.checkServerIdentity(true, socket, handshake_error, ssl_ptr, false)) { + log("ProxyTunnel onHandshake checkServerIdentity failed", .{}); + this.flags.did_have_handshaking_error = true; + + this.unregisterAbortTracker(); + return; + } + }, + .tcp => |socket| { + if (!this.checkServerIdentity(false, socket, handshake_error, ssl_ptr, false)) { + log("ProxyTunnel onHandshake checkServerIdentity failed", .{}); + this.flags.did_have_handshaking_error = true; + this.unregisterAbortTracker(); + return; + } + }, + .none => {}, + } + } + + switch (proxy.socket) { + .ssl => |socket| { + this.onWritable(true, true, socket); + }, + .tcp => |socket| { + this.onWritable(true, false, socket); + }, + .none => {}, + } + } else { + log("ProxyTunnel onHandshake failed", .{}); + // if we are here, it is because the server rejected us, and error_no is the cause + // if reject_unauthorized == false was set, this usually means the server requires a custom CA (aka NODE_EXTRA_CA_CERTS)
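+ // e.g. (illustrative) running with NODE_EXTRA_CA_CERTS=./private-root.pem lets this handshake succeed against a server whose certificate chains to a private CA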
+ if (this.flags.did_have_handshaking_error and handshake_error.error_no != 0) { + proxy.close(BoringSSL.getCertErrorFromNo(handshake_error.error_no)); + return; + } + // if handshake_success itself is false, the connection was rejected + proxy.close(error.ConnectionRefused); + return; + } + } +} + +pub fn write(this: *HTTPClient, encoded_data: []const u8) void { + if (this.proxy_tunnel) |proxy| { + const written = switch (proxy.socket) { + .ssl => |socket| socket.write(encoded_data), + .tcp => |socket| socket.write(encoded_data), + .none => 0, + }; + const pending = encoded_data[@intCast(written)..]; + if (pending.len > 0) { + // buffer the rest; let's flush when we are truly writable + proxy.write_buffer.write(pending) catch bun.outOfMemory(); + } + } +} + +fn onClose(this: *HTTPClient) void { + log("ProxyTunnel onClose {s}", .{if (this.proxy_tunnel == null) "tunnel is detached" else "tunnel exists"}); + if (this.proxy_tunnel) |proxy| { + proxy.ref(); + // defer the proxy deref; the proxy tunnel may still be in use after triggering the close callback + defer bun.http.http_thread.scheduleProxyDeref(proxy); + const err = proxy.shutdown_err; + switch (proxy.socket) { + .ssl => |socket| { + this.closeAndFail(err, true, socket); + }, + .tcp => |socket| { + this.closeAndFail(err, false, socket); + }, + .none => {}, + } + proxy.detachSocket(); + } +} + +pub fn start(this: *HTTPClient, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket, ssl_options: JSC.API.ServerConfig.SSLConfig, start_payload: []const u8) void { + const proxy_tunnel = bun.new(ProxyTunnel, .{ + .ref_count = .init(), + }); + + var custom_options = ssl_options; + // we always request the cert so we can verify it, and we manually abort the connection if the hostname doesn't match + custom_options.reject_unauthorized = 0; + custom_options.request_cert = 1; + proxy_tunnel.wrapper = SSLWrapper(*HTTPClient).init(custom_options, true, .{ + .onOpen = ProxyTunnel.onOpen, + .onData = ProxyTunnel.onData, + .onHandshake = ProxyTunnel.onHandshake, + .onClose = ProxyTunnel.onClose, + .write = ProxyTunnel.write, + .ctx = this, + }) catch |err| { + if (err == error.OutOfMemory) { + bun.outOfMemory(); + } + + // invalid TLS options + proxy_tunnel.detachAndDeref(); + this.closeAndFail(error.ConnectionRefused, is_ssl, socket); + return; + }; + this.proxy_tunnel = proxy_tunnel; + if (is_ssl) { + proxy_tunnel.socket = .{ .ssl = socket }; + } else { + proxy_tunnel.socket = .{ .tcp = socket }; + } + if (start_payload.len > 0) { + log("proxy tunnel start with payload", .{}); + proxy_tunnel.wrapper.?.startWithPayload(start_payload); + } else { + log("proxy tunnel start", .{}); + proxy_tunnel.wrapper.?.start(); + } +} + +pub fn close(this: *ProxyTunnel, err: anyerror) void { + this.shutdown_err = err; + this.shutdown(); +} + +pub fn shutdown(this: *ProxyTunnel) void { + if (this.wrapper) |*wrapper| { + // fast shutdown the connection + _ = wrapper.shutdown(true); + } +} + +pub fn onWritable(this: *ProxyTunnel, comptime is_ssl: bool, socket: NewHTTPContext(is_ssl).HTTPSocket) void { + log("ProxyTunnel onWritable", .{}); + this.ref(); + defer this.deref(); + defer if (this.wrapper) |*wrapper| { + // Cycle through the SSL state machine + _ = wrapper.flush(); + }; + + const encoded_data = this.write_buffer.slice(); + if (encoded_data.len == 0) { + return; + } + const written = socket.write(encoded_data); + if (written == encoded_data.len) { + this.write_buffer.reset(); + } else { + this.write_buffer.cursor += @intCast(written); + } +} + +pub fn
receiveData(this: *ProxyTunnel, buf: []const u8) void { + this.ref(); + defer this.deref(); + if (this.wrapper) |*wrapper| { + wrapper.receiveData(buf); + } +} + +pub fn writeData(this: *ProxyTunnel, buf: []const u8) !usize { + if (this.wrapper) |*wrapper| { + return try wrapper.writeData(buf); + } + return error.ConnectionClosed; +} + +pub fn detachSocket(this: *ProxyTunnel) void { + this.socket = .{ .none = {} }; +} + +pub fn detachAndDeref(this: *ProxyTunnel) void { + this.detachSocket(); + this.deref(); +} + +fn deinit(this: *ProxyTunnel) void { + this.socket = .{ .none = {} }; + if (this.wrapper) |*wrapper| { + wrapper.deinit(); + this.wrapper = null; + } + this.write_buffer.deinit(); + bun.destroy(this); +} + +const bun = @import("bun"); +const strings = bun.strings; +const uws = bun.uws; +const BoringSSL = bun.BoringSSL.c; +const NewHTTPContext = bun.http.NewHTTPContext; +const HTTPClient = bun.http; +const JSC = bun.JSC; +const HTTPCertError = @import("./HTTPCertError.zig"); +const SSLWrapper = @import("../bun.js/api/bun/ssl_wrapper.zig").SSLWrapper; +const log = bun.Output.scoped(.http_proxy_tunnel, false); diff --git a/src/http/SendFile.zig b/src/http/SendFile.zig new file mode 100644 index 0000000000..63ca105a28 --- /dev/null +++ b/src/http/SendFile.zig @@ -0,0 +1,78 @@ +const SendFile = @This(); + +fd: bun.FileDescriptor, +remain: usize = 0, +offset: usize = 0, +content_size: usize = 0, + +pub fn isEligible(url: bun.URL) bool { + if (comptime Environment.isWindows or !FeatureFlags.streaming_file_uploads_for_http_client) { + return false; + } + return url.isHTTP() and url.href.len > 0; +} + +pub fn write( + this: *SendFile, + socket: NewHTTPContext(false).HTTPSocket, +) Status { + const adjusted_count_temporary = @min(@as(u64, this.remain), @as(u63, std.math.maxInt(u63))); + // TODO we should not need this int cast; improve the return type of `@min` + const adjusted_count = @as(u63, @intCast(adjusted_count_temporary)); + + if (Environment.isLinux) { + var signed_offset = @as(i64, @intCast(this.offset)); + const begin = this.offset; + const val = + // this does the syscall directly, without libc + std.os.linux.sendfile(socket.fd().cast(), this.fd.cast(), &signed_offset, this.remain); + this.offset = @as(u64, @intCast(signed_offset)); + + const errcode = bun.sys.getErrno(val); + + this.remain -|= @as(u64, @intCast(this.offset -| begin)); + + if (errcode != .SUCCESS or this.remain == 0 or val == 0) { + if (errcode == .SUCCESS) { + return .{ .done = {} }; + } + + return .{ .err = bun.errnoToZigErr(errcode) }; + } + } else if (Environment.isPosix) { + var sbytes: std.posix.off_t = adjusted_count; + const signed_offset = @as(i64, @bitCast(@as(u64, this.offset))); + const errcode = bun.sys.getErrno(std.c.sendfile( + this.fd.cast(), + socket.fd().cast(), + signed_offset, + &sbytes, + null, + 0, + )); + const wrote = @as(u64, @intCast(sbytes)); + this.offset +|= wrote; + this.remain -|= wrote; + if (errcode != .AGAIN or this.remain == 0 or sbytes == 0) { + if (errcode == .SUCCESS) { + return .{ .done = {} }; + } + + return .{ .err = bun.errnoToZigErr(errcode) }; + } + } + + return .{ .again = {} }; +} + +pub const Status = union(enum) { + done: void, + err: anyerror, + again: void, +}; + +const std = @import("std"); +const bun = @import("bun"); +const Environment = bun.Environment; +const FeatureFlags = bun.FeatureFlags; +const NewHTTPContext = bun.http.NewHTTPContext; diff --git a/src/http/Signals.zig b/src/http/Signals.zig new file mode 100644 index 0000000000..78531e7f41 --- /dev/null 
+++ b/src/http/Signals.zig @@ -0,0 +1,31 @@ +const Signals = @This(); + +header_progress: ?*std.atomic.Value(bool) = null, +body_streaming: ?*std.atomic.Value(bool) = null, +aborted: ?*std.atomic.Value(bool) = null, +cert_errors: ?*std.atomic.Value(bool) = null, +pub fn isEmpty(this: *const Signals) bool { + return this.aborted == null and this.body_streaming == null and this.header_progress == null and this.cert_errors == null; +} + +pub const Store = struct { + header_progress: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), + body_streaming: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), + aborted: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), + cert_errors: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), + pub fn to(this: *Store) Signals { + return .{ + .header_progress = &this.header_progress, + .body_streaming = &this.body_streaming, + .aborted = &this.aborted, + .cert_errors = &this.cert_errors, + }; + } +}; + +pub fn get(this: Signals, comptime field: std.meta.FieldEnum(Signals)) bool { + var ptr: *std.atomic.Value(bool) = @field(this, @tagName(field)) orelse return false; + return ptr.load(.monotonic); +} + +const std = @import("std"); diff --git a/src/http/ThreadSafeStreamBuffer.zig b/src/http/ThreadSafeStreamBuffer.zig new file mode 100644 index 0000000000..00cd279c12 --- /dev/null +++ b/src/http/ThreadSafeStreamBuffer.zig @@ -0,0 +1,61 @@ +const ThreadSafeStreamBuffer = @This(); + +buffer: bun.io.StreamBuffer = .{}, +mutex: bun.Mutex = .{}, +ref_count: StreamBufferRefCount = .initExactRefs(2), // 1 for main thread and 1 for http thread +// the callback is invoked with the stored context on the http thread +// it is used to report when the buffer is drained, but only if the end chunk was not yet sent/reported +callback: ?Callback = null, + +const Callback = struct { + callback: *const fn (*anyopaque) void, + context: *anyopaque, + + pub fn init(comptime T: type, callback: *const fn (*T) void, context: *T) @This() { + return .{ .callback = @ptrCast(callback), .context = @ptrCast(context) }; + } + + pub fn call(this: @This()) void { + this.callback(this.context); + } +}; + +const StreamBufferRefCount = bun.ptr.ThreadSafeRefCount(@This(), "ref_count", ThreadSafeStreamBuffer.deinit, .{}); +pub const ref = StreamBufferRefCount.ref; +pub const deref = StreamBufferRefCount.deref; +pub const new = bun.TrivialNew(@This()); + +pub fn acquire(this: *ThreadSafeStreamBuffer) *bun.io.StreamBuffer { + this.mutex.lock(); + return &this.buffer; +} + +pub fn release(this: *ThreadSafeStreamBuffer) void { + this.mutex.unlock(); +} + +/// Should only be called on the main thread, and before scheduling the buffer to the http thread +pub fn setDrainCallback(this: *ThreadSafeStreamBuffer, comptime T: type, callback: *const fn (*T) void, context: *T) void { + this.callback = Callback.init(T, callback, context); +} + +pub fn clearDrainCallback(this: *ThreadSafeStreamBuffer) void { + this.callback = null; +} + +/// This is exclusively called from the http thread +/// Buffer should be acquired before calling this +pub fn reportDrain(this: *ThreadSafeStreamBuffer) void { + if (this.buffer.isEmpty()) { + if (this.callback) |callback| { + callback.call(); + } + } +} + +pub fn deinit(this: *ThreadSafeStreamBuffer) void { + this.buffer.deinit(); + bun.destroy(this); +} + +const bun = @import("bun"); diff --git a/src/http/url_path.zig b/src/http/URLPath.zig similarity index 100% rename from src/http/url_path.zig rename to src/http/URLPath.zig diff --git
a/src/http/websocket_client.zig b/src/http/websocket_client.zig index 87c99ee22b..110b0aa5a9 100644 --- a/src/http/websocket_client.zig +++ b/src/http/websocket_client.zig @@ -46,11 +46,34 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { initial_data_handler: ?*InitialDataHandler = null, event_loop: *JSC.EventLoop = undefined, + deflate: ?*WebSocketDeflate = null, + + // Track if current message is compressed + receiving_compressed: bool = false, + // Track compression state of the entire message (across fragments) + message_is_compressed: bool = false, const stack_frame_size = 1024; + // Minimum message size to compress (RFC 7692 recommendation) + const MIN_COMPRESS_SIZE = 860; + // DEFLATE overhead + const COMPRESSION_OVERHEAD = 4; const WebSocket = @This(); + fn shouldCompress(this: *const WebSocket, data_len: usize, opcode: Opcode) bool { + // Check if compression is available + if (this.deflate == null) return false; + + // Only compress Text and Binary messages + if (opcode != .Text and opcode != .Binary) return false; + + // Don't compress small messages where overhead exceeds benefit + if (data_len < MIN_COMPRESS_SIZE) return false; + + return true; + } + pub fn register(global: *JSC.JSGlobalObject, loop_: *anyopaque, ctx_: *anyopaque) callconv(.C) void { const vm = global.bunVM(); const loop = @as(*uws.Loop, @ptrCast(@alignCast(loop_))); @@ -88,6 +111,10 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { this.ping_received = false; this.ping_len = 0; this.receive_pending_chunk_len = 0; + this.receiving_compressed = false; + this.message_is_compressed = false; + if (this.deflate) |d| d.deinit(); + this.deflate = null; } pub fn cancel(this: *WebSocket) callconv(.C) void { @@ -123,24 +150,34 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { if (this.outgoing_websocket) |ws| { const reject_unauthorized = ws.rejectUnauthorized(); - if (ssl_error.error_no != 0 and (reject_unauthorized or !authorized)) { - this.outgoing_websocket = null; - ws.didAbruptClose(ErrorCode.failed_to_connect); - return; - } - if (authorized) { - if (reject_unauthorized) { - const ssl_ptr = @as(*BoringSSL.c.SSL, @ptrCast(socket.getNativeHandle())); - if (BoringSSL.c.SSL_get_servername(ssl_ptr, 0)) |servername| { - const hostname = servername[0..bun.len(servername)]; - if (!BoringSSL.checkServerIdentity(ssl_ptr, hostname)) { - this.outgoing_websocket = null; - ws.didAbruptClose(ErrorCode.failed_to_connect); - } + // Only reject the connection if reject_unauthorized is true + if (reject_unauthorized) { + // Check for SSL errors + if (ssl_error.error_no != 0) { + this.outgoing_websocket = null; + ws.didAbruptClose(ErrorCode.failed_to_connect); + return; + } + + // Check authorization status + if (!authorized) { + this.outgoing_websocket = null; + ws.didAbruptClose(ErrorCode.failed_to_connect); + return; + } + + // Check server identity + const ssl_ptr = @as(*BoringSSL.c.SSL, @ptrCast(socket.getNativeHandle())); + if (BoringSSL.c.SSL_get_servername(ssl_ptr, 0)) |servername| { + const hostname = servername[0..bun.len(servername)]; + if (!BoringSSL.checkServerIdentity(ssl_ptr, hostname)) { + this.outgoing_websocket = null; + ws.didAbruptClose(ErrorCode.failed_to_connect); } } } + // If reject_unauthorized is false, we accept the connection regardless of SSL errors } } pub fn handleClose(this: *WebSocket, _: Socket, _: c_int, _: ?*anyopaque) void { @@ -182,6 +219,29 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { } } + fn dispatchCompressedData(this: *WebSocket, data_: []const u8, kind: 
Opcode) void { + const deflate = this.deflate orelse { + this.terminate(ErrorCode.compression_unsupported); + return; + }; + + // Decompress the data + var decompressed = deflate.rare_data.arrayList(); + defer decompressed.deinit(); + + deflate.decompress(data_, &decompressed) catch |err| { + const error_code = switch (err) { + error.InflateFailed => ErrorCode.invalid_compressed_data, + error.OutOfMemory => ErrorCode.failed_to_allocate_memory, + }; + this.terminate(error_code); + return; + }; + + this.dispatchData(decompressed.items, kind); + } + + /// Data will be cloned in C++. fn dispatchData(this: *WebSocket, data_: []const u8, kind: Opcode) void { var out = this.outgoing_websocket orelse { this.clearData(); @@ -222,15 +282,46 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { pub fn consume(this: *WebSocket, data_: []const u8, left_in_fragment: usize, kind: Opcode, is_final: bool) usize { bun.assert(data_.len <= left_in_fragment); + // For compressed messages, we must buffer all fragments until the message is complete + if (this.receiving_compressed) { + // Always buffer compressed data + if (data_.len > 0) { + var writable = this.receive_buffer.writableWithSize(data_.len) catch { + this.terminate(ErrorCode.closed); + return 0; + }; + @memcpy(writable[0..data_.len], data_); + this.receive_buffer.update(data_.len); + } + + if (left_in_fragment >= data_.len and left_in_fragment - data_.len - this.receive_pending_chunk_len == 0) { + this.receive_pending_chunk_len = 0; + this.receive_body_remain = 0; + if (is_final) { + // Decompress the complete message + this.dispatchCompressedData(this.receive_buffer.readableSlice(0), kind); + this.clearReceiveBuffers(false); + this.receiving_compressed = false; + this.message_is_compressed = false; + } + } else { + this.receive_pending_chunk_len -|= left_in_fragment; + } + return data_.len; + } + + // Non-compressed path remains the same // did all the data fit in the buffer? // we can avoid copying & allocating a temporary buffer if (is_final and data_.len == left_in_fragment and this.receive_pending_chunk_len == 0) { if (this.receive_buffer.count == 0) { this.dispatchData(data_, kind); + this.message_is_compressed = false; return data_.len; } else if (data_.len == 0) { this.dispatchData(this.receive_buffer.readableSlice(0), kind); this.clearReceiveBuffers(false); + this.message_is_compressed = false; return 0; } } @@ -248,6 +339,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { if (is_final) { this.dispatchData(this.receive_buffer.readableSlice(0), kind); this.clearReceiveBuffers(false); + this.message_is_compressed = false; } } else { this.receive_pending_chunk_len -|= left_in_fragment; @@ -363,6 +455,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { // if is final is true continue is invalid if (this.receiving_is_final) { // nothing to continue here + // Per Autobahn test case 5.9: "The connection is failed immediately, since there is no message to continue." 
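+ // e.g. a lone Continue frame with FIN=1, arriving when no fragmented message is in progress, must fail the connection (RFC 6455, section 5.4)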
this.terminate(ErrorCode.unexpected_opcode); terminated = true; break; @@ -395,12 +488,29 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { }, } - if (need_compression) { + if (need_compression and this.deflate == null) { this.terminate(ErrorCode.compression_unsupported); terminated = true; break; } + // Control frames must not be compressed + if (need_compression and receiving_type.isControl()) { + this.terminate(ErrorCode.invalid_control_frame); + terminated = true; + break; + } + + // Track compression state for this message + if (receiving_type == .Text or receiving_type == .Binary) { + // New message starts - set both compression states + this.message_is_compressed = need_compression; + this.receiving_compressed = need_compression; + } else if (receiving_type == .Continue) { + // Continuation frame - use the compression state from the message start + this.receiving_compressed = this.message_is_compressed; + } + // Handle when the payload length is 0, but it is a message // // This should become @@ -420,6 +530,8 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { // Return to the header state to read the next frame receive_state = .need_header; is_fragmented = false; + this.receiving_compressed = false; + this.message_is_compressed = false; // Bail out if there's nothing left to read if (data.len == 0) break; @@ -598,7 +710,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { // fast path: no backpressure, no queue, just send the bytes. if (!this.hasBackpressure()) { // Do not set MSG_MORE, see https://github.com/oven-sh/bun/issues/4010 - const wrote = socket.write(bytes, false); + const wrote = socket.write(bytes); const expected = @as(c_int, @intCast(bytes.len)); if (wrote == expected) { return true; @@ -621,11 +733,80 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { } fn sendData(this: *WebSocket, bytes: Copy, do_write: bool, opcode: Opcode) bool { + const should_compress = this.deflate != null and (opcode == .Text or opcode == .Binary) and bytes != .raw; + + if (should_compress) { + // For compressed messages, we need to compress the content first + var temp_buffer: ?[]u8 = null; + const allocator = this.deflate.?.rare_data.allocator(); + defer if (temp_buffer) |buf| allocator.free(buf); + const content_to_compress: []const u8 = switch (bytes) { + .utf16 => |utf16| brk: { + // Convert UTF16 to UTF8 for compression + const content_byte_len: usize = strings.elementLengthUTF16IntoUTF8([]const u16, utf16); + temp_buffer = allocator.alloc(u8, content_byte_len) catch return false; + const encode_result = strings.copyUTF16IntoUTF8(temp_buffer.?, []const u16, utf16); + break :brk temp_buffer.?[0..encode_result.written]; + }, + .latin1 => |latin1| brk: { + // Convert Latin1 to UTF8 for compression + const content_byte_len: usize = strings.elementLengthLatin1IntoUTF8(latin1); + if (content_byte_len == latin1.len) { + // It's all ascii, we don't need to copy it an extra time. 
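+ // (a Latin-1 byte >= 0x80 expands to two bytes in UTF-8, so equal lengths mean pure ASCII, which is already valid UTF-8)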
+ break :brk latin1; + } + + temp_buffer = allocator.alloc(u8, content_byte_len) catch return false; + const encode_result = strings.copyLatin1IntoUTF8(temp_buffer.?, []const u8, latin1); + break :brk temp_buffer.?[0..encode_result.written]; + }, + .bytes => |b| b, + .raw => unreachable, + }; + + // Check if compression is worth it + if (!this.shouldCompress(content_to_compress.len, opcode)) { + return this.sendDataUncompressed(bytes, do_write, opcode); + } + + { + // Compress the content + var compressed = std.ArrayList(u8).init(allocator); + defer compressed.deinit(); + + this.deflate.?.compress(content_to_compress, &compressed) catch { + // If compression fails, fall back to uncompressed + return this.sendDataUncompressed(bytes, do_write, opcode); + }; + + // Create the compressed frame + const frame_size = WebsocketHeader.frameSizeIncludingMask(compressed.items.len); + const writable = this.send_buffer.writableWithSize(frame_size) catch return false; + Copy.copyCompressed(this.globalThis, writable[0..frame_size], compressed.items, opcode, true); + this.send_buffer.update(frame_size); + } + + if (do_write) { + if (comptime Environment.allow_assert) { + bun.assert(!this.tcp.isShutdown()); + bun.assert(!this.tcp.isClosed()); + bun.assert(this.tcp.isEstablished()); + } + return this.sendBuffer(this.send_buffer.readableSlice(0)); + } + } else { + return this.sendDataUncompressed(bytes, do_write, opcode); + } + + return true; + } + + fn sendDataUncompressed(this: *WebSocket, bytes: Copy, do_write: bool, opcode: Opcode) bool { var content_byte_len: usize = 0; const write_len = bytes.len(&content_byte_len); bun.assert(write_len > 0); - var writable = this.send_buffer.writableWithSize(write_len) catch unreachable; + const writable = this.send_buffer.writableWithSize(write_len) catch unreachable; bytes.copy(this.globalThis, writable[0..write_len], content_byte_len, opcode); this.send_buffer.update(write_len); @@ -650,7 +831,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { if (this.tcp.isClosed()) { return false; } - const wrote = this.tcp.write(out_buf, false); + const wrote = this.tcp.write(out_buf); if (wrote < 0) { this.terminate(ErrorCode.failed_to_write); return false; @@ -928,6 +1109,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { globalThis: *JSC.JSGlobalObject, buffered_data: [*]u8, buffered_data_len: usize, + deflate_params: ?*const WebSocketDeflate.Params, ) callconv(.C) ?*anyopaque { const tcp = @as(*uws.us_socket_t, @ptrCast(input_socket)); const ctx = @as(*uws.SocketContext, @ptrCast(socket_ctx)); @@ -941,6 +1123,15 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { .event_loop = globalThis.bunVM().eventLoop(), }); + if (deflate_params) |params| { + if (WebSocketDeflate.init(bun.default_allocator, params.*, globalThis.bunVM().rareData())) |deflate| { + ws.deflate = deflate; + } else |_| { + // failed to init, silently disable compression + ws.deflate = null; + } + } + if (!Socket.adoptPtr( tcp, ctx, @@ -952,8 +1143,8 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { return null; } - ws.send_buffer.ensureTotalCapacity(2048) catch return null; - ws.receive_buffer.ensureTotalCapacity(2048) catch return null; + ws.send_buffer.ensureTotalCapacity(2048) catch bun.outOfMemory(); + ws.receive_buffer.ensureTotalCapacity(2048) catch bun.outOfMemory(); ws.poll_ref.ref(globalThis.bunVM()); const buffered_slice: []u8 = buffered_data[0..buffered_data_len]; @@ -1003,6 +1194,8 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { pub fn deinit(this: *WebSocket) void 
{ this.clearData(); + if (this.deflate) |d| d.deinit(); + this.deflate = null; bun.destroy(this); } @@ -1030,34 +1223,38 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { } pub const ErrorCode = enum(i32) { - cancel, - invalid_response, - expected_101_status_code, - missing_upgrade_header, - missing_connection_header, - missing_websocket_accept_header, - invalid_upgrade_header, - invalid_connection_header, - invalid_websocket_version, - mismatch_websocket_accept_header, - missing_client_protocol, - mismatch_client_protocol, - timeout, - closed, - failed_to_write, - failed_to_connect, - headers_too_large, - ended, - failed_to_allocate_memory, - control_frame_is_fragmented, - invalid_control_frame, - compression_unsupported, - unexpected_mask_from_server, - expected_control_frame, - unsupported_control_frame, - unexpected_opcode, - invalid_utf8, - tls_handshake_failed, + cancel = 1, + invalid_response = 2, + expected_101_status_code = 3, + missing_upgrade_header = 4, + missing_connection_header = 5, + missing_websocket_accept_header = 6, + invalid_upgrade_header = 7, + invalid_connection_header = 8, + invalid_websocket_version = 9, + mismatch_websocket_accept_header = 10, + missing_client_protocol = 11, + mismatch_client_protocol = 12, + timeout = 13, + closed = 14, + failed_to_write = 15, + failed_to_connect = 16, + headers_too_large = 17, + ended = 18, + failed_to_allocate_memory = 19, + control_frame_is_fragmented = 20, + invalid_control_frame = 21, + compression_unsupported = 22, + invalid_compressed_data = 23, + compression_failed = 24, + unexpected_mask_from_server = 25, + expected_control_frame = 26, + unsupported_control_frame = 27, + unexpected_opcode = 28, + invalid_utf8 = 29, + tls_handshake_failed = 30, + message_too_big = 31, + protocol_error = 32, }; const CppWebSocket = @import("./websocket_client/CppWebSocket.zig").CppWebSocket; @@ -1138,12 +1335,32 @@ fn parseWebSocketHeader( else => false, } or !header.final; is_final.* = header.final; - need_compression.* = header.compressed; + + // Per RFC 7692, RSV1 bit indicates compression for the first fragment of a message + // For continuation frames, compression state is inherited from the first fragment + if (header.opcode == .Text or header.opcode == .Binary) { + need_compression.* = header.compressed; + } else if (header.opcode == .Continue) { + // Compression state for continuation frames should be inherited from the message start + // This needs to be tracked at a higher level, not determined by the continuation frame's RSV1 + // For now, we don't set it here - it should be maintained by the WebSocket state + need_compression.* = false; + } else { + // Control frames cannot be compressed + if (header.compressed) { + return .fail; // Control frames with RSV1 set should fail + } + need_compression.* = false; + } + if (header.mask and (header.opcode == .Text or header.opcode == .Binary)) { return .need_mask; } - // reserved bits must be 0 + + // Check RSV bits (rsv2 and rsv3 must always be 0 per RFC 6455) + // rsv1 (compressed bit) is handled separately above if (header.rsv != 0) { + // RSV2 and RSV3 bits must always be 0 return .fail; } @@ -1228,7 +1445,7 @@ const Copy = union(enum) { switch (this) { .utf16 => |utf16| { header.len = WebsocketHeader.packLength(content_byte_len); - const encode_into_result = strings.copyUTF16IntoUTF8(to_mask, []const u16, utf16, true); + const encode_into_result = strings.copyUTF16IntoUTF8Impl(to_mask, []const u16, utf16, true); bun.assert(@as(usize, encode_into_result.written) == 
content_byte_len); bun.assert(@as(usize, encode_into_result.read) == utf16.len); header.len = WebsocketHeader.packLength(encode_into_result.written); @@ -1258,6 +1475,42 @@ const Copy = union(enum) { .raw => unreachable, } } + + pub fn copyCompressed(globalThis: *JSC.JSGlobalObject, buf: []u8, compressed_data: []const u8, opcode: Opcode, is_first_fragment: bool) void { + const content_byte_len = compressed_data.len; + const how_big_is_the_length_integer = WebsocketHeader.lengthByteCount(content_byte_len); + const how_big_is_the_mask = 4; + const mask_offset = 2 + how_big_is_the_length_integer; + const content_offset = mask_offset + how_big_is_the_mask; + + // 2 byte header + // 4 byte mask + // 0, 2, 8 byte length + var to_mask = buf[content_offset..]; + + // Write extended length if needed + switch (how_big_is_the_length_integer) { + 0 => {}, + 2 => std.mem.writeInt(u16, buf[2..][0..2], @as(u16, @truncate(content_byte_len)), .big), + 8 => std.mem.writeInt(u64, buf[2..][0..8], @as(u64, @truncate(content_byte_len)), .big), + else => unreachable, + } + + var header = @as(WebsocketHeader, @bitCast(@as(u16, 0))); + + header.mask = true; + header.compressed = is_first_fragment; // Only set compressed flag for first fragment + header.final = true; + header.opcode = opcode; + header.len = WebsocketHeader.packLength(content_byte_len); + + bun.assert(WebsocketHeader.frameSizeIncludingMask(content_byte_len) == buf.len); + + var fib = std.io.fixedBufferStream(buf); + header.writeHeader(fib.writer(), content_byte_len) catch unreachable; + + Mask.fill(globalThis, buf[mask_offset..][0..4], to_mask[0..content_byte_len], compressed_data); + } }; const std = @import("std"); @@ -1274,5 +1527,6 @@ const Async = bun.Async; const Opcode = @import("./websocket.zig").Opcode; const WebsocketHeader = @import("./websocket.zig").WebsocketHeader; const BoringSSL = bun.BoringSSL; +const WebSocketDeflate = @import("./websocket_client/WebSocketDeflate.zig"); const log = Output.scoped(.WebSocketClient, false); diff --git a/src/http/websocket_client/CppWebSocket.zig b/src/http/websocket_client/CppWebSocket.zig index e1c0c75eb9..d47ed54e11 100644 --- a/src/http/websocket_client/CppWebSocket.zig +++ b/src/http/websocket_client/CppWebSocket.zig @@ -13,6 +13,7 @@ pub const CppWebSocket = opaque { socket: *uws.Socket, buffered_data: ?[*]u8, buffered_len: usize, + deflate_params: ?*const WebSocketDeflate.Params, ) void; extern fn WebSocket__didAbruptClose(websocket_context: *CppWebSocket, reason: ErrorCode) void; extern fn WebSocket__didClose(websocket_context: *CppWebSocket, code: u16, reason: *const bun.String) void; @@ -49,11 +50,11 @@ pub const CppWebSocket = opaque { defer loop.exit(); return WebSocket__rejectUnauthorized(this); } - pub fn didConnect(this: *CppWebSocket, socket: *uws.Socket, buffered_data: ?[*]u8, buffered_len: usize) void { + pub fn didConnect(this: *CppWebSocket, socket: *uws.Socket, buffered_data: ?[*]u8, buffered_len: usize, deflate_params: ?*const WebSocketDeflate.Params) void { const loop = JSC.VirtualMachine.get().eventLoop(); loop.enter(); defer loop.exit(); - WebSocket__didConnect(this, socket, buffered_data, buffered_len); + WebSocket__didConnect(this, socket, buffered_data, buffered_len, deflate_params); } extern fn WebSocket__incrementPendingActivity(websocket_context: *CppWebSocket) void; extern fn WebSocket__decrementPendingActivity(websocket_context: *CppWebSocket) void; @@ -72,3 +73,4 @@ const uws = bun.uws; const JSC = bun.JSC; const ErrorCode = 
@import("../websocket_client.zig").ErrorCode; const bun = @import("bun"); +const WebSocketDeflate = @import("./WebSocketDeflate.zig"); diff --git a/src/http/websocket_client/WebSocketDeflate.zig b/src/http/websocket_client/WebSocketDeflate.zig new file mode 100644 index 0000000000..8defae819c --- /dev/null +++ b/src/http/websocket_client/WebSocketDeflate.zig @@ -0,0 +1,223 @@ +/// Manages the DEFLATE compression and decompression streams for a WebSocket connection. +const PerMessageDeflate = @This(); + +pub const Params = extern struct { + server_max_window_bits: u8 = 15, + client_max_window_bits: u8 = 15, + server_no_context_takeover: u8 = 0, + client_no_context_takeover: u8 = 0, + + pub const MAX_WINDOW_BITS: u8 = 15; + pub const MIN_WINDOW_BITS: u8 = 8; +}; + +pub const RareData = struct { + libdeflate_compressor: ?*libdeflate.Compressor = null, + libdeflate_decompressor: ?*libdeflate.Decompressor = null, + stack_fallback: std.heap.StackFallbackAllocator(RareData.stack_buffer_size) = undefined, + + pub const stack_buffer_size = 128 * 1024; + + pub fn arrayList(this: *RareData) std.ArrayList(u8) { + var list = std.ArrayList(u8).init(this.allocator()); + list.items = &this.stack_fallback.buffer; + list.items.len = 0; + list.capacity = this.stack_fallback.buffer.len; + this.stack_fallback.fixed_buffer_allocator.end_index = this.stack_fallback.buffer.len; + return list; + } + + pub fn deinit(this: *RareData) void { + inline for (.{ &this.libdeflate_compressor, &this.libdeflate_decompressor }) |comp| { + if (comp.*) |c| { + c.deinit(); + } + } + + bun.destroy(this); + } + + pub fn allocator(this: *RareData) std.mem.Allocator { + this.stack_fallback = .{ + .buffer = undefined, + .fallback_allocator = bun.default_allocator, + .fixed_buffer_allocator = undefined, + }; + return this.stack_fallback.get(); + } + + pub fn decompressor(this: *RareData) *libdeflate.Decompressor { + return this.libdeflate_decompressor orelse brk: { + this.libdeflate_decompressor = libdeflate.Decompressor.alloc(); + break :brk this.libdeflate_decompressor.?; + }; + } + + pub fn compressor(this: *RareData) *libdeflate.Compressor { + return this.libdeflate_compressor orelse brk: { + this.libdeflate_compressor = libdeflate.Compressor.alloc(); + break :brk this.libdeflate_compressor.?; + }; + } +}; + +allocator: std.mem.Allocator, +compress_stream: zlib.z_stream, +decompress_stream: zlib.z_stream, +params: Params, +rare_data: *RareData, + +// Constants from zlib.h +const Z_DEFAULT_COMPRESSION = 6; +const Z_DEFLATED = 8; +const Z_DEFAULT_STRATEGY = 0; +const Z_DEFAULT_MEM_LEVEL = 8; + +// Buffer size for compression/decompression operations +const COMPRESSION_BUFFER_SIZE = 4096; + +// DEFLATE trailer bytes added by Z_SYNC_FLUSH +const DEFLATE_TRAILER = [_]u8{ 0x00, 0x00, 0xff, 0xff }; + +pub fn init(allocator: std.mem.Allocator, params: Params, rare_data: *JSC.RareData) !*PerMessageDeflate { + const self = try allocator.create(PerMessageDeflate); + self.* = .{ + .allocator = allocator, + .params = params, + .compress_stream = std.mem.zeroes(zlib.z_stream), + .decompress_stream = std.mem.zeroes(zlib.z_stream), + .rare_data = rare_data.websocketDeflate(), + }; + + // Initialize compressor (deflate) + // We use negative window bits for raw DEFLATE, as required by RFC 7692. 
+ const compress_err = zlib.deflateInit2_( + &self.compress_stream, + Z_DEFAULT_COMPRESSION, // level + Z_DEFLATED, // method + -@as(c_int, self.params.client_max_window_bits), // windowBits + Z_DEFAULT_MEM_LEVEL, // memLevel + Z_DEFAULT_STRATEGY, // strategy + zlib.zlibVersion(), + @sizeOf(zlib.z_stream), + ); + if (compress_err != .Ok) { + allocator.destroy(self); + return error.DeflateInitFailed; + } + + // Initialize decompressor (inflate) + const decompress_err = zlib.inflateInit2_( + &self.decompress_stream, + -@as(c_int, self.params.server_max_window_bits), // windowBits + zlib.zlibVersion(), + @sizeOf(zlib.z_stream), + ); + if (decompress_err != .Ok) { + _ = zlib.deflateEnd(&self.compress_stream); + allocator.destroy(self); + return error.InflateInitFailed; + } + + return self; +} + +pub fn deinit(self: *PerMessageDeflate) void { + _ = zlib.deflateEnd(&self.compress_stream); + _ = zlib.inflateEnd(&self.decompress_stream); + self.allocator.destroy(self); +} + +fn canUseLibDeflate(len: usize) bool { + if (bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_NO_LIBDEFLATE)) { + return false; + } + + return len < RareData.stack_buffer_size; +} + +pub fn decompress(self: *PerMessageDeflate, in_buf: []const u8, out: *std.ArrayList(u8)) error{ InflateFailed, OutOfMemory }!void { + + // First we try with libdeflate, which is both faster and doesn't need the trailing deflate bytes + if (canUseLibDeflate(in_buf.len)) { + const result = self.rare_data.decompressor().deflate(in_buf, out.unusedCapacitySlice()); + if (result.status == .success) { + out.items.len += result.written; + return; + } + } + + var in_with_trailer = std.ArrayList(u8).init(self.allocator); + defer in_with_trailer.deinit(); + try in_with_trailer.appendSlice(in_buf); + try in_with_trailer.appendSlice(&DEFLATE_TRAILER); + + self.decompress_stream.next_in = in_with_trailer.items.ptr; + self.decompress_stream.avail_in = @intCast(in_with_trailer.items.len); + + while (true) { + try out.ensureUnusedCapacity(COMPRESSION_BUFFER_SIZE); + self.decompress_stream.next_out = out.unusedCapacitySlice().ptr; + self.decompress_stream.avail_out = @intCast(out.unusedCapacitySlice().len); + + const res = zlib.inflate(&self.decompress_stream, zlib.FlushValue.NoFlush); + out.items.len += out.unusedCapacitySlice().len - self.decompress_stream.avail_out; + + if (res == .StreamEnd) { + break; + } + if (res != .Ok) { + return error.InflateFailed; + } + if (self.decompress_stream.avail_out == 0 and self.decompress_stream.avail_in != 0) { + // Need more output buffer space, continue loop + continue; + } + if (self.decompress_stream.avail_in == 0) { + // Sync-flushed DEFLATE data has no final block, so inflate returns Ok rather than StreamEnd; exhausting the input here is the expected exit.
+ break; + } + } + + if (self.params.server_no_context_takeover == 1) { + _ = zlib.inflateReset(&self.decompress_stream); + } +} + +pub fn compress(self: *PerMessageDeflate, in_buf: []const u8, out: *std.ArrayList(u8)) error{ DeflateFailed, OutOfMemory }!void { + self.compress_stream.next_in = in_buf.ptr; + self.compress_stream.avail_in = @intCast(in_buf.len); + + while (true) { + try out.ensureUnusedCapacity(COMPRESSION_BUFFER_SIZE); + self.compress_stream.next_out = out.unusedCapacitySlice().ptr; + self.compress_stream.avail_out = @intCast(out.unusedCapacitySlice().len); + + const res = zlib.deflate(&self.compress_stream, zlib.FlushValue.SyncFlush); + out.items.len += out.unusedCapacitySlice().len - self.compress_stream.avail_out; + if (res != .Ok) + return error.DeflateFailed; + + // exit only when zlib is truly finished + if (self.compress_stream.avail_in == 0 and self.compress_stream.avail_out != 0) { + break; + } + } + + // Remove the 4-byte trailer (00 00 FF FF) added by Z_SYNC_FLUSH + if (out.items.len >= 4 and + std.mem.eql(u8, out.items[out.items.len - 4 ..], &DEFLATE_TRAILER)) + { + out.shrinkRetainingCapacity(out.items.len - 4); + } + + if (self.params.client_no_context_takeover == 1) { + _ = zlib.deflateReset(&self.compress_stream); + } +} + +const std = @import("std"); +const bun = @import("bun"); +const zlib = bun.zlib; +const JSC = bun.JSC; +const libdeflate = bun.libdeflate; diff --git a/src/http/websocket_client/WebSocketUpgradeClient.zig b/src/http/websocket_client/WebSocketUpgradeClient.zig index cb23ce9051..f52dfb2aa9 100644 --- a/src/http/websocket_client/WebSocketUpgradeClient.zig +++ b/src/http/websocket_client/WebSocketUpgradeClient.zig @@ -25,6 +25,11 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { pub const deref = RefCount.deref; pub const Socket = uws.NewSocketHandler(ssl); + pub const DeflateNegotiationResult = struct { + enabled: bool = false, + params: WebSocketDeflate.Params = .{}, + }; + ref_count: RefCount, tcp: Socket, outgoing_websocket: ?*CppWebSocket, @@ -270,7 +275,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { } // Do not set MSG_MORE, see https://github.com/oven-sh/bun/issues/4010 - const wrote = socket.write(this.input_body_buf, false); + const wrote = socket.write(this.input_body_buf); if (wrote < 0) { this.terminate(ErrorCode.failed_to_write); return; @@ -343,6 +348,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { var websocket_accept_header = PicoHTTP.Header{ .name = "", .value = "" }; var visited_protocol = this.websocket_protocol == 0; // var visited_version = false; + var deflate_result = DeflateNegotiationResult{}; if (response.status_code != 101) { this.terminate(ErrorCode.expected_101_status_code); @@ -354,17 +360,11 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { "Connection".len => { if (connection_header.name.len == 0 and strings.eqlCaseInsensitiveASCII(header.name, "Connection", false)) { connection_header = header; - if (visited_protocol and upgrade_header.name.len > 0 and connection_header.name.len > 0 and websocket_accept_header.name.len > 0) { - break; - } } }, "Upgrade".len => { if (upgrade_header.name.len == 0 and strings.eqlCaseInsensitiveASCII(header.name, "Upgrade", false)) { upgrade_header = header; - if (visited_protocol and upgrade_header.name.len > 0 and connection_header.name.len > 0 and websocket_accept_header.name.len > 0) { - break; - } } }, "Sec-WebSocket-Version".len => { @@ -378,9 +378,6 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { "Sec-WebSocket-Accept".len 
=> { if (websocket_accept_header.name.len == 0 and strings.eqlCaseInsensitiveASCII(header.name, "Sec-WebSocket-Accept", false)) { websocket_accept_header = header; - if (visited_protocol and upgrade_header.name.len > 0 and connection_header.name.len > 0 and websocket_accept_header.name.len > 0) { - break; - } } }, "Sec-WebSocket-Protocol".len => { @@ -390,9 +387,61 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { return; } visited_protocol = true; + } + }, + "Sec-WebSocket-Extensions".len => { + if (strings.eqlCaseInsensitiveASCII(header.name, "Sec-WebSocket-Extensions", false)) { + // This is a simplified parser. A full parser would handle multiple extensions and quoted values. + var it = std.mem.splitScalar(u8, header.value, ','); + while (it.next()) |ext_str| { + var ext_it = std.mem.splitScalar(u8, std.mem.trim(u8, ext_str, " \t"), ';'); + const ext_name = std.mem.trim(u8, ext_it.next() orelse "", " \t"); + if (strings.eqlComptime(ext_name, "permessage-deflate")) { + deflate_result.enabled = true; + while (ext_it.next()) |param_str| { + var param_it = std.mem.splitScalar(u8, std.mem.trim(u8, param_str, " \t"), '='); + const key = std.mem.trim(u8, param_it.next() orelse "", " \t"); + const value = std.mem.trim(u8, param_it.next() orelse "", " \t"); - if (visited_protocol and upgrade_header.name.len > 0 and connection_header.name.len > 0 and websocket_accept_header.name.len > 0) { - break; + if (strings.eqlComptime(key, "server_no_context_takeover")) { + deflate_result.params.server_no_context_takeover = 1; + } else if (strings.eqlComptime(key, "client_no_context_takeover")) { + deflate_result.params.client_no_context_takeover = 1; + } else if (strings.eqlComptime(key, "server_max_window_bits")) { + if (value.len > 0) { + // Remove quotes if present + const trimmed_value = if (value.len >= 2 and value[0] == '"' and value[value.len - 1] == '"') + value[1 .. value.len - 1] + else + value; + + if (std.fmt.parseInt(u8, trimmed_value, 10) catch null) |bits| { + if (bits >= WebSocketDeflate.Params.MIN_WINDOW_BITS and bits <= WebSocketDeflate.Params.MAX_WINDOW_BITS) { + deflate_result.params.server_max_window_bits = bits; + } + } + } + } else if (strings.eqlComptime(key, "client_max_window_bits")) { + if (value.len > 0) { + // Remove quotes if present + const trimmed_value = if (value.len >= 2 and value[0] == '"' and value[value.len - 1] == '"') + value[1 .. value.len - 1] + else + value; + + if (std.fmt.parseInt(u8, trimmed_value, 10) catch null) |bits| { + if (bits >= WebSocketDeflate.Params.MIN_WINDOW_BITS and bits <= WebSocketDeflate.Params.MAX_WINDOW_BITS) { + deflate_result.params.client_max_window_bits = bits; + } + } + } else { + // client_max_window_bits without value means use default (15) + deflate_result.params.client_max_window_bits = 15; + } + } + } + break; // Found and parsed permessage-deflate, stop. + } } } }, @@ -462,7 +511,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { // Once again for the TCP socket. 
defer this.deref(); - ws.didConnect(socket.socket.get().?, overflow.ptr, overflow.len); + ws.didConnect(socket.socket.get().?, overflow.ptr, overflow.len, if (deflate_result.enabled) &deflate_result.params else null); } else if (this.tcp.isClosed()) { this.terminate(ErrorCode.cancel); } else if (this.outgoing_websocket == null) { @@ -490,7 +539,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { defer this.deref(); // Do not set MSG_MORE, see https://github.com/oven-sh/bun/issues/4010 - const wrote = socket.write(this.to_send, false); + const wrote = socket.write(this.to_send); if (wrote < 0) { this.terminate(ErrorCode.failed_to_write); return; @@ -618,6 +667,7 @@ fn buildRequestBody( "Connection: Upgrade\r\n" ++ "Upgrade: websocket\r\n" ++ "Sec-WebSocket-Version: 13\r\n" ++ + "Sec-WebSocket-Extensions: permessage-deflate; client_max_window_bits\r\n" ++ "{s}" ++ "{s}" ++ "\r\n", @@ -642,5 +692,6 @@ const Async = bun.Async; const websocket_client = @import("../websocket_client.zig"); const CppWebSocket = @import("./CppWebSocket.zig").CppWebSocket; const ErrorCode = websocket_client.ErrorCode; +const WebSocketDeflate = @import("./WebSocketDeflate.zig"); const log = Output.scoped(.WebSocketUpgradeClient, false); diff --git a/src/identity_context.zig b/src/identity_context.zig index 5e0cfe987e..171c253540 100644 --- a/src/identity_context.zig +++ b/src/identity_context.zig @@ -1,7 +1,11 @@ pub fn IdentityContext(comptime Key: type) type { return struct { pub fn hash(_: @This(), key: Key) u64 { - return key; + return switch (comptime @typeInfo(Key)) { + .@"enum" => @intFromEnum(key), + .int => key, + else => @compileError("unexpected identity context type"), + }; } pub fn eql(_: @This(), a: Key, b: Key) bool { diff --git a/src/install/ExternalSlice.zig b/src/install/ExternalSlice.zig new file mode 100644 index 0000000000..faace729ce --- /dev/null +++ b/src/install/ExternalSlice.zig @@ -0,0 +1,75 @@ +pub fn ExternalSlice(comptime Type: type) type { + return extern struct { + pub const Slice = @This(); + + pub const Child: type = Type; + + off: u32 = 0, + len: u32 = 0, + + pub const invalid: @This() = .{ .off = std.math.maxInt(u32), .len = std.math.maxInt(u32) }; + + pub inline fn isInvalid(this: Slice) bool { + return this.off == std.math.maxInt(u32) and this.len == std.math.maxInt(u32); + } + + pub inline fn contains(this: Slice, id: u32) bool { + return id >= this.off and id < (this.len + this.off); + } + + pub inline fn get(this: Slice, in: []const Type) []const Type { + if (comptime Environment.allow_assert) { + bun.assert(this.off + this.len <= in.len); + } + // it should be impossible to address this out of bounds due to the minimum here + return in.ptr[this.off..@min(in.len, this.off + this.len)]; + } + + pub inline fn mut(this: Slice, in: []Type) []Type { + if (comptime Environment.allow_assert) { + bun.assert(this.off + this.len <= in.len); + } + return in.ptr[this.off..@min(in.len, this.off + this.len)]; + } + + pub inline fn begin(this: Slice) u32 { + return this.off; + } + + pub inline fn end(this: Slice) u32 { + return this.off + this.len; + } + + pub fn init(buf: []const Type, in: []const Type) Slice { + // if (comptime Environment.allow_assert) { + // bun.assert(@intFromPtr(buf.ptr) <= @intFromPtr(in.ptr)); + // bun.assert((@intFromPtr(in.ptr) + in.len) <= (@intFromPtr(buf.ptr) + buf.len)); + // } + + return Slice{ + .off = @as(u32, @truncate((@intFromPtr(in.ptr) - @intFromPtr(buf.ptr)) / @sizeOf(Type))), + .len = @as(u32, @truncate(in.len)), + }; + } + }; +} + +pub const 
ExternalStringMap = extern struct { + name: ExternalStringList = .{}, + value: ExternalStringList = .{}, +}; + +pub const ExternalStringList = ExternalSlice(ExternalString); +pub const ExternalPackageNameHashList = ExternalSlice(PackageNameHash); +pub const VersionSlice = ExternalSlice(Semver.Version); + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const PackageNameHash = bun.install.PackageNameHash; + +const Semver = bun.Semver; +const ExternalString = Semver.ExternalString; diff --git a/src/install/NetworkTask.zig b/src/install/NetworkTask.zig new file mode 100644 index 0000000000..8f0e711f06 --- /dev/null +++ b/src/install/NetworkTask.zig @@ -0,0 +1,332 @@ +unsafe_http_client: AsyncHTTP = undefined, +response: bun.http.HTTPClientResult = .{}, +task_id: Task.Id, +url_buf: []const u8 = &[_]u8{}, +retried: u16 = 0, +allocator: std.mem.Allocator, +request_buffer: MutableString = undefined, +response_buffer: MutableString = undefined, +package_manager: *PackageManager, +callback: union(Task.Tag) { + package_manifest: struct { + loaded_manifest: ?Npm.PackageManifest = null, + name: strings.StringOrTinyString, + }, + extract: ExtractTarball, + git_clone: void, + git_checkout: void, + local_tarball: void, +}, +/// Key in patchedDependencies in package.json +apply_patch_task: ?*PatchTask = null, +next: ?*NetworkTask = null, + +pub const DedupeMapEntry = struct { + is_required: bool, +}; +pub const DedupeMap = std.HashMap(Task.Id, DedupeMapEntry, IdentityContext(Task.Id), 80); + +pub fn notify(this: *NetworkTask, async_http: *AsyncHTTP, result: bun.http.HTTPClientResult) void { + defer this.package_manager.wake(); + async_http.real.?.* = async_http.*; + async_http.real.?.response_buffer = async_http.response_buffer; + this.response = result; + this.package_manager.async_network_task_queue.push(this); +} + +pub const Authorization = enum { + no_authorization, + allow_authorization, +}; + +// We must use a less restrictive Accept header value +// https://github.com/oven-sh/bun/issues/341 +// https://www.jfrog.com/jira/browse/RTFACT-18398 +const accept_header_value = "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*"; + +const default_headers_buf: string = "Accept" ++ accept_header_value; + +fn appendAuth(header_builder: *HeaderBuilder, scope: *const Npm.Registry.Scope) void { + if (scope.token.len > 0) { + header_builder.appendFmt("Authorization", "Bearer {s}", .{scope.token}); + } else if (scope.auth.len > 0) { + header_builder.appendFmt("Authorization", "Basic {s}", .{scope.auth}); + } else { + return; + } + header_builder.append("npm-auth-type", "legacy"); +} + +fn countAuth(header_builder: *HeaderBuilder, scope: *const Npm.Registry.Scope) void { + if (scope.token.len > 0) { + header_builder.count("Authorization", ""); + header_builder.content.cap += "Bearer ".len + scope.token.len; + } else if (scope.auth.len > 0) { + header_builder.count("Authorization", ""); + header_builder.content.cap += "Basic ".len + scope.auth.len; + } else { + return; + } + header_builder.count("npm-auth-type", "legacy"); +} + +pub fn forManifest( + this: *NetworkTask, + name: string, + allocator: std.mem.Allocator, + scope: *const Npm.Registry.Scope, + loaded_manifest: ?*const Npm.PackageManifest, + is_optional: bool, +) !void { + this.url_buf = blk: { + + // Not all registries support scoped package names when fetching the manifest. 
+ // registry.npmjs.org supports both "@storybook%2Faddons" and "@storybook/addons" + // Other registries like AWS codeartifact only support the former. + // "npm" CLI requests the manifest with the encoded name. + var arena = std.heap.ArenaAllocator.init(bun.default_allocator); + defer arena.deinit(); + var stack_fallback_allocator = std.heap.stackFallback(512, arena.allocator()); + var encoded_name = name; + if (strings.containsChar(name, '/')) { + encoded_name = try std.mem.replaceOwned(u8, stack_fallback_allocator.get(), name, "/", "%2f"); + } + + const tmp = bun.JSC.URL.join( + bun.String.fromUTF8(scope.url.href), + bun.String.fromUTF8(encoded_name), + ); + defer tmp.deref(); + + if (tmp.tag == .Dead) { + if (!is_optional) { + this.package_manager.log.addErrorFmt( + null, + logger.Loc.Empty, + allocator, + "Failed to join registry {} and package {} URLs", + .{ bun.fmt.QuotedFormatter{ .text = scope.url.href }, bun.fmt.QuotedFormatter{ .text = name } }, + ) catch bun.outOfMemory(); + } else { + this.package_manager.log.addWarningFmt( + null, + logger.Loc.Empty, + allocator, + "Failed to join registry {} and package {} URLs", + .{ bun.fmt.QuotedFormatter{ .text = scope.url.href }, bun.fmt.QuotedFormatter{ .text = name } }, + ) catch bun.outOfMemory(); + } + return error.InvalidURL; + } + + if (!(tmp.hasPrefixComptime("https://") or tmp.hasPrefixComptime("http://"))) { + if (!is_optional) { + this.package_manager.log.addErrorFmt( + null, + logger.Loc.Empty, + allocator, + "Registry URL must be http:// or https://\nReceived: \"{}\"", + .{tmp}, + ) catch bun.outOfMemory(); + } else { + this.package_manager.log.addWarningFmt( + null, + logger.Loc.Empty, + allocator, + "Registry URL must be http:// or https://\nReceived: \"{}\"", + .{tmp}, + ) catch bun.outOfMemory(); + } + return error.InvalidURL; + } + + // This actually duplicates the string! So we defer deref the WTF managed one above. 
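+ // (toOwnedSlice copies into memory owned by `allocator`, so url_buf stays valid after the deferred deref releases the WTF-managed string)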
+ break :blk try tmp.toOwnedSlice(allocator); + }; + + var last_modified: string = ""; + var etag: string = ""; + if (loaded_manifest) |manifest| { + last_modified = manifest.pkg.last_modified.slice(manifest.string_buf); + etag = manifest.pkg.etag.slice(manifest.string_buf); + } + + var header_builder = HeaderBuilder{}; + + countAuth(&header_builder, scope); + + if (etag.len != 0) { + header_builder.count("If-None-Match", etag); + } + + if (last_modified.len != 0) { + header_builder.count("If-Modified-Since", last_modified); + } + + if (header_builder.header_count > 0) { + header_builder.count("Accept", accept_header_value); + if (last_modified.len > 0 and etag.len > 0) { + header_builder.content.count(last_modified); + } + try header_builder.allocate(allocator); + + appendAuth(&header_builder, scope); + + if (etag.len != 0) { + header_builder.append("If-None-Match", etag); + } else if (last_modified.len != 0) { + header_builder.append("If-Modified-Since", last_modified); + } + + header_builder.append("Accept", accept_header_value); + + if (last_modified.len > 0 and etag.len > 0) { + last_modified = header_builder.content.append(last_modified); + } + } else { + try header_builder.entries.append( + allocator, + .{ + .name = .{ .offset = 0, .length = @as(u32, @truncate("Accept".len)) }, + .value = .{ .offset = "Accept".len, .length = @as(u32, @truncate(default_headers_buf.len - "Accept".len)) }, + }, + ); + header_builder.header_count = 1; + header_builder.content = GlobalStringBuilder{ .ptr = @as([*]u8, @ptrFromInt(@intFromPtr(bun.span(default_headers_buf).ptr))), .len = default_headers_buf.len, .cap = default_headers_buf.len }; + } + + this.response_buffer = try MutableString.init(allocator, 0); + this.allocator = allocator; + + const url = URL.parse(this.url_buf); + this.unsafe_http_client = AsyncHTTP.init(allocator, .GET, url, header_builder.entries, header_builder.content.ptr.?[0..header_builder.content.len], &this.response_buffer, "", this.getCompletionCallback(), HTTP.FetchRedirect.follow, .{ + .http_proxy = this.package_manager.httpProxy(url), + }); + this.unsafe_http_client.client.flags.reject_unauthorized = this.package_manager.tlsRejectUnauthorized(); + + if (PackageManager.verbose_install) { + this.unsafe_http_client.client.verbose = .headers; + } + + this.callback = .{ + .package_manifest = .{ + .name = try strings.StringOrTinyString.initAppendIfNeeded(name, *FileSystem.FilenameStore, FileSystem.FilenameStore.instance), + .loaded_manifest = if (loaded_manifest) |manifest| manifest.* else null, + }, + }; + + if (PackageManager.verbose_install) { + this.unsafe_http_client.verbose = .headers; + this.unsafe_http_client.client.verbose = .headers; + } + + // In case the ETag causes invalidation, we fall back to the last modified date.
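+ // (standard HTTP revalidation: If-None-Match / If-Modified-Since let the registry answer 304 Not Modified so the cached manifest can be reused instead of re-downloaded)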
+ if (last_modified.len != 0 and bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_LAST_MODIFIED_PRETEND_304)) { + this.unsafe_http_client.client.flags.force_last_modified = true; + this.unsafe_http_client.client.if_modified_since = last_modified; + } +} + +pub fn getCompletionCallback(this: *NetworkTask) HTTP.HTTPClientResult.Callback { + return HTTP.HTTPClientResult.Callback.New(*NetworkTask, notify).init(this); +} + +pub fn schedule(this: *NetworkTask, batch: *ThreadPool.Batch) void { + this.unsafe_http_client.schedule(this.allocator, batch); +} + +pub const ForTarballError = OOM || error{ + InvalidURL, +}; + +pub fn forTarball( + this: *NetworkTask, + allocator: std.mem.Allocator, + tarball_: *const ExtractTarball, + scope: *const Npm.Registry.Scope, + authorization: NetworkTask.Authorization, +) ForTarballError!void { + this.callback = .{ .extract = tarball_.* }; + const tarball = &this.callback.extract; + const tarball_url = tarball.url.slice(); + if (tarball_url.len == 0) { + this.url_buf = try ExtractTarball.buildURL( + scope.url.href, + tarball.name, + tarball.resolution.value.npm.version, + this.package_manager.lockfile.buffers.string_bytes.items, + ); + } else { + this.url_buf = tarball_url; + } + + if (!(strings.hasPrefixComptime(this.url_buf, "https://") or strings.hasPrefixComptime(this.url_buf, "http://"))) { + const msg = .{ + .fmt = "Expected tarball URL to start with https:// or http://, got {} while fetching package {}", + .args = .{ bun.fmt.QuotedFormatter{ .text = this.url_buf }, bun.fmt.QuotedFormatter{ .text = tarball.name.slice() } }, + }; + + try this.package_manager.log.addErrorFmt(null, .{}, allocator, msg.fmt, msg.args); + return error.InvalidURL; + } + + this.response_buffer = MutableString.initEmpty(allocator); + this.allocator = allocator; + + var header_builder = HeaderBuilder{}; + var header_buf: string = ""; + + if (authorization == .allow_authorization) { + countAuth(&header_builder, scope); + } + + if (header_builder.header_count > 0) { + try header_builder.allocate(allocator); + + if (authorization == .allow_authorization) { + appendAuth(&header_builder, scope); + } + + header_buf = header_builder.content.ptr.?[0..header_builder.content.len]; + } + + const url = URL.parse(this.url_buf); + + this.unsafe_http_client = AsyncHTTP.init(allocator, .GET, url, header_builder.entries, header_buf, &this.response_buffer, "", this.getCompletionCallback(), HTTP.FetchRedirect.follow, .{ + .http_proxy = this.package_manager.httpProxy(url), + }); + this.unsafe_http_client.client.flags.reject_unauthorized = this.package_manager.tlsRejectUnauthorized(); + if (PackageManager.verbose_install) { + this.unsafe_http_client.client.verbose = .headers; + } +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const GlobalStringBuilder = bun.StringBuilder; +const IdentityContext = bun.IdentityContext; +const MutableString = bun.MutableString; +const OOM = bun.OOM; +const ThreadPool = bun.ThreadPool; +const URL = bun.URL; +const logger = bun.logger; +const string = bun.string; +const strings = bun.strings; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const HTTP = bun.http; +const AsyncHTTP = HTTP.AsyncHTTP; +const HeaderBuilder = HTTP.HeaderBuilder; + +const install = @import("install.zig"); +const ExtractTarball = install.ExtractTarball; +const NetworkTask = install.NetworkTask; +const Npm = install.Npm; +const PackageManager = install.PackageManager; +const PatchTask = install.PatchTask; +const Task = install.Task; diff --git 
a/src/install/PackageInstall.zig b/src/install/PackageInstall.zig index 589205a197..c65dfc098d 100644 --- a/src/install/PackageInstall.zig +++ b/src/install/PackageInstall.zig @@ -43,7 +43,7 @@ pub const PackageInstall = struct { package_name: String, package_version: string, - patch: Patch, + patch: ?Patch, // TODO: this is never read file_count: u32 = 0, @@ -53,15 +53,8 @@ pub const PackageInstall = struct { const ThisPackageInstall = @This(); pub const Patch = struct { - root_project_dir: ?[]const u8 = null, - patch_path: string = undefined, - patch_contents_hash: u64 = 0, - - pub const NULL = Patch{}; - - pub fn isNull(this: Patch) bool { - return this.root_project_dir == null; - } + path: string, + contents_hash: u64, }; const debug = Output.scoped(.install, true); @@ -140,12 +133,11 @@ pub const PackageInstall = struct { /// fn verifyPatchHash( this: *@This(), + patch: *const Patch, root_node_modules_dir: std.fs.Dir, ) bool { - bun.debugAssert(!this.patch.isNull()); - // hash from the .patch file, to be checked against bun tag - const patchfile_contents_hash = this.patch.patch_contents_hash; + const patchfile_contents_hash = patch.contents_hash; var buf: BuntagHashBuf = undefined; const bunhashtag = buntaghashbuf_make(&buf, patchfile_contents_hash); @@ -211,9 +203,12 @@ pub const PackageInstall = struct { this.verifyTransitiveSymlinkedFolder(root_node_modules_dir), else => this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag), }; - if (this.patch.isNull()) return verified; - if (!verified) return false; - return this.verifyPatchHash(root_node_modules_dir); + + if (this.patch) |*patch| { + if (!verified) return false; + return this.verifyPatchHash(patch, root_node_modules_dir); + } + return verified; } // Only check for destination directory in node_modules. We can't use package.json because @@ -415,7 +410,7 @@ pub const PackageInstall = struct { var cached_package_dir = bun.openDir(this.cache_dir, this.cache_dir_subpath) catch |err| return Result.fail(err, .opening_cache_dir, @errorReturnTrace()); defer cached_package_dir.close(); var walker_ = Walker.walk( - cached_package_dir, + .fromStdDir(cached_package_dir), this.allocator, &[_]bun.OSPathSlice{}, &[_]bun.OSPathSlice{}, @@ -429,7 +424,7 @@ pub const PackageInstall = struct { ) !u32 { var real_file_count: u32 = 0; var stackpath: [bun.MAX_PATH_BYTES]u8 = undefined; - while (try walker.next()) |entry| { + while (try walker.next().unwrap()) |entry| { switch (entry.kind) { .directory => { _ = bun.sys.mkdirat(.fromStdDir(destination_dir_), entry.path, 0o755); @@ -440,7 +435,7 @@ pub const PackageInstall = struct { const path: [:0]u8 = stackpath[0..entry.path.len :0]; const basename: [:0]u8 = stackpath[entry.path.len - entry.basename.len .. 
entry.path.len :0]; switch (bun.c.clonefileat( - entry.dir.fd, + entry.dir.cast(), basename, destination_dir_.fd, path, @@ -549,7 +544,7 @@ pub const PackageInstall = struct { return Result.fail(err, .opening_cache_dir, @errorReturnTrace()); state.walker = Walker.walk( - state.cached_package_dir, + .fromStdDir(state.cached_package_dir), this.allocator, &[_]bun.OSPathSlice{}, if (method == .symlink and this.cache_dir_subpath.len == 1 and this.cache_dir_subpath[0] == '.') @@ -635,7 +630,7 @@ pub const PackageInstall = struct { var copy_file_state: bun.CopyFileState = .{}; - while (try walker.next()) |entry| { + while (try walker.next().unwrap()) |entry| { if (comptime Environment.isWindows) { switch (entry.kind) { .directory, .file => {}, @@ -688,10 +683,9 @@ pub const PackageInstall = struct { } else { if (entry.kind != .file) continue; real_file_count += 1; - const openFile = std.fs.Dir.openFile; const createFile = std.fs.Dir.createFile; - var in_file = try openFile(entry.dir, entry.basename, .{ .mode = .read_only }); + var in_file = try entry.dir.openat(entry.basename, bun.O.RDONLY, 0).unwrap(); defer in_file.close(); debug("createFile {} {s}\n", .{ destination_dir_.fd, entry.path }); @@ -712,11 +706,11 @@ pub const PackageInstall = struct { defer outfile.close(); if (comptime Environment.isPosix) { - const stat = in_file.stat() catch continue; + const stat = in_file.stat().unwrap() catch continue; _ = bun.c.fchmod(outfile.handle, @intCast(stat.mode)); } - bun.copyFileWithState(.fromStdFile(in_file), .fromStdFile(outfile), ©_file_state).unwrap() catch |err| { + bun.copyFileWithState(in_file, .fromStdFile(outfile), ©_file_state).unwrap() catch |err| { if (progress_) |progress| { progress.root.end(); progress.refresh(); @@ -910,20 +904,20 @@ pub const PackageInstall = struct { var real_file_count: u32 = 0; var queue = if (Environment.isWindows) HardLinkWindowsInstallTask.getQueue(); - while (try walker.next()) |entry| { + while (try walker.next().unwrap()) |entry| { if (comptime Environment.isPosix) { switch (entry.kind) { .directory => { bun.MakePath.makePath(std.meta.Elem(@TypeOf(entry.path)), destination_dir, entry.path) catch {}; }, .file => { - std.posix.linkat(entry.dir.fd, entry.basename, destination_dir.fd, entry.path, 0) catch |err| { + std.posix.linkatZ(entry.dir.cast(), entry.basename, destination_dir.fd, entry.path, 0) catch |err| { if (err != error.PathAlreadyExists) { return err; } - std.posix.unlinkat(destination_dir.fd, entry.path, 0) catch {}; - try std.posix.linkat(entry.dir.fd, entry.basename, destination_dir.fd, entry.path, 0); + std.posix.unlinkatZ(destination_dir.fd, entry.path, 0) catch {}; + try std.posix.linkatZ(entry.dir.cast(), entry.basename, destination_dir.fd, entry.path, 0); }; real_file_count += 1; @@ -1019,7 +1013,7 @@ pub const PackageInstall = struct { head2: []if (Environment.isWindows) u16 else u8, ) !u32 { var real_file_count: u32 = 0; - while (try walker.next()) |entry| { + while (try walker.next().unwrap()) |entry| { if (comptime Environment.isPosix) { switch (entry.kind) { .directory => { @@ -1181,7 +1175,7 @@ pub const PackageInstall = struct { var unintall_task: *@This() = @fieldParentPtr("task", task); var debug_timer = bun.Output.DebugTimer.start(); defer { - _ = PackageManager.get().decrementPendingTasks(); + PackageManager.get().decrementPendingTasks(); PackageManager.get().wake(); } @@ -1317,7 +1311,7 @@ pub const PackageInstall = struct { _ = node_fs_for_package_installer.mkdirRecursiveOSPathImpl(void, {}, fullpath, 0, false); } - const res = 
strings.copyUTF16IntoUTF8(dest_buf[0..], []const u16, wbuf[0..i], true); + const res = strings.copyUTF16IntoUTF8(dest_buf[0..], []const u16, wbuf[0..i]); var offset: usize = res.written; if (dest_buf[offset - 1] != std.fs.path.sep_windows) { dest_buf[offset] = std.fs.path.sep_windows; @@ -1334,12 +1328,12 @@ pub const PackageInstall = struct { // https://github.com/npm/cli/blob/162c82e845d410ede643466f9f8af78a312296cc/workspaces/arborist/lib/arborist/reify.js#L738 // https://github.com/npm/cli/commit/0e58e6f6b8f0cd62294642a502c17561aaf46553 - switch (bun.sys.symlinkOrJunction(dest_z, target_z)) { + switch (bun.sys.symlinkOrJunction(dest_z, target_z, null)) { .err => |err_| brk: { var err = err_; if (err.getErrno() == .EXIST) { _ = bun.sys.rmdirat(.fromStdDir(destination_dir), this.destination_dir_subpath); - switch (bun.sys.symlinkOrJunction(dest_z, target_z)) { + switch (bun.sys.symlinkOrJunction(dest_z, target_z, null)) { .err => |e| err = e, .result => break :brk, } @@ -1380,7 +1374,7 @@ pub const PackageInstall = struct { return switch (state) { .done => false, else => brk: { - if (this.patch.isNull()) { + if (this.patch == null) { const exists = switch (resolution_tag) { .npm => package_json_exists: { var buf = &PackageManager.cached_package_folder_name_buf; diff --git a/src/install/PackageInstaller.zig b/src/install/PackageInstaller.zig index 89f610522a..5f654a58cf 100644 --- a/src/install/PackageInstaller.zig +++ b/src/install/PackageInstaller.zig @@ -242,7 +242,6 @@ pub const PackageInstaller = struct { pub fn incrementTreeInstallCount( this: *PackageInstaller, tree_id: Lockfile.Tree.Id, - maybe_destination_dir: ?*LazyPackageDestinationDir, comptime should_install_packages: bool, log_level: Options.LogLevel, ) void { @@ -269,19 +268,13 @@ pub const PackageInstaller = struct { this.completed_trees.set(tree_id); - // Avoid opening this directory if we don't need to. if (tree.binaries.count() > 0) { - // Don't close this directory in here. It will be closed by the caller. 
- if (maybe_destination_dir) |maybe| { - if (maybe.getDir() catch null) |destination_dir| { - this.seen_bin_links.clearRetainingCapacity(); + this.seen_bin_links.clearRetainingCapacity(); - var link_target_buf: bun.PathBuffer = undefined; - var link_dest_buf: bun.PathBuffer = undefined; - var link_rel_buf: bun.PathBuffer = undefined; - this.linkTreeBins(tree, tree_id, destination_dir, &link_target_buf, &link_dest_buf, &link_rel_buf, log_level); - } - } + var link_target_buf: bun.PathBuffer = undefined; + var link_dest_buf: bun.PathBuffer = undefined; + var link_rel_buf: bun.PathBuffer = undefined; + this.linkTreeBins(tree, tree_id, &link_target_buf, &link_dest_buf, &link_rel_buf, log_level); } if (comptime should_install_packages) { @@ -295,7 +288,6 @@ pub const PackageInstaller = struct { this: *PackageInstaller, tree: *TreeContext, tree_id: TreeContext.Id, - destination_dir: std.fs.Dir, link_target_buf: []u8, link_dest_buf: []u8, link_rel_buf: []u8, @@ -303,6 +295,9 @@ pub const PackageInstaller = struct { ) void { const lockfile = this.lockfile; const string_buf = lockfile.buffers.string_bytes.items; + var node_modules_path: bun.AbsPath(.{}) = .from(this.node_modules.path.items); + defer node_modules_path.deinit(); + while (tree.binaries.removeOrNull()) |dep_id| { bun.assertWithLocation(dep_id < lockfile.buffers.dependencies.items.len, @src()); const package_id = lockfile.buffers.resolutions.items[dep_id]; @@ -319,8 +314,7 @@ pub const PackageInstaller = struct { .string_buf = string_buf, .extern_string_buf = lockfile.buffers.extern_strings.items, .seen = &this.seen_bin_links, - .node_modules_path = this.node_modules.path.items, - .node_modules = .fromStdDir(destination_dir), + .node_modules_path = &node_modules_path, .abs_target_buf = link_target_buf, .abs_dest_buf = link_dest_buf, .rel_buf = link_rel_buf, @@ -385,18 +379,7 @@ pub const PackageInstaller = struct { this.node_modules.path.appendSlice(rel_path) catch bun.outOfMemory(); - var destination_dir = this.node_modules.openDir(this.root_node_modules_folder) catch |err| { - if (log_level != .silent) { - Output.err(err, "Failed to open node_modules folder at {s}", .{ - bun.fmt.fmtPath(u8, this.node_modules.path.items, .{}), - }); - } - - continue; - }; - defer destination_dir.close(); - - this.linkTreeBins(tree, @intCast(tree_id), destination_dir, &link_target_buf, &link_dest_buf, &link_rel_buf, log_level); + this.linkTreeBins(tree, @intCast(tree_id), &link_target_buf, &link_dest_buf, &link_rel_buf, log_level); } } } @@ -417,6 +400,7 @@ pub const PackageInstaller = struct { entry.list, optional, output_in_foreground, + null, ) catch |err| { if (log_level != .silent) { const fmt = "\nerror: failed to spawn life-cycle scripts for {s}: {s}\n"; @@ -498,7 +482,7 @@ pub const PackageInstaller = struct { const optional = entry.optional; const output_in_foreground = false; - this.manager.spawnPackageLifecycleScripts(this.command_ctx, entry.list, optional, output_in_foreground) catch |err| { + this.manager.spawnPackageLifecycleScripts(this.command_ctx, entry.list, optional, output_in_foreground, null) catch |err| { if (log_level != .silent) { const fmt = "\nerror: failed to spawn life-cycle scripts for {s}: {s}\n"; const args = .{ package_name, @errorName(err) }; @@ -594,39 +578,24 @@ pub const PackageInstaller = struct { /// Install versions of a package which are waiting on a network request pub fn installEnqueuedPackagesAfterExtraction( this: *PackageInstaller, + task_id: Task.Id, dependency_id: DependencyID, data: *const ExtractData, 
log_level: Options.LogLevel, ) void { const package_id = this.lockfile.buffers.resolutions.items[dependency_id]; const name = this.names[package_id]; - const resolution = &this.resolutions[package_id]; - const task_id = switch (resolution.tag) { - .git => Task.Id.forGitCheckout(data.url, data.resolved), - .github => Task.Id.forTarball(data.url), - .local_tarball => Task.Id.forTarball(this.lockfile.str(&resolution.value.local_tarball)), - .remote_tarball => Task.Id.forTarball(this.lockfile.str(&resolution.value.remote_tarball)), - .npm => Task.Id.forNPMPackage(name.slice(this.lockfile.buffers.string_bytes.items), resolution.value.npm.version), - else => unreachable, - }; - if (!this.installEnqueuedPackagesImpl(name, task_id, log_level)) { - if (comptime Environment.allow_assert) { - Output.panic("Ran callback to install enqueued packages, but there was no task associated with it. {}:{} (dependency_id: {d})", .{ - bun.fmt.quote(name.slice(this.lockfile.buffers.string_bytes.items)), - bun.fmt.quote(data.url), - dependency_id, - }); - } - } - } + // const resolution = &this.resolutions[package_id]; + // const task_id = switch (resolution.tag) { + // .git => Task.Id.forGitCheckout(data.url, data.resolved), + // .github => Task.Id.forTarball(data.url), + // .local_tarball => Task.Id.forTarball(this.lockfile.str(&resolution.value.local_tarball)), + // .remote_tarball => Task.Id.forTarball(this.lockfile.str(&resolution.value.remote_tarball)), + // .npm => Task.Id.forNPMPackage(name.slice(this.lockfile.buffers.string_bytes.items), resolution.value.npm.version), + // else => unreachable, + // }; - pub fn installEnqueuedPackagesImpl( - this: *PackageInstaller, - name: String, - task_id: Task.Id.Type, - log_level: Options.LogLevel, - ) bool { if (this.manager.task_queue.fetchRemove(task_id)) |removed| { var callbacks = removed.value; defer callbacks.deinit(this.manager.allocator); @@ -638,7 +607,7 @@ pub const PackageInstaller = struct { if (callbacks.items.len == 0) { debug("Unexpected state: no callbacks for async task.", .{}); - return true; + return; } for (callbacks.items) |*cb| { @@ -664,9 +633,16 @@ pub const PackageInstaller = struct { ); this.node_modules.deinit(); } - return true; + return; + } + + if (comptime Environment.allow_assert) { + Output.panic("Ran callback to install enqueued packages, but there was no task associated with it. 
{}:{} (dependency_id: {d})", .{ + bun.fmt.quote(name.slice(this.lockfile.buffers.string_bytes.items)), + bun.fmt.quote(data.url), + dependency_id, + }); } - return false; } fn getInstalledPackageScriptsCount( @@ -674,7 +650,7 @@ pub const PackageInstaller = struct { alias: string, package_id: PackageID, resolution_tag: Resolution.Tag, - node_modules_folder: *LazyPackageDestinationDir, + folder_path: *bun.AbsPath(.{ .sep = .auto }), log_level: Options.LogLevel, ) usize { if (comptime Environment.allow_assert) { @@ -696,8 +672,7 @@ pub const PackageInstaller = struct { this.lockfile.allocator, &string_builder, this.manager.log, - node_modules_folder, - alias, + folder_path, ) catch |err| { if (log_level != .silent) { Output.errGeneric("failed to fill lifecycle scripts for {s}: {s}", .{ @@ -835,11 +810,10 @@ pub const PackageInstaller = struct { .destination_dir_subpath_buf = &this.destination_dir_subpath_buf, .allocator = this.lockfile.allocator, .package_name = pkg_name, - .patch = if (patch_patch) |p| PackageInstall.Patch{ - .patch_contents_hash = patch_contents_hash.?, - .patch_path = p, - .root_project_dir = FileSystem.instance.top_level_dir, - } else PackageInstall.Patch.NULL, + .patch = if (patch_patch) |p| .{ + .contents_hash = patch_contents_hash.?, + .path = p, + } else null, .package_version = package_version, .node_modules = &this.node_modules, .lockfile = this.lockfile, @@ -848,7 +822,6 @@ pub const PackageInstaller = struct { pkg_name.slice(this.lockfile.buffers.string_bytes.items), resolution.fmt(this.lockfile.buffers.string_bytes.items, .posix), }); - const pkg_has_patch = !installer.patch.isNull(); switch (resolution.tag) { .npm => { @@ -917,32 +890,7 @@ pub const PackageInstaller = struct { installer.cache_dir = std.fs.cwd(); }, .symlink => { - const directory = this.manager.globalLinkDir() catch |err| { - if (log_level != .silent) { - const fmt = "\nerror: unable to access global directory while installing {s}: {s}\n"; - const args = .{ pkg_name.slice(this.lockfile.buffers.string_bytes.items), @errorName(err) }; - - if (log_level.showProgress()) { - switch (Output.enable_ansi_colors) { - inline else => |enable_ansi_colors| { - this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); - }, - } - } else { - Output.prettyErrorln(fmt, args); - } - } - - if (this.manager.options.enable.fail_early) { - Global.exit(1); - } - - Output.flush(); - this.summary.fail += 1; - - if (!installer.patch.isNull()) this.incrementTreeInstallCount(this.current_tree_id, null, !is_pending_package_install, log_level); - return; - }; + const directory = this.manager.globalLinkDir(); const folder = resolution.value.symlink.slice(this.lockfile.buffers.string_bytes.items); @@ -950,7 +898,7 @@ pub const PackageInstaller = struct { installer.cache_dir_subpath = "."; installer.cache_dir = std.fs.cwd(); } else { - const global_link_dir = this.manager.globalLinkDirPath() catch unreachable; + const global_link_dir = this.manager.globalLinkDirPath(); var ptr = &this.folder_path_buf; var remain: []u8 = this.folder_path_buf[0..]; @memcpy(ptr[0..global_link_dir.len], global_link_dir); @@ -971,7 +919,7 @@ pub const PackageInstaller = struct { if (comptime Environment.allow_assert) { @panic("Internal assertion failure: unexpected resolution tag"); } - if (!installer.patch.isNull()) this.incrementTreeInstallCount(this.current_tree_id, null, !is_pending_package_install, log_level); + this.incrementTreeInstallCount(this.current_tree_id, !is_pending_package_install, log_level); return; }, } @@ 
-983,7 +931,7 @@ pub const PackageInstaller = struct { this.summary.skipped += @intFromBool(!needs_install); if (needs_install) { - if (!remove_patch and resolution.tag.canEnqueueInstallTask() and installer.packageMissingFromCache(this.manager, package_id, resolution.tag)) { + if (resolution.tag.canEnqueueInstallTask() and installer.packageMissingFromCache(this.manager, package_id, resolution.tag)) { if (comptime Environment.allow_assert) { bun.assertWithLocation(resolution.canEnqueueInstallTask(), @src()); } @@ -1017,7 +965,6 @@ pub const PackageInstaller = struct { ) catch |err| switch (err) { error.OutOfMemory => bun.outOfMemory(), error.InvalidURL => this.failWithInvalidUrl( - pkg_has_patch, is_pending_package_install, log_level, ), @@ -1041,7 +988,6 @@ pub const PackageInstaller = struct { ) catch |err| switch (err) { error.OutOfMemory => bun.outOfMemory(), error.InvalidURL => this.failWithInvalidUrl( - pkg_has_patch, is_pending_package_install, log_level, ), @@ -1070,7 +1016,6 @@ pub const PackageInstaller = struct { ) catch |err| switch (err) { error.OutOfMemory => bun.outOfMemory(), error.InvalidURL => this.failWithInvalidUrl( - pkg_has_patch, is_pending_package_install, log_level, ), @@ -1080,7 +1025,7 @@ pub const PackageInstaller = struct { if (comptime Environment.allow_assert) { @panic("unreachable, handled above"); } - if (!installer.patch.isNull()) this.incrementTreeInstallCount(this.current_tree_id, null, !is_pending_package_install, log_level); + this.incrementTreeInstallCount(this.current_tree_id, !is_pending_package_install, log_level); this.summary.fail += 1; }, } @@ -1090,12 +1035,12 @@ pub const PackageInstaller = struct { // above checks if unpatched package is in cache, if not null apply patch in temp directory, copy // into cache, then install into node_modules - if (!installer.patch.isNull()) { + if (installer.patch) |patch| { if (installer.patchedPackageMissingFromCache(this.manager, package_id)) { const task = PatchTask.newApplyPatchHash( this.manager, package_id, - installer.patch.patch_contents_hash, + patch.contents_hash, patch_name_and_version_hash.?, ); task.callback.apply.install_context = .{ @@ -1126,7 +1071,7 @@ pub const PackageInstaller = struct { }); } this.summary.fail += 1; - if (!pkg_has_patch) this.incrementTreeInstallCount(this.current_tree_id, null, !is_pending_package_install, log_level); + this.incrementTreeInstallCount(this.current_tree_id, !is_pending_package_install, log_level); return; }; @@ -1185,10 +1130,14 @@ pub const PackageInstaller = struct { }; if (resolution.tag != .root and (resolution.tag == .workspace or is_trusted)) { + var folder_path: bun.AbsPath(.{ .sep = .auto }) = .from(this.node_modules.path.items); + defer folder_path.deinit(); + folder_path.append(alias.slice(this.lockfile.buffers.string_bytes.items)); + if (this.enqueueLifecycleScripts( alias.slice(this.lockfile.buffers.string_bytes.items), log_level, - &lazy_package_dir, + &folder_path, package_id, dep.behavior.optional, resolution, @@ -1212,11 +1161,15 @@ pub const PackageInstaller = struct { else => if (!is_trusted and this.metas[package_id].hasInstallScript()) { // Check if the package actually has scripts. `hasInstallScript` can be false positive if a package is published with // an auto binding.gyp rebuild script but binding.gyp is excluded from the published files. 
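+ // Counting the scripts actually declared by the installed package's package.json below sidesteps that false positive.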
+ var folder_path: bun.AbsPath(.{ .sep = .auto }) = .from(this.node_modules.path.items); + defer folder_path.deinit(); + folder_path.append(alias.slice(this.lockfile.buffers.string_bytes.items)); + const count = this.getInstalledPackageScriptsCount( alias.slice(this.lockfile.buffers.string_bytes.items), package_id, resolution.tag, - &lazy_package_dir, + &folder_path, log_level, ); if (count > 0) { @@ -1234,7 +1187,7 @@ pub const PackageInstaller = struct { }, } - if (!pkg_has_patch) this.incrementTreeInstallCount(this.current_tree_id, &lazy_package_dir, !is_pending_package_install, log_level); + this.incrementTreeInstallCount(this.current_tree_id, !is_pending_package_install, log_level); }, .failure => |cause| { if (comptime Environment.allow_assert) { @@ -1243,7 +1196,7 @@ pub const PackageInstaller = struct { // even if the package failed to install, we still need to increment the install // counter for this tree - if (!pkg_has_patch) this.incrementTreeInstallCount(this.current_tree_id, &lazy_package_dir, !is_pending_package_install, log_level); + this.incrementTreeInstallCount(this.current_tree_id, !is_pending_package_install, log_level); if (cause.err == error.DanglingSymlink) { Output.prettyErrorln( @@ -1333,7 +1286,7 @@ pub const PackageInstaller = struct { destination_dir.close(); } - defer if (!pkg_has_patch) this.incrementTreeInstallCount(this.current_tree_id, &destination_dir, !is_pending_package_install, log_level); + defer this.incrementTreeInstallCount(this.current_tree_id, !is_pending_package_install, log_level); const dep = this.lockfile.buffers.dependencies.items[dependency_id]; const truncated_dep_name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); @@ -1349,10 +1302,14 @@ pub const PackageInstaller = struct { }; if (resolution.tag != .root and is_trusted) { + var folder_path: bun.AbsPath(.{ .sep = .auto }) = .from(this.node_modules.path.items); + defer folder_path.deinit(); + folder_path.append(alias.slice(this.lockfile.buffers.string_bytes.items)); + if (this.enqueueLifecycleScripts( alias.slice(this.lockfile.buffers.string_bytes.items), log_level, - &destination_dir, + &folder_path, package_id, dep.behavior.optional, resolution, @@ -1375,12 +1332,11 @@ pub const PackageInstaller = struct { fn failWithInvalidUrl( this: *PackageInstaller, - pkg_has_patch: bool, comptime is_pending_package_install: bool, log_level: Options.LogLevel, ) void { this.summary.fail += 1; - if (!pkg_has_patch) this.incrementTreeInstallCount(this.current_tree_id, null, !is_pending_package_install, log_level); + this.incrementTreeInstallCount(this.current_tree_id, !is_pending_package_install, log_level); } // returns true if scripts are enqueued @@ -1388,7 +1344,7 @@ pub const PackageInstaller = struct { this: *PackageInstaller, folder_name: string, log_level: Options.LogLevel, - node_modules_folder: *LazyPackageDestinationDir, + package_path: *bun.AbsPath(.{ .sep = .auto }), package_id: PackageID, optional: bool, resolution: *const Resolution, @@ -1397,8 +1353,7 @@ pub const PackageInstaller = struct { const scripts_list = scripts.getList( this.manager.log, this.lockfile, - node_modules_folder, - this.node_modules.path.items, + package_path, folder_name, resolution, ) catch |err| { diff --git a/src/install/PackageManager.zig b/src/install/PackageManager.zig new file mode 100644 index 0000000000..d4425eeaed --- /dev/null +++ b/src/install/PackageManager.zig @@ -0,0 +1,1304 @@ +cache_directory_: ?std.fs.Dir = null, + +cache_directory_path: stringZ = "", +temp_dir_: ?std.fs.Dir = null, 
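+// Fields with a trailing underscore (cache_directory_, temp_dir_) are lazily initialized; callers are expected to go through accessors such as getTemporaryDirectory() rather than reading them directly.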
+temp_dir_path: stringZ = "", +temp_dir_name: string = "", +root_dir: *Fs.FileSystem.DirEntry, +allocator: std.mem.Allocator, +log: *logger.Log, +resolve_tasks: ResolveTaskQueue = .{}, +timestamp_for_manifest_cache_control: u32 = 0, +extracted_count: u32 = 0, +default_features: Features = .{}, +summary: Lockfile.Package.Diff.Summary = .{}, +env: *DotEnv.Loader, +progress: Progress = .{}, +downloads_node: ?*Progress.Node = null, +scripts_node: ?*Progress.Node = null, +progress_name_buf: [768]u8 = undefined, +progress_name_buf_dynamic: []u8 = &[_]u8{}, +cpu_count: u32 = 0, + +track_installed_bin: TrackInstalledBin = .{ + .none = {}, +}, + +// progress bar stuff when not stack allocated +root_progress_node: *Progress.Node = undefined, + +to_update: bool = false, + +subcommand: Subcommand, +update_requests: []UpdateRequest = &[_]UpdateRequest{}, + +/// Only set in `bun pm` +root_package_json_name_at_time_of_init: []const u8 = "", + +root_package_json_file: std.fs.File, + +/// The package id corresponding to the workspace the install is happening in. Could be root, or +/// could be any of the workspaces. +root_package_id: struct { + id: ?PackageID = null, + pub fn get(this: *@This(), lockfile: *const Lockfile, workspace_name_hash: ?PackageNameHash) PackageID { + return this.id orelse { + this.id = lockfile.getWorkspacePackageID(workspace_name_hash); + return this.id.?; + }; + } +} = .{}, + +thread_pool: ThreadPool, +task_batch: ThreadPool.Batch = .{}, +task_queue: TaskDependencyQueue = .{}, + +manifests: PackageManifestMap = .{}, +folders: FolderResolution.Map = .{}, +git_repositories: RepositoryMap = .{}, + +network_dedupe_map: NetworkTask.DedupeMap = NetworkTask.DedupeMap.init(bun.default_allocator), +async_network_task_queue: AsyncNetworkTaskQueue = .{}, +network_tarball_batch: ThreadPool.Batch = .{}, +network_resolve_batch: ThreadPool.Batch = .{}, +network_task_fifo: NetworkQueue = undefined, +patch_apply_batch: ThreadPool.Batch = .{}, +patch_calc_hash_batch: ThreadPool.Batch = .{}, +patch_task_fifo: PatchTaskFifo = PatchTaskFifo.init(), +patch_task_queue: PatchTaskQueue = .{}, +/// We actually need to calculate the patch file hashes +/// every single time, because someone could edit the patchfile at any time +pending_pre_calc_hashes: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), +pending_tasks: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), +total_tasks: u32 = 0, +preallocated_network_tasks: PreallocatedNetworkTasks, +preallocated_resolve_tasks: PreallocatedTaskStore, + +/// items are only inserted into this if they took more than 500ms +lifecycle_script_time_log: LifecycleScriptTimeLog = .{}, + +pending_lifecycle_script_tasks: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), +finished_installing: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), +total_scripts: usize = 0, + +root_lifecycle_scripts: ?Package.Scripts.List = null, + +node_gyp_tempdir_name: string = "", + +env_configure: ?ScriptRunEnvironment = null, + +lockfile: *Lockfile = undefined, + +options: Options, +preinstall_state: std.ArrayListUnmanaged(PreinstallState) = .{}, + +global_link_dir: ?std.fs.Dir = null, +global_dir: ?std.fs.Dir = null, +global_link_dir_path: string = "", + +onWake: WakeHandler = .{}, +ci_mode: bun.LazyBool(computeIsContinuousIntegration, @This(), "ci_mode") = .{}, + +peer_dependencies: std.fifo.LinearFifo(DependencyID, .Dynamic) = std.fifo.LinearFifo(DependencyID, .Dynamic).init(default_allocator), + +// name hash from alias package name -> aliased package dependency
version info +known_npm_aliases: NpmAliasMap = .{}, + +event_loop: JSC.AnyEventLoop, + +// During `installPackages` we learn exactly what dependencies from --trust +// actually have scripts to run, and we add them to this list +trusted_deps_to_add_to_package_json: std.ArrayListUnmanaged(string) = .{}, + +any_failed_to_install: bool = false, + +// When adding a `file:` dependency in a workspace package, we want to install it +// relative to the workspace root, but the path provided is relative to the +// workspace package. We keep track of the original here. +original_package_json_path: stringZ, + +// null means root. Used during `cleanWithLogger` to identify which +// workspace is adding/removing packages +workspace_name_hash: ?PackageNameHash = null, + +workspace_package_json_cache: WorkspacePackageJSONCache = .{}, + +// normally we have `UpdateRequests` to work with for adding/deleting/updating packages, but +// if `bun update` is used without any package names we need a way to keep information for +// the original packages that are updating. +// +// dependency name -> original version information +updating_packages: bun.StringArrayHashMapUnmanaged(PackageUpdateInfo) = .{}, + +patched_dependencies_to_remove: std.ArrayHashMapUnmanaged(PackageNameAndVersionHash, void, ArrayIdentityContext.U64, false) = .{}, + +active_lifecycle_scripts: LifecycleScriptSubprocess.List, +last_reported_slow_lifecycle_script_at: u64 = 0, +cached_tick_for_slow_lifecycle_script_logging: u64 = 0, + +/// Corresponds to possible commands from the CLI. +pub const Subcommand = enum { + install, + update, + pm, + add, + remove, + link, + unlink, + patch, + @"patch-commit", + outdated, + pack, + publish, + audit, + info, + + // bin, + // hash, + // @"hash-print", + // @"hash-string", + // cache, + // @"default-trusted", + // untrusted, + // trust, + // ls, + // migrate, + + pub fn canGloballyInstallPackages(this: Subcommand) bool { + return switch (this) { + .install, .update, .add => true, + else => false, + }; + } + + pub fn supportsWorkspaceFiltering(this: Subcommand) bool { + return switch (this) { + .outdated => true, + .install => true, + // .pack => true, + // .add => true, + else => false, + }; + } + + pub fn supportsJsonOutput(this: Subcommand) bool { + return switch (this) { + .audit, + .pm, + .info, + => true, + else => false, + }; + } + + // TODO: make all subcommands find root and chdir + pub fn shouldChdirToRoot(this: Subcommand) bool { + return switch (this) { + .link => false, + else => true, + }; + } +}; + +pub const WorkspaceFilter = union(enum) { + all, + name: []const u8, + path: []const u8, + + pub fn init(allocator: std.mem.Allocator, input: string, cwd: string, path_buf: []u8) OOM!WorkspaceFilter { + if ((input.len == 1 and input[0] == '*') or strings.eqlComptime(input, "**")) { + return .all; + } + + var remain = input; + + var prepend_negate = false; + while (remain.len > 0 and remain[0] == '!') { + prepend_negate = !prepend_negate; + remain = remain[1..]; + } + + const is_path = remain.len > 0 and remain[0] == '.'; + + const filter = if (is_path) + strings.withoutTrailingSlash(bun.path.joinAbsStringBuf(cwd, path_buf, &.{remain}, .posix)) + else + remain; + + if (filter.len == 0) { + // won't match anything + return .{ .path = &.{} }; + } + const copy_start = @intFromBool(prepend_negate); + const copy_end = copy_start + filter.len; + + const buf = try allocator.alloc(u8, copy_end); + @memcpy(buf[copy_start..copy_end], filter); + + if (prepend_negate) { + buf[0] = '!'; + } + + const pattern =
buf[0..copy_end]; + + return if (is_path) + .{ .path = pattern } + else + .{ .name = pattern }; + } + + pub fn deinit(this: WorkspaceFilter, allocator: std.mem.Allocator) void { + switch (this) { + .name, + .path, + => |pattern| allocator.free(pattern), + .all => {}, + } + } +}; + +pub const PackageUpdateInfo = struct { + original_version_literal: string, + is_alias: bool, + original_version_string_buf: string = "", + original_version: ?Semver.Version, +}; + +pub fn clearCachedItemsDependingOnLockfileBuffer(this: *PackageManager) void { + this.root_package_id.id = null; +} + +pub fn crash(this: *PackageManager) noreturn { + if (this.options.log_level != .silent) { + this.log.print(Output.errorWriter()) catch {}; + } + Global.crash(); +} + +const TrackInstalledBin = union(enum) { + none: void, + pending: void, + basename: []const u8, +}; + +pub var verbose_install = false; + +pub const PatchTaskQueue = bun.UnboundedQueue(PatchTask, .next); +pub const AsyncNetworkTaskQueue = bun.UnboundedQueue(NetworkTask, .next); + +pub const ScriptRunEnvironment = struct { + root_dir_info: *DirInfo, + transpiler: bun.Transpiler, +}; + +const TimePasser = struct { + pub var last_time: u64 = 0; +}; + +pub fn hasEnoughTimePassedBetweenWaitingMessages() bool { + const iter = get().event_loop.loop().iterationNumber(); + if (TimePasser.last_time < iter) { + TimePasser.last_time = iter; + return true; + } + + return false; +} + +pub fn configureEnvForScripts(this: *PackageManager, ctx: Command.Context, log_level: Options.LogLevel) !transpiler.Transpiler { + return configureEnvForScriptsOnce.call(.{ this, ctx, log_level }); +} + +pub var configureEnvForScriptsOnce = bun.once(struct { + pub fn run(this: *PackageManager, ctx: Command.Context, log_level: Options.LogLevel) !transpiler.Transpiler { + + // We need to figure out the PATH and other environment variables + // to do that, we re-use the code from bun run + // this is expensive, it traverses the entire directory tree going up to the root + // so we really only want to do it when strictly necessary + var this_transpiler: transpiler.Transpiler = undefined; + _ = try RunCommand.configureEnvForRun( + ctx, + &this_transpiler, + this.env, + log_level != .silent, + false, + ); + + const init_cwd_entry = try this.env.map.getOrPutWithoutValue("INIT_CWD"); + if (!init_cwd_entry.found_existing) { + init_cwd_entry.key_ptr.* = try ctx.allocator.dupe(u8, init_cwd_entry.key_ptr.*); + init_cwd_entry.value_ptr.* = .{ + .value = try ctx.allocator.dupe(u8, strings.withoutTrailingSlash(FileSystem.instance.top_level_dir)), + .conditional = false, + }; + } + + this.env.loadCCachePath(this_transpiler.fs); + + { + // Run node-gyp jobs in parallel. 
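+ // node-gyp sizes its build concurrency from the JOBS environment variable, so exporting it here parallelizes native addon compiles: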
+ // https://github.com/nodejs/node-gyp/blob/7d883b5cf4c26e76065201f85b0be36d5ebdcc0e/lib/build.js#L150-L184 + const thread_count = bun.getThreadCount(); + if (thread_count > 2) { + if (!this_transpiler.env.has("JOBS")) { + var int_buf: [10]u8 = undefined; + const jobs_str = std.fmt.bufPrint(&int_buf, "{d}", .{thread_count}) catch unreachable; + this_transpiler.env.map.putAllocValue(bun.default_allocator, "JOBS", jobs_str) catch unreachable; + } + } + } + + { + var node_path: bun.PathBuffer = undefined; + if (this.env.getNodePath(this_transpiler.fs, &node_path)) |node_pathZ| { + _ = try this.env.loadNodeJSConfig(this_transpiler.fs, bun.default_allocator.dupe(u8, node_pathZ) catch bun.outOfMemory()); + } else brk: { + const current_path = this.env.get("PATH") orelse ""; + var PATH = try std.ArrayList(u8).initCapacity(bun.default_allocator, current_path.len); + try PATH.appendSlice(current_path); + var bun_path: string = ""; + RunCommand.createFakeTemporaryNodeExecutable(&PATH, &bun_path) catch break :brk; + try this.env.map.put("PATH", PATH.items); + _ = try this.env.loadNodeJSConfig(this_transpiler.fs, bun.default_allocator.dupe(u8, bun_path) catch bun.outOfMemory()); + } + } + + return this_transpiler; + } +}.run); + +pub fn httpProxy(this: *PackageManager, url: URL) ?URL { + return this.env.getHttpProxyFor(url); +} + +pub fn tlsRejectUnauthorized(this: *PackageManager) bool { + return this.env.getTLSRejectUnauthorized(); +} + +pub fn computeIsContinuousIntegration(this: *PackageManager) bool { + return this.env.isCI(); +} + +pub inline fn isContinuousIntegration(this: *PackageManager) bool { + return this.ci_mode.get(); +} + +pub const WakeHandler = struct { + // handler: fn (ctx: *anyopaque, pm: *PackageManager) void = undefined, + // onDependencyError: fn (ctx: *anyopaque, Dependency, PackageID, anyerror) void = undefined, + handler: *const anyopaque = undefined, + onDependencyError: *const anyopaque = undefined, + context: ?*anyopaque = null, + + pub inline fn getHandler(t: @This()) *const fn (ctx: *anyopaque, pm: *PackageManager) void { + return bun.cast(*const fn (ctx: *anyopaque, pm: *PackageManager) void, t.handler); + } + + pub inline fn getonDependencyError(t: @This()) *const fn (ctx: *anyopaque, Dependency, DependencyID, anyerror) void { + return bun.cast(*const fn (ctx: *anyopaque, Dependency, DependencyID, anyerror) void, t.handler); + } +}; + +pub fn failRootResolution(this: *PackageManager, dependency: *const Dependency, dependency_id: DependencyID, err: anyerror) void { + if (this.onWake.context) |ctx| { + this.onWake.getonDependencyError()( + ctx, + dependency.*, + dependency_id, + err, + ); + } +} + +pub fn wake(this: *PackageManager) void { + if (this.onWake.context) |ctx| { + this.onWake.getHandler()(ctx, this); + } + + this.event_loop.wakeup(); +} + +pub fn sleepUntil(this: *PackageManager, closure: anytype, comptime isDoneFn: anytype) void { + Output.flush(); + this.event_loop.tick(closure, isDoneFn); +} + +pub threadlocal var cached_package_folder_name_buf: bun.PathBuffer = undefined; + +const Holder = struct { + pub var ptr: *PackageManager = undefined; +}; + +pub fn allocatePackageManager() void { + Holder.ptr = bun.default_allocator.create(PackageManager) catch bun.outOfMemory(); +} + +pub fn get() *PackageManager { + return Holder.ptr; +} + +pub const SuccessFn = *const fn (*PackageManager, DependencyID, PackageID) void; +pub const FailFn = *const fn (*PackageManager, *const Dependency, PackageID, anyerror) void; + +pub const debug = 
Output.scoped(.PackageManager, true);
+
+pub fn ensureTempNodeGypScript(this: *PackageManager) !void {
+ return ensureTempNodeGypScriptOnce.call(.{this});
+}
+
+var ensureTempNodeGypScriptOnce = bun.once(struct {
+ pub fn run(manager: *PackageManager) !void {
+ if (manager.node_gyp_tempdir_name.len > 0) return;
+
+ const tempdir = manager.getTemporaryDirectory();
+ var path_buf: bun.PathBuffer = undefined;
+ const node_gyp_tempdir_name = bun.span(try Fs.FileSystem.instance.tmpname("node-gyp", &path_buf, 12345));
+
+ // used later for adding to path for scripts
+ manager.node_gyp_tempdir_name = try manager.allocator.dupe(u8, node_gyp_tempdir_name);
+
+ var node_gyp_tempdir = tempdir.makeOpenPath(manager.node_gyp_tempdir_name, .{}) catch |err| {
+ if (err == error.EEXIST) {
+ // it should not exist
+ Output.prettyErrorln("<r><red>error<r>: node-gyp tempdir already exists", .{});
+ Global.crash();
+ }
+ Output.prettyErrorln("<r><red>error<r>: {s} creating node-gyp tempdir", .{@errorName(err)});
+ Global.crash();
+ };
+ defer node_gyp_tempdir.close();
+
+ const file_name = switch (Environment.os) {
+ else => "node-gyp",
+ .windows => "node-gyp.cmd",
+ };
+ const mode = switch (Environment.os) {
+ else => 0o755,
+ .windows => 0, // windows does not have an executable bit
+ };
+
+ var node_gyp_file = node_gyp_tempdir.createFile(file_name, .{ .mode = mode }) catch |err| {
+ Output.prettyErrorln("<r><red>error<r>: {s} creating node-gyp script file", .{@errorName(err)});
+ Global.crash();
+ };
+ defer node_gyp_file.close();
+
+ const content = switch (Environment.os) {
+ .windows =>
+ \\if not defined npm_config_node_gyp (
+ \\ bun x --silent node-gyp %*
+ \\) else (
+ \\ node "%npm_config_node_gyp%" %*
+ \\)
+ \\
+ ,
+ else =>
+ \\#!/bin/sh
+ \\if [ "x$npm_config_node_gyp" = "x" ]; then
+ \\ bun x --silent node-gyp $@
+ \\else
+ \\ "$npm_config_node_gyp" $@
+ \\fi
+ \\
+ ,
+ };
+
+ node_gyp_file.writeAll(content) catch |err| {
+ Output.prettyErrorln("<r><red>error<r>: {s} writing to " ++ file_name ++ " file", .{@errorName(err)});
+ Global.crash();
+ };
+
+ // Add our node-gyp tempdir to the path
+ const existing_path = manager.env.get("PATH") orelse "";
+ var PATH = try std.ArrayList(u8).initCapacity(bun.default_allocator, existing_path.len + 1 + manager.temp_dir_name.len + 1 + manager.node_gyp_tempdir_name.len);
+ try PATH.appendSlice(existing_path);
+ if (existing_path.len > 0 and existing_path[existing_path.len - 1] != std.fs.path.delimiter)
+ try PATH.append(std.fs.path.delimiter);
+ try PATH.appendSlice(strings.withoutTrailingSlash(manager.temp_dir_name));
+ try PATH.append(std.fs.path.sep);
+ try PATH.appendSlice(manager.node_gyp_tempdir_name);
+ try manager.env.map.put("PATH", PATH.items);
+
+ const npm_config_node_gyp = try std.fmt.bufPrint(&path_buf, "{s}{s}{s}{s}{s}", .{
+ strings.withoutTrailingSlash(manager.temp_dir_name),
+ std.fs.path.sep_str,
+ strings.withoutTrailingSlash(manager.node_gyp_tempdir_name),
+ std.fs.path.sep_str,
+ file_name,
+ });
+
+ const node_gyp_abs_dir = std.fs.path.dirname(npm_config_node_gyp).?;
+ try manager.env.map.putAllocKeyAndValue(manager.allocator, "BUN_WHICH_IGNORE_CWD", node_gyp_abs_dir);
+ }
+}.run);
+
+fn httpThreadOnInitError(err: HTTP.InitError, opts: HTTP.HTTPThread.InitOpts) noreturn {
+ switch (err) {
+ error.LoadCAFile => {
+ var normalizer: bun.path.PosixToWinNormalizer = .{};
+ const normalized = normalizer.resolveZ(FileSystem.instance.top_level_dir, opts.abs_ca_file_name);
+ if (!bun.sys.existsZ(normalized)) {
+ Output.err("HTTPThread", "could not find CA file: '{s}'",
.{opts.abs_ca_file_name});
+ } else {
+ Output.err("HTTPThread", "invalid CA file: '{s}'", .{opts.abs_ca_file_name});
+ }
+ },
+ error.InvalidCAFile => {
+ Output.err("HTTPThread", "invalid CA file: '{s}'", .{opts.abs_ca_file_name});
+ },
+ error.InvalidCA => {
+ Output.err("HTTPThread", "the CA is invalid", .{});
+ },
+ error.FailedToOpenSocket => {
+ Output.errGeneric("failed to start HTTP client thread", .{});
+ },
+ }
+ Global.crash();
+}
+
+pub fn init(
+ ctx: Command.Context,
+ cli: CommandLineArguments,
+ subcommand: Subcommand,
+) !struct { *PackageManager, string } {
+ if (cli.global) {
+ var explicit_global_dir: string = "";
+ if (ctx.install) |opts| {
+ explicit_global_dir = opts.global_dir orelse explicit_global_dir;
+ }
+ var global_dir = try Options.openGlobalDir(explicit_global_dir);
+ try global_dir.setAsCwd();
+ }
+
+ var fs = try Fs.FileSystem.init(null);
+ const top_level_dir_no_trailing_slash = strings.withoutTrailingSlash(fs.top_level_dir);
+ if (comptime Environment.isWindows) {
+ _ = Path.pathToPosixBuf(u8, top_level_dir_no_trailing_slash, &cwd_buf);
+ } else {
+ @memcpy(cwd_buf[0..top_level_dir_no_trailing_slash.len], top_level_dir_no_trailing_slash);
+ }
+
+ var original_package_json_path_buf = std.ArrayListUnmanaged(u8).initCapacity(ctx.allocator, top_level_dir_no_trailing_slash.len + "/package.json".len + 1) catch bun.outOfMemory();
+ original_package_json_path_buf.appendSliceAssumeCapacity(top_level_dir_no_trailing_slash);
+ original_package_json_path_buf.appendSliceAssumeCapacity(std.fs.path.sep_str ++ "package.json");
+ original_package_json_path_buf.appendAssumeCapacity(0);
+
+ var original_package_json_path: stringZ = original_package_json_path_buf.items[0 .. top_level_dir_no_trailing_slash.len + "/package.json".len :0];
+ const original_cwd = strings.withoutSuffixComptime(original_package_json_path, std.fs.path.sep_str ++ "package.json");
+ const original_cwd_clone = ctx.allocator.dupe(u8, original_cwd) catch bun.outOfMemory();
+
+ var workspace_names = Package.WorkspaceMap.init(ctx.allocator);
+ var workspace_package_json_cache: WorkspacePackageJSONCache = .{
+ .map = .{},
+ };
+
+ var workspace_name_hash: ?PackageNameHash = null;
+ var root_package_json_name_at_time_of_init: []const u8 = "";
+
+ // Step 1. Find the nearest package.json directory
+ //
+ // We will walk up from the cwd, trying to find the nearest package.json file.
+ const root_package_json_file = root_package_json_file: {
+ var this_cwd: string = original_cwd;
+ var created_package_json = false;
+ const child_json = child: {
+ // if we are only doing `bun install` (no args), then we can open as read_only
+ // in all other cases we will need to write new data later.
+ // this is relevant because it allows an install to succeed if package.json
+ // is readable but not writable
+ //
+ // probably won't matter, as if package.json isn't writable, it's likely that
+ // the underlying directory and node_modules aren't either.
+ const need_write = subcommand != .install or cli.positionals.len > 1;
+
+ while (true) {
+ var package_json_path_buf: bun.PathBuffer = undefined;
+ @memcpy(package_json_path_buf[0..this_cwd.len], this_cwd);
+ package_json_path_buf[this_cwd.len..package_json_path_buf.len][0.."/package.json".len].* = "/package.json".*;
+ package_json_path_buf[this_cwd.len + "/package.json".len] = 0;
+ const package_json_path = package_json_path_buf[0 .. this_cwd.len + "/package.json".len :0];
+
+ break :child std.fs.cwd().openFileZ(
+ package_json_path,
+ .{ .mode = if (need_write) .read_write else .read_only },
+ ) catch |err| switch (err) {
+ error.FileNotFound => {
+ if (std.fs.path.dirname(this_cwd)) |parent| {
+ this_cwd = strings.withoutTrailingSlash(parent);
+ continue;
+ } else {
+ break;
+ }
+ },
+ error.AccessDenied => {
+ Output.err("EACCES", "Permission denied while opening \"{s}\"", .{
+ package_json_path,
+ });
+ if (need_write) {
+ Output.note("package.json must be writable to add packages", .{});
+ } else {
+ Output.note("package.json is missing read permissions, or is owned by another user", .{});
+ }
+ Global.crash();
+ },
+ else => {
+ Output.err(err, "could not open \"{s}\"", .{
+ package_json_path,
+ });
+ return err;
+ },
+ };
+ }
+
+ if (subcommand == .install) {
+ if (cli.positionals.len > 1) {
+ // this is `bun add` with one or more packages.
+ //
+ // create the package.json instead of returning an error. this works around
+ // a zig bug where continuing control flow through a catch seems to
+ // cause a segfault the second time `PackageManager.init` is called after
+ // switching to the add command.
+ this_cwd = original_cwd;
+ created_package_json = true;
+ break :child try attemptToCreatePackageJSONAndOpen();
+ }
+ }
+ return error.MissingPackageJSON;
+ };
+
+ bun.assertWithLocation(strings.eqlLong(original_package_json_path_buf.items[0..this_cwd.len], this_cwd, true), @src());
+ original_package_json_path_buf.items.len = this_cwd.len;
+ original_package_json_path_buf.appendSliceAssumeCapacity(std.fs.path.sep_str ++ "package.json");
+ original_package_json_path_buf.appendAssumeCapacity(0);
+
+ original_package_json_path = original_package_json_path_buf.items[0 .. this_cwd.len + "/package.json".len :0];
+ const child_cwd = strings.withoutSuffixComptime(original_package_json_path, std.fs.path.sep_str ++ "package.json");
+
+ // Check if this is a workspace; if so, use root package
+ var found = false;
+ if (subcommand.shouldChdirToRoot()) {
+ if (!created_package_json) {
+ while (std.fs.path.dirname(this_cwd)) |parent| : (this_cwd = parent) {
+ const parent_without_trailing_slash = strings.withoutTrailingSlash(parent);
+ var parent_path_buf: bun.PathBuffer = undefined;
+ @memcpy(parent_path_buf[0..parent_without_trailing_slash.len], parent_without_trailing_slash);
+ parent_path_buf[parent_without_trailing_slash.len..parent_path_buf.len][0.."/package.json".len].* = "/package.json".*;
+ parent_path_buf[parent_without_trailing_slash.len + "/package.json".len] = 0;
+
+ const json_file = std.fs.cwd().openFileZ(
+ parent_path_buf[0 ..
parent_without_trailing_slash.len + "/package.json".len :0].ptr, + .{ .mode = .read_write }, + ) catch { + continue; + }; + defer if (!found) json_file.close(); + const json_stat_size = try json_file.getEndPos(); + const json_buf = try ctx.allocator.alloc(u8, json_stat_size + 64); + defer ctx.allocator.free(json_buf); + const json_len = try json_file.preadAll(json_buf, 0); + const json_path = try bun.getFdPath(.fromStdFile(json_file), &package_json_cwd_buf); + const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); + initializeStore(); + const json = try JSON.parsePackageJSONUTF8(&json_source, ctx.log, ctx.allocator); + if (subcommand == .pm) { + if (json.getStringCloned(ctx.allocator, "name") catch null) |name| { + root_package_json_name_at_time_of_init = name; + } + } + + if (json.asProperty("workspaces")) |prop| { + const json_array = switch (prop.expr.data) { + .e_array => |arr| arr, + .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) { + .e_array => |arr| arr, + else => break, + } else break, + else => break, + }; + var log = logger.Log.init(ctx.allocator); + defer log.deinit(); + _ = workspace_names.processNamesArray( + ctx.allocator, + &workspace_package_json_cache, + &log, + json_array, + &json_source, + prop.loc, + null, + ) catch break; + + for (workspace_names.keys(), workspace_names.values()) |path, entry| { + const child_path = if (std.fs.path.isAbsolute(path)) + child_cwd + else + bun.path.relativeNormalized(json_source.path.name.dir, child_cwd, .auto, true); + + const maybe_workspace_path = if (comptime Environment.isWindows) brk: { + @memcpy(parent_path_buf[0..child_path.len], child_path); + bun.path.dangerouslyConvertPathToPosixInPlace(u8, parent_path_buf[0..child_path.len]); + break :brk parent_path_buf[0..child_path.len]; + } else child_path; + + if (strings.eqlLong(maybe_workspace_path, path, true)) { + fs.top_level_dir = try bun.default_allocator.dupeZ(u8, parent); + found = true; + child_json.close(); + if (comptime Environment.isWindows) { + try json_file.seekTo(0); + } + workspace_name_hash = String.Builder.stringHash(entry.name); + break :root_package_json_file json_file; + } + } + + break; + } + } + } + } + + fs.top_level_dir = try bun.default_allocator.dupeZ(u8, child_cwd); + break :root_package_json_file child_json; + }; + + try bun.sys.chdir(fs.top_level_dir, fs.top_level_dir).unwrap(); + try BunArguments.loadConfig(ctx.allocator, cli.config, ctx, .InstallCommand); + bun.copy(u8, &cwd_buf, fs.top_level_dir); + cwd_buf[fs.top_level_dir.len] = 0; + fs.top_level_dir = cwd_buf[0..fs.top_level_dir.len :0]; + package_json_cwd = try bun.getFdPath(.fromStdFile(root_package_json_file), &package_json_cwd_buf); + + const entries_option = try fs.fs.readDirectory(fs.top_level_dir, null, 0, true); + + var env: *DotEnv.Loader = brk: { + const map = try ctx.allocator.create(DotEnv.Map); + map.* = DotEnv.Map.init(ctx.allocator); + + const loader = try ctx.allocator.create(DotEnv.Loader); + loader.* = DotEnv.Loader.init(map, ctx.allocator); + break :brk loader; + }; + + env.loadProcess(); + try env.load(entries_option.entries, &[_][]u8{}, .production, false); + + initializeStore(); + if (bun.getenvZ("XDG_CONFIG_HOME") orelse bun.getenvZ(bun.DotEnv.home_env)) |data_dir| { + var buf: bun.PathBuffer = undefined; + var parts = [_]string{ + "./.npmrc", + }; + + bun.ini.loadNpmrcConfig(ctx.allocator, ctx.install orelse brk: { + const install_ = ctx.allocator.create(Api.BunInstall) catch bun.outOfMemory(); + install_.* = 
std.mem.zeroes(Api.BunInstall); + ctx.install = install_; + break :brk install_; + }, env, true, &[_][:0]const u8{ Path.joinAbsStringBufZ( + data_dir, + &buf, + &parts, + .auto, + ), ".npmrc" }); + } else { + bun.ini.loadNpmrcConfig(ctx.allocator, ctx.install orelse brk: { + const install_ = ctx.allocator.create(Api.BunInstall) catch bun.outOfMemory(); + install_.* = std.mem.zeroes(Api.BunInstall); + ctx.install = install_; + break :brk install_; + }, env, true, &[_][:0]const u8{".npmrc"}); + } + const cpu_count = bun.getThreadCount(); + + const options = Options{ + .global = cli.global, + .max_concurrent_lifecycle_scripts = cli.concurrent_scripts orelse cpu_count * 2, + }; + + if (env.get("BUN_INSTALL_VERBOSE") != null) { + PackageManager.verbose_install = true; + } + + if (env.get("BUN_FEATURE_FLAG_FORCE_WAITER_THREAD") != null) { + bun.spawn.process.WaiterThread.setShouldUseWaiterThread(); + } + + if (bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_FORCE_WINDOWS_JUNCTIONS)) { + bun.sys.WindowsSymlinkOptions.has_failed_to_create_symlink = true; + } + + if (PackageManager.verbose_install) { + Output.prettyErrorln("Cache Dir: {s}", .{options.cache_directory}); + Output.flush(); + } + + workspace_names.map.deinit(); + + PackageManager.allocatePackageManager(); + const manager = PackageManager.get(); + // var progress = Progress{}; + // var node = progress.start(name: []const u8, estimated_total_items: usize) + manager.* = PackageManager{ + .preallocated_network_tasks = .init(bun.default_allocator), + .preallocated_resolve_tasks = .init(bun.default_allocator), + .options = options, + .active_lifecycle_scripts = .{ + .context = manager, + }, + .network_task_fifo = NetworkQueue.init(), + .patch_task_fifo = PatchTaskFifo.init(), + .allocator = ctx.allocator, + .log = ctx.log, + .root_dir = entries_option.entries, + .env = env, + .cpu_count = cpu_count, + .thread_pool = ThreadPool.init(.{ + .max_threads = cpu_count, + }), + .resolve_tasks = .{}, + .lockfile = undefined, + .root_package_json_file = root_package_json_file, + // .progress + .event_loop = .{ + .mini = JSC.MiniEventLoop.init(bun.default_allocator), + }, + .original_package_json_path = original_package_json_path, + .workspace_package_json_cache = workspace_package_json_cache, + .workspace_name_hash = workspace_name_hash, + .subcommand = subcommand, + .root_package_json_name_at_time_of_init = root_package_json_name_at_time_of_init, + }; + manager.event_loop.loop().internal_loop_data.setParentEventLoop(bun.JSC.EventLoopHandle.init(&manager.event_loop)); + manager.lockfile = try ctx.allocator.create(Lockfile); + JSC.MiniEventLoop.global = &manager.event_loop.mini; + if (!manager.options.enable.cache) { + manager.options.enable.manifest_cache = false; + manager.options.enable.manifest_cache_control = false; + } + + if (env.get("BUN_MANIFEST_CACHE")) |manifest_cache| { + if (strings.eqlComptime(manifest_cache, "1")) { + manager.options.enable.manifest_cache = true; + manager.options.enable.manifest_cache_control = false; + } else if (strings.eqlComptime(manifest_cache, "2")) { + manager.options.enable.manifest_cache = true; + manager.options.enable.manifest_cache_control = true; + } else { + manager.options.enable.manifest_cache = false; + manager.options.enable.manifest_cache_control = false; + } + } + + try manager.options.load( + ctx.allocator, + ctx.log, + env, + cli, + ctx.install, + subcommand, + ); + + var ca: []stringZ = &.{}; + if (manager.options.ca.len > 0) { + ca = try manager.allocator.alloc(stringZ, manager.options.ca.len); + for 
(ca, manager.options.ca) |*z, s| {
+ z.* = try manager.allocator.dupeZ(u8, s);
+ }
+ }
+
+ var abs_ca_file_name: stringZ = &.{};
+ if (manager.options.ca_file_name.len > 0) {
+ // resolve with original cwd
+ if (std.fs.path.isAbsolute(manager.options.ca_file_name)) {
+ abs_ca_file_name = try manager.allocator.dupeZ(u8, manager.options.ca_file_name);
+ } else {
+ var path_buf: bun.PathBuffer = undefined;
+ abs_ca_file_name = try manager.allocator.dupeZ(u8, bun.path.joinAbsStringBuf(
+ original_cwd_clone,
+ &path_buf,
+ &.{manager.options.ca_file_name},
+ .auto,
+ ));
+ }
+ }
+
+ AsyncHTTP.max_simultaneous_requests.store(brk: {
+ if (cli.network_concurrency) |network_concurrency| {
+ break :brk @max(network_concurrency, 1);
+ }
+
+ // If any HTTP proxy is set, use a different limit
+ if (env.has("http_proxy") or env.has("https_proxy") or env.has("HTTPS_PROXY") or env.has("HTTP_PROXY")) {
+ break :brk default_max_simultaneous_requests_for_bun_install_for_proxies;
+ }
+
+ break :brk default_max_simultaneous_requests_for_bun_install;
+ }, .monotonic);
+
+ HTTP.HTTPThread.init(&.{
+ .ca = ca,
+ .abs_ca_file_name = abs_ca_file_name,
+ .onInitError = &httpThreadOnInitError,
+ });
+
+ manager.timestamp_for_manifest_cache_control = brk: {
+ if (comptime bun.Environment.allow_assert) {
+ if (env.get("BUN_CONFIG_MANIFEST_CACHE_CONTROL_TIMESTAMP")) |cache_control| {
+ if (std.fmt.parseInt(u32, cache_control, 10)) |int| {
+ break :brk int;
+ } else |_| {}
+ }
+ }
+
+ break :brk @truncate(@as(u64, @intCast(@max(std.time.timestamp(), 0))));
+ };
+ return .{
+ manager,
+ original_cwd_clone,
+ };
+}
+
+pub fn initWithRuntime(
+ log: *logger.Log,
+ bun_install: ?*Api.BunInstall,
+ allocator: std.mem.Allocator,
+ cli: CommandLineArguments,
+ env: *DotEnv.Loader,
+) *PackageManager {
+ init_with_runtime_once.call(.{
+ log,
+ bun_install,
+ allocator,
+ cli,
+ env,
+ });
+ return PackageManager.get();
+}
+
+var init_with_runtime_once = bun.once(initWithRuntimeOnce);
+
+pub fn initWithRuntimeOnce(
+ log: *logger.Log,
+ bun_install: ?*Api.BunInstall,
+ allocator: std.mem.Allocator,
+ cli: CommandLineArguments,
+ env: *DotEnv.Loader,
+) void {
+ if (env.get("BUN_INSTALL_VERBOSE") != null) {
+ PackageManager.verbose_install = true;
+ }
+
+ const cpu_count = bun.getThreadCount();
+ PackageManager.allocatePackageManager();
+ const manager = PackageManager.get();
+ var root_dir = Fs.FileSystem.instance.fs.readDirectory(
+ Fs.FileSystem.instance.top_level_dir,
+ null,
+ 0,
+ true,
+ ) catch |err| {
+ Output.err(err, "failed to read root directory: '{s}'", .{Fs.FileSystem.instance.top_level_dir});
+ @panic("Failed to initialize package manager");
+ };
+
+ // var progress = Progress{};
+ // var node = progress.start(name: []const u8, estimated_total_items: usize)
+ const top_level_dir_no_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir);
+ var original_package_json_path = allocator.allocSentinel(u8, top_level_dir_no_trailing_slash.len + "/package.json".len, 0) catch bun.outOfMemory();
+ @memcpy(original_package_json_path[0..top_level_dir_no_trailing_slash.len], top_level_dir_no_trailing_slash);
+ @memcpy(original_package_json_path[top_level_dir_no_trailing_slash.len..][0.."/package.json".len], "/package.json");
+
+ manager.* = PackageManager{
+ .preallocated_network_tasks = .init(bun.default_allocator),
+ .preallocated_resolve_tasks = .init(bun.default_allocator),
+ .options = .{
+ .max_concurrent_lifecycle_scripts = cli.concurrent_scripts orelse cpu_count * 2,
+ },
+
.active_lifecycle_scripts = .{ + .context = manager, + }, + .network_task_fifo = NetworkQueue.init(), + .allocator = allocator, + .log = log, + .root_dir = root_dir.entries, + .env = env, + .cpu_count = cpu_count, + .thread_pool = ThreadPool.init(.{ + .max_threads = cpu_count, + }), + .lockfile = undefined, + .root_package_json_file = undefined, + .event_loop = .{ + .js = JSC.VirtualMachine.get().eventLoop(), + }, + .original_package_json_path = original_package_json_path[0..original_package_json_path.len :0], + .subcommand = .install, + }; + manager.lockfile = allocator.create(Lockfile) catch bun.outOfMemory(); + + if (Output.enable_ansi_colors_stderr) { + manager.progress = Progress{}; + manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; + manager.root_progress_node = manager.progress.start("", 0); + } else { + manager.options.log_level = .default_no_progress; + } + + if (!manager.options.enable.cache) { + manager.options.enable.manifest_cache = false; + manager.options.enable.manifest_cache_control = false; + } + + if (env.get("BUN_MANIFEST_CACHE")) |manifest_cache| { + if (strings.eqlComptime(manifest_cache, "1")) { + manager.options.enable.manifest_cache = true; + manager.options.enable.manifest_cache_control = false; + } else if (strings.eqlComptime(manifest_cache, "2")) { + manager.options.enable.manifest_cache = true; + manager.options.enable.manifest_cache_control = true; + } else { + manager.options.enable.manifest_cache = false; + manager.options.enable.manifest_cache_control = false; + } + } + + manager.options.load( + allocator, + log, + env, + cli, + bun_install, + .install, + ) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + } + }; + + manager.timestamp_for_manifest_cache_control = @as( + u32, + @truncate(@as( + u64, + @intCast(@max( + std.time.timestamp(), + 0, + )), + )), + // When using "bun install", we check for updates with a 300 second cache. + // When using bun, we only do staleness checks once per day + ) -| std.time.s_per_day; + + if (root_dir.entries.hasComptimeQuery("bun.lockb")) { + switch (manager.lockfile.loadFromCwd( + manager, + allocator, + log, + true, + )) { + .ok => |load| manager.lockfile = load.lockfile, + else => manager.lockfile.initEmpty(allocator), + } + } else { + manager.lockfile.initEmpty(allocator); + } +} +var cwd_buf: bun.PathBuffer = undefined; +pub var package_json_cwd_buf: bun.PathBuffer = undefined; +pub var package_json_cwd: string = ""; + +// Default to a maximum of 64 simultaneous HTTP requests for bun install if no proxy is specified +// if a proxy IS specified, default to 64. We have different values because we might change this in the future. 
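+// (Both constants below are currently 64; keeping them as separate constants lets the proxy limit be tuned independently later.)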
+// https://github.com/npm/cli/issues/7072 +// https://pnpm.io/npmrc#network-concurrency (pnpm defaults to 16) +// https://yarnpkg.com/configuration/yarnrc#networkConcurrency (defaults to 50) +const default_max_simultaneous_requests_for_bun_install = 64; +const default_max_simultaneous_requests_for_bun_install_for_proxies = 64; + +pub const TaskCallbackList = std.ArrayListUnmanaged(TaskCallbackContext); +const TaskDependencyQueue = std.HashMapUnmanaged(Task.Id, TaskCallbackList, IdentityContext(Task.Id), 80); + +const PreallocatedTaskStore = bun.HiveArray(Task, 64).Fallback; +const PreallocatedNetworkTasks = bun.HiveArray(NetworkTask, 128).Fallback; +const ResolveTaskQueue = bun.UnboundedQueue(Task, .next); + +const RepositoryMap = std.HashMapUnmanaged(Task.Id, bun.FileDescriptor, IdentityContext(Task.Id), 80); +const NpmAliasMap = std.HashMapUnmanaged(PackageNameHash, Dependency.Version, IdentityContext(u64), 80); + +const NetworkQueue = std.fifo.LinearFifo(*NetworkTask, .{ .Static = 32 }); +const PatchTaskFifo = std.fifo.LinearFifo(*PatchTask, .{ .Static = 32 }); + +// pub const ensureTempNodeGypScript = directories.ensureTempNodeGypScript; + +// @sortImports + +pub const CommandLineArguments = @import("./PackageManager/CommandLineArguments.zig"); +const DirInfo = @import("../resolver/dir_info.zig"); +pub const Options = @import("./PackageManager/PackageManagerOptions.zig"); +pub const PackageJSONEditor = @import("./PackageManager/PackageJSONEditor.zig"); +pub const UpdateRequest = @import("PackageManager/UpdateRequest.zig"); +pub const WorkspacePackageJSONCache = @import("PackageManager/WorkspacePackageJSONCache.zig"); +const std = @import("std"); +pub const PackageInstaller = @import("./PackageInstaller.zig").PackageInstaller; +pub const installWithManager = @import("PackageManager/install_with_manager.zig").installWithManager; + +pub const directories = @import("PackageManager/PackageManagerDirectories.zig"); +pub const attemptToCreatePackageJSON = directories.attemptToCreatePackageJSON; +const attemptToCreatePackageJSONAndOpen = directories.attemptToCreatePackageJSONAndOpen; +pub const cachedGitFolderName = directories.cachedGitFolderName; +pub const cachedGitFolderNamePrint = directories.cachedGitFolderNamePrint; +pub const cachedGitFolderNamePrintAuto = directories.cachedGitFolderNamePrintAuto; +pub const cachedGitHubFolderName = directories.cachedGitHubFolderName; +pub const cachedGitHubFolderNamePrint = directories.cachedGitHubFolderNamePrint; +pub const cachedGitHubFolderNamePrintAuto = directories.cachedGitHubFolderNamePrintAuto; +pub const cachedNPMPackageFolderName = directories.cachedNPMPackageFolderName; +pub const cachedNPMPackageFolderNamePrint = directories.cachedNPMPackageFolderNamePrint; +pub const cachedNPMPackageFolderPrintBasename = directories.cachedNPMPackageFolderPrintBasename; +pub const cachedTarballFolderName = directories.cachedTarballFolderName; +pub const cachedTarballFolderNamePrint = directories.cachedTarballFolderNamePrint; +pub const computeCacheDirAndSubpath = directories.computeCacheDirAndSubpath; +pub const fetchCacheDirectoryPath = directories.fetchCacheDirectoryPath; +pub const getCacheDirectory = directories.getCacheDirectory; +pub const getCacheDirectoryAndAbsPath = directories.getCacheDirectoryAndAbsPath; +pub const getTemporaryDirectory = directories.getTemporaryDirectory; +pub const globalLinkDir = directories.globalLinkDir; +pub const globalLinkDirAndPath = directories.globalLinkDirAndPath; +pub const globalLinkDirPath = 
directories.globalLinkDirPath; +pub const isFolderInCache = directories.isFolderInCache; +pub const pathForCachedNPMPath = directories.pathForCachedNPMPath; +pub const pathForResolution = directories.pathForResolution; +pub const saveLockfile = directories.saveLockfile; +pub const setupGlobalDir = directories.setupGlobalDir; +pub const updateLockfileIfNeeded = directories.updateLockfileIfNeeded; +pub const writeYarnLock = directories.writeYarnLock; + +pub const enqueue = @import("PackageManager/PackageManagerEnqueue.zig"); +pub const enqueueDependencyList = enqueue.enqueueDependencyList; +pub const enqueueDependencyToRoot = enqueue.enqueueDependencyToRoot; +pub const enqueueDependencyWithMain = enqueue.enqueueDependencyWithMain; +pub const enqueueDependencyWithMainAndSuccessFn = enqueue.enqueueDependencyWithMainAndSuccessFn; +pub const enqueueExtractNPMPackage = enqueue.enqueueExtractNPMPackage; +pub const enqueueGitCheckout = enqueue.enqueueGitCheckout; +pub const enqueueGitForCheckout = enqueue.enqueueGitForCheckout; +pub const enqueueNetworkTask = enqueue.enqueueNetworkTask; +pub const enqueuePackageForDownload = enqueue.enqueuePackageForDownload; +pub const enqueueParseNPMPackage = enqueue.enqueueParseNPMPackage; +pub const enqueuePatchTask = enqueue.enqueuePatchTask; +pub const enqueuePatchTaskPre = enqueue.enqueuePatchTaskPre; +pub const enqueueTarballForDownload = enqueue.enqueueTarballForDownload; +pub const enqueueTarballForReading = enqueue.enqueueTarballForReading; + +const lifecycle = @import("PackageManager/PackageManagerLifecycle.zig"); +const LifecycleScriptTimeLog = lifecycle.LifecycleScriptTimeLog; +pub const determinePreinstallState = lifecycle.determinePreinstallState; +pub const ensurePreinstallStateListCapacity = lifecycle.ensurePreinstallStateListCapacity; +pub const findTrustedDependenciesFromUpdateRequests = lifecycle.findTrustedDependenciesFromUpdateRequests; +pub const getPreinstallState = lifecycle.getPreinstallState; +pub const hasNoMorePendingLifecycleScripts = lifecycle.hasNoMorePendingLifecycleScripts; +pub const loadRootLifecycleScripts = lifecycle.loadRootLifecycleScripts; +pub const reportSlowLifecycleScripts = lifecycle.reportSlowLifecycleScripts; +pub const setPreinstallState = lifecycle.setPreinstallState; +pub const sleep = lifecycle.sleep; +pub const spawnPackageLifecycleScripts = lifecycle.spawnPackageLifecycleScripts; +pub const tickLifecycleScripts = lifecycle.tickLifecycleScripts; + +const resolution = @import("PackageManager/PackageManagerResolution.zig"); +pub const assignResolution = resolution.assignResolution; +pub const assignRootResolution = resolution.assignRootResolution; +pub const formatLaterVersionInCache = resolution.formatLaterVersionInCache; +pub const getInstalledVersionsFromDiskCache = resolution.getInstalledVersionsFromDiskCache; +pub const resolveFromDiskCache = resolution.resolveFromDiskCache; +pub const scopeForPackageName = resolution.scopeForPackageName; +pub const verifyResolutions = resolution.verifyResolutions; + +pub const progress_zig = @import("PackageManager/ProgressStrings.zig"); +pub const ProgressStrings = progress_zig.ProgressStrings; +pub const endProgressBar = progress_zig.endProgressBar; +pub const setNodeName = progress_zig.setNodeName; +pub const startProgressBar = progress_zig.startProgressBar; +pub const startProgressBarIfNone = progress_zig.startProgressBarIfNone; + +pub const PatchCommitResult = @import("PackageManager/patchPackage.zig").PatchCommitResult; +pub const doPatchCommit = 
@import("PackageManager/patchPackage.zig").doPatchCommit; +pub const preparePatch = @import("PackageManager/patchPackage.zig").preparePatch; + +pub const GitResolver = @import("PackageManager/processDependencyList.zig").GitResolver; +pub const processDependencyList = @import("PackageManager/processDependencyList.zig").processDependencyList; +pub const processDependencyListItem = @import("PackageManager/processDependencyList.zig").processDependencyListItem; +pub const processExtractedTarballPackage = @import("PackageManager/processDependencyList.zig").processExtractedTarballPackage; +pub const processPeerDependencyList = @import("PackageManager/processDependencyList.zig").processPeerDependencyList; + +pub const allocGitHubURL = @import("PackageManager/runTasks.zig").allocGitHubURL; +pub const decrementPendingTasks = @import("PackageManager/runTasks.zig").decrementPendingTasks; +pub const drainDependencyList = @import("PackageManager/runTasks.zig").drainDependencyList; +pub const flushDependencyQueue = @import("PackageManager/runTasks.zig").flushDependencyQueue; +pub const flushNetworkQueue = @import("PackageManager/runTasks.zig").flushNetworkQueue; +pub const flushPatchTaskQueue = @import("PackageManager/runTasks.zig").flushPatchTaskQueue; +pub const generateNetworkTaskForTarball = @import("PackageManager/runTasks.zig").generateNetworkTaskForTarball; +pub const getNetworkTask = @import("PackageManager/runTasks.zig").getNetworkTask; +pub const hasCreatedNetworkTask = @import("PackageManager/runTasks.zig").hasCreatedNetworkTask; +pub const incrementPendingTasks = @import("PackageManager/runTasks.zig").incrementPendingTasks; +pub const isNetworkTaskRequired = @import("PackageManager/runTasks.zig").isNetworkTaskRequired; +pub const pendingTaskCount = @import("PackageManager/runTasks.zig").pendingTaskCount; +pub const runTasks = @import("PackageManager/runTasks.zig").runTasks; +pub const scheduleTasks = @import("PackageManager/runTasks.zig").scheduleTasks; + +const updatePackageJSONAndInstall = @import("PackageManager/updatePackageJSONAndInstall.zig").updatePackageJSONAndInstall; +pub const updatePackageJSONAndInstallCatchError = @import("PackageManager/updatePackageJSONAndInstall.zig").updatePackageJSONAndInstallCatchError; +pub const updatePackageJSONAndInstallWithManager = @import("PackageManager/updatePackageJSONAndInstall.zig").updatePackageJSONAndInstallWithManager; + +const bun = @import("bun"); +const DotEnv = bun.DotEnv; +const Environment = bun.Environment; +const Global = bun.Global; +const JSC = bun.JSC; +const JSON = bun.JSON; +const OOM = bun.OOM; +const Output = bun.Output; +const Path = bun.path; +const Progress = bun.Progress; +const RunCommand = bun.RunCommand; +const ThreadPool = bun.ThreadPool; +const URL = bun.URL; +const default_allocator = bun.default_allocator; +const logger = bun.logger; +const string = bun.string; +const stringZ = bun.stringZ; +const strings = bun.strings; +const transpiler = bun.transpiler; +const Api = bun.Schema.Api; +const File = bun.sys.File; + +const BunArguments = bun.CLI.Arguments; +const Command = bun.CLI.Command; + +const Semver = bun.Semver; +const String = Semver.String; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const HTTP = bun.http; +const AsyncHTTP = HTTP.AsyncHTTP; + +const ArrayIdentityContext = bun.install.ArrayIdentityContext; +const Dependency = bun.install.Dependency; +const DependencyID = bun.install.DependencyID; +const Features = bun.install.Features; +const FolderResolution = bun.install.FolderResolution; 
+const IdentityContext = bun.install.IdentityContext; +const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess; +const NetworkTask = bun.install.NetworkTask; +const PackageID = bun.install.PackageID; +const PackageManager = bun.install.PackageManager; +const PackageManifestMap = bun.install.PackageManifestMap; +const PackageNameAndVersionHash = bun.install.PackageNameAndVersionHash; +const PackageNameHash = bun.install.PackageNameHash; +const PatchTask = bun.install.PatchTask; +const PreinstallState = bun.install.PreinstallState; +const Task = bun.install.Task; +const TaskCallbackContext = bun.install.TaskCallbackContext; +const initializeStore = bun.install.initializeStore; + +const Lockfile = bun.install.Lockfile; +const Package = Lockfile.Package; diff --git a/src/install/PackageManager/CommandLineArguments.zig b/src/install/PackageManager/CommandLineArguments.zig index a3e35bf920..a3251006ad 100644 --- a/src/install/PackageManager/CommandLineArguments.zig +++ b/src/install/PackageManager/CommandLineArguments.zig @@ -74,6 +74,11 @@ pub const pm_params: []const ParamType = &(shared_params ++ [_]ParamType{ clap.parseParam("--destination The directory the tarball will be saved in") catch unreachable, clap.parseParam("--filename The filename of the tarball") catch unreachable, clap.parseParam("--gzip-level Specify a custom compression level for gzip. Default is 9.") catch unreachable, + clap.parseParam("--git-tag-version Create a git commit and tag") catch unreachable, + clap.parseParam("--no-git-tag-version") catch unreachable, + clap.parseParam("--allow-same-version Allow bumping to the same version") catch unreachable, + clap.parseParam("-m, --message Use the given message for the commit") catch unreachable, + clap.parseParam("--preid Identifier to be used to prefix premajor, preminor, prepatch or prerelease version increments") catch unreachable, clap.parseParam(" ... ") catch unreachable, }); @@ -200,6 +205,12 @@ save_text_lockfile: ?bool = null, lockfile_only: bool = false, +// `bun pm version` options +git_tag_version: bool = true, +allow_same_version: bool = false, +preid: string = "", +message: ?string = null, + const PatchOpts = union(enum) { nothing: struct {}, patch: struct {}, @@ -243,7 +254,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Skip devDependencies \\ bun install --production \\ - \\Full documentation is available at https://bun.sh/docs/cli/install. + \\Full documentation is available at https://bun.com/docs/cli/install. \\ ; Output.pretty(intro_text, .{}); @@ -273,7 +284,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Update specific packages: \\ bun update zod jquery@3 \\ - \\Full documentation is available at https://bun.sh/docs/cli/update. + \\Full documentation is available at https://bun.com/docs/cli/update. \\ ; Output.pretty(intro_text, .{}); @@ -304,7 +315,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Generate a patch file in a custom directory for changes made to jquery \\ bun patch --patches-dir 'my-patches' 'node_modules/jquery' \\ - \\Full documentation is available at https://bun.sh/docs/install/patch. + \\Full documentation is available at https://bun.com/docs/install/patch. \\ ; @@ -332,7 +343,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Generate a patch in a custom directory ("./my-patches") \\ bun patch-commit --patches-dir 'my-patches' 'node_modules/jquery' \\ - \\Full documentation is available at https://bun.sh/docs/install/patch. 
+ \\Full documentation is available at https://bun.com/docs/install/patch. \\ ; Output.pretty(intro_text, .{}); @@ -367,7 +378,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ bun add --optional lodash \\ bun add --peer esbuild \\ - \\Full documentation is available at https://bun.sh/docs/cli/add. + \\Full documentation is available at https://bun.com/docs/cli/add. \\ ; Output.pretty(intro_text, .{}); @@ -392,7 +403,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Remove a dependency \\ bun remove ts-node \\ - \\Full documentation is available at https://bun.sh/docs/cli/remove. + \\Full documentation is available at https://bun.com/docs/cli/remove. \\ ; Output.pretty(intro_text, .{}); @@ -420,7 +431,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Add a previously-registered linkable package as a dependency of the current project. \\ bun link \ \\ - \\Full documentation is available at https://bun.sh/docs/cli/link. + \\Full documentation is available at https://bun.com/docs/cli/link. \\ ; Output.pretty(intro_text, .{}); @@ -445,7 +456,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Unregister the current directory as a linkable package. \\ bun unlink \\ - \\Full documentation is available at https://bun.sh/docs/cli/unlink. + \\Full documentation is available at https://bun.com/docs/cli/unlink. \\ ; @@ -481,7 +492,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ bun outdated "is-*" \\ bun outdated "!is-even" \\ - \\Full documentation is available at https://bun.sh/docs/cli/outdated. + \\Full documentation is available at https://bun.com/docs/cli/outdated. \\ ; @@ -506,7 +517,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\Examples: \\ bun pm pack \\ - \\Full documentation is available at https://bun.sh/docs/cli/pm#pack. + \\Full documentation is available at https://bun.com/docs/cli/pm#pack. \\ ; @@ -538,7 +549,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Publish a pre-existing package tarball with tag 'next'. \\ bun publish --tag next ./path/to/tarball.tgz \\ - \\Full documentation is available at https://bun.sh/docs/cli/publish. + \\Full documentation is available at https://bun.com/docs/cli/publish. \\ ; @@ -567,7 +578,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Output package vulnerabilities in JSON format. \\ bun audit --json \\ - \\Full documentation is available at https://bun.sh/docs/install/audit. + \\Full documentation is available at https://bun.com/docs/install/audit. \\ ; @@ -599,7 +610,7 @@ pub fn printHelp(subcommand: Subcommand) void { \\ Display a specific property in JSON format \\ bun info react version --json \\ - \\Full documentation is available at https://bun.sh/docs/cli/info. + \\Full documentation is available at https://bun.com/docs/cli/info. 
\\ ; @@ -880,6 +891,28 @@ pub fn parse(allocator: std.mem.Allocator, comptime subcommand: Subcommand) !Com Global.crash(); } + if (comptime subcommand == .pm) { + // `bun pm version` command options + if (args.option("--git-tag-version")) |git_tag_version| { + if (strings.eqlComptime(git_tag_version, "true")) { + cli.git_tag_version = true; + } else if (strings.eqlComptime(git_tag_version, "false")) { + cli.git_tag_version = false; + } + } else if (args.flag("--no-git-tag-version")) { + cli.git_tag_version = false; + } else { + cli.git_tag_version = true; + } + cli.allow_same_version = args.flag("--allow-same-version"); + if (args.option("--preid")) |preid| { + cli.preid = preid; + } + if (args.option("--message")) |message| { + cli.message = message; + } + } + return cli; } diff --git a/src/install/PackageManager/PackageManagerDirectories.zig b/src/install/PackageManager/PackageManagerDirectories.zig new file mode 100644 index 0000000000..98aa1f9cfa --- /dev/null +++ b/src/install/PackageManager/PackageManagerDirectories.zig @@ -0,0 +1,774 @@ +pub inline fn getCacheDirectory(this: *PackageManager) std.fs.Dir { + return this.cache_directory_ orelse brk: { + this.cache_directory_ = ensureCacheDirectory(this); + break :brk this.cache_directory_.?; + }; +} + +pub inline fn getCacheDirectoryAndAbsPath(this: *PackageManager) struct { FD, bun.AbsPath(.{}) } { + const cache_dir = this.getCacheDirectory(); + return .{ .fromStdDir(cache_dir), .from(this.cache_directory_path) }; +} + +pub inline fn getTemporaryDirectory(this: *PackageManager) std.fs.Dir { + return this.temp_dir_ orelse brk: { + this.temp_dir_ = ensureTemporaryDirectory(this); + var pathbuf: bun.PathBuffer = undefined; + const temp_dir_path = bun.getFdPathZ(.fromStdDir(this.temp_dir_.?), &pathbuf) catch Output.panic("Unable to read temporary directory path", .{}); + this.temp_dir_path = bun.default_allocator.dupeZ(u8, temp_dir_path) catch bun.outOfMemory(); + break :brk this.temp_dir_.?; + }; +} + +noinline fn ensureCacheDirectory(this: *PackageManager) std.fs.Dir { + loop: while (true) { + if (this.options.enable.cache) { + const cache_dir = fetchCacheDirectoryPath(this.env, &this.options); + this.cache_directory_path = this.allocator.dupeZ(u8, cache_dir.path) catch bun.outOfMemory(); + + return std.fs.cwd().makeOpenPath(cache_dir.path, .{}) catch { + this.options.enable.cache = false; + this.allocator.free(this.cache_directory_path); + continue :loop; + }; + } + + this.cache_directory_path = this.allocator.dupeZ(u8, Path.joinAbsString( + Fs.FileSystem.instance.top_level_dir, + &.{ + "node_modules", + ".cache", + }, + .auto, + )) catch bun.outOfMemory(); + + return std.fs.cwd().makeOpenPath("node_modules/.cache", .{}) catch |err| { + Output.prettyErrorln("error: bun is unable to write files: {s}", .{@errorName(err)}); + Global.crash(); + }; + } + unreachable; +} + +// We need a temporary directory that can be rename() +// This is important for extracting files. +// +// However, we want it to be reused! Otherwise a cache is silly. 
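+// If the temp dir and the cache directory end up on different filesystems, rename() fails with an error like: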
+// Error RenameAcrossMountPoints moving react-is to cache dir: +noinline fn ensureTemporaryDirectory(this: *PackageManager) std.fs.Dir { + var cache_directory = this.getCacheDirectory(); + // The chosen tempdir must be on the same filesystem as the cache directory + // This makes renameat() work + this.temp_dir_name = Fs.FileSystem.RealFS.getDefaultTempDir(); + + var tried_dot_tmp = false; + var tempdir: std.fs.Dir = bun.MakePath.makeOpenPath(std.fs.cwd(), this.temp_dir_name, .{}) catch brk: { + tried_dot_tmp = true; + break :brk bun.MakePath.makeOpenPath(cache_directory, bun.pathLiteral(".tmp"), .{}) catch |err| { + Output.prettyErrorln("error: bun is unable to access tempdir: {s}", .{@errorName(err)}); + Global.crash(); + }; + }; + var tmpbuf: bun.PathBuffer = undefined; + const tmpname = Fs.FileSystem.instance.tmpname("hm", &tmpbuf, bun.fastRandom()) catch unreachable; + var timer: std.time.Timer = if (this.options.log_level != .silent) std.time.Timer.start() catch unreachable else undefined; + brk: while (true) { + var file = tempdir.createFileZ(tmpname, .{ .truncate = true }) catch |err2| { + if (!tried_dot_tmp) { + tried_dot_tmp = true; + + tempdir = bun.MakePath.makeOpenPath(cache_directory, bun.pathLiteral(".tmp"), .{}) catch |err| { + Output.prettyErrorln("error: bun is unable to access tempdir: {s}", .{@errorName(err)}); + Global.crash(); + }; + + if (PackageManager.verbose_install) { + Output.prettyErrorln("warn: bun is unable to access tempdir: {s}, using fallback", .{@errorName(err2)}); + } + + continue :brk; + } + Output.prettyErrorln("error: {s} accessing temporary directory. Please set $BUN_TMPDIR or $BUN_INSTALL", .{ + @errorName(err2), + }); + Global.crash(); + }; + file.close(); + + std.posix.renameatZ(tempdir.fd, tmpname, cache_directory.fd, tmpname) catch |err| { + if (!tried_dot_tmp) { + tried_dot_tmp = true; + tempdir = cache_directory.makeOpenPath(".tmp", .{}) catch |err2| { + Output.prettyErrorln("error: bun is unable to write files to tempdir: {s}", .{@errorName(err2)}); + Global.crash(); + }; + + if (PackageManager.verbose_install) { + Output.prettyErrorln("info: cannot move files from tempdir: {s}, using fallback", .{@errorName(err)}); + } + + continue :brk; + } + + Output.prettyErrorln("error: {s} accessing temporary directory. Please set $BUN_TMPDIR or $BUN_INSTALL", .{ + @errorName(err), + }); + Global.crash(); + }; + cache_directory.deleteFileZ(tmpname) catch {}; + break; + } + if (tried_dot_tmp) { + using_fallback_temp_dir = true; + } + if (this.options.log_level != .silent) { + const elapsed = timer.read(); + if (elapsed > std.time.ns_per_ms * 100) { + var path_buf: bun.PathBuffer = undefined; + const cache_dir_path = bun.getFdPath(.fromStdDir(cache_directory), &path_buf) catch "it"; + Output.prettyErrorln( + "warn: Slow filesystem detected. 
If {s} is a network drive, consider setting $BUN_INSTALL_CACHE_DIR to a local folder.", + .{cache_dir_path}, + ); + } + } + + return tempdir; +} + +const CacheDir = struct { path: string, is_node_modules: bool }; +pub fn fetchCacheDirectoryPath(env: *DotEnv.Loader, options: ?*const Options) CacheDir { + if (env.get("BUN_INSTALL_CACHE_DIR")) |dir| { + return CacheDir{ .path = Fs.FileSystem.instance.abs(&[_]string{dir}), .is_node_modules = false }; + } + + if (options) |opts| { + if (opts.cache_directory.len > 0) { + return CacheDir{ .path = Fs.FileSystem.instance.abs(&[_]string{opts.cache_directory}), .is_node_modules = false }; + } + } + + if (env.get("BUN_INSTALL")) |dir| { + var parts = [_]string{ dir, "install/", "cache/" }; + return CacheDir{ .path = Fs.FileSystem.instance.abs(&parts), .is_node_modules = false }; + } + + if (env.get("XDG_CACHE_HOME")) |dir| { + var parts = [_]string{ dir, ".bun/", "install/", "cache/" }; + return CacheDir{ .path = Fs.FileSystem.instance.abs(&parts), .is_node_modules = false }; + } + + if (env.get(bun.DotEnv.home_env)) |dir| { + var parts = [_]string{ dir, ".bun/", "install/", "cache/" }; + return CacheDir{ .path = Fs.FileSystem.instance.abs(&parts), .is_node_modules = false }; + } + + var fallback_parts = [_]string{"node_modules/.bun-cache"}; + return CacheDir{ .is_node_modules = true, .path = Fs.FileSystem.instance.abs(&fallback_parts) }; +} + +pub fn cachedGitFolderNamePrint(buf: []u8, resolved: string, patch_hash: ?u64) stringZ { + return std.fmt.bufPrintZ(buf, "@G@{s}{}", .{ resolved, PatchHashFmt{ .hash = patch_hash } }) catch unreachable; +} + +pub fn cachedGitFolderName(this: *const PackageManager, repository: *const Repository, patch_hash: ?u64) stringZ { + return cachedGitFolderNamePrint(&PackageManager.cached_package_folder_name_buf, this.lockfile.str(&repository.resolved), patch_hash); +} + +pub fn cachedGitFolderNamePrintAuto(this: *const PackageManager, repository: *const Repository, patch_hash: ?u64) stringZ { + if (!repository.resolved.isEmpty()) { + return this.cachedGitFolderName(repository, patch_hash); + } + + if (!repository.repo.isEmpty() and !repository.committish.isEmpty()) { + const string_buf = this.lockfile.buffers.string_bytes.items; + return std.fmt.bufPrintZ( + &PackageManager.cached_package_folder_name_buf, + "@G@{any}{}{}", + .{ + repository.committish.fmt(string_buf), + CacheVersion.Formatter{ .version_number = CacheVersion.current }, + PatchHashFmt{ .hash = patch_hash }, + }, + ) catch unreachable; + } + + return ""; +} + +pub fn cachedGitHubFolderNamePrint(buf: []u8, resolved: string, patch_hash: ?u64) stringZ { + return std.fmt.bufPrintZ(buf, "@GH@{s}{}{}", .{ + resolved, + CacheVersion.Formatter{ .version_number = CacheVersion.current }, + PatchHashFmt{ .hash = patch_hash }, + }) catch unreachable; +} + +pub fn cachedGitHubFolderName(this: *const PackageManager, repository: *const Repository, patch_hash: ?u64) stringZ { + return cachedGitHubFolderNamePrint(&PackageManager.cached_package_folder_name_buf, this.lockfile.str(&repository.resolved), patch_hash); +} + +pub fn cachedGitHubFolderNamePrintAuto(this: *const PackageManager, repository: *const Repository, patch_hash: ?u64) stringZ { + if (!repository.resolved.isEmpty()) { + return this.cachedGitHubFolderName(repository, patch_hash); + } + + if (!repository.owner.isEmpty() and !repository.repo.isEmpty() and !repository.committish.isEmpty()) { + return cachedGitHubFolderNamePrintGuess(&PackageManager.cached_package_folder_name_buf, 
this.lockfile.buffers.string_bytes.items, repository, patch_hash); + } + + return ""; +} + +// TODO: normalize to alphanumeric +pub fn cachedNPMPackageFolderNamePrint(this: *const PackageManager, buf: []u8, name: string, version: Semver.Version, patch_hash: ?u64) stringZ { + const scope = this.scopeForPackageName(name); + + if (scope.name.len == 0 and !this.options.did_override_default_scope) { + const include_version_number = true; + return cachedNPMPackageFolderPrintBasename(buf, name, version, patch_hash, include_version_number); + } + + const include_version_number = false; + const basename = cachedNPMPackageFolderPrintBasename(buf, name, version, null, include_version_number); + + const spanned = bun.span(basename); + const available = buf[spanned.len..]; + var end: []u8 = undefined; + if (scope.url.hostname.len > 32 or available.len < 64) { + const visible_hostname = scope.url.hostname[0..@min(scope.url.hostname.len, 12)]; + end = std.fmt.bufPrint(available, "@@{s}__{any}{}{}", .{ + visible_hostname, + bun.fmt.hexIntLower(String.Builder.stringHash(scope.url.href)), + CacheVersion.Formatter{ .version_number = CacheVersion.current }, + PatchHashFmt{ .hash = patch_hash }, + }) catch unreachable; + } else { + end = std.fmt.bufPrint(available, "@@{s}{}{}", .{ + scope.url.hostname, + CacheVersion.Formatter{ .version_number = CacheVersion.current }, + PatchHashFmt{ .hash = patch_hash }, + }) catch unreachable; + } + + buf[spanned.len + end.len] = 0; + const result: [:0]u8 = buf[0 .. spanned.len + end.len :0]; + return result; +} + +fn cachedGitHubFolderNamePrintGuess(buf: []u8, string_buf: []const u8, repository: *const Repository, patch_hash: ?u64) stringZ { + return std.fmt.bufPrintZ( + buf, + "@GH@{any}-{any}-{any}{}{}", + .{ + repository.owner.fmt(string_buf), + repository.repo.fmt(string_buf), + repository.committish.fmt(string_buf), + CacheVersion.Formatter{ .version_number = CacheVersion.current }, + PatchHashFmt{ .hash = patch_hash }, + }, + ) catch unreachable; +} +pub fn cachedNPMPackageFolderName(this: *const PackageManager, name: string, version: Semver.Version, patch_hash: ?u64) stringZ { + return this.cachedNPMPackageFolderNamePrint(&PackageManager.cached_package_folder_name_buf, name, version, patch_hash); +} + +// TODO: normalize to alphanumeric +pub fn cachedNPMPackageFolderPrintBasename( + buf: []u8, + name: string, + version: Semver.Version, + patch_hash: ?u64, + include_cache_version: bool, +) stringZ { + if (version.tag.hasPre()) { + if (version.tag.hasBuild()) { + return std.fmt.bufPrintZ( + buf, + "{s}@{d}.{d}.{d}-{any}+{any}{}{}", + .{ + name, + version.major, + version.minor, + version.patch, + bun.fmt.hexIntLower(version.tag.pre.hash), + bun.fmt.hexIntUpper(version.tag.build.hash), + CacheVersion.Formatter{ .version_number = if (include_cache_version) CacheVersion.current else null }, + PatchHashFmt{ .hash = patch_hash }, + }, + ) catch unreachable; + } + return std.fmt.bufPrintZ( + buf, + "{s}@{d}.{d}.{d}-{any}{}{}", + .{ + name, + version.major, + version.minor, + version.patch, + bun.fmt.hexIntLower(version.tag.pre.hash), + CacheVersion.Formatter{ .version_number = if (include_cache_version) CacheVersion.current else null }, + PatchHashFmt{ .hash = patch_hash }, + }, + ) catch unreachable; + } + if (version.tag.hasBuild()) { + return std.fmt.bufPrintZ( + buf, + "{s}@{d}.{d}.{d}+{any}{}{}", + .{ + name, + version.major, + version.minor, + version.patch, + bun.fmt.hexIntUpper(version.tag.build.hash), + CacheVersion.Formatter{ .version_number = if 
(include_cache_version) CacheVersion.current else null }, + PatchHashFmt{ .hash = patch_hash }, + }, + ) catch unreachable; + } + return std.fmt.bufPrintZ(buf, "{s}@{d}.{d}.{d}{}{}", .{ + name, + version.major, + version.minor, + version.patch, + CacheVersion.Formatter{ .version_number = if (include_cache_version) CacheVersion.current else null }, + PatchHashFmt{ .hash = patch_hash }, + }) catch unreachable; +} + +pub fn cachedTarballFolderNamePrint(buf: []u8, url: string, patch_hash: ?u64) stringZ { + return std.fmt.bufPrintZ(buf, "@T@{any}{}{}", .{ + bun.fmt.hexIntLower(String.Builder.stringHash(url)), + CacheVersion.Formatter{ .version_number = CacheVersion.current }, + PatchHashFmt{ .hash = patch_hash }, + }) catch unreachable; +} + +pub fn cachedTarballFolderName(this: *const PackageManager, url: String, patch_hash: ?u64) stringZ { + return cachedTarballFolderNamePrint(&PackageManager.cached_package_folder_name_buf, this.lockfile.str(&url), patch_hash); +} + +pub fn isFolderInCache(this: *PackageManager, folder_path: stringZ) bool { + return bun.sys.directoryExistsAt(.fromStdDir(this.getCacheDirectory()), folder_path).unwrap() catch false; +} + +pub fn setupGlobalDir(manager: *PackageManager, ctx: Command.Context) !void { + manager.options.global_bin_dir = try Options.openGlobalBinDir(ctx.install); + var out_buffer: bun.PathBuffer = undefined; + const result = try bun.getFdPathZ(.fromStdDir(manager.options.global_bin_dir), &out_buffer); + const path = try FileSystem.instance.dirname_store.append([:0]u8, result); + manager.options.bin_path = path.ptr[0..path.len :0]; +} + +pub fn globalLinkDir(this: *PackageManager) std.fs.Dir { + return this.global_link_dir orelse brk: { + var global_dir = Options.openGlobalDir(this.options.explicit_global_directory) catch |err| switch (err) { + error.@"No global directory found" => { + Output.errGeneric("failed to find a global directory for package caching and global link directories", .{}); + Global.exit(1); + }, + else => { + Output.err(err, "failed to open the global directory", .{}); + Global.exit(1); + }, + }; + this.global_dir = global_dir; + this.global_link_dir = global_dir.makeOpenPath("node_modules", .{}) catch |err| { + Output.err(err, "failed to open global link dir node_modules at '{}'", .{FD.fromStdDir(global_dir)}); + Global.exit(1); + }; + var buf: bun.PathBuffer = undefined; + const _path = bun.getFdPath(.fromStdDir(this.global_link_dir.?), &buf) catch |err| { + Output.err(err, "failed to get the full path of the global directory", .{}); + Global.exit(1); + }; + this.global_link_dir_path = Fs.FileSystem.DirnameStore.instance.append([]const u8, _path) catch bun.outOfMemory(); + break :brk this.global_link_dir.?; + }; +} + +pub fn globalLinkDirPath(this: *PackageManager) []const u8 { + _ = this.globalLinkDir(); + return this.global_link_dir_path; +} + +pub fn globalLinkDirAndPath(this: *PackageManager) struct { std.fs.Dir, []const u8 } { + const dir = this.globalLinkDir(); + return .{ dir, this.global_link_dir_path }; +} + +pub fn pathForCachedNPMPath( + this: *PackageManager, + buf: *bun.PathBuffer, + package_name: []const u8, + version: Semver.Version, +) ![]u8 { + var cache_path_buf: bun.PathBuffer = undefined; + + const cache_path = this.cachedNPMPackageFolderNamePrint(&cache_path_buf, package_name, version, null); + + if (comptime Environment.allow_assert) { + bun.assertWithLocation(cache_path[package_name.len] == '@', @src()); + } + + cache_path_buf[package_name.len] = std.fs.path.sep; + + const cache_dir: bun.FD = 
.fromStdDir(this.getCacheDirectory()); + + if (comptime Environment.isWindows) { + var path_buf: bun.PathBuffer = undefined; + const joined = bun.path.joinAbsStringBufZ(this.cache_directory_path, &path_buf, &[_]string{cache_path}, .windows); + return bun.sys.readlink(joined, buf).unwrap() catch |err| { + _ = bun.sys.unlink(joined); + return err; + }; + } + + return cache_dir.readlinkat(cache_path, buf).unwrap() catch |err| { + // if we run into an error, delete the symlink + // so that we don't repeatedly try to read it + _ = cache_dir.unlinkat(cache_path); + return err; + }; +} + +pub fn pathForResolution( + this: *PackageManager, + package_id: PackageID, + resolution: Resolution, + buf: *bun.PathBuffer, +) ![]u8 { + // const folder_name = this.cachedNPMPackageFolderName(name, version); + switch (resolution.tag) { + .npm => { + const npm = resolution.value.npm; + const package_name_ = this.lockfile.packages.items(.name)[package_id]; + const package_name = this.lockfile.str(&package_name_); + + return this.pathForCachedNPMPath(buf, package_name, npm.version); + }, + else => return "", + } +} + +/// this is copy pasted from `installPackageWithNameAndResolution()` +/// it's not great to do this +pub fn computeCacheDirAndSubpath( + manager: *PackageManager, + pkg_name: string, + resolution: *const Resolution, + folder_path_buf: *bun.PathBuffer, + patch_hash: ?u64, +) struct { cache_dir: std.fs.Dir, cache_dir_subpath: stringZ } { + const name = pkg_name; + const buf = manager.lockfile.buffers.string_bytes.items; + var cache_dir = std.fs.cwd(); + var cache_dir_subpath: stringZ = ""; + + switch (resolution.tag) { + .npm => { + cache_dir_subpath = manager.cachedNPMPackageFolderName(name, resolution.value.npm.version, patch_hash); + cache_dir = manager.getCacheDirectory(); + }, + .git => { + cache_dir_subpath = manager.cachedGitFolderName( + &resolution.value.git, + patch_hash, + ); + cache_dir = manager.getCacheDirectory(); + }, + .github => { + cache_dir_subpath = manager.cachedGitHubFolderName(&resolution.value.github, patch_hash); + cache_dir = manager.getCacheDirectory(); + }, + .folder => { + const folder = resolution.value.folder.slice(buf); + // Handle when a package depends on itself via file: + // example: + // "mineflayer": "file:." 
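+ // (an empty or "." path resolves to the package's own directory)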
+            if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) {
+                cache_dir_subpath = ".";
+            } else {
+                @memcpy(folder_path_buf[0..folder.len], folder);
+                folder_path_buf[folder.len] = 0;
+                cache_dir_subpath = folder_path_buf[0..folder.len :0];
+            }
+            cache_dir = std.fs.cwd();
+        },
+        .local_tarball => {
+            cache_dir_subpath = manager.cachedTarballFolderName(resolution.value.local_tarball, patch_hash);
+            cache_dir = manager.getCacheDirectory();
+        },
+        .remote_tarball => {
+            cache_dir_subpath = manager.cachedTarballFolderName(resolution.value.remote_tarball, patch_hash);
+            cache_dir = manager.getCacheDirectory();
+        },
+        .workspace => {
+            const folder = resolution.value.workspace.slice(buf);
+            // Handle when a package depends on itself
+            if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) {
+                cache_dir_subpath = ".";
+            } else {
+                @memcpy(folder_path_buf[0..folder.len], folder);
+                folder_path_buf[folder.len] = 0;
+                cache_dir_subpath = folder_path_buf[0..folder.len :0];
+            }
+            cache_dir = std.fs.cwd();
+        },
+        .symlink => {
+            const directory = manager.globalLinkDir();
+
+            const folder = resolution.value.symlink.slice(buf);
+
+            if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) {
+                cache_dir_subpath = ".";
+                cache_dir = std.fs.cwd();
+            } else {
+                const global_link_dir = manager.globalLinkDirPath();
+                var ptr = folder_path_buf;
+                var remain: []u8 = folder_path_buf[0..];
+                @memcpy(ptr[0..global_link_dir.len], global_link_dir);
+                remain = remain[global_link_dir.len..];
+                if (global_link_dir[global_link_dir.len - 1] != std.fs.path.sep) {
+                    remain[0] = std.fs.path.sep;
+                    remain = remain[1..];
+                }
+                @memcpy(remain[0..folder.len], folder);
+                remain = remain[folder.len..];
+                remain[0] = 0;
+                const len = @intFromPtr(remain.ptr) - @intFromPtr(ptr);
+                cache_dir_subpath = folder_path_buf[0..len :0];
+                cache_dir = directory;
+            }
+        },
+        else => {},
+    }
+
+    return .{
+        .cache_dir = cache_dir,
+        .cache_dir_subpath = cache_dir_subpath,
+    };
+}
+
+pub fn attemptToCreatePackageJSONAndOpen() !std.fs.File {
+    const package_json_file = std.fs.cwd().createFileZ("package.json", .{ .read = true }) catch |err| {
+        Output.prettyErrorln("error: {s} create package.json", .{@errorName(err)});
+        Global.crash();
+    };
+
+    try package_json_file.pwriteAll("{\"dependencies\": {}}", 0);
+
+    return package_json_file;
+}
+
+pub fn attemptToCreatePackageJSON() !void {
+    var file = try attemptToCreatePackageJSONAndOpen();
+    file.close();
+}
+
+pub fn saveLockfile(
+    this: *PackageManager,
+    load_result: *const Lockfile.LoadResult,
+    save_format: Lockfile.LoadResult.LockfileFormat,
+    had_any_diffs: bool,
+    // TODO(dylan-conway): this and `packages_len_before_install` can most likely be deleted
+    // now that git dependencies don't append to lockfile during installation.
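+    // for now they are only read by the determinism assertions below (allow_assert builds).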
+    lockfile_before_install: *const Lockfile,
+    packages_len_before_install: usize,
+    log_level: Options.LogLevel,
+) OOM!void {
+    if (this.lockfile.isEmpty()) {
+        if (!this.options.dry_run) delete: {
+            const delete_format = switch (load_result.*) {
+                .not_found => break :delete,
+                .err => |err| err.format,
+                .ok => |ok| ok.format,
+            };
+
+            bun.sys.unlinkat(
+                FD.cwd(),
+                if (delete_format == .text) comptime bun.OSPathLiteral("bun.lock") else comptime bun.OSPathLiteral("bun.lockb"),
+            ).unwrap() catch |err| {
+                // we don't care
+                if (err == error.ENOENT) {
+                    if (had_any_diffs) return;
+                    break :delete;
+                }
+
+                if (log_level != .silent) {
+                    Output.err(err, "failed to delete empty lockfile", .{});
+                }
+                return;
+            };
+        }
+        if (!this.options.global) {
+            if (log_level != .silent) {
+                switch (this.subcommand) {
+                    .remove => Output.prettyErrorln("\npackage.json has no dependencies! Deleted empty lockfile", .{}),
+                    else => Output.prettyErrorln("No packages! Deleted empty lockfile", .{}),
+                }
+            }
+        }
+
+        return;
+    }
+
+    var save_node: *Progress.Node = undefined;
+
+    if (log_level.showProgress()) {
+        this.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
+        save_node = this.progress.start(ProgressStrings.save(), 0);
+        save_node.activate();
+
+        this.progress.refresh();
+    }
+
+    this.lockfile.saveToDisk(load_result, &this.options);
+
+    // delete binary lockfile if saving text lockfile
+    if (save_format == .text and load_result.loadedFromBinaryLockfile()) {
+        _ = bun.sys.unlinkat(FD.cwd(), comptime bun.OSPathLiteral("bun.lockb"));
+    }
+
+    if (comptime Environment.allow_assert) {
+        if (load_result.* != .not_found) {
+            if (load_result.loadedFromTextLockfile()) {
+                if (!try this.lockfile.eql(lockfile_before_install, packages_len_before_install, this.allocator)) {
+                    Output.panic("Lockfile non-deterministic after saving", .{});
+                }
+            } else {
+                if (this.lockfile.hasMetaHashChanged(false, packages_len_before_install) catch false) {
+                    Output.panic("Lockfile metahash non-deterministic after saving", .{});
+                }
+            }
+        }
+    }
+
+    if (log_level.showProgress()) {
+        save_node.end();
+        this.progress.refresh();
+        this.progress.root.end();
+        this.progress = .{};
+    } else if (log_level != .silent) {
+        Output.prettyErrorln("Saved lockfile", .{});
+        Output.flush();
+    }
+}
+
+pub fn updateLockfileIfNeeded(
+    manager: *PackageManager,
+    load_result: Lockfile.LoadResult,
+) !void {
+    if (load_result == .ok and load_result.ok.serializer_result.packages_need_update) {
+        const slice = manager.lockfile.packages.slice();
+        for (slice.items(.meta)) |*meta| {
+            // these are possibly updated later, but need to make sure none are zero
+            meta.setHasInstallScript(false);
+        }
+    }
+
+    return;
+}
+
+pub fn writeYarnLock(this: *PackageManager) !void {
+    var printer = Lockfile.Printer{
+        .lockfile = this.lockfile,
+        .options = this.options,
+    };
+
+    var tmpname_buf: [512]u8 = undefined;
+    tmpname_buf[0..8].* = "tmplock-".*;
+    var tmpfile = FileSystem.RealFS.Tmpfile{};
+    var secret: [32]u8 = undefined;
+    std.mem.writeInt(u64, secret[0..8], @as(u64, @intCast(std.time.milliTimestamp())), .little);
+    var base64_bytes: [64]u8 = undefined;
+    std.crypto.random.bytes(&base64_bytes);
+
+    const tmpname__ = std.fmt.bufPrint(tmpname_buf[8..], "{s}", .{std.fmt.fmtSliceHexLower(&base64_bytes)}) catch unreachable;
+    tmpname_buf[tmpname__.len + 8] = 0;
+    const tmpname = tmpname_buf[0 ..
tmpname__.len + 8 :0]; + + tmpfile.create(&FileSystem.instance.fs, tmpname) catch |err| { + Output.prettyErrorln("error: failed to create tmpfile: {s}", .{@errorName(err)}); + Global.crash(); + }; + + var file = tmpfile.file(); + const file_writer = file.writer(); + var buffered_writer = std.io.BufferedWriter(std.heap.page_size_min, @TypeOf(file_writer)){ + .unbuffered_writer = file_writer, + }; + const writer = buffered_writer.writer(); + try Lockfile.Printer.Yarn.print(&printer, @TypeOf(writer), writer); + try buffered_writer.flush(); + + if (comptime Environment.isPosix) { + _ = bun.c.fchmod( + tmpfile.fd.cast(), + // chmod 666, + 0o0000040 | 0o0000004 | 0o0000002 | 0o0000400 | 0o0000200 | 0o0000020, + ); + } + + try tmpfile.promoteToCWD(tmpname, "yarn.lock"); +} + +const CacheVersion = struct { + pub const current = 1; + pub const Formatter = struct { + version_number: ?usize = null, + + pub fn format(this: *const @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + if (this.version_number) |version| { + try writer.print("@@@{d}", .{version}); + } + } + }; +}; + +const PatchHashFmt = struct { + hash: ?u64 = null, + + pub fn format(this: *const PatchHashFmt, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + if (this.hash) |h| { + try writer.print("_patch_hash={x}", .{h}); + } + } +}; + +var using_fallback_temp_dir: bool = false; + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const DotEnv = bun.DotEnv; +const Environment = bun.Environment; +const FD = bun.FD; +const Global = bun.Global; +const OOM = bun.OOM; +const Output = bun.Output; +const Path = bun.path; +const Progress = bun.Progress; +const default_allocator = bun.default_allocator; +const string = bun.string; +const stringZ = bun.stringZ; +const Command = bun.CLI.Command; +const File = bun.sys.File; + +const Semver = bun.Semver; +const String = Semver.String; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const Lockfile = bun.install.Lockfile; +const PackageID = bun.install.PackageID; +const Repository = bun.install.Repository; +const Resolution = bun.install.Resolution; + +const PackageManager = bun.install.PackageManager; +const Options = PackageManager.Options; +const ProgressStrings = PackageManager.ProgressStrings; diff --git a/src/install/PackageManager/PackageManagerEnqueue.zig b/src/install/PackageManager/PackageManagerEnqueue.zig new file mode 100644 index 0000000000..5fc2717417 --- /dev/null +++ b/src/install/PackageManager/PackageManagerEnqueue.zig @@ -0,0 +1,1815 @@ +pub fn enqueueDependencyWithMain( + this: *PackageManager, + id: DependencyID, + /// This must be a *const to prevent UB + dependency: *const Dependency, + resolution: PackageID, + install_peer: bool, +) !void { + return this.enqueueDependencyWithMainAndSuccessFn( + id, + dependency, + resolution, + install_peer, + assignResolution, + null, + ); +} + +pub fn enqueueDependencyList( + this: *PackageManager, + dependencies_list: Lockfile.DependencySlice, +) void { + this.task_queue.ensureUnusedCapacity(this.allocator, dependencies_list.len) catch unreachable; + const lockfile = this.lockfile; + + // Step 1. 
Go through main dependencies + var begin = dependencies_list.off; + const end = dependencies_list.off +| dependencies_list.len; + + // if dependency is peer and is going to be installed + // through "dependencies", skip it + if (end - begin > 1 and lockfile.buffers.dependencies.items[0].behavior.isPeer()) { + var peer_i: usize = 0; + var peer = &lockfile.buffers.dependencies.items[peer_i]; + while (peer.behavior.isPeer()) { + var dep_i: usize = end - 1; + var dep = lockfile.buffers.dependencies.items[dep_i]; + while (!dep.behavior.isPeer()) { + if (!dep.behavior.isDev()) { + if (peer.name_hash == dep.name_hash) { + peer.* = lockfile.buffers.dependencies.items[begin]; + begin += 1; + break; + } + } + dep_i -= 1; + dep = lockfile.buffers.dependencies.items[dep_i]; + } + peer_i += 1; + if (peer_i == end) break; + peer = &lockfile.buffers.dependencies.items[peer_i]; + } + } + + var i = begin; + + // we have to be very careful with pointers here + while (i < end) : (i += 1) { + const dependency = lockfile.buffers.dependencies.items[i]; + const resolution = lockfile.buffers.resolutions.items[i]; + this.enqueueDependencyWithMain( + i, + &dependency, + resolution, + false, + ) catch |err| { + const note = .{ + .fmt = "error occurred while resolving {}", + .args = .{bun.fmt.fmtPath(u8, lockfile.str(&dependency.realname()), .{ + .path_sep = switch (dependency.version.tag) { + .folder => .auto, + else => .any, + }, + })}, + }; + + if (dependency.behavior.isOptional() or dependency.behavior.isPeer()) + this.log.addWarningWithNote(null, .{}, this.allocator, @errorName(err), note.fmt, note.args) catch unreachable + else + this.log.addZigErrorWithNote(this.allocator, err, note.fmt, note.args) catch unreachable; + + continue; + }; + } + + this.drainDependencyList(); +} + +pub fn enqueueTarballForDownload( + this: *PackageManager, + dependency_id: DependencyID, + package_id: PackageID, + url: string, + task_context: TaskCallbackContext, + patch_name_and_version_hash: ?u64, +) EnqueueTarballForDownloadError!void { + const task_id = Task.Id.forTarball(url); + var task_queue = try this.task_queue.getOrPut(this.allocator, task_id); + if (!task_queue.found_existing) { + task_queue.value_ptr.* = .{}; + } + + try task_queue.value_ptr.append( + this.allocator, + task_context, + ); + + if (task_queue.found_existing) return; + + if (try this.generateNetworkTaskForTarball( + task_id, + url, + this.lockfile.buffers.dependencies.items[dependency_id].behavior.isRequired(), + dependency_id, + this.lockfile.packages.get(package_id), + patch_name_and_version_hash, + .no_authorization, + )) |task| { + task.schedule(&this.network_tarball_batch); + if (this.network_tarball_batch.len > 0) { + _ = this.scheduleTasks(); + } + } +} + +pub fn enqueueTarballForReading( + this: *PackageManager, + dependency_id: DependencyID, + alias: string, + resolution: *const Resolution, + task_context: TaskCallbackContext, +) void { + const path = this.lockfile.str(&resolution.value.local_tarball); + const task_id = Task.Id.forTarball(path); + var task_queue = this.task_queue.getOrPut(this.allocator, task_id) catch unreachable; + if (!task_queue.found_existing) { + task_queue.value_ptr.* = .{}; + } + + task_queue.value_ptr.append( + this.allocator, + task_context, + ) catch unreachable; + + if (task_queue.found_existing) return; + + this.task_batch.push(ThreadPool.Batch.from(enqueueLocalTarball( + this, + task_id, + dependency_id, + alias, + path, + resolution.*, + ))); +} + +pub fn enqueueGitForCheckout( + this: *PackageManager, + 
dependency_id: DependencyID, + alias: string, + resolution: *const Resolution, + task_context: TaskCallbackContext, + patch_name_and_version_hash: ?u64, +) void { + const repository = &resolution.value.git; + const url = this.lockfile.str(&repository.repo); + const clone_id = Task.Id.forGitClone(url); + const resolved = this.lockfile.str(&repository.resolved); + const checkout_id = Task.Id.forGitCheckout(url, resolved); + var checkout_queue = this.task_queue.getOrPut(this.allocator, checkout_id) catch unreachable; + if (!checkout_queue.found_existing) { + checkout_queue.value_ptr.* = .{}; + } + + checkout_queue.value_ptr.append( + this.allocator, + task_context, + ) catch unreachable; + + if (checkout_queue.found_existing) return; + + if (this.git_repositories.get(clone_id)) |repo_fd| { + this.task_batch.push(ThreadPool.Batch.from(this.enqueueGitCheckout(checkout_id, repo_fd, dependency_id, alias, resolution.*, resolved, patch_name_and_version_hash))); + } else { + var clone_queue = this.task_queue.getOrPut(this.allocator, clone_id) catch unreachable; + if (!clone_queue.found_existing) { + clone_queue.value_ptr.* = .{}; + } + + clone_queue.value_ptr.append( + this.allocator, + .{ .dependency = dependency_id }, + ) catch unreachable; + + if (clone_queue.found_existing) return; + + this.task_batch.push(ThreadPool.Batch.from(enqueueGitClone( + this, + clone_id, + alias, + repository, + dependency_id, + &this.lockfile.buffers.dependencies.items[dependency_id], + resolution, + null, + ))); + } +} + +pub fn enqueueParseNPMPackage( + this: *PackageManager, + task_id: Task.Id, + name: strings.StringOrTinyString, + network_task: *NetworkTask, +) *ThreadPool.Task { + var task = this.preallocated_resolve_tasks.get(); + task.* = Task{ + .package_manager = this, + .log = logger.Log.init(this.allocator), + .tag = Task.Tag.package_manifest, + .request = .{ + .package_manifest = .{ + .network = network_task, + .name = name, + }, + }, + .id = task_id, + .data = undefined, + }; + return &task.threadpool_task; +} + +pub fn enqueuePackageForDownload( + this: *PackageManager, + name: []const u8, + dependency_id: DependencyID, + package_id: PackageID, + version: bun.Semver.Version, + url: []const u8, + task_context: TaskCallbackContext, + patch_name_and_version_hash: ?u64, +) EnqueuePackageForDownloadError!void { + const task_id = Task.Id.forNPMPackage(name, version); + var task_queue = try this.task_queue.getOrPut(this.allocator, task_id); + if (!task_queue.found_existing) { + task_queue.value_ptr.* = .{}; + } + + try task_queue.value_ptr.append( + this.allocator, + task_context, + ); + + if (task_queue.found_existing) return; + + const is_required = this.lockfile.buffers.dependencies.items[dependency_id].behavior.isRequired(); + + if (try this.generateNetworkTaskForTarball( + task_id, + url, + is_required, + dependency_id, + this.lockfile.packages.get(package_id), + patch_name_and_version_hash, + .allow_authorization, + )) |task| { + task.schedule(&this.network_tarball_batch); + if (this.network_tarball_batch.len > 0) { + _ = this.scheduleTasks(); + } + } +} + +const DependencyToEnqueue = union(enum) { + pending: DependencyID, + resolution: struct { package_id: PackageID, resolution: Resolution }, + not_found: void, + failure: anyerror, +}; + +pub fn enqueueDependencyToRoot( + this: *PackageManager, + name: []const u8, + version: *const Dependency.Version, + version_buf: []const u8, + behavior: Dependency.Behavior, +) DependencyToEnqueue { + const dep_id = @as(DependencyID, @truncate(brk: { + const str_buf = 
this.lockfile.buffers.string_bytes.items; + for (this.lockfile.buffers.dependencies.items, 0..) |dep, id| { + if (!strings.eqlLong(dep.name.slice(str_buf), name, true)) continue; + if (!dep.version.eql(version, str_buf, version_buf)) continue; + break :brk id; + } + + var builder = this.lockfile.stringBuilder(); + const dummy = Dependency{ + .name = String.init(name, name), + .name_hash = String.Builder.stringHash(name), + .version = version.*, + .behavior = behavior, + }; + dummy.countWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder); + + builder.allocate() catch |err| return .{ .failure = err }; + + const dep = dummy.cloneWithDifferentBuffers(this, name, version_buf, @TypeOf(&builder), &builder) catch unreachable; + builder.clamp(); + const index = this.lockfile.buffers.dependencies.items.len; + this.lockfile.buffers.dependencies.append(this.allocator, dep) catch unreachable; + this.lockfile.buffers.resolutions.append(this.allocator, invalid_package_id) catch unreachable; + if (comptime Environment.allow_assert) bun.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len); + break :brk index; + })); + + if (this.lockfile.buffers.resolutions.items[dep_id] == invalid_package_id) { + this.enqueueDependencyWithMainAndSuccessFn( + dep_id, + &this.lockfile.buffers.dependencies.items[dep_id], + invalid_package_id, + false, + assignRootResolution, + failRootResolution, + ) catch |err| { + return .{ .failure = err }; + }; + } + + const resolution_id = switch (this.lockfile.buffers.resolutions.items[dep_id]) { + invalid_package_id => brk: { + this.drainDependencyList(); + + const Closure = struct { + // https://github.com/ziglang/zig/issues/19586 + pub fn issue_19586_workaround() type { + return struct { + err: ?anyerror = null, + manager: *PackageManager, + pub fn isDone(closure: *@This()) bool { + const manager = closure.manager; + if (manager.pendingTaskCount() > 0) { + manager.runTasks( + void, + {}, + .{ + .onExtract = {}, + .onResolve = {}, + .onPackageManifestError = {}, + .onPackageDownloadError = {}, + }, + false, + manager.options.log_level, + ) catch |err| { + closure.err = err; + return true; + }; + + if (PackageManager.verbose_install and manager.pendingTaskCount() > 0) { + if (PackageManager.hasEnoughTimePassedBetweenWaitingMessages()) Output.prettyErrorln("[PackageManager] waiting for {d} tasks\n", .{closure.manager.pendingTaskCount()}); + } + } + + return manager.pendingTaskCount() == 0; + } + }; + } + }.issue_19586_workaround(); + + if (this.options.log_level.showProgress()) { + this.startProgressBarIfNone(); + } + + var closure = Closure{ .manager = this }; + this.sleepUntil(&closure, &Closure.isDone); + + if (this.options.log_level.showProgress()) { + this.endProgressBar(); + Output.flush(); + } + + if (closure.err) |err| { + return .{ .failure = err }; + } + + break :brk this.lockfile.buffers.resolutions.items[dep_id]; + }, + // we managed to synchronously resolve the dependency + else => |pkg_id| pkg_id, + }; + + if (resolution_id == invalid_package_id) { + return .{ + .not_found = {}, + }; + } + + return .{ + .resolution = .{ + .resolution = this.lockfile.packages.items(.resolution)[resolution_id], + .package_id = resolution_id, + }, + }; +} + +pub fn enqueueNetworkTask(this: *PackageManager, task: *NetworkTask) void { + if (this.network_task_fifo.writableLength() == 0) { + this.flushNetworkQueue(); + } + + this.network_task_fifo.writeItemAssumeCapacity(task); +} + +pub fn enqueuePatchTask(this: *PackageManager, 
task: *PatchTask) void { + debug("Enqueue patch task: 0x{x} {s}", .{ @intFromPtr(task), @tagName(task.callback) }); + if (this.patch_task_fifo.writableLength() == 0) { + this.flushPatchTaskQueue(); + } + + this.patch_task_fifo.writeItemAssumeCapacity(task); +} + +/// We need to calculate all the patchfile hashes at the beginning so we don't run into problems with stale hashes +pub fn enqueuePatchTaskPre(this: *PackageManager, task: *PatchTask) void { + debug("Enqueue patch task pre: 0x{x} {s}", .{ @intFromPtr(task), @tagName(task.callback) }); + task.pre = true; + if (this.patch_task_fifo.writableLength() == 0) { + this.flushPatchTaskQueue(); + } + + this.patch_task_fifo.writeItemAssumeCapacity(task); + _ = this.pending_pre_calc_hashes.fetchAdd(1, .monotonic); +} + +/// Q: "What do we do with a dependency in a package.json?" +/// A: "We enqueue it!" +pub fn enqueueDependencyWithMainAndSuccessFn( + this: *PackageManager, + id: DependencyID, + /// This must be a *const to prevent UB + dependency: *const Dependency, + resolution: PackageID, + install_peer: bool, + comptime successFn: SuccessFn, + comptime failFn: ?FailFn, +) !void { + if (dependency.behavior.isOptionalPeer()) return; + + var name = dependency.realname(); + + var name_hash = switch (dependency.version.tag) { + .dist_tag, .git, .github, .npm, .tarball, .workspace => String.Builder.stringHash(this.lockfile.str(&name)), + else => dependency.name_hash, + }; + + const version = version: { + if (dependency.version.tag == .npm) { + if (this.known_npm_aliases.get(name_hash)) |aliased| { + const group = dependency.version.value.npm.version; + const buf = this.lockfile.buffers.string_bytes.items; + var curr_list: ?*const Semver.Query.List = &aliased.value.npm.version.head; + while (curr_list) |queries| { + var curr: ?*const Semver.Query = &queries.head; + while (curr) |query| { + if (group.satisfies(query.range.left.version, buf, buf) or group.satisfies(query.range.right.version, buf, buf)) { + name = aliased.value.npm.name; + name_hash = String.Builder.stringHash(this.lockfile.str(&name)); + break :version aliased; + } + curr = query.next; + } + curr_list = queries.next; + } + + // fallthrough. 
a package that matches the name of an alias but does not match + // the version should be enqueued as a normal npm dependency, overrides allowed + } + } + + // allow overriding all dependencies unless the dependency is coming directly from an alias, "npm:" or + // if it's a workspaceOnly dependency + if (!dependency.behavior.isWorkspaceOnly() and (dependency.version.tag != .npm or !dependency.version.value.npm.is_alias)) { + if (this.lockfile.overrides.get(name_hash)) |new| { + debug("override: {s} -> {s}", .{ this.lockfile.str(&dependency.version.literal), this.lockfile.str(&new.literal) }); + + name, name_hash = updateNameAndNameHashFromVersionReplacement(this.lockfile, name, name_hash, new); + + if (new.tag == .catalog) { + if (this.lockfile.catalogs.get(this.lockfile, new.value.catalog, name)) |catalog_dep| { + name, name_hash = updateNameAndNameHashFromVersionReplacement(this.lockfile, name, name_hash, catalog_dep.version); + break :version catalog_dep.version; + } + } + + // `name_hash` stays the same + break :version new; + } + + if (dependency.version.tag == .catalog) { + if (this.lockfile.catalogs.get(this.lockfile, dependency.version.value.catalog, name)) |catalog_dep| { + name, name_hash = updateNameAndNameHashFromVersionReplacement(this.lockfile, name, name_hash, catalog_dep.version); + + break :version catalog_dep.version; + } + } + } + + // explicit copy here due to `dependency.version` becoming undefined + // when `getOrPutResolvedPackageWithFindResult` is called and resizes the list. + break :version Dependency.Version{ + .literal = dependency.version.literal, + .tag = dependency.version.tag, + .value = dependency.version.value, + }; + }; + var loaded_manifest: ?Npm.PackageManifest = null; + + switch (version.tag) { + .dist_tag, .folder, .npm => { + retry_from_manifests_ptr: while (true) { + var resolve_result_ = getOrPutResolvedPackage( + this, + name_hash, + name, + dependency, + version, + dependency.behavior, + id, + resolution, + install_peer, + successFn, + ); + + retry_with_new_resolve_result: while (true) { + const resolve_result = resolve_result_ catch |err| { + switch (err) { + error.DistTagNotFound => { + if (dependency.behavior.isRequired()) { + if (failFn) |fail| { + fail( + this, + dependency, + id, + err, + ); + } else { + this.log.addErrorFmt( + null, + logger.Loc.Empty, + this.allocator, + "Package \"{s}\" with tag \"{s}\" not found, but package exists", + .{ + this.lockfile.str(&name), + this.lockfile.str(&version.value.dist_tag.tag), + }, + ) catch unreachable; + } + } + + return; + }, + error.NoMatchingVersion => { + if (dependency.behavior.isRequired()) { + if (failFn) |fail| { + fail( + this, + dependency, + id, + err, + ); + } else { + this.log.addErrorFmt( + null, + logger.Loc.Empty, + this.allocator, + "No version matching \"{s}\" found for specifier \"{s}\" (but package exists)", + .{ + this.lockfile.str(&version.literal), + this.lockfile.str(&name), + }, + ) catch unreachable; + } + } + return; + }, + else => { + if (failFn) |fail| { + fail( + this, + dependency, + id, + err, + ); + return; + } + + return err; + }, + } + }; + + if (resolve_result) |result| { + // First time? 
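+                        // if so, log the resolution (verbose only) and queue the new package's own dependency list for resolution.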
+ if (result.is_first_time) { + if (PackageManager.verbose_install) { + const label: string = this.lockfile.str(&version.literal); + + Output.prettyErrorln(" -> \"{s}\": \"{s}\" -> {s}@{}", .{ + this.lockfile.str(&result.package.name), + label, + this.lockfile.str(&result.package.name), + result.package.resolution.fmt(this.lockfile.buffers.string_bytes.items, .auto), + }); + } + // Resolve dependencies first + if (result.package.dependencies.len > 0) { + try this.lockfile.scratch.dependency_list_queue.writeItem(result.package.dependencies); + } + } + + if (result.task != null) { + switch (result.task.?) { + .network_task => |network_task| { + if (this.getPreinstallState(result.package.meta.id) == .extract) { + this.setPreinstallState(result.package.meta.id, this.lockfile, .extracting); + this.enqueueNetworkTask(network_task); + } + }, + .patch_task => |patch_task| { + if (patch_task.callback == .calc_hash and this.getPreinstallState(result.package.meta.id) == .calc_patch_hash) { + this.setPreinstallState(result.package.meta.id, this.lockfile, .calcing_patch_hash); + this.enqueuePatchTask(patch_task); + } else if (patch_task.callback == .apply and this.getPreinstallState(result.package.meta.id) == .apply_patch) { + this.setPreinstallState(result.package.meta.id, this.lockfile, .applying_patch); + this.enqueuePatchTask(patch_task); + } + }, + } + } + + if (comptime Environment.allow_assert) + debug( + "enqueueDependency({d}, {s}, {s}, {s}) = {d}", + .{ + id, + @tagName(version.tag), + this.lockfile.str(&name), + this.lockfile.str(&version.literal), + result.package.meta.id, + }, + ); + } else if (version.tag.isNPM()) { + const name_str = this.lockfile.str(&name); + const task_id = Task.Id.forManifest(name_str); + + if (comptime Environment.allow_assert) bun.assert(task_id.get() != 0); + + if (comptime Environment.allow_assert) + debug( + "enqueueDependency({d}, {s}, {s}, {s}) = task {d}", + .{ + id, + @tagName(version.tag), + this.lockfile.str(&name), + this.lockfile.str(&version.literal), + task_id, + }, + ); + + if (!dependency.behavior.isPeer() or install_peer) { + if (!this.hasCreatedNetworkTask(task_id, dependency.behavior.isRequired())) { + if (this.options.enable.manifest_cache) { + var expired = false; + if (this.manifests.byNameHashAllowExpired( + this, + this.scopeForPackageName(name_str), + name_hash, + &expired, + .load_from_memory_fallback_to_disk, + )) |manifest| { + loaded_manifest = manifest.*; + + // If it's an exact package version already living in the cache + // We can skip the network request, even if it's beyond the caching period + if (version.tag == .npm and version.value.npm.version.isExact()) { + if (loaded_manifest.?.findByVersion(version.value.npm.version.head.head.range.left.version)) |find_result| { + if (getOrPutResolvedPackageWithFindResult( + this, + name_hash, + name, + dependency, + version, + id, + dependency.behavior, + &loaded_manifest.?, + find_result, + install_peer, + successFn, + ) catch null) |new_resolve_result| { + resolve_result_ = new_resolve_result; + _ = this.network_dedupe_map.remove(task_id); + continue :retry_with_new_resolve_result; + } + } + } + + // Was it recent enough to just load it without the network call? 
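+                            // a cached manifest within its TTL skips the network entirely; resolution retries from memory.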
+ if (this.options.enable.manifest_cache_control and !expired) { + _ = this.network_dedupe_map.remove(task_id); + continue :retry_from_manifests_ptr; + } + } + } + + if (PackageManager.verbose_install) { + Output.prettyErrorln("Enqueue package manifest for download: {s}", .{name_str}); + } + + var network_task = this.getNetworkTask(); + network_task.* = .{ + .package_manager = this, + .callback = undefined, + .task_id = task_id, + .allocator = this.allocator, + }; + try network_task.forManifest( + name_str, + this.allocator, + this.scopeForPackageName(name_str), + if (loaded_manifest) |*manifest| manifest else null, + dependency.behavior.isOptional(), + ); + this.enqueueNetworkTask(network_task); + } + } else { + try this.peer_dependencies.writeItem(id); + return; + } + + var manifest_entry_parse = try this.task_queue.getOrPutContext(this.allocator, task_id, .{}); + if (!manifest_entry_parse.found_existing) { + manifest_entry_parse.value_ptr.* = TaskCallbackList{}; + } + + const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency"; + try manifest_entry_parse.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id)); + } + return; + } + } + return; + }, + .git => { + const dep = &version.value.git; + const res = Resolution{ + .tag = .git, + .value = .{ + .git = dep.*, + }, + }; + + // First: see if we already loaded the git package in-memory + if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| { + successFn(this, id, pkg_id); + return; + } + + const alias = this.lockfile.str(&dependency.name); + const url = this.lockfile.str(&dep.repo); + const clone_id = Task.Id.forGitClone(url); + const ctx = @unionInit( + TaskCallbackContext, + if (successFn == assignRootResolution) "root_dependency" else "dependency", + id, + ); + + if (comptime Environment.allow_assert) + debug( + "enqueueDependency({d}, {s}, {s}, {s}) = {s}", + .{ + id, + @tagName(version.tag), + this.lockfile.str(&name), + this.lockfile.str(&version.literal), + url, + }, + ); + + if (this.git_repositories.get(clone_id)) |repo_fd| { + const resolved = try Repository.findCommit( + this.allocator, + this.env, + this.log, + repo_fd.stdDir(), + alias, + this.lockfile.str(&dep.committish), + clone_id, + ); + const checkout_id = Task.Id.forGitCheckout(url, resolved); + + var entry = this.task_queue.getOrPutContext(this.allocator, checkout_id, .{}) catch unreachable; + if (!entry.found_existing) entry.value_ptr.* = .{}; + if (this.lockfile.buffers.resolutions.items[id] == invalid_package_id) { + try entry.value_ptr.append(this.allocator, ctx); + } + + if (dependency.behavior.isPeer()) { + if (!install_peer) { + try this.peer_dependencies.writeItem(id); + return; + } + } + + if (this.hasCreatedNetworkTask(checkout_id, dependency.behavior.isRequired())) return; + + this.task_batch.push(ThreadPool.Batch.from(this.enqueueGitCheckout( + checkout_id, + repo_fd, + id, + alias, + res, + resolved, + null, + ))); + } else { + var entry = this.task_queue.getOrPutContext(this.allocator, clone_id, .{}) catch unreachable; + if (!entry.found_existing) entry.value_ptr.* = .{}; + try entry.value_ptr.append(this.allocator, ctx); + + if (dependency.behavior.isPeer()) { + if (!install_peer) { + try this.peer_dependencies.writeItem(id); + return; + } + } + + if (this.hasCreatedNetworkTask(clone_id, dependency.behavior.isRequired())) return; + + this.task_batch.push(ThreadPool.Batch.from(enqueueGitClone(this, clone_id, alias, dep, id, dependency, &res, null))); + } + }, + .github 
=> { + const dep = &version.value.github; + const res = Resolution{ + .tag = .github, + .value = .{ + .github = dep.*, + }, + }; + + // First: see if we already loaded the github package in-memory + if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| { + successFn(this, id, pkg_id); + return; + } + + const url = this.allocGitHubURL(dep); + defer this.allocator.free(url); + const task_id = Task.Id.forTarball(url); + var entry = this.task_queue.getOrPutContext(this.allocator, task_id, .{}) catch unreachable; + if (!entry.found_existing) { + entry.value_ptr.* = TaskCallbackList{}; + } + + if (comptime Environment.allow_assert) + debug( + "enqueueDependency({d}, {s}, {s}, {s}) = {s}", + .{ + id, + @tagName(version.tag), + this.lockfile.str(&name), + this.lockfile.str(&version.literal), + url, + }, + ); + + const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency"; + try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id)); + + if (dependency.behavior.isPeer()) { + if (!install_peer) { + try this.peer_dependencies.writeItem(id); + return; + } + } + + if (try this.generateNetworkTaskForTarball( + task_id, + url, + dependency.behavior.isRequired(), + id, + .{ + .name = dependency.name, + .name_hash = dependency.name_hash, + .resolution = res, + }, + null, + .no_authorization, + )) |network_task| { + this.enqueueNetworkTask(network_task); + } + }, + inline .symlink, .workspace => |dependency_tag| { + const _result = getOrPutResolvedPackage( + this, + name_hash, + name, + dependency, + version, + dependency.behavior, + id, + resolution, + install_peer, + successFn, + ) catch |err| brk: { + if (err == error.MissingPackageJSON) { + break :brk @as(?ResolvedPackageResult, null); + } + + return err; + }; + + const workspace_not_found_fmt = + \\Workspace dependency "{[name]s}" not found + \\ + \\Searched in {[search_path]} + \\ + \\Workspace documentation: https://bun.com/docs/install/workspaces + \\ + ; + const link_not_found_fmt = + \\Package "{[name]s}" is not linked + \\ + \\To install a linked package: + \\ bun link my-pkg-name-from-package-json + \\ + \\Tip: the package name is from package.json, which can differ from the folder name. + \\ + ; + if (_result) |result| { + // First time? 
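+                // if so, log the resolution (verbose only); symlink and workspace packages resolve locally, so no download follows.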
+ if (result.is_first_time) { + if (PackageManager.verbose_install) { + const label: string = this.lockfile.str(&version.literal); + + Output.prettyErrorln(" -> \"{s}\": \"{s}\" -> {s}@{}", .{ + this.lockfile.str(&result.package.name), + label, + this.lockfile.str(&result.package.name), + result.package.resolution.fmt(this.lockfile.buffers.string_bytes.items, .auto), + }); + } + // We shouldn't see any dependencies + if (result.package.dependencies.len > 0) { + try this.lockfile.scratch.dependency_list_queue.writeItem(result.package.dependencies); + } + } + + // should not trigger a network call + if (comptime Environment.allow_assert) bun.assert(result.task == null); + + if (comptime Environment.allow_assert) + debug( + "enqueueDependency({d}, {s}, {s}, {s}) = {d}", + .{ + id, + @tagName(version.tag), + this.lockfile.str(&name), + this.lockfile.str(&version.literal), + result.package.meta.id, + }, + ); + } else if (dependency.behavior.isRequired()) { + if (comptime dependency_tag == .workspace) { + this.log.addErrorFmt( + null, + logger.Loc.Empty, + this.allocator, + workspace_not_found_fmt, + .{ + .name = this.lockfile.str(&name), + .search_path = FolderResolution.PackageWorkspaceSearchPathFormatter{ .manager = this, .version = version }, + }, + ) catch unreachable; + } else { + this.log.addErrorFmt( + null, + logger.Loc.Empty, + this.allocator, + link_not_found_fmt, + .{ + .name = this.lockfile.str(&name), + }, + ) catch unreachable; + } + } else if (this.options.log_level.isVerbose()) { + if (comptime dependency_tag == .workspace) { + this.log.addWarningFmt( + null, + logger.Loc.Empty, + this.allocator, + workspace_not_found_fmt, + .{ + .name = this.lockfile.str(&name), + .search_path = FolderResolution.PackageWorkspaceSearchPathFormatter{ .manager = this, .version = version }, + }, + ) catch unreachable; + } else { + this.log.addWarningFmt( + null, + logger.Loc.Empty, + this.allocator, + link_not_found_fmt, + .{ + .name = this.lockfile.str(&name), + }, + ) catch unreachable; + } + } + }, + .tarball => { + const res: Resolution = switch (version.value.tarball.uri) { + .local => |path| .{ + .tag = .local_tarball, + .value = .{ + .local_tarball = path, + }, + }, + .remote => |url| .{ + .tag = .remote_tarball, + .value = .{ + .remote_tarball = url, + }, + }, + }; + + // First: see if we already loaded the tarball package in-memory + if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| { + successFn(this, id, pkg_id); + return; + } + + const url = switch (version.value.tarball.uri) { + .local => |path| this.lockfile.str(&path), + .remote => |url| this.lockfile.str(&url), + }; + const task_id = Task.Id.forTarball(url); + var entry = this.task_queue.getOrPutContext(this.allocator, task_id, .{}) catch unreachable; + if (!entry.found_existing) { + entry.value_ptr.* = TaskCallbackList{}; + } + + if (comptime Environment.allow_assert) + debug( + "enqueueDependency({d}, {s}, {s}, {s}) = {s}", + .{ + id, + @tagName(version.tag), + this.lockfile.str(&name), + this.lockfile.str(&version.literal), + url, + }, + ); + + const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency"; + try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id)); + + if (dependency.behavior.isPeer()) { + if (!install_peer) { + try this.peer_dependencies.writeItem(id); + return; + } + } + + switch (version.value.tarball.uri) { + .local => { + if (this.hasCreatedNetworkTask(task_id, dependency.behavior.isRequired())) return; + + 
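+                    // local tarballs are read and extracted from disk on the thread pool instead of downloaded.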
this.task_batch.push(ThreadPool.Batch.from(enqueueLocalTarball( + this, + task_id, + id, + this.lockfile.str(&dependency.name), + url, + res, + ))); + }, + .remote => { + if (try this.generateNetworkTaskForTarball( + task_id, + url, + dependency.behavior.isRequired(), + id, + .{ + .name = dependency.name, + .name_hash = dependency.name_hash, + .resolution = res, + }, + null, + .no_authorization, + )) |network_task| { + this.enqueueNetworkTask(network_task); + } + }, + } + }, + else => {}, + } +} + +pub fn enqueueExtractNPMPackage( + this: *PackageManager, + tarball: *const ExtractTarball, + network_task: *NetworkTask, +) *ThreadPool.Task { + var task = this.preallocated_resolve_tasks.get(); + task.* = Task{ + .package_manager = this, + .log = logger.Log.init(this.allocator), + .tag = Task.Tag.extract, + .request = .{ + .extract = .{ + .network = network_task, + .tarball = tarball.*, + }, + }, + .id = network_task.task_id, + .data = undefined, + }; + task.request.extract.tarball.skip_verify = !this.options.do.verify_integrity; + return &task.threadpool_task; +} + +fn enqueueGitClone( + this: *PackageManager, + task_id: Task.Id, + name: string, + repository: *const Repository, + dep_id: DependencyID, + dependency: *const Dependency, + res: *const Resolution, + /// if patched then we need to do apply step after network task is done + patch_name_and_version_hash: ?u64, +) *ThreadPool.Task { + var task = this.preallocated_resolve_tasks.get(); + task.* = Task{ + .package_manager = this, + .log = logger.Log.init(this.allocator), + .tag = Task.Tag.git_clone, + .request = .{ + .git_clone = .{ + .name = strings.StringOrTinyString.initAppendIfNeeded( + name, + *FileSystem.FilenameStore, + FileSystem.FilenameStore.instance, + ) catch unreachable, + .url = strings.StringOrTinyString.initAppendIfNeeded( + this.lockfile.str(&repository.repo), + *FileSystem.FilenameStore, + FileSystem.FilenameStore.instance, + ) catch unreachable, + .env = Repository.shared_env.get(this.allocator, this.env), + .dep_id = dep_id, + .res = res.*, + }, + }, + .id = task_id, + .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { + const dep = dependency; + const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not found")) { + .id => |p| p, + .ids => |ps| ps.items[0], // TODO is this correct + }; + const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; + const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h); + pt.callback.apply.task_id = task_id; + break :brk pt; + } else null, + .data = undefined, + }; + return &task.threadpool_task; +} + +pub fn enqueueGitCheckout( + this: *PackageManager, + task_id: Task.Id, + dir: bun.FileDescriptor, + dependency_id: DependencyID, + name: string, + resolution: Resolution, + resolved: string, + /// if patched then we need to do apply step after network task is done + patch_name_and_version_hash: ?u64, +) *ThreadPool.Task { + var task = this.preallocated_resolve_tasks.get(); + task.* = Task{ + .package_manager = this, + .log = logger.Log.init(this.allocator), + .tag = Task.Tag.git_checkout, + .request = .{ + .git_checkout = .{ + .repo_dir = dir, + .resolution = resolution, + .dependency_id = dependency_id, + .name = strings.StringOrTinyString.initAppendIfNeeded( + name, + *FileSystem.FilenameStore, + FileSystem.FilenameStore.instance, + ) catch unreachable, + .url = strings.StringOrTinyString.initAppendIfNeeded( + this.lockfile.str(&resolution.value.git.repo), + *FileSystem.FilenameStore, + 
FileSystem.FilenameStore.instance, + ) catch unreachable, + .resolved = strings.StringOrTinyString.initAppendIfNeeded( + resolved, + *FileSystem.FilenameStore, + FileSystem.FilenameStore.instance, + ) catch unreachable, + .env = Repository.shared_env.get(this.allocator, this.env), + }, + }, + .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { + const dep = this.lockfile.buffers.dependencies.items[dependency_id]; + const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not found")) { + .id => |p| p, + .ids => |ps| ps.items[0], // TODO is this correct + }; + const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; + const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h); + pt.callback.apply.task_id = task_id; + break :brk pt; + } else null, + .id = task_id, + .data = undefined, + }; + return &task.threadpool_task; +} + +fn enqueueLocalTarball( + this: *PackageManager, + task_id: Task.Id, + dependency_id: DependencyID, + name: string, + path: string, + resolution: Resolution, +) *ThreadPool.Task { + var task = this.preallocated_resolve_tasks.get(); + task.* = Task{ + .package_manager = this, + .log = logger.Log.init(this.allocator), + .tag = Task.Tag.local_tarball, + .request = .{ + .local_tarball = .{ + .tarball = .{ + .package_manager = this, + .name = strings.StringOrTinyString.initAppendIfNeeded( + name, + *FileSystem.FilenameStore, + FileSystem.FilenameStore.instance, + ) catch unreachable, + .resolution = resolution, + .cache_dir = this.getCacheDirectory(), + .temp_dir = this.getTemporaryDirectory(), + .dependency_id = dependency_id, + .url = strings.StringOrTinyString.initAppendIfNeeded( + path, + *FileSystem.FilenameStore, + FileSystem.FilenameStore.instance, + ) catch unreachable, + }, + }, + }, + .id = task_id, + .data = undefined, + }; + return &task.threadpool_task; +} + +fn updateNameAndNameHashFromVersionReplacement( + lockfile: *const Lockfile, + original_name: String, + original_name_hash: PackageNameHash, + new_version: Dependency.Version, +) struct { String, PackageNameHash } { + return switch (new_version.tag) { + // only get name hash for npm and dist_tag. git, github, tarball don't have names until after extracting tarball + .dist_tag => .{ new_version.value.dist_tag.name, String.Builder.stringHash(lockfile.str(&new_version.value.dist_tag.name)) }, + .npm => .{ new_version.value.npm.name, String.Builder.stringHash(lockfile.str(&new_version.value.npm.name)) }, + .git => .{ new_version.value.git.package_name, original_name_hash }, + .github => .{ new_version.value.github.package_name, original_name_hash }, + .tarball => .{ new_version.value.tarball.package_name, original_name_hash }, + else => .{ original_name, original_name_hash }, + }; +} + +pub const ResolvedPackageResult = struct { + package: Lockfile.Package, + + /// Is this the first time we've seen this package? 
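+    /// When true, the caller should also enqueue the package's own dependency list.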
+    is_first_time: bool = false,
+
+    task: ?union(enum) {
+        /// Pending network task to schedule
+        network_task: *NetworkTask,
+
+        /// Apply patch task or calc patch hash task
+        patch_task: *PatchTask,
+    } = null,
+};
+
+fn getOrPutResolvedPackageWithFindResult(
+    this: *PackageManager,
+    name_hash: PackageNameHash,
+    name: String,
+    dependency: *const Dependency,
+    version: Dependency.Version,
+    dependency_id: DependencyID,
+    behavior: Behavior,
+    manifest: *const Npm.PackageManifest,
+    find_result: Npm.PackageManifest.FindResult,
+    install_peer: bool,
+    comptime successFn: SuccessFn,
+) !?ResolvedPackageResult {
+    const should_update = this.to_update and
+        // If updating, only update packages in the current workspace
+        this.lockfile.isRootDependency(this, dependency_id) and
+        // no need to do a look up if update requests are empty (`bun update` with no args)
+        (this.update_requests.len == 0 or
+            this.updating_packages.contains(dependency.name.slice(this.lockfile.buffers.string_bytes.items)));
+
+    // Was this package already allocated? Let's reuse the existing one.
+    if (this.lockfile.getPackageID(
+        name_hash,
+        if (should_update) null else version,
+        &.{
+            .tag = .npm,
+            .value = .{
+                .npm = .{
+                    .version = find_result.version,
+                    .url = find_result.package.tarball_url.value,
+                },
+            },
+        },
+    )) |id| {
+        successFn(this, dependency_id, id);
+        return .{
+            .package = this.lockfile.packages.get(id),
+            .is_first_time = false,
+        };
+    } else if (behavior.isPeer() and !install_peer) {
+        return null;
+    }
+
+    // appendPackage sets the PackageID on the package
+    const package = try this.lockfile.appendPackage(try Lockfile.Package.fromNPM(
+        this,
+        this.allocator,
+        this.lockfile,
+        this.log,
+        manifest,
+        find_result.version,
+        find_result.package,
+        manifest.string_buf,
+        Features.npm,
+    ));
+
+    if (comptime Environment.allow_assert) bun.assert(package.meta.id != invalid_package_id);
+    defer successFn(this, dependency_id, package.meta.id);
+
+    // non-null if the package is in "patchedDependencies"
+    var name_and_version_hash: ?u64 = null;
+    var patchfile_hash: ?u64 = null;
+
+    return switch (this.determinePreinstallState(
+        package,
+        this.lockfile,
+        &name_and_version_hash,
+        &patchfile_hash,
+    )) {
+        // Is this package already in the cache?
+        // We don't need to download the tarball, but we should enqueue dependencies
+        .done => .{ .package = package, .is_first_time = true },
+        // Do we need to download the tarball?
+        .extract => extract: {
+            const task_id = Task.Id.forNPMPackage(this.lockfile.str(&name), package.resolution.value.npm.version);
+            bun.debugAssert(!this.network_dedupe_map.contains(task_id));
+
+            break :extract .{
+                .package = package,
+                .is_first_time = true,
+                .task = .{
+                    .network_task = try this.generateNetworkTaskForTarball(
+                        task_id,
+                        manifest.str(&find_result.package.tarball_url),
+                        dependency.behavior.isRequired(),
+                        dependency_id,
+                        package,
+                        name_and_version_hash,
+                        // it's npm.
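+                        // registry downloads may need credentials (scoped/private packages).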
+                        .allow_authorization,
+                    ) orelse unreachable,
+                },
+            };
+        },
+        .calc_patch_hash => .{
+            .package = package,
+            .is_first_time = true,
+            .task = .{
+                .patch_task = PatchTask.newCalcPatchHash(
+                    this,
+                    name_and_version_hash.?,
+                    .{
+                        .pkg_id = package.meta.id,
+                        .dependency_id = dependency_id,
+                        .url = this.allocator.dupe(u8, manifest.str(&find_result.package.tarball_url)) catch bun.outOfMemory(),
+                    },
+                ),
+            },
+        },
+        .apply_patch => .{
+            .package = package,
+            .is_first_time = true,
+            .task = .{
+                .patch_task = PatchTask.newApplyPatchHash(
+                    this,
+                    package.meta.id,
+                    patchfile_hash.?,
+                    name_and_version_hash.?,
+                ),
+            },
+        },
+        else => unreachable,
+    };
+}
+
+fn getOrPutResolvedPackage(
+    this: *PackageManager,
+    name_hash: PackageNameHash,
+    name: String,
+    dependency: *const Dependency,
+    version: Dependency.Version,
+    behavior: Behavior,
+    dependency_id: DependencyID,
+    resolution: PackageID,
+    install_peer: bool,
+    comptime successFn: SuccessFn,
+) !?ResolvedPackageResult {
+    if (install_peer and behavior.isPeer()) {
+        if (this.lockfile.package_index.get(name_hash)) |index| {
+            const resolutions: []Resolution = this.lockfile.packages.items(.resolution);
+            switch (index) {
+                .id => |existing_id| {
+                    if (existing_id < resolutions.len) {
+                        const existing_resolution = resolutions[existing_id];
+                        if (resolutionSatisfiesDependency(this, existing_resolution, version)) {
+                            successFn(this, dependency_id, existing_id);
+                            return .{
+                                // we must fetch it from the packages array again, in case `successFn` mutates the packages array
+                                .package = this.lockfile.packages.get(existing_id),
+                            };
+                        }
+
+                        const res_tag = resolutions[existing_id].tag;
+                        const ver_tag = version.tag;
+                        if ((res_tag == .npm and ver_tag == .npm) or (res_tag == .git and ver_tag == .git) or (res_tag == .github and ver_tag == .github)) {
+                            const existing_package = this.lockfile.packages.get(existing_id);
+                            this.log.addWarningFmt(
+                                null,
+                                logger.Loc.Empty,
+                                this.allocator,
+                                "incorrect peer dependency \"{}@{}\"",
+                                .{
+                                    existing_package.name.fmt(this.lockfile.buffers.string_bytes.items),
+                                    existing_package.resolution.fmt(this.lockfile.buffers.string_bytes.items, .auto),
+                                },
+                            ) catch unreachable;
+                            successFn(this, dependency_id, existing_id);
+                            return .{
+                                // we must fetch it from the packages array again, in case `successFn` mutates the packages array
+                                .package = this.lockfile.packages.get(existing_id),
+                            };
+                        }
+                    }
+                },
+                .ids => |list| {
+                    for (list.items) |existing_id| {
+                        if (existing_id < resolutions.len) {
+                            const existing_resolution = resolutions[existing_id];
+                            if (resolutionSatisfiesDependency(this, existing_resolution, version)) {
+                                successFn(this, dependency_id, existing_id);
+                                return .{
+                                    .package = this.lockfile.packages.get(existing_id),
+                                };
+                            }
+                        }
+                    }
+
+                    if (list.items[0] < resolutions.len) {
+                        const res_tag = resolutions[list.items[0]].tag;
+                        const ver_tag = version.tag;
+                        if ((res_tag == .npm and ver_tag == .npm) or (res_tag == .git and ver_tag == .git) or (res_tag == .github and ver_tag == .github)) {
+                            const existing_package_id = list.items[0];
+                            const existing_package = this.lockfile.packages.get(existing_package_id);
+                            this.log.addWarningFmt(
+                                null,
+                                logger.Loc.Empty,
+                                this.allocator,
+                                "incorrect peer dependency \"{}@{}\"",
+                                .{
+                                    existing_package.name.fmt(this.lockfile.buffers.string_bytes.items),
+                                    existing_package.resolution.fmt(this.lockfile.buffers.string_bytes.items, .auto),
+                                },
+                            ) catch unreachable;
+                            successFn(this, dependency_id, list.items[0]);
+                            return .{
+                                // we must fetch it from the packages array again, in case `successFn` mutates the packages array
+                                .package = this.lockfile.packages.get(existing_package_id),
+                            };
+                        }
+                    }
+                },
+            }
+        }
+    }
+
+    if (resolution < this.lockfile.packages.len) {
+        return .{ .package = this.lockfile.packages.get(resolution) };
+    }
+
+    switch (version.tag) {
+        .npm, .dist_tag => {
+            resolve_from_workspace: {
+                if (version.tag == .npm) {
+                    const workspace_path = if (this.lockfile.workspace_paths.count() > 0) this.lockfile.workspace_paths.get(name_hash) else null;
+                    const workspace_version = this.lockfile.workspace_versions.get(name_hash);
+                    const buf = this.lockfile.buffers.string_bytes.items;
+                    if (this.options.link_workspace_packages and
+                        (((workspace_version != null and version.value.npm.version.satisfies(workspace_version.?, buf, buf)) or
+                            // https://github.com/oven-sh/bun/pull/10899#issuecomment-2099609419
+                            // if the workspace doesn't have a version, it can still be used if
+                            // dependency version is wildcard
+                            (workspace_path != null and version.value.npm.version.@"is *"()))))
+                    {
+                        const root_package = this.lockfile.rootPackage() orelse break :resolve_from_workspace;
+                        const root_dependencies = root_package.dependencies.get(this.lockfile.buffers.dependencies.items);
+                        const root_resolutions = root_package.resolutions.get(this.lockfile.buffers.resolutions.items);
+
+                        for (root_dependencies, root_resolutions) |root_dep, workspace_package_id| {
+                            if (workspace_package_id != invalid_package_id and root_dep.version.tag == .workspace and root_dep.name_hash == name_hash) {
+                                // make sure verifyResolutions sees this resolution as a valid package id
+                                successFn(this, dependency_id, workspace_package_id);
+                                return .{
+                                    .package = this.lockfile.packages.get(workspace_package_id),
+                                    .is_first_time = false,
+                                };
+                            }
+                        }
+                    }
+                }
+            }
+
+            // Resolve the version from the loaded NPM manifest
+            const name_str = this.lockfile.str(&name);
+            const manifest = this.manifests.byNameHash(
+                this,
+                this.scopeForPackageName(name_str),
+                name_hash,
+                .load_from_memory_fallback_to_disk,
+            ) orelse return null; // manifest might still be downloading. This feels unreliable.
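+            // returning null here makes the caller enqueue a manifest download task instead.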
+ const find_result: Npm.PackageManifest.FindResult = switch (version.tag) { + .dist_tag => manifest.findByDistTag(this.lockfile.str(&version.value.dist_tag.tag)), + .npm => manifest.findBestVersion(version.value.npm.version, this.lockfile.buffers.string_bytes.items), + else => unreachable, + } orelse { + resolve_workspace_from_dist_tag: { + // choose a workspace for a dist_tag only if a version was not found + if (version.tag == .dist_tag) { + const workspace_path = if (this.lockfile.workspace_paths.count() > 0) this.lockfile.workspace_paths.get(name_hash) else null; + if (workspace_path != null) { + const root_package = this.lockfile.rootPackage() orelse break :resolve_workspace_from_dist_tag; + const root_dependencies = root_package.dependencies.get(this.lockfile.buffers.dependencies.items); + const root_resolutions = root_package.resolutions.get(this.lockfile.buffers.resolutions.items); + + for (root_dependencies, root_resolutions) |root_dep, workspace_package_id| { + if (workspace_package_id != invalid_package_id and root_dep.version.tag == .workspace and root_dep.name_hash == name_hash) { + // make sure verifyResolutions sees this resolution as a valid package id + successFn(this, dependency_id, workspace_package_id); + return .{ + .package = this.lockfile.packages.get(workspace_package_id), + .is_first_time = false, + }; + } + } + } + } + } + + if (behavior.isPeer()) { + return null; + } + + return switch (version.tag) { + .npm => error.NoMatchingVersion, + .dist_tag => error.DistTagNotFound, + else => unreachable, + }; + }; + + return try getOrPutResolvedPackageWithFindResult( + this, + name_hash, + name, + dependency, + version, + dependency_id, + behavior, + manifest, + find_result, + install_peer, + successFn, + ); + }, + + .folder => { + const res: FolderResolution = res: { + if (this.lockfile.isWorkspaceDependency(dependency_id)) { + // relative to cwd + const folder_path = this.lockfile.str(&version.value.folder); + var buf2: bun.PathBuffer = undefined; + const folder_path_abs = if (std.fs.path.isAbsolute(folder_path)) folder_path else blk: { + break :blk Path.joinAbsStringBuf( + FileSystem.instance.top_level_dir, + &buf2, + &.{folder_path}, + .auto, + ); + // break :blk Path.joinAbsStringBuf( + // strings.withoutSuffixComptime(this.original_package_json_path, "package.json"), + // &buf2, + // &[_]string{folder_path}, + // .auto, + // ); + }; + + // if (strings.eqlLong(strings.withoutTrailingSlash(folder_path_abs), strings.withoutTrailingSlash(FileSystem.instance.top_level_dir), true)) { + // successFn(this, dependency_id, 0); + // return .{ .package = this.lockfile.packages.get(0) }; + // } + + break :res FolderResolution.getOrPut(.{ .relative = .folder }, version, folder_path_abs, this); + } + + // transitive folder dependencies do not have their dependencies resolved + var name_slice = this.lockfile.str(&name); + var folder_path = this.lockfile.str(&version.value.folder); + var package = Lockfile.Package{}; + + { + // only need name and path + var builder = this.lockfile.stringBuilder(); + + builder.count(name_slice); + builder.count(folder_path); + + builder.allocate() catch bun.outOfMemory(); + + name_slice = this.lockfile.str(&name); + folder_path = this.lockfile.str(&version.value.folder); + + package.name = builder.append(String, name_slice); + package.name_hash = name_hash; + + package.resolution = Resolution.init(.{ + .folder = builder.append(String, folder_path), + }); + + package.scripts.filled = true; + package.meta.setHasInstallScript(false); + + 
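+                    // at this point only the name and folder path live in the new string buffer.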
builder.clamp(); + } + + // these are always new + package = this.lockfile.appendPackage(package) catch bun.outOfMemory(); + + break :res .{ + .new_package_id = package.meta.id, + }; + }; + + switch (res) { + .err => |err| return err, + .package_id => |package_id| { + successFn(this, dependency_id, package_id); + return .{ .package = this.lockfile.packages.get(package_id) }; + }, + + .new_package_id => |package_id| { + successFn(this, dependency_id, package_id); + return .{ .package = this.lockfile.packages.get(package_id), .is_first_time = true }; + }, + } + }, + .workspace => { + // package name hash should be used to find workspace path from map + const workspace_path_raw: *const String = this.lockfile.workspace_paths.getPtr(name_hash) orelse &version.value.workspace; + const workspace_path = this.lockfile.str(workspace_path_raw); + var buf2: bun.PathBuffer = undefined; + const workspace_path_u8 = if (std.fs.path.isAbsolute(workspace_path)) workspace_path else blk: { + break :blk Path.joinAbsStringBuf(FileSystem.instance.top_level_dir, &buf2, &[_]string{workspace_path}, .auto); + }; + + const res = FolderResolution.getOrPut(.{ .relative = .workspace }, version, workspace_path_u8, this); + + switch (res) { + .err => |err| return err, + .package_id => |package_id| { + successFn(this, dependency_id, package_id); + return .{ .package = this.lockfile.packages.get(package_id) }; + }, + + .new_package_id => |package_id| { + successFn(this, dependency_id, package_id); + return .{ .package = this.lockfile.packages.get(package_id), .is_first_time = true }; + }, + } + }, + .symlink => { + const res = FolderResolution.getOrPut(.{ .global = this.globalLinkDirPath() }, version, this.lockfile.str(&version.value.symlink), this); + + switch (res) { + .err => |err| return err, + .package_id => |package_id| { + successFn(this, dependency_id, package_id); + return .{ .package = this.lockfile.packages.get(package_id) }; + }, + + .new_package_id => |package_id| { + successFn(this, dependency_id, package_id); + return .{ .package = this.lockfile.packages.get(package_id), .is_first_time = true }; + }, + } + }, + + else => return null, + } +} + +fn resolutionSatisfiesDependency(this: *PackageManager, resolution: Resolution, dependency: Dependency.Version) bool { + const buf = this.lockfile.buffers.string_bytes.items; + if (resolution.tag == .npm and dependency.tag == .npm) { + return dependency.value.npm.version.satisfies(resolution.value.npm.version, buf, buf); + } + + if (resolution.tag == .git and dependency.tag == .git) { + return resolution.value.git.eql(&dependency.value.git, buf, buf); + } + + if (resolution.tag == .github and dependency.tag == .github) { + return resolution.value.github.eql(&dependency.value.github, buf, buf); + } + + return false; +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const Output = bun.Output; +const Path = bun.path; +const ThreadPool = bun.ThreadPool; +const logger = bun.logger; +const string = bun.string; +const strings = bun.strings; + +const Semver = bun.Semver; +const String = Semver.String; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const Behavior = bun.install.Behavior; +const Dependency = bun.install.Dependency; +const DependencyID = bun.install.DependencyID; +const ExtractTarball = bun.install.ExtractTarball; +const Features = bun.install.Features; +const FolderResolution = bun.install.FolderResolution; +const Npm = bun.install.Npm; +const PackageID = bun.install.PackageID; 
+const PackageNameHash = bun.install.PackageNameHash; +const PatchTask = bun.install.PatchTask; +const Repository = bun.install.Repository; +const Resolution = bun.install.Resolution; +const Task = bun.install.Task; +const TaskCallbackContext = bun.install.TaskCallbackContext; +const invalid_package_id = bun.install.invalid_package_id; + +const Lockfile = bun.install.Lockfile; +const Package = Lockfile.Package; + +const NetworkTask = bun.install.NetworkTask; +const EnqueuePackageForDownloadError = NetworkTask.ForTarballError; +const EnqueueTarballForDownloadError = NetworkTask.ForTarballError; + +const PackageManager = bun.install.PackageManager; +const FailFn = PackageManager.FailFn; +const SuccessFn = PackageManager.SuccessFn; +const TaskCallbackList = PackageManager.TaskCallbackList; +const assignResolution = PackageManager.assignResolution; +const assignRootResolution = PackageManager.assignRootResolution; +const debug = PackageManager.debug; +const failRootResolution = PackageManager.failRootResolution; diff --git a/src/install/PackageManager/PackageManagerLifecycle.zig b/src/install/PackageManager/PackageManagerLifecycle.zig new file mode 100644 index 0000000000..1ba1aec35d --- /dev/null +++ b/src/install/PackageManager/PackageManagerLifecycle.zig @@ -0,0 +1,392 @@ +pub const LifecycleScriptTimeLog = struct { + const Entry = struct { + package_name: string, + script_id: u8, + + // nanosecond duration + duration: u64, + }; + + mutex: bun.Mutex = .{}, + list: std.ArrayListUnmanaged(Entry) = .{}, + + pub fn appendConcurrent(log: *LifecycleScriptTimeLog, allocator: std.mem.Allocator, entry: Entry) void { + log.mutex.lock(); + defer log.mutex.unlock(); + log.list.append(allocator, entry) catch bun.outOfMemory(); + } + + /// this can be called if .start was never called + pub fn printAndDeinit(log: *LifecycleScriptTimeLog, allocator: std.mem.Allocator) void { + if (Environment.isDebug) { + if (!log.mutex.tryLock()) @panic("LifecycleScriptTimeLog.print is not intended to be thread-safe"); + log.mutex.unlock(); + } + + if (log.list.items.len > 0) { + const longest: Entry = longest: { + var i: usize = 0; + var longest: u64 = log.list.items[0].duration; + for (log.list.items[1..], 1..) 
|item, j| { + if (item.duration > longest) { + i = j; + longest = item.duration; + } + } + break :longest log.list.items[i]; + }; + + // extra \n will print a blank line after this one + Output.warn("{s}'s {s} script took {}\n\n", .{ + longest.package_name, + Lockfile.Scripts.names[longest.script_id], + bun.fmt.fmtDurationOneDecimal(longest.duration), + }); + Output.flush(); + } + log.list.deinit(allocator); + } +}; + +pub fn ensurePreinstallStateListCapacity(this: *PackageManager, count: usize) void { + if (this.preinstall_state.items.len >= count) { + return; + } + + const offset = this.preinstall_state.items.len; + this.preinstall_state.ensureTotalCapacity(this.allocator, count) catch bun.outOfMemory(); + this.preinstall_state.expandToCapacity(); + @memset(this.preinstall_state.items[offset..], PreinstallState.unknown); +} + +pub fn setPreinstallState(this: *PackageManager, package_id: PackageID, lockfile: *const Lockfile, value: PreinstallState) void { + this.ensurePreinstallStateListCapacity(lockfile.packages.len); + this.preinstall_state.items[package_id] = value; +} + +pub fn getPreinstallState(this: *PackageManager, package_id: PackageID) PreinstallState { + if (package_id >= this.preinstall_state.items.len) { + return PreinstallState.unknown; + } + return this.preinstall_state.items[package_id]; +} + +pub fn determinePreinstallState( + manager: *PackageManager, + pkg: Package, + lockfile: *Lockfile, + out_name_and_version_hash: *?u64, + out_patchfile_hash: *?u64, +) PreinstallState { + switch (manager.getPreinstallState(pkg.meta.id)) { + .unknown => { + + // Do not automatically start downloading packages which are disabled + // i.e. don't download all of esbuild's versions or SWCs + if (pkg.isDisabled()) { + manager.setPreinstallState(pkg.meta.id, lockfile, .done); + return .done; + } + + const patch_hash: ?u64 = brk: { + if (manager.lockfile.patched_dependencies.entries.len == 0) break :brk null; + var sfb = std.heap.stackFallback(1024, manager.lockfile.allocator); + const name_and_version = std.fmt.allocPrint( + sfb.get(), + "{s}@{}", + .{ + pkg.name.slice(manager.lockfile.buffers.string_bytes.items), + pkg.resolution.fmt(manager.lockfile.buffers.string_bytes.items, .posix), + }, + ) catch unreachable; + const name_and_version_hash = String.Builder.stringHash(name_and_version); + const patched_dep = manager.lockfile.patched_dependencies.get(name_and_version_hash) orelse break :brk null; + defer out_name_and_version_hash.* = name_and_version_hash; + if (patched_dep.patchfile_hash_is_null) { + manager.setPreinstallState(pkg.meta.id, manager.lockfile, .calc_patch_hash); + return .calc_patch_hash; + } + out_patchfile_hash.* = patched_dep.patchfileHash().?; + break :brk patched_dep.patchfileHash().?; + }; + + const folder_path = switch (pkg.resolution.tag) { + .git => manager.cachedGitFolderNamePrintAuto(&pkg.resolution.value.git, patch_hash), + .github => manager.cachedGitHubFolderNamePrintAuto(&pkg.resolution.value.github, patch_hash), + .npm => manager.cachedNPMPackageFolderName(lockfile.str(&pkg.name), pkg.resolution.value.npm.version, patch_hash), + .local_tarball => manager.cachedTarballFolderName(pkg.resolution.value.local_tarball, patch_hash), + .remote_tarball => manager.cachedTarballFolderName(pkg.resolution.value.remote_tarball, patch_hash), + else => "", + }; + + if (folder_path.len == 0) { + manager.setPreinstallState(pkg.meta.id, lockfile, .extract); + return .extract; + } + + if (manager.isFolderInCache(folder_path)) { + manager.setPreinstallState(pkg.meta.id, 
lockfile, .done); + return .done; + } + + // If the package is patched, then `folder_path` looks like: + // is-even@1.0.0_patch_hash=abc8s6dedhsddfkahaldfjhlj + // + // If that's not in the cache, we need to put it there: + // 1. extract the non-patched pkg in the cache + // 2. copy non-patched pkg into temp dir + // 3. apply patch to temp dir + // 4. rename temp dir to `folder_path` + if (patch_hash != null) { + const non_patched_path_ = folder_path[0 .. std.mem.indexOf(u8, folder_path, "_patch_hash=") orelse @panic("Expected folder path to contain `patch_hash=`, this is a bug in Bun. Please file a GitHub issue.")]; + const non_patched_path = manager.lockfile.allocator.dupeZ(u8, non_patched_path_) catch bun.outOfMemory(); + defer manager.lockfile.allocator.free(non_patched_path); + if (manager.isFolderInCache(non_patched_path)) { + manager.setPreinstallState(pkg.meta.id, manager.lockfile, .apply_patch); + // yay step 1 is already done for us + return .apply_patch; + } + // we need to extract non-patched pkg into the cache + manager.setPreinstallState(pkg.meta.id, lockfile, .extract); + return .extract; + } + + manager.setPreinstallState(pkg.meta.id, lockfile, .extract); + return .extract; + }, + else => |val| return val, + } +} + +pub fn hasNoMorePendingLifecycleScripts(this: *PackageManager) bool { + this.reportSlowLifecycleScripts(); + return this.pending_lifecycle_script_tasks.load(.monotonic) == 0; +} + +pub fn tickLifecycleScripts(this: *PackageManager) void { + this.event_loop.tickOnce(this); +} + +pub fn sleep(this: *PackageManager) void { + this.reportSlowLifecycleScripts(); + Output.flush(); + this.event_loop.tick(this, hasNoMorePendingLifecycleScripts); +} + +pub fn reportSlowLifecycleScripts(this: *PackageManager) void { + const log_level = this.options.log_level; + if (log_level == .silent) return; + if (bun.getRuntimeFeatureFlag(.BUN_DISABLE_SLOW_LIFECYCLE_SCRIPT_LOGGING)) { + return; + } + + if (this.active_lifecycle_scripts.peek()) |active_lifecycle_script_running_for_the_longest_amount_of_time| { + if (this.cached_tick_for_slow_lifecycle_script_logging == this.event_loop.iterationNumber()) { + return; + } + this.cached_tick_for_slow_lifecycle_script_logging = this.event_loop.iterationNumber(); + const current_time = bun.timespec.now().ns(); + const time_running = current_time -| active_lifecycle_script_running_for_the_longest_amount_of_time.started_at; + const interval: u64 = if (log_level.isVerbose()) std.time.ns_per_s * 5 else std.time.ns_per_s * 30; + if (time_running > interval and current_time -| this.last_reported_slow_lifecycle_script_at > interval) { + this.last_reported_slow_lifecycle_script_at = current_time; + const package_name = active_lifecycle_script_running_for_the_longest_amount_of_time.package_name; + + if (!(package_name.len > 1 and package_name[package_name.len - 1] == 's')) { + Output.warn("{s}'s postinstall cost you {}\n", .{ + package_name, + bun.fmt.fmtDurationOneDecimal(time_running), + }); + } else { + Output.warn("{s}' postinstall cost you {}\n", .{ + package_name, + bun.fmt.fmtDurationOneDecimal(time_running), + }); + } + Output.flush(); + } + } +} + +pub fn loadRootLifecycleScripts(this: *PackageManager, root_package: Package) void { + const binding_dot_gyp_path = Path.joinAbsStringZ( + Fs.FileSystem.instance.top_level_dir, + &[_]string{"binding.gyp"}, + .auto, + ); + + const buf = this.lockfile.buffers.string_bytes.items; + // need to clone because this is a copy before Lockfile.cleanWithLogger + const name = root_package.name.slice(buf); + + 
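+ // If the root package defines no preinstall/install script but a binding.gyp file
+ // exists, a default `node-gyp rebuild` install script is added below.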
var top_level_dir: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); + defer top_level_dir.deinit(); + + if (root_package.scripts.hasAny()) { + const add_node_gyp_rebuild_script = root_package.scripts.install.isEmpty() and root_package.scripts.preinstall.isEmpty() and Syscall.exists(binding_dot_gyp_path); + + this.root_lifecycle_scripts = root_package.scripts.createList( + this.lockfile, + buf, + &top_level_dir, + name, + .root, + add_node_gyp_rebuild_script, + ); + } else { + if (Syscall.exists(binding_dot_gyp_path)) { + // no scripts exist but auto node gyp script needs to be added + this.root_lifecycle_scripts = root_package.scripts.createList( + this.lockfile, + buf, + &top_level_dir, + name, + .root, + true, + ); + } + } +} + +pub fn spawnPackageLifecycleScripts( + this: *PackageManager, + ctx: Command.Context, + list: Lockfile.Package.Scripts.List, + optional: bool, + foreground: bool, + install_ctx: ?LifecycleScriptSubprocess.InstallCtx, +) !void { + const log_level = this.options.log_level; + var any_scripts = false; + for (list.items) |maybe_item| { + if (maybe_item != null) { + any_scripts = true; + break; + } + } + if (!any_scripts) { + return; + } + + try this.ensureTempNodeGypScript(); + + const cwd = list.cwd; + var this_transpiler = try this.configureEnvForScripts(ctx, log_level); + + var script_env = try this_transpiler.env.map.cloneWithAllocator(bun.default_allocator); + defer script_env.map.deinit(); + + const original_path = script_env.get("PATH") orelse ""; + + var PATH: bun.EnvPath(.{}) = try .initCapacity(bun.default_allocator, original_path.len + 1 + "node_modules/.bin".len + cwd.len + 1); + defer PATH.deinit(); + + var parent: ?string = cwd; + + while (parent) |dir| { + var builder = PATH.pathComponentBuilder(); + builder.append(dir); + builder.append("node_modules/.bin"); + try builder.apply(); + + parent = std.fs.path.dirname(dir); + } + + try PATH.append(original_path); + try script_env.put("PATH", PATH.slice()); + + const envp = try script_env.createNullDelimitedEnvMap(this.allocator); + + const shell_bin = shell_bin: { + if (comptime Environment.isWindows) { + break :shell_bin null; + } + + if (this.env.get("PATH")) |env_path| { + break :shell_bin bun.CLI.RunCommand.findShell(env_path, cwd); + } + + break :shell_bin null; + }; + + try LifecycleScriptSubprocess.spawnPackageScripts(this, list, envp, shell_bin, optional, log_level, foreground, install_ctx); +} + +pub fn findTrustedDependenciesFromUpdateRequests(this: *PackageManager) std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void) { + const parts = this.lockfile.packages.slice(); + // find all deps originating from --trust packages from cli + var set: std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void) = .{}; + if (this.options.do.trust_dependencies_from_args and this.lockfile.packages.len > 0) { + const root_deps = parts.items(.dependencies)[this.root_package_id.get(this.lockfile, this.workspace_name_hash)]; + var dep_id = root_deps.off; + const end = dep_id +| root_deps.len; + while (dep_id < end) : (dep_id += 1) { + const root_dep = this.lockfile.buffers.dependencies.items[dep_id]; + for (this.update_requests) |request| { + if (request.matches(root_dep, this.lockfile.buffers.string_bytes.items)) { + const package_id = this.lockfile.buffers.resolutions.items[dep_id]; + if (package_id == invalid_package_id) continue; + + const entry = set.getOrPut(this.lockfile.allocator, @truncate(root_dep.name_hash)) catch bun.outOfMemory(); + if (!entry.found_existing) { + const dependency_slice = 
parts.items(.dependencies)[package_id]; + addDependenciesToSet(&set, this.lockfile, dependency_slice); + } + break; + } + } + } + } + + return set; +} + +fn addDependenciesToSet( + names: *std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void), + lockfile: *Lockfile, + dependencies_slice: Lockfile.DependencySlice, +) void { + const begin = dependencies_slice.off; + const end = begin +| dependencies_slice.len; + var dep_id = begin; + while (dep_id < end) : (dep_id += 1) { + const package_id = lockfile.buffers.resolutions.items[dep_id]; + if (package_id == invalid_package_id) continue; + + const dep = lockfile.buffers.dependencies.items[dep_id]; + const entry = names.getOrPut(lockfile.allocator, @truncate(dep.name_hash)) catch bun.outOfMemory(); + if (!entry.found_existing) { + const dependency_slice = lockfile.packages.items(.dependencies)[package_id]; + addDependenciesToSet(names, lockfile, dependency_slice); + } + } +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const Output = bun.Output; +const Path = bun.path; +const Syscall = bun.sys; +const default_allocator = bun.default_allocator; +const string = bun.string; +const Command = bun.CLI.Command; + +const Semver = bun.Semver; +const String = Semver.String; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess; +const PackageID = bun.install.PackageID; +const PackageManager = bun.install.PackageManager; +const PreinstallState = bun.install.PreinstallState; +const TruncatedPackageNameHash = bun.install.TruncatedPackageNameHash; +const invalid_package_id = bun.install.invalid_package_id; + +const Lockfile = bun.install.Lockfile; +const Package = Lockfile.Package; diff --git a/src/install/PackageManager/PackageManagerOptions.zig b/src/install/PackageManager/PackageManagerOptions.zig index 6e2275848f..764d352a3a 100644 --- a/src/install/PackageManager/PackageManagerOptions.zig +++ b/src/install/PackageManager/PackageManagerOptions.zig @@ -57,6 +57,13 @@ save_text_lockfile: ?bool = null, lockfile_only: bool = false, +// `bun pm version` command options +git_tag_version: bool = true, +allow_same_version: bool = false, +preid: string = "", +message: ?string = null, +force: bool = false, + pub const PublishConfig = struct { access: ?Access = null, tag: string = "", @@ -584,6 +591,13 @@ pub fn load( if (cli.ca_file_name.len > 0) { this.ca_file_name = cli.ca_file_name; } + + // `bun pm version` command options + this.git_tag_version = cli.git_tag_version; + this.allow_same_version = cli.allow_same_version; + this.preid = cli.preid; + this.message = cli.message; + this.force = cli.force; } else { this.log_level = if (default_disable_progress_bar) LogLevel.default_no_progress else LogLevel.default; PackageManager.verbose_install = false; diff --git a/src/install/PackageManager/PackageManagerResolution.zig b/src/install/PackageManager/PackageManagerResolution.zig new file mode 100644 index 0000000000..18f2e156b3 --- /dev/null +++ b/src/install/PackageManager/PackageManagerResolution.zig @@ -0,0 +1,243 @@ +pub fn formatLaterVersionInCache( + this: *PackageManager, + package_name: string, + name_hash: PackageNameHash, + resolution: Resolution, +) ?Semver.Version.Formatter { + switch (resolution.tag) { + Resolution.Tag.npm => { + if (resolution.value.npm.version.tag.hasPre()) + // TODO: + return null; + + const manifest = this.manifests.byNameHash( + this, + 
this.scopeForPackageName(package_name), + name_hash, + .load_from_memory, + ) orelse return null; + + if (manifest.findByDistTag("latest")) |*latest_version| { + if (latest_version.version.order( + resolution.value.npm.version, + manifest.string_buf, + this.lockfile.buffers.string_bytes.items, + ) != .gt) return null; + return latest_version.version.fmt(manifest.string_buf); + } + + return null; + }, + else => return null, + } +} + +pub fn scopeForPackageName(this: *const PackageManager, name: string) *const Npm.Registry.Scope { + if (name.len == 0 or name[0] != '@') return &this.options.scope; + return this.options.registries.getPtr( + Npm.Registry.Scope.hash( + Npm.Registry.Scope.getName(name), + ), + ) orelse &this.options.scope; +} + +pub fn getInstalledVersionsFromDiskCache(this: *PackageManager, tags_buf: *std.ArrayList(u8), package_name: []const u8, allocator: std.mem.Allocator) !std.ArrayList(Semver.Version) { + var list = std.ArrayList(Semver.Version).init(allocator); + var dir = this.getCacheDirectory().openDir(package_name, .{ + .iterate = true, + }) catch |err| switch (err) { + error.FileNotFound, error.NotDir, error.AccessDenied, error.DeviceBusy => return list, + else => return err, + }; + defer dir.close(); + var iter = dir.iterate(); + + while (try iter.next()) |entry| { + if (entry.kind != .directory and entry.kind != .sym_link) continue; + const name = entry.name; + const sliced = SlicedString.init(name, name); + const parsed = Semver.Version.parse(sliced); + if (!parsed.valid or parsed.wildcard != .none) continue; + // not handling OOM + // TODO: wildcard + var version = parsed.version.min(); + const total = version.tag.build.len() + version.tag.pre.len(); + if (total > 0) { + tags_buf.ensureUnusedCapacity(total) catch unreachable; + var available = tags_buf.items.ptr[tags_buf.items.len..tags_buf.capacity]; + const new_version = version.cloneInto(name, &available); + tags_buf.items.len += total; + version = new_version; + } + + list.append(version) catch unreachable; + } + + return list; +} + +pub fn resolveFromDiskCache(this: *PackageManager, package_name: []const u8, version: Dependency.Version) ?PackageID { + if (version.tag != .npm) { + // only npm supported right now + // tags are more ambiguous + return null; + } + + var arena = bun.ArenaAllocator.init(this.allocator); + defer arena.deinit(); + const arena_alloc = arena.allocator(); + var stack_fallback = std.heap.stackFallback(4096, arena_alloc); + const allocator = stack_fallback.get(); + var tags_buf = std.ArrayList(u8).init(allocator); + const installed_versions = this.getInstalledVersionsFromDiskCache(&tags_buf, package_name, allocator) catch |err| { + Output.debug("error getting installed versions from disk cache: {s}", .{bun.span(@errorName(err))}); + return null; + }; + + // TODO: make this fewer passes + std.sort.pdq( + Semver.Version, + installed_versions.items, + @as([]const u8, tags_buf.items), + Semver.Version.sortGt, + ); + for (installed_versions.items) |installed_version| { + if (version.value.npm.version.satisfies(installed_version, this.lockfile.buffers.string_bytes.items, tags_buf.items)) { + var buf: bun.PathBuffer = undefined; + const npm_package_path = this.pathForCachedNPMPath(&buf, package_name, installed_version) catch |err| { + Output.debug("error getting path for cached npm path: {s}", .{bun.span(@errorName(err))}); + return null; + }; + const dependency = Dependency.Version{ + .tag = .npm, + .value = .{ + .npm = .{ + .name = String.init(package_name, package_name), + .version = 
Semver.Query.Group.from(installed_version), + }, + }, + }; + switch (FolderResolution.getOrPut(.{ .cache_folder = npm_package_path }, dependency, ".", this)) { + .new_package_id => |id| { + this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id]); + return id; + }, + .package_id => |id| { + this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id]); + return id; + }, + .err => |err| { + Output.debug("error getting or putting folder resolution: {s}", .{bun.span(@errorName(err))}); + return null; + }, + } + } + } + + return null; +} + +pub fn assignResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void { + const buffers = &this.lockfile.buffers; + if (comptime Environment.allow_assert) { + bun.assert(dependency_id < buffers.resolutions.items.len); + bun.assert(package_id < this.lockfile.packages.len); + // bun.assert(buffers.resolutions.items[dependency_id] == invalid_package_id); + } + buffers.resolutions.items[dependency_id] = package_id; + const string_buf = buffers.string_bytes.items; + var dep = &buffers.dependencies.items[dependency_id]; + if (dep.name.isEmpty() or strings.eql(dep.name.slice(string_buf), dep.version.literal.slice(string_buf))) { + dep.name = this.lockfile.packages.items(.name)[package_id]; + dep.name_hash = this.lockfile.packages.items(.name_hash)[package_id]; + } +} + +pub fn assignRootResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void { + const buffers = &this.lockfile.buffers; + if (comptime Environment.allow_assert) { + bun.assert(dependency_id < buffers.resolutions.items.len); + bun.assert(package_id < this.lockfile.packages.len); + bun.assert(buffers.resolutions.items[dependency_id] == invalid_package_id); + } + buffers.resolutions.items[dependency_id] = package_id; + const string_buf = buffers.string_bytes.items; + var dep = &buffers.dependencies.items[dependency_id]; + if (dep.name.isEmpty() or strings.eql(dep.name.slice(string_buf), dep.version.literal.slice(string_buf))) { + dep.name = this.lockfile.packages.items(.name)[package_id]; + dep.name_hash = this.lockfile.packages.items(.name_hash)[package_id]; + } +} + +pub fn verifyResolutions(this: *PackageManager, log_level: PackageManager.Options.LogLevel) void { + const lockfile = this.lockfile; + const resolutions_lists: []const Lockfile.DependencyIDSlice = lockfile.packages.items(.resolutions); + const dependency_lists: []const Lockfile.DependencySlice = lockfile.packages.items(.dependencies); + const pkg_resolutions = lockfile.packages.items(.resolution); + const dependencies_buffer = lockfile.buffers.dependencies.items; + const resolutions_buffer = lockfile.buffers.resolutions.items; + const end: PackageID = @truncate(lockfile.packages.len); + + var any_failed = false; + const string_buf = lockfile.buffers.string_bytes.items; + + for (resolutions_lists, dependency_lists, 0..) |resolution_list, dependency_list, parent_id| { + for (resolution_list.get(resolutions_buffer), dependency_list.get(dependencies_buffer)) |package_id, failed_dep| { + if (package_id < end) continue; + + // TODO lockfile rewrite: remove this and make non-optional peer dependencies error if they did not resolve. + // Need to keep this for now because old lockfiles might have a peer dependency without the optional flag set. 
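+ // Skipping peers here means an unresolved peer dependency is tolerated rather than treated as a resolution failure.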
+ if (failed_dep.behavior.isPeer()) continue; + + const features = switch (pkg_resolutions[parent_id].tag) { + .root, .workspace, .folder => this.options.local_package_features, + else => this.options.remote_package_features, + }; + // even if optional dependencies are enabled, it's still allowed to fail + if (failed_dep.behavior.optional or !failed_dep.behavior.isEnabled(features)) continue; + + if (log_level != .silent) { + if (failed_dep.name.isEmpty() or strings.eqlLong(failed_dep.name.slice(string_buf), failed_dep.version.literal.slice(string_buf), true)) { + Output.errGeneric("{} failed to resolve", .{ + failed_dep.version.literal.fmt(string_buf), + }); + } else { + Output.errGeneric("{s}@{} failed to resolve", .{ + failed_dep.name.slice(string_buf), + failed_dep.version.literal.fmt(string_buf), + }); + } + } + // track this so we can log each failure instead of just the first + any_failed = true; + } + } + + if (any_failed) this.crash(); +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const OOM = bun.OOM; +const Output = bun.Output; +const string = bun.string; +const strings = bun.strings; + +const Semver = bun.Semver; +const SlicedString = Semver.SlicedString; +const String = Semver.String; + +const Dependency = bun.install.Dependency; +const DependencyID = bun.install.DependencyID; +const FolderResolution = bun.install.FolderResolution; +const Lockfile = bun.install.Lockfile; +const Npm = bun.install.Npm; +const PackageID = bun.install.PackageID; +const PackageManager = bun.install.PackageManager; +const PackageNameHash = bun.install.PackageNameHash; +const Resolution = bun.install.Resolution; +const invalid_package_id = bun.install.invalid_package_id; diff --git a/src/install/PackageManager/ProgressStrings.zig b/src/install/PackageManager/ProgressStrings.zig new file mode 100644 index 0000000000..7534c2224a --- /dev/null +++ b/src/install/PackageManager/ProgressStrings.zig @@ -0,0 +1,101 @@ +pub const ProgressStrings = struct { + pub const download_no_emoji_ = "Resolving"; + const download_no_emoji: string = download_no_emoji_ ++ "\n"; + const download_with_emoji: string = download_emoji ++ download_no_emoji_; + pub const download_emoji: string = " 🔍 "; + + pub const extract_no_emoji_ = "Resolving & extracting"; + const extract_no_emoji: string = extract_no_emoji_ ++ "\n"; + const extract_with_emoji: string = extract_emoji ++ extract_no_emoji_; + pub const extract_emoji: string = " 🚚 "; + + pub const install_no_emoji_ = "Installing"; + const install_no_emoji: string = install_no_emoji_ ++ "\n"; + const install_with_emoji: string = install_emoji ++ install_no_emoji_; + pub const install_emoji: string = " 📦 "; + + pub const save_no_emoji_ = "Saving lockfile"; + const save_no_emoji: string = save_no_emoji_; + const save_with_emoji: string = save_emoji ++ save_no_emoji_; + pub const save_emoji: string = " 🔒 "; + + pub const script_no_emoji_ = "Running script"; + const script_no_emoji: string = script_no_emoji_ ++ "\n"; + const script_with_emoji: string = script_emoji ++ script_no_emoji_; + pub const script_emoji: string = " ⚙️ "; + + pub inline fn download() string { + return if (Output.isEmojiEnabled()) download_with_emoji else download_no_emoji; + } + + pub inline fn save() string { + return if (Output.isEmojiEnabled()) save_with_emoji else save_no_emoji; + } + + pub inline fn extract() string { + return if (Output.isEmojiEnabled()) extract_with_emoji else extract_no_emoji; + } + + pub inline fn install() 
string { + return if (Output.isEmojiEnabled()) install_with_emoji else install_no_emoji; + } + + pub inline fn script() string { + return if (Output.isEmojiEnabled()) script_with_emoji else script_no_emoji; + } +}; + +pub fn setNodeName( + this: *PackageManager, + node: *Progress.Node, + name: string, + emoji: string, + comptime is_first: bool, +) void { + if (Output.isEmojiEnabled()) { + if (is_first) { + @memcpy(this.progress_name_buf[0..emoji.len], emoji); + @memcpy(this.progress_name_buf[emoji.len..][0..name.len], name); + node.name = this.progress_name_buf[0 .. emoji.len + name.len]; + } else { + @memcpy(this.progress_name_buf[emoji.len..][0..name.len], name); + node.name = this.progress_name_buf[0 .. emoji.len + name.len]; + } + } else { + @memcpy(this.progress_name_buf[0..name.len], name); + node.name = this.progress_name_buf[0..name.len]; + } +} + +pub fn startProgressBarIfNone(manager: *PackageManager) void { + if (manager.downloads_node == null) { + manager.startProgressBar(); + } +} +pub fn startProgressBar(manager: *PackageManager) void { + manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; + manager.downloads_node = manager.progress.start(ProgressStrings.download(), 0); + manager.setNodeName(manager.downloads_node.?, ProgressStrings.download_no_emoji_, ProgressStrings.download_emoji, true); + manager.downloads_node.?.setEstimatedTotalItems(manager.total_tasks + manager.extracted_count); + manager.downloads_node.?.setCompletedItems(manager.total_tasks - manager.pendingTaskCount()); + manager.downloads_node.?.activate(); + manager.progress.refresh(); +} + +pub fn endProgressBar(manager: *PackageManager) void { + var downloads_node = manager.downloads_node orelse return; + downloads_node.setEstimatedTotalItems(downloads_node.unprotected_estimated_total_items); + downloads_node.setCompletedItems(downloads_node.unprotected_estimated_total_items); + manager.progress.refresh(); + manager.progress.root.end(); + manager.progress = .{}; + manager.downloads_node = null; +} + +// @sortImports + +const bun = @import("bun"); +const Output = bun.Output; +const Progress = bun.Progress; +const string = bun.string; +const PackageManager = bun.install.PackageManager; diff --git a/src/install/PackageManager/UpdateRequest.zig b/src/install/PackageManager/UpdateRequest.zig new file mode 100644 index 0000000000..2c670d37a1 --- /dev/null +++ b/src/install/PackageManager/UpdateRequest.zig @@ -0,0 +1,253 @@ +name: string = "", +name_hash: PackageNameHash = 0, +version: Dependency.Version = .{}, +version_buf: []const u8 = "", +package_id: PackageID = invalid_package_id, +is_aliased: bool = false, +failed: bool = false, +// This must be cloned to handle when the AST store resets +e_string: ?*JSAst.E.String = null, + +pub const Array = std.ArrayListUnmanaged(UpdateRequest); + +pub inline fn matches(this: PackageManager.UpdateRequest, dependency: Dependency, string_buf: []const u8) bool { + return this.name_hash == if (this.name.len == 0) + String.Builder.stringHash(dependency.version.literal.slice(string_buf)) + else + dependency.name_hash; +} + +/// It is incorrect to call this function before Lockfile.cleanWithLogger() because +/// resolved_name should be populated if possible. +/// +/// `this` needs to be a pointer! If `this` is a copy and the name returned from +/// resolved_name is inlined, you will return a pointer to stack memory. 
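+/// Returns the alias if this request is aliased, the literal version string if the
+/// package is unresolved, and the resolved package name otherwise.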
+pub fn getResolvedName(this: *const UpdateRequest, lockfile: *const Lockfile) string { + return if (this.is_aliased) + this.name + else if (this.package_id == invalid_package_id) + this.version.literal.slice(this.version_buf) + else + lockfile.packages.items(.name)[this.package_id].slice(this.version_buf); +} + +pub fn fromJS(globalThis: *JSC.JSGlobalObject, input: JSC.JSValue) bun.JSError!JSC.JSValue { + var arena = std.heap.ArenaAllocator.init(bun.default_allocator); + defer arena.deinit(); + var stack = std.heap.stackFallback(1024, arena.allocator()); + const allocator = stack.get(); + var all_positionals = std.ArrayList([]const u8).init(allocator); + + var log = logger.Log.init(allocator); + + if (input.isString()) { + var input_str = input.toSliceCloneWithAllocator( + globalThis, + allocator, + ) orelse return .zero; + if (input_str.len > 0) + try all_positionals.append(input_str.slice()); + } else if (input.isArray()) { + var iter = try input.arrayIterator(globalThis); + while (try iter.next()) |item| { + const slice = item.toSliceCloneWithAllocator(globalThis, allocator) orelse return .zero; + if (globalThis.hasException()) return .zero; + if (slice.len == 0) continue; + try all_positionals.append(slice.slice()); + } + if (globalThis.hasException()) return .zero; + } else { + return .js_undefined; + } + + if (all_positionals.items.len == 0) { + return .js_undefined; + } + + var array = Array{}; + + const update_requests = parseWithError(allocator, null, &log, all_positionals.items, &array, .add, false) catch { + return globalThis.throwValue(try log.toJS(globalThis, bun.default_allocator, "Failed to parse dependencies")); + }; + if (update_requests.len == 0) return .js_undefined; + + if (log.msgs.items.len > 0) { + return globalThis.throwValue(try log.toJS(globalThis, bun.default_allocator, "Failed to parse dependencies")); + } + + if (update_requests[0].failed) { + return globalThis.throw("Failed to parse dependencies", .{}); + } + + var object = JSC.JSValue.createEmptyObject(globalThis, 2); + var name_str = bun.String.init(update_requests[0].name); + object.put(globalThis, "name", name_str.transferToJS(globalThis)); + object.put(globalThis, "version", try update_requests[0].version.toJS(update_requests[0].version_buf, globalThis)); + return object; +} + +pub fn parse( + allocator: std.mem.Allocator, + pm: ?*PackageManager, + log: *logger.Log, + positionals: []const string, + update_requests: *Array, + subcommand: Subcommand, +) []UpdateRequest { + return parseWithError(allocator, pm, log, positionals, update_requests, subcommand, true) catch Global.crash(); +} + +fn parseWithError( + allocator: std.mem.Allocator, + pm: ?*PackageManager, + log: *logger.Log, + positionals: []const string, + update_requests: *Array, + subcommand: Subcommand, + fatal: bool, +) ![]UpdateRequest { + // first one is always either: + // add + // remove + outer: for (positionals) |positional| { + var input: []u8 = bun.default_allocator.dupe(u8, std.mem.trim(u8, positional, " \n\r\t")) catch bun.outOfMemory(); + { + var temp: [2048]u8 = undefined; + const len = std.mem.replace(u8, input, "\\\\", "/", &temp); + bun.path.platformToPosixInPlace(u8, &temp); + const input2 = temp[0 .. 
input.len - len]; + @memcpy(input[0..input2.len], input2); + input.len = input2.len; + } + switch (subcommand) { + .link, .unlink => if (!strings.hasPrefixComptime(input, "link:")) { + input = std.fmt.allocPrint(allocator, "{0s}@link:{0s}", .{input}) catch unreachable; + }, + else => {}, + } + + var value = input; + var alias: ?string = null; + if (!Dependency.isTarball(input) and strings.isNPMPackageName(input)) { + alias = input; + value = input[input.len..]; + } else if (input.len > 1) { + if (strings.indexOfChar(input[1..], '@')) |at| { + const name = input[0 .. at + 1]; + if (strings.isNPMPackageName(name)) { + alias = name; + value = input[at + 2 ..]; + } + } + } + + const placeholder = String.from("@@@"); + var version = Dependency.parseWithOptionalTag( + allocator, + if (alias) |name| String.init(input, name) else placeholder, + if (alias) |name| String.Builder.stringHash(name) else null, + value, + null, + &SlicedString.init(input, value), + log, + pm, + ) orelse { + if (fatal) { + Output.errGeneric("unrecognised dependency format: {s}", .{ + positional, + }); + } else { + log.addErrorFmt(null, logger.Loc.Empty, allocator, "unrecognised dependency format: {s}", .{ + positional, + }) catch bun.outOfMemory(); + } + + return error.UnrecognizedDependencyFormat; + }; + if (alias != null and version.tag == .git) { + if (Dependency.parseWithOptionalTag( + allocator, + placeholder, + null, + input, + null, + &SlicedString.init(input, input), + log, + pm, + )) |ver| { + alias = null; + version = ver; + } + } + if (switch (version.tag) { + .dist_tag => version.value.dist_tag.name.eql(placeholder, input, input), + .npm => version.value.npm.name.eql(placeholder, input, input), + else => false, + }) { + if (fatal) { + Output.errGeneric("unrecognised dependency format: {s}", .{ + positional, + }); + } else { + log.addErrorFmt(null, logger.Loc.Empty, allocator, "unrecognised dependency format: {s}", .{ + positional, + }) catch bun.outOfMemory(); + } + + return error.UnrecognizedDependencyFormat; + } + + var request = UpdateRequest{ + .version = version, + .version_buf = input, + }; + if (alias) |name| { + request.is_aliased = true; + request.name = allocator.dupe(u8, name) catch unreachable; + request.name_hash = String.Builder.stringHash(name); + } else if (version.tag == .github and version.value.github.committish.isEmpty()) { + request.name_hash = String.Builder.stringHash(version.literal.slice(input)); + } else { + request.name_hash = String.Builder.stringHash(version.literal.slice(input)); + } + + for (update_requests.items) |*prev| { + if (prev.name_hash == request.name_hash and request.name.len == prev.name.len) continue :outer; + } + update_requests.append(allocator, request) catch bun.outOfMemory(); + } + + return update_requests.items; +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Global = bun.Global; +const JSAst = bun.JSAst; +const JSC = bun.JSC; +const Output = bun.Output; +const default_allocator = bun.default_allocator; +const logger = bun.logger; +const string = bun.string; +const strings = bun.strings; + +const Semver = bun.Semver; +const SlicedString = Semver.SlicedString; +const String = Semver.String; + +const Dependency = bun.install.Dependency; +const Lockfile = bun.install.Lockfile; +const PackageID = bun.install.PackageID; +const PackageNameHash = bun.install.PackageNameHash; +const invalid_package_id = bun.install.invalid_package_id; + +const PackageManager = bun.install.PackageManager; +pub const CommandLineArguments = 
PackageManager.CommandLineArguments; +pub const Options = PackageManager.Options; +pub const PackageInstaller = PackageManager.PackageInstaller; +pub const PackageJSONEditor = PackageManager.PackageJSONEditor; +pub const Subcommand = PackageManager.Subcommand; +const UpdateRequest = PackageManager.UpdateRequest; diff --git a/src/install/PackageManager/WorkspacePackageJSONCache.zig b/src/install/PackageManager/WorkspacePackageJSONCache.zig new file mode 100644 index 0000000000..f935ab0c57 --- /dev/null +++ b/src/install/PackageManager/WorkspacePackageJSONCache.zig @@ -0,0 +1,166 @@ +// maybe rename to `PackageJSONCache` if we cache more than workspaces + +pub const MapEntry = struct { + root: Expr, + source: logger.Source, + indentation: JSPrinter.Options.Indentation = .{}, +}; + +pub const Map = bun.StringHashMapUnmanaged(MapEntry); + +pub const GetJSONOptions = struct { + init_reset_store: bool = true, + guess_indentation: bool = false, +}; + +pub const GetResult = union(enum) { + entry: *MapEntry, + read_err: anyerror, + parse_err: anyerror, + + pub fn unwrap(this: GetResult) !*MapEntry { + return switch (this) { + .entry => |entry| entry, + inline else => |err| err, + }; + } +}; + +map: Map = .{}, + +/// Given an absolute path to a workspace package.json, return the AST +/// and contents of the file. If the package.json is not present in the +/// cache, it will be read from disk and parsed, and stored in the cache. +pub fn getWithPath( + this: *@This(), + allocator: std.mem.Allocator, + log: *logger.Log, + abs_package_json_path: anytype, + comptime opts: GetJSONOptions, +) GetResult { + bun.assertWithLocation(std.fs.path.isAbsolute(abs_package_json_path), @src()); + + var buf: if (Environment.isWindows) bun.PathBuffer else void = undefined; + const path = if (comptime !Environment.isWindows) + abs_package_json_path + else brk: { + @memcpy(buf[0..abs_package_json_path.len], abs_package_json_path); + bun.path.dangerouslyConvertPathToPosixInPlace(u8, buf[0..abs_package_json_path.len]); + break :brk buf[0..abs_package_json_path.len]; + }; + + const entry = this.map.getOrPut(allocator, path) catch bun.outOfMemory(); + if (entry.found_existing) { + return .{ .entry = entry.value_ptr }; + } + + const key = allocator.dupeZ(u8, path) catch bun.outOfMemory(); + entry.key_ptr.* = key; + + const source = &(bun.sys.File.toSource(key, allocator, .{}).unwrap() catch |err| { + _ = this.map.remove(key); + allocator.free(key); + return .{ .read_err = err }; + }); + + if (comptime opts.init_reset_store) + initializeStore(); + + const json = JSON.parsePackageJSONUTF8WithOpts( + source, + log, + allocator, + .{ + .is_json = true, + .allow_comments = true, + .allow_trailing_commas = true, + .guess_indentation = opts.guess_indentation, + }, + ) catch |err| { + _ = this.map.remove(key); + allocator.free(source.contents); + allocator.free(key); + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + return .{ .parse_err = err }; + }; + + entry.value_ptr.* = .{ + .root = json.root.deepClone(bun.default_allocator) catch bun.outOfMemory(), + .source = source.*, + .indentation = json.indentation, + }; + + return .{ .entry = entry.value_ptr }; +} + +/// source path is used as the key, needs to be absolute +pub fn getWithSource( + this: *@This(), + allocator: std.mem.Allocator, + log: *logger.Log, + source: *const logger.Source, + comptime opts: GetJSONOptions, +) GetResult { + bun.assertWithLocation(std.fs.path.isAbsolute(source.path.text), @src()); + + var buf: if (Environment.isWindows) bun.PathBuffer else void 
= undefined; + const path = if (comptime !Environment.isWindows) + source.path.text + else brk: { + @memcpy(buf[0..source.path.text.len], source.path.text); + bun.path.dangerouslyConvertPathToPosixInPlace(u8, buf[0..source.path.text.len]); + break :brk buf[0..source.path.text.len]; + }; + + const entry = this.map.getOrPut(allocator, path) catch bun.outOfMemory(); + if (entry.found_existing) { + return .{ .entry = entry.value_ptr }; + } + + if (comptime opts.init_reset_store) + initializeStore(); + + const json_result = JSON.parsePackageJSONUTF8WithOpts( + source, + log, + allocator, + .{ + .is_json = true, + .allow_comments = true, + .allow_trailing_commas = true, + .guess_indentation = opts.guess_indentation, + }, + ); + + const json = json_result catch |err| { + _ = this.map.remove(path); + return .{ .parse_err = err }; + }; + + entry.value_ptr.* = .{ + .root = json.root.deepClone(allocator) catch bun.outOfMemory(), + .source = source.*, + .indentation = json.indentation, + }; + + entry.key_ptr.* = allocator.dupe(u8, path) catch bun.outOfMemory(); + + return .{ .entry = entry.value_ptr }; +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const JSON = bun.JSON; +const JSPrinter = bun.js_printer; +const default_allocator = bun.default_allocator; +const logger = bun.logger; +const File = bun.sys.File; +const initializeStore = bun.install.initializeStore; + +const JSAst = bun.JSAst; +const js_ast = bun.JSAst; +const Expr = js_ast.Expr; diff --git a/src/install/PackageManager/install_with_manager.zig b/src/install/PackageManager/install_with_manager.zig new file mode 100644 index 0000000000..c68a904239 --- /dev/null +++ b/src/install/PackageManager/install_with_manager.zig @@ -0,0 +1,1022 @@ +pub fn installWithManager( + manager: *PackageManager, + ctx: Command.Context, + root_package_json_contents: string, + original_cwd: string, +) !void { + const log_level = manager.options.log_level; + + // Start resolving DNS for the default registry immediately. + // Unless you're behind a proxy. + if (!manager.env.hasHTTPProxy()) { + // And don't try to resolve DNS if it's an IP address. 
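+ // Prefetching overlaps registry DNS resolution with the lockfile load below.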
+ if (manager.options.scope.url.hostname.len > 0 and !manager.options.scope.url.isIPAddress()) {
+ var hostname_stack = std.heap.stackFallback(512, ctx.allocator);
+ const allocator = hostname_stack.get();
+ const hostname = try allocator.dupeZ(u8, manager.options.scope.url.hostname);
+ defer allocator.free(hostname);
+ bun.dns.internal.prefetch(manager.event_loop.loop(), hostname, manager.options.scope.url.getPortAuto());
+ }
+ }
+
+ var load_result: Lockfile.LoadResult = if (manager.options.do.load_lockfile)
+ manager.lockfile.loadFromCwd(
+ manager,
+ manager.allocator,
+ manager.log,
+ true,
+ )
+ else
+ .{ .not_found = {} };
+
+ try manager.updateLockfileIfNeeded(load_result);
+
+ var root = Lockfile.Package{};
+ var needs_new_lockfile = load_result != .ok or
+ (load_result.ok.lockfile.buffers.dependencies.items.len == 0 and manager.update_requests.len > 0);
+
+ manager.options.enable.force_save_lockfile = manager.options.enable.force_save_lockfile or
+ (load_result == .ok and
+ // if migrated always save a new lockfile
+ (load_result.ok.was_migrated or
+
+ // if loaded from binary and save-text-lockfile is passed
+ (load_result.ok.format == .binary and
+ manager.options.save_text_lockfile orelse false)));
+
+ // this defaults to false
+ // but we force allowing updates to the lockfile when you do bun add
+ var had_any_diffs = false;
+ manager.progress = .{};
+
+ // Step 2. Parse the package.json file
+ const root_package_json_source = &logger.Source.initPathString(PackageManager.package_json_cwd, root_package_json_contents);
+
+ switch (load_result) {
+ .err => |cause| {
+ if (log_level != .silent) {
+ switch (cause.step) {
+ .open_file => Output.err(cause.value, "failed to open lockfile: '{s}'", .{
+ cause.lockfile_path,
+ }),
+ .parse_file => Output.err(cause.value, "failed to parse lockfile: '{s}'", .{
+ cause.lockfile_path,
+ }),
+ .read_file => Output.err(cause.value, "failed to read lockfile: '{s}'", .{
+ cause.lockfile_path,
+ }),
+ .migrating => Output.err(cause.value, "failed to migrate lockfile: '{s}'", .{
+ cause.lockfile_path,
+ }),
+ }
+
+ if (!manager.options.enable.fail_early) {
+ Output.printErrorln("", .{});
+ Output.warn("Ignoring lockfile", .{});
+ }
+
+ if (ctx.log.errors > 0) {
+ try manager.log.print(Output.errorWriter());
+ manager.log.reset();
+ }
+ Output.flush();
+ }
+
+ if (manager.options.enable.fail_early) Global.crash();
+ },
+ .ok => {
+ if (manager.subcommand == .update) {
+ // existing lockfile: get the original versions of the packages being updated
+ const lockfile = manager.lockfile;
+ const packages = lockfile.packages.slice();
+ const resolutions = packages.items(.resolution);
+ const workspace_package_id = manager.root_package_id.get(lockfile, manager.workspace_name_hash);
+ const workspace_dep_list = packages.items(.dependencies)[workspace_package_id];
+ const workspace_res_list = packages.items(.resolutions)[workspace_package_id];
+ const workspace_deps = workspace_dep_list.get(lockfile.buffers.dependencies.items);
+ const workspace_package_ids = workspace_res_list.get(lockfile.buffers.resolutions.items);
+ for (workspace_deps, workspace_package_ids) |dep, package_id| {
+ if (dep.version.tag != .npm and dep.version.tag != .dist_tag) continue;
+ if (package_id == invalid_package_id) continue;
+
+ if (manager.updating_packages.getPtr(dep.name.slice(lockfile.buffers.string_bytes.items))) |entry_ptr| {
+ const original_resolution: Resolution = resolutions[package_id];
+ // Just in case check if the resolution is `npm`.
It should always be `npm` because the dependency version + // is `npm` or `dist_tag`. + if (original_resolution.tag != .npm) continue; + + var original = original_resolution.value.npm.version; + const tag_total = original.tag.pre.len() + original.tag.build.len(); + if (tag_total > 0) { + // clone because don't know if lockfile buffer will reallocate + const tag_buf = manager.allocator.alloc(u8, tag_total) catch bun.outOfMemory(); + var ptr = tag_buf; + original.tag = original_resolution.value.npm.version.tag.cloneInto( + lockfile.buffers.string_bytes.items, + &ptr, + ); + + entry_ptr.original_version_string_buf = tag_buf; + } + + entry_ptr.original_version = original; + } + } + } + differ: { + root = load_result.ok.lockfile.rootPackage() orelse { + needs_new_lockfile = true; + break :differ; + }; + + if (root.dependencies.len == 0) { + needs_new_lockfile = true; + } + + if (needs_new_lockfile) break :differ; + + var lockfile: Lockfile = undefined; + lockfile.initEmpty(manager.allocator); + var maybe_root = Lockfile.Package{}; + + var resolver: void = {}; + try maybe_root.parse( + &lockfile, + manager, + manager.allocator, + manager.log, + root_package_json_source, + void, + &resolver, + Features.main, + ); + const mapping = try manager.lockfile.allocator.alloc(PackageID, maybe_root.dependencies.len); + @memset(mapping, invalid_package_id); + + manager.summary = try Package.Diff.generate( + manager, + manager.allocator, + manager.log, + manager.lockfile, + &lockfile, + &root, + &maybe_root, + if (manager.to_update) manager.update_requests else null, + mapping, + ); + + had_any_diffs = manager.summary.hasDiffs(); + + if (!had_any_diffs) { + // always grab latest scripts for root package + var builder_ = manager.lockfile.stringBuilder(); + var builder = &builder_; + + maybe_root.scripts.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); + try builder.allocate(); + manager.lockfile.packages.items(.scripts)[0] = maybe_root.scripts.clone( + lockfile.buffers.string_bytes.items, + *Lockfile.StringBuilder, + builder, + ); + builder.clamp(); + } else { + var builder_ = manager.lockfile.stringBuilder(); + // ensure we use one pointer to reference it instead of creating new ones and potentially aliasing + var builder = &builder_; + // If you changed packages, we will copy over the new package from the new lockfile + const new_dependencies = maybe_root.dependencies.get(lockfile.buffers.dependencies.items); + + for (new_dependencies) |new_dep| { + new_dep.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); + } + + for (lockfile.workspace_paths.values()) |path| builder.count(path.slice(lockfile.buffers.string_bytes.items)); + for (lockfile.workspace_versions.values()) |version| version.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); + for (lockfile.patched_dependencies.values()) |patch_dep| builder.count(patch_dep.path.slice(lockfile.buffers.string_bytes.items)); + + lockfile.overrides.count(&lockfile, builder); + lockfile.catalogs.count(&lockfile, builder); + maybe_root.scripts.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); + + manager.lockfile.node_linker = lockfile.node_linker; + + const off = @as(u32, @truncate(manager.lockfile.buffers.dependencies.items.len)); + const len = @as(u32, @truncate(new_dependencies.len)); + var packages = manager.lockfile.packages.slice(); + var dep_lists = packages.items(.dependencies); + var resolution_lists = packages.items(.resolutions); + const 
old_resolutions_list = resolution_lists[0];
+ dep_lists[0] = .{ .off = off, .len = len };
+ resolution_lists[0] = .{ .off = off, .len = len };
+ try builder.allocate();
+
+ const all_name_hashes: []PackageNameHash = brk: {
+ if (!manager.summary.overrides_changed) break :brk &.{};
+ const hashes_len = manager.lockfile.overrides.map.entries.len + lockfile.overrides.map.entries.len;
+ if (hashes_len == 0) break :brk &.{};
+ var all_name_hashes = try bun.default_allocator.alloc(PackageNameHash, hashes_len);
+ @memcpy(all_name_hashes[0..manager.lockfile.overrides.map.entries.len], manager.lockfile.overrides.map.keys());
+ @memcpy(all_name_hashes[manager.lockfile.overrides.map.entries.len..], lockfile.overrides.map.keys());
+ var i = manager.lockfile.overrides.map.entries.len;
+ while (i < all_name_hashes.len) {
+ if (std.mem.indexOfScalar(PackageNameHash, all_name_hashes[0..i], all_name_hashes[i]) != null) {
+ all_name_hashes[i] = all_name_hashes[all_name_hashes.len - 1];
+ all_name_hashes.len -= 1;
+ } else {
+ i += 1;
+ }
+ }
+ break :brk all_name_hashes;
+ };
+
+ manager.lockfile.overrides = try lockfile.overrides.clone(manager, &lockfile, manager.lockfile, builder);
+ manager.lockfile.catalogs = try lockfile.catalogs.clone(manager, &lockfile, manager.lockfile, builder);
+
+ manager.lockfile.trusted_dependencies = if (lockfile.trusted_dependencies) |trusted_dependencies|
+ try trusted_dependencies.clone(manager.lockfile.allocator)
+ else
+ null;
+
+ try manager.lockfile.buffers.dependencies.ensureUnusedCapacity(manager.lockfile.allocator, len);
+ try manager.lockfile.buffers.resolutions.ensureUnusedCapacity(manager.lockfile.allocator, len);
+
+ const old_resolutions = old_resolutions_list.get(manager.lockfile.buffers.resolutions.items);
+
+ var dependencies = manager.lockfile.buffers.dependencies.items.ptr[off .. off + len];
+ var resolutions = manager.lockfile.buffers.resolutions.items.ptr[off .. off + len];
+
+ // It is too easy to accidentally leave this memory undefined
+ @memset(resolutions, invalid_package_id);
+ @memset(dependencies, Dependency{});
+
+ manager.lockfile.buffers.dependencies.items = manager.lockfile.buffers.dependencies.items.ptr[0 .. off + len];
+ manager.lockfile.buffers.resolutions.items = manager.lockfile.buffers.resolutions.items.ptr[0 .. off + len];
+
+ for (new_dependencies, 0..)
|new_dep, i| { + dependencies[i] = try new_dep.clone(manager, lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); + if (mapping[i] != invalid_package_id) { + resolutions[i] = old_resolutions[mapping[i]]; + } + } + + manager.lockfile.packages.items(.scripts)[0] = maybe_root.scripts.clone( + lockfile.buffers.string_bytes.items, + *Lockfile.StringBuilder, + builder, + ); + + // Update workspace paths + try manager.lockfile.workspace_paths.ensureTotalCapacity(manager.lockfile.allocator, lockfile.workspace_paths.entries.len); + { + manager.lockfile.workspace_paths.clearRetainingCapacity(); + var iter = lockfile.workspace_paths.iterator(); + while (iter.next()) |entry| { + // The string offsets will be wrong so fix them + const path = entry.value_ptr.slice(lockfile.buffers.string_bytes.items); + const str = builder.append(String, path); + manager.lockfile.workspace_paths.putAssumeCapacity(entry.key_ptr.*, str); + } + } + + // Update workspace versions + try manager.lockfile.workspace_versions.ensureTotalCapacity(manager.lockfile.allocator, lockfile.workspace_versions.entries.len); + { + manager.lockfile.workspace_versions.clearRetainingCapacity(); + var iter = lockfile.workspace_versions.iterator(); + while (iter.next()) |entry| { + // Copy version string offsets + const version = entry.value_ptr.append(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); + manager.lockfile.workspace_versions.putAssumeCapacity(entry.key_ptr.*, version); + } + } + + // Update patched dependencies + { + var iter = lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| { + const pkg_name_and_version_hash = entry.key_ptr.*; + bun.debugAssert(entry.value_ptr.patchfile_hash_is_null); + const gop = try manager.lockfile.patched_dependencies.getOrPut(manager.lockfile.allocator, pkg_name_and_version_hash); + if (!gop.found_existing) { + gop.value_ptr.* = .{ + .path = builder.append(String, entry.value_ptr.*.path.slice(lockfile.buffers.string_bytes.items)), + }; + gop.value_ptr.setPatchfileHash(null); + // gop.value_ptr.path = gop.value_ptr.path; + } else if (!bun.strings.eql( + gop.value_ptr.path.slice(manager.lockfile.buffers.string_bytes.items), + entry.value_ptr.path.slice(lockfile.buffers.string_bytes.items), + )) { + gop.value_ptr.path = builder.append(String, entry.value_ptr.*.path.slice(lockfile.buffers.string_bytes.items)); + gop.value_ptr.setPatchfileHash(null); + } + } + + var count: usize = 0; + iter = manager.lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| { + if (!lockfile.patched_dependencies.contains(entry.key_ptr.*)) { + count += 1; + } + } + if (count > 0) { + try manager.patched_dependencies_to_remove.ensureTotalCapacity(manager.allocator, count); + iter = manager.lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| { + if (!lockfile.patched_dependencies.contains(entry.key_ptr.*)) { + try manager.patched_dependencies_to_remove.put(manager.allocator, entry.key_ptr.*, {}); + } + } + for (manager.patched_dependencies_to_remove.keys()) |hash| { + _ = manager.lockfile.patched_dependencies.orderedRemove(hash); + } + } + } + + builder.clamp(); + + if (manager.summary.overrides_changed and all_name_hashes.len > 0) { + for (manager.lockfile.buffers.dependencies.items, 0..) 
|*dependency, dependency_i| { + if (std.mem.indexOfScalar(PackageNameHash, all_name_hashes, dependency.name_hash)) |_| { + manager.lockfile.buffers.resolutions.items[dependency_i] = invalid_package_id; + try manager.enqueueDependencyWithMain( + @truncate(dependency_i), + dependency, + invalid_package_id, + false, + ); + } + } + } + + if (manager.summary.catalogs_changed) { + for (manager.lockfile.buffers.dependencies.items, 0..) |*dep, _dep_id| { + const dep_id: DependencyID = @intCast(_dep_id); + if (dep.version.tag != .catalog) continue; + + manager.lockfile.buffers.resolutions.items[dep_id] = invalid_package_id; + try manager.enqueueDependencyWithMain( + dep_id, + dep, + invalid_package_id, + false, + ); + } + } + + // Split this into two passes because the below may allocate memory or invalidate pointers + if (manager.summary.add > 0 or manager.summary.update > 0) { + const changes = @as(PackageID, @truncate(mapping.len)); + var counter_i: PackageID = 0; + + _ = manager.getCacheDirectory(); + _ = manager.getTemporaryDirectory(); + + while (counter_i < changes) : (counter_i += 1) { + if (mapping[counter_i] == invalid_package_id) { + const dependency_i = counter_i + off; + const dependency = manager.lockfile.buffers.dependencies.items[dependency_i]; + try manager.enqueueDependencyWithMain( + dependency_i, + &dependency, + manager.lockfile.buffers.resolutions.items[dependency_i], + false, + ); + } + } + } + + if (manager.summary.update > 0) root.scripts = .{}; + } + } + }, + else => {}, + } + + if (needs_new_lockfile) { + root = .{}; + manager.lockfile.initEmpty(manager.allocator); + + if (manager.options.enable.frozen_lockfile and load_result != .not_found) { + if (log_level != .silent) { + Output.prettyErrorln("error: lockfile had changes, but lockfile is frozen", .{}); + } + Global.crash(); + } + + var resolver: void = {}; + try root.parse( + manager.lockfile, + manager, + manager.allocator, + manager.log, + root_package_json_source, + void, + &resolver, + Features.main, + ); + + root = try manager.lockfile.appendPackage(root); + + if (root.dependencies.len > 0) { + _ = manager.getCacheDirectory(); + _ = manager.getTemporaryDirectory(); + } + { + var iter = manager.lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| manager.enqueuePatchTaskPre(PatchTask.newCalcPatchHash(manager, entry.key_ptr.*, null)); + } + manager.enqueueDependencyList(root.dependencies); + } else { + { + var iter = manager.lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| manager.enqueuePatchTaskPre(PatchTask.newCalcPatchHash(manager, entry.key_ptr.*, null)); + } + // Anything that needs to be downloaded from an update needs to be scheduled here + manager.drainDependencyList(); + } + + if (manager.pendingTaskCount() > 0 or manager.peer_dependencies.readableLength() > 0) { + if (root.dependencies.len > 0) { + _ = manager.getCacheDirectory(); + _ = manager.getTemporaryDirectory(); + } + + if (log_level.showProgress()) { + manager.startProgressBar(); + } else if (log_level != .silent) { + Output.prettyErrorln("Resolving dependencies", .{}); + Output.flush(); + } + + const runAndWaitFn = struct { + pub fn runAndWaitFn(comptime check_peers: bool, comptime only_pre_patch: bool) *const fn (*PackageManager) anyerror!void { + return struct { + manager: *PackageManager, + err: ?anyerror = null, + pub fn isDone(closure: *@This()) bool { + var this = closure.manager; + if (comptime check_peers) + this.processPeerDependencyList() catch |err| { + closure.err = err; + return true; + }; + 
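+ // Drain any dependencies enqueued so far before polling the task queues.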
+ this.drainDependencyList(); + + this.runTasks( + *PackageManager, + this, + .{ + .onExtract = {}, + .onResolve = {}, + .onPackageManifestError = {}, + .onPackageDownloadError = {}, + .progress_bar = true, + }, + check_peers, + this.options.log_level, + ) catch |err| { + closure.err = err; + return true; + }; + + if (comptime check_peers) { + if (this.peer_dependencies.readableLength() > 0) { + return false; + } + } + + if (comptime only_pre_patch) { + const pending_patch = this.pending_pre_calc_hashes.load(.monotonic); + return pending_patch == 0; + } + + const pending_tasks = this.pendingTaskCount(); + + if (PackageManager.verbose_install and pending_tasks > 0) { + if (PackageManager.hasEnoughTimePassedBetweenWaitingMessages()) Output.prettyErrorln("[PackageManager] waiting for {d} tasks\n", .{pending_tasks}); + } + + return pending_tasks == 0; + } + + pub fn runAndWait(this: *PackageManager) !void { + var closure = @This(){ + .manager = this, + }; + + this.sleepUntil(&closure, &@This().isDone); + + if (closure.err) |err| { + return err; + } + } + }.runAndWait; + } + }.runAndWaitFn; + + const waitForCalcingPatchHashes = runAndWaitFn(false, true); + const waitForEverythingExceptPeers = runAndWaitFn(false, false); + const waitForPeers = runAndWaitFn(true, false); + + if (manager.lockfile.patched_dependencies.entries.len > 0) { + try waitForCalcingPatchHashes(manager); + } + + if (manager.pendingTaskCount() > 0) { + try waitForEverythingExceptPeers(manager); + } + + try waitForPeers(manager); + + if (log_level.showProgress()) { + manager.endProgressBar(); + } else if (log_level != .silent) { + Output.prettyErrorln("Resolved, downloaded and extracted [{d}]", .{manager.total_tasks}); + Output.flush(); + } + } + + const had_errors_before_cleaning_lockfile = manager.log.hasErrors(); + try manager.log.print(Output.errorWriter()); + manager.log.reset(); + + // This operation doesn't perform any I/O, so it should be relatively cheap. + const lockfile_before_clean = manager.lockfile; + + manager.lockfile = try manager.lockfile.cleanWithLogger( + manager, + manager.update_requests, + manager.log, + manager.options.enable.exact_versions, + log_level, + ); + + if (manager.lockfile.packages.len > 0) { + root = manager.lockfile.packages.get(0); + } + + if (manager.lockfile.packages.len > 0) { + for (manager.update_requests) |request| { + // prevent redundant errors + if (request.failed) { + return error.InstallFailed; + } + } + manager.verifyResolutions(log_level); + } + + // append scripts to lockfile before generating new metahash + manager.loadRootLifecycleScripts(root); + defer { + if (manager.root_lifecycle_scripts) |root_scripts| { + manager.allocator.free(root_scripts.package_name); + } + } + + if (manager.root_lifecycle_scripts) |root_scripts| { + root_scripts.appendToLockfile(manager.lockfile); + } + { + const packages = manager.lockfile.packages.slice(); + for (packages.items(.resolution), packages.items(.meta), packages.items(.scripts)) |resolution, meta, scripts| { + if (resolution.tag == .workspace) { + if (meta.hasInstallScript()) { + if (scripts.hasAny()) { + const first_index, _, const entries = scripts.getScriptEntries( + manager.lockfile, + manager.lockfile.buffers.string_bytes.items, + .workspace, + false, + ); + + if (comptime Environment.allow_assert) { + bun.assert(first_index != -1); + } + + if (first_index != -1) { + inline for (entries, 0..) 
|maybe_entry, i| { + if (maybe_entry) |entry| { + @field(manager.lockfile.scripts, Lockfile.Scripts.names[i]).append( + manager.lockfile.allocator, + entry, + ) catch bun.outOfMemory(); + } + } + } + } else { + const first_index, _, const entries = scripts.getScriptEntries( + manager.lockfile, + manager.lockfile.buffers.string_bytes.items, + .workspace, + true, + ); + + if (comptime Environment.allow_assert) { + bun.assert(first_index != -1); + } + + inline for (entries, 0..) |maybe_entry, i| { + if (maybe_entry) |entry| { + @field(manager.lockfile.scripts, Lockfile.Scripts.names[i]).append( + manager.lockfile.allocator, + entry, + ) catch bun.outOfMemory(); + } + } + } + } + } + } + } + + if (manager.options.global) { + try manager.setupGlobalDir(ctx); + } + + const packages_len_before_install = manager.lockfile.packages.len; + + if (manager.options.enable.frozen_lockfile and load_result != .not_found) frozen_lockfile: { + if (load_result.loadedFromTextLockfile()) { + if (manager.lockfile.eql(lockfile_before_clean, packages_len_before_install, manager.allocator) catch bun.outOfMemory()) { + break :frozen_lockfile; + } + } else { + if (!(manager.lockfile.hasMetaHashChanged(PackageManager.verbose_install or manager.options.do.print_meta_hash_string, packages_len_before_install) catch false)) { + break :frozen_lockfile; + } + } + + if (log_level != .silent) { + Output.prettyErrorln("error: lockfile had changes, but lockfile is frozen", .{}); + Output.note("try re-running without --frozen-lockfile and commit the updated lockfile", .{}); + } + Global.crash(); + } + + const lockfile_before_install = manager.lockfile; + + const save_format = load_result.saveFormat(&manager.options); + + if (manager.options.lockfile_only) { + // save the lockfile and exit. make sure metahash is generated for binary lockfile + + manager.lockfile.meta_hash = try manager.lockfile.generateMetaHash( + PackageManager.verbose_install or manager.options.do.print_meta_hash_string, + packages_len_before_install, + ); + + try manager.saveLockfile(&load_result, save_format, had_any_diffs, lockfile_before_install, packages_len_before_install, log_level); + + if (manager.options.do.summary) { + // TODO(dylan-conway): packages aren't installed but we can still print + // added/removed/updated direct dependencies. 
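+ // With --lockfile-only nothing was installed, so the summary below only reports the saved lockfile format and the package count.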
+ Output.pretty("\nSaved {s} ({d} package{s}) ", .{ + switch (save_format) { + .text => "bun.lock", + .binary => "bun.lockb", + }, + manager.lockfile.packages.len, + if (manager.lockfile.packages.len == 1) "" else "s", + }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + Output.pretty("\n", .{}); + } + Output.flush(); + return; + } + + var path_buf: bun.PathBuffer = undefined; + var workspace_filters: std.ArrayListUnmanaged(WorkspaceFilter) = .{}; + // only populated when subcommand is `.install` + if (manager.subcommand == .install and manager.options.filter_patterns.len > 0) { + try workspace_filters.ensureUnusedCapacity(manager.allocator, manager.options.filter_patterns.len); + for (manager.options.filter_patterns) |pattern| { + try workspace_filters.append(manager.allocator, try WorkspaceFilter.init(manager.allocator, pattern, original_cwd, &path_buf)); + } + } + defer workspace_filters.deinit(manager.allocator); + + var install_root_dependencies = workspace_filters.items.len == 0; + if (!install_root_dependencies) { + const pkg_names = manager.lockfile.packages.items(.name); + + const abs_root_path = abs_root_path: { + if (comptime !Environment.isWindows) { + break :abs_root_path strings.withoutTrailingSlash(FileSystem.instance.top_level_dir); + } + + var abs_path = Path.pathToPosixBuf(u8, FileSystem.instance.top_level_dir, &path_buf); + break :abs_root_path strings.withoutTrailingSlash(abs_path[Path.windowsVolumeNameLen(abs_path)[0]..]); + }; + + for (workspace_filters.items) |filter| { + const pattern, const path_or_name = switch (filter) { + .name => |pattern| .{ pattern, pkg_names[0].slice(manager.lockfile.buffers.string_bytes.items) }, + .path => |pattern| .{ pattern, abs_root_path }, + .all => { + install_root_dependencies = true; + continue; + }, + }; + + switch (bun.glob.walk.matchImpl(manager.allocator, pattern, path_or_name)) { + .match, .negate_match => install_root_dependencies = true, + + .negate_no_match => { + // always skip if a pattern specifically says "!" 
+ install_root_dependencies = false; + break; + }, + + .no_match => {}, + } + } + } + + const install_summary: PackageInstall.Summary = install_summary: { + if (!manager.options.do.install_packages) { + break :install_summary .{}; + } + + if (manager.lockfile.node_linker == .hoisted or + // TODO + manager.lockfile.node_linker == .auto) + { + break :install_summary try installHoistedPackages( + manager, + ctx, + workspace_filters.items, + install_root_dependencies, + log_level, + ); + } + + break :install_summary installIsolatedPackages( + manager, + ctx, + install_root_dependencies, + workspace_filters.items, + ) catch |err| switch (err) { + error.OutOfMemory => bun.outOfMemory(), + }; + }; + + if (log_level != .silent) { + try manager.log.print(Output.errorWriter()); + } + if (had_errors_before_cleaning_lockfile or manager.log.hasErrors()) Global.crash(); + + const did_meta_hash_change = + // If the lockfile was frozen, we already checked it + !manager.options.enable.frozen_lockfile and + if (load_result.loadedFromTextLockfile()) + !try manager.lockfile.eql(lockfile_before_clean, packages_len_before_install, manager.allocator) + else + try manager.lockfile.hasMetaHashChanged( + PackageManager.verbose_install or manager.options.do.print_meta_hash_string, + @min(packages_len_before_install, manager.lockfile.packages.len), + ); + + // It's unnecessary work to re-save the lockfile if there are no changes + const should_save_lockfile = + (load_result == .ok and ((load_result.ok.format == .binary and save_format == .text) or + + // make sure old versions are updated + load_result.ok.format == .text and save_format == .text and manager.lockfile.text_lockfile_version != TextLockfile.Version.current)) or + + // check `save_lockfile` after checking if loaded from binary and save format is text + // because `save_lockfile` is set to false for `--frozen-lockfile` + (manager.options.do.save_lockfile and + (did_meta_hash_change or + had_any_diffs or + manager.update_requests.len > 0 or + (load_result == .ok and load_result.ok.serializer_result.packages_need_update) or + manager.lockfile.isEmpty() or + manager.options.enable.force_save_lockfile)); + + if (should_save_lockfile) { + try manager.saveLockfile(&load_result, save_format, had_any_diffs, lockfile_before_install, packages_len_before_install, log_level); + } + + if (needs_new_lockfile) { + manager.summary.add = @as(u32, @truncate(manager.lockfile.packages.len)); + } + + if (manager.options.do.save_yarn_lock) { + var node: *Progress.Node = undefined; + if (log_level.showProgress()) { + manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; + node = manager.progress.start("Saving yarn.lock", 0); + manager.progress.refresh(); + } else if (log_level != .silent) { + Output.prettyErrorln("Saved yarn.lock", .{}); + Output.flush(); + } + + try manager.writeYarnLock(); + if (log_level.showProgress()) { + node.completeOne(); + manager.progress.refresh(); + manager.progress.root.end(); + manager.progress = .{}; + } + } + + if (manager.options.do.run_scripts and install_root_dependencies and !manager.options.global) { + if (manager.root_lifecycle_scripts) |scripts| { + if (comptime Environment.allow_assert) { + bun.assert(scripts.total > 0); + } + + if (log_level != .silent) { + Output.printError("\n", .{}); + Output.flush(); + } + // root lifecycle scripts can run now that all dependencies are installed, dependency scripts + // have finished, and lockfiles have been saved + const optional = false; + const output_in_foreground = 
true; + try manager.spawnPackageLifecycleScripts(ctx, scripts, optional, output_in_foreground, null); + + while (manager.pending_lifecycle_script_tasks.load(.monotonic) > 0) { + manager.reportSlowLifecycleScripts(); + + manager.sleep(); + } + } + } + + if (log_level != .silent) { + try printInstallSummary(manager, ctx, &install_summary, did_meta_hash_change, log_level); + } + + if (install_summary.fail > 0) { + manager.any_failed_to_install = true; + } + + Output.flush(); +} + +fn printInstallSummary( + this: *PackageManager, + ctx: Command.Context, + install_summary: *const PackageInstall.Summary, + did_meta_hash_change: bool, + log_level: Options.LogLevel, +) !void { + var printed_timestamp = false; + if (this.options.do.summary) { + var printer = Lockfile.Printer{ + .lockfile = this.lockfile, + .options = this.options, + .updates = this.update_requests, + .successfully_installed = install_summary.successfully_installed, + }; + + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + try Lockfile.Printer.Tree.print(&printer, this, Output.WriterType, Output.writer(), enable_ansi_colors, log_level); + }, + } + + if (!did_meta_hash_change) { + this.summary.remove = 0; + this.summary.add = 0; + this.summary.update = 0; + } + + if (install_summary.success > 0) { + // it's confusing when it shows 3 packages and says it installed 1 + const pkgs_installed = @max( + install_summary.success, + @as( + u32, + @truncate(this.update_requests.len), + ), + ); + Output.pretty("{d} package{s} installed ", .{ pkgs_installed, if (pkgs_installed == 1) "" else "s" }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + printed_timestamp = true; + printBlockedPackagesInfo(install_summary, this.options.global); + + if (this.summary.remove > 0) { + Output.pretty("Removed: {d}\n", .{this.summary.remove}); + } + } else if (this.summary.remove > 0) { + if (this.subcommand == .remove) { + for (this.update_requests) |request| { + Output.prettyln("- {s}", .{request.name}); + } + } + + Output.pretty("{d} package{s} removed ", .{ this.summary.remove, if (this.summary.remove == 1) "" else "s" }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + printed_timestamp = true; + printBlockedPackagesInfo(install_summary, this.options.global); + } else if (install_summary.skipped > 0 and install_summary.fail == 0 and this.update_requests.len == 0) { + const count = @as(PackageID, @truncate(this.lockfile.packages.len)); + if (count != install_summary.skipped) { + if (!this.options.enable.only_missing) { + Output.pretty("Checked {d} install{s} across {d} package{s} (no changes) ", .{ + install_summary.skipped, + if (install_summary.skipped == 1) "" else "s", + count, + if (count == 1) "" else "s", + }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + } + printed_timestamp = true; + printBlockedPackagesInfo(install_summary, this.options.global); + } else { + Output.pretty("Done! 
Checked {d} package{s} (no changes) ", .{ + install_summary.skipped, + if (install_summary.skipped == 1) "" else "s", + }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + printed_timestamp = true; + printBlockedPackagesInfo(install_summary, this.options.global); + } + } + + if (install_summary.fail > 0) { + Output.prettyln("Failed to install {d} package{s}\n", .{ install_summary.fail, if (install_summary.fail == 1) "" else "s" }); + Output.flush(); + } + } + + if (this.options.do.summary) { + if (!printed_timestamp) { + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + Output.prettyln(" done", .{}); + printed_timestamp = true; + } + } +} + +fn printBlockedPackagesInfo(summary: *const PackageInstall.Summary, global: bool) void { + const packages_count = summary.packages_with_blocked_scripts.count(); + var scripts_count: usize = 0; + for (summary.packages_with_blocked_scripts.values()) |count| scripts_count += count; + + if (comptime Environment.allow_assert) { + // if packages_count is greater than 0, scripts_count must also be greater than 0. + bun.assert(packages_count == 0 or scripts_count > 0); + // if scripts_count is 1, it's only possible for packages_count to be 1. + bun.assert(scripts_count != 1 or packages_count == 1); + } + + if (packages_count > 0) { + Output.prettyln("\n\nBlocked {d} postinstall{s}. Run `bun pm {s}untrusted` for details.\n", .{ + scripts_count, + if (scripts_count > 1) "s" else "", + if (global) "-g " else "", + }); + } else { + Output.pretty("\n", .{}); + } +} + +// @sortImports + +const std = @import("std"); +const installHoistedPackages = @import("../hoisted_install.zig").installHoistedPackages; +const installIsolatedPackages = @import("../isolated_install.zig").installIsolatedPackages; + +const bun = @import("bun"); +const Environment = bun.Environment; +const Global = bun.Global; +const Output = bun.Output; +const Path = bun.path; +const Progress = bun.Progress; +const default_allocator = bun.default_allocator; +const logger = bun.logger; +const string = bun.string; +const strings = bun.strings; +const Command = bun.CLI.Command; + +const Semver = bun.Semver; +const String = Semver.String; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const Dependency = bun.install.Dependency; +const DependencyID = bun.install.DependencyID; +const Features = bun.install.Features; +const PackageID = bun.install.PackageID; +const PackageInstall = bun.install.PackageInstall; +const PackageNameHash = bun.install.PackageNameHash; +const PatchTask = bun.install.PatchTask; +const Resolution = bun.install.Resolution; +const TextLockfile = bun.install.TextLockfile; +const invalid_package_id = bun.install.invalid_package_id; + +const Lockfile = bun.install.Lockfile; +const Package = Lockfile.Package; + +const PackageManager = bun.install.PackageManager; +const Options = PackageManager.Options; +const WorkspaceFilter = PackageManager.WorkspaceFilter; diff --git a/src/install/PackageManager/patchPackage.zig b/src/install/PackageManager/patchPackage.zig new file mode 100644 index 0000000000..f443ea0a85 --- /dev/null +++ b/src/install/PackageManager/patchPackage.zig @@ -0,0 +1,1125 @@ +pub const PatchCommitResult = struct { + patch_key: []const u8, + patchfile_path: []const u8, + not_in_workspace_root: bool = false, +}; + +/// - Arg is the dir containing the package with changes OR name and version +/// - Get the patch file contents by running git diff on the temp dir and the original package dir +/// - Write the patch file to 
$PATCHES_DIR/$PKG_NAME_AND_VERSION.patch +/// - Update "patchedDependencies" in package.json +/// - Run install to install newly patched pkg +pub fn doPatchCommit( + manager: *PackageManager, + pathbuf: *bun.PathBuffer, + log_level: Options.LogLevel, +) !?PatchCommitResult { + var folder_path_buf: bun.PathBuffer = undefined; + var lockfile: *Lockfile = try manager.allocator.create(Lockfile); + defer lockfile.deinit(); + switch (lockfile.loadFromCwd(manager, manager.allocator, manager.log, true)) { + .not_found => { + Output.errGeneric("Cannot find lockfile. Install packages with `bun install` before patching them.", .{}); + Global.crash(); + }, + .err => |cause| { + if (log_level != .silent) { + switch (cause.step) { + .open_file => Output.prettyError("error opening lockfile: {s}\n", .{ + @errorName(cause.value), + }), + .parse_file => Output.prettyError("error parsing lockfile: {s}\n", .{ + @errorName(cause.value), + }), + .read_file => Output.prettyError("error reading lockfile: {s}\n", .{ + @errorName(cause.value), + }), + .migrating => Output.prettyError("error migrating lockfile: {s}\n", .{ + @errorName(cause.value), + }), + } + + if (manager.options.enable.fail_early) { + Output.prettyError("failed to load lockfile\n", .{}); + } else { + Output.prettyError("ignoring lockfile\n", .{}); + } + + Output.flush(); + } + Global.crash(); + }, + .ok => {}, + } + + var argument = manager.options.positionals[1]; + const arg_kind: PatchArgKind = PatchArgKind.fromArg(argument); + + const not_in_workspace_root = manager.root_package_id.get(lockfile, manager.workspace_name_hash) != 0; + var free_argument = false; + argument = if (arg_kind == .path and + not_in_workspace_root and + (!bun.path.Platform.posix.isAbsolute(argument) or (bun.Environment.isWindows and !bun.path.Platform.windows.isAbsolute(argument)))) + brk: { + if (pathArgumentRelativeToRootWorkspacePackage(manager, lockfile, argument)) |rel_path| { + free_argument = true; + break :brk rel_path; + } + break :brk argument; + } else argument; + defer if (free_argument) manager.allocator.free(argument); + + // Attempt to open the existing node_modules folder + var root_node_modules = switch (bun.sys.openatOSPath(bun.FD.cwd(), bun.OSPathLiteral("node_modules"), bun.O.DIRECTORY | bun.O.RDONLY, 0o755)) { + .result => |fd| std.fs.Dir{ .fd = fd.cast() }, + .err => |e| { + Output.prettyError( + "error: failed to open root node_modules folder: {}\n", + .{e}, + ); + Global.crash(); + }, + }; + defer root_node_modules.close(); + + var iterator = Lockfile.Tree.Iterator(.node_modules).init(lockfile); + var resolution_buf: [1024]u8 = undefined; + const _cache_dir: std.fs.Dir, const _cache_dir_subpath: stringZ, const _changes_dir: []const u8, const _pkg: Package = switch (arg_kind) { + .path => result: { + const package_json_source: *const logger.Source = &brk: { + const package_json_path = bun.path.joinZ(&[_][]const u8{ argument, "package.json" }, .auto); + + switch (bun.sys.File.toSource(package_json_path, manager.allocator, .{})) { + .result => |s| break :brk s, + .err => |e| { + Output.err(e, "failed to read {s}", .{bun.fmt.quote(package_json_path)}); + Global.crash(); + }, + } + }; + defer manager.allocator.free(package_json_source.contents); + + initializeStore(); + const json = JSON.parsePackageJSONUTF8(package_json_source, manager.log, manager.allocator) catch |err| { + manager.log.print(Output.errorWriter()) catch {}; + Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); + 
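// A package.json that fails to parse means we cannot identify the package to patch, so bail out. +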
Global.crash(); + }; + + const version = version: { + if (json.asProperty("version")) |v| { + if (v.expr.asString(manager.allocator)) |s| break :version s; + } + Output.prettyError( + "error: invalid package.json, missing or invalid property \"version\": {s}\n", + .{package_json_source.path.text}, + ); + Global.crash(); + }; + + var resolver: void = {}; + var package = Lockfile.Package{}; + try package.parseWithJSON(lockfile, manager, manager.allocator, manager.log, package_json_source, json, void, &resolver, Features.folder); + + const name = lockfile.str(&package.name); + const actual_package = switch (lockfile.package_index.get(package.name_hash) orelse { + Output.prettyError( + "error: failed to find package in lockfile package index, this is a bug in Bun. Please file a GitHub issue.\n", + .{}, + ); + Global.crash(); + }) { + .id => |id| lockfile.packages.get(id), + .ids => |ids| brk: { + for (ids.items) |id| { + const pkg = lockfile.packages.get(id); + const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; + if (std.mem.eql(u8, resolution_label, version)) { + break :brk pkg; + } + } + Output.prettyError("error: could not find package with name: {s}\n", .{ + package.name.slice(lockfile.buffers.string_bytes.items), + }); + Global.crash(); + }, + }; + + const cache_result = manager.computeCacheDirAndSubpath( + name, + &actual_package.resolution, + &folder_path_buf, + null, + ); + const cache_dir = cache_result.cache_dir; + const cache_dir_subpath = cache_result.cache_dir_subpath; + + const changes_dir = argument; + + break :result .{ cache_dir, cache_dir_subpath, changes_dir, actual_package }; + }, + .name_and_version => brk: { + const name, const version = Dependency.splitNameAndMaybeVersion(argument); + const pkg_id, const node_modules = pkgInfoForNameAndVersion(lockfile, &iterator, argument, name, version); + + const changes_dir = bun.path.joinZBuf(pathbuf[0..], &[_][]const u8{ + node_modules.relative_path, + name, + }, .auto); + const pkg = lockfile.packages.get(pkg_id); + + const cache_result = manager.computeCacheDirAndSubpath( + pkg.name.slice(lockfile.buffers.string_bytes.items), + &pkg.resolution, + &folder_path_buf, + null, + ); + const cache_dir = cache_result.cache_dir; + const cache_dir_subpath = cache_result.cache_dir_subpath; + break :brk .{ cache_dir, cache_dir_subpath, changes_dir, pkg }; + }, + }; + + // zls + const cache_dir: std.fs.Dir = _cache_dir; + const cache_dir_subpath: stringZ = _cache_dir_subpath; + const changes_dir: []const u8 = _changes_dir; + const pkg: Package = _pkg; + + const name = pkg.name.slice(lockfile.buffers.string_bytes.items); + const resolution_label = std.fmt.bufPrint(&resolution_buf, "{s}@{}", .{ name, pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix) }) catch unreachable; + + const patchfile_contents = brk: { + const new_folder = changes_dir; + var buf2: bun.PathBuffer = undefined; + var buf3: bun.PathBuffer = undefined; + const old_folder = old_folder: { + const cache_dir_path = switch (bun.sys.getFdPath(.fromStdDir(cache_dir), &buf2)) { + .result => |s| s, + .err => |e| { + Output.err(e, "failed to read from cache", .{}); + Global.crash(); + }, + }; + break :old_folder bun.path.join(&[_][]const u8{ + cache_dir_path, + cache_dir_subpath, + }, .posix); + }; + + const random_tempdir = bun.span(bun.fs.FileSystem.instance.tmpname("node_modules_tmp", buf2[0..], bun.fastRandom()) catch |e| { + Output.err(e, "failed to make tempdir", 
.{}); + Global.crash(); + }); + + // If the package has nested a node_modules folder, we don't want this to + // appear in the patch file when we run git diff. + // + // There isn't an option to exclude it with `git diff --no-index`, so we + // will `rename()` it out and back again. + const has_nested_node_modules = has_nested_node_modules: { + var new_folder_handle = std.fs.cwd().openDir(new_folder, .{}) catch |e| { + Output.err(e, "failed to open directory {s}", .{new_folder}); + Global.crash(); + }; + defer new_folder_handle.close(); + + if (bun.sys.renameatConcurrently( + .fromStdDir(new_folder_handle), + "node_modules", + .fromStdDir(root_node_modules), + random_tempdir, + .{ .move_fallback = true }, + ).asErr()) |_| break :has_nested_node_modules false; + + break :has_nested_node_modules true; + }; + + const patch_tag_tmpname = bun.span(bun.fs.FileSystem.instance.tmpname("patch_tmp", buf3[0..], bun.fastRandom()) catch |e| { + Output.err(e, "failed to make tempdir", .{}); + Global.crash(); + }); + + var bunpatchtagbuf: BuntagHashBuf = undefined; + // If the package was already patched then it might have a ".bun-tag-XXXXXXXX" + // we need to rename this out and back too. + const bun_patch_tag: ?[:0]const u8 = has_bun_patch_tag: { + const name_and_version_hash = String.Builder.stringHash(resolution_label); + const patch_tag = patch_tag: { + if (lockfile.patched_dependencies.get(name_and_version_hash)) |patchdep| { + if (patchdep.patchfileHash()) |hash| { + break :patch_tag buntaghashbuf_make(&bunpatchtagbuf, hash); + } + } + break :has_bun_patch_tag null; + }; + var new_folder_handle = std.fs.cwd().openDir(new_folder, .{}) catch |e| { + Output.err(e, "failed to open directory {s}", .{new_folder}); + Global.crash(); + }; + defer new_folder_handle.close(); + + if (bun.sys.renameatConcurrently( + .fromStdDir(new_folder_handle), + patch_tag, + .fromStdDir(root_node_modules), + patch_tag_tmpname, + .{ .move_fallback = true }, + ).asErr()) |e| { + Output.warn("failed renaming the bun patch tag, this may cause issues: {}", .{e}); + break :has_bun_patch_tag null; + } + break :has_bun_patch_tag patch_tag; + }; + defer { + if (has_nested_node_modules or bun_patch_tag != null) { + var new_folder_handle = std.fs.cwd().openDir(new_folder, .{}) catch |e| { + Output.prettyError( + "error: failed to open directory {s} {s}\n", + .{ new_folder, @errorName(e) }, + ); + Global.crash(); + }; + defer new_folder_handle.close(); + + if (has_nested_node_modules) { + if (bun.sys.renameatConcurrently( + .fromStdDir(root_node_modules), + random_tempdir, + .fromStdDir(new_folder_handle), + "node_modules", + .{ .move_fallback = true }, + ).asErr()) |e| { + Output.warn("failed renaming nested node_modules folder, this may cause issues: {}", .{e}); + } + } + + if (bun_patch_tag) |patch_tag| { + if (bun.sys.renameatConcurrently( + .fromStdDir(root_node_modules), + patch_tag_tmpname, + .fromStdDir(new_folder_handle), + patch_tag, + .{ .move_fallback = true }, + ).asErr()) |e| { + Output.warn("failed renaming the bun patch tag, this may cause issues: {}", .{e}); + } + } + } + } + + var cwdbuf: bun.PathBuffer = undefined; + const cwd = switch (bun.sys.getcwdZ(&cwdbuf)) { + .result => |fd| fd, + .err => |e| { + Output.prettyError( + "error: failed to get cwd path {}\n", + .{e}, + ); + Global.crash(); + }, + }; + var gitbuf: bun.PathBuffer = undefined; + const git = bun.which(&gitbuf, bun.getenvZ("PATH") orelse "", cwd, "git") orelse { + Output.prettyError( + "error: git must be installed to use `bun patch --commit` \n", + 
.{}, + ); + Global.crash(); + }; + const paths = bun.patch.gitDiffPreprocessPaths(bun.default_allocator, old_folder, new_folder, false); + const opts = bun.patch.spawnOpts(paths[0], paths[1], cwd, git, &manager.event_loop); + + var spawn_result = switch (bun.spawnSync(&opts) catch |e| { + Output.prettyError( + "error: failed to make diff {s}\n", + .{@errorName(e)}, + ); + Global.crash(); + }) { + .result => |r| r, + .err => |e| { + Output.prettyError( + "error: failed to make diff {}\n", + .{e}, + ); + Global.crash(); + }, + }; + + const contents = switch (bun.patch.diffPostProcess(&spawn_result, paths[0], paths[1]) catch |e| { + Output.prettyError( + "error: failed to make diff {s}\n", + .{@errorName(e)}, + ); + Global.crash(); + }) { + .result => |stdout| stdout, + .err => |stderr| { + defer stderr.deinit(); + const Truncate = struct { + stderr: std.ArrayList(u8), + + pub fn format( + this: *const @This(), + comptime _: []const u8, + _: std.fmt.FormatOptions, + writer: anytype, + ) !void { + const truncate_stderr = this.stderr.items.len > 256; + if (truncate_stderr) { + try writer.print("{s}... ({d} more bytes)", .{ this.stderr.items[0..256], this.stderr.items.len - 256 }); + } else try writer.print("{s}", .{this.stderr.items[0..]}); + } + }; + Output.prettyError( + "error: failed to make diff {}\n", + .{ + Truncate{ .stderr = stderr }, + }, + ); + Global.crash(); + }, + }; + + if (contents.items.len == 0) { + Output.pretty("\nNo changes detected, comparing {s} to {s}\n", .{ old_folder, new_folder }); + Output.flush(); + contents.deinit(); + return null; + } + + break :brk contents; + }; + defer patchfile_contents.deinit(); + + // write the patch contents to temp file then rename + var tmpname_buf: [1024]u8 = undefined; + const tempfile_name = bun.span(try bun.fs.FileSystem.instance.tmpname("tmp", &tmpname_buf, bun.fastRandom())); + const tmpdir = manager.getTemporaryDirectory(); + const tmpfd = switch (bun.sys.openat( + .fromStdDir(tmpdir), + tempfile_name, + bun.O.RDWR | bun.O.CREAT, + 0o666, + )) { + .result => |fd| fd, + .err => |e| { + Output.err(e, "failed to open temp file", .{}); + Global.crash(); + }, + }; + defer tmpfd.close(); + + if (bun.sys.File.writeAll(.{ .handle = tmpfd }, patchfile_contents.items).asErr()) |e| { + Output.err(e, "failed to write patch to temp file", .{}); + Global.crash(); + } + + @memcpy(resolution_buf[resolution_label.len .. resolution_label.len + ".patch".len], ".patch"); + var patch_filename: []const u8 = resolution_buf[0 .. 
resolution_label.len + ".patch".len]; + var deinit = false; + if (escapePatchFilename(manager.allocator, patch_filename)) |escaped| { + deinit = true; + patch_filename = escaped; + } + defer if (deinit) manager.allocator.free(patch_filename); + + const path_in_patches_dir = bun.path.joinZ( + &[_][]const u8{ + manager.options.patch_features.commit.patches_dir, + patch_filename, + }, + .posix, + ); + + var nodefs = bun.JSC.Node.fs.NodeFS{}; + const args = bun.JSC.Node.fs.Arguments.Mkdir{ + .path = .{ .string = bun.PathString.init(manager.options.patch_features.commit.patches_dir) }, + }; + if (nodefs.mkdirRecursive(args).asErr()) |e| { + Output.err(e, "failed to make patches dir {}", .{bun.fmt.quote(args.path.slice())}); + Global.crash(); + } + + // rename to patches dir + if (bun.sys.renameatConcurrently( + .fromStdDir(tmpdir), + tempfile_name, + bun.FD.cwd(), + path_in_patches_dir, + .{ .move_fallback = true }, + ).asErr()) |e| { + Output.err(e, "failed renaming patch file to patches dir", .{}); + Global.crash(); + } + + const patch_key = std.fmt.allocPrint(manager.allocator, "{s}", .{resolution_label}) catch bun.outOfMemory(); + const patchfile_path = manager.allocator.dupe(u8, path_in_patches_dir) catch bun.outOfMemory(); + _ = bun.sys.unlink(bun.path.joinZ(&[_][]const u8{ changes_dir, ".bun-patch-tag" }, .auto)); + + return .{ + .patch_key = patch_key, + .patchfile_path = patchfile_path, + .not_in_workspace_root = not_in_workspace_root, + }; +} + +fn patchCommitGetVersion( + buf: *[1024]u8, + patch_tag_path: [:0]const u8, +) bun.sys.Maybe(string) { + const patch_tag_fd = switch (bun.sys.open(patch_tag_path, bun.O.RDONLY, 0)) { + .result => |fd| fd, + .err => |e| return .{ .err = e }, + }; + defer { + patch_tag_fd.close(); + // we actually need to delete this + _ = bun.sys.unlink(patch_tag_path); + } + + const version = switch (bun.sys.File.readFillBuf(.{ .handle = patch_tag_fd }, buf[0..])) { + .result => |v| v, + .err => |e| return .{ .err = e }, + }; + + // maybe if someone opens it in their editor and hits save a newline will be inserted, + // so trim that off + return .{ .result = std.mem.trimRight(u8, version, " \n\r\t") }; +} + +fn escapePatchFilename(allocator: std.mem.Allocator, name: []const u8) ?[]const u8 { + const EscapeVal = enum { + @"/", + @"\\", + @" ", + @"\n", + @"\r", + @"\t", + // @".", + other, + + pub fn escaped(this: @This()) ?[]const u8 { + return switch (this) { + .@"/" => "%2F", + .@"\\" => "%5c", + .@" " => "%20", + .@"\n" => "%0A", + .@"\r" => "%0D", + .@"\t" => "%09", + // .@"." => "%2E", + .other => null, + }; + } + }; + const ESCAPE_TABLE: [256]EscapeVal = comptime brk: { + var table: [256]EscapeVal = [_]EscapeVal{.other} ** 256; + const ty = @typeInfo(EscapeVal); + for (ty.@"enum".fields) |field| { + if (field.name.len == 1) { + const c = field.name[0]; + table[c] = @enumFromInt(field.value); + } + } + break :brk table; + }; + var count: usize = 0; + for (name) |c| count += if (ESCAPE_TABLE[c].escaped()) |e| e.len else 1; + if (count == name.len) return null; + var buf = allocator.alloc(u8, count) catch bun.outOfMemory(); + var i: usize = 0; + for (name) |c| { + const e = ESCAPE_TABLE[c].escaped() orelse &[_]u8{c}; + @memcpy(buf[i..][0..e.len], e); + i += e.len; + } + return buf; +} + +/// 1. Arg is either: +/// - name and possibly version (e.g. "is-even" or "is-even@1.0.0") +/// - path to package in node_modules +/// 2. Calculate cache dir for package +/// 3. Overwrite the input package with the one from the cache (cuz it could be hardlinked) +/// 4. 
Print to user +pub fn preparePatch(manager: *PackageManager) !void { + const strbuf = manager.lockfile.buffers.string_bytes.items; + var argument = manager.options.positionals[1]; + + const arg_kind: PatchArgKind = PatchArgKind.fromArg(argument); + + var folder_path_buf: bun.PathBuffer = undefined; + var iterator = Lockfile.Tree.Iterator(.node_modules).init(manager.lockfile); + var resolution_buf: [1024]u8 = undefined; + + var win_normalizer: if (bun.Environment.isWindows) bun.PathBuffer else struct {} = undefined; + + const not_in_workspace_root = manager.root_package_id.get(manager.lockfile, manager.workspace_name_hash) != 0; + var free_argument = false; + argument = if (arg_kind == .path and + not_in_workspace_root and + (!bun.path.Platform.posix.isAbsolute(argument) or (bun.Environment.isWindows and !bun.path.Platform.windows.isAbsolute(argument)))) + brk: { + if (pathArgumentRelativeToRootWorkspacePackage(manager, manager.lockfile, argument)) |rel_path| { + free_argument = true; + break :brk rel_path; + } + break :brk argument; + } else argument; + defer if (free_argument) manager.allocator.free(argument); + + const cache_dir: std.fs.Dir, const cache_dir_subpath: []const u8, const module_folder: []const u8, const pkg_name: []const u8 = switch (arg_kind) { + .path => brk: { + var lockfile = manager.lockfile; + + const package_json_source: *const logger.Source = &src: { + const package_json_path = bun.path.joinZ(&[_][]const u8{ argument, "package.json" }, .auto); + + switch (bun.sys.File.toSource(package_json_path, manager.allocator, .{})) { + .result => |s| break :src s, + .err => |e| { + Output.err(e, "failed to read {s}", .{bun.fmt.quote(package_json_path)}); + Global.crash(); + }, + } + }; + defer manager.allocator.free(package_json_source.contents); + + initializeStore(); + const json = JSON.parsePackageJSONUTF8(package_json_source, manager.log, manager.allocator) catch |err| { + manager.log.print(Output.errorWriter()) catch {}; + Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); + Global.crash(); + }; + + const version = version: { + if (json.asProperty("version")) |v| { + if (v.expr.asString(manager.allocator)) |s| break :version s; + } + Output.prettyError( + "error: invalid package.json, missing or invalid property \"version\": {s}\n", + .{package_json_source.path.text}, + ); + Global.crash(); + }; + + var resolver: void = {}; + var package = Lockfile.Package{}; + try package.parseWithJSON(lockfile, manager, manager.allocator, manager.log, package_json_source, json, void, &resolver, Features.folder); + + const name = lockfile.str(&package.name); + const actual_package = switch (lockfile.package_index.get(package.name_hash) orelse { + Output.prettyError( + "error: failed to find package in lockfile package index, this is a bug in Bun. 
Please file a GitHub issue.\n", + .{}, + ); + Global.crash(); + }) { + .id => |id| lockfile.packages.get(id), + .ids => |ids| id: { + for (ids.items) |id| { + const pkg = lockfile.packages.get(id); + const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; + if (std.mem.eql(u8, resolution_label, version)) { + break :id pkg; + } + } + Output.prettyError("error: could not find package with name: {s}\n", .{ + package.name.slice(lockfile.buffers.string_bytes.items), + }); + Global.crash(); + }, + }; + + const existing_patchfile_hash = existing_patchfile_hash: { + var __sfb = std.heap.stackFallback(1024, manager.allocator); + const allocator = __sfb.get(); + const name_and_version = std.fmt.allocPrint(allocator, "{s}@{}", .{ name, actual_package.resolution.fmt(strbuf, .posix) }) catch unreachable; + defer allocator.free(name_and_version); + const name_and_version_hash = String.Builder.stringHash(name_and_version); + if (lockfile.patched_dependencies.get(name_and_version_hash)) |patched_dep| { + if (patched_dep.patchfileHash()) |hash| break :existing_patchfile_hash hash; + } + break :existing_patchfile_hash null; + }; + + const cache_result = manager.computeCacheDirAndSubpath( + name, + &actual_package.resolution, + &folder_path_buf, + existing_patchfile_hash, + ); + const cache_dir = cache_result.cache_dir; + const cache_dir_subpath = cache_result.cache_dir_subpath; + + const buf = if (comptime bun.Environment.isWindows) bun.path.pathToPosixBuf(u8, argument, win_normalizer[0..]) else argument; + + break :brk .{ + cache_dir, + cache_dir_subpath, + buf, + name, + }; + }, + .name_and_version => brk: { + const pkg_maybe_version_to_patch = argument; + const name, const version = Dependency.splitNameAndMaybeVersion(pkg_maybe_version_to_patch); + const pkg_id, const folder = pkgInfoForNameAndVersion(manager.lockfile, &iterator, pkg_maybe_version_to_patch, name, version); + + const pkg = manager.lockfile.packages.get(pkg_id); + const pkg_name = pkg.name.slice(strbuf); + + const existing_patchfile_hash = existing_patchfile_hash: { + var __sfb = std.heap.stackFallback(1024, manager.allocator); + const sfballoc = __sfb.get(); + const name_and_version = std.fmt.allocPrint(sfballoc, "{s}@{}", .{ name, pkg.resolution.fmt(strbuf, .posix) }) catch unreachable; + defer sfballoc.free(name_and_version); + const name_and_version_hash = String.Builder.stringHash(name_and_version); + if (manager.lockfile.patched_dependencies.get(name_and_version_hash)) |patched_dep| { + if (patched_dep.patchfileHash()) |hash| break :existing_patchfile_hash hash; + } + break :existing_patchfile_hash null; + }; + + const cache_result = manager.computeCacheDirAndSubpath( + pkg_name, + &pkg.resolution, + &folder_path_buf, + existing_patchfile_hash, + ); + + const cache_dir = cache_result.cache_dir; + const cache_dir_subpath = cache_result.cache_dir_subpath; + + const module_folder_ = bun.path.join(&[_][]const u8{ folder.relative_path, name }, .auto); + const buf = if (comptime bun.Environment.isWindows) bun.path.pathToPosixBuf(u8, module_folder_, win_normalizer[0..]) else module_folder_; + + break :brk .{ + cache_dir, + cache_dir_subpath, + buf, + pkg_name, + }; + }, + }; + + // The package may be installed using the hard link method, + // meaning that changes to the folder will also change the package in the cache. 
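+ // (A hard-linked install shares inodes with the cache, so editing the file in node_modules would silently edit the cached copy too.)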
+ // + // So we will overwrite the folder by directly copying the package in cache into it + overwritePackageInNodeModulesFolder(manager, cache_dir, cache_dir_subpath, module_folder) catch |e| { + Output.prettyError( + "error: failed to overwrite folder in node_modules: {s}\n", + .{@errorName(e)}, + ); + Global.crash(); + }; + + if (not_in_workspace_root) { + var bufn: bun.PathBuffer = undefined; + Output.pretty("\nTo patch {s}, edit the following folder:\n\n {s}\n", .{ pkg_name, bun.path.joinStringBuf(bufn[0..], &[_][]const u8{ bun.fs.FileSystem.instance.topLevelDirWithoutTrailingSlash(), module_folder }, .posix) }); + Output.pretty("\nOnce you're done with your changes, run:\n\n bun patch --commit '{s}'\n", .{bun.path.joinStringBuf(bufn[0..], &[_][]const u8{ bun.fs.FileSystem.instance.topLevelDirWithoutTrailingSlash(), module_folder }, .posix)}); + } else { + Output.pretty("\nTo patch {s}, edit the following folder:\n\n {s}\n", .{ pkg_name, module_folder }); + Output.pretty("\nOnce you're done with your changes, run:\n\n bun patch --commit '{s}'\n", .{module_folder}); + } + + return; +} + +fn overwritePackageInNodeModulesFolder( + manager: *PackageManager, + cache_dir: std.fs.Dir, + cache_dir_subpath: []const u8, + node_modules_folder_path: []const u8, +) !void { + var node_modules_folder = try std.fs.cwd().openDir(node_modules_folder_path, .{ .iterate = true }); + defer node_modules_folder.close(); + + const IGNORED_PATHS: []const bun.OSPathSlice = &[_][]const bun.OSPathChar{ + bun.OSPathLiteral("node_modules"), + bun.OSPathLiteral(".git"), + bun.OSPathLiteral("CMakeFiles"), + }; + + const FileCopier = struct { + pub fn copy( + destination_dir_: std.fs.Dir, + walker: *Walker, + in_dir: if (bun.Environment.isWindows) []const u16 else void, + out_dir: if (bun.Environment.isWindows) []const u16 else void, + buf1: if (bun.Environment.isWindows) []u16 else void, + buf2: if (bun.Environment.isWindows) []u16 else void, + tmpdir_in_node_modules: if (bun.Environment.isWindows) std.fs.Dir else void, + ) !u32 { + var real_file_count: u32 = 0; + + var copy_file_state: bun.CopyFileState = .{}; + var pathbuf: bun.PathBuffer = undefined; + var pathbuf2: bun.PathBuffer = undefined; + + while (try walker.next().unwrap()) |entry| { + if (entry.kind != .file) continue; + real_file_count += 1; + const createFile = std.fs.Dir.createFile; + + // 1. rename original file in node_modules to tmp_dir_in_node_modules + // 2. create the file again + // 3. copy cache file to the newly re-created file + // 4.
profit + if (comptime bun.Environment.isWindows) { + var tmpbuf: [1024]u8 = undefined; + const basename = bun.strings.fromWPath(pathbuf2[0..], entry.basename); + const tmpname = bun.span(bun.fs.FileSystem.instance.tmpname(basename, tmpbuf[0..], bun.fastRandom()) catch |e| { + Output.prettyError("error: copying file {s}", .{@errorName(e)}); + Global.crash(); + }); + + const entrypath = bun.strings.fromWPath(pathbuf[0..], entry.path); + pathbuf[entrypath.len] = 0; + const entrypathZ = pathbuf[0..entrypath.len :0]; + + if (bun.sys.renameatConcurrently( + .fromStdDir(destination_dir_), + entrypathZ, + .fromStdDir(tmpdir_in_node_modules), + tmpname, + .{ .move_fallback = true }, + ).asErr()) |e| { + Output.prettyError("error: copying file {}", .{e}); + Global.crash(); + } + + var outfile = createFile(destination_dir_, entrypath, .{}) catch |e| { + Output.prettyError("error: failed to create file {s} ({s})", .{ entrypath, @errorName(e) }); + Global.crash(); + }; + outfile.close(); + + const infile_path = bun.path.joinStringBufWZ(buf1, &[_][]const u16{ in_dir, entry.path }, .auto); + const outfile_path = bun.path.joinStringBufWZ(buf2, &[_][]const u16{ out_dir, entry.path }, .auto); + + bun.copyFileWithState(infile_path, outfile_path, ©_file_state).unwrap() catch |err| { + Output.prettyError("{s}: copying file {}", .{ @errorName(err), bun.fmt.fmtOSPath(entry.path, .{}) }); + Global.crash(); + }; + } else if (comptime Environment.isPosix) { + var in_file = try entry.dir.openat(entry.basename, bun.O.RDONLY, 0).unwrap(); + defer in_file.close(); + + @memcpy(pathbuf[0..entry.path.len], entry.path); + pathbuf[entry.path.len] = 0; + + if (bun.sys.unlinkat( + .fromStdDir(destination_dir_), + pathbuf[0..entry.path.len :0], + ).asErr()) |e| { + Output.prettyError("error: copying file {}", .{e.withPath(entry.path)}); + Global.crash(); + } + + var outfile = try createFile(destination_dir_, entry.path, .{}); + defer outfile.close(); + + const stat = in_file.stat().unwrap() catch continue; + _ = bun.c.fchmod(outfile.handle, @intCast(stat.mode)); + + bun.copyFileWithState(in_file, .fromStdFile(outfile), ©_file_state).unwrap() catch |err| { + Output.prettyError("{s}: copying file {}", .{ @errorName(err), bun.fmt.fmtOSPath(entry.path, .{}) }); + Global.crash(); + }; + } + } + + return real_file_count; + } + }; + + var pkg_in_cache_dir = try cache_dir.openDir(cache_dir_subpath, .{ .iterate = true }); + defer pkg_in_cache_dir.close(); + var walker = Walker.walk(.fromStdDir(pkg_in_cache_dir), manager.allocator, &.{}, IGNORED_PATHS) catch bun.outOfMemory(); + defer walker.deinit(); + + var buf1: if (bun.Environment.isWindows) bun.WPathBuffer else void = undefined; + var buf2: if (bun.Environment.isWindows) bun.WPathBuffer else void = undefined; + var in_dir: if (bun.Environment.isWindows) []const u16 else void = undefined; + var out_dir: if (bun.Environment.isWindows) []const u16 else void = undefined; + + if (comptime bun.Environment.isWindows) { + const inlen = bun.windows.GetFinalPathNameByHandleW(pkg_in_cache_dir.fd, &buf1, buf1.len, 0); + if (inlen == 0) { + const e = bun.windows.Win32Error.get(); + const err = if (e.toSystemErrno()) |sys_err| bun.errnoToZigErr(sys_err) else error.Unexpected; + Output.prettyError("error: copying file {}", .{err}); + Global.crash(); + } + in_dir = buf1[0..inlen]; + const outlen = bun.windows.GetFinalPathNameByHandleW(node_modules_folder.fd, &buf2, buf2.len, 0); + if (outlen == 0) { + const e = bun.windows.Win32Error.get(); + const err = if (e.toSystemErrno()) |sys_err| 
bun.errnoToZigErr(sys_err) else error.Unexpected; + Output.prettyError("error: copying file {}", .{err}); + Global.crash(); + } + out_dir = buf2[0..outlen]; + var tmpbuf: [1024]u8 = undefined; + const tmpname = bun.span(bun.fs.FileSystem.instance.tmpname("tffbp", tmpbuf[0..], bun.fastRandom()) catch |e| { + Output.prettyError("error: copying file {s}", .{@errorName(e)}); + Global.crash(); + }); + const temp_folder_in_node_modules = try node_modules_folder.makeOpenPath(tmpname, .{}); + defer { + node_modules_folder.deleteTree(tmpname) catch {}; + } + _ = try FileCopier.copy( + node_modules_folder, + &walker, + in_dir, + out_dir, + &buf1, + &buf2, + temp_folder_in_node_modules, + ); + } else if (Environment.isPosix) { + _ = try FileCopier.copy( + node_modules_folder, + &walker, + {}, + {}, + {}, + {}, + {}, + ); + } +} + +fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator(.node_modules), ids: []const IdPair) !?Lockfile.Tree.Iterator(.node_modules).Next { + while (iterator.next(null)) |node_modules| { + for (ids) |id| { + _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, id[0]) orelse continue; + return node_modules; + } + } + return null; +} + +fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator(.node_modules), dependency_id: DependencyID) !?Lockfile.Tree.Iterator(.node_modules).Next { + while (iterator.next(null)) |node_modules| { + _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, dependency_id) orelse continue; + return node_modules; + } + + return null; +} + +const IdPair = struct { DependencyID, PackageID }; + +fn pkgInfoForNameAndVersion( + lockfile: *Lockfile, + iterator: *Lockfile.Tree.Iterator(.node_modules), + pkg_maybe_version_to_patch: []const u8, + name: []const u8, + version: ?[]const u8, +) struct { PackageID, Lockfile.Tree.Iterator(.node_modules).Next } { + var sfb = std.heap.stackFallback(@sizeOf(IdPair) * 4, lockfile.allocator); + var pairs = std.ArrayList(IdPair).initCapacity(sfb.get(), 8) catch bun.outOfMemory(); + defer pairs.deinit(); + + const name_hash = String.Builder.stringHash(name); + + const strbuf = lockfile.buffers.string_bytes.items; + + var buf: [1024]u8 = undefined; + const dependencies = lockfile.buffers.dependencies.items; + + for (dependencies, 0..) |dep, dep_id| { + if (dep.name_hash != name_hash) continue; + const pkg_id = lockfile.buffers.resolutions.items[dep_id]; + if (pkg_id == invalid_package_id) continue; + const pkg = lockfile.packages.get(pkg_id); + if (version) |v| { + const label = std.fmt.bufPrint(buf[0..], "{}", .{pkg.resolution.fmt(strbuf, .posix)}) catch @panic("Resolution name too long"); + if (std.mem.eql(u8, label, v)) { + pairs.append(.{ @intCast(dep_id), pkg_id }) catch bun.outOfMemory(); + } + } else { + pairs.append(.{ @intCast(dep_id), pkg_id }) catch bun.outOfMemory(); + } + } + + if (pairs.items.len == 0) { + Output.prettyErrorln("\nerror: package {s} not found", .{pkg_maybe_version_to_patch}); + Global.crash(); + } + + // user supplied a version e.g.
`is-even@1.0.0` + if (version != null) { + if (pairs.items.len == 1) { + const dep_id, const pkg_id = pairs.items[0]; + const folder = (try nodeModulesFolderForDependencyID(iterator, dep_id)) orelse { + Output.prettyError( + "error: could not find the folder for {s} in node_modules\n", + .{pkg_maybe_version_to_patch}, + ); + Global.crash(); + }; + return .{ + pkg_id, + folder, + }; + } + + // we found multiple dependents of the supplied pkg + version + // the final package in the node_modules might be hoisted + // so we are going to try looking for each dep id in node_modules + _, const pkg_id = pairs.items[0]; + const folder = (try nodeModulesFolderForDependencyIDs(iterator, pairs.items)) orelse { + Output.prettyError( + "error: could not find the folder for {s} in node_modules\n", + .{pkg_maybe_version_to_patch}, + ); + Global.crash(); + }; + + return .{ + pkg_id, + folder, + }; + } + + // Otherwise the user did not supply a version, just the pkg name + + // Only one match, let's use it + if (pairs.items.len == 1) { + const dep_id, const pkg_id = pairs.items[0]; + const folder = (try nodeModulesFolderForDependencyID(iterator, dep_id)) orelse { + Output.prettyError( + "error: could not find the folder for {s} in node_modules\n", + .{pkg_maybe_version_to_patch}, + ); + Global.crash(); + }; + return .{ + pkg_id, + folder, + }; + } + + // Otherwise we have multiple matches + // + // There are two cases: + // a) the multiple matches are all the same underlying package (this happens because there could be multiple dependents of the same package) + // b) the matches are actually different packages, we'll prompt the user to select which one + + _, const pkg_id = pairs.items[0]; + const count = count: { + var count: u32 = 0; + for (pairs.items) |pair| { + if (pair[1] == pkg_id) count += 1; + } + break :count count; + }; + + // Disambiguate case a) from b) + if (count == pairs.items.len) { + // It may be hoisted, so we'll try the first one that matches + const folder = (try nodeModulesFolderForDependencyIDs(iterator, pairs.items)) orelse { + Output.prettyError( + "error: could not find the folder for {s} in node_modules\n", + .{pkg_maybe_version_to_patch}, + ); + Global.crash(); + }; + return .{ + pkg_id, + folder, + }; + } + + Output.prettyErrorln( + "\nerror: Found multiple versions of {s}, please specify a precise version from the following list:\n", + .{name}, + ); + var i: usize = 0; + while (i < pairs.items.len) : (i += 1) { + _, const pkgid = pairs.items[i]; + if (pkgid == invalid_package_id) + continue; + + const pkg = lockfile.packages.get(pkgid); + + Output.prettyError(" {s}@{}\n", .{ pkg.name.slice(strbuf), pkg.resolution.fmt(strbuf, .posix) }); + + if (i + 1 < pairs.items.len) { + for (pairs.items[i + 1 ..]) |*p| { + if (p[1] == pkgid) { + p[1] = invalid_package_id; + } + } + } + } + Global.crash(); +} + +fn pathArgumentRelativeToRootWorkspacePackage(manager: *PackageManager, lockfile: *const Lockfile, argument: []const u8) ?[]const u8 { + const workspace_package_id = manager.root_package_id.get(lockfile, manager.workspace_name_hash); + if (workspace_package_id == 0) return null; + const workspace_res = lockfile.packages.items(.resolution)[workspace_package_id]; + const rel_path: []const u8 = workspace_res.value.workspace.slice(lockfile.buffers.string_bytes.items); + return bun.default_allocator.dupe(u8, bun.path.join(&[_][]const u8{ rel_path, argument }, .posix)) catch bun.outOfMemory(); +} + +const PatchArgKind = enum { + path, + name_and_version, + + pub fn fromArg(argument: []const 
u8) PatchArgKind { + if (bun.strings.containsComptime(argument, "node_modules/")) return .path; + if (bun.Environment.isWindows and bun.strings.hasPrefix(argument, "node_modules\\")) return .path; + return .name_and_version; + } +}; + +// @sortImports + +const Walker = @import("../../walker_skippable.zig"); +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const FD = bun.FD; +const Global = bun.Global; +const JSC = bun.JSC; +const JSON = bun.JSON; +const Output = bun.Output; +const default_allocator = bun.default_allocator; +const logger = bun.logger; +const string = bun.string; +const stringZ = bun.stringZ; +const strings = bun.strings; +const File = bun.sys.File; + +const Semver = bun.Semver; +const String = Semver.String; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const BuntagHashBuf = bun.install.BuntagHashBuf; +const Dependency = bun.install.Dependency; +const DependencyID = bun.install.DependencyID; +const Features = bun.install.Features; +const PackageID = bun.install.PackageID; +const Resolution = bun.install.Resolution; +const buntaghashbuf_make = bun.install.buntaghashbuf_make; +const initializeStore = bun.install.initializeStore; +const invalid_package_id = bun.install.invalid_package_id; + +const Lockfile = bun.install.Lockfile; +const Package = Lockfile.Package; + +const PackageManager = bun.install.PackageManager; +const Options = PackageManager.Options; diff --git a/src/install/PackageManager/processDependencyList.zig b/src/install/PackageManager/processDependencyList.zig new file mode 100644 index 0000000000..f2ce90170b --- /dev/null +++ b/src/install/PackageManager/processDependencyList.zig @@ -0,0 +1,352 @@ +pub const GitResolver = struct { + resolved: string, + resolution: *const Resolution, + dep_id: DependencyID, + new_name: []u8 = "", + + pub fn count(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) void { + builder.count(this.resolved); + } + + pub fn resolve(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) !Resolution { + var resolution = this.resolution.*; + resolution.value.github.resolved = builder.append(String, this.resolved); + return resolution; + } + + pub fn checkBundledDependencies() bool { + return true; + } +}; + +const TarballResolver = struct { + url: string, + resolution: *const Resolution, + + pub fn count(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) void { + builder.count(this.url); + } + + pub fn resolve(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) !Resolution { + var resolution = this.resolution.*; + switch (resolution.tag) { + .local_tarball => { + resolution.value.local_tarball = builder.append(String, this.url); + }, + .remote_tarball => { + resolution.value.remote_tarball = builder.append(String, this.url); + }, + else => unreachable, + } + return resolution; + } + + pub fn checkBundledDependencies() bool { + return true; + } +}; + +/// Returns a package if we need to drain dependencies +pub fn processExtractedTarballPackage( + manager: *PackageManager, + package_id: *PackageID, + dep_id: DependencyID, + resolution: *const Resolution, + data: *const ExtractData, + log_level: Options.LogLevel, +) ?Lockfile.Package { + switch (resolution.tag) { + .git, .github => { + var package = package: { + var resolver = GitResolver{ + .resolved = data.resolved, + .resolution = resolution, + .dep_id = dep_id, + }; + + var pkg = Lockfile.Package{}; + if (data.json) |json| { + const package_json_source
= &logger.Source.initPathString( + json.path, + json.buf, + ); + + pkg.parse( + manager.lockfile, + manager, + manager.allocator, + manager.log, + package_json_source, + GitResolver, + &resolver, + Features.npm, + ) catch |err| { + if (log_level != .silent) { + const string_buf = manager.lockfile.buffers.string_bytes.items; + Output.err(err, "failed to parse package.json for {}", .{ + resolution.fmtURL(string_buf), + }); + } + Global.crash(); + }; + + const has_scripts = pkg.scripts.hasAny() or brk: { + const dir = std.fs.path.dirname(json.path) orelse ""; + const binding_dot_gyp_path = Path.joinAbsStringZ( + dir, + &[_]string{"binding.gyp"}, + .auto, + ); + + break :brk Syscall.exists(binding_dot_gyp_path); + }; + + pkg.meta.setHasInstallScript(has_scripts); + break :package pkg; + } + + // package.json doesn't exist, no dependencies to worry about but we need to decide on a name for the dependency + var repo = switch (resolution.tag) { + .git => resolution.value.git, + .github => resolution.value.github, + else => unreachable, + }; + + const new_name = Repository.createDependencyNameFromVersionLiteral(manager.allocator, &repo, manager.lockfile, dep_id); + defer manager.allocator.free(new_name); + + { + var builder = manager.lockfile.stringBuilder(); + + builder.count(new_name); + resolver.count(*Lockfile.StringBuilder, &builder, undefined); + + builder.allocate() catch bun.outOfMemory(); + + const name = builder.append(ExternalString, new_name); + pkg.name = name.value; + pkg.name_hash = name.hash; + + pkg.resolution = resolver.resolve(*Lockfile.StringBuilder, &builder, undefined) catch unreachable; + } + + break :package pkg; + }; + + package = manager.lockfile.appendPackage(package) catch unreachable; + package_id.* = package.meta.id; + + if (package.dependencies.len > 0) { + manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch bun.outOfMemory(); + } + + return package; + }, + .local_tarball, .remote_tarball => { + const json = data.json.?; + const package_json_source = &logger.Source.initPathString( + json.path, + json.buf, + ); + var package = Lockfile.Package{}; + + var resolver: TarballResolver = .{ + .url = data.url, + .resolution = resolution, + }; + + package.parse( + manager.lockfile, + manager, + manager.allocator, + manager.log, + package_json_source, + TarballResolver, + &resolver, + Features.npm, + ) catch |err| { + if (log_level != .silent) { + const string_buf = manager.lockfile.buffers.string_bytes.items; + Output.prettyErrorln("error: expected package.json in {any} to be a JSON file: {s}\n", .{ + resolution.fmtURL(string_buf), + @errorName(err), + }); + } + Global.crash(); + }; + + const has_scripts = package.scripts.hasAny() or brk: { + const dir = std.fs.path.dirname(json.path) orelse ""; + const binding_dot_gyp_path = Path.joinAbsStringZ( + dir, + &[_]string{"binding.gyp"}, + .auto, + ); + + break :brk Syscall.exists(binding_dot_gyp_path); + }; + + package.meta.setHasInstallScript(has_scripts); + + package = manager.lockfile.appendPackage(package) catch unreachable; + package_id.* = package.meta.id; + + if (package.dependencies.len > 0) { + manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch bun.outOfMemory(); + } + + return package; + }, + else => if (data.json.?.buf.len > 0) { + const json = data.json.?; + const package_json_source = &logger.Source.initPathString( + json.path, + json.buf, + ); + initializeStore(); + const json_root = JSON.parsePackageJSONUTF8( + package_json_source, + manager.log, + 
manager.allocator, + ) catch |err| { + if (log_level != .silent) { + const string_buf = manager.lockfile.buffers.string_bytes.items; + Output.prettyErrorln("error: expected package.json in {any} to be a JSON file: {s}\n", .{ + resolution.fmtURL(string_buf), + @errorName(err), + }); + } + Global.crash(); + }; + var builder = manager.lockfile.stringBuilder(); + Lockfile.Package.Scripts.parseCount(manager.allocator, &builder, json_root); + builder.allocate() catch unreachable; + if (comptime Environment.allow_assert) bun.assert(package_id.* != invalid_package_id); + var scripts = manager.lockfile.packages.items(.scripts)[package_id.*]; + scripts.parseAlloc(manager.allocator, &builder, json_root); + scripts.filled = true; + }, + } + + return null; +} + +pub fn processDependencyListItem( + this: *PackageManager, + item: TaskCallbackContext, + any_root: ?*bool, + install_peer: bool, +) !void { + switch (item) { + .dependency => |dependency_id| { + const dependency = this.lockfile.buffers.dependencies.items[dependency_id]; + const resolution = this.lockfile.buffers.resolutions.items[dependency_id]; + + try this.enqueueDependencyWithMain( + dependency_id, + &dependency, + resolution, + install_peer, + ); + }, + .root_dependency => |dependency_id| { + const dependency = this.lockfile.buffers.dependencies.items[dependency_id]; + const resolution = this.lockfile.buffers.resolutions.items[dependency_id]; + + try this.enqueueDependencyWithMainAndSuccessFn( + dependency_id, + &dependency, + resolution, + install_peer, + assignRootResolution, + failRootResolution, + ); + if (any_root) |ptr| { + const new_resolution_id = this.lockfile.buffers.resolutions.items[dependency_id]; + if (new_resolution_id != resolution) { + ptr.* = true; + } + } + }, + else => {}, + } +} + +pub fn processPeerDependencyList( + this: *PackageManager, +) !void { + while (this.peer_dependencies.readItem()) |peer_dependency_id| { + const dependency = this.lockfile.buffers.dependencies.items[peer_dependency_id]; + const resolution = this.lockfile.buffers.resolutions.items[peer_dependency_id]; + + try this.enqueueDependencyWithMain( + peer_dependency_id, + &dependency, + resolution, + true, + ); + } +} + +pub fn processDependencyList( + this: *PackageManager, + dep_list: TaskCallbackList, + comptime Ctx: type, + ctx: Ctx, + comptime callbacks: anytype, + install_peer: bool, +) !void { + if (dep_list.items.len > 0) { + var dependency_list = dep_list; + var any_root = false; + for (dependency_list.items) |item| { + try this.processDependencyListItem(item, &any_root, install_peer); + } + + if (comptime @TypeOf(callbacks) != void and @TypeOf(callbacks.onResolve) != void) { + if (any_root) { + callbacks.onResolve(ctx); + } + } + + dependency_list.deinit(this.allocator); + } +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const Global = bun.Global; +const JSAst = bun.JSAst; +const JSON = bun.JSON; +const Output = bun.Output; +const Path = bun.path; +const Syscall = bun.sys; +const logger = bun.logger; +const string = bun.string; + +const Semver = bun.Semver; +const ExternalString = Semver.ExternalString; +const String = Semver.String; + +const DependencyID = bun.install.DependencyID; +const ExtractData = bun.install.ExtractData; +const Features = bun.install.Features; +const PackageID = bun.install.PackageID; +const Repository = bun.install.Repository; +const Resolution = bun.install.Resolution; +const TaskCallbackContext = bun.install.TaskCallbackContext; +const 
initializeStore = bun.install.initializeStore; +const invalid_package_id = bun.install.invalid_package_id; + +const Lockfile = bun.install.Lockfile; +const Package = Lockfile.Package; + +const PackageManager = bun.install.PackageManager; +const Options = PackageManager.Options; +const TaskCallbackList = PackageManager.TaskCallbackList; +const assignRootResolution = PackageManager.assignRootResolution; +const failRootResolution = PackageManager.failRootResolution; diff --git a/src/install/PackageManager/runTasks.zig b/src/install/PackageManager/runTasks.zig new file mode 100644 index 0000000000..000f3d8907 --- /dev/null +++ b/src/install/PackageManager/runTasks.zig @@ -0,0 +1,1092 @@ +pub fn runTasks( + manager: *PackageManager, + comptime Ctx: type, + extract_ctx: Ctx, + comptime callbacks: anytype, + install_peer: bool, + log_level: Options.LogLevel, +) anyerror!void { + var has_updated_this_run = false; + var has_network_error = false; + + var timestamp_this_tick: ?u32 = null; + + defer { + manager.drainDependencyList(); + + if (log_level.showProgress()) { + manager.startProgressBarIfNone(); + + if (@hasField(@TypeOf(callbacks), "progress_bar") and callbacks.progress_bar == true) { + const completed_items = manager.total_tasks - manager.pendingTaskCount(); + if (completed_items != manager.downloads_node.?.unprotected_completed_items or has_updated_this_run) { + manager.downloads_node.?.setCompletedItems(completed_items); + manager.downloads_node.?.setEstimatedTotalItems(manager.total_tasks); + } + } + manager.downloads_node.?.activate(); + manager.progress.maybeRefresh(); + } + } + + var patch_tasks_batch = manager.patch_task_queue.popBatch(); + var patch_tasks_iter = patch_tasks_batch.iterator(); + while (patch_tasks_iter.next()) |ptask| { + if (comptime Environment.allow_assert) bun.assert(manager.pendingTaskCount() > 0); + manager.decrementPendingTasks(); + defer ptask.deinit(); + try ptask.runFromMainThread(manager, log_level); + if (ptask.callback == .apply) { + if (ptask.callback.apply.logger.errors == 0) { + if (comptime @TypeOf(callbacks.onExtract) != void) { + if (ptask.callback.apply.task_id) |task_id| { + _ = task_id; // autofix + + } else if (Ctx == *PackageInstaller) { + if (ptask.callback.apply.install_context) |*ctx| { + var installer: *PackageInstaller = extract_ctx; + const path = ctx.path; + ctx.path = std.ArrayList(u8).init(bun.default_allocator); + installer.node_modules.path = path; + installer.current_tree_id = ctx.tree_id; + const pkg_id = ptask.callback.apply.pkg_id; + const resolution = &manager.lockfile.packages.items(.resolution)[pkg_id]; + + installer.installPackageWithNameAndResolution( + ctx.dependency_id, + pkg_id, + log_level, + ptask.callback.apply.pkgname, + resolution, + false, + false, + ); + } + } + } + } + } + } + + if (Ctx == *Store.Installer) { + const installer: *Store.Installer = extract_ctx; + const batch = installer.tasks.popBatch(); + var iter = batch.iterator(); + while (iter.next()) |task| { + defer installer.preallocated_tasks.put(task); + switch (task.result) { + .none => { + if (comptime Environment.ci_assert) { + bun.assertWithLocation(false, @src()); + } + installer.onTaskComplete(task.entry_id, .success); + }, + .err => |err| { + installer.onTaskFail(task.entry_id, err); + }, + .blocked => { + installer.onTaskBlocked(task.entry_id); + }, + .done => { + if (comptime Environment.ci_assert) { + const step = installer.store.entries.items(.step)[task.entry_id.get()].load(.monotonic); + bun.assertWithLocation(step == .done, @src()); + } + 
installer.onTaskComplete(task.entry_id, .success); + }, + } + } + } + + var network_tasks_batch = manager.async_network_task_queue.popBatch(); + var network_tasks_iter = network_tasks_batch.iterator(); + while (network_tasks_iter.next()) |task| { + if (comptime Environment.allow_assert) bun.assert(manager.pendingTaskCount() > 0); + manager.decrementPendingTasks(); + // We cannot free the network task at the end of this scope. + // It may continue to be referenced in a future task. + + switch (task.callback) { + .package_manifest => |*manifest_req| { + const name = manifest_req.name; + if (log_level.showProgress()) { + if (!has_updated_this_run) { + manager.setNodeName(manager.downloads_node.?, name.slice(), ProgressStrings.download_emoji, true); + has_updated_this_run = true; + } + } + + if (!has_network_error and task.response.metadata == null) { + has_network_error = true; + const min = manager.options.min_simultaneous_requests; + const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); + if (max > min) { + AsyncHTTP.max_simultaneous_requests.store(@max(min, max / 2), .monotonic); + } + } + + // Handle retry-able errors. + if (task.response.metadata == null or task.response.metadata.?.response.status_code > 499) { + const err = task.response.fail orelse error.HTTPError; + + if (task.retried < manager.options.max_retry_count) { + task.retried += 1; + manager.enqueueNetworkTask(task); + + if (manager.options.log_level.isVerbose()) { + manager.log.addWarningFmt( + null, + logger.Loc.Empty, + manager.allocator, + "{s} downloading package manifest {s}. Retry {d}/{d}...", + .{ bun.span(@errorName(err)), name.slice(), task.retried, manager.options.max_retry_count }, + ) catch unreachable; + } + + continue; + } + } + + const metadata = task.response.metadata orelse { + // Handle non-retry-able errors. 
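
The error path above applies a small multiplicative-decrease throttle: on the first failed response of a tick, the shared request cap is halved, but never below `min_simultaneous_requests`, and the task itself is re-enqueued until `max_retry_count` is exhausted. A minimal sketch of that throttle, assuming a plain `std.atomic.Value(u32)` stands in for `AsyncHTTP.max_simultaneous_requests` (names here are illustrative, not Bun's):

```zig
const std = @import("std");

// Illustrative stand-in for the shared AsyncHTTP.max_simultaneous_requests.
var max_simultaneous_requests = std.atomic.Value(u32).init(64);

/// Halve the request cap on a network error, but never drop below `min`.
fn throttleOnNetworkError(min: u32) void {
    const max = max_simultaneous_requests.load(.monotonic);
    if (max > min) {
        max_simultaneous_requests.store(@max(min, max / 2), .monotonic);
    }
}

pub fn main() void {
    throttleOnNetworkError(4); // 64 -> 32
    throttleOnNetworkError(4); // 32 -> 16
    std.debug.print("cap: {d}\n", .{max_simultaneous_requests.load(.monotonic)});
}
```
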
+ const err = task.response.fail orelse error.HTTPError; + + if (@TypeOf(callbacks.onPackageManifestError) != void) { + callbacks.onPackageManifestError( + extract_ctx, + name.slice(), + err, + task.url_buf, + ); + } else { + const fmt = "{s} downloading package manifest {s}"; + if (manager.isNetworkTaskRequired(task.task_id)) { + manager.log.addErrorFmt( + null, + logger.Loc.Empty, + manager.allocator, + fmt, + .{ @errorName(err), name.slice() }, + ) catch bun.outOfMemory(); + } else { + manager.log.addWarningFmt( + null, + logger.Loc.Empty, + manager.allocator, + fmt, + .{ @errorName(err), name.slice() }, + ) catch bun.outOfMemory(); + } + + if (manager.subcommand != .remove) { + for (manager.update_requests) |*request| { + if (strings.eql(request.name, name.slice())) { + request.failed = true; + manager.options.do.save_lockfile = false; + manager.options.do.save_yarn_lock = false; + manager.options.do.install_packages = false; + } + } + } + } + + continue; + }; + const response = metadata.response; + + if (response.status_code > 399) { + if (@TypeOf(callbacks.onPackageManifestError) != void) { + const err: PackageManifestError = switch (response.status_code) { + 400 => error.PackageManifestHTTP400, + 401 => error.PackageManifestHTTP401, + 402 => error.PackageManifestHTTP402, + 403 => error.PackageManifestHTTP403, + 404 => error.PackageManifestHTTP404, + 405...499 => error.PackageManifestHTTP4xx, + else => error.PackageManifestHTTP5xx, + }; + + callbacks.onPackageManifestError( + extract_ctx, + name.slice(), + err, + task.url_buf, + ); + + continue; + } + + if (manager.isNetworkTaskRequired(task.task_id)) { + manager.log.addErrorFmt( + null, + logger.Loc.Empty, + manager.allocator, + "GET {s} - {d}", + .{ metadata.url, response.status_code }, + ) catch bun.outOfMemory(); + } else { + manager.log.addWarningFmt( + null, + logger.Loc.Empty, + manager.allocator, + "GET {s} - {d}", + .{ metadata.url, response.status_code }, + ) catch bun.outOfMemory(); + } + if (manager.subcommand != .remove) { + for (manager.update_requests) |*request| { + if (strings.eql(request.name, name.slice())) { + request.failed = true; + manager.options.do.save_lockfile = false; + manager.options.do.save_yarn_lock = false; + manager.options.do.install_packages = false; + } + } + } + + continue; + } + + if (log_level.isVerbose()) { + Output.prettyError(" ", .{}); + Output.printElapsed(@as(f64, @floatFromInt(task.unsafe_http_client.elapsed)) / std.time.ns_per_ms); + Output.prettyError("\nDownloaded {s} versions\n", .{name.slice()}); + Output.flush(); + } + + if (response.status_code == 304) { + // The HTTP request was cached + if (manifest_req.loaded_manifest) |manifest| { + const entry = try manager.manifests.hash_map.getOrPut(manager.allocator, manifest.pkg.name.hash); + entry.value_ptr.* = .{ .manifest = manifest }; + + if (timestamp_this_tick == null) { + timestamp_this_tick = @as(u32, @truncate(@as(u64, @intCast(@max(0, std.time.timestamp()))))) +| 300; + } + + entry.value_ptr.manifest.pkg.public_max_age = timestamp_this_tick.?; + + if (manager.options.enable.manifest_cache) { + Npm.PackageManifest.Serializer.saveAsync( + &entry.value_ptr.manifest, + manager.scopeForPackageName(name.slice()), + manager.getTemporaryDirectory(), + manager.getCacheDirectory(), + ); + } + + if (@hasField(@TypeOf(callbacks), "manifests_only") and callbacks.manifests_only) { + continue; + } + + const dependency_list_entry = manager.task_queue.getEntry(task.task_id).?; + + const dependency_list = dependency_list_entry.value_ptr.*; + 
dependency_list_entry.value_ptr.* = .{}; + + try manager.processDependencyList( + dependency_list, + Ctx, + extract_ctx, + callbacks, + install_peer, + ); + + continue; + } + } + + manager.task_batch.push(ThreadPool.Batch.from(manager.enqueueParseNPMPackage(task.task_id, name, task))); + }, + .extract => |*extract| { + if (!has_network_error and task.response.metadata == null) { + has_network_error = true; + const min = manager.options.min_simultaneous_requests; + const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); + if (max > min) { + AsyncHTTP.max_simultaneous_requests.store(@max(min, max / 2), .monotonic); + } + } + + if (task.response.metadata == null or task.response.metadata.?.response.status_code > 499) { + const err = task.response.fail orelse error.TarballFailedToDownload; + + if (task.retried < manager.options.max_retry_count) { + task.retried += 1; + manager.enqueueNetworkTask(task); + + if (manager.options.log_level.isVerbose()) { + manager.log.addWarningFmt( + null, + logger.Loc.Empty, + manager.allocator, + "warn: {s} downloading tarball {s}@{s}. Retrying {d}/{d}...", + .{ + bun.span(@errorName(err)), + extract.name.slice(), + extract.resolution.fmt(manager.lockfile.buffers.string_bytes.items, .auto), + task.retried, + manager.options.max_retry_count, + }, + ) catch unreachable; + } + + continue; + } + } + + const metadata = task.response.metadata orelse { + const err = task.response.fail orelse error.TarballFailedToDownload; + + if (@TypeOf(callbacks.onPackageDownloadError) != void) { + const package_id = manager.lockfile.buffers.resolutions.items[extract.dependency_id]; + callbacks.onPackageDownloadError( + extract_ctx, + package_id, + extract.name.slice(), + &extract.resolution, + err, + task.url_buf, + ); + continue; + } + + const fmt = "{s} downloading tarball {s}@{s}"; + if (manager.isNetworkTaskRequired(task.task_id)) { + manager.log.addErrorFmt( + null, + logger.Loc.Empty, + manager.allocator, + fmt, + .{ + @errorName(err), + extract.name.slice(), + extract.resolution.fmt(manager.lockfile.buffers.string_bytes.items, .auto), + }, + ) catch bun.outOfMemory(); + } else { + manager.log.addWarningFmt( + null, + logger.Loc.Empty, + manager.allocator, + fmt, + .{ + @errorName(err), + extract.name.slice(), + extract.resolution.fmt(manager.lockfile.buffers.string_bytes.items, .auto), + }, + ) catch bun.outOfMemory(); + } + if (manager.subcommand != .remove) { + for (manager.update_requests) |*request| { + if (strings.eql(request.name, extract.name.slice())) { + request.failed = true; + manager.options.do.save_lockfile = false; + manager.options.do.save_yarn_lock = false; + manager.options.do.install_packages = false; + } + } + } + + continue; + }; + + const response = metadata.response; + + if (response.status_code > 399) { + if (@TypeOf(callbacks.onPackageDownloadError) != void) { + const err = switch (response.status_code) { + 400 => error.TarballHTTP400, + 401 => error.TarballHTTP401, + 402 => error.TarballHTTP402, + 403 => error.TarballHTTP403, + 404 => error.TarballHTTP404, + 405...499 => error.TarballHTTP4xx, + else => error.TarballHTTP5xx, + }; + const package_id = manager.lockfile.buffers.resolutions.items[extract.dependency_id]; + + callbacks.onPackageDownloadError( + extract_ctx, + package_id, + extract.name.slice(), + &extract.resolution, + err, + task.url_buf, + ); + continue; + } + + if (manager.isNetworkTaskRequired(task.task_id)) { + manager.log.addErrorFmt( + null, + logger.Loc.Empty, + manager.allocator, + "GET {s} - {d}", + .{ + metadata.url, + 
response.status_code, + }, + ) catch bun.outOfMemory(); + } else { + manager.log.addWarningFmt( + null, + logger.Loc.Empty, + manager.allocator, + "GET {s} - {d}", + .{ + metadata.url, + response.status_code, + }, + ) catch bun.outOfMemory(); + } + if (manager.subcommand != .remove) { + for (manager.update_requests) |*request| { + if (strings.eql(request.name, extract.name.slice())) { + request.failed = true; + manager.options.do.save_lockfile = false; + manager.options.do.save_yarn_lock = false; + manager.options.do.install_packages = false; + } + } + } + + continue; + } + + if (log_level.isVerbose()) { + Output.prettyError(" ", .{}); + Output.printElapsed(@as(f64, @floatCast(@as(f64, @floatFromInt(task.unsafe_http_client.elapsed)) / std.time.ns_per_ms))); + Output.prettyError(" Downloaded {s} tarball\n", .{extract.name.slice()}); + Output.flush(); + } + + if (log_level.showProgress()) { + if (!has_updated_this_run) { + manager.setNodeName(manager.downloads_node.?, extract.name.slice(), ProgressStrings.extract_emoji, true); + has_updated_this_run = true; + } + } + + manager.task_batch.push(ThreadPool.Batch.from(manager.enqueueExtractNPMPackage(extract, task))); + }, + else => unreachable, + } + } + + var resolve_tasks_batch = manager.resolve_tasks.popBatch(); + var resolve_tasks_iter = resolve_tasks_batch.iterator(); + while (resolve_tasks_iter.next()) |task| { + if (comptime Environment.allow_assert) bun.assert(manager.pendingTaskCount() > 0); + defer manager.preallocated_resolve_tasks.put(task); + manager.decrementPendingTasks(); + + if (task.log.msgs.items.len > 0) { + try task.log.print(Output.errorWriter()); + if (task.log.errors > 0) { + manager.any_failed_to_install = true; + } + task.log.deinit(); + } + + switch (task.tag) { + .package_manifest => { + defer manager.preallocated_network_tasks.put(task.request.package_manifest.network); + if (task.status == .fail) { + const name = task.request.package_manifest.name; + const err = task.err orelse error.Failed; + + if (@TypeOf(callbacks.onPackageManifestError) != void) { + callbacks.onPackageManifestError( + extract_ctx, + name.slice(), + err, + task.request.package_manifest.network.url_buf, + ); + } else { + manager.log.addErrorFmt( + null, + logger.Loc.Empty, + manager.allocator, + "{s} parsing package manifest for {s}", + .{ + @errorName(err), + name.slice(), + }, + ) catch bun.outOfMemory(); + } + + continue; + } + const manifest = &task.data.package_manifest; + + try manager.manifests.insert(manifest.pkg.name.hash, manifest); + + if (@hasField(@TypeOf(callbacks), "manifests_only") and callbacks.manifests_only) { + continue; + } + + const dependency_list_entry = manager.task_queue.getEntry(task.id).?; + const dependency_list = dependency_list_entry.value_ptr.*; + dependency_list_entry.value_ptr.* = .{}; + + try manager.processDependencyList(dependency_list, Ctx, extract_ctx, callbacks, install_peer); + + if (log_level.showProgress()) { + if (!has_updated_this_run) { + manager.setNodeName(manager.downloads_node.?, manifest.name(), ProgressStrings.download_emoji, true); + has_updated_this_run = true; + } + } + }, + .extract, .local_tarball => { + defer { + switch (task.tag) { + .extract => manager.preallocated_network_tasks.put(task.request.extract.network), + else => {}, + } + } + + const tarball = switch (task.tag) { + .extract => &task.request.extract.tarball, + .local_tarball => &task.request.local_tarball.tarball, + else => unreachable, + }; + const dependency_id = tarball.dependency_id; + var package_id = 
manager.lockfile.buffers.resolutions.items[dependency_id];
+                const alias = tarball.name.slice();
+                const resolution = &tarball.resolution;
+
+                if (task.status == .fail) {
+                    const err = task.err orelse error.TarballFailedToExtract;
+
+                    if (@TypeOf(callbacks.onPackageDownloadError) != void) {
+                        callbacks.onPackageDownloadError(
+                            extract_ctx,
+                            package_id,
+                            alias,
+                            resolution,
+                            err,
+                            switch (task.tag) {
+                                .extract => task.request.extract.network.url_buf,
+                                .local_tarball => task.request.local_tarball.tarball.url.slice(),
+                                else => unreachable,
+                            },
+                        );
+                    } else {
+                        manager.log.addErrorFmt(
+                            null,
+                            logger.Loc.Empty,
+                            manager.allocator,
+                            "{s} extracting tarball from {s}",
+                            .{
+                                @errorName(err),
+                                alias,
+                            },
+                        ) catch bun.outOfMemory();
+                    }
+                    continue;
+                }
+
+                manager.extracted_count += 1;
+                bun.Analytics.Features.extracted_packages += 1;
+
+                if (comptime @TypeOf(callbacks.onExtract) != void) {
+                    switch (Ctx) {
+                        *PackageInstaller => {
+                            extract_ctx.fixCachedLockfilePackageSlices();
+                            callbacks.onExtract(
+                                extract_ctx,
+                                task.id,
+                                dependency_id,
+                                &task.data.extract,
+                                log_level,
+                            );
+                        },
+                        *Store.Installer => {
+                            callbacks.onExtract(
+                                extract_ctx,
+                                task.id,
+                            );
+                        },
+                        else => @compileError("unexpected context type"),
+                    }
+                } else if (manager.processExtractedTarballPackage(&package_id, dependency_id, resolution, &task.data.extract, log_level)) |pkg| handle_pkg: {
+                    // In the middle of an install, you could end up needing to download the github tarball for a dependency
+                    // We need to make sure we resolve the dependencies first before calling the onExtract callback
+                    // TODO: move this into a separate function
+                    var any_root = false;
+                    var dependency_list_entry = manager.task_queue.getEntry(task.id) orelse break :handle_pkg;
+                    var dependency_list = dependency_list_entry.value_ptr.*;
+                    dependency_list_entry.value_ptr.* = .{};
+
+                    defer {
+                        dependency_list.deinit(manager.allocator);
+                        if (comptime @TypeOf(callbacks) != void and @TypeOf(callbacks.onResolve) != void) {
+                            if (any_root) {
+                                callbacks.onResolve(extract_ctx);
+                            }
+                        }
+                    }
+
+                    for (dependency_list.items) |dep| {
+                        switch (dep) {
+                            .dependency, .root_dependency => |id| {
+                                var version = &manager.lockfile.buffers.dependencies.items[id].version;
+                                switch (version.tag) {
+                                    .git => {
+                                        version.value.git.package_name = pkg.name;
+                                    },
+                                    .github => {
+                                        version.value.github.package_name = pkg.name;
+                                    },
+                                    .tarball => {
+                                        version.value.tarball.package_name = pkg.name;
+                                    },
+
+                                    // `else` is reachable if this package is from `overrides`. Version in `lockfile.buffer.dependencies`
+                                    // will still have the original.
+                                    else => {},
+                                }
+                                try manager.processDependencyListItem(dep, &any_root, install_peer);
+                            },
+                            else => {
+                                // if it's a node_module folder to install, handle that after we process all the dependencies within the onExtract callback.
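
The `version.tag` switch above backfills `package_name` on whichever union payload the dependency uses, deliberately skipping versions that came from `overrides`. A minimal sketch of that pattern on toy stand-ins (not Bun's actual `Dependency.Version`):

```zig
const std = @import("std");

// Toy stand-in for a dependency version union.
const Version = union(enum) {
    git: struct { package_name: []const u8 = "" },
    github: struct { package_name: []const u8 = "" },
    tarball: struct { package_name: []const u8 = "" },
    npm: void,
};

/// Write the resolved name into whichever payload carries one; the `else`
/// arm is a deliberate no-op, mirroring the `overrides` case above.
fn backfillPackageName(version: *Version, name: []const u8) void {
    switch (version.*) {
        .git => |*v| v.package_name = name,
        .github => |*v| v.package_name = name,
        .tarball => |*v| v.package_name = name,
        else => {},
    }
}

pub fn main() void {
    var v: Version = .{ .github = .{} };
    backfillPackageName(&v, "left-pad");
    std.debug.print("{s}\n", .{v.github.package_name});
}
```
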
+ dependency_list_entry.value_ptr.append(manager.allocator, dep) catch unreachable; + }, + } + } + } else if (manager.task_queue.getEntry(Task.Id.forManifest( + manager.lockfile.str(&manager.lockfile.packages.items(.name)[package_id]), + ))) |dependency_list_entry| { + // Peer dependencies do not initiate any downloads of their own, thus need to be resolved here instead + const dependency_list = dependency_list_entry.value_ptr.*; + dependency_list_entry.value_ptr.* = .{}; + + try manager.processDependencyList(dependency_list, void, {}, {}, install_peer); + } + + manager.setPreinstallState(package_id, manager.lockfile, .done); + + if (log_level.showProgress()) { + if (!has_updated_this_run) { + manager.setNodeName(manager.downloads_node.?, alias, ProgressStrings.extract_emoji, true); + has_updated_this_run = true; + } + } + }, + .git_clone => { + const clone = &task.request.git_clone; + const repo_fd = task.data.git_clone; + const name = clone.name.slice(); + const url = clone.url.slice(); + + manager.git_repositories.put(manager.allocator, task.id, repo_fd) catch unreachable; + + if (task.status == .fail) { + const err = task.err orelse error.Failed; + + if (@TypeOf(callbacks.onPackageManifestError) != void) { + callbacks.onPackageManifestError( + extract_ctx, + name, + err, + url, + ); + } else if (log_level != .silent) { + manager.log.addErrorFmt( + null, + logger.Loc.Empty, + manager.allocator, + "{s} cloning repository for {s}", + .{ + @errorName(err), + name, + }, + ) catch bun.outOfMemory(); + } + continue; + } + + if (comptime @TypeOf(callbacks.onExtract) != void and Ctx == *PackageInstaller) { + // Installing! + // this dependency might be something other than a git dependency! only need the name and + // behavior, use the resolution from the task. + const dep_id = clone.dep_id; + const dep = manager.lockfile.buffers.dependencies.items[dep_id]; + const dep_name = dep.name.slice(manager.lockfile.buffers.string_bytes.items); + + const git = clone.res.value.git; + const committish = git.committish.slice(manager.lockfile.buffers.string_bytes.items); + const repo = git.repo.slice(manager.lockfile.buffers.string_bytes.items); + + const resolved = try Repository.findCommit( + manager.allocator, + manager.env, + manager.log, + task.data.git_clone.stdDir(), + dep_name, + committish, + task.id, + ); + + const checkout_id = Task.Id.forGitCheckout(repo, resolved); + + if (manager.hasCreatedNetworkTask(checkout_id, dep.behavior.isRequired())) continue; + + manager.task_batch.push(ThreadPool.Batch.from(manager.enqueueGitCheckout( + checkout_id, + repo_fd, + dep_id, + dep_name, + clone.res, + resolved, + null, + ))); + } else { + // Resolving! 
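
The `hasCreatedNetworkTask(checkout_id, ...)` guard above (defined later in this file) dedupes work by task id via `getOrPut`, upgrading a previously-optional entry to required rather than scheduling a duplicate. The same pattern in isolation, with toy types:

```zig
const std = @import("std");

const TaskFlags = struct { is_required: bool };

/// Returns true if a task with this id already exists; a required request
/// upgrades an existing optional entry instead of enqueueing a duplicate.
fn hasCreatedTask(map: *std.AutoHashMap(u64, TaskFlags), task_id: u64, is_required: bool) !bool {
    const gop = try map.getOrPut(task_id);
    gop.value_ptr.is_required = if (!gop.found_existing)
        is_required
    else
        gop.value_ptr.is_required or is_required;
    return gop.found_existing;
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    var map = std.AutoHashMap(u64, TaskFlags).init(gpa.allocator());
    defer map.deinit();
    const first = try hasCreatedTask(&map, 42, false); // false: newly created
    const second = try hasCreatedTask(&map, 42, true); // true: deduped, now required
    std.debug.print("{} {}\n", .{ first, second });
}
```
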
+ const dependency_list_entry = manager.task_queue.getEntry(task.id).?; + const dependency_list = dependency_list_entry.value_ptr.*; + dependency_list_entry.value_ptr.* = .{}; + + try manager.processDependencyList(dependency_list, Ctx, extract_ctx, callbacks, install_peer); + } + + if (log_level.showProgress()) { + if (!has_updated_this_run) { + manager.setNodeName(manager.downloads_node.?, name, ProgressStrings.download_emoji, true); + has_updated_this_run = true; + } + } + }, + .git_checkout => { + const git_checkout = &task.request.git_checkout; + const alias = &git_checkout.name; + const resolution = &git_checkout.resolution; + var package_id: PackageID = invalid_package_id; + + if (task.status == .fail) { + const err = task.err orelse error.Failed; + + manager.log.addErrorFmt( + null, + logger.Loc.Empty, + manager.allocator, + "{s} checking out repository for {s}", + .{ + @errorName(err), + alias.slice(), + }, + ) catch bun.outOfMemory(); + + continue; + } + + if (comptime @TypeOf(callbacks.onExtract) != void) { + // We've populated the cache, package already exists in memory. Call the package installer callback + // and don't enqueue dependencies + switch (Ctx) { + *PackageInstaller => { + + // TODO(dylan-conway) most likely don't need to call this now that the package isn't appended, but + // keeping just in case for now + extract_ctx.fixCachedLockfilePackageSlices(); + + callbacks.onExtract( + extract_ctx, + task.id, + git_checkout.dependency_id, + &task.data.git_checkout, + log_level, + ); + }, + *Store.Installer => { + callbacks.onExtract( + extract_ctx, + task.id, + ); + }, + else => @compileError("unexpected context type"), + } + } else if (manager.processExtractedTarballPackage( + &package_id, + git_checkout.dependency_id, + resolution, + &task.data.git_checkout, + log_level, + )) |pkg| handle_pkg: { + var any_root = false; + var dependency_list_entry = manager.task_queue.getEntry(task.id) orelse break :handle_pkg; + var dependency_list = dependency_list_entry.value_ptr.*; + dependency_list_entry.value_ptr.* = .{}; + + defer { + dependency_list.deinit(manager.allocator); + if (comptime @TypeOf(callbacks) != void and @TypeOf(callbacks.onResolve) != void) { + if (any_root) { + callbacks.onResolve(extract_ctx); + } + } + } + + for (dependency_list.items) |dep| { + switch (dep) { + .dependency, .root_dependency => |id| { + var repo = &manager.lockfile.buffers.dependencies.items[id].version.value.git; + repo.resolved = pkg.resolution.value.git.resolved; + repo.package_name = pkg.name; + try manager.processDependencyListItem(dep, &any_root, install_peer); + }, + else => { + // if it's a node_module folder to install, handle that after we process all the dependencies within the onExtract callback. 
+ dependency_list_entry.value_ptr.append(manager.allocator, dep) catch unreachable; + }, + } + } + + if (@TypeOf(callbacks.onExtract) != void) { + @compileError("ctx should be void"); + } + } + + if (log_level.showProgress()) { + if (!has_updated_this_run) { + manager.setNodeName(manager.downloads_node.?, alias.slice(), ProgressStrings.download_emoji, true); + has_updated_this_run = true; + } + } + }, + } + } +} + +pub inline fn pendingTaskCount(manager: *const PackageManager) u32 { + return manager.pending_tasks.load(.monotonic); +} + +pub inline fn incrementPendingTasks(manager: *PackageManager, count: u32) u32 { + manager.total_tasks += count; + return manager.pending_tasks.fetchAdd(count, .monotonic); +} + +pub inline fn decrementPendingTasks(manager: *PackageManager) void { + _ = manager.pending_tasks.fetchSub(1, .monotonic); +} + +pub fn flushNetworkQueue(this: *PackageManager) void { + var network = &this.network_task_fifo; + + while (network.readItem()) |network_task| { + network_task.schedule(if (network_task.callback == .extract) &this.network_tarball_batch else &this.network_resolve_batch); + } +} + +pub fn flushPatchTaskQueue(this: *PackageManager) void { + var patch_task_fifo = &this.patch_task_fifo; + + while (patch_task_fifo.readItem()) |patch_task| { + patch_task.schedule(if (patch_task.callback == .apply) &this.patch_apply_batch else &this.patch_calc_hash_batch); + } +} + +fn doFlushDependencyQueue(this: *PackageManager) void { + var lockfile = this.lockfile; + var dependency_queue = &lockfile.scratch.dependency_list_queue; + + while (dependency_queue.readItem()) |dependencies_list| { + var i: u32 = dependencies_list.off; + const end = dependencies_list.off + dependencies_list.len; + while (i < end) : (i += 1) { + const dependency = lockfile.buffers.dependencies.items[i]; + this.enqueueDependencyWithMain( + i, + &dependency, + lockfile.buffers.resolutions.items[i], + false, + ) catch {}; + } + } + + this.flushNetworkQueue(); +} +pub fn flushDependencyQueue(this: *PackageManager) void { + var last_count = this.total_tasks; + while (true) : (last_count = this.total_tasks) { + this.flushNetworkQueue(); + doFlushDependencyQueue(this); + this.flushNetworkQueue(); + this.flushPatchTaskQueue(); + + if (this.total_tasks == last_count) break; + } +} + +pub fn scheduleTasks(manager: *PackageManager) usize { + const count = manager.task_batch.len + manager.network_resolve_batch.len + manager.network_tarball_batch.len + manager.patch_apply_batch.len + manager.patch_calc_hash_batch.len; + + _ = manager.incrementPendingTasks(@truncate(count)); + manager.thread_pool.schedule(manager.patch_apply_batch); + manager.thread_pool.schedule(manager.patch_calc_hash_batch); + manager.thread_pool.schedule(manager.task_batch); + manager.network_resolve_batch.push(manager.network_tarball_batch); + HTTP.http_thread.schedule(manager.network_resolve_batch); + manager.task_batch = .{}; + manager.network_tarball_batch = .{}; + manager.network_resolve_batch = .{}; + manager.patch_apply_batch = .{}; + manager.patch_calc_hash_batch = .{}; + return count; +} + +pub fn drainDependencyList(this: *PackageManager) void { + // Step 2. If there were cached dependencies, go through all of those but don't download the devDependencies for them. + this.flushDependencyQueue(); + + if (PackageManager.verbose_install) Output.flush(); + + // It's only network requests here because we don't store tarballs. 
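
`flushDependencyQueue` above runs to a fixed point: a flush pass can enqueue more work (growing `total_tasks`), so passes repeat until one completes with the counter unchanged. The shape of that loop in isolation, with a toy manager standing in for Bun's `PackageManager`:

```zig
const std = @import("std");

const ToyManager = struct {
    total_tasks: u32 = 0,
    queued: u32 = 3,

    // Stand-in for one flush pass: draining an item may enqueue follow-ups.
    fn flushOnce(this: *@This()) void {
        while (this.queued > 0) {
            this.queued -= 1;
            this.total_tasks += 1;
            // Every other task spawns one more unit of work.
            if (this.total_tasks % 2 == 0) this.queued += 1;
        }
    }

    // Repeat passes until a full pass leaves total_tasks unchanged.
    fn flushUntilStable(this: *@This()) void {
        var last = this.total_tasks;
        while (true) : (last = this.total_tasks) {
            this.flushOnce();
            if (this.total_tasks == last) break;
        }
    }
};

pub fn main() void {
    var m = ToyManager{};
    m.flushUntilStable();
    std.debug.print("drained {d} tasks\n", .{m.total_tasks});
}
```
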
+ _ = this.scheduleTasks(); +} + +pub fn getNetworkTask(this: *PackageManager) *NetworkTask { + return this.preallocated_network_tasks.get(); +} + +pub fn allocGitHubURL(this: *const PackageManager, repository: *const Repository) string { + var github_api_url: string = "https://api.github.com"; + if (this.env.get("GITHUB_API_URL")) |url| { + if (url.len > 0) { + github_api_url = url; + } + } + + const owner = this.lockfile.str(&repository.owner); + const repo = this.lockfile.str(&repository.repo); + const committish = this.lockfile.str(&repository.committish); + + return std.fmt.allocPrint( + this.allocator, + "{s}/repos/{s}/{s}{s}tarball/{s}", + .{ + strings.withoutTrailingSlash(github_api_url), + owner, + repo, + // repo might be empty if dep is https://github.com/... style + if (repo.len > 0) "/" else "", + committish, + }, + ) catch unreachable; +} + +pub fn hasCreatedNetworkTask(this: *PackageManager, task_id: Task.Id, is_required: bool) bool { + const gpe = this.network_dedupe_map.getOrPut(task_id) catch bun.outOfMemory(); + + // if there's an existing network task that is optional, we want to make it non-optional if this one would be required + gpe.value_ptr.is_required = if (!gpe.found_existing) + is_required + else + gpe.value_ptr.is_required or is_required; + + return gpe.found_existing; +} + +pub fn isNetworkTaskRequired(this: *const PackageManager, task_id: Task.Id) bool { + return (this.network_dedupe_map.get(task_id) orelse return true).is_required; +} + +pub fn generateNetworkTaskForTarball( + this: *PackageManager, + task_id: Task.Id, + url: string, + is_required: bool, + dependency_id: DependencyID, + package: Lockfile.Package, + patch_name_and_version_hash: ?u64, + authorization: NetworkTask.Authorization, +) NetworkTask.ForTarballError!?*NetworkTask { + if (this.hasCreatedNetworkTask(task_id, is_required)) { + return null; + } + + var network_task = this.getNetworkTask(); + + network_task.* = .{ + .task_id = task_id, + .callback = undefined, + .allocator = this.allocator, + .package_manager = this, + .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { + const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; + const task = PatchTask.newApplyPatchHash(this, package.meta.id, patch_hash, h); + task.callback.apply.task_id = task_id; + break :brk task; + } else null, + }; + + const scope = this.scopeForPackageName(this.lockfile.str(&package.name)); + + try network_task.forTarball( + this.allocator, + &.{ + .package_manager = this, + .name = strings.StringOrTinyString.initAppendIfNeeded( + this.lockfile.str(&package.name), + *FileSystem.FilenameStore, + FileSystem.FilenameStore.instance, + ) catch bun.outOfMemory(), + .resolution = package.resolution, + .cache_dir = this.getCacheDirectory(), + .temp_dir = this.getTemporaryDirectory(), + .dependency_id = dependency_id, + .integrity = package.meta.integrity, + .url = strings.StringOrTinyString.initAppendIfNeeded( + url, + *FileSystem.FilenameStore, + FileSystem.FilenameStore.instance, + ) catch bun.outOfMemory(), + }, + scope, + authorization, + ); + + return network_task; +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const Output = bun.Output; +const ThreadPool = bun.ThreadPool; +const default_allocator = bun.default_allocator; +const logger = bun.logger; +const string = bun.string; +const strings = bun.strings; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const HTTP = bun.http; +const AsyncHTTP = 
HTTP.AsyncHTTP; + +const DependencyID = bun.install.DependencyID; +const Features = bun.install.Features; +const NetworkTask = bun.install.NetworkTask; +const Npm = bun.install.Npm; +const PackageID = bun.install.PackageID; +const PackageManifestError = bun.install.PackageManifestError; +const PatchTask = bun.install.PatchTask; +const Repository = bun.install.Repository; +const Store = bun.install.Store; +const Task = bun.install.Task; +const invalid_package_id = bun.install.invalid_package_id; + +const Lockfile = bun.install.Lockfile; +const Package = Lockfile.Package; + +const PackageManager = bun.install.PackageManager; +const Options = PackageManager.Options; +const PackageInstaller = PackageManager.PackageInstaller; +const ProgressStrings = PackageManager.ProgressStrings; diff --git a/src/install/PackageManager/updatePackageJSONAndInstall.zig b/src/install/PackageManager/updatePackageJSONAndInstall.zig new file mode 100644 index 0000000000..6a43158915 --- /dev/null +++ b/src/install/PackageManager/updatePackageJSONAndInstall.zig @@ -0,0 +1,749 @@ +pub fn updatePackageJSONAndInstallWithManager( + manager: *PackageManager, + ctx: Command.Context, + original_cwd: string, +) !void { + var update_requests = UpdateRequest.Array.initCapacity(manager.allocator, 64) catch bun.outOfMemory(); + defer update_requests.deinit(manager.allocator); + + if (manager.options.positionals.len <= 1) { + switch (manager.subcommand) { + .add => { + Output.errGeneric("no package specified to add", .{}); + Output.flush(); + PackageManager.CommandLineArguments.printHelp(.add); + + Global.exit(0); + }, + .remove => { + Output.errGeneric("no package specified to remove", .{}); + Output.flush(); + PackageManager.CommandLineArguments.printHelp(.remove); + + Global.exit(0); + }, + else => {}, + } + } + + return try updatePackageJSONAndInstallWithManagerWithUpdatesAndUpdateRequests( + manager, + ctx, + original_cwd, + manager.options.positionals[1..], + &update_requests, + ); +} + +fn updatePackageJSONAndInstallWithManagerWithUpdatesAndUpdateRequests( + manager: *PackageManager, + ctx: Command.Context, + original_cwd: string, + positionals: []const string, + update_requests: *UpdateRequest.Array, +) !void { + var updates: []UpdateRequest = if (manager.subcommand == .@"patch-commit" or manager.subcommand == .patch) + &[_]UpdateRequest{} + else + UpdateRequest.parse(ctx.allocator, manager, ctx.log, positionals, update_requests, manager.subcommand); + try updatePackageJSONAndInstallWithManagerWithUpdates( + manager, + ctx, + &updates, + manager.subcommand, + original_cwd, + ); +} +fn updatePackageJSONAndInstallWithManagerWithUpdates( + manager: *PackageManager, + ctx: Command.Context, + updates: *[]UpdateRequest, + subcommand: Subcommand, + original_cwd: string, +) !void { + const log_level = manager.options.log_level; + if (manager.log.errors > 0) { + if (log_level != .silent) { + manager.log.print(Output.errorWriter()) catch {}; + } + Global.crash(); + } + + var current_package_json = switch (manager.workspace_package_json_cache.getWithPath( + manager.allocator, + manager.log, + manager.original_package_json_path, + .{ + .guess_indentation = true, + }, + )) { + .parse_err => |err| { + manager.log.print(Output.errorWriter()) catch {}; + Output.errGeneric("failed to parse package.json \"{s}\": {s}", .{ + manager.original_package_json_path, + @errorName(err), + }); + Global.crash(); + }, + .read_err => |err| { + Output.errGeneric("failed to read package.json \"{s}\": {s}", .{ + manager.original_package_json_path, + 
@errorName(err), + }); + Global.crash(); + }, + .entry => |entry| entry, + }; + const current_package_json_indent = current_package_json.indentation; + + // If there originally was a newline at the end of their package.json, preserve it + // so that we don't cause unnecessary diffs in their git history. + // https://github.com/oven-sh/bun/issues/1375 + const preserve_trailing_newline_at_eof_for_package_json = current_package_json.source.contents.len > 0 and + current_package_json.source.contents[current_package_json.source.contents.len - 1] == '\n'; + + if (subcommand == .remove) { + if (current_package_json.root.data != .e_object) { + Output.errGeneric("package.json is not an Object {{}}, so there's nothing to {s}!", .{@tagName(subcommand)}); + Global.crash(); + } else if (current_package_json.root.data.e_object.properties.len == 0) { + Output.errGeneric("package.json is empty {{}}, so there's nothing to {s}!", .{@tagName(subcommand)}); + Global.crash(); + } else if (current_package_json.root.asProperty("devDependencies") == null and + current_package_json.root.asProperty("dependencies") == null and + current_package_json.root.asProperty("optionalDependencies") == null and + current_package_json.root.asProperty("peerDependencies") == null) + { + Output.prettyErrorln("package.json doesn't have dependencies, there's nothing to {s}!", .{@tagName(subcommand)}); + Global.exit(0); + } + } + + const dependency_list = if (manager.options.update.development) + "devDependencies" + else if (manager.options.update.optional) + "optionalDependencies" + else if (manager.options.update.peer) + "peerDependencies" + else + "dependencies"; + var any_changes = false; + + var not_in_workspace_root: ?PatchCommitResult = null; + switch (subcommand) { + .remove => { + // if we're removing, they don't have to specify where it is installed in the dependencies list + // they can even put it multiple times and we will just remove all of them + for (updates.*) |request| { + inline for ([_]string{ "dependencies", "devDependencies", "optionalDependencies", "peerDependencies" }) |list| { + if (current_package_json.root.asProperty(list)) |query| { + if (query.expr.data == .e_object) { + var dependencies = query.expr.data.e_object.properties.slice(); + var i: usize = 0; + var new_len = dependencies.len; + while (i < dependencies.len) : (i += 1) { + if (dependencies[i].key.?.data == .e_string) { + if (dependencies[i].key.?.data.e_string.eql(string, request.name)) { + if (new_len > 1) { + dependencies[i] = dependencies[new_len - 1]; + new_len -= 1; + } else { + new_len = 0; + } + + any_changes = true; + } + } + } + + const changed = new_len != dependencies.len; + if (changed) { + query.expr.data.e_object.properties.len = @as(u32, @truncate(new_len)); + + // If the dependencies list is now empty, remove it from the package.json + // since we're swapRemove, we have to re-sort it + if (query.expr.data.e_object.properties.len == 0) { + var arraylist = current_package_json.root.data.e_object.properties.list(); + _ = arraylist.swapRemove(query.i); + current_package_json.root.data.e_object.properties.update(arraylist); + current_package_json.root.data.e_object.packageJSONSort(); + } else { + var obj = query.expr.data.e_object; + obj.alphabetizeProperties(); + } + } + } + } + } + } + }, + + .link, .add, .update => { + // `bun update ` is basically the same as `bun add `, except + // update will not exceed the current dependency range if it exists + + if (updates.len != 0) { + try PackageJSONEditor.edit( + manager, + updates, + 
&current_package_json.root,
+                    dependency_list,
+                    .{
+                        .exact_versions = manager.options.enable.exact_versions,
+                        .before_install = true,
+                    },
+                );
+            } else if (subcommand == .update) {
+                try PackageJSONEditor.editUpdateNoArgs(
+                    manager,
+                    &current_package_json.root,
+                    .{
+                        .exact_versions = true,
+                        .before_install = true,
+                    },
+                );
+            }
+        },
+        else => {
+            if (manager.options.patch_features == .commit) {
+                var pathbuf: bun.PathBuffer = undefined;
+                if (try manager.doPatchCommit(&pathbuf, log_level)) |stuff| {
+                    // we're inside a workspace package, we need to edit the
+                    // root json, not the `current_package_json`
+                    if (stuff.not_in_workspace_root) {
+                        not_in_workspace_root = stuff;
+                    } else {
+                        try PackageJSONEditor.editPatchedDependencies(
+                            manager,
+                            &current_package_json.root,
+                            stuff.patch_key,
+                            stuff.patchfile_path,
+                        );
+                    }
+                }
+            }
+        },
+    }
+
+    manager.to_update = subcommand == .update;
+
+    {
+        // In case it's a pointer to self. Avoid RLS.
+        const cloned = updates.*;
+        manager.update_requests = cloned;
+    }
+
+    var buffer_writer = JSPrinter.BufferWriter.init(manager.allocator);
+    try buffer_writer.buffer.list.ensureTotalCapacity(manager.allocator, current_package_json.source.contents.len + 1);
+    buffer_writer.append_newline = preserve_trailing_newline_at_eof_for_package_json;
+    var package_json_writer = JSPrinter.BufferPrinter.init(buffer_writer);
+
+    var written = JSPrinter.printJSON(
+        @TypeOf(&package_json_writer),
+        &package_json_writer,
+        current_package_json.root,
+        &current_package_json.source,
+        .{
+            .indent = current_package_json_indent,
+            .mangled_props = null,
+        },
+    ) catch |err| {
+        Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)});
+        Global.crash();
+    };
+
+    // There are various tradeoffs with how we commit updates when you run `bun add` or `bun remove`
+    // The one we chose here is to effectively pretend a human did:
+    // 1. "bun add react@latest"
+    // 2. open lockfile, find what react resolved to
+    // 3. open package.json
+    // 4. replace "react" : "latest" with "react" : "^16.2.0"
+    // 5. save package.json
+    // The Smarter™ approach is you resolve ahead of time and write to disk once!
+    // But, turns out that's slower in any case where more than one package has to be resolved (most of the time!)
+    // Concurrent network requests are faster than doing one and then waiting until the next batch
+    var new_package_json_source = try manager.allocator.dupe(u8, package_json_writer.ctx.writtenWithoutTrailingZero());
+    current_package_json.source.contents = new_package_json_source;
+
+    // may or may not be the package json we are editing
+    const top_level_dir_without_trailing_slash = strings.withoutTrailingSlash(FileSystem.instance.top_level_dir);
+
+    var root_package_json_path_buf: bun.PathBuffer = undefined;
+    const root_package_json_source, const root_package_json_path = brk: {
+        @memcpy(root_package_json_path_buf[0..top_level_dir_without_trailing_slash.len], top_level_dir_without_trailing_slash);
+        @memcpy(root_package_json_path_buf[top_level_dir_without_trailing_slash.len..][0.."/package.json".len], "/package.json");
+        const root_package_json_path = root_package_json_path_buf[0 .. top_level_dir_without_trailing_slash.len + "/package.json".len];
+        root_package_json_path_buf[root_package_json_path.len] = 0;
+
+        // The lifetime of this pointer is only valid until the next call to `getWithPath`, which can happen after this scope.
+        // https://github.com/oven-sh/bun/issues/12288
+        const root_package_json = switch (manager.workspace_package_json_cache.getWithPath(
+            manager.allocator,
+            manager.log,
+            root_package_json_path,
+            .{
+                .guess_indentation = true,
+            },
+        )) {
+            .parse_err => |err| {
+                manager.log.print(Output.errorWriter()) catch {};
+                Output.errGeneric("failed to parse package.json \"{s}\": {s}", .{
+                    root_package_json_path,
+                    @errorName(err),
+                });
+                Global.crash();
+            },
+            .read_err => |err| {
+                Output.errGeneric("failed to read package.json \"{s}\": {s}", .{
+                    root_package_json_path,
+                    @errorName(err),
+                });
+                Global.crash();
+            },
+            .entry => |entry| entry,
+        };
+
+        if (not_in_workspace_root) |stuff| {
+            try PackageJSONEditor.editPatchedDependencies(
+                manager,
+                &root_package_json.root,
+                stuff.patch_key,
+                stuff.patchfile_path,
+            );
+            var buffer_writer2 = JSPrinter.BufferWriter.init(manager.allocator);
+            try buffer_writer2.buffer.list.ensureTotalCapacity(manager.allocator, root_package_json.source.contents.len + 1);
+            buffer_writer2.append_newline = preserve_trailing_newline_at_eof_for_package_json;
+            var package_json_writer2 = JSPrinter.BufferPrinter.init(buffer_writer2);
+
+            _ = JSPrinter.printJSON(
+                @TypeOf(&package_json_writer2),
+                &package_json_writer2,
+                root_package_json.root,
+                &root_package_json.source,
+                .{
+                    .indent = root_package_json.indentation,
+                    .mangled_props = null,
+                },
+            ) catch |err| {
+                Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)});
+                Global.crash();
+            };
+            root_package_json.source.contents = try manager.allocator.dupe(u8, package_json_writer2.ctx.writtenWithoutTrailingZero());
+        }
+
+        break :brk .{ root_package_json.source.contents, root_package_json_path_buf[0..root_package_json_path.len :0] };
+    };
+
+    try manager.installWithManager(ctx, root_package_json_source, original_cwd);
+
+    if (subcommand == .update or subcommand == .add or subcommand == .link) {
+        for (updates.*) |request| {
+            if (request.failed) {
+                Global.exit(1);
+                return;
+            }
+        }
+
+        const source = &logger.Source.initPathString("package.json", new_package_json_source);
+
+        // Now, we _re_ parse our in-memory edited package.json
+        // so we can commit the version we changed from the lockfile
+        var new_package_json = JSON.parsePackageJSONUTF8(source, manager.log, manager.allocator) catch |err| {
+            Output.prettyErrorln("package.json failed to parse due to error {s}", .{@errorName(err)});
+            Global.crash();
+        };
+
+        if (updates.len == 0) {
+            try PackageJSONEditor.editUpdateNoArgs(
+                manager,
+                &new_package_json,
+                .{
+                    .exact_versions = manager.options.enable.exact_versions,
+                },
+            );
+        } else {
+            try PackageJSONEditor.edit(
+                manager,
+                updates,
+                &new_package_json,
+                dependency_list,
+                .{
+                    .exact_versions = manager.options.enable.exact_versions,
+                    .add_trusted_dependencies = manager.options.do.trust_dependencies_from_args,
+                },
+            );
+        }
+        var buffer_writer_two = JSPrinter.BufferWriter.init(manager.allocator);
+        try buffer_writer_two.buffer.list.ensureTotalCapacity(manager.allocator, source.contents.len + 1);
+        buffer_writer_two.append_newline =
+            preserve_trailing_newline_at_eof_for_package_json;
+        var package_json_writer_two = JSPrinter.BufferPrinter.init(buffer_writer_two);
+
+        written = JSPrinter.printJSON(
+            @TypeOf(&package_json_writer_two),
+            &package_json_writer_two,
+            new_package_json,
+            source,
+            .{
+                .indent = current_package_json_indent,
+                .mangled_props = null,
+            },
+        ) catch |err| {
+            Output.prettyErrorln("package.json failed to write due to error
{s}", .{@errorName(err)}); + Global.crash(); + }; + + new_package_json_source = try manager.allocator.dupe(u8, package_json_writer_two.ctx.writtenWithoutTrailingZero()); + } + + if (manager.options.do.write_package_json) { + const source, const path = if (manager.options.patch_features == .commit) + .{ root_package_json_source, root_package_json_path } + else + .{ new_package_json_source, manager.original_package_json_path }; + + // Now that we've run the install step + // We can save our in-memory package.json to disk + const workspace_package_json_file = (try bun.sys.File.openat( + .cwd(), + path, + bun.O.RDWR, + 0, + ).unwrap()).handle.stdFile(); + + try workspace_package_json_file.pwriteAll(source, 0); + std.posix.ftruncate(workspace_package_json_file.handle, source.len) catch {}; + workspace_package_json_file.close(); + + if (subcommand == .remove) { + if (!any_changes) { + Global.exit(0); + return; + } + + var cwd = std.fs.cwd(); + // This is not exactly correct + var node_modules_buf: bun.PathBuffer = undefined; + bun.copy(u8, &node_modules_buf, "node_modules" ++ std.fs.path.sep_str); + const offset_buf = node_modules_buf["node_modules/".len..]; + const name_hashes = manager.lockfile.packages.items(.name_hash); + for (updates.*) |request| { + // If the package no longer exists in the updated lockfile, delete the directory + // This is not thorough. + // It does not handle nested dependencies + // This is a quick & dirty cleanup intended for when deleting top-level dependencies + if (std.mem.indexOfScalar(PackageNameHash, name_hashes, String.Builder.stringHash(request.name)) == null) { + bun.copy(u8, offset_buf, request.name); + cwd.deleteTree(node_modules_buf[0 .. "node_modules/".len + request.name.len]) catch {}; + } + } + + // This is where we clean dangling symlinks + // This could be slow if there are a lot of symlinks + if (bun.openDir(cwd, manager.options.bin_path)) |node_modules_bin_handle| { + var node_modules_bin: std.fs.Dir = node_modules_bin_handle; + defer node_modules_bin.close(); + var iter: std.fs.Dir.Iterator = node_modules_bin.iterate(); + iterator: while (iter.next() catch null) |entry| { + switch (entry.kind) { + std.fs.Dir.Entry.Kind.sym_link => { + + // any symlinks which we are unable to open are assumed to be dangling + // note that using access won't work here, because access doesn't resolve symlinks + bun.copy(u8, &node_modules_buf, entry.name); + node_modules_buf[entry.name.len] = 0; + const buf: [:0]u8 = node_modules_buf[0..entry.name.len :0]; + + var file = node_modules_bin.openFileZ(buf, .{ .mode = .read_only }) catch { + node_modules_bin.deleteFileZ(buf) catch {}; + continue :iterator; + }; + + file.close(); + }, + else => {}, + } + } + } else |err| { + if (err != error.ENOENT) { + Output.err(err, "while reading node_modules/.bin", .{}); + Global.crash(); + } + } + } + } +} + +pub fn updatePackageJSONAndInstallCatchError( + ctx: Command.Context, + subcommand: Subcommand, +) !void { + updatePackageJSONAndInstall(ctx, subcommand) catch |err| { + switch (err) { + error.InstallFailed, + error.InvalidPackageJSON, + => { + const log = &bun.CLI.Cli.log_; + log.print(bun.Output.errorWriter()) catch {}; + bun.Global.exit(1); + return; + }, + else => return err, + } + }; +} + +fn updatePackageJSONAndInstallAndCLI( + ctx: Command.Context, + subcommand: Subcommand, + cli: CommandLineArguments, +) !void { + var manager, const original_cwd = PackageManager.init(ctx, cli, subcommand) catch |err| brk: { + if (err == error.MissingPackageJSON) { + switch (subcommand) { + 
.update => {
+                Output.prettyErrorln("No package.json, so nothing to update", .{});
+                Global.crash();
+            },
+            .remove => {
+                Output.prettyErrorln("No package.json, so nothing to remove", .{});
+                Global.crash();
+            },
+            .patch, .@"patch-commit" => {
+                Output.prettyErrorln("No package.json, so nothing to patch", .{});
+                Global.crash();
+            },
+            else => {
+                try attemptToCreatePackageJSON();
+                break :brk try PackageManager.init(ctx, cli, subcommand);
+            },
+        }
+    }
+
+    return err;
+};
+defer ctx.allocator.free(original_cwd);
+
+if (manager.options.shouldPrintCommandName()) {
+    Output.prettyln("bun {s} v" ++ Global.package_json_version_with_sha ++ "\n", .{@tagName(subcommand)});
+    Output.flush();
+}
+
+// When you run `bun add -g <package>` or `bun install -g <package>` and the global bin dir is not in $PATH,
+// we should tell the user to add it to $PATH so they don't get confused.
+if (subcommand.canGloballyInstallPackages()) {
+    if (manager.options.global and manager.options.log_level != .silent) {
+        manager.track_installed_bin = .{ .pending = {} };
+    }
+}
+
+try updatePackageJSONAndInstallWithManager(manager, ctx, original_cwd);
+
+if (manager.options.patch_features == .patch) {
+    try manager.preparePatch();
+}
+
+if (manager.any_failed_to_install) {
+    Global.exit(1);
+}
+
+// Check if we need to print a warning like:
+//
+// > warn: To run "vite", add the global bin folder to $PATH:
+// >
+// > fish_add_path "/private/tmp/test"
+//
+if (subcommand.canGloballyInstallPackages()) {
+    if (manager.options.global) {
+        if (manager.options.bin_path.len > 0 and manager.track_installed_bin == .basename) {
+            const needs_to_print = if (bun.getenvZ("PATH")) |PATH|
+                // This is not perfect
+                //
+                // If you already have a different binary of the same
+                // name, it will not detect that case.
+                //
+                // The problem is there are too many edgecases with filesystem paths.
+                //
+                // We want to veer towards false negatives rather than false
+                // positives. It would be annoying if this message
+                // appears unnecessarily. It's kind of okay if it doesn't appear
+                // when it should.
+                //
+                // If you set BUN_INSTALL_BIN to "/tmp/woo" on macOS and
+                // we just checked for "/tmp/woo" in $PATH, it would
+                // incorrectly print a warning because /tmp/ on macOS is
+                // aliased to /private/tmp/
+                //
+                // Another scenario is case-insensitive filesystems.
If you + // have a binary called "esbuild" in /tmp/TeST and you + // install esbuild, it will not detect that case if we naively + // just checked for "esbuild" in $PATH where "$PATH" is /tmp/test + bun.which( + &PackageManager.package_json_cwd_buf, + PATH, + bun.fs.FileSystem.instance.top_level_dir, + manager.track_installed_bin.basename, + ) == null + else + true; + + if (needs_to_print) { + const MoreInstructions = struct { + shell: bun.CLI.ShellCompletions.Shell = .unknown, + folder: []const u8, + + // Convert "/Users/Jarred Sumner" => "/Users/Jarred\ Sumner" + const ShellPathFormatter = struct { + folder: []const u8, + + pub fn format(instructions: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + var remaining = instructions.folder; + while (bun.strings.indexOfChar(remaining, ' ')) |space| { + try writer.print( + "{}", + .{bun.fmt.fmtPath(u8, remaining[0..space], .{ + .escape_backslashes = true, + .path_sep = if (Environment.isWindows) .windows else .posix, + })}, + ); + try writer.writeAll("\\ "); + remaining = remaining[@min(space + 1, remaining.len)..]; + } + + try writer.print( + "{}", + .{bun.fmt.fmtPath(u8, remaining, .{ + .escape_backslashes = true, + .path_sep = if (Environment.isWindows) .windows else .posix, + })}, + ); + } + }; + + pub fn format(instructions: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + const path = ShellPathFormatter{ .folder = instructions.folder }; + switch (instructions.shell) { + .unknown => { + // Unfortunately really difficult to do this in one line on PowerShell. + try writer.print("{}", .{path}); + }, + .bash => { + try writer.print("export PATH=\"{}:$PATH\"", .{path}); + }, + .zsh => { + try writer.print("export PATH=\"{}:$PATH\"", .{path}); + }, + .fish => { + // Regular quotes will do here. + try writer.print("fish_add_path {}", .{bun.fmt.quote(instructions.folder)}); + }, + .pwsh => { + try writer.print("$env:PATH += \";{}\"", .{path}); + }, + } + } + }; + + Output.prettyError("\n", .{}); + + Output.warn( + \\To run {}, add the global bin folder to $PATH: + \\ + \\{} + \\ + , + .{ + bun.fmt.quote(manager.track_installed_bin.basename), + MoreInstructions{ .shell = bun.CLI.ShellCompletions.Shell.fromEnv([]const u8, bun.getenvZ("SHELL") orelse ""), .folder = manager.options.bin_path }, + }, + ); + Output.flush(); + } + } + } + } +} + +pub fn updatePackageJSONAndInstall( + ctx: Command.Context, + subcommand: Subcommand, +) !void { + var cli = switch (subcommand) { + inline else => |cmd| try PackageManager.CommandLineArguments.parse(ctx.allocator, cmd), + }; + + // The way this works: + // 1. Run the bundler on source files + // 2. Rewrite positional arguments to act identically to the developer + // typing in the dependency names + // 3. Run the install command + if (cli.analyze) { + const Analyzer = struct { + ctx: Command.Context, + cli: *PackageManager.CommandLineArguments, + subcommand: Subcommand, + pub fn onAnalyze( + this: *@This(), + result: *bun.bundle_v2.BundleV2.DependenciesScanner.Result, + ) anyerror!void { + // TODO: add separate argument that makes it so positionals[1..] 
slicing is skipped and the positionals are passed through as-is
+                var positionals = bun.default_allocator.alloc(string, result.dependencies.keys().len + 1) catch bun.outOfMemory();
+                positionals[0] = "add";
+                bun.copy(string, positionals[1..], result.dependencies.keys());
+                this.cli.positionals = positionals;
+
+                try updatePackageJSONAndInstallAndCLI(this.ctx, this.subcommand, this.cli.*);
+
+                Global.exit(0);
+            }
+        };
+        var analyzer = Analyzer{
+            .ctx = ctx,
+            .cli = &cli,
+            .subcommand = subcommand,
+        };
+        var fetcher = bun.bundle_v2.BundleV2.DependenciesScanner{
+            .ctx = &analyzer,
+            .entry_points = cli.positionals[1..],
+            .onFetch = @ptrCast(&Analyzer.onAnalyze),
+        };
+
+        // This runs the bundler.
+        try bun.CLI.BuildCommand.exec(bun.CLI.Command.get(), &fetcher);
+        return;
+    }
+
+    return updatePackageJSONAndInstallAndCLI(ctx, subcommand, cli);
+}
+
+// @sortImports
+
+const std = @import("std");
+
+const bun = @import("bun");
+const Environment = bun.Environment;
+const Global = bun.Global;
+const JSON = bun.JSON;
+const JSPrinter = bun.js_printer;
+const Output = bun.Output;
+const default_allocator = bun.default_allocator;
+const logger = bun.logger;
+const string = bun.string;
+const strings = bun.strings;
+const Command = bun.CLI.Command;
+const File = bun.sys.File;
+const PackageNameHash = bun.install.PackageNameHash;
+
+const Semver = bun.Semver;
+const String = Semver.String;
+
+const Fs = bun.fs;
+const FileSystem = Fs.FileSystem;
+
+const PackageManager = bun.install.PackageManager;
+const CommandLineArguments = PackageManager.CommandLineArguments;
+const PackageJSONEditor = PackageManager.PackageJSONEditor;
+const PatchCommitResult = PackageManager.PatchCommitResult;
+const Subcommand = PackageManager.Subcommand;
+const UpdateRequest = PackageManager.UpdateRequest;
+const attemptToCreatePackageJSON = PackageManager.attemptToCreatePackageJSON;
diff --git a/src/install/PackageManagerTask.zig b/src/install/PackageManagerTask.zig
new file mode 100644
index 0000000000..c0d17a0d06
--- /dev/null
+++ b/src/install/PackageManagerTask.zig
@@ -0,0 +1,384 @@
+//! Schedule long-running callbacks for a task
+//! 
Slow stuff is broken into tasks, each can run independently without locks + +tag: Tag, +request: Request, +data: Data, +status: Status = Status.waiting, +threadpool_task: ThreadPool.Task = ThreadPool.Task{ .callback = &callback }, +log: logger.Log, +id: Id, +err: ?anyerror = null, +package_manager: *PackageManager, +apply_patch_task: ?*PatchTask = null, +next: ?*Task = null, + +/// An ID that lets us register a callback without keeping the same pointer around +pub const Id = enum(u64) { + _, + + pub fn get(this: @This()) u64 { + return @intFromEnum(this); + } + + pub fn forNPMPackage(package_name: string, package_version: Semver.Version) Id { + var hasher = bun.Wyhash11.init(0); + hasher.update("npm-package:"); + hasher.update(package_name); + hasher.update("@"); + hasher.update(std.mem.asBytes(&package_version)); + return @enumFromInt(hasher.final()); + } + + pub fn forBinLink(package_id: PackageID) Id { + var hasher = bun.Wyhash11.init(0); + hasher.update("bin-link:"); + hasher.update(std.mem.asBytes(&package_id)); + return @enumFromInt(hasher.final()); + } + + pub fn forManifest(name: string) Id { + var hasher = bun.Wyhash11.init(0); + hasher.update("manifest:"); + hasher.update(name); + return @enumFromInt(hasher.final()); + } + + pub fn forTarball(url: string) Id { + var hasher = bun.Wyhash11.init(0); + hasher.update("tarball:"); + hasher.update(url); + return @enumFromInt(hasher.final()); + } + + // These cannot change: + // We persist them to the filesystem. + pub fn forGitClone(url: string) Id { + var hasher = bun.Wyhash11.init(0); + hasher.update(url); + return @enumFromInt(@as(u64, 4 << 61) | @as(u64, @as(u61, @truncate(hasher.final())))); + } + + pub fn forGitCheckout(url: string, resolved: string) Id { + var hasher = bun.Wyhash11.init(0); + hasher.update(url); + hasher.update("@"); + hasher.update(resolved); + return @enumFromInt(@as(u64, 5 << 61) | @as(u64, @as(u61, @truncate(hasher.final())))); + } +}; + +pub fn callback(task: *ThreadPool.Task) void { + Output.Source.configureThread(); + defer Output.flush(); + + var this: *Task = @fieldParentPtr("threadpool_task", task); + const manager = this.package_manager; + defer { + if (this.status == .success) { + if (this.apply_patch_task) |pt| { + defer pt.deinit(); + pt.apply() catch bun.outOfMemory(); + if (pt.callback.apply.logger.errors > 0) { + defer pt.callback.apply.logger.deinit(); + // this.log.addErrorFmt(null, logger.Loc.Empty, bun.default_allocator, "failed to apply patch: {}", .{e}) catch unreachable; + pt.callback.apply.logger.print(Output.errorWriter()) catch {}; + } + } + } + manager.resolve_tasks.push(this); + manager.wake(); + } + + switch (this.tag) { + .package_manifest => { + const allocator = bun.default_allocator; + var manifest = &this.request.package_manifest; + const body = manifest.network.response_buffer.move(); + + defer { + bun.default_allocator.free(body); + } + + const package_manifest = Npm.Registry.getPackageMetadata( + allocator, + manager.scopeForPackageName(manifest.name.slice()), + (manifest.network.response.metadata orelse @panic("Assertion failure: Expected metadata to be set")).response, + body, + &this.log, + manifest.name.slice(), + manifest.network.callback.package_manifest.loaded_manifest, + manager, + ) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + + this.err = err; + this.status = Status.fail; + this.data = .{ .package_manifest = .{} }; + return; + }; + + switch (package_manifest) { + .fresh, .cached => |result| { + this.status = Status.success; + this.data = 
.{ .package_manifest = result };
+                    return;
+                },
+                .not_found => {
+                    this.log.addErrorFmt(null, logger.Loc.Empty, allocator, "404 - GET {s}", .{
+                        this.request.package_manifest.name.slice(),
+                    }) catch unreachable;
+                    this.status = Status.fail;
+                    this.data = .{ .package_manifest = .{} };
+                    return;
+                },
+            }
+        },
+        .extract => {
+            const bytes = this.request.extract.network.response_buffer.move();
+
+            defer {
+                bun.default_allocator.free(bytes);
+            }
+
+            const result = this.request.extract.tarball.run(
+                &this.log,
+                bytes,
+            ) catch |err| {
+                bun.handleErrorReturnTrace(err, @errorReturnTrace());
+
+                this.err = err;
+                this.status = Status.fail;
+                this.data = .{ .extract = .{} };
+                return;
+            };
+
+            this.data = .{ .extract = result };
+            this.status = Status.success;
+        },
+        .git_clone => {
+            const name = this.request.git_clone.name.slice();
+            const url = this.request.git_clone.url.slice();
+            var attempt: u8 = 1;
+            const dir = brk: {
+                if (Repository.tryHTTPS(url)) |https| break :brk Repository.download(
+                    manager.allocator,
+                    this.request.git_clone.env,
+                    &this.log,
+                    manager.getCacheDirectory(),
+                    this.id,
+                    name,
+                    https,
+                    attempt,
+                ) catch |err| {
+                    // Exit early if git ran and could not find the
+                    // repository; skip trying SSH.
+                    if (err == error.RepositoryNotFound) {
+                        this.err = err;
+                        this.status = Status.fail;
+                        this.data = .{ .git_clone = bun.invalid_fd };
+
+                        return;
+                    }
+
+                    this.err = err;
+                    this.status = Status.fail;
+                    this.data = .{ .git_clone = bun.invalid_fd };
+                    attempt += 1;
+                    break :brk null;
+                };
+                break :brk null;
+            } orelse if (Repository.trySSH(url)) |ssh| Repository.download(
+                manager.allocator,
+                this.request.git_clone.env,
+                &this.log,
+                manager.getCacheDirectory(),
+                this.id,
+                name,
+                ssh,
+                attempt,
+            ) catch |err| {
+                this.err = err;
+                this.status = Status.fail;
+                this.data = .{ .git_clone = bun.invalid_fd };
+                return;
+            } else {
+                return;
+            };
+
+            this.err = null;
+            this.data = .{ .git_clone = .fromStdDir(dir) };
+            this.status = Status.success;
+        },
+        .git_checkout => {
+            const git_checkout = &this.request.git_checkout;
+            const data = Repository.checkout(
+                manager.allocator,
+                this.request.git_checkout.env,
+                &this.log,
+                manager.getCacheDirectory(),
+                git_checkout.repo_dir.stdDir(),
+                git_checkout.name.slice(),
+                git_checkout.url.slice(),
+                git_checkout.resolved.slice(),
+            ) catch |err| {
+                this.err = err;
+                this.status = Status.fail;
+                this.data = .{ .git_checkout = .{} };
+
+                return;
+            };
+
+            this.data = .{
+                .git_checkout = data,
+            };
+            this.status = Status.success;
+        },
+        .local_tarball => {
+            const workspace_pkg_id = manager.lockfile.getWorkspacePkgIfWorkspaceDep(this.request.local_tarball.tarball.dependency_id);
+
+            var abs_buf: bun.PathBuffer = undefined;
+            const tarball_path, const normalize = if (workspace_pkg_id != invalid_package_id) tarball_path: {
+                const workspace_res = manager.lockfile.packages.items(.resolution)[workspace_pkg_id];
+
+                if (workspace_res.tag != .workspace) break :tarball_path .{ this.request.local_tarball.tarball.url.slice(), true };
+
+                // Construct an absolute path to the tarball.
+                // Normally tarball paths are always relative to the root directory, but if a
+                // workspace depends on a tarball path, it should be relative to the workspace. 
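+                // Hypothetical example: with a top-level dir of "/repo" and a
+                // workspace at "packages/app" that depends on "file:./vendor/dep.tgz",
+                // the join below yields "/repo/packages/app/vendor/dep.tgz" rather
+                // than resolving the tarball path against "/repo" itself.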
+ const workspace_path = workspace_res.value.workspace.slice(manager.lockfile.buffers.string_bytes.items); + break :tarball_path .{ + Path.joinAbsStringBuf( + FileSystem.instance.top_level_dir, + &abs_buf, + &[_][]const u8{ + workspace_path, + this.request.local_tarball.tarball.url.slice(), + }, + .auto, + ), + false, + }; + } else .{ this.request.local_tarball.tarball.url.slice(), true }; + + const result = readAndExtract( + manager.allocator, + &this.request.local_tarball.tarball, + tarball_path, + normalize, + &this.log, + ) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + + this.err = err; + this.status = Status.fail; + this.data = .{ .extract = .{} }; + + return; + }; + + this.data = .{ .extract = result }; + this.status = Status.success; + }, + } +} + +fn readAndExtract( + allocator: std.mem.Allocator, + tarball: *const ExtractTarball, + tarball_path: string, + normalize: bool, + log: *logger.Log, +) !ExtractData { + const bytes = if (normalize) + try File.readFromUserInput(std.fs.cwd(), tarball_path, allocator).unwrap() + else + try File.readFrom(bun.FD.cwd(), tarball_path, allocator).unwrap(); + defer allocator.free(bytes); + return tarball.run(log, bytes); +} + +pub const Tag = enum(u3) { + package_manifest = 0, + extract = 1, + git_clone = 2, + git_checkout = 3, + local_tarball = 4, +}; + +pub const Status = enum { + waiting, + success, + fail, +}; + +pub const Data = union { + package_manifest: Npm.PackageManifest, + extract: ExtractData, + git_clone: bun.FileDescriptor, + git_checkout: ExtractData, +}; + +pub const Request = union { + /// package name + // todo: Registry URL + package_manifest: struct { + name: strings.StringOrTinyString, + network: *NetworkTask, + }, + extract: struct { + network: *NetworkTask, + tarball: ExtractTarball, + }, + git_clone: struct { + name: strings.StringOrTinyString, + url: strings.StringOrTinyString, + env: DotEnv.Map, + dep_id: DependencyID, + res: Resolution, + }, + git_checkout: struct { + repo_dir: bun.FileDescriptor, + dependency_id: DependencyID, + name: strings.StringOrTinyString, + url: strings.StringOrTinyString, + resolved: strings.StringOrTinyString, + resolution: Resolution, + env: DotEnv.Map, + }, + local_tarball: struct { + tarball: ExtractTarball, + }, +}; + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const DotEnv = bun.DotEnv; +const Output = bun.Output; +const Path = bun.path; +const Semver = bun.Semver; +const ThreadPool = bun.ThreadPool; +const logger = bun.logger; +const string = bun.string; +const strings = bun.strings; +const File = bun.sys.File; + +const Fs = bun.fs; +const FileSystem = Fs.FileSystem; + +const install = @import("install.zig"); +const DependencyID = install.DependencyID; +const ExtractData = install.ExtractData; +const ExtractTarball = install.ExtractTarball; +const NetworkTask = install.NetworkTask; +const Npm = install.Npm; +const PackageID = install.PackageID; +const PackageManager = install.PackageManager; +const PatchTask = install.PatchTask; +const Repository = install.Repository; +const Resolution = install.Resolution; +const Task = install.Task; +const invalid_package_id = install.invalid_package_id; diff --git a/src/install/PackageManifestMap.zig b/src/install/PackageManifestMap.zig new file mode 100644 index 0000000000..97623c79ef --- /dev/null +++ b/src/install/PackageManifestMap.zig @@ -0,0 +1,111 @@ +hash_map: HashMap = .{}, + +const Value = union(enum) { + expired: Npm.PackageManifest, + manifest: Npm.PackageManifest, + + // Avoid 
checking the filesystem again. + not_found: void, +}; +const HashMap = std.HashMapUnmanaged(PackageNameHash, Value, IdentityContext(PackageNameHash), 80); + +pub fn byName(this: *PackageManifestMap, pm: *PackageManager, scope: *const Npm.Registry.Scope, name: []const u8, cache_behavior: CacheBehavior) ?*Npm.PackageManifest { + return this.byNameHash(pm, scope, String.Builder.stringHash(name), cache_behavior); +} + +pub fn insert(this: *PackageManifestMap, name_hash: PackageNameHash, manifest: *const Npm.PackageManifest) !void { + try this.hash_map.put(bun.default_allocator, name_hash, .{ .manifest = manifest.* }); +} + +pub fn byNameHash(this: *PackageManifestMap, pm: *PackageManager, scope: *const Npm.Registry.Scope, name_hash: PackageNameHash, cache_behavior: CacheBehavior) ?*Npm.PackageManifest { + return byNameHashAllowExpired(this, pm, scope, name_hash, null, cache_behavior); +} + +pub fn byNameAllowExpired(this: *PackageManifestMap, pm: *PackageManager, scope: *const Npm.Registry.Scope, name: string, is_expired: ?*bool, cache_behavior: CacheBehavior) ?*Npm.PackageManifest { + return byNameHashAllowExpired(this, pm, scope, String.Builder.stringHash(name), is_expired, cache_behavior); +} + +pub const CacheBehavior = enum { + load_from_memory, + load_from_memory_fallback_to_disk, +}; + +pub fn byNameHashAllowExpired( + this: *PackageManifestMap, + pm: *PackageManager, + scope: *const Npm.Registry.Scope, + name_hash: PackageNameHash, + is_expired: ?*bool, + cache_behavior: CacheBehavior, +) ?*Npm.PackageManifest { + if (cache_behavior == .load_from_memory) { + const entry = this.hash_map.getPtr(name_hash) orelse return null; + return switch (entry.*) { + .manifest => &entry.manifest, + .expired => if (is_expired) |expiry| { + expiry.* = true; + return &entry.expired; + } else null, + .not_found => null, + }; + } + + const entry = this.hash_map.getOrPut(bun.default_allocator, name_hash) catch bun.outOfMemory(); + if (entry.found_existing) { + if (entry.value_ptr.* == .manifest) { + return &entry.value_ptr.manifest; + } + + if (is_expired) |expiry| { + if (entry.value_ptr.* == .expired) { + expiry.* = true; + return &entry.value_ptr.expired; + } + } + + return null; + } + + if (pm.options.enable.manifest_cache) { + if (Npm.PackageManifest.Serializer.loadByFileID( + pm.allocator, + scope, + pm.getCacheDirectory(), + name_hash, + ) catch null) |manifest| { + if (pm.options.enable.manifest_cache_control and manifest.pkg.public_max_age > pm.timestamp_for_manifest_cache_control) { + entry.value_ptr.* = .{ .manifest = manifest }; + return &entry.value_ptr.manifest; + } else { + entry.value_ptr.* = .{ .expired = manifest }; + + if (is_expired) |expiry| { + expiry.* = true; + return &entry.value_ptr.expired; + } + + return null; + } + } + } + + entry.value_ptr.* = .{ .not_found = {} }; + return null; +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const IdentityContext = bun.IdentityContext; +const string = bun.string; + +const Semver = bun.Semver; +const String = Semver.String; + +const install = @import("install.zig"); +const Npm = install.Npm; +const PackageManager = install.PackageManager; +const PackageManifestMap = install.PackageManifestMap; +const PackageNameHash = install.PackageNameHash; diff --git a/src/install/bin.zig b/src/install/bin.zig index 34d8f4ccb0..225c2eb075 100644 --- a/src/install/bin.zig +++ b/src/install/bin.zig @@ -563,8 +563,7 @@ pub const Bin = extern struct { // linking each tree. 
seen: ?*bun.StringHashMap(void), - node_modules: bun.FileDescriptor, - node_modules_path: []const u8, + node_modules_path: *bun.AbsPath(.{}), /// Used for generating relative paths package_name: strings.StringOrTinyString, @@ -692,7 +691,11 @@ pub const Bin = extern struct { return; } - bun.makePath(this.node_modules.stdDir(), ".bin") catch {}; + const node_modules_path_save = this.node_modules_path.save(); + this.node_modules_path.append(".bin"); + bun.makePath(std.fs.cwd(), this.node_modules_path.slice()) catch {}; + node_modules_path_save.restore(); + break :bunx_file bun.sys.File.openatOSPath(bun.invalid_fd, abs_bunx_file, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664).unwrap() catch |real_err| { this.err = real_err; return; @@ -785,7 +788,11 @@ pub const Bin = extern struct { return; } - bun.makePath(this.node_modules.stdDir(), ".bin") catch {}; + const node_modules_path_save = this.node_modules_path.save(); + this.node_modules_path.append(".bin"); + bun.makePath(std.fs.cwd(), this.node_modules_path.slice()) catch {}; + node_modules_path_save.restore(); + switch (bun.sys.symlink(rel_target, abs_dest)) { .err => |real_error| { // It was just created, no need to delete destination and symlink again @@ -815,7 +822,7 @@ pub const Bin = extern struct { /// uses `this.abs_target_buf` pub fn buildTargetPackageDir(this: *const Linker) []const u8 { - const dest_dir_without_trailing_slash = strings.withoutTrailingSlash(this.node_modules_path); + const dest_dir_without_trailing_slash = strings.withoutTrailingSlash(this.node_modules_path.slice()); var remain = this.abs_target_buf; @@ -834,7 +841,7 @@ pub const Bin = extern struct { } pub fn buildDestinationDir(this: *const Linker, global: bool) []u8 { - const dest_dir_without_trailing_slash = strings.withoutTrailingSlash(this.node_modules_path); + const dest_dir_without_trailing_slash = strings.withoutTrailingSlash(this.node_modules_path.slice()); var remain = this.abs_dest_buf; if (global) { diff --git a/src/install/default-trusted-dependencies.txt b/src/install/default-trusted-dependencies.txt index cd8c8096b3..5d4c3ce8c0 100644 --- a/src/install/default-trusted-dependencies.txt +++ b/src/install/default-trusted-dependencies.txt @@ -359,7 +359,6 @@ wordpos workerd wrtc xxhash -yarn yo yorkie zeromq diff --git a/src/install/dependency.zig b/src/install/dependency.zig index 217160edb7..4ea40ac399 100644 --- a/src/install/dependency.zig +++ b/src/install/dependency.zig @@ -165,6 +165,12 @@ pub fn toExternal(this: Dependency) External { return bytes; } +// Needed when a dependency uses workspace: protocol and isn't +// marked with workspace behavior. 
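+// For example, a dependency declared as `"foo": "workspace:*"` parses with
+// `version.tag == .workspace` even when its `behavior` flags were never
+// given the workspace bit, so either signal alone is insufficient.
+//
+// A minimal (hypothetical) call site:
+//
+//     const dep = lockfile.buffers.dependencies.items[dep_id];
+//     if (dep.isWorkspaceDep()) {
+//         // resolve within the monorepo instead of the registry
+//     }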
+pub fn isWorkspaceDep(this: *const Dependency) bool { + return this.behavior.isWorkspace() or this.version.tag == .workspace; +} + pub inline fn isSCPLikePath(dependency: string) bool { // Shortest valid expression: h:p if (dependency.len < 3) return false; @@ -1399,6 +1405,14 @@ pub const Behavior = packed struct(u8) { return .eq; } + if (lhs.isWorkspaceOnly() != rhs.isWorkspaceOnly()) { + // ensure isWorkspaceOnly deps are placed at the beginning + return if (lhs.isWorkspaceOnly()) + .lt + else + .gt; + } + if (lhs.isProd() != rhs.isProd()) { return if (lhs.isProd()) .gt diff --git a/src/install/hoisted_install.zig b/src/install/hoisted_install.zig index 79cce4ef2e..8cca648f7c 100644 --- a/src/install/hoisted_install.zig +++ b/src/install/hoisted_install.zig @@ -16,32 +16,9 @@ const ProgressStrings = PackageManager.ProgressStrings; const Bin = install.Bin; const PackageInstaller = PackageManager.PackageInstaller; const Bitset = bun.bit_set.DynamicBitSetUnmanaged; -const TruncatedPackageNameHash = install.TruncatedPackageNameHash; const PackageID = install.PackageID; -const invalid_package_id = install.invalid_package_id; const TreeContext = PackageInstaller.TreeContext; -fn addDependenciesToSet( - names: *std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void), - lockfile: *Lockfile, - dependencies_slice: Lockfile.DependencySlice, -) void { - const begin = dependencies_slice.off; - const end = begin +| dependencies_slice.len; - var dep_id = begin; - while (dep_id < end) : (dep_id += 1) { - const package_id = lockfile.buffers.resolutions.items[dep_id]; - if (package_id == invalid_package_id) continue; - - const dep = lockfile.buffers.dependencies.items[dep_id]; - const entry = names.getOrPut(lockfile.allocator, @truncate(dep.name_hash)) catch bun.outOfMemory(); - if (!entry.found_existing) { - const dependency_slice = lockfile.packages.items(.dependencies)[package_id]; - addDependenciesToSet(names, lockfile, dependency_slice); - } - } -} - pub fn installHoistedPackages( this: *PackageManager, ctx: Command.Context, @@ -49,6 +26,8 @@ pub fn installHoistedPackages( install_root_dependencies: bool, log_level: PackageManager.Options.LogLevel, ) !PackageInstall.Summary { + bun.Analytics.Features.hoisted_bun_install += 1; + const original_trees = this.lockfile.buffers.trees; const original_tree_dep_ids = this.lockfile.buffers.hoisted_dependencies; @@ -182,35 +161,6 @@ pub fn installHoistedPackages( // to make mistakes harder var parts = this.lockfile.packages.slice(); - const trusted_dependencies_from_update_requests: std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void) = trusted_deps: { - - // find all deps originating from --trust packages from cli - var set: std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void) = .{}; - if (this.options.do.trust_dependencies_from_args and this.lockfile.packages.len > 0) { - const root_deps = parts.items(.dependencies)[this.root_package_id.get(this.lockfile, this.workspace_name_hash)]; - var dep_id = root_deps.off; - const end = dep_id +| root_deps.len; - while (dep_id < end) : (dep_id += 1) { - const root_dep = this.lockfile.buffers.dependencies.items[dep_id]; - for (this.update_requests) |request| { - if (request.matches(root_dep, this.lockfile.buffers.string_bytes.items)) { - const package_id = this.lockfile.buffers.resolutions.items[dep_id]; - if (package_id == invalid_package_id) continue; - - const entry = set.getOrPut(this.lockfile.allocator, @truncate(root_dep.name_hash)) catch bun.outOfMemory(); - if (!entry.found_existing) { - 
const dependency_slice = parts.items(.dependencies)[package_id]; - addDependenciesToSet(&set, this.lockfile, dependency_slice); - } - break; - } - } - } - } - - break :trusted_deps set; - }; - break :brk PackageInstaller{ .manager = this, .options = &this.options, @@ -258,7 +208,7 @@ pub fn installHoistedPackages( } break :trees trees; }, - .trusted_dependencies_from_update_requests = trusted_dependencies_from_update_requests, + .trusted_dependencies_from_update_requests = this.findTrustedDependenciesFromUpdateRequests(), .seen_bin_links = bun.StringHashMap(void).init(this.allocator), }; }; @@ -298,7 +248,6 @@ pub fn installHoistedPackages( &installer, .{ .onExtract = PackageInstaller.installEnqueuedPackagesAfterExtraction, - .onPatch = PackageInstaller.installEnqueuedPackagesImpl, .onResolve = {}, .onPackageManifestError = {}, .onPackageDownloadError = {}, @@ -321,7 +270,6 @@ pub fn installHoistedPackages( &installer, .{ .onExtract = PackageInstaller.installEnqueuedPackagesAfterExtraction, - .onPatch = PackageInstaller.installEnqueuedPackagesImpl, .onResolve = {}, .onPackageManifestError = {}, .onPackageDownloadError = {}, @@ -348,7 +296,6 @@ pub fn installHoistedPackages( closure.installer, .{ .onExtract = PackageInstaller.installEnqueuedPackagesAfterExtraction, - .onPatch = PackageInstaller.installEnqueuedPackagesImpl, .onResolve = {}, .onPackageManifestError = {}, .onPackageDownloadError = {}, diff --git a/src/install/install.zig b/src/install/install.zig index 3766862d54..32464ef26f 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -1,59 +1,5 @@ -// Default to a maximum of 64 simultaneous HTTP requests for bun install if no proxy is specified -// if a proxy IS specified, default to 64. We have different values because we might change this in the future. 
-// https://github.com/npm/cli/issues/7072 -// https://pnpm.io/npmrc#network-concurrency (pnpm defaults to 16) -// https://yarnpkg.com/configuration/yarnrc#networkConcurrency (defaults to 50) -const default_max_simultaneous_requests_for_bun_install = 64; -const default_max_simultaneous_requests_for_bun_install_for_proxies = 64; - -const bun = @import("bun"); -const string = bun.string; -const Output = bun.Output; -const Global = bun.Global; -const Environment = bun.Environment; -const strings = bun.strings; -const MutableString = bun.MutableString; -const stringZ = bun.stringZ; -const default_allocator = bun.default_allocator; -const std = @import("std"); -const JSC = bun.JSC; -const DirInfo = @import("../resolver/dir_info.zig"); -const File = bun.sys.File; -const logger = bun.logger; -const OOM = bun.OOM; -const FD = bun.FD; - -const JSON = bun.JSON; -const JSPrinter = bun.js_printer; - -const Api = @import("../api/schema.zig").Api; -const Path = bun.path; -const Command = @import("../cli.zig").Command; -const BunArguments = @import("../cli.zig").Arguments; -const transpiler = bun.transpiler; - -const DotEnv = @import("../env_loader.zig"); -const which = @import("../which.zig").which; -const Run = @import("../bun_js.zig").Run; -const Fs = @import("../fs.zig"); -const FileSystem = Fs.FileSystem; -const URL = @import("../url.zig").URL; -const HTTP = bun.http; -const AsyncHTTP = HTTP.AsyncHTTP; - -const HeaderBuilder = HTTP.HeaderBuilder; - -const ExtractTarball = @import("./extract_tarball.zig"); -pub const Npm = @import("./npm.zig"); -const Syscall = bun.sys; -const RunCommand = @import("../cli/run_command.zig").RunCommand; threadlocal var initialized_store = false; -pub const Lockfile = @import("./lockfile.zig"); -pub const TextLockfile = @import("./lockfile/bun.lock.zig"); -pub const PatchedDep = Lockfile.PatchedDep; -const Walker = @import("../walker_skippable.zig"); - pub const bun_hash_tag = ".bun-tag-"; pub const max_hex_hash_len: comptime_int = brk: { var buf: [128]u8 = undefined; @@ -70,16 +16,36 @@ pub fn buntaghashbuf_make(buf: *BuntagHashBuf, patch_hash: u64) [:0]u8 { return bunhashtag; } -pub const patch = @import("./patch_install.zig"); -pub const PatchTask = patch.PatchTask; +pub const StorePathFormatter = struct { + str: string, + + pub fn format(this: StorePathFormatter, comptime _: string, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { + // if (!this.opts.replace_slashes) { + // try writer.writeAll(this.str); + // return; + // } + + for (this.str) |c| { + switch (c) { + '/' => try writer.writeByte('+'), + '\\' => try writer.writeByte('+'), + else => try writer.writeByte(c), + } + } + } +}; + +pub fn fmtStorePath(str: string) StorePathFormatter { + return .{ + .str = str, + }; +} // these bytes are skipped // so we just make it repeat bun bun bun bun bun bun bun bun bun // because why not pub const alignment_bytes_to_repeat_buffer = [_]u8{0} ** 144; -const JSAst = bun.JSAst; - pub fn initializeStore() void { if (initialized_store) { JSAst.Expr.Data.Store.reset(); @@ -124,77 +90,6 @@ pub fn initializeMiniStore() void { } } -const IdentityContext = @import("../identity_context.zig").IdentityContext; -const ArrayIdentityContext = @import("../identity_context.zig").ArrayIdentityContext; -const NetworkQueue = std.fifo.LinearFifo(*NetworkTask, .{ .Static = 32 }); -const PatchTaskFifo = std.fifo.LinearFifo(*PatchTask, .{ .Static = 32 }); -const Semver = bun.Semver; -const ExternalString = Semver.ExternalString; -const String = Semver.String; -const 
GlobalStringBuilder = bun.StringBuilder; -const SlicedString = Semver.SlicedString; -pub const Repository = @import("./repository.zig").Repository; -pub const Bin = @import("./bin.zig").Bin; -pub const Dependency = @import("./dependency.zig"); -const Behavior = @import("./dependency.zig").Behavior; -const FolderResolution = @import("./resolvers/folder_resolver.zig").FolderResolution; - -pub fn ExternalSlice(comptime Type: type) type { - return extern struct { - pub const Slice = @This(); - - pub const Child: type = Type; - - off: u32 = 0, - len: u32 = 0, - - pub const invalid: @This() = .{ .off = std.math.maxInt(u32), .len = std.math.maxInt(u32) }; - - pub inline fn isInvalid(this: Slice) bool { - return this.off == std.math.maxInt(u32) and this.len == std.math.maxInt(u32); - } - - pub inline fn contains(this: Slice, id: u32) bool { - return id >= this.off and id < (this.len + this.off); - } - - pub inline fn get(this: Slice, in: []const Type) []const Type { - if (comptime Environment.allow_assert) { - bun.assert(this.off + this.len <= in.len); - } - // it should be impossible to address this out of bounds due to the minimum here - return in.ptr[this.off..@min(in.len, this.off + this.len)]; - } - - pub inline fn mut(this: Slice, in: []Type) []Type { - if (comptime Environment.allow_assert) { - bun.assert(this.off + this.len <= in.len); - } - return in.ptr[this.off..@min(in.len, this.off + this.len)]; - } - - pub inline fn begin(this: Slice) u32 { - return this.off; - } - - pub inline fn end(this: Slice) u32 { - return this.off + this.len; - } - - pub fn init(buf: []const Type, in: []const Type) Slice { - // if (comptime Environment.allow_assert) { - // bun.assert(@intFromPtr(buf.ptr) <= @intFromPtr(in.ptr)); - // bun.assert((@intFromPtr(in.ptr) + in.len) <= (@intFromPtr(buf.ptr) + buf.len)); - // } - - return Slice{ - .off = @as(u32, @truncate((@intFromPtr(in.ptr) - @intFromPtr(buf.ptr)) / @sizeOf(Type))), - .len = @as(u32, @truncate(in.len)), - }; - } - }; -} - pub const PackageID = u32; pub const DependencyID = u32; @@ -209,15 +104,6 @@ pub const DependencyID = u32; pub const invalid_package_id = std.math.maxInt(PackageID); pub const invalid_dependency_id = std.math.maxInt(DependencyID); -pub const ExternalStringList = ExternalSlice(ExternalString); -pub const ExternalPackageNameHashList = ExternalSlice(PackageNameHash); -pub const VersionSlice = ExternalSlice(Semver.Version); - -pub const ExternalStringMap = extern struct { - name: ExternalStringList = .{}, - value: ExternalStringList = .{}, -}; - pub const PackageNameAndVersionHash = u64; pub const PackageNameHash = u64; // Use String.Builder.stringHash to compute this pub const TruncatedPackageNameHash = u32; // @truncate String.Builder.stringHash to compute this @@ -237,311 +123,6 @@ pub const Aligner = struct { } }; -pub const NetworkTask = struct { - unsafe_http_client: AsyncHTTP = undefined, - response: bun.http.HTTPClientResult = .{}, - task_id: u64, - url_buf: []const u8 = &[_]u8{}, - retried: u16 = 0, - allocator: std.mem.Allocator, - request_buffer: MutableString = undefined, - response_buffer: MutableString = undefined, - package_manager: *PackageManager, - callback: union(Task.Tag) { - package_manifest: struct { - loaded_manifest: ?Npm.PackageManifest = null, - name: strings.StringOrTinyString, - }, - extract: ExtractTarball, - git_clone: void, - git_checkout: void, - local_tarball: void, - }, - /// Key in patchedDependencies in package.json - apply_patch_task: ?*PatchTask = null, - next: ?*NetworkTask = null, - - pub const 
DedupeMapEntry = struct { - is_required: bool, - }; - pub const DedupeMap = std.HashMap(u64, DedupeMapEntry, IdentityContext(u64), 80); - - pub fn notify(this: *NetworkTask, async_http: *AsyncHTTP, result: bun.http.HTTPClientResult) void { - defer this.package_manager.wake(); - async_http.real.?.* = async_http.*; - async_http.real.?.response_buffer = async_http.response_buffer; - this.response = result; - this.package_manager.async_network_task_queue.push(this); - } - - pub const Authorization = enum { - no_authorization, - allow_authorization, - }; - - // We must use a less restrictive Accept header value - // https://github.com/oven-sh/bun/issues/341 - // https://www.jfrog.com/jira/browse/RTFACT-18398 - const accept_header_value = "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*"; - - const default_headers_buf: string = "Accept" ++ accept_header_value; - - fn appendAuth(header_builder: *HeaderBuilder, scope: *const Npm.Registry.Scope) void { - if (scope.token.len > 0) { - header_builder.appendFmt("Authorization", "Bearer {s}", .{scope.token}); - } else if (scope.auth.len > 0) { - header_builder.appendFmt("Authorization", "Basic {s}", .{scope.auth}); - } else { - return; - } - header_builder.append("npm-auth-type", "legacy"); - } - - fn countAuth(header_builder: *HeaderBuilder, scope: *const Npm.Registry.Scope) void { - if (scope.token.len > 0) { - header_builder.count("Authorization", ""); - header_builder.content.cap += "Bearer ".len + scope.token.len; - } else if (scope.auth.len > 0) { - header_builder.count("Authorization", ""); - header_builder.content.cap += "Basic ".len + scope.auth.len; - } else { - return; - } - header_builder.count("npm-auth-type", "legacy"); - } - - pub fn forManifest( - this: *NetworkTask, - name: string, - allocator: std.mem.Allocator, - scope: *const Npm.Registry.Scope, - loaded_manifest: ?*const Npm.PackageManifest, - is_optional: bool, - ) !void { - this.url_buf = blk: { - - // Not all registries support scoped package names when fetching the manifest. - // registry.npmjs.org supports both "@storybook%2Faddons" and "@storybook/addons" - // Other registries like AWS codeartifact only support the former. - // "npm" CLI requests the manifest with the encoded name. 
- var arena = std.heap.ArenaAllocator.init(bun.default_allocator); - defer arena.deinit(); - var stack_fallback_allocator = std.heap.stackFallback(512, arena.allocator()); - var encoded_name = name; - if (strings.containsChar(name, '/')) { - encoded_name = try std.mem.replaceOwned(u8, stack_fallback_allocator.get(), name, "/", "%2f"); - } - - const tmp = bun.JSC.URL.join( - bun.String.fromUTF8(scope.url.href), - bun.String.fromUTF8(encoded_name), - ); - defer tmp.deref(); - - if (tmp.tag == .Dead) { - if (!is_optional) { - this.package_manager.log.addErrorFmt( - null, - logger.Loc.Empty, - allocator, - "Failed to join registry {} and package {} URLs", - .{ bun.fmt.QuotedFormatter{ .text = scope.url.href }, bun.fmt.QuotedFormatter{ .text = name } }, - ) catch bun.outOfMemory(); - } else { - this.package_manager.log.addWarningFmt( - null, - logger.Loc.Empty, - allocator, - "Failed to join registry {} and package {} URLs", - .{ bun.fmt.QuotedFormatter{ .text = scope.url.href }, bun.fmt.QuotedFormatter{ .text = name } }, - ) catch bun.outOfMemory(); - } - return error.InvalidURL; - } - - if (!(tmp.hasPrefixComptime("https://") or tmp.hasPrefixComptime("http://"))) { - if (!is_optional) { - this.package_manager.log.addErrorFmt( - null, - logger.Loc.Empty, - allocator, - "Registry URL must be http:// or https://\nReceived: \"{}\"", - .{tmp}, - ) catch bun.outOfMemory(); - } else { - this.package_manager.log.addWarningFmt( - null, - logger.Loc.Empty, - allocator, - "Registry URL must be http:// or https://\nReceived: \"{}\"", - .{tmp}, - ) catch bun.outOfMemory(); - } - return error.InvalidURL; - } - - // This actually duplicates the string! So we defer deref the WTF managed one above. - break :blk try tmp.toOwnedSlice(allocator); - }; - - var last_modified: string = ""; - var etag: string = ""; - if (loaded_manifest) |manifest| { - last_modified = manifest.pkg.last_modified.slice(manifest.string_buf); - etag = manifest.pkg.etag.slice(manifest.string_buf); - } - - var header_builder = HeaderBuilder{}; - - countAuth(&header_builder, scope); - - if (etag.len != 0) { - header_builder.count("If-None-Match", etag); - } - - if (last_modified.len != 0) { - header_builder.count("If-Modified-Since", last_modified); - } - - if (header_builder.header_count > 0) { - header_builder.count("Accept", accept_header_value); - if (last_modified.len > 0 and etag.len > 0) { - header_builder.content.count(last_modified); - } - try header_builder.allocate(allocator); - - appendAuth(&header_builder, scope); - - if (etag.len != 0) { - header_builder.append("If-None-Match", etag); - } else if (last_modified.len != 0) { - header_builder.append("If-Modified-Since", last_modified); - } - - header_builder.append("Accept", accept_header_value); - - if (last_modified.len > 0 and etag.len > 0) { - last_modified = header_builder.content.append(last_modified); - } - } else { - try header_builder.entries.append( - allocator, - .{ - .name = .{ .offset = 0, .length = @as(u32, @truncate("Accept".len)) }, - .value = .{ .offset = "Accept".len, .length = @as(u32, @truncate(default_headers_buf.len - "Accept".len)) }, - }, - ); - header_builder.header_count = 1; - header_builder.content = GlobalStringBuilder{ .ptr = @as([*]u8, @ptrFromInt(@intFromPtr(bun.span(default_headers_buf).ptr))), .len = default_headers_buf.len, .cap = default_headers_buf.len }; - } - - this.response_buffer = try MutableString.init(allocator, 0); - this.allocator = allocator; - - const url = URL.parse(this.url_buf); - this.unsafe_http_client = 
AsyncHTTP.init(allocator, .GET, url, header_builder.entries, header_builder.content.ptr.?[0..header_builder.content.len], &this.response_buffer, "", this.getCompletionCallback(), HTTP.FetchRedirect.follow, .{ - .http_proxy = this.package_manager.httpProxy(url), - }); - this.unsafe_http_client.client.flags.reject_unauthorized = this.package_manager.tlsRejectUnauthorized(); - - if (PackageManager.verbose_install) { - this.unsafe_http_client.client.verbose = .headers; - } - - this.callback = .{ - .package_manifest = .{ - .name = try strings.StringOrTinyString.initAppendIfNeeded(name, *FileSystem.FilenameStore, FileSystem.FilenameStore.instance), - .loaded_manifest = if (loaded_manifest) |manifest| manifest.* else null, - }, - }; - - if (PackageManager.verbose_install) { - this.unsafe_http_client.verbose = .headers; - this.unsafe_http_client.client.verbose = .headers; - } - - // Incase the ETag causes invalidation, we fallback to the last modified date. - if (last_modified.len != 0 and bun.getRuntimeFeatureFlag(.BUN_FEATURE_FLAG_LAST_MODIFIED_PRETEND_304)) { - this.unsafe_http_client.client.flags.force_last_modified = true; - this.unsafe_http_client.client.if_modified_since = last_modified; - } - } - - pub fn getCompletionCallback(this: *NetworkTask) HTTP.HTTPClientResult.Callback { - return HTTP.HTTPClientResult.Callback.New(*NetworkTask, notify).init(this); - } - - pub fn schedule(this: *NetworkTask, batch: *ThreadPool.Batch) void { - this.unsafe_http_client.schedule(this.allocator, batch); - } - - pub const ForTarballError = OOM || error{ - InvalidURL, - }; - - pub fn forTarball( - this: *NetworkTask, - allocator: std.mem.Allocator, - tarball_: *const ExtractTarball, - scope: *const Npm.Registry.Scope, - authorization: NetworkTask.Authorization, - ) ForTarballError!void { - this.callback = .{ .extract = tarball_.* }; - const tarball = &this.callback.extract; - const tarball_url = tarball.url.slice(); - if (tarball_url.len == 0) { - this.url_buf = try ExtractTarball.buildURL( - scope.url.href, - tarball.name, - tarball.resolution.value.npm.version, - this.package_manager.lockfile.buffers.string_bytes.items, - ); - } else { - this.url_buf = tarball_url; - } - - if (!(strings.hasPrefixComptime(this.url_buf, "https://") or strings.hasPrefixComptime(this.url_buf, "http://"))) { - const msg = .{ - .fmt = "Expected tarball URL to start with https:// or http://, got {} while fetching package {}", - .args = .{ bun.fmt.QuotedFormatter{ .text = this.url_buf }, bun.fmt.QuotedFormatter{ .text = tarball.name.slice() } }, - }; - - try this.package_manager.log.addErrorFmt(null, .{}, allocator, msg.fmt, msg.args); - return error.InvalidURL; - } - - this.response_buffer = MutableString.initEmpty(allocator); - this.allocator = allocator; - - var header_builder = HeaderBuilder{}; - var header_buf: string = ""; - - if (authorization == .allow_authorization) { - countAuth(&header_builder, scope); - } - - if (header_builder.header_count > 0) { - try header_builder.allocate(allocator); - - if (authorization == .allow_authorization) { - appendAuth(&header_builder, scope); - } - - header_buf = header_builder.content.ptr.?[0..header_builder.content.len]; - } - - const url = URL.parse(this.url_buf); - - this.unsafe_http_client = AsyncHTTP.init(allocator, .GET, url, header_builder.entries, header_buf, &this.response_buffer, "", this.getCompletionCallback(), HTTP.FetchRedirect.follow, .{ - .http_proxy = this.package_manager.httpProxy(url), - }); - this.unsafe_http_client.client.flags.reject_unauthorized = 
this.package_manager.tlsRejectUnauthorized(); - if (PackageManager.verbose_install) { - this.unsafe_http_client.client.verbose = .headers; - } - } -}; - pub const Origin = enum(u8) { local = 0, npm = 1, @@ -618,355 +199,6 @@ pub const PreinstallState = enum(u4) { applying_patch, }; -/// Schedule long-running callbacks for a task -/// Slow stuff is broken into tasks, each can run independently without locks -pub const Task = struct { - tag: Tag, - request: Request, - data: Data, - status: Status = Status.waiting, - threadpool_task: ThreadPool.Task = ThreadPool.Task{ .callback = &callback }, - log: logger.Log, - id: u64, - err: ?anyerror = null, - package_manager: *PackageManager, - apply_patch_task: ?*PatchTask = null, - next: ?*Task = null, - - /// An ID that lets us register a callback without keeping the same pointer around - pub fn NewID(comptime Hasher: type, comptime IDType: type) type { - return struct { - pub const Type = IDType; - pub fn forNPMPackage(package_name: string, package_version: Semver.Version) IDType { - var hasher = Hasher.init(0); - hasher.update("npm-package:"); - hasher.update(package_name); - hasher.update("@"); - hasher.update(std.mem.asBytes(&package_version)); - return hasher.final(); - } - - pub fn forBinLink(package_id: PackageID) IDType { - var hasher = Hasher.init(0); - hasher.update("bin-link:"); - hasher.update(std.mem.asBytes(&package_id)); - return hasher.final(); - } - - pub fn forManifest(name: string) IDType { - var hasher = Hasher.init(0); - hasher.update("manifest:"); - hasher.update(name); - return hasher.final(); - } - - pub fn forTarball(url: string) IDType { - var hasher = Hasher.init(0); - hasher.update("tarball:"); - hasher.update(url); - return hasher.final(); - } - - // These cannot change: - // We persist them to the filesystem. 
- pub fn forGitClone(url: string) IDType { - var hasher = Hasher.init(0); - hasher.update(url); - return @as(u64, 4 << 61) | @as(u64, @as(u61, @truncate(hasher.final()))); - } - - pub fn forGitCheckout(url: string, resolved: string) IDType { - var hasher = Hasher.init(0); - hasher.update(url); - hasher.update("@"); - hasher.update(resolved); - return @as(u64, 5 << 61) | @as(u64, @as(u61, @truncate(hasher.final()))); - } - }; - } - pub const Id = NewID(bun.Wyhash11, u64); - - pub fn callback(task: *ThreadPool.Task) void { - Output.Source.configureThread(); - defer Output.flush(); - - var this: *Task = @fieldParentPtr("threadpool_task", task); - const manager = this.package_manager; - defer { - if (this.status == .success) { - if (this.apply_patch_task) |pt| { - defer pt.deinit(); - pt.apply() catch bun.outOfMemory(); - if (pt.callback.apply.logger.errors > 0) { - defer pt.callback.apply.logger.deinit(); - // this.log.addErrorFmt(null, logger.Loc.Empty, bun.default_allocator, "failed to apply patch: {}", .{e}) catch unreachable; - pt.callback.apply.logger.print(Output.errorWriter()) catch {}; - } - } - } - manager.resolve_tasks.push(this); - manager.wake(); - } - - switch (this.tag) { - .package_manifest => { - const allocator = bun.default_allocator; - var manifest = &this.request.package_manifest; - const body = manifest.network.response_buffer.move(); - - defer { - bun.default_allocator.free(body); - } - - const package_manifest = Npm.Registry.getPackageMetadata( - allocator, - manager.scopeForPackageName(manifest.name.slice()), - (manifest.network.response.metadata orelse @panic("Assertion failure: Expected metadata to be set")).response, - body, - &this.log, - manifest.name.slice(), - manifest.network.callback.package_manifest.loaded_manifest, - manager, - ) catch |err| { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - - this.err = err; - this.status = Status.fail; - this.data = .{ .package_manifest = .{} }; - return; - }; - - switch (package_manifest) { - .fresh, .cached => |result| { - this.status = Status.success; - this.data = .{ .package_manifest = result }; - return; - }, - .not_found => { - this.log.addErrorFmt(null, logger.Loc.Empty, allocator, "404 - GET {s}", .{ - this.request.package_manifest.name.slice(), - }) catch unreachable; - this.status = Status.fail; - this.data = .{ .package_manifest = .{} }; - return; - }, - } - }, - .extract => { - const bytes = this.request.extract.network.response_buffer.move(); - - defer { - bun.default_allocator.free(bytes); - } - - const result = this.request.extract.tarball.run( - &this.log, - bytes, - ) catch |err| { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - - this.err = err; - this.status = Status.fail; - this.data = .{ .extract = .{} }; - return; - }; - - this.data = .{ .extract = result }; - this.status = Status.success; - }, - .git_clone => { - const name = this.request.git_clone.name.slice(); - const url = this.request.git_clone.url.slice(); - var attempt: u8 = 1; - const dir = brk: { - if (Repository.tryHTTPS(url)) |https| break :brk Repository.download( - manager.allocator, - this.request.git_clone.env, - &this.log, - manager.getCacheDirectory(), - this.id, - name, - https, - attempt, - ) catch |err| { - // Exit early if git checked and could - // not find the repository, skip ssh - if (err == error.RepositoryNotFound) { - this.err = err; - this.status = Status.fail; - this.data = .{ .git_clone = bun.invalid_fd }; - - return; - } - - attempt += 1; - break :brk null; - }; - break :brk null; - } orelse if 
(Repository.trySSH(url)) |ssh| Repository.download( - manager.allocator, - this.request.git_clone.env, - &this.log, - manager.getCacheDirectory(), - this.id, - name, - ssh, - attempt, - ) catch |err| { - this.err = err; - this.status = Status.fail; - this.data = .{ .git_clone = bun.invalid_fd }; - - return; - } else { - return; - }; - - this.data = .{ .git_clone = .fromStdDir(dir) }; - this.status = Status.success; - }, - .git_checkout => { - const git_checkout = &this.request.git_checkout; - const data = Repository.checkout( - manager.allocator, - this.request.git_checkout.env, - &this.log, - manager.getCacheDirectory(), - git_checkout.repo_dir.stdDir(), - git_checkout.name.slice(), - git_checkout.url.slice(), - git_checkout.resolved.slice(), - ) catch |err| { - this.err = err; - this.status = Status.fail; - this.data = .{ .git_checkout = .{} }; - - return; - }; - - this.data = .{ - .git_checkout = data, - }; - this.status = Status.success; - }, - .local_tarball => { - const workspace_pkg_id = manager.lockfile.getWorkspacePkgIfWorkspaceDep(this.request.local_tarball.tarball.dependency_id); - - var abs_buf: bun.PathBuffer = undefined; - const tarball_path, const normalize = if (workspace_pkg_id != invalid_package_id) tarball_path: { - const workspace_res = manager.lockfile.packages.items(.resolution)[workspace_pkg_id]; - - if (workspace_res.tag != .workspace) break :tarball_path .{ this.request.local_tarball.tarball.url.slice(), true }; - - // Construct an absolute path to the tarball. - // Normally tarball paths are always relative to the root directory, but if a - // workspace depends on a tarball path, it should be relative to the workspace. - const workspace_path = workspace_res.value.workspace.slice(manager.lockfile.buffers.string_bytes.items); - break :tarball_path .{ - Path.joinAbsStringBuf( - FileSystem.instance.top_level_dir, - &abs_buf, - &[_][]const u8{ - workspace_path, - this.request.local_tarball.tarball.url.slice(), - }, - .auto, - ), - false, - }; - } else .{ this.request.local_tarball.tarball.url.slice(), true }; - - const result = readAndExtract( - manager.allocator, - &this.request.local_tarball.tarball, - tarball_path, - normalize, - &this.log, - ) catch |err| { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - - this.err = err; - this.status = Status.fail; - this.data = .{ .extract = .{} }; - - return; - }; - - this.data = .{ .extract = result }; - this.status = Status.success; - }, - } - } - - fn readAndExtract( - allocator: std.mem.Allocator, - tarball: *const ExtractTarball, - tarball_path: string, - normalize: bool, - log: *logger.Log, - ) !ExtractData { - const bytes = if (normalize) - try File.readFromUserInput(std.fs.cwd(), tarball_path, allocator).unwrap() - else - try File.readFrom(bun.FD.cwd(), tarball_path, allocator).unwrap(); - defer allocator.free(bytes); - return tarball.run(log, bytes); - } - - pub const Tag = enum(u3) { - package_manifest = 0, - extract = 1, - git_clone = 2, - git_checkout = 3, - local_tarball = 4, - }; - - pub const Status = enum { - waiting, - success, - fail, - }; - - pub const Data = union { - package_manifest: Npm.PackageManifest, - extract: ExtractData, - git_clone: bun.FileDescriptor, - git_checkout: ExtractData, - }; - - pub const Request = union { - /// package name - // todo: Registry URL - package_manifest: struct { - name: strings.StringOrTinyString, - network: *NetworkTask, - }, - extract: struct { - network: *NetworkTask, - tarball: ExtractTarball, - }, - git_clone: struct { - name: strings.StringOrTinyString, - 
url: strings.StringOrTinyString, - env: DotEnv.Map, - dep_id: DependencyID, - res: Resolution, - }, - git_checkout: struct { - repo_dir: bun.FileDescriptor, - dependency_id: DependencyID, - name: strings.StringOrTinyString, - url: strings.StringOrTinyString, - resolved: strings.StringOrTinyString, - resolution: Resolution, - env: DotEnv.Map, - }, - local_tarball: struct { - tarball: ExtractTarball, - }, - }; -}; - pub const ExtractData = struct { url: string = "", resolved: string = "", @@ -976,11 +208,6 @@ pub const ExtractData = struct { } = null, }; -pub const PackageInstall = @import("./PackageInstall.zig").PackageInstall; - -pub const Resolution = @import("./resolution.zig").Resolution; -const Progress = bun.Progress; - pub const DependencyInstallContext = struct { tree_id: Lockfile.Tree.Id = 0, path: std.ArrayList(u8) = std.ArrayList(u8).init(bun.default_allocator), @@ -990,9200 +217,15 @@ pub const DependencyInstallContext = struct { pub const TaskCallbackContext = union(enum) { dependency: DependencyID, dependency_install_context: DependencyInstallContext, + isolated_package_install_context: Store.Entry.Id, root_dependency: DependencyID, root_request_id: PackageID, }; -const TaskCallbackList = std.ArrayListUnmanaged(TaskCallbackContext); -const TaskDependencyQueue = std.HashMapUnmanaged(u64, TaskCallbackList, IdentityContext(u64), 80); - -const PreallocatedTaskStore = bun.HiveArray(Task, 64).Fallback; -const PreallocatedNetworkTasks = bun.HiveArray(NetworkTask, 128).Fallback; -const ResolveTaskQueue = bun.UnboundedQueue(Task, .next); - -const ThreadPool = bun.ThreadPool; -const RepositoryMap = std.HashMapUnmanaged(u64, bun.FileDescriptor, IdentityContext(u64), 80); -const NpmAliasMap = std.HashMapUnmanaged(PackageNameHash, Dependency.Version, IdentityContext(u64), 80); - -const PackageManifestMap = struct { - hash_map: HashMap = .{}, - - const Value = union(enum) { - expired: Npm.PackageManifest, - manifest: Npm.PackageManifest, - - // Avoid checking the filesystem again. 
- not_found: void, - }; - const HashMap = std.HashMapUnmanaged(PackageNameHash, Value, IdentityContext(PackageNameHash), 80); - - pub fn byName(this: *PackageManifestMap, pm: *PackageManager, scope: *const Npm.Registry.Scope, name: []const u8, cache_behavior: CacheBehavior) ?*Npm.PackageManifest { - return this.byNameHash(pm, scope, String.Builder.stringHash(name), cache_behavior); - } - - pub fn insert(this: *PackageManifestMap, name_hash: PackageNameHash, manifest: *const Npm.PackageManifest) !void { - try this.hash_map.put(bun.default_allocator, name_hash, .{ .manifest = manifest.* }); - } - - pub fn byNameHash(this: *PackageManifestMap, pm: *PackageManager, scope: *const Npm.Registry.Scope, name_hash: PackageNameHash, cache_behavior: CacheBehavior) ?*Npm.PackageManifest { - return byNameHashAllowExpired(this, pm, scope, name_hash, null, cache_behavior); - } - - pub fn byNameAllowExpired(this: *PackageManifestMap, pm: *PackageManager, scope: *const Npm.Registry.Scope, name: string, is_expired: ?*bool, cache_behavior: CacheBehavior) ?*Npm.PackageManifest { - return byNameHashAllowExpired(this, pm, scope, String.Builder.stringHash(name), is_expired, cache_behavior); - } - - pub const CacheBehavior = enum { - load_from_memory, - load_from_memory_fallback_to_disk, - }; - - pub fn byNameHashAllowExpired( - this: *PackageManifestMap, - pm: *PackageManager, - scope: *const Npm.Registry.Scope, - name_hash: PackageNameHash, - is_expired: ?*bool, - cache_behavior: CacheBehavior, - ) ?*Npm.PackageManifest { - if (cache_behavior == .load_from_memory) { - const entry = this.hash_map.getPtr(name_hash) orelse return null; - return switch (entry.*) { - .manifest => &entry.manifest, - .expired => if (is_expired) |expiry| { - expiry.* = true; - return &entry.expired; - } else null, - .not_found => null, - }; - } - - const entry = this.hash_map.getOrPut(bun.default_allocator, name_hash) catch bun.outOfMemory(); - if (entry.found_existing) { - if (entry.value_ptr.* == .manifest) { - return &entry.value_ptr.manifest; - } - - if (is_expired) |expiry| { - if (entry.value_ptr.* == .expired) { - expiry.* = true; - return &entry.value_ptr.expired; - } - } - - return null; - } - - if (pm.options.enable.manifest_cache) { - if (Npm.PackageManifest.Serializer.loadByFileID( - pm.allocator, - scope, - pm.getCacheDirectory(), - name_hash, - ) catch null) |manifest| { - if (pm.options.enable.manifest_cache_control and manifest.pkg.public_max_age > pm.timestamp_for_manifest_cache_control) { - entry.value_ptr.* = .{ .manifest = manifest }; - return &entry.value_ptr.manifest; - } else { - entry.value_ptr.* = .{ .expired = manifest }; - - if (is_expired) |expiry| { - expiry.* = true; - return &entry.value_ptr.expired; - } - - return null; - } - } - } - - entry.value_ptr.* = .{ .not_found = {} }; - return null; - } -}; - // We can't know all the packages we need until we've downloaded all the packages // The easy way would be: // 1. Download all packages, parsing their dependencies and enqueuing all dependencies for resolution // 2. 
-pub const PackageManager = struct { - cache_directory_: ?std.fs.Dir = null, - - // TODO(dylan-conway): remove this field when we move away from `std.ChildProcess` in repository.zig - cache_directory_path: stringZ = "", - temp_dir_: ?std.fs.Dir = null, - temp_dir_path: stringZ = "", - temp_dir_name: string = "", - root_dir: *Fs.FileSystem.DirEntry, - allocator: std.mem.Allocator, - log: *logger.Log, - resolve_tasks: ResolveTaskQueue = .{}, - timestamp_for_manifest_cache_control: u32 = 0, - extracted_count: u32 = 0, - default_features: Features = .{}, - summary: Lockfile.Package.Diff.Summary = .{}, - env: *DotEnv.Loader, - progress: Progress = .{}, - downloads_node: ?*Progress.Node = null, - scripts_node: ?*Progress.Node = null, - progress_name_buf: [768]u8 = undefined, - progress_name_buf_dynamic: []u8 = &[_]u8{}, - cpu_count: u32 = 0, - - track_installed_bin: TrackInstalledBin = .{ - .none = {}, - }, - - // progress bar stuff when not stack allocated - root_progress_node: *Progress.Node = undefined, - - to_update: bool = false, - - subcommand: Subcommand, - update_requests: []UpdateRequest = &[_]UpdateRequest{}, - - /// Only set in `bun pm` - root_package_json_name_at_time_of_init: []const u8 = "", - - root_package_json_file: std.fs.File, - - /// The package id corresponding to the workspace the install is happening in. Could be root, or - /// could be any of the workspaces. - root_package_id: struct { - id: ?PackageID = null, - pub fn get(this: *@This(), lockfile: *const Lockfile, workspace_name_hash: ?PackageNameHash) PackageID { - return this.id orelse { - this.id = lockfile.getWorkspacePackageID(workspace_name_hash); - return this.id.?; - }; - } - } = .{}, - - thread_pool: ThreadPool, - task_batch: ThreadPool.Batch = .{}, - task_queue: TaskDependencyQueue = .{}, - - manifests: PackageManifestMap = .{}, - folders: FolderResolution.Map = .{}, - git_repositories: RepositoryMap = .{}, - - network_dedupe_map: NetworkTask.DedupeMap = NetworkTask.DedupeMap.init(bun.default_allocator), - async_network_task_queue: AsyncNetworkTaskQueue = .{}, - network_tarball_batch: ThreadPool.Batch = .{}, - network_resolve_batch: ThreadPool.Batch = .{}, - network_task_fifo: NetworkQueue = undefined, - patch_apply_batch: ThreadPool.Batch = .{}, - patch_calc_hash_batch: ThreadPool.Batch = .{}, - patch_task_fifo: PatchTaskFifo = PatchTaskFifo.init(), - patch_task_queue: PatchTaskQueue = .{}, - /// We actually need to calculate the patch file hashes - /// every single time, because someone could edit the patchfile at anytime - pending_pre_calc_hashes: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), - pending_tasks: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), - total_tasks: u32 = 0, - preallocated_network_tasks: PreallocatedNetworkTasks, - preallocated_resolve_tasks: PreallocatedTaskStore, - - /// items are only inserted into this if they took more than 500ms - lifecycle_script_time_log: LifecycleScriptTimeLog = .{}, - - pending_lifecycle_script_tasks: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), - finished_installing: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), - total_scripts: usize = 0, - - root_lifecycle_scripts: ?Package.Scripts.List = null, - - node_gyp_tempdir_name: string = "", - - env_configure: ?ScriptRunEnvironment = null, - - lockfile: *Lockfile = undefined, - - options: Options, - preinstall_state: std.ArrayListUnmanaged(PreinstallState) = .{}, - - global_link_dir: ?std.fs.Dir = null, - global_dir: ?std.fs.Dir = null, - global_link_dir_path: string = "", - 
wait_count: std.atomic.Value(usize) = std.atomic.Value(usize).init(0), - - onWake: WakeHandler = .{}, - ci_mode: bun.LazyBool(computeIsContinuousIntegration, @This(), "ci_mode") = .{}, - - peer_dependencies: std.fifo.LinearFifo(DependencyID, .Dynamic) = std.fifo.LinearFifo(DependencyID, .Dynamic).init(default_allocator), - - // name hash from alias package name -> aliased package dependency version info - known_npm_aliases: NpmAliasMap = .{}, - - event_loop: JSC.AnyEventLoop, - - // During `installPackages` we learn exactly what dependencies from --trust - // actually have scripts to run, and we add them to this list - trusted_deps_to_add_to_package_json: std.ArrayListUnmanaged(string) = .{}, - - any_failed_to_install: bool = false, - - // When adding a `file:` dependency in a workspace package, we want to install it - // relative to the workspace root, but the path provided is relative to the - // workspace package. We keep track of the original here. - original_package_json_path: stringZ, - - // null means root. Used during `cleanWithLogger` to identifier which - // workspace is adding/removing packages - workspace_name_hash: ?PackageNameHash = null, - - workspace_package_json_cache: WorkspacePackageJSONCache = .{}, - - // normally we have `UpdateRequests` to work with for adding/deleting/updating packages, but - // if `bun update` is used without any package names we need a way to keep information for - // the original packages that are updating. - // - // dependency name -> original version information - updating_packages: bun.StringArrayHashMapUnmanaged(PackageUpdateInfo) = .{}, - - patched_dependencies_to_remove: std.ArrayHashMapUnmanaged(PackageNameAndVersionHash, void, ArrayIdentityContext.U64, false) = .{}, - - active_lifecycle_scripts: LifecycleScriptSubprocess.List, - last_reported_slow_lifecycle_script_at: u64 = 0, - cached_tick_for_slow_lifecycle_script_logging: u64 = 0, - - pub const WorkspaceFilter = union(enum) { - all, - name: []const u8, - path: []const u8, - - pub fn init(allocator: std.mem.Allocator, input: string, cwd: string, path_buf: []u8) OOM!WorkspaceFilter { - if ((input.len == 1 and input[0] == '*') or strings.eqlComptime(input, "**")) { - return .all; - } - - var remain = input; - - var prepend_negate = false; - while (remain.len > 0 and remain[0] == '!') { - prepend_negate = !prepend_negate; - remain = remain[1..]; - } - - const is_path = remain.len > 0 and remain[0] == '.'; - - const filter = if (is_path) - strings.withoutTrailingSlash(bun.path.joinAbsStringBuf(cwd, path_buf, &.{remain}, .posix)) - else - remain; - - if (filter.len == 0) { - // won't match anything - return .{ .path = &.{} }; - } - const copy_start = @intFromBool(prepend_negate); - const copy_end = copy_start + filter.len; - - const buf = try allocator.alloc(u8, copy_end); - @memcpy(buf[copy_start..copy_end], filter); - - if (prepend_negate) { - buf[0] = '!'; - } - - const pattern = buf[0..copy_end]; - - return if (is_path) - .{ .path = pattern } - else - .{ .name = pattern }; - } - - pub fn deinit(this: WorkspaceFilter, allocator: std.mem.Allocator) void { - switch (this) { - .name, - .path, - => |pattern| allocator.free(pattern), - .all => {}, - } - } - }; - - pub const Options = @import("./PackageManager/PackageManagerOptions.zig"); - - pub fn reportSlowLifecycleScripts(this: *PackageManager) void { - const log_level = this.options.log_level; - if (log_level == .silent) return; - if (bun.getRuntimeFeatureFlag(.BUN_DISABLE_SLOW_LIFECYCLE_SCRIPT_LOGGING)) { - return; - } - - if 
(this.active_lifecycle_scripts.peek()) |active_lifecycle_script_running_for_the_longest_amount_of_time| { - if (this.cached_tick_for_slow_lifecycle_script_logging == this.event_loop.iterationNumber()) { - return; - } - this.cached_tick_for_slow_lifecycle_script_logging = this.event_loop.iterationNumber(); - const current_time = bun.timespec.now().ns(); - const time_running = current_time -| active_lifecycle_script_running_for_the_longest_amount_of_time.started_at; - const interval: u64 = if (log_level.isVerbose()) std.time.ns_per_s * 5 else std.time.ns_per_s * 30; - if (time_running > interval and current_time -| this.last_reported_slow_lifecycle_script_at > interval) { - this.last_reported_slow_lifecycle_script_at = current_time; - const package_name = active_lifecycle_script_running_for_the_longest_amount_of_time.package_name; - - if (!(package_name.len > 1 and package_name[package_name.len - 1] == 's')) { - Output.warn("{s}'s postinstall cost you {}\n", .{ - package_name, - bun.fmt.fmtDurationOneDecimal(time_running), - }); - } else { - Output.warn("{s}' postinstall cost you {}\n", .{ - package_name, - bun.fmt.fmtDurationOneDecimal(time_running), - }); - } - Output.flush(); - } - } - } - - pub const PackageUpdateInfo = struct { - original_version_literal: string, - is_alias: bool, - original_version_string_buf: string = "", - original_version: ?Semver.Version, - }; - - pub fn clearCachedItemsDependingOnLockfileBuffer(this: *PackageManager) void { - this.root_package_id.id = null; - } - - pub fn crash(this: *PackageManager) noreturn { - if (this.options.log_level != .silent) { - this.log.print(Output.errorWriter()) catch {}; - } - Global.crash(); - } - - const TrackInstalledBin = union(enum) { - none: void, - pending: void, - basename: []const u8, - }; - - // maybe rename to `PackageJSONCache` if we cache more than workspaces - pub const WorkspacePackageJSONCache = struct { - const js_ast = bun.JSAst; - const Expr = js_ast.Expr; - - pub const MapEntry = struct { - root: Expr, - source: logger.Source, - indentation: JSPrinter.Options.Indentation = .{}, - }; - - pub const Map = bun.StringHashMapUnmanaged(MapEntry); - - pub const GetJSONOptions = struct { - init_reset_store: bool = true, - guess_indentation: bool = false, - }; - - pub const GetResult = union(enum) { - entry: *MapEntry, - read_err: anyerror, - parse_err: anyerror, - - pub fn unwrap(this: GetResult) !*MapEntry { - return switch (this) { - .entry => |entry| entry, - inline else => |err| err, - }; - } - }; - - map: Map = .{}, - - /// Given an absolute path to a workspace package.json, return the AST - /// and contents of the file. If the package.json is not present in the - /// cache, it will be read from disk and parsed, and stored in the cache. 
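// A minimal standalone sketch of the getOrPut pattern `getWithPath` relies on
// below: on a miss, do the expensive read + parse once and store the result; on a
// hit, hand back a pointer into the map. The string key and `usize` value here are
// placeholders for the real path key and `MapEntry`.
const example_json_cache = struct {
    fn getOrParse(cache: *std.StringHashMap(usize), abs_path: []const u8) !*usize {
        const gop = try cache.getOrPut(abs_path);
        if (!gop.found_existing) {
            // Cache miss: stand-in for reading and parsing the package.json.
            gop.value_ptr.* = abs_path.len;
        }
        return gop.value_ptr;
    }
};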
- pub fn getWithPath( - this: *@This(), - allocator: std.mem.Allocator, - log: *logger.Log, - abs_package_json_path: anytype, - comptime opts: GetJSONOptions, - ) GetResult { - bun.assertWithLocation(std.fs.path.isAbsolute(abs_package_json_path), @src()); - - var buf: if (Environment.isWindows) bun.PathBuffer else void = undefined; - const path = if (comptime !Environment.isWindows) - abs_package_json_path - else brk: { - @memcpy(buf[0..abs_package_json_path.len], abs_package_json_path); - bun.path.dangerouslyConvertPathToPosixInPlace(u8, buf[0..abs_package_json_path.len]); - break :brk buf[0..abs_package_json_path.len]; - }; - - const entry = this.map.getOrPut(allocator, path) catch bun.outOfMemory(); - if (entry.found_existing) { - return .{ .entry = entry.value_ptr }; - } - - const key = allocator.dupeZ(u8, path) catch bun.outOfMemory(); - entry.key_ptr.* = key; - - const source = &(bun.sys.File.toSource(key, allocator, .{}).unwrap() catch |err| { - _ = this.map.remove(key); - allocator.free(key); - return .{ .read_err = err }; - }); - - if (comptime opts.init_reset_store) - initializeStore(); - - const json = JSON.parsePackageJSONUTF8WithOpts( - source, - log, - allocator, - .{ - .is_json = true, - .allow_comments = true, - .allow_trailing_commas = true, - .guess_indentation = opts.guess_indentation, - }, - ) catch |err| { - _ = this.map.remove(key); - allocator.free(source.contents); - allocator.free(key); - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - return .{ .parse_err = err }; - }; - - entry.value_ptr.* = .{ - .root = json.root.deepClone(bun.default_allocator) catch bun.outOfMemory(), - .source = source.*, - .indentation = json.indentation, - }; - - return .{ .entry = entry.value_ptr }; - } - - /// source path is used as the key, needs to be absolute - pub fn getWithSource( - this: *@This(), - allocator: std.mem.Allocator, - log: *logger.Log, - source: *const logger.Source, - comptime opts: GetJSONOptions, - ) GetResult { - bun.assertWithLocation(std.fs.path.isAbsolute(source.path.text), @src()); - - var buf: if (Environment.isWindows) bun.PathBuffer else void = undefined; - const path = if (comptime !Environment.isWindows) - source.path.text - else brk: { - @memcpy(buf[0..source.path.text.len], source.path.text); - bun.path.dangerouslyConvertPathToPosixInPlace(u8, buf[0..source.path.text.len]); - break :brk buf[0..source.path.text.len]; - }; - - const entry = this.map.getOrPut(allocator, path) catch bun.outOfMemory(); - if (entry.found_existing) { - return .{ .entry = entry.value_ptr }; - } - - if (comptime opts.init_reset_store) - initializeStore(); - - const json_result = JSON.parsePackageJSONUTF8WithOpts( - source, - log, - allocator, - .{ - .is_json = true, - .allow_comments = true, - .allow_trailing_commas = true, - .guess_indentation = opts.guess_indentation, - }, - ); - - const json = json_result catch |err| { - _ = this.map.remove(path); - return .{ .parse_err = err }; - }; - - entry.value_ptr.* = .{ - .root = json.root.deepClone(allocator) catch bun.outOfMemory(), - .source = source.*, - .indentation = json.indentation, - }; - - entry.key_ptr.* = allocator.dupe(u8, path) catch bun.outOfMemory(); - - return .{ .entry = entry.value_ptr }; - } - }; - - pub var verbose_install = false; - - pub const PatchTaskQueue = bun.UnboundedQueue(PatchTask, .next); - pub const AsyncNetworkTaskQueue = bun.UnboundedQueue(NetworkTask, .next); - - pub const ScriptRunEnvironment = struct { - root_dir_info: *DirInfo, - transpiler: bun.Transpiler, - }; - - const TimePasser = struct 
{ - pub var last_time: u64 = 0; - }; - - pub const LifecycleScriptTimeLog = struct { - const Entry = struct { - package_name: string, - script_id: u8, - - // nanosecond duration - duration: u64, - }; - - mutex: bun.Mutex = .{}, - list: std.ArrayListUnmanaged(Entry) = .{}, - - pub fn appendConcurrent(log: *LifecycleScriptTimeLog, allocator: std.mem.Allocator, entry: Entry) void { - log.mutex.lock(); - defer log.mutex.unlock(); - log.list.append(allocator, entry) catch bun.outOfMemory(); - } - - /// this can be called if .start was never called - pub fn printAndDeinit(log: *LifecycleScriptTimeLog, allocator: std.mem.Allocator) void { - if (Environment.isDebug) { - if (!log.mutex.tryLock()) @panic("LifecycleScriptTimeLog.print is not intended to be thread-safe"); - log.mutex.unlock(); - } - - if (log.list.items.len > 0) { - const longest: Entry = longest: { - var i: usize = 0; - var longest: u64 = log.list.items[0].duration; - for (log.list.items[1..], 1..) |item, j| { - if (item.duration > longest) { - i = j; - longest = item.duration; - } - } - break :longest log.list.items[i]; - }; - - // extra \n will print a blank line after this one - Output.warn("{s}'s {s} script took {}\n\n", .{ - longest.package_name, - Lockfile.Scripts.names[longest.script_id], - bun.fmt.fmtDurationOneDecimal(longest.duration), - }); - Output.flush(); - } - log.list.deinit(allocator); - } - }; - - pub fn hasEnoughTimePassedBetweenWaitingMessages() bool { - const iter = get().event_loop.loop().iterationNumber(); - if (TimePasser.last_time < iter) { - TimePasser.last_time = iter; - return true; - } - - return false; - } - - pub fn configureEnvForScripts(this: *PackageManager, ctx: Command.Context, log_level: Options.LogLevel) !*transpiler.Transpiler { - if (this.env_configure) |*env_configure| { - return &env_configure.transpiler; - } - - // We need to figure out the PATH and other environment variables - // to do that, we re-use the code from bun run - // this is expensive, it traverses the entire directory tree going up to the root - // so we really only want to do it when strictly necessary - this.env_configure = .{ - .root_dir_info = undefined, - .transpiler = undefined, - }; - const this_transpiler: *transpiler.Transpiler = &this.env_configure.?.transpiler; - - const root_dir_info = try RunCommand.configureEnvForRun( - ctx, - this_transpiler, - this.env, - log_level != .silent, - false, - ); - - const init_cwd_entry = try this.env.map.getOrPutWithoutValue("INIT_CWD"); - if (!init_cwd_entry.found_existing) { - init_cwd_entry.key_ptr.* = try ctx.allocator.dupe(u8, init_cwd_entry.key_ptr.*); - init_cwd_entry.value_ptr.* = .{ - .value = try ctx.allocator.dupe(u8, strings.withoutTrailingSlash(FileSystem.instance.top_level_dir)), - .conditional = false, - }; - } - - this.env.loadCCachePath(this_transpiler.fs); - - { - var node_path: bun.PathBuffer = undefined; - if (this.env.getNodePath(this_transpiler.fs, &node_path)) |node_pathZ| { - _ = try this.env.loadNodeJSConfig(this_transpiler.fs, bun.default_allocator.dupe(u8, node_pathZ) catch bun.outOfMemory()); - } else brk: { - const current_path = this.env.get("PATH") orelse ""; - var PATH = try std.ArrayList(u8).initCapacity(bun.default_allocator, current_path.len); - try PATH.appendSlice(current_path); - var bun_path: string = ""; - RunCommand.createFakeTemporaryNodeExecutable(&PATH, &bun_path) catch break :brk; - try this.env.map.put("PATH", PATH.items); - _ = try this.env.loadNodeJSConfig(this_transpiler.fs, bun.default_allocator.dupe(u8, bun_path) catch 
bun.outOfMemory()); - } - } - - this.env_configure.?.root_dir_info = root_dir_info; - - return this_transpiler; - } - - pub fn httpProxy(this: *PackageManager, url: URL) ?URL { - return this.env.getHttpProxyFor(url); - } - - pub fn tlsRejectUnauthorized(this: *PackageManager) bool { - return this.env.getTLSRejectUnauthorized(); - } - - pub fn computeIsContinuousIntegration(this: *PackageManager) bool { - return this.env.isCI(); - } - - pub inline fn isContinuousIntegration(this: *PackageManager) bool { - return this.ci_mode.get(); - } - - pub const WakeHandler = struct { - // handler: fn (ctx: *anyopaque, pm: *PackageManager) void = undefined, - // onDependencyError: fn (ctx: *anyopaque, Dependency, PackageID, anyerror) void = undefined, - handler: *const anyopaque = undefined, - onDependencyError: *const anyopaque = undefined, - context: ?*anyopaque = null, - - pub inline fn getHandler(t: @This()) *const fn (ctx: *anyopaque, pm: *PackageManager) void { - return bun.cast(*const fn (ctx: *anyopaque, pm: *PackageManager) void, t.handler); - } - - pub inline fn getonDependencyError(t: @This()) *const fn (ctx: *anyopaque, Dependency, DependencyID, anyerror) void { - return bun.cast(*const fn (ctx: *anyopaque, Dependency, DependencyID, anyerror) void, t.handler); - } - }; - - pub fn failRootResolution(this: *PackageManager, dependency: *const Dependency, dependency_id: DependencyID, err: anyerror) void { - if (this.onWake.context) |ctx| { - this.onWake.getonDependencyError()( - ctx, - dependency.*, - dependency_id, - err, - ); - } - } - - pub fn wake(this: *PackageManager) void { - if (this.onWake.context) |ctx| { - this.onWake.getHandler()(ctx, this); - } - - _ = this.wait_count.fetchAdd(1, .monotonic); - this.event_loop.wakeup(); - } - - pub fn hasNoMorePendingLifecycleScripts(this: *PackageManager) bool { - this.reportSlowLifecycleScripts(); - return this.pending_lifecycle_script_tasks.load(.monotonic) == 0; - } - - pub fn tickLifecycleScripts(this: *PackageManager) void { - this.event_loop.tickOnce(this); - } - - pub fn sleepUntil(this: *PackageManager, closure: anytype, comptime isDoneFn: anytype) void { - Output.flush(); - this.event_loop.tick(closure, isDoneFn); - } - - pub fn sleep(this: *PackageManager) void { - this.reportSlowLifecycleScripts(); - Output.flush(); - this.event_loop.tick(this, hasNoMorePendingLifecycleScripts); - } - - const DependencyToEnqueue = union(enum) { - pending: DependencyID, - resolution: struct { package_id: PackageID, resolution: Resolution }, - not_found: void, - failure: anyerror, - }; - - pub fn enqueueDependencyToRoot( - this: *PackageManager, - name: []const u8, - version: *const Dependency.Version, - version_buf: []const u8, - behavior: Dependency.Behavior, - ) DependencyToEnqueue { - const dep_id = @as(DependencyID, @truncate(brk: { - const str_buf = this.lockfile.buffers.string_bytes.items; - for (this.lockfile.buffers.dependencies.items, 0..) 
|dep, id| { - if (!strings.eqlLong(dep.name.slice(str_buf), name, true)) continue; - if (!dep.version.eql(version, str_buf, version_buf)) continue; - break :brk id; - } - - var builder = this.lockfile.stringBuilder(); - const dummy = Dependency{ - .name = String.init(name, name), - .name_hash = String.Builder.stringHash(name), - .version = version.*, - .behavior = behavior, - }; - dummy.countWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder); - - builder.allocate() catch |err| return .{ .failure = err }; - - const dep = dummy.cloneWithDifferentBuffers(this, name, version_buf, @TypeOf(&builder), &builder) catch unreachable; - builder.clamp(); - const index = this.lockfile.buffers.dependencies.items.len; - this.lockfile.buffers.dependencies.append(this.allocator, dep) catch unreachable; - this.lockfile.buffers.resolutions.append(this.allocator, invalid_package_id) catch unreachable; - if (comptime Environment.allow_assert) bun.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len); - break :brk index; - })); - - if (this.lockfile.buffers.resolutions.items[dep_id] == invalid_package_id) { - this.enqueueDependencyWithMainAndSuccessFn( - dep_id, - &this.lockfile.buffers.dependencies.items[dep_id], - invalid_package_id, - false, - assignRootResolution, - failRootResolution, - ) catch |err| { - return .{ .failure = err }; - }; - } - - const resolution_id = switch (this.lockfile.buffers.resolutions.items[dep_id]) { - invalid_package_id => brk: { - this.drainDependencyList(); - - const Closure = struct { - // https://github.com/ziglang/zig/issues/19586 - pub fn issue_19586_workaround() type { - return struct { - err: ?anyerror = null, - manager: *PackageManager, - pub fn isDone(closure: *@This()) bool { - const manager = closure.manager; - if (manager.pendingTaskCount() > 0) { - manager.runTasks( - void, - {}, - .{ - .onExtract = {}, - .onResolve = {}, - .onPackageManifestError = {}, - .onPackageDownloadError = {}, - }, - false, - manager.options.log_level, - ) catch |err| { - closure.err = err; - return true; - }; - - if (PackageManager.verbose_install and manager.pendingTaskCount() > 0) { - if (PackageManager.hasEnoughTimePassedBetweenWaitingMessages()) Output.prettyErrorln("[PackageManager] waiting for {d} tasks\n", .{closure.manager.pendingTaskCount()}); - } - } - - return manager.pendingTaskCount() == 0; - } - }; - } - }.issue_19586_workaround(); - - if (this.options.log_level.showProgress()) { - this.startProgressBarIfNone(); - } - - var closure = Closure{ .manager = this }; - this.sleepUntil(&closure, &Closure.isDone); - - if (this.options.log_level.showProgress()) { - this.endProgressBar(); - Output.flush(); - } - - if (closure.err) |err| { - return .{ .failure = err }; - } - - break :brk this.lockfile.buffers.resolutions.items[dep_id]; - }, - // we managed to synchronously resolve the dependency - else => |pkg_id| pkg_id, - }; - - if (resolution_id == invalid_package_id) { - return .{ - .not_found = {}, - }; - } - - return .{ - .resolution = .{ - .resolution = this.lockfile.packages.items(.resolution)[resolution_id], - .package_id = resolution_id, - }, - }; - } - - pub fn globalLinkDir(this: *PackageManager) !std.fs.Dir { - return this.global_link_dir orelse brk: { - var global_dir = try Options.openGlobalDir(this.options.explicit_global_directory); - this.global_dir = global_dir; - this.global_link_dir = try global_dir.makeOpenPath("node_modules", .{}); - var buf: bun.PathBuffer = undefined; - const _path = try 
bun.getFdPath(.fromStdDir(this.global_link_dir.?), &buf); - this.global_link_dir_path = try Fs.FileSystem.DirnameStore.instance.append([]const u8, _path); - break :brk this.global_link_dir.?; - }; - } - - pub fn globalLinkDirPath(this: *PackageManager) ![]const u8 { - _ = try this.globalLinkDir(); - return this.global_link_dir_path; - } - - pub fn formatLaterVersionInCache( - this: *PackageManager, - package_name: string, - name_hash: PackageNameHash, - resolution: Resolution, - ) ?Semver.Version.Formatter { - switch (resolution.tag) { - Resolution.Tag.npm => { - if (resolution.value.npm.version.tag.hasPre()) - // TODO: - return null; - - const manifest = this.manifests.byNameHash( - this, - this.scopeForPackageName(package_name), - name_hash, - .load_from_memory, - ) orelse return null; - - if (manifest.findByDistTag("latest")) |*latest_version| { - if (latest_version.version.order( - resolution.value.npm.version, - manifest.string_buf, - this.lockfile.buffers.string_bytes.items, - ) != .gt) return null; - return latest_version.version.fmt(manifest.string_buf); - } - - return null; - }, - else => return null, - } - } - - pub fn ensurePreinstallStateListCapacity(this: *PackageManager, count: usize) void { - if (this.preinstall_state.items.len >= count) { - return; - } - - const offset = this.preinstall_state.items.len; - this.preinstall_state.ensureTotalCapacity(this.allocator, count) catch bun.outOfMemory(); - this.preinstall_state.expandToCapacity(); - @memset(this.preinstall_state.items[offset..], PreinstallState.unknown); - } - - pub fn setPreinstallState(this: *PackageManager, package_id: PackageID, lockfile: *Lockfile, value: PreinstallState) void { - this.ensurePreinstallStateListCapacity(lockfile.packages.len); - this.preinstall_state.items[package_id] = value; - } - - pub fn getPreinstallState(this: *PackageManager, package_id: PackageID) PreinstallState { - if (package_id >= this.preinstall_state.items.len) { - return PreinstallState.unknown; - } - return this.preinstall_state.items[package_id]; - } - - pub fn determinePreinstallState( - manager: *PackageManager, - pkg: Package, - lockfile: *Lockfile, - out_name_and_version_hash: *?u64, - out_patchfile_hash: *?u64, - ) PreinstallState { - switch (manager.getPreinstallState(pkg.meta.id)) { - .unknown => { - - // Do not automatically start downloading packages which are disabled - // i.e. 
don't download all of esbuild's versions or SWCs - if (pkg.isDisabled()) { - manager.setPreinstallState(pkg.meta.id, lockfile, .done); - return .done; - } - - const patch_hash: ?u64 = brk: { - if (manager.lockfile.patched_dependencies.entries.len == 0) break :brk null; - var sfb = std.heap.stackFallback(1024, manager.lockfile.allocator); - const name_and_version = std.fmt.allocPrint( - sfb.get(), - "{s}@{}", - .{ - pkg.name.slice(manager.lockfile.buffers.string_bytes.items), - pkg.resolution.fmt(manager.lockfile.buffers.string_bytes.items, .posix), - }, - ) catch unreachable; - const name_and_version_hash = String.Builder.stringHash(name_and_version); - const patched_dep = manager.lockfile.patched_dependencies.get(name_and_version_hash) orelse break :brk null; - defer out_name_and_version_hash.* = name_and_version_hash; - if (patched_dep.patchfile_hash_is_null) { - manager.setPreinstallState(pkg.meta.id, manager.lockfile, .calc_patch_hash); - return .calc_patch_hash; - } - out_patchfile_hash.* = patched_dep.patchfileHash().?; - break :brk patched_dep.patchfileHash().?; - }; - - const folder_path = switch (pkg.resolution.tag) { - .git => manager.cachedGitFolderNamePrintAuto(&pkg.resolution.value.git, patch_hash), - .github => manager.cachedGitHubFolderNamePrintAuto(&pkg.resolution.value.github, patch_hash), - .npm => manager.cachedNPMPackageFolderName(lockfile.str(&pkg.name), pkg.resolution.value.npm.version, patch_hash), - .local_tarball => manager.cachedTarballFolderName(pkg.resolution.value.local_tarball, patch_hash), - .remote_tarball => manager.cachedTarballFolderName(pkg.resolution.value.remote_tarball, patch_hash), - else => "", - }; - - if (folder_path.len == 0) { - manager.setPreinstallState(pkg.meta.id, lockfile, .extract); - return .extract; - } - - if (manager.isFolderInCache(folder_path)) { - manager.setPreinstallState(pkg.meta.id, lockfile, .done); - return .done; - } - - // If the package is patched, then `folder_path` looks like: - // is-even@1.0.0_patch_hash=abc8s6dedhsddfkahaldfjhlj - // - // If that's not in the cache, we need to put it there: - // 1. extract the non-patched pkg in the cache - // 2. copy non-patched pkg into temp dir - // 3. apply patch to temp dir - // 4. rename temp dir to `folder_path` - if (patch_hash != null) { - const non_patched_path_ = folder_path[0 .. std.mem.indexOf(u8, folder_path, "_patch_hash=") orelse @panic("Expected folder path to contain `patch_hash=`, this is a bug in Bun. 
Please file a GitHub issue.")]; - const non_patched_path = manager.lockfile.allocator.dupeZ(u8, non_patched_path_) catch bun.outOfMemory(); - defer manager.lockfile.allocator.free(non_patched_path); - if (manager.isFolderInCache(non_patched_path)) { - manager.setPreinstallState(pkg.meta.id, manager.lockfile, .apply_patch); - // yay step 1 is already done for us - return .apply_patch; - } - // we need to extract non-patched pkg into the cache - manager.setPreinstallState(pkg.meta.id, lockfile, .extract); - return .extract; - } - - manager.setPreinstallState(pkg.meta.id, lockfile, .extract); - return .extract; - }, - else => |val| return val, - } - } - - pub fn scopeForPackageName(this: *const PackageManager, name: string) *const Npm.Registry.Scope { - if (name.len == 0 or name[0] != '@') return &this.options.scope; - return this.options.registries.getPtr( - Npm.Registry.Scope.hash( - Npm.Registry.Scope.getName(name), - ), - ) orelse &this.options.scope; - } - - pub fn setNodeName( - this: *PackageManager, - node: *Progress.Node, - name: string, - emoji: string, - comptime is_first: bool, - ) void { - if (Output.isEmojiEnabled()) { - if (is_first) { - @memcpy(this.progress_name_buf[0..emoji.len], emoji); - @memcpy(this.progress_name_buf[emoji.len..][0..name.len], name); - node.name = this.progress_name_buf[0 .. emoji.len + name.len]; - } else { - @memcpy(this.progress_name_buf[emoji.len..][0..name.len], name); - node.name = this.progress_name_buf[0 .. emoji.len + name.len]; - } - } else { - @memcpy(this.progress_name_buf[0..name.len], name); - node.name = this.progress_name_buf[0..name.len]; - } - } - - pub var cached_package_folder_name_buf: bun.PathBuffer = undefined; - - pub inline fn getCacheDirectory(this: *PackageManager) std.fs.Dir { - return this.cache_directory_ orelse brk: { - this.cache_directory_ = this.ensureCacheDirectory(); - break :brk this.cache_directory_.?; - }; - } - - pub inline fn getTemporaryDirectory(this: *PackageManager) std.fs.Dir { - return this.temp_dir_ orelse brk: { - this.temp_dir_ = this.ensureTemporaryDirectory(); - var pathbuf: bun.PathBuffer = undefined; - const temp_dir_path = bun.getFdPathZ(.fromStdDir(this.temp_dir_.?), &pathbuf) catch Output.panic("Unable to read temporary directory path", .{}); - this.temp_dir_path = bun.default_allocator.dupeZ(u8, temp_dir_path) catch bun.outOfMemory(); - break :brk this.temp_dir_.?; - }; - } - - noinline fn ensureCacheDirectory(this: *PackageManager) std.fs.Dir { - loop: while (true) { - if (this.options.enable.cache) { - const cache_dir = fetchCacheDirectoryPath(this.env, &this.options); - this.cache_directory_path = this.allocator.dupeZ(u8, cache_dir.path) catch bun.outOfMemory(); - - return std.fs.cwd().makeOpenPath(cache_dir.path, .{}) catch { - this.options.enable.cache = false; - this.allocator.free(this.cache_directory_path); - continue :loop; - }; - } - - this.cache_directory_path = this.allocator.dupeZ(u8, Path.joinAbsString( - Fs.FileSystem.instance.top_level_dir, - &.{ - "node_modules", - ".cache", - }, - .auto, - )) catch bun.outOfMemory(); - - return std.fs.cwd().makeOpenPath("node_modules/.cache", .{}) catch |err| { - Output.prettyErrorln("error: bun is unable to write files: {s}", .{@errorName(err)}); - Global.crash(); - }; - } - unreachable; - } - - pub var using_fallback_temp_dir: bool = false; - - // We need a temporary directory that can be rename() - // This is important for extracting files. - // - // However, we want it to be reused! Otherwise a cache is silly. 
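// A standalone sketch of that same-filesystem requirement (hypothetical helper;
// the function below probes with a throwaway file in the same spirit): create a
// file in the candidate tempdir and attempt to rename() it into the cache dir.
const example_same_fs_probe = struct {
    fn canRenameInto(tmp: std.fs.Dir, cache: std.fs.Dir) bool {
        const probe = "bun-probe.tmp";
        const file = tmp.createFile(probe, .{}) catch return false;
        file.close();
        std.posix.renameat(tmp.fd, probe, cache.fd, probe) catch {
            // e.g. error.RenameAcrossMountPoints: fall back to a .tmp dir inside the cache.
            tmp.deleteFile(probe) catch {};
            return false;
        };
        cache.deleteFile(probe) catch {};
        return true;
    }
};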
- // Error RenameAcrossMountPoints moving react-is to cache dir: - noinline fn ensureTemporaryDirectory(this: *PackageManager) std.fs.Dir { - var cache_directory = this.getCacheDirectory(); - // The chosen tempdir must be on the same filesystem as the cache directory - // This makes renameat() work - this.temp_dir_name = Fs.FileSystem.RealFS.getDefaultTempDir(); - - var tried_dot_tmp = false; - var tempdir: std.fs.Dir = bun.MakePath.makeOpenPath(std.fs.cwd(), this.temp_dir_name, .{}) catch brk: { - tried_dot_tmp = true; - break :brk bun.MakePath.makeOpenPath(cache_directory, bun.pathLiteral(".tmp"), .{}) catch |err| { - Output.prettyErrorln("error: bun is unable to access tempdir: {s}", .{@errorName(err)}); - Global.crash(); - }; - }; - var tmpbuf: bun.PathBuffer = undefined; - const tmpname = Fs.FileSystem.instance.tmpname("hm", &tmpbuf, bun.fastRandom()) catch unreachable; - var timer: std.time.Timer = if (this.options.log_level != .silent) std.time.Timer.start() catch unreachable else undefined; - brk: while (true) { - var file = tempdir.createFileZ(tmpname, .{ .truncate = true }) catch |err2| { - if (!tried_dot_tmp) { - tried_dot_tmp = true; - - tempdir = bun.MakePath.makeOpenPath(cache_directory, bun.pathLiteral(".tmp"), .{}) catch |err| { - Output.prettyErrorln("error: bun is unable to access tempdir: {s}", .{@errorName(err)}); - Global.crash(); - }; - - if (PackageManager.verbose_install) { - Output.prettyErrorln("warn: bun is unable to access tempdir: {s}, using fallback", .{@errorName(err2)}); - } - - continue :brk; - } - Output.prettyErrorln("error: {s} accessing temporary directory. Please set $BUN_TMPDIR or $BUN_INSTALL", .{ - @errorName(err2), - }); - Global.crash(); - }; - file.close(); - - std.posix.renameatZ(tempdir.fd, tmpname, cache_directory.fd, tmpname) catch |err| { - if (!tried_dot_tmp) { - tried_dot_tmp = true; - tempdir = cache_directory.makeOpenPath(".tmp", .{}) catch |err2| { - Output.prettyErrorln("error: bun is unable to write files to tempdir: {s}", .{@errorName(err2)}); - Global.crash(); - }; - - if (PackageManager.verbose_install) { - Output.prettyErrorln("info: cannot move files from tempdir: {s}, using fallback", .{@errorName(err)}); - } - - continue :brk; - } - - Output.prettyErrorln("error: {s} accessing temporary directory. Please set $BUN_TMPDIR or $BUN_INSTALL", .{ - @errorName(err), - }); - Global.crash(); - }; - cache_directory.deleteFileZ(tmpname) catch {}; - break; - } - if (tried_dot_tmp) { - using_fallback_temp_dir = true; - } - if (this.options.log_level != .silent) { - const elapsed = timer.read(); - if (elapsed > std.time.ns_per_ms * 100) { - var path_buf: bun.PathBuffer = undefined; - const cache_dir_path = bun.getFdPath(.fromStdDir(cache_directory), &path_buf) catch "it"; - Output.prettyErrorln( - "warn: Slow filesystem detected. 
If {s} is a network drive, consider setting $BUN_INSTALL_CACHE_DIR to a local folder.", - .{cache_dir_path}, - ); - } - } - - return tempdir; - } - - pub fn ensureTempNodeGypScript(this: *PackageManager) !void { - if (this.node_gyp_tempdir_name.len > 0) return; - - const tempdir = this.getTemporaryDirectory(); - var path_buf: bun.PathBuffer = undefined; - const node_gyp_tempdir_name = bun.span(try Fs.FileSystem.instance.tmpname("node-gyp", &path_buf, 12345)); - - // used later for adding to path for scripts - this.node_gyp_tempdir_name = try this.allocator.dupe(u8, node_gyp_tempdir_name); - - var node_gyp_tempdir = tempdir.makeOpenPath(this.node_gyp_tempdir_name, .{}) catch |err| { - if (err == error.EEXIST) { - // it should not exist - Output.prettyErrorln("error: node-gyp tempdir already exists", .{}); - Global.crash(); - } - Output.prettyErrorln("error: {s} creating node-gyp tempdir", .{@errorName(err)}); - Global.crash(); - }; - defer node_gyp_tempdir.close(); - - const file_name = switch (Environment.os) { - else => "node-gyp", - .windows => "node-gyp.cmd", - }; - const mode = switch (Environment.os) { - else => 0o755, - .windows => 0, // windows does not have an executable bit - }; - - var node_gyp_file = node_gyp_tempdir.createFile(file_name, .{ .mode = mode }) catch |err| { - Output.prettyErrorln("error: {s} creating node-gyp tempdir", .{@errorName(err)}); - Global.crash(); - }; - defer node_gyp_file.close(); - - const content = switch (Environment.os) { - .windows => - \\if not defined npm_config_node_gyp ( - \\ bun x --silent node-gyp %* - \\) else ( - \\ node "%npm_config_node_gyp%" %* - \\) - \\ - , - else => - \\#!/bin/sh - \\if [ "x$npm_config_node_gyp" = "x" ]; then - \\ bun x --silent node-gyp $@ - \\else - \\ "$npm_config_node_gyp" $@ - \\fi - \\ - , - }; - - node_gyp_file.writeAll(content) catch |err| { - Output.prettyErrorln("error: {s} writing to " ++ file_name ++ " file", .{@errorName(err)}); - Global.crash(); - }; - - // Add our node-gyp tempdir to the path - const existing_path = this.env.get("PATH") orelse ""; - var PATH = try std.ArrayList(u8).initCapacity(bun.default_allocator, existing_path.len + 1 + this.temp_dir_name.len + 1 + this.node_gyp_tempdir_name.len); - try PATH.appendSlice(existing_path); - if (existing_path.len > 0 and existing_path[existing_path.len - 1] != std.fs.path.delimiter) - try PATH.append(std.fs.path.delimiter); - try PATH.appendSlice(strings.withoutTrailingSlash(this.temp_dir_name)); - try PATH.append(std.fs.path.sep); - try PATH.appendSlice(this.node_gyp_tempdir_name); - try this.env.map.put("PATH", PATH.items); - - const npm_config_node_gyp = try std.fmt.bufPrint(&path_buf, "{s}{s}{s}{s}{s}", .{ - strings.withoutTrailingSlash(this.temp_dir_name), - std.fs.path.sep_str, - strings.withoutTrailingSlash(this.node_gyp_tempdir_name), - std.fs.path.sep_str, - file_name, - }); - - const node_gyp_abs_dir = std.fs.path.dirname(npm_config_node_gyp).?; - try this.env.map.putAllocKeyAndValue(this.allocator, "BUN_WHICH_IGNORE_CWD", node_gyp_abs_dir); - } - - const Holder = struct { - pub var ptr: *PackageManager = undefined; - }; - - pub fn allocatePackageManager() void { - Holder.ptr = bun.default_allocator.create(PackageManager) catch bun.outOfMemory(); - } - - pub fn get() *PackageManager { - return Holder.ptr; - } - - pub fn getNetworkTask(this: *PackageManager) *NetworkTask { - return this.preallocated_network_tasks.get(); - } - - pub fn allocGitHubURL(this: *const PackageManager, repository: *const Repository) string { - var github_api_url: 
string = "https://api.github.com"; - if (this.env.get("GITHUB_API_URL")) |url| { - if (url.len > 0) { - github_api_url = url; - } - } - - const owner = this.lockfile.str(&repository.owner); - const repo = this.lockfile.str(&repository.repo); - const committish = this.lockfile.str(&repository.committish); - - return std.fmt.allocPrint( - this.allocator, - "{s}/repos/{s}/{s}{s}tarball/{s}", - .{ - strings.withoutTrailingSlash(github_api_url), - owner, - repo, - // repo might be empty if dep is https://github.com/... style - if (repo.len > 0) "/" else "", - committish, - }, - ) catch unreachable; - } - - pub fn cachedGitFolderNamePrint(buf: []u8, resolved: string, patch_hash: ?u64) stringZ { - return std.fmt.bufPrintZ(buf, "@G@{s}{}", .{ resolved, PatchHashFmt{ .hash = patch_hash } }) catch unreachable; - } - - pub fn cachedGitFolderName(this: *const PackageManager, repository: *const Repository, patch_hash: ?u64) stringZ { - return cachedGitFolderNamePrint(&cached_package_folder_name_buf, this.lockfile.str(&repository.resolved), patch_hash); - } - - pub const PatchHashFmt = struct { - hash: ?u64 = null, - - pub fn format(this: *const PatchHashFmt, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - if (this.hash) |h| { - try writer.print("_patch_hash={x}", .{h}); - } - } - }; - - pub const CacheVersion = struct { - pub const current = 1; - pub const Formatter = struct { - version_number: ?usize = null, - - pub fn format(this: *const @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - if (this.version_number) |version| { - try writer.print("@@@{d}", .{version}); - } - } - }; - }; - - pub fn cachedGitFolderNamePrintAuto(this: *const PackageManager, repository: *const Repository, patch_hash: ?u64) stringZ { - if (!repository.resolved.isEmpty()) { - return this.cachedGitFolderName(repository, patch_hash); - } - - if (!repository.repo.isEmpty() and !repository.committish.isEmpty()) { - const string_buf = this.lockfile.buffers.string_bytes.items; - return std.fmt.bufPrintZ( - &cached_package_folder_name_buf, - "@G@{any}{}{}", - .{ - repository.committish.fmt(string_buf), - CacheVersion.Formatter{ .version_number = CacheVersion.current }, - PatchHashFmt{ .hash = patch_hash }, - }, - ) catch unreachable; - } - - return ""; - } - - pub fn cachedGitHubFolderNamePrint(buf: []u8, resolved: string, patch_hash: ?u64) stringZ { - return std.fmt.bufPrintZ(buf, "@GH@{s}{}{}", .{ - resolved, - CacheVersion.Formatter{ .version_number = CacheVersion.current }, - PatchHashFmt{ .hash = patch_hash }, - }) catch unreachable; - } - - pub fn cachedGitHubFolderName(this: *const PackageManager, repository: *const Repository, patch_hash: ?u64) stringZ { - return cachedGitHubFolderNamePrint(&cached_package_folder_name_buf, this.lockfile.str(&repository.resolved), patch_hash); - } - - fn cachedGitHubFolderNamePrintGuess(buf: []u8, string_buf: []const u8, repository: *const Repository, patch_hash: ?u64) stringZ { - return std.fmt.bufPrintZ( - buf, - "@GH@{any}-{any}-{any}{}{}", - .{ - repository.owner.fmt(string_buf), - repository.repo.fmt(string_buf), - repository.committish.fmt(string_buf), - CacheVersion.Formatter{ .version_number = CacheVersion.current }, - PatchHashFmt{ .hash = patch_hash }, - }, - ) catch unreachable; - } - - pub fn cachedGitHubFolderNamePrintAuto(this: *const PackageManager, repository: *const Repository, patch_hash: ?u64) stringZ { - if (!repository.resolved.isEmpty()) { - return this.cachedGitHubFolderName(repository, patch_hash); - } - - 
if (!repository.owner.isEmpty() and !repository.repo.isEmpty() and !repository.committish.isEmpty()) { - return cachedGitHubFolderNamePrintGuess(&cached_package_folder_name_buf, this.lockfile.buffers.string_bytes.items, repository, patch_hash); - } - - return ""; - } - - // TODO: normalize to alphanumeric - pub fn cachedNPMPackageFolderNamePrint(this: *const PackageManager, buf: []u8, name: string, version: Semver.Version, patch_hash: ?u64) stringZ { - const scope = this.scopeForPackageName(name); - - if (scope.name.len == 0 and !this.options.did_override_default_scope) { - const include_version_number = true; - return cachedNPMPackageFolderPrintBasename(buf, name, version, patch_hash, include_version_number); - } - - const include_version_number = false; - const basename = cachedNPMPackageFolderPrintBasename(buf, name, version, null, include_version_number); - - const spanned = bun.span(basename); - const available = buf[spanned.len..]; - var end: []u8 = undefined; - if (scope.url.hostname.len > 32 or available.len < 64) { - const visible_hostname = scope.url.hostname[0..@min(scope.url.hostname.len, 12)]; - end = std.fmt.bufPrint(available, "@@{s}__{any}{}{}", .{ - visible_hostname, - bun.fmt.hexIntLower(String.Builder.stringHash(scope.url.href)), - CacheVersion.Formatter{ .version_number = CacheVersion.current }, - PatchHashFmt{ .hash = patch_hash }, - }) catch unreachable; - } else { - end = std.fmt.bufPrint(available, "@@{s}{}{}", .{ - scope.url.hostname, - CacheVersion.Formatter{ .version_number = CacheVersion.current }, - PatchHashFmt{ .hash = patch_hash }, - }) catch unreachable; - } - - buf[spanned.len + end.len] = 0; - const result: [:0]u8 = buf[0 .. spanned.len + end.len :0]; - return result; - } - - pub fn cachedNPMPackageFolderName(this: *const PackageManager, name: string, version: Semver.Version, patch_hash: ?u64) stringZ { - return this.cachedNPMPackageFolderNamePrint(&cached_package_folder_name_buf, name, version, patch_hash); - } - - // TODO: normalize to alphanumeric - pub fn cachedNPMPackageFolderPrintBasename( - buf: []u8, - name: string, - version: Semver.Version, - patch_hash: ?u64, - include_cache_version: bool, - ) stringZ { - if (version.tag.hasPre()) { - if (version.tag.hasBuild()) { - return std.fmt.bufPrintZ( - buf, - "{s}@{d}.{d}.{d}-{any}+{any}{}{}", - .{ - name, - version.major, - version.minor, - version.patch, - bun.fmt.hexIntLower(version.tag.pre.hash), - bun.fmt.hexIntUpper(version.tag.build.hash), - CacheVersion.Formatter{ .version_number = if (include_cache_version) CacheVersion.current else null }, - PatchHashFmt{ .hash = patch_hash }, - }, - ) catch unreachable; - } - return std.fmt.bufPrintZ( - buf, - "{s}@{d}.{d}.{d}-{any}{}{}", - .{ - name, - version.major, - version.minor, - version.patch, - bun.fmt.hexIntLower(version.tag.pre.hash), - CacheVersion.Formatter{ .version_number = if (include_cache_version) CacheVersion.current else null }, - PatchHashFmt{ .hash = patch_hash }, - }, - ) catch unreachable; - } - if (version.tag.hasBuild()) { - return std.fmt.bufPrintZ( - buf, - "{s}@{d}.{d}.{d}+{any}{}{}", - .{ - name, - version.major, - version.minor, - version.patch, - bun.fmt.hexIntUpper(version.tag.build.hash), - CacheVersion.Formatter{ .version_number = if (include_cache_version) CacheVersion.current else null }, - PatchHashFmt{ .hash = patch_hash }, - }, - ) catch unreachable; - } - return std.fmt.bufPrintZ(buf, "{s}@{d}.{d}.{d}{}{}", .{ - name, - version.major, - version.minor, - version.patch, - CacheVersion.Formatter{ .version_number = if 
(include_cache_version) CacheVersion.current else null }, - PatchHashFmt{ .hash = patch_hash }, - }) catch unreachable; - } - - pub fn cachedTarballFolderNamePrint(buf: []u8, url: string, patch_hash: ?u64) stringZ { - return std.fmt.bufPrintZ(buf, "@T@{any}{}{}", .{ - bun.fmt.hexIntLower(String.Builder.stringHash(url)), - CacheVersion.Formatter{ .version_number = CacheVersion.current }, - PatchHashFmt{ .hash = patch_hash }, - }) catch unreachable; - } - - pub fn cachedTarballFolderName(this: *const PackageManager, url: String, patch_hash: ?u64) stringZ { - return cachedTarballFolderNamePrint(&cached_package_folder_name_buf, this.lockfile.str(&url), patch_hash); - } - - pub fn isFolderInCache(this: *PackageManager, folder_path: stringZ) bool { - return bun.sys.directoryExistsAt(.fromStdDir(this.getCacheDirectory()), folder_path).unwrap() catch false; - } - - pub fn pathForCachedNPMPath( - this: *PackageManager, - buf: *bun.PathBuffer, - package_name: []const u8, - version: Semver.Version, - ) ![]u8 { - var cache_path_buf: bun.PathBuffer = undefined; - - const cache_path = this.cachedNPMPackageFolderNamePrint(&cache_path_buf, package_name, version, null); - - if (comptime Environment.allow_assert) { - bun.assertWithLocation(cache_path[package_name.len] == '@', @src()); - } - - cache_path_buf[package_name.len] = std.fs.path.sep; - - const cache_dir: bun.FD = .fromStdDir(this.getCacheDirectory()); - - if (comptime Environment.isWindows) { - var path_buf: bun.PathBuffer = undefined; - const joined = bun.path.joinAbsStringBufZ(this.cache_directory_path, &path_buf, &[_]string{cache_path}, .windows); - return bun.sys.readlink(joined, buf).unwrap() catch |err| { - _ = bun.sys.unlink(joined); - return err; - }; - } - - return cache_dir.readlinkat(cache_path, buf).unwrap() catch |err| { - // if we run into an error, delete the symlink - // so that we don't repeatedly try to read it - _ = cache_dir.unlinkat(cache_path); - return err; - }; - } - - pub fn pathForResolution( - this: *PackageManager, - package_id: PackageID, - resolution: Resolution, - buf: *bun.PathBuffer, - ) ![]u8 { - // const folder_name = this.cachedNPMPackageFolderName(name, version); - switch (resolution.tag) { - .npm => { - const npm = resolution.value.npm; - const package_name_ = this.lockfile.packages.items(.name)[package_id]; - const package_name = this.lockfile.str(&package_name_); - - return this.pathForCachedNPMPath(buf, package_name, npm.version); - }, - else => return "", - } - } - - /// this is copy pasted from `installPackageWithNameAndResolution()` - /// it's not great to do this - pub fn computeCacheDirAndSubpath( - manager: *PackageManager, - pkg_name: string, - resolution: *const Resolution, - folder_path_buf: *bun.PathBuffer, - patch_hash: ?u64, - ) struct { cache_dir: std.fs.Dir, cache_dir_subpath: stringZ } { - const name = pkg_name; - const buf = manager.lockfile.buffers.string_bytes.items; - var cache_dir = std.fs.cwd(); - var cache_dir_subpath: stringZ = ""; - - switch (resolution.tag) { - .npm => { - cache_dir_subpath = manager.cachedNPMPackageFolderName(name, resolution.value.npm.version, patch_hash); - cache_dir = manager.getCacheDirectory(); - }, - .git => { - cache_dir_subpath = manager.cachedGitFolderName( - &resolution.value.git, - patch_hash, - ); - cache_dir = manager.getCacheDirectory(); - }, - .github => { - cache_dir_subpath = manager.cachedGitHubFolderName(&resolution.value.github, patch_hash); - cache_dir = manager.getCacheDirectory(); - }, - .folder => { - const folder = 
resolution.value.folder.slice(buf); - // Handle when a package depends on itself via file: - // example: - // "mineflayer": "file:." - if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { - cache_dir_subpath = "."; - } else { - @memcpy(folder_path_buf[0..folder.len], folder); - folder_path_buf[folder.len] = 0; - cache_dir_subpath = folder_path_buf[0..folder.len :0]; - } - cache_dir = std.fs.cwd(); - }, - .local_tarball => { - cache_dir_subpath = manager.cachedTarballFolderName(resolution.value.local_tarball, patch_hash); - cache_dir = manager.getCacheDirectory(); - }, - .remote_tarball => { - cache_dir_subpath = manager.cachedTarballFolderName(resolution.value.remote_tarball, patch_hash); - cache_dir = manager.getCacheDirectory(); - }, - .workspace => { - const folder = resolution.value.workspace.slice(buf); - // Handle when a package depends on itself - if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { - cache_dir_subpath = "."; - } else { - @memcpy(folder_path_buf[0..folder.len], folder); - folder_path_buf[folder.len] = 0; - cache_dir_subpath = folder_path_buf[0..folder.len :0]; - } - cache_dir = std.fs.cwd(); - }, - .symlink => { - const directory = manager.globalLinkDir() catch |err| { - const fmt = "\nerror: unable to access global directory while installing {s}: {s}\n"; - const args = .{ name, @errorName(err) }; - - Output.prettyErrorln(fmt, args); - - Global.exit(1); - }; - - const folder = resolution.value.symlink.slice(buf); - - if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { - cache_dir_subpath = "."; - cache_dir = std.fs.cwd(); - } else { - const global_link_dir = manager.globalLinkDirPath() catch unreachable; - var ptr = folder_path_buf; - var remain: []u8 = folder_path_buf[0..]; - @memcpy(ptr[0..global_link_dir.len], global_link_dir); - remain = remain[global_link_dir.len..]; - if (global_link_dir[global_link_dir.len - 1] != std.fs.path.sep) { - remain[0] = std.fs.path.sep; - remain = remain[1..]; - } - @memcpy(remain[0..folder.len], folder); - remain = remain[folder.len..]; - remain[0] = 0; - const len = @intFromPtr(remain.ptr) - @intFromPtr(ptr); - cache_dir_subpath = folder_path_buf[0..len :0]; - cache_dir = directory; - } - }, - else => {}, - } - - return .{ - .cache_dir = cache_dir, - .cache_dir_subpath = cache_dir_subpath, - }; - } - - pub fn getInstalledVersionsFromDiskCache(this: *PackageManager, tags_buf: *std.ArrayList(u8), package_name: []const u8, allocator: std.mem.Allocator) !std.ArrayList(Semver.Version) { - var list = std.ArrayList(Semver.Version).init(allocator); - var dir = this.getCacheDirectory().openDir(package_name, .{ - .iterate = true, - }) catch |err| switch (err) { - error.FileNotFound, error.NotDir, error.AccessDenied, error.DeviceBusy => return list, - else => return err, - }; - defer dir.close(); - var iter = dir.iterate(); - - while (try iter.next()) |entry| { - if (entry.kind != .directory and entry.kind != .sym_link) continue; - const name = entry.name; - const sliced = SlicedString.init(name, name); - const parsed = Semver.Version.parse(sliced); - if (!parsed.valid or parsed.wildcard != .none) continue; - // not handling OOM - // TODO: wildcard - var version = parsed.version.min(); - const total = version.tag.build.len() + version.tag.pre.len(); - if (total > 0) { - tags_buf.ensureUnusedCapacity(total) catch unreachable; - var available = tags_buf.items.ptr[tags_buf.items.len..tags_buf.capacity]; - const new_version = version.cloneInto(name, &available); - tags_buf.items.len += total; - version = 
new_version; - } - - list.append(version) catch unreachable; - } - - return list; - } - - pub fn resolveFromDiskCache(this: *PackageManager, package_name: []const u8, version: Dependency.Version) ?PackageID { - if (version.tag != .npm) { - // only npm supported right now - // tags are more ambiguous - return null; - } - - var arena = bun.ArenaAllocator.init(this.allocator); - defer arena.deinit(); - const arena_alloc = arena.allocator(); - var stack_fallback = std.heap.stackFallback(4096, arena_alloc); - const allocator = stack_fallback.get(); - var tags_buf = std.ArrayList(u8).init(allocator); - const installed_versions = this.getInstalledVersionsFromDiskCache(&tags_buf, package_name, allocator) catch |err| { - Output.debug("error getting installed versions from disk cache: {s}", .{bun.span(@errorName(err))}); - return null; - }; - - // TODO: make this fewer passes - std.sort.pdq( - Semver.Version, - installed_versions.items, - @as([]const u8, tags_buf.items), - Semver.Version.sortGt, - ); - for (installed_versions.items) |installed_version| { - if (version.value.npm.version.satisfies(installed_version, this.lockfile.buffers.string_bytes.items, tags_buf.items)) { - var buf: bun.PathBuffer = undefined; - const npm_package_path = this.pathForCachedNPMPath(&buf, package_name, installed_version) catch |err| { - Output.debug("error getting path for cached npm path: {s}", .{bun.span(@errorName(err))}); - return null; - }; - const dependency = Dependency.Version{ - .tag = .npm, - .value = .{ - .npm = .{ - .name = String.init(package_name, package_name), - .version = Semver.Query.Group.from(installed_version), - }, - }, - }; - switch (FolderResolution.getOrPut(.{ .cache_folder = npm_package_path }, dependency, ".", this)) { - .new_package_id => |id| { - this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id]); - return id; - }, - .package_id => |id| { - this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id]); - return id; - }, - .err => |err| { - Output.debug("error getting or putting folder resolution: {s}", .{bun.span(@errorName(err))}); - return null; - }, - } - } - } - - return null; - } - - const ResolvedPackageResult = struct { - package: Lockfile.Package, - - /// Is this the first time we've seen this package? - is_first_time: bool = false, - - task: ?union(enum) { - /// Pending network task to schedule - network_task: *NetworkTask, - - /// Apply patch task or calc patch hash task - patch_task: *PatchTask, - } = null, - }; - - fn getOrPutResolvedPackageWithFindResult( - this: *PackageManager, - name_hash: PackageNameHash, - name: String, - dependency: *const Dependency, - version: Dependency.Version, - dependency_id: DependencyID, - behavior: Behavior, - manifest: *const Npm.PackageManifest, - find_result: Npm.PackageManifest.FindResult, - install_peer: bool, - comptime successFn: SuccessFn, - ) !?ResolvedPackageResult { - const should_update = this.to_update and - // If updating, only update packages in the current workspace - this.lockfile.isRootDependency(this, dependency_id) and - // no need to do a look up if update requests are empty (`bun update` with no args) - (this.update_requests.len == 0 or - this.updating_packages.contains(dependency.name.slice(this.lockfile.buffers.string_bytes.items))); - - // Was this package already allocated? Let's reuse the existing one. 
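// A standalone sketch (hypothetical `u64` payload standing in for a package) of
// the reuse-before-append pattern applied below: consult the name-hash index
// first, and only mint a fresh id when no existing entry matches.
const example_get_or_append = struct {
    fn getOrAppend(
        index: *std.AutoHashMap(u64, u32),
        packages: *std.ArrayList(u64),
        name_hash: u64,
    ) !u32 {
        if (index.get(name_hash)) |existing_id| return existing_id;
        const new_id: u32 = @intCast(packages.items.len);
        try packages.append(name_hash);
        try index.put(name_hash, new_id);
        return new_id;
    }
};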
- if (this.lockfile.getPackageID( - name_hash, - if (should_update) null else version, - &.{ - .tag = .npm, - .value = .{ - .npm = .{ - .version = find_result.version, - .url = find_result.package.tarball_url.value, - }, - }, - }, - )) |id| { - successFn(this, dependency_id, id); - return .{ - .package = this.lockfile.packages.get(id), - .is_first_time = false, - }; - } else if (behavior.isPeer() and !install_peer) { - return null; - } - - // appendPackage sets the PackageID on the package - const package = try this.lockfile.appendPackage(try Lockfile.Package.fromNPM( - this, - this.allocator, - this.lockfile, - this.log, - manifest, - find_result.version, - find_result.package, - manifest.string_buf, - Features.npm, - )); - - if (comptime Environment.allow_assert) bun.assert(package.meta.id != invalid_package_id); - defer successFn(this, dependency_id, package.meta.id); - - // non-null if the package is in "patchedDependencies" - var name_and_version_hash: ?u64 = null; - var patchfile_hash: ?u64 = null; - - return switch (this.determinePreinstallState( - package, - this.lockfile, - &name_and_version_hash, - &patchfile_hash, - )) { - // Is this package already in the cache? - // We don't need to download the tarball, but we should enqueue dependencies - .done => .{ .package = package, .is_first_time = true }, - // Do we need to download the tarball? - .extract => extract: { - const task_id = Task.Id.forNPMPackage(this.lockfile.str(&name), package.resolution.value.npm.version); - bun.debugAssert(!this.network_dedupe_map.contains(task_id)); - - break :extract .{ - .package = package, - .is_first_time = true, - .task = .{ - .network_task = try this.generateNetworkTaskForTarball( - task_id, - manifest.str(&find_result.package.tarball_url), - dependency.behavior.isRequired(), - dependency_id, - package, - name_and_version_hash, - // its npm. 
- .allow_authorization, - ) orelse unreachable, - }, - }; - }, - .calc_patch_hash => .{ - .package = package, - .is_first_time = true, - .task = .{ - .patch_task = PatchTask.newCalcPatchHash( - this, - name_and_version_hash.?, - .{ - .pkg_id = package.meta.id, - .dependency_id = dependency_id, - .url = this.allocator.dupe(u8, manifest.str(&find_result.package.tarball_url)) catch bun.outOfMemory(), - }, - ), - }, - }, - .apply_patch => .{ - .package = package, - .is_first_time = true, - .task = .{ - .patch_task = PatchTask.newApplyPatchHash( - this, - package.meta.id, - patchfile_hash.?, - name_and_version_hash.?, - ), - }, - }, - else => unreachable, - }; - } - - pub fn hasCreatedNetworkTask(this: *PackageManager, task_id: u64, is_required: bool) bool { - const gpe = this.network_dedupe_map.getOrPut(task_id) catch bun.outOfMemory(); - - // if there's an existing network task that is optional, we want to make it non-optional if this one would be required - gpe.value_ptr.is_required = if (!gpe.found_existing) - is_required - else - gpe.value_ptr.is_required or is_required; - - return gpe.found_existing; - } - - pub fn isNetworkTaskRequired(this: *const PackageManager, task_id: u64) bool { - return (this.network_dedupe_map.get(task_id) orelse return true).is_required; - } - - pub fn generateNetworkTaskForTarball( - this: *PackageManager, - task_id: u64, - url: string, - is_required: bool, - dependency_id: DependencyID, - package: Lockfile.Package, - patch_name_and_version_hash: ?u64, - authorization: NetworkTask.Authorization, - ) NetworkTask.ForTarballError!?*NetworkTask { - if (this.hasCreatedNetworkTask(task_id, is_required)) { - return null; - } - - var network_task = this.getNetworkTask(); - - network_task.* = .{ - .task_id = task_id, - .callback = undefined, - .allocator = this.allocator, - .package_manager = this, - .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { - const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; - const task = PatchTask.newApplyPatchHash(this, package.meta.id, patch_hash, h); - task.callback.apply.task_id = task_id; - break :brk task; - } else null, - }; - - const scope = this.scopeForPackageName(this.lockfile.str(&package.name)); - - try network_task.forTarball( - this.allocator, - &.{ - .package_manager = this, - .name = strings.StringOrTinyString.initAppendIfNeeded( - this.lockfile.str(&package.name), - *FileSystem.FilenameStore, - FileSystem.FilenameStore.instance, - ) catch bun.outOfMemory(), - .resolution = package.resolution, - .cache_dir = this.getCacheDirectory(), - .temp_dir = this.getTemporaryDirectory(), - .dependency_id = dependency_id, - .integrity = package.meta.integrity, - .url = strings.StringOrTinyString.initAppendIfNeeded( - url, - *FileSystem.FilenameStore, - FileSystem.FilenameStore.instance, - ) catch bun.outOfMemory(), - }, - scope, - authorization, - ); - - return network_task; - } - - pub fn enqueueNetworkTask(this: *PackageManager, task: *NetworkTask) void { - if (this.network_task_fifo.writableLength() == 0) { - this.flushNetworkQueue(); - } - - this.network_task_fifo.writeItemAssumeCapacity(task); - } - - pub fn enqueuePatchTask(this: *PackageManager, task: *PatchTask) void { - debug("Enqueue patch task: 0x{x} {s}", .{ @intFromPtr(task), @tagName(task.callback) }); - if (this.patch_task_fifo.writableLength() == 0) { - this.flushPatchTaskQueue(); - } - - this.patch_task_fifo.writeItemAssumeCapacity(task); - } - - /// We need to calculate all the patchfile hashes at the beginning so we don't 
run into problems with stale hashes - pub fn enqueuePatchTaskPre(this: *PackageManager, task: *PatchTask) void { - debug("Enqueue patch task pre: 0x{x} {s}", .{ @intFromPtr(task), @tagName(task.callback) }); - task.pre = true; - if (this.patch_task_fifo.writableLength() == 0) { - this.flushPatchTaskQueue(); - } - - this.patch_task_fifo.writeItemAssumeCapacity(task); - _ = this.pending_pre_calc_hashes.fetchAdd(1, .monotonic); - } - - const SuccessFn = *const fn (*PackageManager, DependencyID, PackageID) void; - const FailFn = *const fn (*PackageManager, *const Dependency, PackageID, anyerror) void; - fn assignResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void { - const buffers = &this.lockfile.buffers; - if (comptime Environment.allow_assert) { - bun.assert(dependency_id < buffers.resolutions.items.len); - bun.assert(package_id < this.lockfile.packages.len); - // bun.assert(buffers.resolutions.items[dependency_id] == invalid_package_id); - } - buffers.resolutions.items[dependency_id] = package_id; - const string_buf = buffers.string_bytes.items; - var dep = &buffers.dependencies.items[dependency_id]; - if (dep.name.isEmpty() or strings.eql(dep.name.slice(string_buf), dep.version.literal.slice(string_buf))) { - dep.name = this.lockfile.packages.items(.name)[package_id]; - dep.name_hash = this.lockfile.packages.items(.name_hash)[package_id]; - } - } - - fn assignRootResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void { - const buffers = &this.lockfile.buffers; - if (comptime Environment.allow_assert) { - bun.assert(dependency_id < buffers.resolutions.items.len); - bun.assert(package_id < this.lockfile.packages.len); - bun.assert(buffers.resolutions.items[dependency_id] == invalid_package_id); - } - buffers.resolutions.items[dependency_id] = package_id; - const string_buf = buffers.string_bytes.items; - var dep = &buffers.dependencies.items[dependency_id]; - if (dep.name.isEmpty() or strings.eql(dep.name.slice(string_buf), dep.version.literal.slice(string_buf))) { - dep.name = this.lockfile.packages.items(.name)[package_id]; - dep.name_hash = this.lockfile.packages.items(.name_hash)[package_id]; - } - } - - fn resolutionSatisfiesDependency(this: *PackageManager, resolution: Resolution, dependency: Dependency.Version) bool { - const buf = this.lockfile.buffers.string_bytes.items; - if (resolution.tag == .npm and dependency.tag == .npm) { - return dependency.value.npm.version.satisfies(resolution.value.npm.version, buf, buf); - } - - if (resolution.tag == .git and dependency.tag == .git) { - return resolution.value.git.eql(&dependency.value.git, buf, buf); - } - - if (resolution.tag == .github and dependency.tag == .github) { - return resolution.value.github.eql(&dependency.value.github, buf, buf); - } - - return false; - } - - fn getOrPutResolvedPackage( - this: *PackageManager, - name_hash: PackageNameHash, - name: String, - dependency: *const Dependency, - version: Dependency.Version, - behavior: Behavior, - dependency_id: DependencyID, - resolution: PackageID, - install_peer: bool, - comptime successFn: SuccessFn, - ) !?ResolvedPackageResult { - if (install_peer and behavior.isPeer()) { - if (this.lockfile.package_index.get(name_hash)) |index| { - const resolutions: []Resolution = this.lockfile.packages.items(.resolution); - switch (index) { - .id => |existing_id| { - if (existing_id < resolutions.len) { - const existing_resolution = resolutions[existing_id]; - if 
(this.resolutionSatisfiesDependency(existing_resolution, version)) { - successFn(this, dependency_id, existing_id); - return .{ - // we must fetch it from the packages array again, in case the package array mutates the value in the `successFn` - .package = this.lockfile.packages.get(existing_id), - }; - } - - const res_tag = resolutions[existing_id].tag; - const ver_tag = version.tag; - if ((res_tag == .npm and ver_tag == .npm) or (res_tag == .git and ver_tag == .git) or (res_tag == .github and ver_tag == .github)) { - const existing_package = this.lockfile.packages.get(existing_id); - this.log.addWarningFmt( - null, - logger.Loc.Empty, - this.allocator, - "incorrect peer dependency \"{}@{}\"", - .{ - existing_package.name.fmt(this.lockfile.buffers.string_bytes.items), - existing_package.resolution.fmt(this.lockfile.buffers.string_bytes.items, .auto), - }, - ) catch unreachable; - successFn(this, dependency_id, existing_id); - return .{ - // we must fetch it from the packages array again, in case the package array mutates the value in the `successFn` - .package = this.lockfile.packages.get(existing_id), - }; - } - } - }, - .ids => |list| { - for (list.items) |existing_id| { - if (existing_id < resolutions.len) { - const existing_resolution = resolutions[existing_id]; - if (this.resolutionSatisfiesDependency(existing_resolution, version)) { - successFn(this, dependency_id, existing_id); - return .{ - .package = this.lockfile.packages.get(existing_id), - }; - } - } - } - - if (list.items[0] < resolutions.len) { - const res_tag = resolutions[list.items[0]].tag; - const ver_tag = version.tag; - if ((res_tag == .npm and ver_tag == .npm) or (res_tag == .git and ver_tag == .git) or (res_tag == .github and ver_tag == .github)) { - const existing_package_id = list.items[0]; - const existing_package = this.lockfile.packages.get(existing_package_id); - this.log.addWarningFmt( - null, - logger.Loc.Empty, - this.allocator, - "incorrect peer dependency \"{}@{}\"", - .{ - existing_package.name.fmt(this.lockfile.buffers.string_bytes.items), - existing_package.resolution.fmt(this.lockfile.buffers.string_bytes.items, .auto), - }, - ) catch unreachable; - successFn(this, dependency_id, list.items[0]); - return .{ - // we must fetch it from the packages array again, in case the package array mutates the value in the `successFn` - .package = this.lockfile.packages.get(existing_package_id), - }; - } - } - }, - } - } - } - - if (resolution < this.lockfile.packages.len) { - return .{ .package = this.lockfile.packages.get(resolution) }; - } - - switch (version.tag) { - .npm, .dist_tag => { - resolve_from_workspace: { - if (version.tag == .npm) { - const workspace_path = if (this.lockfile.workspace_paths.count() > 0) this.lockfile.workspace_paths.get(name_hash) else null; - const workspace_version = this.lockfile.workspace_versions.get(name_hash); - const buf = this.lockfile.buffers.string_bytes.items; - if (this.options.link_workspace_packages and - (((workspace_version != null and version.value.npm.version.satisfies(workspace_version.?, buf, buf)) or - // https://github.com/oven-sh/bun/pull/10899#issuecomment-2099609419 - // if the workspace doesn't have a version, it can still be used if - // dependency version is wildcard - (workspace_path != null and version.value.npm.version.@"is *"())))) - { - const root_package = this.lockfile.rootPackage() orelse break :resolve_from_workspace; - const root_dependencies = root_package.dependencies.get(this.lockfile.buffers.dependencies.items); - const root_resolutions = 
root_package.resolutions.get(this.lockfile.buffers.resolutions.items); - - for (root_dependencies, root_resolutions) |root_dep, workspace_package_id| { - if (workspace_package_id != invalid_package_id and root_dep.version.tag == .workspace and root_dep.name_hash == name_hash) { - // make sure verifyResolutions sees this resolution as a valid package id - successFn(this, dependency_id, workspace_package_id); - return .{ - .package = this.lockfile.packages.get(workspace_package_id), - .is_first_time = false, - }; - } - } - } - } - } - - // Resolve the version from the loaded NPM manifest - const name_str = this.lockfile.str(&name); - const manifest = this.manifests.byNameHash( - this, - this.scopeForPackageName(name_str), - name_hash, - .load_from_memory_fallback_to_disk, - ) orelse return null; // manifest might still be downloading. This feels unreliable. - const find_result: Npm.PackageManifest.FindResult = switch (version.tag) { - .dist_tag => manifest.findByDistTag(this.lockfile.str(&version.value.dist_tag.tag)), - .npm => manifest.findBestVersion(version.value.npm.version, this.lockfile.buffers.string_bytes.items), - else => unreachable, - } orelse { - resolve_workspace_from_dist_tag: { - // choose a workspace for a dist_tag only if a version was not found - if (version.tag == .dist_tag) { - const workspace_path = if (this.lockfile.workspace_paths.count() > 0) this.lockfile.workspace_paths.get(name_hash) else null; - if (workspace_path != null) { - const root_package = this.lockfile.rootPackage() orelse break :resolve_workspace_from_dist_tag; - const root_dependencies = root_package.dependencies.get(this.lockfile.buffers.dependencies.items); - const root_resolutions = root_package.resolutions.get(this.lockfile.buffers.resolutions.items); - - for (root_dependencies, root_resolutions) |root_dep, workspace_package_id| { - if (workspace_package_id != invalid_package_id and root_dep.version.tag == .workspace and root_dep.name_hash == name_hash) { - // make sure verifyResolutions sees this resolution as a valid package id - successFn(this, dependency_id, workspace_package_id); - return .{ - .package = this.lockfile.packages.get(workspace_package_id), - .is_first_time = false, - }; - } - } - } - } - } - - if (behavior.isPeer()) { - return null; - } - - return switch (version.tag) { - .npm => error.NoMatchingVersion, - .dist_tag => error.DistTagNotFound, - else => unreachable, - }; - }; - - return try this.getOrPutResolvedPackageWithFindResult( - name_hash, - name, - dependency, - version, - dependency_id, - behavior, - manifest, - find_result, - install_peer, - successFn, - ); - }, - - .folder => { - const res: FolderResolution = res: { - if (this.lockfile.isWorkspaceDependency(dependency_id)) { - // relative to cwd - const folder_path = this.lockfile.str(&version.value.folder); - var buf2: bun.PathBuffer = undefined; - const folder_path_abs = if (std.fs.path.isAbsolute(folder_path)) folder_path else blk: { - break :blk Path.joinAbsStringBuf( - FileSystem.instance.top_level_dir, - &buf2, - &.{folder_path}, - .auto, - ); - // break :blk Path.joinAbsStringBuf( - // strings.withoutSuffixComptime(this.original_package_json_path, "package.json"), - // &buf2, - // &[_]string{folder_path}, - // .auto, - // ); - }; - break :res FolderResolution.getOrPut(.{ .relative = .folder }, version, folder_path_abs, this); - } - - // transitive folder dependencies do not have their dependencies resolved - var name_slice = this.lockfile.str(&name); - var folder_path = this.lockfile.str(&version.value.folder); - 
var package = Lockfile.Package{}; - - { - // only need name and path - var builder = this.lockfile.stringBuilder(); - - builder.count(name_slice); - builder.count(folder_path); - - builder.allocate() catch bun.outOfMemory(); - - name_slice = this.lockfile.str(&name); - folder_path = this.lockfile.str(&version.value.folder); - - package.name = builder.append(String, name_slice); - package.name_hash = name_hash; - - package.resolution = Resolution.init(.{ - .folder = builder.append(String, folder_path), - }); - - package.scripts.filled = true; - package.meta.setHasInstallScript(false); - - builder.clamp(); - } - - // these are always new - package = this.lockfile.appendPackage(package) catch bun.outOfMemory(); - - break :res .{ - .new_package_id = package.meta.id, - }; - }; - - switch (res) { - .err => |err| return err, - .package_id => |package_id| { - successFn(this, dependency_id, package_id); - return .{ .package = this.lockfile.packages.get(package_id) }; - }, - - .new_package_id => |package_id| { - successFn(this, dependency_id, package_id); - return .{ .package = this.lockfile.packages.get(package_id), .is_first_time = true }; - }, - } - }, - .workspace => { - // package name hash should be used to find workspace path from map - const workspace_path_raw: *const String = this.lockfile.workspace_paths.getPtr(name_hash) orelse &version.value.workspace; - const workspace_path = this.lockfile.str(workspace_path_raw); - var buf2: bun.PathBuffer = undefined; - const workspace_path_u8 = if (std.fs.path.isAbsolute(workspace_path)) workspace_path else blk: { - break :blk Path.joinAbsStringBuf(FileSystem.instance.top_level_dir, &buf2, &[_]string{workspace_path}, .auto); - }; - - const res = FolderResolution.getOrPut(.{ .relative = .workspace }, version, workspace_path_u8, this); - - switch (res) { - .err => |err| return err, - .package_id => |package_id| { - successFn(this, dependency_id, package_id); - return .{ .package = this.lockfile.packages.get(package_id) }; - }, - - .new_package_id => |package_id| { - successFn(this, dependency_id, package_id); - return .{ .package = this.lockfile.packages.get(package_id), .is_first_time = true }; - }, - } - }, - .symlink => { - const res = FolderResolution.getOrPut(.{ .global = try this.globalLinkDirPath() }, version, this.lockfile.str(&version.value.symlink), this); - - switch (res) { - .err => |err| return err, - .package_id => |package_id| { - successFn(this, dependency_id, package_id); - return .{ .package = this.lockfile.packages.get(package_id) }; - }, - - .new_package_id => |package_id| { - successFn(this, dependency_id, package_id); - return .{ .package = this.lockfile.packages.get(package_id), .is_first_time = true }; - }, - } - }, - - else => return null, - } - } - - fn enqueueParseNPMPackage( - this: *PackageManager, - task_id: u64, - name: strings.StringOrTinyString, - network_task: *NetworkTask, - ) *ThreadPool.Task { - var task = this.preallocated_resolve_tasks.get(); - task.* = Task{ - .package_manager = this, - .log = logger.Log.init(this.allocator), - .tag = Task.Tag.package_manifest, - .request = .{ - .package_manifest = .{ - .network = network_task, - .name = name, - }, - }, - .id = task_id, - .data = undefined, - }; - return &task.threadpool_task; - } - - fn enqueueExtractNPMPackage( - this: *PackageManager, - tarball: *const ExtractTarball, - network_task: *NetworkTask, - ) *ThreadPool.Task { - var task = this.preallocated_resolve_tasks.get(); - task.* = Task{ - .package_manager = this, - .log = logger.Log.init(this.allocator), - 
.tag = Task.Tag.extract, - .request = .{ - .extract = .{ - .network = network_task, - .tarball = tarball.*, - }, - }, - .id = network_task.task_id, - .data = undefined, - }; - task.request.extract.tarball.skip_verify = !this.options.do.verify_integrity; - return &task.threadpool_task; - } - - fn enqueueGitClone( - this: *PackageManager, - task_id: u64, - name: string, - repository: *const Repository, - dep_id: DependencyID, - dependency: *const Dependency, - res: *const Resolution, - /// if patched then we need to do apply step after network task is done - patch_name_and_version_hash: ?u64, - ) *ThreadPool.Task { - var task = this.preallocated_resolve_tasks.get(); - task.* = Task{ - .package_manager = this, - .log = logger.Log.init(this.allocator), - .tag = Task.Tag.git_clone, - .request = .{ - .git_clone = .{ - .name = strings.StringOrTinyString.initAppendIfNeeded( - name, - *FileSystem.FilenameStore, - FileSystem.FilenameStore.instance, - ) catch unreachable, - .url = strings.StringOrTinyString.initAppendIfNeeded( - this.lockfile.str(&repository.repo), - *FileSystem.FilenameStore, - FileSystem.FilenameStore.instance, - ) catch unreachable, - .env = Repository.shared_env.get(this.allocator, this.env), - .dep_id = dep_id, - .res = res.*, - }, - }, - .id = task_id, - .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { - const dep = dependency; - const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not found")) { - .id => |p| p, - .ids => |ps| ps.items[0], // TODO is this correct - }; - const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; - const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h); - pt.callback.apply.task_id = task_id; - break :brk pt; - } else null, - .data = undefined, - }; - return &task.threadpool_task; - } - - fn enqueueGitCheckout( - this: *PackageManager, - task_id: u64, - dir: bun.FileDescriptor, - dependency_id: DependencyID, - name: string, - resolution: Resolution, - resolved: string, - /// if patched then we need to do apply step after network task is done - patch_name_and_version_hash: ?u64, - ) *ThreadPool.Task { - var task = this.preallocated_resolve_tasks.get(); - task.* = Task{ - .package_manager = this, - .log = logger.Log.init(this.allocator), - .tag = Task.Tag.git_checkout, - .request = .{ - .git_checkout = .{ - .repo_dir = dir, - .resolution = resolution, - .dependency_id = dependency_id, - .name = strings.StringOrTinyString.initAppendIfNeeded( - name, - *FileSystem.FilenameStore, - FileSystem.FilenameStore.instance, - ) catch unreachable, - .url = strings.StringOrTinyString.initAppendIfNeeded( - this.lockfile.str(&resolution.value.git.repo), - *FileSystem.FilenameStore, - FileSystem.FilenameStore.instance, - ) catch unreachable, - .resolved = strings.StringOrTinyString.initAppendIfNeeded( - resolved, - *FileSystem.FilenameStore, - FileSystem.FilenameStore.instance, - ) catch unreachable, - .env = Repository.shared_env.get(this.allocator, this.env), - }, - }, - .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { - const dep = this.lockfile.buffers.dependencies.items[dependency_id]; - const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not found")) { - .id => |p| p, - .ids => |ps| ps.items[0], // TODO is this correct - }; - const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; - const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h); - pt.callback.apply.task_id = task_id; 
- break :brk pt; - } else null, - .id = task_id, - .data = undefined, - }; - return &task.threadpool_task; - } - - fn enqueueLocalTarball( - this: *PackageManager, - task_id: u64, - dependency_id: DependencyID, - name: string, - path: string, - resolution: Resolution, - ) *ThreadPool.Task { - var task = this.preallocated_resolve_tasks.get(); - task.* = Task{ - .package_manager = this, - .log = logger.Log.init(this.allocator), - .tag = Task.Tag.local_tarball, - .request = .{ - .local_tarball = .{ - .tarball = .{ - .package_manager = this, - .name = strings.StringOrTinyString.initAppendIfNeeded( - name, - *FileSystem.FilenameStore, - FileSystem.FilenameStore.instance, - ) catch unreachable, - .resolution = resolution, - .cache_dir = this.getCacheDirectory(), - .temp_dir = this.getTemporaryDirectory(), - .dependency_id = dependency_id, - .url = strings.StringOrTinyString.initAppendIfNeeded( - path, - *FileSystem.FilenameStore, - FileSystem.FilenameStore.instance, - ) catch unreachable, - }, - }, - }, - .id = task_id, - .data = undefined, - }; - return &task.threadpool_task; - } - - pub fn updateLockfileIfNeeded( - manager: *PackageManager, - load_result: Lockfile.LoadResult, - ) !void { - if (load_result == .ok and load_result.ok.serializer_result.packages_need_update) { - const slice = manager.lockfile.packages.slice(); - for (slice.items(.meta)) |*meta| { - // these are possibly updated later, but need to make sure none are zero - meta.setHasInstallScript(false); - } - } - - return; - } - - pub fn writeYarnLock(this: *PackageManager) !void { - var printer = Lockfile.Printer{ - .lockfile = this.lockfile, - .options = this.options, - }; - - var tmpname_buf: [512]u8 = undefined; - tmpname_buf[0..8].* = "tmplock-".*; - var tmpfile = FileSystem.RealFS.Tmpfile{}; - var secret: [32]u8 = undefined; - std.mem.writeInt(u64, secret[0..8], @as(u64, @intCast(std.time.milliTimestamp())), .little); - var base64_bytes: [64]u8 = undefined; - std.crypto.random.bytes(&base64_bytes); - - const tmpname__ = std.fmt.bufPrint(tmpname_buf[8..], "{s}", .{std.fmt.fmtSliceHexLower(&base64_bytes)}) catch unreachable; - tmpname_buf[tmpname__.len + 8] = 0; - const tmpname = tmpname_buf[0 .. 
tmpname__.len + 8 :0]; - - tmpfile.create(&FileSystem.instance.fs, tmpname) catch |err| { - Output.prettyErrorln("error: failed to create tmpfile: {s}", .{@errorName(err)}); - Global.crash(); - }; - - var file = tmpfile.file(); - const file_writer = file.writer(); - var buffered_writer = std.io.BufferedWriter(std.heap.page_size_min, @TypeOf(file_writer)){ - .unbuffered_writer = file_writer, - }; - const writer = buffered_writer.writer(); - try Lockfile.Printer.Yarn.print(&printer, @TypeOf(writer), writer); - try buffered_writer.flush(); - - if (comptime Environment.isPosix) { - _ = bun.c.fchmod( - tmpfile.fd.cast(), - // chmod 666, - 0o0000040 | 0o0000004 | 0o0000002 | 0o0000400 | 0o0000200 | 0o0000020, - ); - } - - try tmpfile.promoteToCWD(tmpname, "yarn.lock"); - } - - fn enqueueDependencyWithMain( - this: *PackageManager, - id: DependencyID, - /// This must be a *const to prevent UB - dependency: *const Dependency, - resolution: PackageID, - install_peer: bool, - ) !void { - return this.enqueueDependencyWithMainAndSuccessFn( - id, - dependency, - resolution, - install_peer, - assignResolution, - null, - ); - } - - const debug = Output.scoped(.PackageManager, true); - - fn updateNameAndNameHashFromVersionReplacement( - lockfile: *const Lockfile, - original_name: String, - original_name_hash: PackageNameHash, - new_version: Dependency.Version, - ) struct { String, PackageNameHash } { - return switch (new_version.tag) { - // only get name hash for npm and dist_tag. git, github, tarball don't have names until after extracting tarball - .dist_tag => .{ new_version.value.dist_tag.name, String.Builder.stringHash(lockfile.str(&new_version.value.dist_tag.name)) }, - .npm => .{ new_version.value.npm.name, String.Builder.stringHash(lockfile.str(&new_version.value.npm.name)) }, - .git => .{ new_version.value.git.package_name, original_name_hash }, - .github => .{ new_version.value.github.package_name, original_name_hash }, - .tarball => .{ new_version.value.tarball.package_name, original_name_hash }, - else => .{ original_name, original_name_hash }, - }; - } - - /// Q: "What do we do with a dependency in a package.json?" - /// A: "We enqueue it!" - fn enqueueDependencyWithMainAndSuccessFn( - this: *PackageManager, - id: DependencyID, - /// This must be a *const to prevent UB - dependency: *const Dependency, - resolution: PackageID, - install_peer: bool, - comptime successFn: SuccessFn, - comptime failFn: ?FailFn, - ) !void { - if (dependency.behavior.isOptionalPeer()) return; - - var name = dependency.realname(); - - var name_hash = switch (dependency.version.tag) { - .dist_tag, .git, .github, .npm, .tarball, .workspace => String.Builder.stringHash(this.lockfile.str(&name)), - else => dependency.name_hash, - }; - - const version = version: { - if (dependency.version.tag == .npm) { - if (this.known_npm_aliases.get(name_hash)) |aliased| { - const group = dependency.version.value.npm.version; - const buf = this.lockfile.buffers.string_bytes.items; - var curr_list: ?*const Semver.Query.List = &aliased.value.npm.version.head; - while (curr_list) |queries| { - var curr: ?*const Semver.Query = &queries.head; - while (curr) |query| { - if (group.satisfies(query.range.left.version, buf, buf) or group.satisfies(query.range.right.version, buf, buf)) { - name = aliased.value.npm.name; - name_hash = String.Builder.stringHash(this.lockfile.str(&name)); - break :version aliased; - } - curr = query.next; - } - curr_list = queries.next; - } - - // fallthrough. 
a package that matches the name of an alias but does not match - // the version should be enqueued as a normal npm dependency, overrides allowed - } - } - - // allow overriding all dependencies unless the dependency is coming directly from an alias, "npm:" or - // if it's a workspaceOnly dependency - if (!dependency.behavior.isWorkspaceOnly() and (dependency.version.tag != .npm or !dependency.version.value.npm.is_alias)) { - if (this.lockfile.overrides.get(name_hash)) |new| { - debug("override: {s} -> {s}", .{ this.lockfile.str(&dependency.version.literal), this.lockfile.str(&new.literal) }); - - name, name_hash = updateNameAndNameHashFromVersionReplacement(this.lockfile, name, name_hash, new); - - if (new.tag == .catalog) { - if (this.lockfile.catalogs.get(this.lockfile, new.value.catalog, name)) |catalog_dep| { - name, name_hash = updateNameAndNameHashFromVersionReplacement(this.lockfile, name, name_hash, catalog_dep.version); - break :version catalog_dep.version; - } - } - - // `name_hash` stays the same - break :version new; - } - - if (dependency.version.tag == .catalog) { - if (this.lockfile.catalogs.get(this.lockfile, dependency.version.value.catalog, name)) |catalog_dep| { - name, name_hash = updateNameAndNameHashFromVersionReplacement(this.lockfile, name, name_hash, catalog_dep.version); - - break :version catalog_dep.version; - } - } - } - - // explicit copy here due to `dependency.version` becoming undefined - // when `getOrPutResolvedPackageWithFindResult` is called and resizes the list. - break :version Dependency.Version{ - .literal = dependency.version.literal, - .tag = dependency.version.tag, - .value = dependency.version.value, - }; - }; - var loaded_manifest: ?Npm.PackageManifest = null; - - switch (version.tag) { - .dist_tag, .folder, .npm => { - retry_from_manifests_ptr: while (true) { - var resolve_result_ = this.getOrPutResolvedPackage( - name_hash, - name, - dependency, - version, - dependency.behavior, - id, - resolution, - install_peer, - successFn, - ); - - retry_with_new_resolve_result: while (true) { - const resolve_result = resolve_result_ catch |err| { - switch (err) { - error.DistTagNotFound => { - if (dependency.behavior.isRequired()) { - if (failFn) |fail| { - fail( - this, - dependency, - id, - err, - ); - } else { - this.log.addErrorFmt( - null, - logger.Loc.Empty, - this.allocator, - "Package \"{s}\" with tag \"{s}\" not found, but package exists", - .{ - this.lockfile.str(&name), - this.lockfile.str(&version.value.dist_tag.tag), - }, - ) catch unreachable; - } - } - - return; - }, - error.NoMatchingVersion => { - if (dependency.behavior.isRequired()) { - if (failFn) |fail| { - fail( - this, - dependency, - id, - err, - ); - } else { - this.log.addErrorFmt( - null, - logger.Loc.Empty, - this.allocator, - "No version matching \"{s}\" found for specifier \"{s}\" (but package exists)", - .{ - this.lockfile.str(&version.literal), - this.lockfile.str(&name), - }, - ) catch unreachable; - } - } - return; - }, - else => { - if (failFn) |fail| { - fail( - this, - dependency, - id, - err, - ); - return; - } - - return err; - }, - } - }; - - if (resolve_result) |result| { - // First time? 
- if (result.is_first_time) { - if (PackageManager.verbose_install) { - const label: string = this.lockfile.str(&version.literal); - - Output.prettyErrorln(" -> \"{s}\": \"{s}\" -> {s}@{}", .{ - this.lockfile.str(&result.package.name), - label, - this.lockfile.str(&result.package.name), - result.package.resolution.fmt(this.lockfile.buffers.string_bytes.items, .auto), - }); - } - // Resolve dependencies first - if (result.package.dependencies.len > 0) { - try this.lockfile.scratch.dependency_list_queue.writeItem(result.package.dependencies); - } - } - - if (result.task != null) { - switch (result.task.?) { - .network_task => |network_task| { - if (this.getPreinstallState(result.package.meta.id) == .extract) { - this.setPreinstallState(result.package.meta.id, this.lockfile, .extracting); - this.enqueueNetworkTask(network_task); - } - }, - .patch_task => |patch_task| { - if (patch_task.callback == .calc_hash and this.getPreinstallState(result.package.meta.id) == .calc_patch_hash) { - this.setPreinstallState(result.package.meta.id, this.lockfile, .calcing_patch_hash); - this.enqueuePatchTask(patch_task); - } else if (patch_task.callback == .apply and this.getPreinstallState(result.package.meta.id) == .apply_patch) { - this.setPreinstallState(result.package.meta.id, this.lockfile, .applying_patch); - this.enqueuePatchTask(patch_task); - } - }, - } - } - - if (comptime Environment.allow_assert) - debug( - "enqueueDependency({d}, {s}, {s}, {s}) = {d}", - .{ - id, - @tagName(version.tag), - this.lockfile.str(&name), - this.lockfile.str(&version.literal), - result.package.meta.id, - }, - ); - } else if (version.tag.isNPM()) { - const name_str = this.lockfile.str(&name); - const task_id = Task.Id.forManifest(name_str); - - if (comptime Environment.allow_assert) bun.assert(task_id != 0); - - if (comptime Environment.allow_assert) - debug( - "enqueueDependency({d}, {s}, {s}, {s}) = task {d}", - .{ - id, - @tagName(version.tag), - this.lockfile.str(&name), - this.lockfile.str(&version.literal), - task_id, - }, - ); - - if (!dependency.behavior.isPeer() or install_peer) { - if (!this.hasCreatedNetworkTask(task_id, dependency.behavior.isRequired())) { - if (this.options.enable.manifest_cache) { - var expired = false; - if (this.manifests.byNameHashAllowExpired( - this, - this.scopeForPackageName(name_str), - name_hash, - &expired, - .load_from_memory_fallback_to_disk, - )) |manifest| { - loaded_manifest = manifest.*; - - // If it's an exact package version already living in the cache - // We can skip the network request, even if it's beyond the caching period - if (version.tag == .npm and version.value.npm.version.isExact()) { - if (loaded_manifest.?.findByVersion(version.value.npm.version.head.head.range.left.version)) |find_result| { - if (this.getOrPutResolvedPackageWithFindResult( - name_hash, - name, - dependency, - version, - id, - dependency.behavior, - &loaded_manifest.?, - find_result, - install_peer, - successFn, - ) catch null) |new_resolve_result| { - resolve_result_ = new_resolve_result; - _ = this.network_dedupe_map.remove(task_id); - continue :retry_with_new_resolve_result; - } - } - } - - // Was it recent enough to just load it without the network call? 
- if (this.options.enable.manifest_cache_control and !expired) { - _ = this.network_dedupe_map.remove(task_id); - continue :retry_from_manifests_ptr; - } - } - } - - if (PackageManager.verbose_install) { - Output.prettyErrorln("Enqueue package manifest for download: {s}", .{name_str}); - } - - var network_task = this.getNetworkTask(); - network_task.* = .{ - .package_manager = this, - .callback = undefined, - .task_id = task_id, - .allocator = this.allocator, - }; - try network_task.forManifest( - name_str, - this.allocator, - this.scopeForPackageName(name_str), - if (loaded_manifest) |*manifest| manifest else null, - dependency.behavior.isOptional(), - ); - this.enqueueNetworkTask(network_task); - } - } else { - try this.peer_dependencies.writeItem(id); - return; - } - - var manifest_entry_parse = try this.task_queue.getOrPutContext(this.allocator, task_id, .{}); - if (!manifest_entry_parse.found_existing) { - manifest_entry_parse.value_ptr.* = TaskCallbackList{}; - } - - const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency"; - try manifest_entry_parse.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id)); - } - return; - } - } - return; - }, - .git => { - const dep = &version.value.git; - const res = Resolution{ - .tag = .git, - .value = .{ - .git = dep.*, - }, - }; - - // First: see if we already loaded the git package in-memory - if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| { - successFn(this, id, pkg_id); - return; - } - - const alias = this.lockfile.str(&dependency.name); - const url = this.lockfile.str(&dep.repo); - const clone_id = Task.Id.forGitClone(url); - const ctx = @unionInit( - TaskCallbackContext, - if (successFn == assignRootResolution) "root_dependency" else "dependency", - id, - ); - - if (comptime Environment.allow_assert) - debug( - "enqueueDependency({d}, {s}, {s}, {s}) = {s}", - .{ - id, - @tagName(version.tag), - this.lockfile.str(&name), - this.lockfile.str(&version.literal), - url, - }, - ); - - if (this.git_repositories.get(clone_id)) |repo_fd| { - const resolved = try Repository.findCommit( - this.allocator, - this.env, - this.log, - repo_fd.stdDir(), - alias, - this.lockfile.str(&dep.committish), - clone_id, - ); - const checkout_id = Task.Id.forGitCheckout(url, resolved); - - var entry = this.task_queue.getOrPutContext(this.allocator, checkout_id, .{}) catch unreachable; - if (!entry.found_existing) entry.value_ptr.* = .{}; - if (this.lockfile.buffers.resolutions.items[id] == invalid_package_id) { - try entry.value_ptr.append(this.allocator, ctx); - } - - if (dependency.behavior.isPeer()) { - if (!install_peer) { - try this.peer_dependencies.writeItem(id); - return; - } - } - - if (this.hasCreatedNetworkTask(checkout_id, dependency.behavior.isRequired())) return; - - this.task_batch.push(ThreadPool.Batch.from(this.enqueueGitCheckout( - checkout_id, - repo_fd, - id, - alias, - res, - resolved, - null, - ))); - } else { - var entry = this.task_queue.getOrPutContext(this.allocator, clone_id, .{}) catch unreachable; - if (!entry.found_existing) entry.value_ptr.* = .{}; - try entry.value_ptr.append(this.allocator, ctx); - - if (dependency.behavior.isPeer()) { - if (!install_peer) { - try this.peer_dependencies.writeItem(id); - return; - } - } - - if (this.hasCreatedNetworkTask(clone_id, dependency.behavior.isRequired())) return; - - this.task_batch.push(ThreadPool.Batch.from(this.enqueueGitClone(clone_id, alias, dep, id, dependency, &res, null))); - } - }, - .github 
=> { - const dep = &version.value.github; - const res = Resolution{ - .tag = .github, - .value = .{ - .github = dep.*, - }, - }; - - // First: see if we already loaded the github package in-memory - if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| { - successFn(this, id, pkg_id); - return; - } - - const url = this.allocGitHubURL(dep); - defer this.allocator.free(url); - const task_id = Task.Id.forTarball(url); - var entry = this.task_queue.getOrPutContext(this.allocator, task_id, .{}) catch unreachable; - if (!entry.found_existing) { - entry.value_ptr.* = TaskCallbackList{}; - } - - if (comptime Environment.allow_assert) - debug( - "enqueueDependency({d}, {s}, {s}, {s}) = {s}", - .{ - id, - @tagName(version.tag), - this.lockfile.str(&name), - this.lockfile.str(&version.literal), - url, - }, - ); - - const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency"; - try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id)); - - if (dependency.behavior.isPeer()) { - if (!install_peer) { - try this.peer_dependencies.writeItem(id); - return; - } - } - - if (try this.generateNetworkTaskForTarball( - task_id, - url, - dependency.behavior.isRequired(), - id, - .{ - .name = dependency.name, - .name_hash = dependency.name_hash, - .resolution = res, - }, - null, - .no_authorization, - )) |network_task| { - this.enqueueNetworkTask(network_task); - } - }, - inline .symlink, .workspace => |dependency_tag| { - const _result = this.getOrPutResolvedPackage( - name_hash, - name, - dependency, - version, - dependency.behavior, - id, - resolution, - install_peer, - successFn, - ) catch |err| brk: { - if (err == error.MissingPackageJSON) { - break :brk @as(?ResolvedPackageResult, null); - } - - return err; - }; - - const workspace_not_found_fmt = - \\Workspace dependency "{[name]s}" not found - \\ - \\Searched in {[search_path]} - \\ - \\Workspace documentation: https://bun.sh/docs/install/workspaces - \\ - ; - const link_not_found_fmt = - \\Package "{[name]s}" is not linked - \\ - \\To install a linked package: - \\ bun link my-pkg-name-from-package-json - \\ - \\Tip: the package name is from package.json, which can differ from the folder name. - \\ - ; - if (_result) |result| { - // First time? 
- if (result.is_first_time) { - if (PackageManager.verbose_install) { - const label: string = this.lockfile.str(&version.literal); - - Output.prettyErrorln(" -> \"{s}\": \"{s}\" -> {s}@{}", .{ - this.lockfile.str(&result.package.name), - label, - this.lockfile.str(&result.package.name), - result.package.resolution.fmt(this.lockfile.buffers.string_bytes.items, .auto), - }); - } - // We shouldn't see any dependencies - if (result.package.dependencies.len > 0) { - try this.lockfile.scratch.dependency_list_queue.writeItem(result.package.dependencies); - } - } - - // should not trigger a network call - if (comptime Environment.allow_assert) bun.assert(result.task == null); - - if (comptime Environment.allow_assert) - debug( - "enqueueDependency({d}, {s}, {s}, {s}) = {d}", - .{ - id, - @tagName(version.tag), - this.lockfile.str(&name), - this.lockfile.str(&version.literal), - result.package.meta.id, - }, - ); - } else if (dependency.behavior.isRequired()) { - if (comptime dependency_tag == .workspace) { - this.log.addErrorFmt( - null, - logger.Loc.Empty, - this.allocator, - workspace_not_found_fmt, - .{ - .name = this.lockfile.str(&name), - .search_path = FolderResolution.PackageWorkspaceSearchPathFormatter{ .manager = this, .version = version }, - }, - ) catch unreachable; - } else { - this.log.addErrorFmt( - null, - logger.Loc.Empty, - this.allocator, - link_not_found_fmt, - .{ - .name = this.lockfile.str(&name), - }, - ) catch unreachable; - } - } else if (this.options.log_level.isVerbose()) { - if (comptime dependency_tag == .workspace) { - this.log.addWarningFmt( - null, - logger.Loc.Empty, - this.allocator, - workspace_not_found_fmt, - .{ - .name = this.lockfile.str(&name), - .search_path = FolderResolution.PackageWorkspaceSearchPathFormatter{ .manager = this, .version = version }, - }, - ) catch unreachable; - } else { - this.log.addWarningFmt( - null, - logger.Loc.Empty, - this.allocator, - link_not_found_fmt, - .{ - .name = this.lockfile.str(&name), - }, - ) catch unreachable; - } - } - }, - .tarball => { - const res: Resolution = switch (version.value.tarball.uri) { - .local => |path| .{ - .tag = .local_tarball, - .value = .{ - .local_tarball = path, - }, - }, - .remote => |url| .{ - .tag = .remote_tarball, - .value = .{ - .remote_tarball = url, - }, - }, - }; - - // First: see if we already loaded the tarball package in-memory - if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| { - successFn(this, id, pkg_id); - return; - } - - const url = switch (version.value.tarball.uri) { - .local => |path| this.lockfile.str(&path), - .remote => |url| this.lockfile.str(&url), - }; - const task_id = Task.Id.forTarball(url); - var entry = this.task_queue.getOrPutContext(this.allocator, task_id, .{}) catch unreachable; - if (!entry.found_existing) { - entry.value_ptr.* = TaskCallbackList{}; - } - - if (comptime Environment.allow_assert) - debug( - "enqueueDependency({d}, {s}, {s}, {s}) = {s}", - .{ - id, - @tagName(version.tag), - this.lockfile.str(&name), - this.lockfile.str(&version.literal), - url, - }, - ); - - const callback_tag = comptime if (successFn == assignRootResolution) "root_dependency" else "dependency"; - try entry.value_ptr.append(this.allocator, @unionInit(TaskCallbackContext, callback_tag, id)); - - if (dependency.behavior.isPeer()) { - if (!install_peer) { - try this.peer_dependencies.writeItem(id); - return; - } - } - - switch (version.value.tarball.uri) { - .local => { - if (this.hasCreatedNetworkTask(task_id, dependency.behavior.isRequired())) return; - - 
this.task_batch.push(ThreadPool.Batch.from(this.enqueueLocalTarball( - task_id, - id, - this.lockfile.str(&dependency.name), - url, - res, - ))); - }, - .remote => { - if (try this.generateNetworkTaskForTarball( - task_id, - url, - dependency.behavior.isRequired(), - id, - .{ - .name = dependency.name, - .name_hash = dependency.name_hash, - .resolution = res, - }, - null, - .no_authorization, - )) |network_task| { - this.enqueueNetworkTask(network_task); - } - }, - } - }, - else => {}, - } - } - - pub fn flushNetworkQueue(this: *PackageManager) void { - var network = &this.network_task_fifo; - - while (network.readItem()) |network_task| { - network_task.schedule(if (network_task.callback == .extract) &this.network_tarball_batch else &this.network_resolve_batch); - } - } - - fn flushPatchTaskQueue(this: *PackageManager) void { - var patch_task_fifo = &this.patch_task_fifo; - - while (patch_task_fifo.readItem()) |patch_task| { - patch_task.schedule(if (patch_task.callback == .apply) &this.patch_apply_batch else &this.patch_calc_hash_batch); - } - } - - fn doFlushDependencyQueue(this: *PackageManager) void { - var lockfile = this.lockfile; - var dependency_queue = &lockfile.scratch.dependency_list_queue; - - while (dependency_queue.readItem()) |dependencies_list| { - var i: u32 = dependencies_list.off; - const end = dependencies_list.off + dependencies_list.len; - while (i < end) : (i += 1) { - const dependency = lockfile.buffers.dependencies.items[i]; - this.enqueueDependencyWithMain( - i, - &dependency, - lockfile.buffers.resolutions.items[i], - false, - ) catch {}; - } - } - - this.flushNetworkQueue(); - } - pub fn flushDependencyQueue(this: *PackageManager) void { - var last_count = this.total_tasks; - while (true) : (last_count = this.total_tasks) { - this.flushNetworkQueue(); - this.doFlushDependencyQueue(); - this.flushNetworkQueue(); - this.flushPatchTaskQueue(); - - if (this.total_tasks == last_count) break; - } - } - - pub fn scheduleTasks(manager: *PackageManager) usize { - const count = manager.task_batch.len + manager.network_resolve_batch.len + manager.network_tarball_batch.len + manager.patch_apply_batch.len + manager.patch_calc_hash_batch.len; - - _ = manager.incrementPendingTasks(@truncate(count)); - manager.thread_pool.schedule(manager.patch_apply_batch); - manager.thread_pool.schedule(manager.patch_calc_hash_batch); - manager.thread_pool.schedule(manager.task_batch); - manager.network_resolve_batch.push(manager.network_tarball_batch); - HTTP.http_thread.schedule(manager.network_resolve_batch); - manager.task_batch = .{}; - manager.network_tarball_batch = .{}; - manager.network_resolve_batch = .{}; - manager.patch_apply_batch = .{}; - manager.patch_calc_hash_batch = .{}; - return count; - } - - pub fn enqueueDependencyList( - this: *PackageManager, - dependencies_list: Lockfile.DependencySlice, - ) void { - this.task_queue.ensureUnusedCapacity(this.allocator, dependencies_list.len) catch unreachable; - const lockfile = this.lockfile; - - // Step 1. 
Go through main dependencies - var begin = dependencies_list.off; - const end = dependencies_list.off +| dependencies_list.len; - - // if dependency is peer and is going to be installed - // through "dependencies", skip it - if (end - begin > 1 and lockfile.buffers.dependencies.items[0].behavior.isPeer()) { - var peer_i: usize = 0; - var peer = &lockfile.buffers.dependencies.items[peer_i]; - while (peer.behavior.isPeer()) { - var dep_i: usize = end - 1; - var dep = lockfile.buffers.dependencies.items[dep_i]; - while (!dep.behavior.isPeer()) { - if (!dep.behavior.isDev()) { - if (peer.name_hash == dep.name_hash) { - peer.* = lockfile.buffers.dependencies.items[begin]; - begin += 1; - break; - } - } - dep_i -= 1; - dep = lockfile.buffers.dependencies.items[dep_i]; - } - peer_i += 1; - if (peer_i == end) break; - peer = &lockfile.buffers.dependencies.items[peer_i]; - } - } - - var i = begin; - - // we have to be very careful with pointers here - while (i < end) : (i += 1) { - const dependency = lockfile.buffers.dependencies.items[i]; - const resolution = lockfile.buffers.resolutions.items[i]; - this.enqueueDependencyWithMain( - i, - &dependency, - resolution, - false, - ) catch |err| { - const note = .{ - .fmt = "error occurred while resolving {}", - .args = .{bun.fmt.fmtPath(u8, lockfile.str(&dependency.realname()), .{ - .path_sep = switch (dependency.version.tag) { - .folder => .auto, - else => .any, - }, - })}, - }; - - if (dependency.behavior.isOptional() or dependency.behavior.isPeer()) - this.log.addWarningWithNote(null, .{}, this.allocator, @errorName(err), note.fmt, note.args) catch unreachable - else - this.log.addZigErrorWithNote(this.allocator, err, note.fmt, note.args) catch unreachable; - - continue; - }; - } - - this.drainDependencyList(); - } - - pub fn drainDependencyList(this: *PackageManager) void { - // Step 2. If there were cached dependencies, go through all of those but don't download the devDependencies for them. - this.flushDependencyQueue(); - - if (PackageManager.verbose_install) Output.flush(); - - // It's only network requests here because we don't store tarballs. 
- _ = this.scheduleTasks(); - } - - fn processDependencyListItem( - this: *PackageManager, - item: TaskCallbackContext, - any_root: ?*bool, - install_peer: bool, - ) !void { - switch (item) { - .dependency => |dependency_id| { - const dependency = this.lockfile.buffers.dependencies.items[dependency_id]; - const resolution = this.lockfile.buffers.resolutions.items[dependency_id]; - - try this.enqueueDependencyWithMain( - dependency_id, - &dependency, - resolution, - install_peer, - ); - }, - .root_dependency => |dependency_id| { - const dependency = this.lockfile.buffers.dependencies.items[dependency_id]; - const resolution = this.lockfile.buffers.resolutions.items[dependency_id]; - - try this.enqueueDependencyWithMainAndSuccessFn( - dependency_id, - &dependency, - resolution, - install_peer, - assignRootResolution, - failRootResolution, - ); - if (any_root) |ptr| { - const new_resolution_id = this.lockfile.buffers.resolutions.items[dependency_id]; - if (new_resolution_id != resolution) { - ptr.* = true; - } - } - }, - else => {}, - } - } - - fn processPeerDependencyList( - this: *PackageManager, - ) !void { - while (this.peer_dependencies.readItem()) |peer_dependency_id| { - const dependency = this.lockfile.buffers.dependencies.items[peer_dependency_id]; - const resolution = this.lockfile.buffers.resolutions.items[peer_dependency_id]; - - try this.enqueueDependencyWithMain( - peer_dependency_id, - &dependency, - resolution, - true, - ); - } - } - - fn processDependencyList( - this: *PackageManager, - dep_list: TaskCallbackList, - comptime Context: type, - ctx: Context, - comptime callbacks: anytype, - install_peer: bool, - ) !void { - if (dep_list.items.len > 0) { - var dependency_list = dep_list; - var any_root = false; - for (dependency_list.items) |item| { - try this.processDependencyListItem(item, &any_root, install_peer); - } - - if (comptime @TypeOf(callbacks) != void and @TypeOf(callbacks.onResolve) != void) { - if (any_root) { - callbacks.onResolve(ctx); - } - } - - dependency_list.deinit(this.allocator); - } - } - - pub const GitResolver = struct { - resolved: string, - resolution: *const Resolution, - dep_id: DependencyID, - new_name: []u8 = "", - - pub fn count(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) void { - builder.count(this.resolved); - } - - pub fn resolve(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) !Resolution { - var resolution = this.resolution.*; - resolution.value.github.resolved = builder.append(String, this.resolved); - return resolution; - } - - pub fn checkBundledDependencies() bool { - return true; - } - }; - - const TarballResolver = struct { - url: string, - resolution: *const Resolution, - - pub fn count(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) void { - builder.count(this.url); - } - - pub fn resolve(this: @This(), comptime Builder: type, builder: Builder, _: JSAst.Expr) !Resolution { - var resolution = this.resolution.*; - switch (resolution.tag) { - .local_tarball => { - resolution.value.local_tarball = builder.append(String, this.url); - }, - .remote_tarball => { - resolution.value.remote_tarball = builder.append(String, this.url); - }, - else => unreachable, - } - return resolution; - } - - pub fn checkBundledDependencies() bool { - return true; - } - }; - - /// Returns true if we need to drain dependencies - fn processExtractedTarballPackage( - manager: *PackageManager, - package_id: *PackageID, - dep_id: DependencyID, - resolution: *const Resolution, - data: *const 
ExtractData, - log_level: Options.LogLevel, - ) ?Lockfile.Package { - switch (resolution.tag) { - .git, .github => { - var package = package: { - var resolver = GitResolver{ - .resolved = data.resolved, - .resolution = resolution, - .dep_id = dep_id, - }; - - var pkg = Lockfile.Package{}; - if (data.json) |json| { - const package_json_source = &logger.Source.initPathString( - json.path, - json.buf, - ); - - pkg.parse( - manager.lockfile, - manager, - manager.allocator, - manager.log, - package_json_source, - GitResolver, - &resolver, - Features.npm, - ) catch |err| { - if (log_level != .silent) { - const string_buf = manager.lockfile.buffers.string_bytes.items; - Output.err(err, "failed to parse package.json for {}", .{ - resolution.fmtURL(string_buf), - }); - } - Global.crash(); - }; - - const has_scripts = pkg.scripts.hasAny() or brk: { - const dir = std.fs.path.dirname(json.path) orelse ""; - const binding_dot_gyp_path = Path.joinAbsStringZ( - dir, - &[_]string{"binding.gyp"}, - .auto, - ); - - break :brk Syscall.exists(binding_dot_gyp_path); - }; - - pkg.meta.setHasInstallScript(has_scripts); - break :package pkg; - } - - // package.json doesn't exist, no dependencies to worry about but we need to decide on a name for the dependency - var repo = switch (resolution.tag) { - .git => resolution.value.git, - .github => resolution.value.github, - else => unreachable, - }; - - const new_name = Repository.createDependencyNameFromVersionLiteral(manager.allocator, &repo, manager.lockfile, dep_id); - defer manager.allocator.free(new_name); - - { - var builder = manager.lockfile.stringBuilder(); - - builder.count(new_name); - resolver.count(*Lockfile.StringBuilder, &builder, undefined); - - builder.allocate() catch bun.outOfMemory(); - - const name = builder.append(ExternalString, new_name); - pkg.name = name.value; - pkg.name_hash = name.hash; - - pkg.resolution = resolver.resolve(*Lockfile.StringBuilder, &builder, undefined) catch unreachable; - } - - break :package pkg; - }; - - package = manager.lockfile.appendPackage(package) catch unreachable; - package_id.* = package.meta.id; - - if (package.dependencies.len > 0) { - manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch bun.outOfMemory(); - } - - return package; - }, - .local_tarball, .remote_tarball => { - const json = data.json.?; - const package_json_source = &logger.Source.initPathString( - json.path, - json.buf, - ); - var package = Lockfile.Package{}; - - var resolver: TarballResolver = .{ - .url = data.url, - .resolution = resolution, - }; - - package.parse( - manager.lockfile, - manager, - manager.allocator, - manager.log, - package_json_source, - TarballResolver, - &resolver, - Features.npm, - ) catch |err| { - if (log_level != .silent) { - const string_buf = manager.lockfile.buffers.string_bytes.items; - Output.prettyErrorln("error: expected package.json in {any} to be a JSON file: {s}\n", .{ - resolution.fmtURL(string_buf), - @errorName(err), - }); - } - Global.crash(); - }; - - const has_scripts = package.scripts.hasAny() or brk: { - const dir = std.fs.path.dirname(json.path) orelse ""; - const binding_dot_gyp_path = Path.joinAbsStringZ( - dir, - &[_]string{"binding.gyp"}, - .auto, - ); - - break :brk Syscall.exists(binding_dot_gyp_path); - }; - - package.meta.setHasInstallScript(has_scripts); - - package = manager.lockfile.appendPackage(package) catch unreachable; - package_id.* = package.meta.id; - - if (package.dependencies.len > 0) { - 
manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch bun.outOfMemory(); - } - - return package; - }, - else => if (data.json.?.buf.len > 0) { - const json = data.json.?; - const package_json_source = &logger.Source.initPathString( - json.path, - json.buf, - ); - initializeStore(); - const json_root = JSON.parsePackageJSONUTF8( - package_json_source, - manager.log, - manager.allocator, - ) catch |err| { - if (log_level != .silent) { - const string_buf = manager.lockfile.buffers.string_bytes.items; - Output.prettyErrorln("error: expected package.json in {any} to be a JSON file: {s}\n", .{ - resolution.fmtURL(string_buf), - @errorName(err), - }); - } - Global.crash(); - }; - var builder = manager.lockfile.stringBuilder(); - Lockfile.Package.Scripts.parseCount(manager.allocator, &builder, json_root); - builder.allocate() catch unreachable; - if (comptime Environment.allow_assert) bun.assert(package_id.* != invalid_package_id); - var scripts = manager.lockfile.packages.items(.scripts)[package_id.*]; - scripts.parseAlloc(manager.allocator, &builder, json_root); - scripts.filled = true; - }, - } - - return null; - } - - const CacheDir = struct { path: string, is_node_modules: bool }; - pub fn fetchCacheDirectoryPath(env: *DotEnv.Loader, options: ?*const Options) CacheDir { - if (env.get("BUN_INSTALL_CACHE_DIR")) |dir| { - return CacheDir{ .path = Fs.FileSystem.instance.abs(&[_]string{dir}), .is_node_modules = false }; - } - - if (options) |opts| { - if (opts.cache_directory.len > 0) { - return CacheDir{ .path = Fs.FileSystem.instance.abs(&[_]string{opts.cache_directory}), .is_node_modules = false }; - } - } - - if (env.get("BUN_INSTALL")) |dir| { - var parts = [_]string{ dir, "install/", "cache/" }; - return CacheDir{ .path = Fs.FileSystem.instance.abs(&parts), .is_node_modules = false }; - } - - if (env.get("XDG_CACHE_HOME")) |dir| { - var parts = [_]string{ dir, ".bun/", "install/", "cache/" }; - return CacheDir{ .path = Fs.FileSystem.instance.abs(&parts), .is_node_modules = false }; - } - - if (env.get(bun.DotEnv.home_env)) |dir| { - var parts = [_]string{ dir, ".bun/", "install/", "cache/" }; - return CacheDir{ .path = Fs.FileSystem.instance.abs(&parts), .is_node_modules = false }; - } - - var fallback_parts = [_]string{"node_modules/.bun-cache"}; - return CacheDir{ .is_node_modules = true, .path = Fs.FileSystem.instance.abs(&fallback_parts) }; - } - - pub fn runTasks( - manager: *PackageManager, - comptime ExtractCompletionContext: type, - extract_ctx: ExtractCompletionContext, - comptime callbacks: anytype, - install_peer: bool, - log_level: Options.LogLevel, - ) anyerror!void { - var has_updated_this_run = false; - var has_network_error = false; - - var timestamp_this_tick: ?u32 = null; - - defer { - manager.drainDependencyList(); - - if (log_level.showProgress()) { - manager.startProgressBarIfNone(); - - if (@hasField(@TypeOf(callbacks), "progress_bar") and callbacks.progress_bar == true) { - const completed_items = manager.total_tasks - manager.pendingTaskCount(); - if (completed_items != manager.downloads_node.?.unprotected_completed_items or has_updated_this_run) { - manager.downloads_node.?.setCompletedItems(completed_items); - manager.downloads_node.?.setEstimatedTotalItems(manager.total_tasks); - } - } - manager.downloads_node.?.activate(); - manager.progress.maybeRefresh(); - } - } - - var patch_tasks_batch = manager.patch_task_queue.popBatch(); - var patch_tasks_iter = patch_tasks_batch.iterator(); - while (patch_tasks_iter.next()) |ptask| { - if 
(comptime Environment.allow_assert) bun.assert(manager.pendingTaskCount() > 0); - _ = manager.decrementPendingTasks(); - defer ptask.deinit(); - try ptask.runFromMainThread(manager, log_level); - if (ptask.callback == .apply) { - if (ptask.callback.apply.logger.errors == 0) { - if (comptime @TypeOf(callbacks.onExtract) != void) { - if (ptask.callback.apply.task_id) |task_id| { - _ = task_id; // autofix - - } else if (ExtractCompletionContext == *PackageInstaller) { - if (ptask.callback.apply.install_context) |*ctx| { - var installer: *PackageInstaller = extract_ctx; - const path = ctx.path; - ctx.path = std.ArrayList(u8).init(bun.default_allocator); - installer.node_modules.path = path; - installer.current_tree_id = ctx.tree_id; - const pkg_id = ptask.callback.apply.pkg_id; - const resolution = &manager.lockfile.packages.items(.resolution)[pkg_id]; - - installer.installPackageWithNameAndResolution( - ctx.dependency_id, - pkg_id, - log_level, - ptask.callback.apply.pkgname, - resolution, - false, - false, - ); - } - } - } - } - } - } - - var network_tasks_batch = manager.async_network_task_queue.popBatch(); - var network_tasks_iter = network_tasks_batch.iterator(); - while (network_tasks_iter.next()) |task| { - if (comptime Environment.allow_assert) bun.assert(manager.pendingTaskCount() > 0); - _ = manager.decrementPendingTasks(); - // We cannot free the network task at the end of this scope. - // It may continue to be referenced in a future task. - - switch (task.callback) { - .package_manifest => |*manifest_req| { - const name = manifest_req.name; - if (log_level.showProgress()) { - if (!has_updated_this_run) { - manager.setNodeName(manager.downloads_node.?, name.slice(), ProgressStrings.download_emoji, true); - has_updated_this_run = true; - } - } - - if (!has_network_error and task.response.metadata == null) { - has_network_error = true; - const min = manager.options.min_simultaneous_requests; - const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); - if (max > min) { - AsyncHTTP.max_simultaneous_requests.store(@max(min, max / 2), .monotonic); - } - } - - // Handle retry-able errors. - if (task.response.metadata == null or task.response.metadata.?.response.status_code > 499) { - const err = task.response.fail orelse error.HTTPError; - - if (task.retried < manager.options.max_retry_count) { - task.retried += 1; - manager.enqueueNetworkTask(task); - - if (manager.options.log_level.isVerbose()) { - manager.log.addWarningFmt( - null, - logger.Loc.Empty, - manager.allocator, - "{s} downloading package manifest {s}. Retry {d}/{d}...", - .{ bun.span(@errorName(err)), name.slice(), task.retried, manager.options.max_retry_count }, - ) catch unreachable; - } - - continue; - } - } - - const metadata = task.response.metadata orelse { - // Handle non-retry-able errors. 
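Two things to note in the error handling above before the non-retryable path: the first network error seen in a tick halves the global request-concurrency cap (never below the configured minimum), and any task with retries remaining is simply re-enqueued. A minimal sketch of the halving step, with an illustrative atomic in place of `AsyncHTTP.max_simultaneous_requests`:

```zig
const std = @import("std");

// Illustrative stand-in for AsyncHTTP.max_simultaneous_requests.
var max_simultaneous_requests = std.atomic.Value(u32).init(64);

/// Halve the global concurrency cap, clamped to the configured minimum.
/// The caller's `has_network_error` flag keeps later errors in the same
/// tick from shrinking it further.
fn halveConcurrency(min: u32) void {
    const current = max_simultaneous_requests.load(.monotonic);
    if (current > min) {
        max_simultaneous_requests.store(@max(min, current / 2), .monotonic);
    }
}
```

Halving once per tick rather than once per failure keeps a single bad batch of requests from collapsing the limit straight to the floor.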
- const err = task.response.fail orelse error.HTTPError; - - if (@TypeOf(callbacks.onPackageManifestError) != void) { - callbacks.onPackageManifestError( - extract_ctx, - name.slice(), - err, - task.url_buf, - ); - } else { - const fmt = "{s} downloading package manifest {s}"; - if (manager.isNetworkTaskRequired(task.task_id)) { - manager.log.addErrorFmt( - null, - logger.Loc.Empty, - manager.allocator, - fmt, - .{ @errorName(err), name.slice() }, - ) catch bun.outOfMemory(); - } else { - manager.log.addWarningFmt( - null, - logger.Loc.Empty, - manager.allocator, - fmt, - .{ @errorName(err), name.slice() }, - ) catch bun.outOfMemory(); - } - - if (manager.subcommand != .remove) { - for (manager.update_requests) |*request| { - if (strings.eql(request.name, name.slice())) { - request.failed = true; - manager.options.do.save_lockfile = false; - manager.options.do.save_yarn_lock = false; - manager.options.do.install_packages = false; - } - } - } - } - - continue; - }; - const response = metadata.response; - - if (response.status_code > 399) { - if (@TypeOf(callbacks.onPackageManifestError) != void) { - const err: PackageManifestError = switch (response.status_code) { - 400 => error.PackageManifestHTTP400, - 401 => error.PackageManifestHTTP401, - 402 => error.PackageManifestHTTP402, - 403 => error.PackageManifestHTTP403, - 404 => error.PackageManifestHTTP404, - 405...499 => error.PackageManifestHTTP4xx, - else => error.PackageManifestHTTP5xx, - }; - - callbacks.onPackageManifestError( - extract_ctx, - name.slice(), - err, - task.url_buf, - ); - - continue; - } - - if (manager.isNetworkTaskRequired(task.task_id)) { - manager.log.addErrorFmt( - null, - logger.Loc.Empty, - manager.allocator, - "GET {s} - {d}", - .{ metadata.url, response.status_code }, - ) catch bun.outOfMemory(); - } else { - manager.log.addWarningFmt( - null, - logger.Loc.Empty, - manager.allocator, - "GET {s} - {d}", - .{ metadata.url, response.status_code }, - ) catch bun.outOfMemory(); - } - if (manager.subcommand != .remove) { - for (manager.update_requests) |*request| { - if (strings.eql(request.name, name.slice())) { - request.failed = true; - manager.options.do.save_lockfile = false; - manager.options.do.save_yarn_lock = false; - manager.options.do.install_packages = false; - } - } - } - - continue; - } - - if (log_level.isVerbose()) { - Output.prettyError(" ", .{}); - Output.printElapsed(@as(f64, @floatFromInt(task.unsafe_http_client.elapsed)) / std.time.ns_per_ms); - Output.prettyError("\nDownloaded {s} versions\n", .{name.slice()}); - Output.flush(); - } - - if (response.status_code == 304) { - // The HTTP request was cached - if (manifest_req.loaded_manifest) |manifest| { - const entry = try manager.manifests.hash_map.getOrPut(manager.allocator, manifest.pkg.name.hash); - entry.value_ptr.* = .{ .manifest = manifest }; - - if (timestamp_this_tick == null) { - timestamp_this_tick = @as(u32, @truncate(@as(u64, @intCast(@max(0, std.time.timestamp()))))) +| 300; - } - - entry.value_ptr.manifest.pkg.public_max_age = timestamp_this_tick.?; - - if (manager.options.enable.manifest_cache) { - Npm.PackageManifest.Serializer.saveAsync( - &entry.value_ptr.manifest, - manager.scopeForPackageName(name.slice()), - manager.getTemporaryDirectory(), - manager.getCacheDirectory(), - ); - } - - if (@hasField(@TypeOf(callbacks), "manifests_only") and callbacks.manifests_only) { - continue; - } - - const dependency_list_entry = manager.task_queue.getEntry(task.task_id).?; - - const dependency_list = dependency_list_entry.value_ptr.*; - 
dependency_list_entry.value_ptr.* = .{}; - - try manager.processDependencyList( - dependency_list, - ExtractCompletionContext, - extract_ctx, - callbacks, - install_peer, - ); - - continue; - } - } - - manager.task_batch.push(ThreadPool.Batch.from(manager.enqueueParseNPMPackage(task.task_id, name, task))); - }, - .extract => |*extract| { - if (!has_network_error and task.response.metadata == null) { - has_network_error = true; - const min = manager.options.min_simultaneous_requests; - const max = AsyncHTTP.max_simultaneous_requests.load(.monotonic); - if (max > min) { - AsyncHTTP.max_simultaneous_requests.store(@max(min, max / 2), .monotonic); - } - } - - if (task.response.metadata == null or task.response.metadata.?.response.status_code > 499) { - const err = task.response.fail orelse error.TarballFailedToDownload; - - if (task.retried < manager.options.max_retry_count) { - task.retried += 1; - manager.enqueueNetworkTask(task); - - if (manager.options.log_level.isVerbose()) { - manager.log.addWarningFmt( - null, - logger.Loc.Empty, - manager.allocator, - "warn: {s} downloading tarball {s}@{s}. Retrying {d}/{d}...", - .{ - bun.span(@errorName(err)), - extract.name.slice(), - extract.resolution.fmt(manager.lockfile.buffers.string_bytes.items, .auto), - task.retried, - manager.options.max_retry_count, - }, - ) catch unreachable; - } - - continue; - } - } - - const metadata = task.response.metadata orelse { - const err = task.response.fail orelse error.TarballFailedToDownload; - - if (@TypeOf(callbacks.onPackageDownloadError) != void) { - const package_id = manager.lockfile.buffers.resolutions.items[extract.dependency_id]; - callbacks.onPackageDownloadError( - extract_ctx, - package_id, - extract.name.slice(), - &extract.resolution, - err, - task.url_buf, - ); - continue; - } - - const fmt = "{s} downloading tarball {s}@{s}"; - if (manager.isNetworkTaskRequired(task.task_id)) { - manager.log.addErrorFmt( - null, - logger.Loc.Empty, - manager.allocator, - fmt, - .{ - @errorName(err), - extract.name.slice(), - extract.resolution.fmt(manager.lockfile.buffers.string_bytes.items, .auto), - }, - ) catch bun.outOfMemory(); - } else { - manager.log.addWarningFmt( - null, - logger.Loc.Empty, - manager.allocator, - fmt, - .{ - @errorName(err), - extract.name.slice(), - extract.resolution.fmt(manager.lockfile.buffers.string_bytes.items, .auto), - }, - ) catch bun.outOfMemory(); - } - if (manager.subcommand != .remove) { - for (manager.update_requests) |*request| { - if (strings.eql(request.name, extract.name.slice())) { - request.failed = true; - manager.options.do.save_lockfile = false; - manager.options.do.save_yarn_lock = false; - manager.options.do.install_packages = false; - } - } - } - - continue; - }; - - const response = metadata.response; - - if (response.status_code > 399) { - if (@TypeOf(callbacks.onPackageDownloadError) != void) { - const err = switch (response.status_code) { - 400 => error.TarballHTTP400, - 401 => error.TarballHTTP401, - 402 => error.TarballHTTP402, - 403 => error.TarballHTTP403, - 404 => error.TarballHTTP404, - 405...499 => error.TarballHTTP4xx, - else => error.TarballHTTP5xx, - }; - const package_id = manager.lockfile.buffers.resolutions.items[extract.dependency_id]; - - callbacks.onPackageDownloadError( - extract_ctx, - package_id, - extract.name.slice(), - &extract.resolution, - err, - task.url_buf, - ); - continue; - } - - if (manager.isNetworkTaskRequired(task.task_id)) { - manager.log.addErrorFmt( - null, - logger.Loc.Empty, - manager.allocator, - "GET {s} - {d}", - 
.{ - metadata.url, - response.status_code, - }, - ) catch bun.outOfMemory(); - } else { - manager.log.addWarningFmt( - null, - logger.Loc.Empty, - manager.allocator, - "GET {s} - {d}", - .{ - metadata.url, - response.status_code, - }, - ) catch bun.outOfMemory(); - } - if (manager.subcommand != .remove) { - for (manager.update_requests) |*request| { - if (strings.eql(request.name, extract.name.slice())) { - request.failed = true; - manager.options.do.save_lockfile = false; - manager.options.do.save_yarn_lock = false; - manager.options.do.install_packages = false; - } - } - } - - continue; - } - - if (log_level.isVerbose()) { - Output.prettyError(" ", .{}); - Output.printElapsed(@as(f64, @floatCast(@as(f64, @floatFromInt(task.unsafe_http_client.elapsed)) / std.time.ns_per_ms))); - Output.prettyError(" Downloaded {s} tarball\n", .{extract.name.slice()}); - Output.flush(); - } - - if (log_level.showProgress()) { - if (!has_updated_this_run) { - manager.setNodeName(manager.downloads_node.?, extract.name.slice(), ProgressStrings.extract_emoji, true); - has_updated_this_run = true; - } - } - - manager.task_batch.push(ThreadPool.Batch.from(manager.enqueueExtractNPMPackage(extract, task))); - }, - else => unreachable, - } - } - - var resolve_tasks_batch = manager.resolve_tasks.popBatch(); - var resolve_tasks_iter = resolve_tasks_batch.iterator(); - while (resolve_tasks_iter.next()) |task| { - if (comptime Environment.allow_assert) bun.assert(manager.pendingTaskCount() > 0); - defer manager.preallocated_resolve_tasks.put(task); - _ = manager.decrementPendingTasks(); - - if (task.log.msgs.items.len > 0) { - try task.log.print(Output.errorWriter()); - if (task.log.errors > 0) { - manager.any_failed_to_install = true; - } - task.log.deinit(); - } - - switch (task.tag) { - .package_manifest => { - defer manager.preallocated_network_tasks.put(task.request.package_manifest.network); - if (task.status == .fail) { - const name = task.request.package_manifest.name; - const err = task.err orelse error.Failed; - - if (@TypeOf(callbacks.onPackageManifestError) != void) { - callbacks.onPackageManifestError( - extract_ctx, - name.slice(), - err, - task.request.package_manifest.network.url_buf, - ); - } else { - manager.log.addErrorFmt( - null, - logger.Loc.Empty, - manager.allocator, - "{s} parsing package manifest for {s}", - .{ - @errorName(err), - name.slice(), - }, - ) catch bun.outOfMemory(); - } - - continue; - } - const manifest = &task.data.package_manifest; - - try manager.manifests.insert(manifest.pkg.name.hash, manifest); - - if (@hasField(@TypeOf(callbacks), "manifests_only") and callbacks.manifests_only) { - continue; - } - - const dependency_list_entry = manager.task_queue.getEntry(task.id).?; - const dependency_list = dependency_list_entry.value_ptr.*; - dependency_list_entry.value_ptr.* = .{}; - - try manager.processDependencyList(dependency_list, ExtractCompletionContext, extract_ctx, callbacks, install_peer); - - if (log_level.showProgress()) { - if (!has_updated_this_run) { - manager.setNodeName(manager.downloads_node.?, manifest.name(), ProgressStrings.download_emoji, true); - has_updated_this_run = true; - } - } - }, - .extract, .local_tarball => { - defer { - switch (task.tag) { - .extract => manager.preallocated_network_tasks.put(task.request.extract.network), - else => {}, - } - } - - const tarball = switch (task.tag) { - .extract => &task.request.extract.tarball, - .local_tarball => &task.request.local_tarball.tarball, - else => unreachable, - }; - const dependency_id = 
tarball.dependency_id; - var package_id = manager.lockfile.buffers.resolutions.items[dependency_id]; - const alias = tarball.name.slice(); - const resolution = &tarball.resolution; - - if (task.status == .fail) { - const err = task.err orelse error.TarballFailedToExtract; - - if (@TypeOf(callbacks.onPackageDownloadError) != void) { - callbacks.onPackageDownloadError( - extract_ctx, - package_id, - alias, - resolution, - err, - switch (task.tag) { - .extract => task.request.extract.network.url_buf, - .local_tarball => task.request.local_tarball.tarball.url.slice(), - else => unreachable, - }, - ); - } else { - manager.log.addErrorFmt( - null, - logger.Loc.Empty, - manager.allocator, - "{s} extracting tarball from {s}", - .{ - @errorName(err), - alias, - }, - ) catch bun.outOfMemory(); - } - continue; - } - - manager.extracted_count += 1; - bun.Analytics.Features.extracted_packages += 1; - - if (comptime @TypeOf(callbacks.onExtract) != void and ExtractCompletionContext == *PackageInstaller) { - extract_ctx.fixCachedLockfilePackageSlices(); - callbacks.onExtract( - extract_ctx, - dependency_id, - &task.data.extract, - log_level, - ); - } else if (manager.processExtractedTarballPackage(&package_id, dependency_id, resolution, &task.data.extract, log_level)) |pkg| handle_pkg: { - // In the middle of an install, you could end up needing to download the github tarball for a dependency - // We need to make sure we resolve the dependencies first before calling the onExtract callback - // TODO: move this into a separate function - var any_root = false; - var dependency_list_entry = manager.task_queue.getEntry(task.id) orelse break :handle_pkg; - var dependency_list = dependency_list_entry.value_ptr.*; - dependency_list_entry.value_ptr.* = .{}; - - defer { - dependency_list.deinit(manager.allocator); - if (comptime @TypeOf(callbacks) != void and @TypeOf(callbacks.onResolve) != void) { - if (any_root) { - callbacks.onResolve(extract_ctx); - } - } - } - - for (dependency_list.items) |dep| { - switch (dep) { - .dependency, .root_dependency => |id| { - var version = &manager.lockfile.buffers.dependencies.items[id].version; - switch (version.tag) { - .git => { - version.value.git.package_name = pkg.name; - }, - .github => { - version.value.github.package_name = pkg.name; - }, - .tarball => { - version.value.tarball.package_name = pkg.name; - }, - - // `else` is reachable if this package is from `overrides`. Version in `lockfile.buffer.dependencies` - // will still have the original. - else => {}, - } - try manager.processDependencyListItem(dep, &any_root, install_peer); - }, - else => { - // if it's a node_module folder to install, handle that after we process all the dependencies within the onExtract callback.
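The loop above back-fills the package name that a git, GitHub, or tarball dependency only learns once its source has been extracted; the `else` arm deliberately leaves other tags (such as a version substituted via `overrides`) untouched. The same back-fill reduced to a sketch, with simplified stand-in types for the lockfile structures:

```zig
// Simplified stand-ins for the lockfile's dependency-version types.
const Tag = enum { git, github, tarball, npm, dist_tag };
const Version = struct {
    tag: Tag,
    package_name: []const u8 = "",
};

/// Copy the freshly resolved name into version forms that have no name
/// until their source is fetched; other tags (e.g. a version substituted
/// via `overrides`) keep their original data.
fn backfillPackageName(version: *Version, resolved_name: []const u8) void {
    switch (version.tag) {
        .git, .github, .tarball => version.package_name = resolved_name,
        else => {},
    }
}
```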
- dependency_list_entry.value_ptr.append(manager.allocator, dep) catch unreachable; - }, - } - } - } else if (manager.task_queue.getEntry(Task.Id.forManifest( - manager.lockfile.str(&manager.lockfile.packages.items(.name)[package_id]), - ))) |dependency_list_entry| { - // Peer dependencies do not initiate any downloads of their own, thus need to be resolved here instead - const dependency_list = dependency_list_entry.value_ptr.*; - dependency_list_entry.value_ptr.* = .{}; - - try manager.processDependencyList(dependency_list, void, {}, {}, install_peer); - } - - manager.setPreinstallState(package_id, manager.lockfile, .done); - - if (comptime @TypeOf(callbacks.onExtract) != void and ExtractCompletionContext != *PackageInstaller) { - // handled *PackageInstaller above - callbacks.onExtract(extract_ctx, dependency_id, &task.data.extract, log_level); - } - - if (log_level.showProgress()) { - if (!has_updated_this_run) { - manager.setNodeName(manager.downloads_node.?, alias, ProgressStrings.extract_emoji, true); - has_updated_this_run = true; - } - } - }, - .git_clone => { - const clone = &task.request.git_clone; - const repo_fd = task.data.git_clone; - const name = clone.name.slice(); - const url = clone.url.slice(); - - manager.git_repositories.put(manager.allocator, task.id, repo_fd) catch unreachable; - - if (task.status == .fail) { - const err = task.err orelse error.Failed; - - if (@TypeOf(callbacks.onPackageManifestError) != void) { - callbacks.onPackageManifestError( - extract_ctx, - name, - err, - url, - ); - } else if (log_level != .silent) { - manager.log.addErrorFmt( - null, - logger.Loc.Empty, - manager.allocator, - "{s} cloning repository for {s}", - .{ - @errorName(err), - name, - }, - ) catch bun.outOfMemory(); - } - continue; - } - - if (comptime @TypeOf(callbacks.onExtract) != void and ExtractCompletionContext == *PackageInstaller) { - // Installing! - // this dependency might be something other than a git dependency! only need the name and - // behavior, use the resolution from the task. - const dep_id = clone.dep_id; - const dep = manager.lockfile.buffers.dependencies.items[dep_id]; - const dep_name = dep.name.slice(manager.lockfile.buffers.string_bytes.items); - - const git = clone.res.value.git; - const committish = git.committish.slice(manager.lockfile.buffers.string_bytes.items); - const repo = git.repo.slice(manager.lockfile.buffers.string_bytes.items); - - const resolved = try Repository.findCommit( - manager.allocator, - manager.env, - manager.log, - task.data.git_clone.stdDir(), - dep_name, - committish, - task.id, - ); - - const checkout_id = Task.Id.forGitCheckout(repo, resolved); - - if (manager.hasCreatedNetworkTask(checkout_id, dep.behavior.isRequired())) continue; - - manager.task_batch.push(ThreadPool.Batch.from(manager.enqueueGitCheckout( - checkout_id, - repo_fd, - dep_id, - dep_name, - clone.res, - resolved, - null, - ))); - } else { - // Resolving! 
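Before the resolving branch below, a note on the installing branch just above: the clone result is converted straight into a checkout task keyed by `Task.Id.forGitCheckout(repo, resolved)`, and `hasCreatedNetworkTask` drops the job if that key has already been seen. A sketch of such (repo, commit) keying, with a hypothetical hash-based id rather than Bun's actual `Task.Id` scheme:

```zig
const std = @import("std");

/// Hypothetical stand-in for Task.Id.forGitCheckout: one stable id per
/// (repo, resolved commit) pair, so a second request for the same
/// checkout can be detected and skipped.
fn checkoutTaskId(repo: []const u8, resolved: []const u8) u64 {
    var hasher = std.hash.Wyhash.init(0);
    hasher.update(repo);
    hasher.update(&[_]u8{0}); // separator so ("ab","c") != ("a","bc")
    hasher.update(resolved);
    return hasher.final();
}
```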
- const dependency_list_entry = manager.task_queue.getEntry(task.id).?; - const dependency_list = dependency_list_entry.value_ptr.*; - dependency_list_entry.value_ptr.* = .{}; - - try manager.processDependencyList(dependency_list, ExtractCompletionContext, extract_ctx, callbacks, install_peer); - } - - if (log_level.showProgress()) { - if (!has_updated_this_run) { - manager.setNodeName(manager.downloads_node.?, name, ProgressStrings.download_emoji, true); - has_updated_this_run = true; - } - } - }, - .git_checkout => { - const git_checkout = &task.request.git_checkout; - const alias = &git_checkout.name; - const resolution = &git_checkout.resolution; - var package_id: PackageID = invalid_package_id; - - if (task.status == .fail) { - const err = task.err orelse error.Failed; - - manager.log.addErrorFmt( - null, - logger.Loc.Empty, - manager.allocator, - "{s} checking out repository for {s}", - .{ - @errorName(err), - alias.slice(), - }, - ) catch bun.outOfMemory(); - - continue; - } - - if (comptime @TypeOf(callbacks.onExtract) != void and ExtractCompletionContext == *PackageInstaller) { - // We've populated the cache, package already exists in memory. Call the package installer callback - // and don't enqueue dependencies - - // TODO(dylan-conway) most likely don't need to call this now that the package isn't appended, but - // keeping just in case for now - extract_ctx.fixCachedLockfilePackageSlices(); - - callbacks.onExtract( - extract_ctx, - git_checkout.dependency_id, - &task.data.git_checkout, - log_level, - ); - } else if (manager.processExtractedTarballPackage( - &package_id, - git_checkout.dependency_id, - resolution, - &task.data.git_checkout, - log_level, - )) |pkg| handle_pkg: { - var any_root = false; - var dependency_list_entry = manager.task_queue.getEntry(task.id) orelse break :handle_pkg; - var dependency_list = dependency_list_entry.value_ptr.*; - dependency_list_entry.value_ptr.* = .{}; - - defer { - dependency_list.deinit(manager.allocator); - if (comptime @TypeOf(callbacks) != void and @TypeOf(callbacks.onResolve) != void) { - if (any_root) { - callbacks.onResolve(extract_ctx); - } - } - } - - for (dependency_list.items) |dep| { - switch (dep) { - .dependency, .root_dependency => |id| { - var repo = &manager.lockfile.buffers.dependencies.items[id].version.value.git; - repo.resolved = pkg.resolution.value.git.resolved; - repo.package_name = pkg.name; - try manager.processDependencyListItem(dep, &any_root, install_peer); - }, - else => { - // if it's a node_module folder to install, handle that after we process all the dependencies within the onExtract callback. 
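The git-checkout path above repeats the shape used for tarballs: park the dependency list, drain it, and let a `defer` both free the list and fire `onResolve` exactly once if any root-level dependency was resolved. That pattern in isolation, assuming a hypothetical `processItem` in place of `processDependencyListItem`:

```zig
const std = @import("std");

/// Hypothetical stand-in for processDependencyListItem; returns true
/// when the item was a root-level dependency.
fn processItem(item: u32) bool {
    return item == 0;
}

fn drainParkedList(
    allocator: std.mem.Allocator,
    list: *std.ArrayListUnmanaged(u32),
    on_resolve: ?*const fn () void,
) void {
    var any_root = false;
    // The defer guarantees the list is freed and onResolve fires at most
    // once, on every exit path out of the loop.
    defer {
        list.deinit(allocator);
        if (on_resolve) |cb| {
            if (any_root) cb();
        }
    }
    for (list.items) |item| {
        if (processItem(item)) any_root = true;
    }
}
```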
- dependency_list_entry.value_ptr.append(manager.allocator, dep) catch unreachable; - }, - } - } - - if (comptime @TypeOf(callbacks.onExtract) != void) { - callbacks.onExtract( - extract_ctx, - git_checkout.dependency_id, - &task.data.git_checkout, - log_level, - ); - } - } - - if (log_level.showProgress()) { - if (!has_updated_this_run) { - manager.setNodeName(manager.downloads_node.?, alias.slice(), ProgressStrings.download_emoji, true); - has_updated_this_run = true; - } - } - }, - } - } - } - - pub const ProgressStrings = struct { - pub const download_no_emoji_ = "Resolving"; - const download_no_emoji: string = download_no_emoji_ ++ "\n"; - const download_with_emoji: string = download_emoji ++ download_no_emoji_; - pub const download_emoji: string = " 🔍 "; - - pub const extract_no_emoji_ = "Resolving & extracting"; - const extract_no_emoji: string = extract_no_emoji_ ++ "\n"; - const extract_with_emoji: string = extract_emoji ++ extract_no_emoji_; - pub const extract_emoji: string = " 🚚 "; - - pub const install_no_emoji_ = "Installing"; - const install_no_emoji: string = install_no_emoji_ ++ "\n"; - const install_with_emoji: string = install_emoji ++ install_no_emoji_; - pub const install_emoji: string = " 📦 "; - - pub const save_no_emoji_ = "Saving lockfile"; - const save_no_emoji: string = save_no_emoji_; - const save_with_emoji: string = save_emoji ++ save_no_emoji_; - pub const save_emoji: string = " 🔒 "; - - pub const script_no_emoji_ = "Running script"; - const script_no_emoji: string = script_no_emoji_ ++ "\n"; - const script_with_emoji: string = script_emoji ++ script_no_emoji_; - pub const script_emoji: string = " ⚙️ "; - - pub inline fn download() string { - return if (Output.isEmojiEnabled()) download_with_emoji else download_no_emoji; - } - - pub inline fn save() string { - return if (Output.isEmojiEnabled()) save_with_emoji else save_no_emoji; - } - - pub inline fn extract() string { - return if (Output.isEmojiEnabled()) extract_with_emoji else extract_no_emoji; - } - - pub inline fn install() string { - return if (Output.isEmojiEnabled()) install_with_emoji else install_no_emoji; - } - - pub inline fn script() string { - return if (Output.isEmojiEnabled()) script_with_emoji else script_no_emoji; - } - }; - - // Corresponds to possible commands from the CLI. 
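A note on the `ProgressStrings` table that closes above: each stage keeps a comptime-concatenated emoji variant and a plain variant, and the accessors choose between them per call. The pattern in isolation, with the `Output.isEmojiEnabled()` check reduced to a parameter:

```zig
// Both variants are assembled at comptime; only the selection is a
// runtime branch. `emoji_enabled` stands in for Output.isEmojiEnabled().
const download_label = "Resolving";
const download_plain = download_label ++ "\n";
const download_with_emoji = " 🔍 " ++ download_label;

pub inline fn download(emoji_enabled: bool) []const u8 {
    return if (emoji_enabled) download_with_emoji else download_plain;
}
```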
- pub const Subcommand = enum { - install, - update, - pm, - add, - remove, - link, - unlink, - patch, - @"patch-commit", - outdated, - pack, - publish, - audit, - info, - - // bin, - // hash, - // @"hash-print", - // @"hash-string", - // cache, - // @"default-trusted", - // untrusted, - // trust, - // ls, - // migrate, - - pub fn canGloballyInstallPackages(this: Subcommand) bool { - return switch (this) { - .install, .update, .add => true, - else => false, - }; - } - - pub fn supportsWorkspaceFiltering(this: Subcommand) bool { - return switch (this) { - .outdated => true, - .install => true, - // .pack => true, - // .add => true, - else => false, - }; - } - - pub fn supportsJsonOutput(this: Subcommand) bool { - return switch (this) { - .audit, - .pm, - .info, - => true, - else => false, - }; - } - - // TODO: make all subcommands find root and chdir - pub fn shouldChdirToRoot(this: Subcommand) bool { - return switch (this) { - .link => false, - else => true, - }; - } - }; - - fn httpThreadOnInitError(err: HTTP.InitError, opts: HTTP.HTTPThread.InitOpts) noreturn { - switch (err) { - error.LoadCAFile => { - var normalizer: bun.path.PosixToWinNormalizer = .{}; - const normalized = normalizer.resolveZ(FileSystem.instance.top_level_dir, opts.abs_ca_file_name); - if (!bun.sys.existsZ(normalized)) { - Output.err("HTTPThread", "could not find CA file: '{s}'", .{opts.abs_ca_file_name}); - } else { - Output.err("HTTPThread", "invalid CA file: '{s}'", .{opts.abs_ca_file_name}); - } - }, - error.InvalidCAFile => { - Output.err("HTTPThread", "invalid CA file: '{s}'", .{opts.abs_ca_file_name}); - }, - error.InvalidCA => { - Output.err("HTTPThread", "the CA is invalid", .{}); - }, - error.FailedToOpenSocket => { - Output.errGeneric("failed to start HTTP client thread", .{}); - }, - } - Global.crash(); - } - - pub fn init( - ctx: Command.Context, - cli: CommandLineArguments, - subcommand: Subcommand, - ) !struct { *PackageManager, string } { - if (cli.global) { - var explicit_global_dir: string = ""; - if (ctx.install) |opts| { - explicit_global_dir = opts.global_dir orelse explicit_global_dir; - } - var global_dir = try Options.openGlobalDir(explicit_global_dir); - try global_dir.setAsCwd(); - } - - var fs = try Fs.FileSystem.init(null); - const top_level_dir_no_trailing_slash = strings.withoutTrailingSlash(fs.top_level_dir); - if (comptime Environment.isWindows) { - _ = Path.pathToPosixBuf(u8, top_level_dir_no_trailing_slash, &cwd_buf); - } else { - @memcpy(cwd_buf[0..top_level_dir_no_trailing_slash.len], top_level_dir_no_trailing_slash); - } - - var original_package_json_path_buf = std.ArrayListUnmanaged(u8).initCapacity(ctx.allocator, top_level_dir_no_trailing_slash.len + "/package.json".len + 1) catch bun.outOfMemory(); - original_package_json_path_buf.appendSliceAssumeCapacity(top_level_dir_no_trailing_slash); - original_package_json_path_buf.appendSliceAssumeCapacity(std.fs.path.sep_str ++ "package.json"); - original_package_json_path_buf.appendAssumeCapacity(0); - - var original_package_json_path: stringZ = original_package_json_path_buf.items[0 .. 
top_level_dir_no_trailing_slash.len + "/package.json".len :0]; - const original_cwd = strings.withoutSuffixComptime(original_package_json_path, std.fs.path.sep_str ++ "package.json"); - const original_cwd_clone = ctx.allocator.dupe(u8, original_cwd) catch bun.outOfMemory(); - - var workspace_names = Package.WorkspaceMap.init(ctx.allocator); - var workspace_package_json_cache: WorkspacePackageJSONCache = .{ - .map = .{}, - }; - - var workspace_name_hash: ?PackageNameHash = null; - var root_package_json_name_at_time_of_init: []const u8 = ""; - - // Step 1. Find the nearest package.json directory - // - // We will walk up from the cwd, trying to find the nearest package.json file. - const root_package_json_file = root_package_json_file: { - var this_cwd: string = original_cwd; - var created_package_json = false; - const child_json = child: { - // if we are only doing `bun install` (no args), then we can open as read_only - // in all other cases we will need to write new data later. - // this is relevant because it allows us to succeed an install if package.json - // is readable but not writable - // - // probably won't matter, as if package.json isn't writable, it's likely that - // the underlying directory and node_modules isn't either. - const need_write = subcommand != .install or cli.positionals.len > 1; - - while (true) { - var package_json_path_buf: bun.PathBuffer = undefined; - @memcpy(package_json_path_buf[0..this_cwd.len], this_cwd); - package_json_path_buf[this_cwd.len..package_json_path_buf.len][0.."/package.json".len].* = "/package.json".*; - package_json_path_buf[this_cwd.len + "/package.json".len] = 0; - const package_json_path = package_json_path_buf[0 .. this_cwd.len + "/package.json".len :0]; - - break :child std.fs.cwd().openFileZ( - package_json_path, - .{ .mode = if (need_write) .read_write else .read_only }, - ) catch |err| switch (err) { - error.FileNotFound => { - if (std.fs.path.dirname(this_cwd)) |parent| { - this_cwd = strings.withoutTrailingSlash(parent); - continue; - } else { - break; - } - }, - error.AccessDenied => { - Output.err("EACCES", "Permission denied while opening \"{s}\"", .{ - package_json_path, - }); - if (need_write) { - Output.note("package.json must be writable to add packages", .{}); - } else { - Output.note("package.json is missing read permissions, or is owned by another user", .{}); - } - Global.crash(); - }, - else => { - Output.err(err, "could not open \"{s}\"", .{ - package_json_path, - }); - return err; - }, - }; - } - - if (subcommand == .install) { - if (cli.positionals.len > 1) { - // this is `bun add `. - // - // create the package json instead of returning an error. this works around - // a zig bug where continuing control flow through a catch seems to - // cause a segfault the second time `PackageManager.init` is called after - // switching to the add command. - this_cwd = original_cwd; - created_package_json = true; - break :child try attemptToCreatePackageJSONAndOpen(); - } - } - return error.MissingPackageJSON; - }; - - bun.assertWithLocation(strings.eqlLong(original_package_json_path_buf.items[0..this_cwd.len], this_cwd, true), @src()); - original_package_json_path_buf.items.len = this_cwd.len; - original_package_json_path_buf.appendSliceAssumeCapacity(std.fs.path.sep_str ++ "package.json"); - original_package_json_path_buf.appendAssumeCapacity(0); - - original_package_json_path = original_package_json_path_buf.items[0 ..
this_cwd.len + "/package.json".len :0]; - const child_cwd = strings.withoutSuffixComptime(original_package_json_path, std.fs.path.sep_str ++ "package.json"); - - // Check if this is a workspace; if so, use root package - var found = false; - if (subcommand.shouldChdirToRoot()) { - if (!created_package_json) { - while (std.fs.path.dirname(this_cwd)) |parent| : (this_cwd = parent) { - const parent_without_trailing_slash = strings.withoutTrailingSlash(parent); - var parent_path_buf: bun.PathBuffer = undefined; - @memcpy(parent_path_buf[0..parent_without_trailing_slash.len], parent_without_trailing_slash); - parent_path_buf[parent_without_trailing_slash.len..parent_path_buf.len][0.."/package.json".len].* = "/package.json".*; - parent_path_buf[parent_without_trailing_slash.len + "/package.json".len] = 0; - - const json_file = std.fs.cwd().openFileZ( - parent_path_buf[0 .. parent_without_trailing_slash.len + "/package.json".len :0].ptr, - .{ .mode = .read_write }, - ) catch { - continue; - }; - defer if (!found) json_file.close(); - const json_stat_size = try json_file.getEndPos(); - const json_buf = try ctx.allocator.alloc(u8, json_stat_size + 64); - defer ctx.allocator.free(json_buf); - const json_len = try json_file.preadAll(json_buf, 0); - const json_path = try bun.getFdPath(.fromStdFile(json_file), &package_json_cwd_buf); - const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); - initializeStore(); - const json = try JSON.parsePackageJSONUTF8(&json_source, ctx.log, ctx.allocator); - if (subcommand == .pm) { - if (json.getStringCloned(ctx.allocator, "name") catch null) |name| { - root_package_json_name_at_time_of_init = name; - } - } - - if (json.asProperty("workspaces")) |prop| { - const json_array = switch (prop.expr.data) { - .e_array => |arr| arr, - .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) { - .e_array => |arr| arr, - else => break, - } else break, - else => break, - }; - var log = logger.Log.init(ctx.allocator); - defer log.deinit(); - _ = workspace_names.processNamesArray( - ctx.allocator, - &workspace_package_json_cache, - &log, - json_array, - &json_source, - prop.loc, - null, - ) catch break; - - for (workspace_names.keys(), workspace_names.values()) |path, entry| { - const child_path = if (std.fs.path.isAbsolute(path)) - child_cwd - else - bun.path.relativeNormalized(json_source.path.name.dir, child_cwd, .auto, true); - - const maybe_workspace_path = if (comptime Environment.isWindows) brk: { - @memcpy(parent_path_buf[0..child_path.len], child_path); - bun.path.dangerouslyConvertPathToPosixInPlace(u8, parent_path_buf[0..child_path.len]); - break :brk parent_path_buf[0..child_path.len]; - } else child_path; - - if (strings.eqlLong(maybe_workspace_path, path, true)) { - fs.top_level_dir = try bun.default_allocator.dupeZ(u8, parent); - found = true; - child_json.close(); - if (comptime Environment.isWindows) { - try json_file.seekTo(0); - } - workspace_name_hash = String.Builder.stringHash(entry.name); - break :root_package_json_file json_file; - } - } - - break; - } - } - } - } - - fs.top_level_dir = try bun.default_allocator.dupeZ(u8, child_cwd); - break :root_package_json_file child_json; - }; - - try bun.sys.chdir(fs.top_level_dir, fs.top_level_dir).unwrap(); - try BunArguments.loadConfig(ctx.allocator, cli.config, ctx, .InstallCommand); - bun.copy(u8, &cwd_buf, fs.top_level_dir); - cwd_buf[fs.top_level_dir.len] = 0; - fs.top_level_dir = cwd_buf[0..fs.top_level_dir.len :0]; - package_json_cwd = try 
bun.getFdPath(.fromStdFile(root_package_json_file), &package_json_cwd_buf); - - const entries_option = try fs.fs.readDirectory(fs.top_level_dir, null, 0, true); - - var env: *DotEnv.Loader = brk: { - const map = try ctx.allocator.create(DotEnv.Map); - map.* = DotEnv.Map.init(ctx.allocator); - - const loader = try ctx.allocator.create(DotEnv.Loader); - loader.* = DotEnv.Loader.init(map, ctx.allocator); - break :brk loader; - }; - - env.loadProcess(); - try env.load(entries_option.entries, &[_][]u8{}, .production, false); - - initializeStore(); - if (bun.getenvZ("XDG_CONFIG_HOME") orelse bun.getenvZ(bun.DotEnv.home_env)) |data_dir| { - var buf: bun.PathBuffer = undefined; - var parts = [_]string{ - "./.npmrc", - }; - - bun.ini.loadNpmrcConfig(ctx.allocator, ctx.install orelse brk: { - const install_ = ctx.allocator.create(Api.BunInstall) catch bun.outOfMemory(); - install_.* = std.mem.zeroes(Api.BunInstall); - ctx.install = install_; - break :brk install_; - }, env, true, &[_][:0]const u8{ Path.joinAbsStringBufZ( - data_dir, - &buf, - &parts, - .auto, - ), ".npmrc" }); - } else { - bun.ini.loadNpmrcConfig(ctx.allocator, ctx.install orelse brk: { - const install_ = ctx.allocator.create(Api.BunInstall) catch bun.outOfMemory(); - install_.* = std.mem.zeroes(Api.BunInstall); - ctx.install = install_; - break :brk install_; - }, env, true, &[_][:0]const u8{".npmrc"}); - } - const cpu_count = bun.getThreadCount(); - - const options = Options{ - .global = cli.global, - .max_concurrent_lifecycle_scripts = cli.concurrent_scripts orelse cpu_count * 2, - }; - - if (env.get("BUN_INSTALL_VERBOSE") != null) { - PackageManager.verbose_install = true; - } - - if (env.get("BUN_FEATURE_FLAG_FORCE_WAITER_THREAD") != null) { - bun.spawn.process.WaiterThread.setShouldUseWaiterThread(); - } - - if (PackageManager.verbose_install) { - Output.prettyErrorln("Cache Dir: {s}", .{options.cache_directory}); - Output.flush(); - } - - workspace_names.map.deinit(); - - PackageManager.allocatePackageManager(); - const manager = PackageManager.get(); - // var progress = Progress{}; - // var node = progress.start(name: []const u8, estimated_total_items: usize) - manager.* = PackageManager{ - .preallocated_network_tasks = .init(bun.default_allocator), - .preallocated_resolve_tasks = .init(bun.default_allocator), - .options = options, - .active_lifecycle_scripts = .{ - .context = manager, - }, - .network_task_fifo = NetworkQueue.init(), - .patch_task_fifo = PatchTaskFifo.init(), - .allocator = ctx.allocator, - .log = ctx.log, - .root_dir = entries_option.entries, - .env = env, - .cpu_count = cpu_count, - .thread_pool = ThreadPool.init(.{ - .max_threads = cpu_count, - }), - .resolve_tasks = .{}, - .lockfile = undefined, - .root_package_json_file = root_package_json_file, - // .progress - .event_loop = .{ - .mini = JSC.MiniEventLoop.init(bun.default_allocator), - }, - .original_package_json_path = original_package_json_path, - .workspace_package_json_cache = workspace_package_json_cache, - .workspace_name_hash = workspace_name_hash, - .subcommand = subcommand, - .root_package_json_name_at_time_of_init = root_package_json_name_at_time_of_init, - }; - manager.event_loop.loop().internal_loop_data.setParentEventLoop(bun.JSC.EventLoopHandle.init(&manager.event_loop)); - manager.lockfile = try ctx.allocator.create(Lockfile); - JSC.MiniEventLoop.global = &manager.event_loop.mini; - if (!manager.options.enable.cache) { - manager.options.enable.manifest_cache = false; - manager.options.enable.manifest_cache_control = false; - } - - if 
(env.get("BUN_MANIFEST_CACHE")) |manifest_cache| { - if (strings.eqlComptime(manifest_cache, "1")) { - manager.options.enable.manifest_cache = true; - manager.options.enable.manifest_cache_control = false; - } else if (strings.eqlComptime(manifest_cache, "2")) { - manager.options.enable.manifest_cache = true; - manager.options.enable.manifest_cache_control = true; - } else { - manager.options.enable.manifest_cache = false; - manager.options.enable.manifest_cache_control = false; - } - } - - try manager.options.load( - ctx.allocator, - ctx.log, - env, - cli, - ctx.install, - subcommand, - ); - - var ca: []stringZ = &.{}; - if (manager.options.ca.len > 0) { - ca = try manager.allocator.alloc(stringZ, manager.options.ca.len); - for (ca, manager.options.ca) |*z, s| { - z.* = try manager.allocator.dupeZ(u8, s); - } - } - - var abs_ca_file_name: stringZ = &.{}; - if (manager.options.ca_file_name.len > 0) { - // resolve with original cwd - if (std.fs.path.isAbsolute(manager.options.ca_file_name)) { - abs_ca_file_name = try manager.allocator.dupeZ(u8, manager.options.ca_file_name); - } else { - var path_buf: bun.PathBuffer = undefined; - abs_ca_file_name = try manager.allocator.dupeZ(u8, bun.path.joinAbsStringBuf( - original_cwd_clone, - &path_buf, - &.{manager.options.ca_file_name}, - .auto, - )); - } - } - - AsyncHTTP.max_simultaneous_requests.store(brk: { - if (cli.network_concurrency) |network_concurrency| { - break :brk @max(network_concurrency, 1); - } - - // If any HTTP proxy is set, use a diferent limit - if (env.has("http_proxy") or env.has("https_proxy") or env.has("HTTPS_PROXY") or env.has("HTTP_PROXY")) { - break :brk default_max_simultaneous_requests_for_bun_install_for_proxies; - } - - break :brk default_max_simultaneous_requests_for_bun_install; - }, .monotonic); - - HTTP.HTTPThread.init(&.{ - .ca = ca, - .abs_ca_file_name = abs_ca_file_name, - .onInitError = &httpThreadOnInitError, - }); - - manager.timestamp_for_manifest_cache_control = brk: { - if (comptime bun.Environment.allow_assert) { - if (env.get("BUN_CONFIG_MANIFEST_CACHE_CONTROL_TIMESTAMP")) |cache_control| { - if (std.fmt.parseInt(u32, cache_control, 10)) |int| { - break :brk int; - } else |_| {} - } - } - - break :brk @truncate(@as(u64, @intCast(@max(std.time.timestamp(), 0)))); - }; - return .{ - manager, - original_cwd_clone, - }; - } - - pub fn initWithRuntime( - log: *logger.Log, - bun_install: ?*Api.BunInstall, - allocator: std.mem.Allocator, - cli: CommandLineArguments, - env: *DotEnv.Loader, - ) *PackageManager { - init_with_runtime_once.call(.{ - log, - bun_install, - allocator, - cli, - env, - }); - return PackageManager.get(); - } - - var init_with_runtime_once = bun.once(initWithRuntimeOnce); - - pub fn initWithRuntimeOnce( - log: *logger.Log, - bun_install: ?*Api.BunInstall, - allocator: std.mem.Allocator, - cli: CommandLineArguments, - env: *DotEnv.Loader, - ) void { - if (env.get("BUN_INSTALL_VERBOSE") != null) { - PackageManager.verbose_install = true; - } - - const cpu_count = bun.getThreadCount(); - PackageManager.allocatePackageManager(); - const manager = PackageManager.get(); - var root_dir = Fs.FileSystem.instance.fs.readDirectory( - Fs.FileSystem.instance.top_level_dir, - null, - 0, - true, - ) catch |err| { - Output.err(err, "failed to read root directory: '{s}'", .{Fs.FileSystem.instance.top_level_dir}); - @panic("Failed to initialize package manager"); - }; - - // var progress = Progress{}; - // var node = progress.start(name: []const u8, estimated_total_items: usize) - const 
top_level_dir_no_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir); - var original_package_json_path = allocator.allocSentinel(u8, top_level_dir_no_trailing_slash.len + "/package.json".len, 0) catch bun.outOfMemory(); - @memcpy(original_package_json_path[0..top_level_dir_no_trailing_slash.len], top_level_dir_no_trailing_slash); - @memcpy(original_package_json_path[top_level_dir_no_trailing_slash.len..][0.."/package.json".len], "/package.json"); - - manager.* = PackageManager{ - .preallocated_network_tasks = .init(bun.default_allocator), - .preallocated_resolve_tasks = .init(bun.default_allocator), - .options = .{ - .max_concurrent_lifecycle_scripts = cli.concurrent_scripts orelse cpu_count * 2, - }, - .active_lifecycle_scripts = .{ - .context = manager, - }, - .network_task_fifo = NetworkQueue.init(), - .allocator = allocator, - .log = log, - .root_dir = root_dir.entries, - .env = env, - .cpu_count = cpu_count, - .thread_pool = ThreadPool.init(.{ - .max_threads = cpu_count, - }), - .lockfile = undefined, - .root_package_json_file = undefined, - .event_loop = .{ - .js = JSC.VirtualMachine.get().eventLoop(), - }, - .original_package_json_path = original_package_json_path[0..original_package_json_path.len :0], - .subcommand = .install, - }; - manager.lockfile = allocator.create(Lockfile) catch bun.outOfMemory(); - - if (Output.enable_ansi_colors_stderr) { - manager.progress = Progress{}; - manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; - manager.root_progress_node = manager.progress.start("", 0); - } else { - manager.options.log_level = .default_no_progress; - } - - if (!manager.options.enable.cache) { - manager.options.enable.manifest_cache = false; - manager.options.enable.manifest_cache_control = false; - } - - if (env.get("BUN_MANIFEST_CACHE")) |manifest_cache| { - if (strings.eqlComptime(manifest_cache, "1")) { - manager.options.enable.manifest_cache = true; - manager.options.enable.manifest_cache_control = false; - } else if (strings.eqlComptime(manifest_cache, "2")) { - manager.options.enable.manifest_cache = true; - manager.options.enable.manifest_cache_control = true; - } else { - manager.options.enable.manifest_cache = false; - manager.options.enable.manifest_cache_control = false; - } - } - - manager.options.load( - allocator, - log, - env, - cli, - bun_install, - .install, - ) catch |err| { - switch (err) { - error.OutOfMemory => bun.outOfMemory(), - } - }; - - manager.timestamp_for_manifest_cache_control = @as( - u32, - @truncate(@as( - u64, - @intCast(@max( - std.time.timestamp(), - 0, - )), - )), - // When using "bun install", we check for updates with a 300 second cache. 
- // When using bun, we only do staleness checks once per day - ) -| std.time.s_per_day; - - if (root_dir.entries.hasComptimeQuery("bun.lockb")) { - switch (manager.lockfile.loadFromCwd( - manager, - allocator, - log, - true, - )) { - .ok => |load| manager.lockfile = load.lockfile, - else => manager.lockfile.initEmpty(allocator), - } - } else { - manager.lockfile.initEmpty(allocator); - } - } - - fn attemptToCreatePackageJSONAndOpen() !std.fs.File { - const package_json_file = std.fs.cwd().createFileZ("package.json", .{ .read = true }) catch |err| { - Output.prettyErrorln("error: {s} create package.json", .{@errorName(err)}); - Global.crash(); - }; - - try package_json_file.pwriteAll("{\"dependencies\": {}}", 0); - - return package_json_file; - } - - fn attemptToCreatePackageJSON() !void { - var file = try attemptToCreatePackageJSONAndOpen(); - file.close(); - } - - // parse dependency of positional arg string (may include name@version for example) - // get the precise version from the lockfile (there may be multiple) - // copy the contents into a temp folder - pub fn patch(ctx: Command.Context) !void { - try updatePackageJSONAndInstallCatchError(ctx, .patch); - } - - pub fn patchCommit(ctx: Command.Context) !void { - try updatePackageJSONAndInstallCatchError(ctx, .@"patch-commit"); - } - - pub fn update(ctx: Command.Context) !void { - try updatePackageJSONAndInstallCatchError(ctx, .update); - } - - pub fn add(ctx: Command.Context) !void { - try updatePackageJSONAndInstallCatchError(ctx, .add); - } - - pub fn remove(ctx: Command.Context) !void { - try updatePackageJSONAndInstallCatchError(ctx, .remove); - } - - pub fn updatePackageJSONAndInstallCatchError( - ctx: Command.Context, - subcommand: Subcommand, - ) !void { - updatePackageJSONAndInstall(ctx, subcommand) catch |err| { - switch (err) { - error.InstallFailed, - error.InvalidPackageJSON, - => { - const log = &bun.CLI.Cli.log_; - log.print(bun.Output.errorWriter()) catch {}; - bun.Global.exit(1); - return; - }, - else => return err, - } - }; - } - - pub const CommandLineArguments = @import("./PackageManager/CommandLineArguments.zig"); - - pub fn link(ctx: Command.Context) !void { - const cli = try CommandLineArguments.parse(ctx.allocator, .link); - var manager, const original_cwd = PackageManager.init(ctx, cli, .link) catch |err| brk: { - if (err == error.MissingPackageJSON) { - try attemptToCreatePackageJSON(); - break :brk try PackageManager.init(ctx, cli, .link); - } - - return err; - }; - defer ctx.allocator.free(original_cwd); - - if (manager.options.shouldPrintCommandName()) { - Output.prettyln("bun link v" ++ Global.package_json_version_with_sha ++ "\n", .{}); - Output.flush(); - } - - if (manager.options.positionals.len == 1) { - // bun link - - var lockfile: Lockfile = undefined; - var name: string = ""; - var package = Lockfile.Package{}; - - // Step 1. 
parse the nearest package.json file - { - const package_json_source = &(bun.sys.File.toSource(manager.original_package_json_path, ctx.allocator, .{}).unwrap() catch |err| { - Output.errGeneric("failed to read \"{s}\" for linking: {s}", .{ manager.original_package_json_path, @errorName(err) }); - Global.crash(); - }); - lockfile.initEmpty(ctx.allocator); - - var resolver: void = {}; - try package.parse(&lockfile, manager, ctx.allocator, manager.log, package_json_source, void, &resolver, Features.folder); - name = lockfile.str(&package.name); - if (name.len == 0) { - if (manager.options.log_level != .silent) { - Output.prettyErrorln("error: package.json missing \"name\" in \"{s}\"", .{package_json_source.path.text}); - } - Global.crash(); - } else if (!strings.isNPMPackageName(name)) { - if (manager.options.log_level != .silent) { - Output.prettyErrorln("error: invalid package.json name \"{s}\" in \"{any}\"", .{ - name, - package_json_source.path.text, - }); - } - Global.crash(); - } - } - - // Step 2. Setup the global directory - var node_modules: std.fs.Dir = brk: { - Bin.Linker.ensureUmask(); - var explicit_global_dir: string = ""; - if (ctx.install) |install_| { - explicit_global_dir = install_.global_dir orelse explicit_global_dir; - } - manager.global_dir = try Options.openGlobalDir(explicit_global_dir); - - try manager.setupGlobalDir(ctx); - - break :brk manager.global_dir.?.makeOpenPath("node_modules", .{}) catch |err| { - if (manager.options.log_level != .silent) - Output.prettyErrorln("error: failed to create node_modules in global dir due to error {s}", .{@errorName(err)}); - Global.crash(); - }; - }; - - // Step 3a. symlink to the node_modules folder - { - // delete it if it exists - node_modules.deleteTree(name) catch {}; - - // create scope if specified - if (name[0] == '@') { - if (strings.indexOfChar(name, '/')) |i| { - node_modules.makeDir(name[0..i]) catch |err| brk: { - if (err == error.PathAlreadyExists) break :brk; - if (manager.options.log_level != .silent) - Output.prettyErrorln("error: failed to create scope in global dir due to error {s}", .{@errorName(err)}); - Global.crash(); - }; - } - } - - if (comptime Environment.isWindows) { - // create the junction - const top_level = Fs.FileSystem.instance.topLevelDirWithoutTrailingSlash(); - var link_path_buf: bun.PathBuffer = undefined; - @memcpy( - link_path_buf[0..top_level.len], - top_level, - ); - link_path_buf[top_level.len] = 0; - const link_path = link_path_buf[0..top_level.len :0]; - const global_path = try manager.globalLinkDirPath(); - const dest_path = Path.joinAbsStringZ(global_path, &.{name}, .windows); - switch (bun.sys.sys_uv.symlinkUV( - link_path, - dest_path, - bun.windows.libuv.UV_FS_SYMLINK_JUNCTION, - )) { - .err => |err| { - Output.prettyErrorln("error: failed to create junction to node_modules in global dir due to error {}", .{err}); - Global.crash(); - }, - .result => {}, - } - } else { - // create the symlink - node_modules.symLink(Fs.FileSystem.instance.topLevelDirWithoutTrailingSlash(), name, .{ .is_directory = true }) catch |err| { - if (manager.options.log_level != .silent) - Output.prettyErrorln("error: failed to create symlink to node_modules in global dir due to error {s}", .{@errorName(err)}); - Global.crash(); - }; - } - } - - // Step 3b. 
Link any global bins - if (package.bin.tag != .none) { - var link_target_buf: bun.PathBuffer = undefined; - var link_dest_buf: bun.PathBuffer = undefined; - var link_rel_buf: bun.PathBuffer = undefined; - var node_modules_path_buf: bun.PathBuffer = undefined; - var bin_linker = Bin.Linker{ - .bin = package.bin, - .node_modules = .fromStdDir(node_modules), - .node_modules_path = bun.getFdPath(.fromStdDir(node_modules), &node_modules_path_buf) catch |err| { - if (manager.options.log_level != .silent) { - Output.err(err, "failed to link binary", .{}); - } - Global.crash(); - }, - .global_bin_path = manager.options.bin_path, - - // .destination_dir_subpath = destination_dir_subpath, - .package_name = strings.StringOrTinyString.init(name), - .string_buf = lockfile.buffers.string_bytes.items, - .extern_string_buf = lockfile.buffers.extern_strings.items, - .seen = null, - .abs_target_buf = &link_target_buf, - .abs_dest_buf = &link_dest_buf, - .rel_buf = &link_rel_buf, - }; - bin_linker.link(true); - - if (bin_linker.err) |err| { - if (manager.options.log_level != .silent) - Output.prettyErrorln("error: failed to link bin due to error {s}", .{@errorName(err)}); - Global.crash(); - } - } - - Output.flush(); - - // Done - if (manager.options.log_level != .silent) - Output.prettyln( - \\Success! Registered "{[name]s}" - \\ - \\To use {[name]s} in a project, run: - \\ bun link {[name]s} - \\ - \\Or add it in dependencies in your package.json file: - \\ "{[name]s}": "link:{[name]s}" - \\ - , - .{ - .name = name, - }, - ); - - Output.flush(); - Global.exit(0); - } else { - // bun link lodash - try manager.updatePackageJSONAndInstallWithManager(ctx, original_cwd); - } - } - - pub fn unlink(ctx: Command.Context) !void { - const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .unlink); - var manager, const original_cwd = PackageManager.init(ctx, cli, .unlink) catch |err| brk: { - if (err == error.MissingPackageJSON) { - try attemptToCreatePackageJSON(); - break :brk try PackageManager.init(ctx, cli, .unlink); - } - - return err; - }; - defer ctx.allocator.free(original_cwd); - - if (manager.options.shouldPrintCommandName()) { - Output.prettyln("bun unlink v" ++ Global.package_json_version_with_sha ++ "\n", .{}); - Output.flush(); - } - - if (manager.options.positionals.len == 1) { - // bun unlink - - var lockfile: Lockfile = undefined; - var name: string = ""; - var package = Lockfile.Package{}; - - // Step 1. 
parse the nearest package.json file - { - const package_json_source = &(bun.sys.File.toSource(manager.original_package_json_path, ctx.allocator, .{}).unwrap() catch |err| { - Output.errGeneric("failed to read \"{s}\" for unlinking: {s}", .{ manager.original_package_json_path, @errorName(err) }); - Global.crash(); - }); - lockfile.initEmpty(ctx.allocator); - - var resolver: void = {}; - try package.parse(&lockfile, manager, ctx.allocator, manager.log, package_json_source, void, &resolver, Features.folder); - name = lockfile.str(&package.name); - if (name.len == 0) { - if (manager.options.log_level != .silent) { - Output.prettyErrorln("error: package.json missing \"name\" in \"{s}\"", .{package_json_source.path.text}); - } - Global.crash(); - } else if (!strings.isNPMPackageName(name)) { - if (manager.options.log_level != .silent) { - Output.prettyErrorln("error: invalid package.json name \"{s}\" in \"{s}\"", .{ - name, - package_json_source.path.text, - }); - } - Global.crash(); - } - } - - switch (Syscall.lstat(Path.joinAbsStringZ(try manager.globalLinkDirPath(), &.{name}, .auto))) { - .result => |stat| { - if (!bun.S.ISLNK(@intCast(stat.mode))) { - Output.prettyErrorln("success: package \"{s}\" is not globally linked, so there's nothing to do.", .{name}); - Global.exit(0); - } - }, - .err => { - Output.prettyErrorln("success: package \"{s}\" is not globally linked, so there's nothing to do.", .{name}); - Global.exit(0); - }, - } - - // Step 2. Setup the global directory - var node_modules: std.fs.Dir = brk: { - Bin.Linker.ensureUmask(); - var explicit_global_dir: string = ""; - if (ctx.install) |install_| { - explicit_global_dir = install_.global_dir orelse explicit_global_dir; - } - manager.global_dir = try Options.openGlobalDir(explicit_global_dir); - - try manager.setupGlobalDir(ctx); - - break :brk manager.global_dir.?.makeOpenPath("node_modules", .{}) catch |err| { - if (manager.options.log_level != .silent) - Output.prettyErrorln("error: failed to create node_modules in global dir due to error {s}", .{@errorName(err)}); - Global.crash(); - }; - }; - - // Step 3b. 
Link any global bins - if (package.bin.tag != .none) { - var link_target_buf: bun.PathBuffer = undefined; - var link_dest_buf: bun.PathBuffer = undefined; - var link_rel_buf: bun.PathBuffer = undefined; - var node_modules_path_buf: bun.PathBuffer = undefined; - - var bin_linker = Bin.Linker{ - .bin = package.bin, - .node_modules = .fromStdDir(node_modules), - .node_modules_path = bun.getFdPath(.fromStdDir(node_modules), &node_modules_path_buf) catch |err| { - if (manager.options.log_level != .silent) { - Output.err(err, "failed to link binary", .{}); - } - Global.crash(); - }, - .global_bin_path = manager.options.bin_path, - .package_name = strings.StringOrTinyString.init(name), - .string_buf = lockfile.buffers.string_bytes.items, - .extern_string_buf = lockfile.buffers.extern_strings.items, - .seen = null, - .abs_target_buf = &link_target_buf, - .abs_dest_buf = &link_dest_buf, - .rel_buf = &link_rel_buf, - }; - bin_linker.unlink(true); - } - - // delete it if it exists - node_modules.deleteTree(name) catch |err| { - if (manager.options.log_level != .silent) - Output.prettyErrorln("error: failed to unlink package in global dir due to error {s}", .{@errorName(err)}); - Global.crash(); - }; - - Output.prettyln("success: unlinked package \"{s}\"", .{name}); - Global.exit(0); - } else { - Output.prettyln("error: bun unlink {{packageName}} not implemented yet", .{}); - Global.crash(); - } - } - - pub const PackageJSONEditor = @import("./PackageManager/PackageJSONEditor.zig"); - - pub const UpdateRequest = struct { - name: string = "", - name_hash: PackageNameHash = 0, - version: Dependency.Version = .{}, - version_buf: []const u8 = "", - package_id: PackageID = invalid_package_id, - is_aliased: bool = false, - failed: bool = false, - // This must be cloned to handle when the AST store resets - e_string: ?*JSAst.E.String = null, - - pub const Array = std.ArrayListUnmanaged(UpdateRequest); - - pub inline fn matches(this: PackageManager.UpdateRequest, dependency: Dependency, string_buf: []const u8) bool { - return this.name_hash == if (this.name.len == 0) - String.Builder.stringHash(dependency.version.literal.slice(string_buf)) - else - dependency.name_hash; - } - - /// It is incorrect to call this function before Lockfile.cleanWithLogger() because - /// resolved_name should be populated if possible. - /// - /// `this` needs to be a pointer! If `this` is a copy and the name returned from - /// resolved_name is inlined, you will return a pointer to stack memory. 
- pub fn getResolvedName(this: *const UpdateRequest, lockfile: *const Lockfile) string { - return if (this.is_aliased) - this.name - else if (this.package_id == invalid_package_id) - this.version.literal.slice(this.version_buf) - else - lockfile.packages.items(.name)[this.package_id].slice(this.version_buf); - } - - pub fn fromJS(globalThis: *JSC.JSGlobalObject, input: JSC.JSValue) bun.JSError!JSC.JSValue { - var arena = std.heap.ArenaAllocator.init(bun.default_allocator); - defer arena.deinit(); - var stack = std.heap.stackFallback(1024, arena.allocator()); - const allocator = stack.get(); - var all_positionals = std.ArrayList([]const u8).init(allocator); - - var log = logger.Log.init(allocator); - - if (input.isString()) { - var input_str = input.toSliceCloneWithAllocator( - globalThis, - allocator, - ) orelse return .zero; - if (input_str.len > 0) - try all_positionals.append(input_str.slice()); - } else if (input.isArray()) { - var iter = input.arrayIterator(globalThis); - while (iter.next()) |item| { - const slice = item.toSliceCloneWithAllocator(globalThis, allocator) orelse return .zero; - if (globalThis.hasException()) return .zero; - if (slice.len == 0) continue; - try all_positionals.append(slice.slice()); - } - if (globalThis.hasException()) return .zero; - } else { - return .js_undefined; - } - - if (all_positionals.items.len == 0) { - return .js_undefined; - } - - var array = Array{}; - - const update_requests = parseWithError(allocator, null, &log, all_positionals.items, &array, .add, false) catch { - return globalThis.throwValue(try log.toJS(globalThis, bun.default_allocator, "Failed to parse dependencies")); - }; - if (update_requests.len == 0) return .js_undefined; - - if (log.msgs.items.len > 0) { - return globalThis.throwValue(try log.toJS(globalThis, bun.default_allocator, "Failed to parse dependencies")); - } - - if (update_requests[0].failed) { - return globalThis.throw("Failed to parse dependencies", .{}); - } - - var object = JSC.JSValue.createEmptyObject(globalThis, 2); - var name_str = bun.String.init(update_requests[0].name); - object.put(globalThis, "name", name_str.transferToJS(globalThis)); - object.put(globalThis, "version", try update_requests[0].version.toJS(update_requests[0].version_buf, globalThis)); - return object; - } - - pub fn parse( - allocator: std.mem.Allocator, - pm: ?*PackageManager, - log: *logger.Log, - positionals: []const string, - update_requests: *Array, - subcommand: Subcommand, - ) []UpdateRequest { - return parseWithError(allocator, pm, log, positionals, update_requests, subcommand, true) catch Global.crash(); - } - - fn parseWithError( - allocator: std.mem.Allocator, - pm: ?*PackageManager, - log: *logger.Log, - positionals: []const string, - update_requests: *Array, - subcommand: Subcommand, - fatal: bool, - ) ![]UpdateRequest { - // first one is always either: - // add - // remove - outer: for (positionals) |positional| { - var input: []u8 = bun.default_allocator.dupe(u8, std.mem.trim(u8, positional, " \n\r\t")) catch bun.outOfMemory(); - { - var temp: [2048]u8 = undefined; - const len = std.mem.replace(u8, input, "\\\\", "/", &temp); - bun.path.platformToPosixInPlace(u8, &temp); - const input2 = temp[0 .. 
input.len - len]; - @memcpy(input[0..input2.len], input2); - input.len = input2.len; - } - switch (subcommand) { - .link, .unlink => if (!strings.hasPrefixComptime(input, "link:")) { - input = std.fmt.allocPrint(allocator, "{0s}@link:{0s}", .{input}) catch unreachable; - }, - else => {}, - } - - var value = input; - var alias: ?string = null; - if (!Dependency.isTarball(input) and strings.isNPMPackageName(input)) { - alias = input; - value = input[input.len..]; - } else if (input.len > 1) { - if (strings.indexOfChar(input[1..], '@')) |at| { - const name = input[0 .. at + 1]; - if (strings.isNPMPackageName(name)) { - alias = name; - value = input[at + 2 ..]; - } - } - } - - const placeholder = String.from("@@@"); - var version = Dependency.parseWithOptionalTag( - allocator, - if (alias) |name| String.init(input, name) else placeholder, - if (alias) |name| String.Builder.stringHash(name) else null, - value, - null, - &SlicedString.init(input, value), - log, - pm, - ) orelse { - if (fatal) { - Output.errGeneric("unrecognised dependency format: {s}", .{ - positional, - }); - } else { - log.addErrorFmt(null, logger.Loc.Empty, allocator, "unrecognised dependency format: {s}", .{ - positional, - }) catch bun.outOfMemory(); - } - - return error.UnrecognizedDependencyFormat; - }; - if (alias != null and version.tag == .git) { - if (Dependency.parseWithOptionalTag( - allocator, - placeholder, - null, - input, - null, - &SlicedString.init(input, input), - log, - pm, - )) |ver| { - alias = null; - version = ver; - } - } - if (switch (version.tag) { - .dist_tag => version.value.dist_tag.name.eql(placeholder, input, input), - .npm => version.value.npm.name.eql(placeholder, input, input), - else => false, - }) { - if (fatal) { - Output.errGeneric("unrecognised dependency format: {s}", .{ - positional, - }); - } else { - log.addErrorFmt(null, logger.Loc.Empty, allocator, "unrecognised dependency format: {s}", .{ - positional, - }) catch bun.outOfMemory(); - } - - return error.UnrecognizedDependencyFormat; - } - - var request = UpdateRequest{ - .version = version, - .version_buf = input, - }; - if (alias) |name| { - request.is_aliased = true; - request.name = allocator.dupe(u8, name) catch unreachable; - request.name_hash = String.Builder.stringHash(name); - } else if (version.tag == .github and version.value.github.committish.isEmpty()) { - request.name_hash = String.Builder.stringHash(version.literal.slice(input)); - } else { - request.name_hash = String.Builder.stringHash(version.literal.slice(input)); - } - - for (update_requests.items) |*prev| { - if (prev.name_hash == request.name_hash and request.name.len == prev.name.len) continue :outer; - } - update_requests.append(allocator, request) catch bun.outOfMemory(); - } - - return update_requests.items; - } - }; - - fn updatePackageJSONAndInstall( - ctx: Command.Context, - subcommand: Subcommand, - ) !void { - var cli = switch (subcommand) { - inline else => |cmd| try PackageManager.CommandLineArguments.parse(ctx.allocator, cmd), - }; - - // The way this works: - // 1. Run the bundler on source files - // 2. Rewrite positional arguments to act identically to the developer - // typing in the dependency names - // 3. 
Run the install command - if (cli.analyze) { - const Analyzer = struct { - ctx: Command.Context, - cli: *PackageManager.CommandLineArguments, - subcommand: Subcommand, - pub fn onAnalyze( - this: *@This(), - result: *bun.bundle_v2.BundleV2.DependenciesScanner.Result, - ) anyerror!void { - // TODO: add separate argument that makes it so positionals[1..] is not done and instead the positionals are passed - var positionals = bun.default_allocator.alloc(string, result.dependencies.keys().len + 1) catch bun.outOfMemory(); - positionals[0] = "add"; - bun.copy(string, positionals[1..], result.dependencies.keys()); - this.cli.positionals = positionals; - - try updatePackageJSONAndInstallAndCLI(this.ctx, this.subcommand, this.cli.*); - - Global.exit(0); - } - }; - var analyzer = Analyzer{ - .ctx = ctx, - .cli = &cli, - .subcommand = subcommand, - }; - var fetcher = bun.bundle_v2.BundleV2.DependenciesScanner{ - .ctx = &analyzer, - .entry_points = cli.positionals[1..], - .onFetch = @ptrCast(&Analyzer.onAnalyze), - }; - - // This runs the bundler. - try bun.CLI.BuildCommand.exec(bun.CLI.Command.get(), &fetcher); - return; - } - - return updatePackageJSONAndInstallAndCLI(ctx, subcommand, cli); - } - - fn updatePackageJSONAndInstallAndCLI( - ctx: Command.Context, - subcommand: Subcommand, - cli: CommandLineArguments, - ) !void { - var manager, const original_cwd = init(ctx, cli, subcommand) catch |err| brk: { - if (err == error.MissingPackageJSON) { - switch (subcommand) { - .update => { - Output.prettyErrorln("No package.json, so nothing to update", .{}); - Global.crash(); - }, - .remove => { - Output.prettyErrorln("No package.json, so nothing to remove", .{}); - Global.crash(); - }, - .patch, .@"patch-commit" => { - Output.prettyErrorln("No package.json, so nothing to patch", .{}); - Global.crash(); - }, - else => { - try attemptToCreatePackageJSON(); - break :brk try PackageManager.init(ctx, cli, subcommand); - }, - } - } - - return err; - }; - defer ctx.allocator.free(original_cwd); - - if (manager.options.shouldPrintCommandName()) { - Output.prettyln("bun {s} v" ++ Global.package_json_version_with_sha ++ "\n", .{@tagName(subcommand)}); - Output.flush(); - } - - // When you run `bun add -g ` or `bun install -g ` and the global bin dir is not in $PATH - // We should tell the user to add it to $PATH so they don't get confused. - if (subcommand.canGloballyInstallPackages()) { - if (manager.options.global and manager.options.log_level != .silent) { - manager.track_installed_bin = .{ .pending = {} }; - } - } - - try manager.updatePackageJSONAndInstallWithManager(ctx, original_cwd); - - if (manager.options.patch_features == .patch) { - try manager.preparePatch(); - } - - if (manager.any_failed_to_install) { - Global.exit(1); - } - - // Check if we need to print a warning like: - // - // > warn: To run "vite", add the global bin folder to $PATH: - // > - // > fish_add_path "/private/tmp/test" - // - if (subcommand.canGloballyInstallPackages()) { - if (manager.options.global) { - if (manager.options.bin_path.len > 0 and manager.track_installed_bin == .basename) { - const needs_to_print = if (bun.getenvZ("PATH")) |PATH| - // This is not perfect - // - // If you already have a different binary of the same - // name, it will not detect that case. - // - // The problem is there are too many edgecases with filesystem paths. - // - // We want to veer towards false negative than false - // positive. It would be annoying if this message - // appears unnecessarily. 
It's kind of okay if it doesn't appear - // when it should. - // - // If you set BUN_INSTALL_BIN to "/tmp/woo" on macOS and - // we just checked for "/tmp/woo" in $PATH, it would - // incorrectly print a warning because /tmp/ on macOS is - // aliased to /private/tmp/ - // - // Another scenario is case-insensitive filesystems. If you - // have a binary called "esbuild" in /tmp/TeST and you - // install esbuild, it will not detect that case if we naively - // just checked for "esbuild" in $PATH where "$PATH" is /tmp/test - bun.which( - &package_json_cwd_buf, - PATH, - bun.fs.FileSystem.instance.top_level_dir, - manager.track_installed_bin.basename, - ) == null - else - true; - - if (needs_to_print) { - const MoreInstructions = struct { - shell: bun.CLI.ShellCompletions.Shell = .unknown, - folder: []const u8, - - // Convert "/Users/Jarred Sumner" => "/Users/Jarred\ Sumner" - const ShellPathFormatter = struct { - folder: []const u8, - - pub fn format(instructions: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - var remaining = instructions.folder; - while (bun.strings.indexOfChar(remaining, ' ')) |space| { - try writer.print( - "{}", - .{bun.fmt.fmtPath(u8, remaining[0..space], .{ - .escape_backslashes = true, - .path_sep = if (Environment.isWindows) .windows else .posix, - })}, - ); - try writer.writeAll("\\ "); - remaining = remaining[@min(space + 1, remaining.len)..]; - } - - try writer.print( - "{}", - .{bun.fmt.fmtPath(u8, remaining, .{ - .escape_backslashes = true, - .path_sep = if (Environment.isWindows) .windows else .posix, - })}, - ); - } - }; - - pub fn format(instructions: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - const path = ShellPathFormatter{ .folder = instructions.folder }; - switch (instructions.shell) { - .unknown => { - // Unfortunately really difficult to do this in one line on PowerShell. - try writer.print("{}", .{path}); - }, - .bash => { - try writer.print("export PATH=\"{}:$PATH\"", .{path}); - }, - .zsh => { - try writer.print("export PATH=\"{}:$PATH\"", .{path}); - }, - .fish => { - // Regular quotes will do here. 
- try writer.print("fish_add_path {}", .{bun.fmt.quote(instructions.folder)}); - }, - .pwsh => { - try writer.print("$env:PATH += \";{}\"", .{path}); - }, - } - } - }; - - Output.prettyError("\n", .{}); - - Output.warn( - \\To run {}, add the global bin folder to $PATH: - \\ - \\{} - \\ - , - .{ - bun.fmt.quote(manager.track_installed_bin.basename), - MoreInstructions{ .shell = bun.CLI.ShellCompletions.Shell.fromEnv([]const u8, bun.getenvZ("SHELL") orelse ""), .folder = manager.options.bin_path }, - }, - ); - Output.flush(); - } - } - } - } - } - - fn updatePackageJSONAndInstallWithManager( - manager: *PackageManager, - ctx: Command.Context, - original_cwd: string, - ) !void { - var update_requests = UpdateRequest.Array.initCapacity(manager.allocator, 64) catch bun.outOfMemory(); - defer update_requests.deinit(manager.allocator); - - if (manager.options.positionals.len <= 1) { - switch (manager.subcommand) { - .add => { - Output.errGeneric("no package specified to add", .{}); - Output.flush(); - PackageManager.CommandLineArguments.printHelp(.add); - - Global.exit(0); - }, - .remove => { - Output.errGeneric("no package specified to remove", .{}); - Output.flush(); - PackageManager.CommandLineArguments.printHelp(.remove); - - Global.exit(0); - }, - else => {}, - } - } - - return try updatePackageJSONAndInstallWithManagerWithUpdatesAndUpdateRequests( - manager, - ctx, - original_cwd, - manager.options.positionals[1..], - &update_requests, - ); - } - - fn updatePackageJSONAndInstallWithManagerWithUpdatesAndUpdateRequests( - manager: *PackageManager, - ctx: Command.Context, - original_cwd: string, - positionals: []const string, - update_requests: *UpdateRequest.Array, - ) !void { - var updates: []UpdateRequest = if (manager.subcommand == .@"patch-commit" or manager.subcommand == .patch) - &[_]UpdateRequest{} - else - UpdateRequest.parse(ctx.allocator, manager, ctx.log, positionals, update_requests, manager.subcommand); - try manager.updatePackageJSONAndInstallWithManagerWithUpdates( - ctx, - &updates, - manager.subcommand, - original_cwd, - ); - } - fn updatePackageJSONAndInstallWithManagerWithUpdates( - manager: *PackageManager, - ctx: Command.Context, - updates: *[]UpdateRequest, - subcommand: Subcommand, - original_cwd: string, - ) !void { - const log_level = manager.options.log_level; - if (manager.log.errors > 0) { - if (log_level != .silent) { - manager.log.print(Output.errorWriter()) catch {}; - } - Global.crash(); - } - - var current_package_json = switch (manager.workspace_package_json_cache.getWithPath( - manager.allocator, - manager.log, - manager.original_package_json_path, - .{ - .guess_indentation = true, - }, - )) { - .parse_err => |err| { - manager.log.print(Output.errorWriter()) catch {}; - Output.errGeneric("failed to parse package.json \"{s}\": {s}", .{ - manager.original_package_json_path, - @errorName(err), - }); - Global.crash(); - }, - .read_err => |err| { - Output.errGeneric("failed to read package.json \"{s}\": {s}", .{ - manager.original_package_json_path, - @errorName(err), - }); - Global.crash(); - }, - .entry => |entry| entry, - }; - const current_package_json_indent = current_package_json.indentation; - - // If there originally was a newline at the end of their package.json, preserve it - // so that we don't cause unnecessary diffs in their git history. 
- // https://github.com/oven-sh/bun/issues/1375 - const preserve_trailing_newline_at_eof_for_package_json = current_package_json.source.contents.len > 0 and - current_package_json.source.contents[current_package_json.source.contents.len - 1] == '\n'; - - if (subcommand == .remove) { - if (current_package_json.root.data != .e_object) { - Output.errGeneric("package.json is not an Object {{}}, so there's nothing to {s}!", .{@tagName(subcommand)}); - Global.crash(); - } else if (current_package_json.root.data.e_object.properties.len == 0) { - Output.errGeneric("package.json is empty {{}}, so there's nothing to {s}!", .{@tagName(subcommand)}); - Global.crash(); - } else if (current_package_json.root.asProperty("devDependencies") == null and - current_package_json.root.asProperty("dependencies") == null and - current_package_json.root.asProperty("optionalDependencies") == null and - current_package_json.root.asProperty("peerDependencies") == null) - { - Output.prettyErrorln("package.json doesn't have dependencies, there's nothing to {s}!", .{@tagName(subcommand)}); - Global.exit(0); - } - } - - const dependency_list = if (manager.options.update.development) - "devDependencies" - else if (manager.options.update.optional) - "optionalDependencies" - else if (manager.options.update.peer) - "peerDependencies" - else - "dependencies"; - var any_changes = false; - - var not_in_workspace_root: ?PatchCommitResult = null; - switch (subcommand) { - .remove => { - // if we're removing, they don't have to specify where it is installed in the dependencies list - // they can even put it multiple times and we will just remove all of them - for (updates.*) |request| { - inline for ([_]string{ "dependencies", "devDependencies", "optionalDependencies", "peerDependencies" }) |list| { - if (current_package_json.root.asProperty(list)) |query| { - if (query.expr.data == .e_object) { - var dependencies = query.expr.data.e_object.properties.slice(); - var i: usize = 0; - var new_len = dependencies.len; - while (i < dependencies.len) : (i += 1) { - if (dependencies[i].key.?.data == .e_string) { - if (dependencies[i].key.?.data.e_string.eql(string, request.name)) { - if (new_len > 1) { - dependencies[i] = dependencies[new_len - 1]; - new_len -= 1; - } else { - new_len = 0; - } - - any_changes = true; - } - } - } - - const changed = new_len != dependencies.len; - if (changed) { - query.expr.data.e_object.properties.len = @as(u32, @truncate(new_len)); - - // If the dependencies list is now empty, remove it from the package.json - // since we used swapRemove, we have to re-sort it - if (query.expr.data.e_object.properties.len == 0) { - var arraylist = current_package_json.root.data.e_object.properties.list(); - _ = arraylist.swapRemove(query.i); - current_package_json.root.data.e_object.properties.update(arraylist); - current_package_json.root.data.e_object.packageJSONSort(); - } else { - var obj = query.expr.data.e_object; - obj.alphabetizeProperties(); - } - } - } - } - } - } - }, - - .link, .add, .update => { - // `bun update ` is basically the same as `bun add `, except - // update will not exceed the current dependency range if it exists - - if (updates.len != 0) { - try PackageJSONEditor.edit( - manager, - updates, - &current_package_json.root, - dependency_list, - .{ - .exact_versions = manager.options.enable.exact_versions, - .before_install = true, - }, - ); - } else if (subcommand == .update) { - try PackageJSONEditor.editUpdateNoArgs( - manager, - &current_package_json.root, - .{ - .exact_versions = true, - .before_install =
true, - }, - ); - } - }, - else => { - if (manager.options.patch_features == .commit) { - var pathbuf: bun.PathBuffer = undefined; - if (try manager.doPatchCommit(&pathbuf, log_level)) |stuff| { - // we're inside a workspace package, we need to edit the - // root json, not the `current_package_json` - if (stuff.not_in_workspace_root) { - not_in_workspace_root = stuff; - } else { - try PackageJSONEditor.editPatchedDependencies( - manager, - &current_package_json.root, - stuff.patch_key, - stuff.patchfile_path, - ); - } - } - } - }, - } - - manager.to_update = subcommand == .update; - - { - // In case it's a pointer to self. Avoid RLS. - const cloned = updates.*; - manager.update_requests = cloned; - } - - var buffer_writer = JSPrinter.BufferWriter.init(manager.allocator); - try buffer_writer.buffer.list.ensureTotalCapacity(manager.allocator, current_package_json.source.contents.len + 1); - buffer_writer.append_newline = preserve_trailing_newline_at_eof_for_package_json; - var package_json_writer = JSPrinter.BufferPrinter.init(buffer_writer); - - var written = JSPrinter.printJSON( - @TypeOf(&package_json_writer), - &package_json_writer, - current_package_json.root, - &current_package_json.source, - .{ - .indent = current_package_json_indent, - .mangled_props = null, - }, - ) catch |err| { - Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); - Global.crash(); - }; - - // There are various tradeoffs with how we commit updates when you run `bun add` or `bun remove` - // The one we chose here is to effectively pretend a human did: - // 1. "bun add react@latest" - // 2. open lockfile, find what react resolved to - // 3. open package.json - // 4. replace "react" : "latest" with "react" : "^16.2.0" - // 5. save package.json - // The Smarter™ approach is you resolve ahead of time and write to disk once! - // But, turns out that's slower in any case where more than one package has to be resolved (most of the time!) - // Concurrent network requests are faster than doing one and then waiting until the next batch - var new_package_json_source = try manager.allocator.dupe(u8, package_json_writer.ctx.writtenWithoutTrailingZero()); - current_package_json.source.contents = new_package_json_source; - - // may or may not be the package json we are editing - const top_level_dir_without_trailing_slash = strings.withoutTrailingSlash(FileSystem.instance.top_level_dir); - - var root_package_json_path_buf: bun.PathBuffer = undefined; - const root_package_json_source, const root_package_json_path = brk: { - @memcpy(root_package_json_path_buf[0..top_level_dir_without_trailing_slash.len], top_level_dir_without_trailing_slash); - @memcpy(root_package_json_path_buf[top_level_dir_without_trailing_slash.len..][0.."/package.json".len], "/package.json"); - const root_package_json_path = root_package_json_path_buf[0 .. top_level_dir_without_trailing_slash.len + "/package.json".len]; - root_package_json_path_buf[root_package_json_path.len] = 0; - - // The lifetime of this pointer is only valid until the next call to `getWithPath`, which can happen after this scope.
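That lifetime rule is why the printed package.json is duped above before any further cache lookups. The same ownership pattern in miniature, using a hypothetical one-slot cache (std-only; not Bun's `workspace_package_json_cache`):

```zig
const std = @import("std");

const OneSlotCache = struct {
    buf: std.ArrayList(u8),

    // Returned slice aliases `buf`; the next call invalidates it.
    fn getWithPath(self: *OneSlotCache, contents: []const u8) ![]const u8 {
        self.buf.clearRetainingCapacity();
        try self.buf.appendSlice(contents);
        return self.buf.items;
    }
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    var cache = OneSlotCache{ .buf = std.ArrayList(u8).init(allocator) };
    defer cache.buf.deinit();

    const transient = try cache.getWithPath("{\"name\":\"a\"}");
    // Copy immediately, mirroring the `manager.allocator.dupe` above:
    const owned = try allocator.dupe(u8, transient);
    defer allocator.free(owned);

    _ = try cache.getWithPath("{\"name\":\"b\"}"); // `transient` is now stale
    std.debug.print("{s}\n", .{owned}); // the dupe is still valid
}
```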
- // https://github.com/oven-sh/bun/issues/12288 - const root_package_json = switch (manager.workspace_package_json_cache.getWithPath( - manager.allocator, - manager.log, - root_package_json_path, - .{ - .guess_indentation = true, - }, - )) { - .parse_err => |err| { - manager.log.print(Output.errorWriter()) catch {}; - Output.errGeneric("failed to parse package.json \"{s}\": {s}", .{ - root_package_json_path, - @errorName(err), - }); - Global.crash(); - }, - .read_err => |err| { - Output.errGeneric("failed to read package.json \"{s}\": {s}", .{ - manager.original_package_json_path, - @errorName(err), - }); - Global.crash(); - }, - .entry => |entry| entry, - }; - - if (not_in_workspace_root) |stuff| { - try PackageJSONEditor.editPatchedDependencies( - manager, - &root_package_json.root, - stuff.patch_key, - stuff.patchfile_path, - ); - var buffer_writer2 = JSPrinter.BufferWriter.init(manager.allocator); - try buffer_writer2.buffer.list.ensureTotalCapacity(manager.allocator, root_package_json.source.contents.len + 1); - buffer_writer2.append_newline = preserve_trailing_newline_at_eof_for_package_json; - var package_json_writer2 = JSPrinter.BufferPrinter.init(buffer_writer2); - - _ = JSPrinter.printJSON( - @TypeOf(&package_json_writer2), - &package_json_writer2, - root_package_json.root, - &root_package_json.source, - .{ - .indent = root_package_json.indentation, - .mangled_props = null, - }, - ) catch |err| { - Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); - Global.crash(); - }; - root_package_json.source.contents = try manager.allocator.dupe(u8, package_json_writer2.ctx.writtenWithoutTrailingZero()); - } - - break :brk .{ root_package_json.source.contents, root_package_json_path_buf[0..root_package_json_path.len :0] }; - }; - - try manager.installWithManager(ctx, root_package_json_source, original_cwd); - - if (subcommand == .update or subcommand == .add or subcommand == .link) { - for (updates.*) |request| { - if (request.failed) { - Global.exit(1); - return; - } - } - - const source = &logger.Source.initPathString("package.json", new_package_json_source); - - // Now, we _re_ parse our in-memory edited package.json - // so we can commit the version we changed from the lockfile - var new_package_json = JSON.parsePackageJSONUTF8(source, manager.log, manager.allocator) catch |err| { - Output.prettyErrorln("package.json failed to parse due to error {s}", .{@errorName(err)}); - Global.crash(); - }; - - if (updates.len == 0) { - try PackageJSONEditor.editUpdateNoArgs( - manager, - &new_package_json, - .{ - .exact_versions = manager.options.enable.exact_versions, - }, - ); - } else { - try PackageJSONEditor.edit( - manager, - updates, - &new_package_json, - dependency_list, - .{ - .exact_versions = manager.options.enable.exact_versions, - .add_trusted_dependencies = manager.options.do.trust_dependencies_from_args, - }, - ); - } - var buffer_writer_two = JSPrinter.BufferWriter.init(manager.allocator); - try buffer_writer_two.buffer.list.ensureTotalCapacity(manager.allocator, source.contents.len + 1); - buffer_writer_two.append_newline = - preserve_trailing_newline_at_eof_for_package_json; - var package_json_writer_two = JSPrinter.BufferPrinter.init(buffer_writer_two); - - written = JSPrinter.printJSON( - @TypeOf(&package_json_writer_two), - &package_json_writer_two, - new_package_json, - source, - .{ - .indent = current_package_json_indent, - .mangled_props = null, - }, - ) catch |err| { - Output.prettyErrorln("package.json failed to write due to error 
{s}", .{@errorName(err)}); - Global.crash(); - }; - - new_package_json_source = try manager.allocator.dupe(u8, package_json_writer_two.ctx.writtenWithoutTrailingZero()); - } - - if (manager.options.do.write_package_json) { - const source, const path = if (manager.options.patch_features == .commit) - .{ root_package_json_source, root_package_json_path } - else - .{ new_package_json_source, manager.original_package_json_path }; - - // Now that we've run the install step - // We can save our in-memory package.json to disk - const workspace_package_json_file = (try bun.sys.File.openat( - .cwd(), - path, - bun.O.RDWR, - 0, - ).unwrap()).handle.stdFile(); - - try workspace_package_json_file.pwriteAll(source, 0); - std.posix.ftruncate(workspace_package_json_file.handle, source.len) catch {}; - workspace_package_json_file.close(); - - if (subcommand == .remove) { - if (!any_changes) { - Global.exit(0); - return; - } - - var cwd = std.fs.cwd(); - // This is not exactly correct - var node_modules_buf: bun.PathBuffer = undefined; - bun.copy(u8, &node_modules_buf, "node_modules" ++ std.fs.path.sep_str); - const offset_buf = node_modules_buf["node_modules/".len..]; - const name_hashes = manager.lockfile.packages.items(.name_hash); - for (updates.*) |request| { - // If the package no longer exists in the updated lockfile, delete the directory - // This is not thorough. - // It does not handle nested dependencies - // This is a quick & dirty cleanup intended for when deleting top-level dependencies - if (std.mem.indexOfScalar(PackageNameHash, name_hashes, String.Builder.stringHash(request.name)) == null) { - bun.copy(u8, offset_buf, request.name); - cwd.deleteTree(node_modules_buf[0 .. "node_modules/".len + request.name.len]) catch {}; - } - } - - // This is where we clean dangling symlinks - // This could be slow if there are a lot of symlinks - if (bun.openDir(cwd, manager.options.bin_path)) |node_modules_bin_handle| { - var node_modules_bin: std.fs.Dir = node_modules_bin_handle; - defer node_modules_bin.close(); - var iter: std.fs.Dir.Iterator = node_modules_bin.iterate(); - iterator: while (iter.next() catch null) |entry| { - switch (entry.kind) { - std.fs.Dir.Entry.Kind.sym_link => { - - // any symlinks which we are unable to open are assumed to be dangling - // note that using access won't work here, because access doesn't resolve symlinks - bun.copy(u8, &node_modules_buf, entry.name); - node_modules_buf[entry.name.len] = 0; - const buf: [:0]u8 = node_modules_buf[0..entry.name.len :0]; - - var file = node_modules_bin.openFileZ(buf, .{ .mode = .read_only }) catch { - node_modules_bin.deleteFileZ(buf) catch {}; - continue :iterator; - }; - - file.close(); - }, - else => {}, - } - } - } else |err| { - if (err != error.ENOENT) { - Output.err(err, "while reading node_modules/.bin", .{}); - Global.crash(); - } - } - } - } - } - - fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator(.node_modules), ids: []const IdPair) !?Lockfile.Tree.Iterator(.node_modules).Next { - while (iterator.next(null)) |node_modules| { - for (ids) |id| { - _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, id[0]) orelse continue; - return node_modules; - } - } - return null; - } - - fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator(.node_modules), dependency_id: DependencyID) !?Lockfile.Tree.Iterator(.node_modules).Next { - while (iterator.next(null)) |node_modules| { - _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, dependency_id) orelse continue; - return 
node_modules; - } - - return null; - } - - const IdPair = struct { DependencyID, PackageID }; - - fn pkgInfoForNameAndVersion( - lockfile: *Lockfile, - iterator: *Lockfile.Tree.Iterator(.node_modules), - pkg_maybe_version_to_patch: []const u8, - name: []const u8, - version: ?[]const u8, - ) struct { PackageID, Lockfile.Tree.Iterator(.node_modules).Next } { - var sfb = std.heap.stackFallback(@sizeOf(IdPair) * 4, lockfile.allocator); - var pairs = std.ArrayList(IdPair).initCapacity(sfb.get(), 8) catch bun.outOfMemory(); - defer pairs.deinit(); - - const name_hash = String.Builder.stringHash(name); - - const strbuf = lockfile.buffers.string_bytes.items; - - var buf: [1024]u8 = undefined; - const dependencies = lockfile.buffers.dependencies.items; - - for (dependencies, 0..) |dep, dep_id| { - if (dep.name_hash != name_hash) continue; - const pkg_id = lockfile.buffers.resolutions.items[dep_id]; - if (pkg_id == invalid_package_id) continue; - const pkg = lockfile.packages.get(pkg_id); - if (version) |v| { - const label = std.fmt.bufPrint(buf[0..], "{}", .{pkg.resolution.fmt(strbuf, .posix)}) catch @panic("Resolution name too long"); - if (std.mem.eql(u8, label, v)) { - pairs.append(.{ @intCast(dep_id), pkg_id }) catch bun.outOfMemory(); - } - } else { - pairs.append(.{ @intCast(dep_id), pkg_id }) catch bun.outOfMemory(); - } - } - - if (pairs.items.len == 0) { - Output.prettyErrorln("\nerror: package {s} not found", .{pkg_maybe_version_to_patch}); - Global.crash(); - return; - } - - // user supplied a version e.g. `is-even@1.0.0` - if (version != null) { - if (pairs.items.len == 1) { - const dep_id, const pkg_id = pairs.items[0]; - const folder = (try nodeModulesFolderForDependencyID(iterator, dep_id)) orelse { - Output.prettyError( - "error: could not find the folder for {s} in node_modules\n", - .{pkg_maybe_version_to_patch}, - ); - Global.crash(); - }; - return .{ - pkg_id, - folder, - }; - } - - // we found multiple dependents of the supplied pkg + version - // the final package in the node_modules might be hoisted - // so we are going to try looking for each dep id in node_modules - _, const pkg_id = pairs.items[0]; - const folder = (try nodeModulesFolderForDependencyIDs(iterator, pairs.items)) orelse { - Output.prettyError( - "error: could not find the folder for {s} in node_modules\n", - .{pkg_maybe_version_to_patch}, - ); - Global.crash(); - }; - - return .{ - pkg_id, - folder, - }; - } - - // Otherwise the user did not supply a version, just the pkg name - - // Only one match, let's use it - if (pairs.items.len == 1) { - const dep_id, const pkg_id = pairs.items[0]; - const folder = (try nodeModulesFolderForDependencyID(iterator, dep_id)) orelse { - Output.prettyError( - "error: could not find the folder for {s} in node_modules\n", - .{pkg_maybe_version_to_patch}, - ); - Global.crash(); - }; - return .{ - pkg_id, - folder, - }; - } - - // Otherwise we have multiple matches - // - // There are two cases: - // a) the multiple matches are all the same underlying package (this happens because there could be multiple dependents of the same package) - // b) the matches are actually different packages, we'll prompt the user to select which one - - _, const pkg_id = pairs.items[0]; - const count = count: { - var count: u32 = 0; - for (pairs.items) |pair| { - if (pair[1] == pkg_id) count += 1; - } - break :count count; - }; - - // Disambiguate case a) from b) - if (count == pairs.items.len) { - // It may be hoisted, so we'll try the first one that matches - const folder = (try 
nodeModulesFolderForDependencyIDs(iterator, pairs.items)) orelse { - Output.prettyError( - "error: could not find the folder for {s} in node_modules\n", - .{pkg_maybe_version_to_patch}, - ); - Global.crash(); - }; - return .{ - pkg_id, - folder, - }; - } - - Output.prettyErrorln( - "\nerror: Found multiple versions of {s}, please specify a precise version from the following list:\n", - .{name}, - ); - var i: usize = 0; - while (i < pairs.items.len) : (i += 1) { - _, const pkgid = pairs.items[i]; - if (pkgid == invalid_package_id) - continue; - - const pkg = lockfile.packages.get(pkgid); - - Output.prettyError(" {s}@{}\n", .{ pkg.name.slice(strbuf), pkg.resolution.fmt(strbuf, .posix) }); - - if (i + 1 < pairs.items.len) { - for (pairs.items[i + 1 ..]) |*p| { - if (p[1] == pkgid) { - p[1] = invalid_package_id; - } - } - } - } - Global.crash(); - } - - const PatchArgKind = enum { - path, - name_and_version, - - pub fn fromArg(argument: []const u8) PatchArgKind { - if (bun.strings.containsComptime(argument, "node_modules/")) return .path; - if (bun.Environment.isWindows and bun.strings.hasPrefix(argument, "node_modules\\")) return .path; - return .name_and_version; - } - }; - - fn pathArgumentRelativeToRootWorkspacePackage(manager: *PackageManager, lockfile: *const Lockfile, argument: []const u8) ?[]const u8 { - const workspace_package_id = manager.root_package_id.get(lockfile, manager.workspace_name_hash); - if (workspace_package_id == 0) return null; - const workspace_res = lockfile.packages.items(.resolution)[workspace_package_id]; - const rel_path: []const u8 = workspace_res.value.workspace.slice(lockfile.buffers.string_bytes.items); - return bun.default_allocator.dupe(u8, bun.path.join(&[_][]const u8{ rel_path, argument }, .posix)) catch bun.outOfMemory(); - } - - /// 1. Arg is either: - /// - name and possibly version (e.g. "is-even" or "is-even@1.0.0") - /// - path to package in node_modules - /// 2. Calculate cache dir for package - /// 3. Overwrite the input package with the one from the cache (cuz it could be hardlinked) - /// 4. 
Print to user - fn preparePatch(manager: *PackageManager) !void { - const strbuf = manager.lockfile.buffers.string_bytes.items; - var argument = manager.options.positionals[1]; - - const arg_kind: PatchArgKind = PatchArgKind.fromArg(argument); - - var folder_path_buf: bun.PathBuffer = undefined; - var iterator = Lockfile.Tree.Iterator(.node_modules).init(manager.lockfile); - var resolution_buf: [1024]u8 = undefined; - - var win_normalizer: if (bun.Environment.isWindows) bun.PathBuffer else struct {} = undefined; - - const not_in_workspace_root = manager.root_package_id.get(manager.lockfile, manager.workspace_name_hash) != 0; - var free_argument = false; - argument = if (arg_kind == .path and - not_in_workspace_root and - (!bun.path.Platform.posix.isAbsolute(argument) or (bun.Environment.isWindows and !bun.path.Platform.windows.isAbsolute(argument)))) - brk: { - if (pathArgumentRelativeToRootWorkspacePackage(manager, manager.lockfile, argument)) |rel_path| { - free_argument = true; - break :brk rel_path; - } - break :brk argument; - } else argument; - defer if (free_argument) manager.allocator.free(argument); - - const cache_dir: std.fs.Dir, const cache_dir_subpath: []const u8, const module_folder: []const u8, const pkg_name: []const u8 = switch (arg_kind) { - .path => brk: { - var lockfile = manager.lockfile; - - const package_json_source: *const logger.Source = &src: { - const package_json_path = bun.path.joinZ(&[_][]const u8{ argument, "package.json" }, .auto); - - switch (bun.sys.File.toSource(package_json_path, manager.allocator, .{})) { - .result => |s| break :src s, - .err => |e| { - Output.err(e, "failed to read {s}", .{bun.fmt.quote(package_json_path)}); - Global.crash(); - }, - } - }; - defer manager.allocator.free(package_json_source.contents); - - initializeStore(); - const json = JSON.parsePackageJSONUTF8(package_json_source, manager.log, manager.allocator) catch |err| { - manager.log.print(Output.errorWriter()) catch {}; - Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); - Global.crash(); - }; - - const version = version: { - if (json.asProperty("version")) |v| { - if (v.expr.asString(manager.allocator)) |s| break :version s; - } - Output.prettyError( - "error: invalid package.json, missing or invalid property \"version\": {s}\n", - .{package_json_source.path.text}, - ); - Global.crash(); - }; - - var resolver: void = {}; - var package = Lockfile.Package{}; - try package.parseWithJSON(lockfile, manager, manager.allocator, manager.log, package_json_source, json, void, &resolver, Features.folder); - - const name = lockfile.str(&package.name); - const actual_package = switch (lockfile.package_index.get(package.name_hash) orelse { - Output.prettyError( - "error: failed to find package in lockfile package index, this is a bug in Bun. 
Please file a GitHub issue.\n", - .{}, - ); - Global.crash(); - }) { - .id => |id| lockfile.packages.get(id), - .ids => |ids| id: { - for (ids.items) |id| { - const pkg = lockfile.packages.get(id); - const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; - if (std.mem.eql(u8, resolution_label, version)) { - break :id pkg; - } - } - Output.prettyError("error: could not find package with name: {s}\n", .{ - package.name.slice(lockfile.buffers.string_bytes.items), - }); - Global.crash(); - }, - }; - - const existing_patchfile_hash = existing_patchfile_hash: { - var __sfb = std.heap.stackFallback(1024, manager.allocator); - const allocator = __sfb.get(); - const name_and_version = std.fmt.allocPrint(allocator, "{s}@{}", .{ name, actual_package.resolution.fmt(strbuf, .posix) }) catch unreachable; - defer allocator.free(name_and_version); - const name_and_version_hash = String.Builder.stringHash(name_and_version); - if (lockfile.patched_dependencies.get(name_and_version_hash)) |patched_dep| { - if (patched_dep.patchfileHash()) |hash| break :existing_patchfile_hash hash; - } - break :existing_patchfile_hash null; - }; - - const cache_result = manager.computeCacheDirAndSubpath( - name, - &actual_package.resolution, - &folder_path_buf, - existing_patchfile_hash, - ); - const cache_dir = cache_result.cache_dir; - const cache_dir_subpath = cache_result.cache_dir_subpath; - - const buf = if (comptime bun.Environment.isWindows) bun.path.pathToPosixBuf(u8, argument, win_normalizer[0..]) else argument; - - break :brk .{ - cache_dir, - cache_dir_subpath, - buf, - name, - }; - }, - .name_and_version => brk: { - const pkg_maybe_version_to_patch = argument; - const name, const version = Dependency.splitNameAndMaybeVersion(pkg_maybe_version_to_patch); - const pkg_id, const folder = pkgInfoForNameAndVersion(manager.lockfile, &iterator, pkg_maybe_version_to_patch, name, version); - - const pkg = manager.lockfile.packages.get(pkg_id); - const pkg_name = pkg.name.slice(strbuf); - - const existing_patchfile_hash = existing_patchfile_hash: { - var __sfb = std.heap.stackFallback(1024, manager.allocator); - const sfballoc = __sfb.get(); - const name_and_version = std.fmt.allocPrint(sfballoc, "{s}@{}", .{ name, pkg.resolution.fmt(strbuf, .posix) }) catch unreachable; - defer sfballoc.free(name_and_version); - const name_and_version_hash = String.Builder.stringHash(name_and_version); - if (manager.lockfile.patched_dependencies.get(name_and_version_hash)) |patched_dep| { - if (patched_dep.patchfileHash()) |hash| break :existing_patchfile_hash hash; - } - break :existing_patchfile_hash null; - }; - - const cache_result = manager.computeCacheDirAndSubpath( - pkg_name, - &pkg.resolution, - &folder_path_buf, - existing_patchfile_hash, - ); - - const cache_dir = cache_result.cache_dir; - const cache_dir_subpath = cache_result.cache_dir_subpath; - - const module_folder_ = bun.path.join(&[_][]const u8{ folder.relative_path, name }, .auto); - const buf = if (comptime bun.Environment.isWindows) bun.path.pathToPosixBuf(u8, module_folder_, win_normalizer[0..]) else module_folder_; - - break :brk .{ - cache_dir, - cache_dir_subpath, - buf, - pkg_name, - }; - }, - }; - - // The package may be installed using the hard link method, - // meaning that changes to the folder will also change the package in the cache. 
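A std-only miniature of the overwrite strategy this comment block goes on to describe (the helper name is made up for illustration): unlinking first guarantees the recreated file gets a fresh inode, so writes can no longer flow through a hardlink back into the cache.

```zig
const std = @import("std");

fn overwriteDetached(dir: std.fs.Dir, name: []const u8, contents: []const u8) !void {
    // 1. Unlink. A hardlinked cache copy survives under its own link.
    dir.deleteFile(name) catch |err| switch (err) {
        error.FileNotFound => {},
        else => return err,
    };
    // 2. Recreate: a brand-new inode, no aliasing with the cache.
    var file = try dir.createFile(name, .{});
    defer file.close();
    // 3. Copy the bytes from the cache version.
    try file.writeAll(contents);
}

pub fn main() !void {
    var tmp = std.testing.tmpDir(.{});
    defer tmp.cleanup();
    try overwriteDetached(tmp.dir, "index.js", "patched contents");
}
```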
- // - // So we will overwrite the folder by directly copying the package in cache into it - manager.overwritePackageInNodeModulesFolder(cache_dir, cache_dir_subpath, module_folder) catch |e| { - Output.prettyError( - "error: failed to overwrite folder in node_modules: {s}\n", - .{@errorName(e)}, - ); - Global.crash(); - }; - - if (not_in_workspace_root) { - var bufn: bun.PathBuffer = undefined; - Output.pretty("\nTo patch {s}, edit the following folder:\n\n {s}\n", .{ pkg_name, bun.path.joinStringBuf(bufn[0..], &[_][]const u8{ bun.fs.FileSystem.instance.topLevelDirWithoutTrailingSlash(), module_folder }, .posix) }); - Output.pretty("\nOnce you're done with your changes, run:\n\n bun patch --commit '{s}'\n", .{bun.path.joinStringBuf(bufn[0..], &[_][]const u8{ bun.fs.FileSystem.instance.topLevelDirWithoutTrailingSlash(), module_folder }, .posix)}); - } else { - Output.pretty("\nTo patch {s}, edit the following folder:\n\n {s}\n", .{ pkg_name, module_folder }); - Output.pretty("\nOnce you're done with your changes, run:\n\n bun patch --commit '{s}'\n", .{module_folder}); - } - - return; - } - - fn overwritePackageInNodeModulesFolder( - manager: *PackageManager, - cache_dir: std.fs.Dir, - cache_dir_subpath: []const u8, - node_modules_folder_path: []const u8, - ) !void { - var node_modules_folder = try std.fs.cwd().openDir(node_modules_folder_path, .{ .iterate = true }); - defer node_modules_folder.close(); - - const IGNORED_PATHS: []const bun.OSPathSlice = &[_][]const bun.OSPathChar{ - bun.OSPathLiteral("node_modules"), - bun.OSPathLiteral(".git"), - bun.OSPathLiteral("CMakeFiles"), - }; - - const FileCopier = struct { - pub fn copy( - destination_dir_: std.fs.Dir, - walker: *Walker, - in_dir: if (bun.Environment.isWindows) []const u16 else void, - out_dir: if (bun.Environment.isWindows) []const u16 else void, - buf1: if (bun.Environment.isWindows) []u16 else void, - buf2: if (bun.Environment.isWindows) []u16 else void, - tmpdir_in_node_modules: if (bun.Environment.isWindows) std.fs.Dir else void, - ) !u32 { - var real_file_count: u32 = 0; - - var copy_file_state: bun.CopyFileState = .{}; - var pathbuf: bun.PathBuffer = undefined; - var pathbuf2: bun.PathBuffer = undefined; - // _ = pathbuf; // autofix - - while (try walker.next()) |entry| { - if (entry.kind != .file) continue; - real_file_count += 1; - const openFile = std.fs.Dir.openFile; - const createFile = std.fs.Dir.createFile; - - // 1. rename original file in node_modules to tmp_dir_in_node_modules - // 2. create the file again - // 3. copy cache file to the newly re-created file - // 4.
profit - if (comptime bun.Environment.isWindows) { - var tmpbuf: [1024]u8 = undefined; - const basename = bun.strings.fromWPath(pathbuf2[0..], entry.basename); - const tmpname = bun.span(bun.fs.FileSystem.instance.tmpname(basename, tmpbuf[0..], bun.fastRandom()) catch |e| { - Output.prettyError("error: copying file {s}", .{@errorName(e)}); - Global.crash(); - }); - - const entrypath = bun.strings.fromWPath(pathbuf[0..], entry.path); - pathbuf[entrypath.len] = 0; - const entrypathZ = pathbuf[0..entrypath.len :0]; - - if (bun.sys.renameatConcurrently( - .fromStdDir(destination_dir_), - entrypathZ, - .fromStdDir(tmpdir_in_node_modules), - tmpname, - .{ .move_fallback = true }, - ).asErr()) |e| { - Output.prettyError("error: copying file {}", .{e}); - Global.crash(); - } - - var outfile = createFile(destination_dir_, entrypath, .{}) catch |e| { - Output.prettyError("error: failed to create file {s} ({s})", .{ entrypath, @errorName(e) }); - Global.crash(); - }; - outfile.close(); - - const infile_path = bun.path.joinStringBufWZ(buf1, &[_][]const u16{ in_dir, entry.path }, .auto); - const outfile_path = bun.path.joinStringBufWZ(buf2, &[_][]const u16{ out_dir, entry.path }, .auto); - - bun.copyFileWithState(infile_path, outfile_path, &copy_file_state).unwrap() catch |err| { - Output.prettyError("{s}: copying file {}", .{ @errorName(err), bun.fmt.fmtOSPath(entry.path, .{}) }); - Global.crash(); - }; - } else if (comptime Environment.isPosix) { - var in_file = try openFile(entry.dir, entry.basename, .{ .mode = .read_only }); - defer in_file.close(); - - @memcpy(pathbuf[0..entry.path.len], entry.path); - pathbuf[entry.path.len] = 0; - - if (bun.sys.unlinkat( - .fromStdDir(destination_dir_), - pathbuf[0..entry.path.len :0], - ).asErr()) |e| { - Output.prettyError("error: copying file {}", .{e.withPath(entry.path)}); - Global.crash(); - } - - var outfile = try createFile(destination_dir_, entry.path, .{}); - defer outfile.close(); - - const stat = in_file.stat() catch continue; - _ = bun.c.fchmod(outfile.handle, @intCast(stat.mode)); - - bun.copyFileWithState(.fromStdFile(in_file), .fromStdFile(outfile), &copy_file_state).unwrap() catch |err| { - Output.prettyError("{s}: copying file {}", .{ @errorName(err), bun.fmt.fmtOSPath(entry.path, .{}) }); - Global.crash(); - }; - } - } - - return real_file_count; - } - }; - - var pkg_in_cache_dir = try cache_dir.openDir(cache_dir_subpath, .{ .iterate = true }); - defer pkg_in_cache_dir.close(); - var walker = Walker.walk(pkg_in_cache_dir, manager.allocator, &.{}, IGNORED_PATHS) catch bun.outOfMemory(); - defer walker.deinit(); - - var buf1: if (bun.Environment.isWindows) bun.WPathBuffer else void = undefined; - var buf2: if (bun.Environment.isWindows) bun.WPathBuffer else void = undefined; - var in_dir: if (bun.Environment.isWindows) []const u16 else void = undefined; - var out_dir: if (bun.Environment.isWindows) []const u16 else void = undefined; - - if (comptime bun.Environment.isWindows) { - const inlen = bun.windows.GetFinalPathNameByHandleW(pkg_in_cache_dir.fd, &buf1, buf1.len, 0); - if (inlen == 0) { - const e = bun.windows.Win32Error.get(); - const err = if (e.toSystemErrno()) |sys_err| bun.errnoToZigErr(sys_err) else error.Unexpected; - Output.prettyError("error: copying file {}", .{err}); - Global.crash(); - } - in_dir = buf1[0..inlen]; - const outlen = bun.windows.GetFinalPathNameByHandleW(node_modules_folder.fd, &buf2, buf2.len, 0); - if (outlen == 0) { - const e = bun.windows.Win32Error.get(); - const err = if (e.toSystemErrno()) |sys_err|
bun.errnoToZigErr(sys_err) else error.Unexpected; - Output.prettyError("error: copying file {}", .{err}); - Global.crash(); - } - out_dir = buf2[0..outlen]; - var tmpbuf: [1024]u8 = undefined; - const tmpname = bun.span(bun.fs.FileSystem.instance.tmpname("tffbp", tmpbuf[0..], bun.fastRandom()) catch |e| { - Output.prettyError("error: copying file {s}", .{@errorName(e)}); - Global.crash(); - }); - const temp_folder_in_node_modules = try node_modules_folder.makeOpenPath(tmpname, .{}); - defer { - node_modules_folder.deleteTree(tmpname) catch {}; - } - _ = try FileCopier.copy( - node_modules_folder, - &walker, - in_dir, - out_dir, - &buf1, - &buf2, - temp_folder_in_node_modules, - ); - } else if (Environment.isPosix) { - _ = try FileCopier.copy( - node_modules_folder, - &walker, - {}, - {}, - {}, - {}, - {}, - ); - } - } - - const PatchCommitResult = struct { - patch_key: []const u8, - patchfile_path: []const u8, - not_in_workspace_root: bool = false, - }; - - /// - Arg is the dir containing the package with changes OR name and version - /// - Get the patch file contents by running git diff on the temp dir and the original package dir - /// - Write the patch file to $PATCHES_DIR/$PKG_NAME_AND_VERSION.patch - /// - Update "patchedDependencies" in package.json - /// - Run install to install newly patched pkg - fn doPatchCommit( - manager: *PackageManager, - pathbuf: *bun.PathBuffer, - log_level: Options.LogLevel, - ) !?PatchCommitResult { - var folder_path_buf: bun.PathBuffer = undefined; - var lockfile: *Lockfile = try manager.allocator.create(Lockfile); - defer lockfile.deinit(); - switch (lockfile.loadFromCwd(manager, manager.allocator, manager.log, true)) { - .not_found => { - Output.errGeneric("Cannot find lockfile. Install packages with `bun install` before patching them.", .{}); - Global.crash(); - }, - .err => |cause| { - if (log_level != .silent) { - switch (cause.step) { - .open_file => Output.prettyError("error opening lockfile: {s}\n", .{ - @errorName(cause.value), - }), - .parse_file => Output.prettyError("error parsing lockfile: {s}\n", .{ - @errorName(cause.value), - }), - .read_file => Output.prettyError("error reading lockfile: {s}\n", .{ - @errorName(cause.value), - }), - .migrating => Output.prettyError("error migrating lockfile: {s}\n", .{ - @errorName(cause.value), - }), - } - - if (manager.options.enable.fail_early) { - Output.prettyError("failed to load lockfile\n", .{}); - } else { - Output.prettyError("ignoring lockfile\n", .{}); - } - - Output.flush(); - } - Global.crash(); - }, - .ok => {}, - } - - var argument = manager.options.positionals[1]; - const arg_kind: PatchArgKind = PatchArgKind.fromArg(argument); - - const not_in_workspace_root = manager.root_package_id.get(lockfile, manager.workspace_name_hash) != 0; - var free_argument = false; - argument = if (arg_kind == .path and - not_in_workspace_root and - (!bun.path.Platform.posix.isAbsolute(argument) or (bun.Environment.isWindows and !bun.path.Platform.windows.isAbsolute(argument)))) - brk: { - if (pathArgumentRelativeToRootWorkspacePackage(manager, lockfile, argument)) |rel_path| { - free_argument = true; - break :brk rel_path; - } - break :brk argument; - } else argument; - defer if (free_argument) manager.allocator.free(argument); - - // Attempt to open the existing node_modules folder - var root_node_modules = switch (bun.sys.openatOSPath(bun.FD.cwd(), bun.OSPathLiteral("node_modules"), bun.O.DIRECTORY | bun.O.RDONLY, 0o755)) { - .result => |fd| std.fs.Dir{ .fd = fd.cast() }, - .err => |e| { - 
Output.prettyError( - "error: failed to open root node_modules folder: {}\n", - .{e}, - ); - Global.crash(); - }, - }; - defer root_node_modules.close(); - - var iterator = Lockfile.Tree.Iterator(.node_modules).init(lockfile); - var resolution_buf: [1024]u8 = undefined; - const _cache_dir: std.fs.Dir, const _cache_dir_subpath: stringZ, const _changes_dir: []const u8, const _pkg: Package = switch (arg_kind) { - .path => result: { - const package_json_source: *const logger.Source = &brk: { - const package_json_path = bun.path.joinZ(&[_][]const u8{ argument, "package.json" }, .auto); - - switch (bun.sys.File.toSource(package_json_path, manager.allocator, .{})) { - .result => |s| break :brk s, - .err => |e| { - Output.err(e, "failed to read {s}", .{bun.fmt.quote(package_json_path)}); - Global.crash(); - }, - } - }; - defer manager.allocator.free(package_json_source.contents); - - initializeStore(); - const json = JSON.parsePackageJSONUTF8(package_json_source, manager.log, manager.allocator) catch |err| { - manager.log.print(Output.errorWriter()) catch {}; - Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); - Global.crash(); - }; - - const version = version: { - if (json.asProperty("version")) |v| { - if (v.expr.asString(manager.allocator)) |s| break :version s; - } - Output.prettyError( - "error: invalid package.json, missing or invalid property \"version\": {s}\n", - .{package_json_source.path.text}, - ); - Global.crash(); - }; - - var resolver: void = {}; - var package = Lockfile.Package{}; - try package.parseWithJSON(lockfile, manager, manager.allocator, manager.log, package_json_source, json, void, &resolver, Features.folder); - - const name = lockfile.str(&package.name); - const actual_package = switch (lockfile.package_index.get(package.name_hash) orelse { - Output.prettyError( - "error: failed to find package in lockfile package index, this is a bug in Bun. 
Please file a GitHub issue.\n", - .{}, - ); - Global.crash(); - }) { - .id => |id| lockfile.packages.get(id), - .ids => |ids| brk: { - for (ids.items) |id| { - const pkg = lockfile.packages.get(id); - const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; - if (std.mem.eql(u8, resolution_label, version)) { - break :brk pkg; - } - } - Output.prettyError("error: could not find package with name: {s}\n", .{ - package.name.slice(lockfile.buffers.string_bytes.items), - }); - Global.crash(); - }, - }; - - const cache_result = manager.computeCacheDirAndSubpath( - name, - &actual_package.resolution, - &folder_path_buf, - null, - ); - const cache_dir = cache_result.cache_dir; - const cache_dir_subpath = cache_result.cache_dir_subpath; - - const changes_dir = argument; - - break :result .{ cache_dir, cache_dir_subpath, changes_dir, actual_package }; - }, - .name_and_version => brk: { - const name, const version = Dependency.splitNameAndMaybeVersion(argument); - const pkg_id, const node_modules = pkgInfoForNameAndVersion(lockfile, &iterator, argument, name, version); - - const changes_dir = bun.path.joinZBuf(pathbuf[0..], &[_][]const u8{ - node_modules.relative_path, - name, - }, .auto); - const pkg = lockfile.packages.get(pkg_id); - - const cache_result = manager.computeCacheDirAndSubpath( - pkg.name.slice(lockfile.buffers.string_bytes.items), - &pkg.resolution, - &folder_path_buf, - null, - ); - const cache_dir = cache_result.cache_dir; - const cache_dir_subpath = cache_result.cache_dir_subpath; - break :brk .{ cache_dir, cache_dir_subpath, changes_dir, pkg }; - }, - }; - - // zls - const cache_dir: std.fs.Dir = _cache_dir; - const cache_dir_subpath: stringZ = _cache_dir_subpath; - const changes_dir: []const u8 = _changes_dir; - const pkg: Package = _pkg; - - const name = pkg.name.slice(lockfile.buffers.string_bytes.items); - const resolution_label = std.fmt.bufPrint(&resolution_buf, "{s}@{}", .{ name, pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix) }) catch unreachable; - - const patchfile_contents = brk: { - const new_folder = changes_dir; - var buf2: bun.PathBuffer = undefined; - var buf3: bun.PathBuffer = undefined; - const old_folder = old_folder: { - const cache_dir_path = switch (bun.sys.getFdPath(.fromStdDir(cache_dir), &buf2)) { - .result => |s| s, - .err => |e| { - Output.err(e, "failed to read from cache", .{}); - Global.crash(); - }, - }; - break :old_folder bun.path.join(&[_][]const u8{ - cache_dir_path, - cache_dir_subpath, - }, .posix); - }; - - const random_tempdir = bun.span(bun.fs.FileSystem.instance.tmpname("node_modules_tmp", buf2[0..], bun.fastRandom()) catch |e| { - Output.err(e, "failed to make tempdir", .{}); - Global.crash(); - }); - - // If the package has nested a node_modules folder, we don't want this to - // appear in the patch file when we run git diff. - // - // There isn't an option to exclude it with `git diff --no-index`, so we - // will `rename()` it out and back again. 
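A small sketch of that rename-out/rename-back pattern (hypothetical helper; the real code below uses `renameatConcurrently` with a move fallback and crash-handling): the folder is moved aside under a temporary name and restored even if the wrapped action fails.

```zig
const std = @import("std");

fn withFolderHidden(
    dir: std.fs.Dir,
    name: []const u8,
    tmp_name: []const u8,
    action: *const fn () anyerror!void,
) !void {
    const had_folder = blk: {
        dir.rename(name, tmp_name) catch |err| switch (err) {
            error.FileNotFound => break :blk false, // nothing to hide
            else => return err,
        };
        break :blk true;
    };
    // Restore no matter how `action` exits.
    defer if (had_folder) dir.rename(tmp_name, name) catch {};

    try action();
}

pub fn main() !void {
    var tmp = std.testing.tmpDir(.{});
    defer tmp.cleanup();
    try tmp.dir.makeDir("node_modules");
    try withFolderHidden(tmp.dir, "node_modules", ".node_modules.tmp", &struct {
        fn run() anyerror!void {
            // e.g. spawn `git diff --no-index old new` here
        }
    }.run);
}
```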
- const has_nested_node_modules = has_nested_node_modules: { - var new_folder_handle = std.fs.cwd().openDir(new_folder, .{}) catch |e| { - Output.err(e, "failed to open directory {s}", .{new_folder}); - Global.crash(); - }; - defer new_folder_handle.close(); - - if (bun.sys.renameatConcurrently( - .fromStdDir(new_folder_handle), - "node_modules", - .fromStdDir(root_node_modules), - random_tempdir, - .{ .move_fallback = true }, - ).asErr()) |_| break :has_nested_node_modules false; - - break :has_nested_node_modules true; - }; - - const patch_tag_tmpname = bun.span(bun.fs.FileSystem.instance.tmpname("patch_tmp", buf3[0..], bun.fastRandom()) catch |e| { - Output.err(e, "failed to make tempdir", .{}); - Global.crash(); - }); - - var bunpatchtagbuf: BuntagHashBuf = undefined; - // If the package was already patched then it might have a ".bun-tag-XXXXXXXX" - // we need to rename this out and back too. - const bun_patch_tag: ?[:0]const u8 = has_bun_patch_tag: { - const name_and_version_hash = String.Builder.stringHash(resolution_label); - const patch_tag = patch_tag: { - if (lockfile.patched_dependencies.get(name_and_version_hash)) |patchdep| { - if (patchdep.patchfileHash()) |hash| { - break :patch_tag buntaghashbuf_make(&bunpatchtagbuf, hash); - } - } - break :has_bun_patch_tag null; - }; - var new_folder_handle = std.fs.cwd().openDir(new_folder, .{}) catch |e| { - Output.err(e, "failed to open directory {s}", .{new_folder}); - Global.crash(); - }; - defer new_folder_handle.close(); - - if (bun.sys.renameatConcurrently( - .fromStdDir(new_folder_handle), - patch_tag, - .fromStdDir(root_node_modules), - patch_tag_tmpname, - .{ .move_fallback = true }, - ).asErr()) |e| { - Output.warn("failed renaming the bun patch tag, this may cause issues: {}", .{e}); - break :has_bun_patch_tag null; - } - break :has_bun_patch_tag patch_tag; - }; - defer { - if (has_nested_node_modules or bun_patch_tag != null) { - var new_folder_handle = std.fs.cwd().openDir(new_folder, .{}) catch |e| { - Output.prettyError( - "error: failed to open directory {s} {s}\n", - .{ new_folder, @errorName(e) }, - ); - Global.crash(); - }; - defer new_folder_handle.close(); - - if (has_nested_node_modules) { - if (bun.sys.renameatConcurrently( - .fromStdDir(root_node_modules), - random_tempdir, - .fromStdDir(new_folder_handle), - "node_modules", - .{ .move_fallback = true }, - ).asErr()) |e| { - Output.warn("failed renaming nested node_modules folder, this may cause issues: {}", .{e}); - } - } - - if (bun_patch_tag) |patch_tag| { - if (bun.sys.renameatConcurrently( - .fromStdDir(root_node_modules), - patch_tag_tmpname, - .fromStdDir(new_folder_handle), - patch_tag, - .{ .move_fallback = true }, - ).asErr()) |e| { - Output.warn("failed renaming the bun patch tag, this may cause issues: {}", .{e}); - } - } - } - } - - var cwdbuf: bun.PathBuffer = undefined; - const cwd = switch (bun.sys.getcwdZ(&cwdbuf)) { - .result => |fd| fd, - .err => |e| { - Output.prettyError( - "error: failed to get cwd path {}\n", - .{e}, - ); - Global.crash(); - }, - }; - var gitbuf: bun.PathBuffer = undefined; - const git = bun.which(&gitbuf, bun.getenvZ("PATH") orelse "", cwd, "git") orelse { - Output.prettyError( - "error: git must be installed to use `bun patch --commit` \n", - .{}, - ); - Global.crash(); - }; - const paths = bun.patch.gitDiffPreprocessPaths(bun.default_allocator, old_folder, new_folder, false); - const opts = bun.patch.spawnOpts(paths[0], paths[1], cwd, git, &manager.event_loop); - - var spawn_result = switch (bun.spawnSync(&opts) catch 
|e| { - Output.prettyError( - "error: failed to make diff {s}\n", - .{@errorName(e)}, - ); - Global.crash(); - }) { - .result => |r| r, - .err => |e| { - Output.prettyError( - "error: failed to make diff {}\n", - .{e}, - ); - Global.crash(); - }, - }; - - const contents = switch (bun.patch.diffPostProcess(&spawn_result, paths[0], paths[1]) catch |e| { - Output.prettyError( - "error: failed to make diff {s}\n", - .{@errorName(e)}, - ); - Global.crash(); - }) { - .result => |stdout| stdout, - .err => |stderr| { - defer stderr.deinit(); - const Truncate = struct { - stderr: std.ArrayList(u8), - - pub fn format( - this: *const @This(), - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) !void { - const truncate_stderr = this.stderr.items.len > 256; - if (truncate_stderr) { - try writer.print("{s}... ({d} more bytes)", .{ this.stderr.items[0..256], this.stderr.items.len - 256 }); - } else try writer.print("{s}", .{this.stderr.items[0..]}); - } - }; - Output.prettyError( - "error: failed to make diff {}\n", - .{ - Truncate{ .stderr = stderr }, - }, - ); - Global.crash(); - }, - }; - - if (contents.items.len == 0) { - Output.pretty("\nNo changes detected, comparing {s} to {s}\n", .{ old_folder, new_folder }); - Output.flush(); - contents.deinit(); - return null; - } - - break :brk contents; - }; - defer patchfile_contents.deinit(); - - // write the patch contents to temp file then rename - var tmpname_buf: [1024]u8 = undefined; - const tempfile_name = bun.span(try bun.fs.FileSystem.instance.tmpname("tmp", &tmpname_buf, bun.fastRandom())); - const tmpdir = manager.getTemporaryDirectory(); - const tmpfd = switch (bun.sys.openat( - .fromStdDir(tmpdir), - tempfile_name, - bun.O.RDWR | bun.O.CREAT, - 0o666, - )) { - .result => |fd| fd, - .err => |e| { - Output.err(e, "failed to open temp file", .{}); - Global.crash(); - }, - }; - defer tmpfd.close(); - - if (bun.sys.File.writeAll(.{ .handle = tmpfd }, patchfile_contents.items).asErr()) |e| { - Output.err(e, "failed to write patch to temp file", .{}); - Global.crash(); - } - - @memcpy(resolution_buf[resolution_label.len .. resolution_label.len + ".patch".len], ".patch"); - var patch_filename: []const u8 = resolution_buf[0 .. 
resolution_label.len + ".patch".len]; - var deinit = false; - if (escapePatchFilename(manager.allocator, patch_filename)) |escaped| { - deinit = true; - patch_filename = escaped; - } - defer if (deinit) manager.allocator.free(patch_filename); - - const path_in_patches_dir = bun.path.joinZ( - &[_][]const u8{ - manager.options.patch_features.commit.patches_dir, - patch_filename, - }, - .posix, - ); - - var nodefs = bun.JSC.Node.fs.NodeFS{}; - const args = bun.JSC.Node.fs.Arguments.Mkdir{ - .path = .{ .string = bun.PathString.init(manager.options.patch_features.commit.patches_dir) }, - }; - if (nodefs.mkdirRecursive(args).asErr()) |e| { - Output.err(e, "failed to make patches dir {}", .{bun.fmt.quote(args.path.slice())}); - Global.crash(); - } - - // rename to patches dir - if (bun.sys.renameatConcurrently( - .fromStdDir(tmpdir), - tempfile_name, - bun.FD.cwd(), - path_in_patches_dir, - .{ .move_fallback = true }, - ).asErr()) |e| { - Output.err(e, "failed renaming patch file to patches dir", .{}); - Global.crash(); - } - - const patch_key = std.fmt.allocPrint(manager.allocator, "{s}", .{resolution_label}) catch bun.outOfMemory(); - const patchfile_path = manager.allocator.dupe(u8, path_in_patches_dir) catch bun.outOfMemory(); - _ = bun.sys.unlink(bun.path.joinZ(&[_][]const u8{ changes_dir, ".bun-patch-tag" }, .auto)); - - return .{ - .patch_key = patch_key, - .patchfile_path = patchfile_path, - .not_in_workspace_root = not_in_workspace_root, - }; - } - - fn patchCommitGetVersion( - buf: *[1024]u8, - patch_tag_path: [:0]const u8, - ) bun.sys.Maybe(string) { - const patch_tag_fd = switch (bun.sys.open(patch_tag_path, bun.O.RDONLY, 0)) { - .result => |fd| fd, - .err => |e| return .{ .err = e }, - }; - defer { - patch_tag_fd.close(); - // we actually need to delete this - _ = bun.sys.unlink(patch_tag_path); - } - - const version = switch (bun.sys.File.readFillBuf(.{ .handle = patch_tag_fd }, buf[0..])) { - .result => |v| v, - .err => |e| return .{ .err = e }, - }; - - // maybe if someone opens it in their editor and hits save a newline will be inserted, - // so trim that off - return .{ .result = std.mem.trimRight(u8, version, " \n\r\t") }; - } - - fn escapePatchFilename(allocator: std.mem.Allocator, name: []const u8) ?[]const u8 { - const EscapeVal = enum { - @"/", - @"\\", - @" ", - @"\n", - @"\r", - @"\t", - // @".", - other, - - pub fn escaped(this: @This()) ?[]const u8 { - return switch (this) { - .@"/" => "%2F", - .@"\\" => "%5c", - .@" " => "%20", - .@"\n" => "%0A", - .@"\r" => "%0D", - .@"\t" => "%09", - // .@"." 
=> "%2E", - .other => null, - }; - } - }; - const ESCAPE_TABLE: [256]EscapeVal = comptime brk: { - var table: [256]EscapeVal = [_]EscapeVal{.other} ** 256; - const ty = @typeInfo(EscapeVal); - for (ty.@"enum".fields) |field| { - if (field.name.len == 1) { - const c = field.name[0]; - table[c] = @enumFromInt(field.value); - } - } - break :brk table; - }; - var count: usize = 0; - for (name) |c| count += if (ESCAPE_TABLE[c].escaped()) |e| e.len else 1; - if (count == name.len) return null; - var buf = allocator.alloc(u8, count) catch bun.outOfMemory(); - var i: usize = 0; - for (name) |c| { - const e = ESCAPE_TABLE[c].escaped() orelse &[_]u8{c}; - @memcpy(buf[i..][0..e.len], e); - i += e.len; - } - return buf; - } - - var cwd_buf: bun.PathBuffer = undefined; - var package_json_cwd_buf: bun.PathBuffer = undefined; - pub var package_json_cwd: string = ""; - - pub fn install(ctx: Command.Context) !void { - var cli = try CommandLineArguments.parse(ctx.allocator, .install); - - // The way this works: - // 1. Run the bundler on source files - // 2. Rewrite positional arguments to act identically to the developer - // typing in the dependency names - // 3. Run the install command - if (cli.analyze) { - const Analyzer = struct { - ctx: Command.Context, - cli: *CommandLineArguments, - pub fn onAnalyze(this: *@This(), result: *bun.bundle_v2.BundleV2.DependenciesScanner.Result) anyerror!void { - // TODO: add separate argument that makes it so positionals[1..] is not done and instead the positionals are passed - var positionals = bun.default_allocator.alloc(string, result.dependencies.keys().len + 1) catch bun.outOfMemory(); - positionals[0] = "install"; - bun.copy(string, positionals[1..], result.dependencies.keys()); - this.cli.positionals = positionals; - - try installWithCLI(this.ctx, this.cli.*); - - Global.exit(0); - } - }; - var analyzer = Analyzer{ - .ctx = ctx, - .cli = &cli, - }; - - var fetcher = bun.bundle_v2.BundleV2.DependenciesScanner{ - .ctx = &analyzer, - .entry_points = cli.positionals[1..], - .onFetch = @ptrCast(&Analyzer.onAnalyze), - }; - - try bun.CLI.BuildCommand.exec(bun.CLI.Command.get(), &fetcher); - return; - } - - return installWithCLI(ctx, cli); - } - - pub fn installWithCLI(ctx: Command.Context, cli: CommandLineArguments) !void { - const subcommand: Subcommand = if (cli.positionals.len > 1) .add else .install; - - // TODO(dylan-conway): print `bun install ` or `bun add ` before logs from `init`. 
- // and cleanup install/add subcommand usage - var manager, const original_cwd = try init(ctx, cli, .install); - - // switch to `bun add ` - if (subcommand == .add) { - manager.subcommand = .add; - if (manager.options.shouldPrintCommandName()) { - Output.prettyln("bun add v" ++ Global.package_json_version_with_sha ++ "\n", .{}); - Output.flush(); - } - return manager.updatePackageJSONAndInstallWithManager(ctx, original_cwd); - } - - if (manager.options.shouldPrintCommandName()) { - Output.prettyln("bun install v" ++ Global.package_json_version_with_sha ++ "\n", .{}); - Output.flush(); - } - - const package_json_contents = manager.root_package_json_file.readToEndAlloc(ctx.allocator, std.math.maxInt(usize)) catch |err| { - if (manager.options.log_level != .silent) { - Output.prettyErrorln("{s} reading package.json :(", .{@errorName(err)}); - Output.flush(); - } - return; - }; - - try manager.installWithManager(ctx, package_json_contents, original_cwd); - - if (manager.any_failed_to_install) { - Global.exit(1); - } - } - - pub const PackageInstaller = @import("./PackageInstaller.zig").PackageInstaller; - - pub inline fn pendingTaskCount(manager: *const PackageManager) u32 { - return manager.pending_tasks.load(.monotonic); - } - - pub inline fn incrementPendingTasks(manager: *PackageManager, count: u32) u32 { - manager.total_tasks += count; - return manager.pending_tasks.fetchAdd(count, .monotonic); - } - - pub inline fn decrementPendingTasks(manager: *PackageManager) u32 { - return manager.pending_tasks.fetchSub(1, .monotonic); - } - - pub fn setupGlobalDir(manager: *PackageManager, ctx: Command.Context) !void { - manager.options.global_bin_dir = try Options.openGlobalBinDir(ctx.install); - var out_buffer: bun.PathBuffer = undefined; - const result = try bun.getFdPathZ(.fromStdDir(manager.options.global_bin_dir), &out_buffer); - const path = try FileSystem.instance.dirname_store.append([:0]u8, result); - manager.options.bin_path = path.ptr[0..path.len :0]; - } - - pub fn startProgressBarIfNone(manager: *PackageManager) void { - if (manager.downloads_node == null) { - manager.startProgressBar(); - } - } - pub fn startProgressBar(manager: *PackageManager) void { - manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; - manager.downloads_node = manager.progress.start(ProgressStrings.download(), 0); - manager.setNodeName(manager.downloads_node.?, ProgressStrings.download_no_emoji_, ProgressStrings.download_emoji, true); - manager.downloads_node.?.setEstimatedTotalItems(manager.total_tasks + manager.extracted_count); - manager.downloads_node.?.setCompletedItems(manager.total_tasks - manager.pendingTaskCount()); - manager.downloads_node.?.activate(); - manager.progress.refresh(); - } - - pub fn endProgressBar(manager: *PackageManager) void { - var downloads_node = manager.downloads_node orelse return; - downloads_node.setEstimatedTotalItems(downloads_node.unprotected_estimated_total_items); - downloads_node.setCompletedItems(downloads_node.unprotected_estimated_total_items); - manager.progress.refresh(); - manager.progress.root.end(); - manager.progress = .{}; - manager.downloads_node = null; - } - - pub fn loadRootLifecycleScripts(this: *PackageManager, root_package: Package) void { - const binding_dot_gyp_path = Path.joinAbsStringZ( - Fs.FileSystem.instance.top_level_dir, - &[_]string{"binding.gyp"}, - .auto, - ); - - const buf = this.lockfile.buffers.string_bytes.items; - // need to clone because this is a copy before Lockfile.cleanWithLogger - const name = 
root_package.name.slice(buf); - const top_level_dir_without_trailing_slash = strings.withoutTrailingSlash(FileSystem.instance.top_level_dir); - - if (root_package.scripts.hasAny()) { - const add_node_gyp_rebuild_script = root_package.scripts.install.isEmpty() and root_package.scripts.preinstall.isEmpty() and Syscall.exists(binding_dot_gyp_path); - - this.root_lifecycle_scripts = root_package.scripts.createList( - this.lockfile, - buf, - top_level_dir_without_trailing_slash, - name, - .root, - add_node_gyp_rebuild_script, - ); - } else { - if (Syscall.exists(binding_dot_gyp_path)) { - // no scripts exist but auto node gyp script needs to be added - this.root_lifecycle_scripts = root_package.scripts.createList( - this.lockfile, - buf, - top_level_dir_without_trailing_slash, - name, - .root, - true, - ); - } - } - } - - fn installWithManager( - manager: *PackageManager, - ctx: Command.Context, - root_package_json_contents: string, - original_cwd: string, - ) !void { - const log_level = manager.options.log_level; - - // Start resolving DNS for the default registry immediately. - // Unless you're behind a proxy. - if (!manager.env.hasHTTPProxy()) { - // And don't try to resolve DNS if it's an IP address. - if (manager.options.scope.url.hostname.len > 0 and !manager.options.scope.url.isIPAddress()) { - var hostname_stack = std.heap.stackFallback(512, ctx.allocator); - const allocator = hostname_stack.get(); - const hostname = try allocator.dupeZ(u8, manager.options.scope.url.hostname); - defer allocator.free(hostname); - bun.dns.internal.prefetch(manager.event_loop.loop(), hostname, manager.options.scope.url.getPortAuto()); - } - } - - var load_result: Lockfile.LoadResult = if (manager.options.do.load_lockfile) - manager.lockfile.loadFromCwd( - manager, - manager.allocator, - manager.log, - true, - ) - else - .{ .not_found = {} }; - - try manager.updateLockfileIfNeeded(load_result); - - var root = Lockfile.Package{}; - var needs_new_lockfile = load_result != .ok or - (load_result.ok.lockfile.buffers.dependencies.items.len == 0 and manager.update_requests.len > 0); - - manager.options.enable.force_save_lockfile = manager.options.enable.force_save_lockfile or - (load_result == .ok and - // if migrated always save a new lockfile - (load_result.ok.was_migrated or - - // if loaded from binary and save-text-lockfile is passed - (load_result.ok.format == .binary and - manager.options.save_text_lockfile orelse false))); - - // this defaults to false - // but we force allowing updates to the lockfile when you do bun add - var had_any_diffs = false; - manager.progress = .{}; - - // Step 2. 
Parse the package.json file - const root_package_json_source = &logger.Source.initPathString(package_json_cwd, root_package_json_contents); - - switch (load_result) { - .err => |cause| { - if (log_level != .silent) { - switch (cause.step) { - .open_file => Output.err(cause.value, "failed to open lockfile: '{s}'", .{ - cause.lockfile_path, - }), - .parse_file => Output.err(cause.value, "failed to parse lockfile: '{s}'", .{ - cause.lockfile_path, - }), - .read_file => Output.err(cause.value, "failed to read lockfile: '{s}'", .{ - cause.lockfile_path, - }), - .migrating => Output.err(cause.value, "failed to migrate lockfile: '{s}'", .{ - cause.lockfile_path, - }), - } - - if (!manager.options.enable.fail_early) { - Output.printErrorln("", .{}); - Output.warn("Ignoring lockfile", .{}); - } - - if (ctx.log.errors > 0) { - try manager.log.print(Output.errorWriter()); - manager.log.reset(); - } - Output.flush(); - } - - if (manager.options.enable.fail_early) Global.crash(); - }, - .ok => { - if (manager.subcommand == .update) { - // existing lockfile, get the original version if updating - const lockfile = manager.lockfile; - const packages = lockfile.packages.slice(); - const resolutions = packages.items(.resolution); - const workspace_package_id = manager.root_package_id.get(lockfile, manager.workspace_name_hash); - const workspace_dep_list = packages.items(.dependencies)[workspace_package_id]; - const workspace_res_list = packages.items(.resolutions)[workspace_package_id]; - const workspace_deps = workspace_dep_list.get(lockfile.buffers.dependencies.items); - const workspace_package_ids = workspace_res_list.get(lockfile.buffers.resolutions.items); - for (workspace_deps, workspace_package_ids) |dep, package_id| { - if (dep.version.tag != .npm and dep.version.tag != .dist_tag) continue; - if (package_id == invalid_package_id) continue; - - if (manager.updating_packages.getPtr(dep.name.slice(lockfile.buffers.string_bytes.items))) |entry_ptr| { - const original_resolution: Resolution = resolutions[package_id]; - // Just in case, check that the resolution is `npm`. It should always be `npm` because the dependency version - // is `npm` or `dist_tag`. 
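The check and the tag-cloning just below guard against two hazards: a non-`npm` resolution, and the fact that the version's pre/build tag points into the lockfile's shared string buffer, which may be reallocated as the install proceeds. A tiny illustration of the second hazard with a plain `ArrayList` (the values are made up; only the failure mode matters):

```zig
const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    var string_bytes = std.ArrayList(u8).init(allocator);
    defer string_bytes.deinit();
    try string_bytes.appendSlice("1.2.3-beta.1");

    // A borrowed slice into the shared buffer: "beta.1"
    const pre_tag = string_bytes.items[6..];

    // An owned copy survives any future reallocation.
    const owned = try allocator.dupe(u8, pre_tag);
    defer allocator.free(owned);

    // Growing the buffer may reallocate it, leaving `pre_tag`
    // dangling while `owned` stays valid.
    try string_bytes.appendSlice("+build.5");
    std.debug.print("{s}\n", .{owned});
}
```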
- if (original_resolution.tag != .npm) continue; - - var original = original_resolution.value.npm.version; - const tag_total = original.tag.pre.len() + original.tag.build.len(); - if (tag_total > 0) { - // clone because we don't know if the lockfile buffer will reallocate - const tag_buf = manager.allocator.alloc(u8, tag_total) catch bun.outOfMemory(); - var ptr = tag_buf; - original.tag = original_resolution.value.npm.version.tag.cloneInto( - lockfile.buffers.string_bytes.items, - &ptr, - ); - - entry_ptr.original_version_string_buf = tag_buf; - } - - entry_ptr.original_version = original; - } - } - } - differ: { - root = load_result.ok.lockfile.rootPackage() orelse { - needs_new_lockfile = true; - break :differ; - }; - - if (root.dependencies.len == 0) { - needs_new_lockfile = true; - } - - if (needs_new_lockfile) break :differ; - - var lockfile: Lockfile = undefined; - lockfile.initEmpty(manager.allocator); - var maybe_root = Lockfile.Package{}; - - var resolver: void = {}; - try maybe_root.parse( - &lockfile, - manager, - manager.allocator, - manager.log, - root_package_json_source, - void, - &resolver, - Features.main, - ); - const mapping = try manager.lockfile.allocator.alloc(PackageID, maybe_root.dependencies.len); - @memset(mapping, invalid_package_id); - - manager.summary = try Package.Diff.generate( - manager, - manager.allocator, - manager.log, - manager.lockfile, - &lockfile, - &root, - &maybe_root, - if (manager.to_update) manager.update_requests else null, - mapping, - ); - - had_any_diffs = manager.summary.hasDiffs(); - - if (!had_any_diffs) { - // always grab latest scripts for root package - var builder_ = manager.lockfile.stringBuilder(); - var builder = &builder_; - - maybe_root.scripts.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); - try builder.allocate(); - manager.lockfile.packages.items(.scripts)[0] = maybe_root.scripts.clone( - lockfile.buffers.string_bytes.items, - *Lockfile.StringBuilder, - builder, - ); - builder.clamp(); - } else { - var builder_ = manager.lockfile.stringBuilder(); - // ensure we use one pointer to reference it instead of creating new ones and potentially aliasing - var builder = &builder_; - // If you changed packages, we will copy over the new package from the new lockfile - const new_dependencies = maybe_root.dependencies.get(lockfile.buffers.dependencies.items); - - for (new_dependencies) |new_dep| { - new_dep.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); - } - - for (lockfile.workspace_paths.values()) |path| builder.count(path.slice(lockfile.buffers.string_bytes.items)); - for (lockfile.workspace_versions.values()) |version| version.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); - for (lockfile.patched_dependencies.values()) |patch_dep| builder.count(patch_dep.path.slice(lockfile.buffers.string_bytes.items)); - - lockfile.overrides.count(&lockfile, builder); - lockfile.catalogs.count(&lockfile, builder); - maybe_root.scripts.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); - - const off = @as(u32, @truncate(manager.lockfile.buffers.dependencies.items.len)); - const len = @as(u32, @truncate(new_dependencies.len)); - var packages = manager.lockfile.packages.slice(); - var dep_lists = packages.items(.dependencies); - var resolution_lists = packages.items(.resolutions); - const old_resolutions_list = resolution_lists[0]; - dep_lists[0] = .{ .off = off, .len = len }; - resolution_lists[0] = .{ .off = off, .len = len }; - try 
builder.allocate(); - - const all_name_hashes: []PackageNameHash = brk: { - if (!manager.summary.overrides_changed) break :brk &.{}; - const hashes_len = manager.lockfile.overrides.map.entries.len + lockfile.overrides.map.entries.len; - if (hashes_len == 0) break :brk &.{}; - var all_name_hashes = try bun.default_allocator.alloc(PackageNameHash, hashes_len); - @memcpy(all_name_hashes[0..manager.lockfile.overrides.map.entries.len], manager.lockfile.overrides.map.keys()); - @memcpy(all_name_hashes[manager.lockfile.overrides.map.entries.len..], lockfile.overrides.map.keys()); - var i = manager.lockfile.overrides.map.entries.len; - while (i < all_name_hashes.len) { - if (std.mem.indexOfScalar(PackageNameHash, all_name_hashes[0..i], all_name_hashes[i]) != null) { - all_name_hashes[i] = all_name_hashes[all_name_hashes.len - 1]; - all_name_hashes.len -= 1; - } else { - i += 1; - } - } - break :brk all_name_hashes; - }; - - manager.lockfile.overrides = try lockfile.overrides.clone(manager, &lockfile, manager.lockfile, builder); - manager.lockfile.catalogs = try lockfile.catalogs.clone(manager, &lockfile, manager.lockfile, builder); - - manager.lockfile.trusted_dependencies = if (lockfile.trusted_dependencies) |trusted_dependencies| - try trusted_dependencies.clone(manager.lockfile.allocator) - else - null; - - try manager.lockfile.buffers.dependencies.ensureUnusedCapacity(manager.lockfile.allocator, len); - try manager.lockfile.buffers.resolutions.ensureUnusedCapacity(manager.lockfile.allocator, len); - - const old_resolutions = old_resolutions_list.get(manager.lockfile.buffers.resolutions.items); - - var dependencies = manager.lockfile.buffers.dependencies.items.ptr[off .. off + len]; - var resolutions = manager.lockfile.buffers.resolutions.items.ptr[off .. off + len]; - - // It is too easy to accidentally leave this memory undefined - @memset(resolutions, invalid_package_id); - @memset(dependencies, Dependency{}); - - manager.lockfile.buffers.dependencies.items = manager.lockfile.buffers.dependencies.items.ptr[0 .. off + len]; - manager.lockfile.buffers.resolutions.items = manager.lockfile.buffers.resolutions.items.ptr[0 .. off + len]; - - for (new_dependencies, 0..) 
|new_dep, i| { - dependencies[i] = try new_dep.clone(manager, lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); - if (mapping[i] != invalid_package_id) { - resolutions[i] = old_resolutions[mapping[i]]; - } - } - - manager.lockfile.packages.items(.scripts)[0] = maybe_root.scripts.clone( - lockfile.buffers.string_bytes.items, - *Lockfile.StringBuilder, - builder, - ); - - // Update workspace paths - try manager.lockfile.workspace_paths.ensureTotalCapacity(manager.lockfile.allocator, lockfile.workspace_paths.entries.len); - { - manager.lockfile.workspace_paths.clearRetainingCapacity(); - var iter = lockfile.workspace_paths.iterator(); - while (iter.next()) |entry| { - // The string offsets will be wrong so fix them - const path = entry.value_ptr.slice(lockfile.buffers.string_bytes.items); - const str = builder.append(String, path); - manager.lockfile.workspace_paths.putAssumeCapacity(entry.key_ptr.*, str); - } - } - - // Update workspace versions - try manager.lockfile.workspace_versions.ensureTotalCapacity(manager.lockfile.allocator, lockfile.workspace_versions.entries.len); - { - manager.lockfile.workspace_versions.clearRetainingCapacity(); - var iter = lockfile.workspace_versions.iterator(); - while (iter.next()) |entry| { - // Copy version string offsets - const version = entry.value_ptr.append(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); - manager.lockfile.workspace_versions.putAssumeCapacity(entry.key_ptr.*, version); - } - } - - // Update patched dependencies - { - var iter = lockfile.patched_dependencies.iterator(); - while (iter.next()) |entry| { - const pkg_name_and_version_hash = entry.key_ptr.*; - bun.debugAssert(entry.value_ptr.patchfile_hash_is_null); - const gop = try manager.lockfile.patched_dependencies.getOrPut(manager.lockfile.allocator, pkg_name_and_version_hash); - if (!gop.found_existing) { - gop.value_ptr.* = .{ - .path = builder.append(String, entry.value_ptr.*.path.slice(lockfile.buffers.string_bytes.items)), - }; - gop.value_ptr.setPatchfileHash(null); - // gop.value_ptr.path = gop.value_ptr.path; - } else if (!bun.strings.eql( - gop.value_ptr.path.slice(manager.lockfile.buffers.string_bytes.items), - entry.value_ptr.path.slice(lockfile.buffers.string_bytes.items), - )) { - gop.value_ptr.path = builder.append(String, entry.value_ptr.*.path.slice(lockfile.buffers.string_bytes.items)); - gop.value_ptr.setPatchfileHash(null); - } - } - - var count: usize = 0; - iter = manager.lockfile.patched_dependencies.iterator(); - while (iter.next()) |entry| { - if (!lockfile.patched_dependencies.contains(entry.key_ptr.*)) { - count += 1; - } - } - if (count > 0) { - try manager.patched_dependencies_to_remove.ensureTotalCapacity(manager.allocator, count); - iter = manager.lockfile.patched_dependencies.iterator(); - while (iter.next()) |entry| { - if (!lockfile.patched_dependencies.contains(entry.key_ptr.*)) { - try manager.patched_dependencies_to_remove.put(manager.allocator, entry.key_ptr.*, {}); - } - } - for (manager.patched_dependencies_to_remove.keys()) |hash| { - _ = manager.lockfile.patched_dependencies.orderedRemove(hash); - } - } - } - - builder.clamp(); - - if (manager.summary.overrides_changed and all_name_hashes.len > 0) { - for (manager.lockfile.buffers.dependencies.items, 0..) 
|*dependency, dependency_i| { - if (std.mem.indexOfScalar(PackageNameHash, all_name_hashes, dependency.name_hash)) |_| { - manager.lockfile.buffers.resolutions.items[dependency_i] = invalid_package_id; - try manager.enqueueDependencyWithMain( - @truncate(dependency_i), - dependency, - invalid_package_id, - false, - ); - } - } - } - - if (manager.summary.catalogs_changed) { - for (manager.lockfile.buffers.dependencies.items, 0..) |*dep, _dep_id| { - const dep_id: DependencyID = @intCast(_dep_id); - if (dep.version.tag != .catalog) continue; - - manager.lockfile.buffers.resolutions.items[dep_id] = invalid_package_id; - try manager.enqueueDependencyWithMain( - dep_id, - dep, - invalid_package_id, - false, - ); - } - } - - // Split this into two passes because the below may allocate memory or invalidate pointers - if (manager.summary.add > 0 or manager.summary.update > 0) { - const changes = @as(PackageID, @truncate(mapping.len)); - var counter_i: PackageID = 0; - - _ = manager.getCacheDirectory(); - _ = manager.getTemporaryDirectory(); - - while (counter_i < changes) : (counter_i += 1) { - if (mapping[counter_i] == invalid_package_id) { - const dependency_i = counter_i + off; - const dependency = manager.lockfile.buffers.dependencies.items[dependency_i]; - try manager.enqueueDependencyWithMain( - dependency_i, - &dependency, - manager.lockfile.buffers.resolutions.items[dependency_i], - false, - ); - } - } - } - - if (manager.summary.update > 0) root.scripts = .{}; - } - } - }, - else => {}, - } - - if (needs_new_lockfile) { - root = .{}; - manager.lockfile.initEmpty(manager.allocator); - - if (manager.options.enable.frozen_lockfile and load_result != .not_found) { - if (log_level != .silent) { - Output.prettyErrorln("error: lockfile had changes, but lockfile is frozen", .{}); - } - Global.crash(); - } - - var resolver: void = {}; - try root.parse( - manager.lockfile, - manager, - manager.allocator, - manager.log, - root_package_json_source, - void, - &resolver, - Features.main, - ); - - root = try manager.lockfile.appendPackage(root); - - if (root.dependencies.len > 0) { - _ = manager.getCacheDirectory(); - _ = manager.getTemporaryDirectory(); - } - { - var iter = manager.lockfile.patched_dependencies.iterator(); - while (iter.next()) |entry| manager.enqueuePatchTaskPre(PatchTask.newCalcPatchHash(manager, entry.key_ptr.*, null)); - } - manager.enqueueDependencyList(root.dependencies); - } else { - { - var iter = manager.lockfile.patched_dependencies.iterator(); - while (iter.next()) |entry| manager.enqueuePatchTaskPre(PatchTask.newCalcPatchHash(manager, entry.key_ptr.*, null)); - } - // Anything that needs to be downloaded from an update needs to be scheduled here - manager.drainDependencyList(); - } - - if (manager.pendingTaskCount() > 0 or manager.peer_dependencies.readableLength() > 0) { - if (root.dependencies.len > 0) { - _ = manager.getCacheDirectory(); - _ = manager.getTemporaryDirectory(); - } - - if (log_level.showProgress()) { - manager.startProgressBar(); - } else if (log_level != .silent) { - Output.prettyErrorln("Resolving dependencies", .{}); - Output.flush(); - } - - const runAndWaitFn = struct { - pub fn runAndWaitFn(comptime check_peers: bool, comptime only_pre_patch: bool) *const fn (*PackageManager) anyerror!void { - return struct { - manager: *PackageManager, - err: ?anyerror = null, - pub fn isDone(closure: *@This()) bool { - var this = closure.manager; - if (comptime check_peers) - this.processPeerDependencyList() catch |err| { - closure.err = err; - return true; - }; - 
- this.drainDependencyList(); - - this.runTasks( - *PackageManager, - this, - .{ - .onExtract = {}, - .onPatch = {}, - .onResolve = {}, - .onPackageManifestError = {}, - .onPackageDownloadError = {}, - .progress_bar = true, - }, - check_peers, - this.options.log_level, - ) catch |err| { - closure.err = err; - return true; - }; - - if (comptime check_peers) { - if (this.peer_dependencies.readableLength() > 0) { - return false; - } - } - - if (comptime only_pre_patch) { - const pending_patch = this.pending_pre_calc_hashes.load(.monotonic); - return pending_patch == 0; - } - - const pending_tasks = this.pendingTaskCount(); - - if (PackageManager.verbose_install and pending_tasks > 0) { - if (PackageManager.hasEnoughTimePassedBetweenWaitingMessages()) Output.prettyErrorln("[PackageManager] waiting for {d} tasks\n", .{pending_tasks}); - } - - return pending_tasks == 0; - } - - pub fn runAndWait(this: *PackageManager) !void { - var closure = @This(){ - .manager = this, - }; - - this.sleepUntil(&closure, &@This().isDone); - - if (closure.err) |err| { - return err; - } - } - }.runAndWait; - } - }.runAndWaitFn; - - const waitForCalcingPatchHashes = runAndWaitFn(false, true); - const waitForEverythingExceptPeers = runAndWaitFn(false, false); - const waitForPeers = runAndWaitFn(true, false); - - if (manager.lockfile.patched_dependencies.entries.len > 0) { - try waitForCalcingPatchHashes(manager); - } - - if (manager.pendingTaskCount() > 0) { - try waitForEverythingExceptPeers(manager); - } - - try waitForPeers(manager); - - if (log_level.showProgress()) { - manager.endProgressBar(); - } else if (log_level != .silent) { - Output.prettyErrorln("Resolved, downloaded and extracted [{d}]", .{manager.total_tasks}); - Output.flush(); - } - } - - const had_errors_before_cleaning_lockfile = manager.log.hasErrors(); - try manager.log.print(Output.errorWriter()); - manager.log.reset(); - - // This operation doesn't perform any I/O, so it should be relatively cheap. - const lockfile_before_clean = manager.lockfile; - - manager.lockfile = try manager.lockfile.cleanWithLogger( - manager, - manager.update_requests, - manager.log, - manager.options.enable.exact_versions, - log_level, - ); - - if (manager.lockfile.packages.len > 0) { - root = manager.lockfile.packages.get(0); - } - - if (manager.lockfile.packages.len > 0) { - for (manager.update_requests) |request| { - // prevent redundant errors - if (request.failed) { - return error.InstallFailed; - } - } - manager.verifyResolutions(log_level); - } - - // append scripts to lockfile before generating new metahash - manager.loadRootLifecycleScripts(root); - defer { - if (manager.root_lifecycle_scripts) |root_scripts| { - manager.allocator.free(root_scripts.package_name); - } - } - - if (manager.root_lifecycle_scripts) |root_scripts| { - root_scripts.appendToLockfile(manager.lockfile); - } - { - const packages = manager.lockfile.packages.slice(); - for (packages.items(.resolution), packages.items(.meta), packages.items(.scripts)) |resolution, meta, scripts| { - if (resolution.tag == .workspace) { - if (meta.hasInstallScript()) { - if (scripts.hasAny()) { - const first_index, _, const entries = scripts.getScriptEntries( - manager.lockfile, - manager.lockfile.buffers.string_bytes.items, - .workspace, - false, - ); - - if (comptime Environment.allow_assert) { - bun.assert(first_index != -1); - } - - if (first_index != -1) { - inline for (entries, 0..) 
|maybe_entry, i| { - if (maybe_entry) |entry| { - @field(manager.lockfile.scripts, Lockfile.Scripts.names[i]).append( - manager.lockfile.allocator, - entry, - ) catch bun.outOfMemory(); - } - } - } - } else { - const first_index, _, const entries = scripts.getScriptEntries( - manager.lockfile, - manager.lockfile.buffers.string_bytes.items, - .workspace, - true, - ); - - if (comptime Environment.allow_assert) { - bun.assert(first_index != -1); - } - - inline for (entries, 0..) |maybe_entry, i| { - if (maybe_entry) |entry| { - @field(manager.lockfile.scripts, Lockfile.Scripts.names[i]).append( - manager.lockfile.allocator, - entry, - ) catch bun.outOfMemory(); - } - } - } - } - } - } - } - - if (manager.options.global) { - try manager.setupGlobalDir(ctx); - } - - const packages_len_before_install = manager.lockfile.packages.len; - - if (manager.options.enable.frozen_lockfile and load_result != .not_found) frozen_lockfile: { - if (load_result.loadedFromTextLockfile()) { - if (manager.lockfile.eql(lockfile_before_clean, packages_len_before_install, manager.allocator) catch bun.outOfMemory()) { - break :frozen_lockfile; - } - } else { - if (!(manager.lockfile.hasMetaHashChanged(PackageManager.verbose_install or manager.options.do.print_meta_hash_string, packages_len_before_install) catch false)) { - break :frozen_lockfile; - } - } - - if (log_level != .silent) { - Output.prettyErrorln("error: lockfile had changes, but lockfile is frozen", .{}); - Output.note("try re-running without --frozen-lockfile and commit the updated lockfile", .{}); - } - Global.crash(); - } - - const lockfile_before_install = manager.lockfile; - - const save_format = load_result.saveFormat(&manager.options); - - if (manager.options.lockfile_only) { - // save the lockfile and exit. make sure metahash is generated for binary lockfile - - manager.lockfile.meta_hash = try manager.lockfile.generateMetaHash( - PackageManager.verbose_install or manager.options.do.print_meta_hash_string, - packages_len_before_install, - ); - - try manager.saveLockfile(&load_result, save_format, had_any_diffs, lockfile_before_install, packages_len_before_install, log_level); - - if (manager.options.do.summary) { - // TODO(dylan-conway): packages aren't installed but we can still print - // added/removed/updated direct dependencies. 
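A note on the control flow used throughout this function: `frozen_lockfile:`, `differ:`, and the various `brk:` blocks above are Zig labeled blocks, where `break :label` exits the block early and can also yield a value. A self-contained example of the idiom (all names invented; the summary printing continues below):

```zig
const std = @import("std");

pub fn main() void {
    const lockfile_changed = false;

    // Exit a labeled block early, like `frozen_lockfile:` above.
    frozen: {
        if (!lockfile_changed) break :frozen; // nothing to check
        std.debug.print("error: lockfile had changes\n", .{});
        return;
    }

    // A labeled block can also yield a value, like the `brk:` blocks here.
    const save_format: []const u8 = fmt: {
        if (lockfile_changed) break :fmt "binary";
        break :fmt "text";
    };
    std.debug.print("saving as {s}\n", .{save_format});
}
```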
- Output.pretty("\nSaved {s} ({d} package{s}) ", .{ - switch (save_format) { - .text => "bun.lock", - .binary => "bun.lockb", - }, - manager.lockfile.packages.len, - if (manager.lockfile.packages.len == 1) "" else "s", - }); - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - Output.pretty("\n", .{}); - } - Output.flush(); - return; - } - - var path_buf: bun.PathBuffer = undefined; - var workspace_filters: std.ArrayListUnmanaged(WorkspaceFilter) = .{}; - // only populated when subcommand is `.install` - if (manager.subcommand == .install and manager.options.filter_patterns.len > 0) { - try workspace_filters.ensureUnusedCapacity(manager.allocator, manager.options.filter_patterns.len); - for (manager.options.filter_patterns) |pattern| { - try workspace_filters.append(manager.allocator, try WorkspaceFilter.init(manager.allocator, pattern, original_cwd, &path_buf)); - } - } - defer workspace_filters.deinit(manager.allocator); - - var install_root_dependencies = workspace_filters.items.len == 0; - if (!install_root_dependencies) { - const pkg_names = manager.lockfile.packages.items(.name); - - const abs_root_path = abs_root_path: { - if (comptime !Environment.isWindows) { - break :abs_root_path strings.withoutTrailingSlash(FileSystem.instance.top_level_dir); - } - - var abs_path = Path.pathToPosixBuf(u8, FileSystem.instance.top_level_dir, &path_buf); - break :abs_root_path strings.withoutTrailingSlash(abs_path[Path.windowsVolumeNameLen(abs_path)[0]..]); - }; - - for (workspace_filters.items) |filter| { - const pattern, const path_or_name = switch (filter) { - .name => |pattern| .{ pattern, pkg_names[0].slice(manager.lockfile.buffers.string_bytes.items) }, - .path => |pattern| .{ pattern, abs_root_path }, - .all => { - install_root_dependencies = true; - continue; - }, - }; - - switch (bun.glob.walk.matchImpl(manager.allocator, pattern, path_or_name)) { - .match, .negate_match => install_root_dependencies = true, - - .negate_no_match => { - // always skip if a pattern specifically says "!" 
- install_root_dependencies = false; - break; - }, - - .no_match => {}, - } - } - } - - var install_summary = PackageInstall.Summary{}; - if (manager.options.do.install_packages) { - install_summary = try @import("./hoisted_install.zig").installHoistedPackages( - manager, - ctx, - workspace_filters.items, - install_root_dependencies, - log_level, - ); - } - - if (log_level != .silent) { - try manager.log.print(Output.errorWriter()); - } - if (had_errors_before_cleaning_lockfile or manager.log.hasErrors()) Global.crash(); - - const did_meta_hash_change = - // If the lockfile was frozen, we already checked it - !manager.options.enable.frozen_lockfile and - if (load_result.loadedFromTextLockfile()) - !try manager.lockfile.eql(lockfile_before_clean, packages_len_before_install, manager.allocator) - else - try manager.lockfile.hasMetaHashChanged( - PackageManager.verbose_install or manager.options.do.print_meta_hash_string, - @min(packages_len_before_install, manager.lockfile.packages.len), - ); - - // It's unnecessary work to re-save the lockfile if there are no changes - const should_save_lockfile = - (load_result == .ok and ((load_result.ok.format == .binary and save_format == .text) or - - // make sure old versions are updated - load_result.ok.format == .text and save_format == .text and manager.lockfile.text_lockfile_version != TextLockfile.Version.current)) or - - // check `save_lockfile` after checking if loaded from binary and save format is text - // because `save_lockfile` is set to false for `--frozen-lockfile` - (manager.options.do.save_lockfile and - (did_meta_hash_change or - had_any_diffs or - manager.update_requests.len > 0 or - (load_result == .ok and load_result.ok.serializer_result.packages_need_update) or - manager.lockfile.isEmpty() or - manager.options.enable.force_save_lockfile)); - - if (should_save_lockfile) { - try manager.saveLockfile(&load_result, save_format, had_any_diffs, lockfile_before_install, packages_len_before_install, log_level); - } - - if (needs_new_lockfile) { - manager.summary.add = @as(u32, @truncate(manager.lockfile.packages.len)); - } - - if (manager.options.do.save_yarn_lock) { - var node: *Progress.Node = undefined; - if (log_level.showProgress()) { - manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; - node = manager.progress.start("Saving yarn.lock", 0); - manager.progress.refresh(); - } else if (log_level != .silent) { - Output.prettyErrorln("Saved yarn.lock", .{}); - Output.flush(); - } - - try manager.writeYarnLock(); - if (log_level.showProgress()) { - node.completeOne(); - manager.progress.refresh(); - manager.progress.root.end(); - manager.progress = .{}; - } - } - - if (manager.options.do.run_scripts and install_root_dependencies and !manager.options.global) { - if (manager.root_lifecycle_scripts) |scripts| { - if (comptime Environment.allow_assert) { - bun.assert(scripts.total > 0); - } - - if (log_level != .silent) { - Output.printError("\n", .{}); - Output.flush(); - } - // root lifecycle scripts can run now that all dependencies are installed, dependency scripts - // have finished, and lockfiles have been saved - const optional = false; - const output_in_foreground = true; - try manager.spawnPackageLifecycleScripts(ctx, scripts, optional, output_in_foreground); - - while (manager.pending_lifecycle_script_tasks.load(.monotonic) > 0) { - manager.reportSlowLifecycleScripts(); - - manager.sleep(); - } - } - } - - if (log_level != .silent) { - try manager.printInstallSummary(ctx, &install_summary, 
did_meta_hash_change, log_level); - } - - if (install_summary.fail > 0) { - manager.any_failed_to_install = true; - } - - Output.flush(); - } - - fn printInstallSummary( - this: *PackageManager, - ctx: Command.Context, - install_summary: *const PackageInstall.Summary, - did_meta_hash_change: bool, - log_level: Options.LogLevel, - ) !void { - var printed_timestamp = false; - if (this.options.do.summary) { - var printer = Lockfile.Printer{ - .lockfile = this.lockfile, - .options = this.options, - .updates = this.update_requests, - .successfully_installed = install_summary.successfully_installed, - }; - - switch (Output.enable_ansi_colors) { - inline else => |enable_ansi_colors| { - try Lockfile.Printer.Tree.print(&printer, this, Output.WriterType, Output.writer(), enable_ansi_colors, log_level); - }, - } - - if (!did_meta_hash_change) { - this.summary.remove = 0; - this.summary.add = 0; - this.summary.update = 0; - } - - if (install_summary.success > 0) { - // it's confusing when it shows 3 packages and says it installed 1 - const pkgs_installed = @max( - install_summary.success, - @as( - u32, - @truncate(this.update_requests.len), - ), - ); - Output.pretty("{d} package{s} installed ", .{ pkgs_installed, if (pkgs_installed == 1) "" else "s" }); - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - printed_timestamp = true; - printBlockedPackagesInfo(install_summary, this.options.global); - - if (this.summary.remove > 0) { - Output.pretty("Removed: {d}\n", .{this.summary.remove}); - } - } else if (this.summary.remove > 0) { - if (this.subcommand == .remove) { - for (this.update_requests) |request| { - Output.prettyln("- {s}", .{request.name}); - } - } - - Output.pretty("{d} package{s} removed ", .{ this.summary.remove, if (this.summary.remove == 1) "" else "s" }); - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - printed_timestamp = true; - printBlockedPackagesInfo(install_summary, this.options.global); - } else if (install_summary.skipped > 0 and install_summary.fail == 0 and this.update_requests.len == 0) { - const count = @as(PackageID, @truncate(this.lockfile.packages.len)); - if (count != install_summary.skipped) { - if (!this.options.enable.only_missing) { - Output.pretty("Checked {d} install{s} across {d} package{s} (no changes) ", .{ - install_summary.skipped, - if (install_summary.skipped == 1) "" else "s", - count, - if (count == 1) "" else "s", - }); - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - } - printed_timestamp = true; - printBlockedPackagesInfo(install_summary, this.options.global); - } else { - Output.pretty("Done! 
Checked {d} package{s} (no changes) ", .{ - install_summary.skipped, - if (install_summary.skipped == 1) "" else "s", - }); - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - printed_timestamp = true; - printBlockedPackagesInfo(install_summary, this.options.global); - } - } - - if (install_summary.fail > 0) { - Output.prettyln("Failed to install {d} package{s}\n", .{ install_summary.fail, if (install_summary.fail == 1) "" else "s" }); - Output.flush(); - } - } - - if (this.options.do.summary) { - if (!printed_timestamp) { - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - Output.prettyln(" done", .{}); - printed_timestamp = true; - } - } - } - - fn saveLockfile( - this: *PackageManager, - load_result: *const Lockfile.LoadResult, - save_format: Lockfile.LoadResult.LockfileFormat, - had_any_diffs: bool, - // TODO(dylan-conway): this and `packages_len_before_install` can most likely be deleted - // now that git dependencies don't append to lockfile during installation. - lockfile_before_install: *const Lockfile, - packages_len_before_install: usize, - log_level: Options.LogLevel, - ) OOM!void { - if (this.lockfile.isEmpty()) { - if (!this.options.dry_run) delete: { - const delete_format = switch (load_result.*) { - .not_found => break :delete, - .err => |err| err.format, - .ok => |ok| ok.format, - }; - - bun.sys.unlinkat( - FD.cwd(), - if (delete_format == .text) comptime bun.OSPathLiteral("bun.lock") else comptime bun.OSPathLiteral("bun.lockb"), - ).unwrap() catch |err| { - // we don't care - if (err == error.ENOENT) { - if (had_any_diffs) return; - break :delete; - } - - if (log_level != .silent) { - Output.err(err, "failed to delete empty lockfile", .{}); - } - return; - }; - } - if (!this.options.global) { - if (log_level != .silent) { - switch (this.subcommand) { - .remove => Output.prettyErrorln("\npackage.json has no dependencies! Deleted empty lockfile", .{}), - else => Output.prettyErrorln("No packages! 
Deleted empty lockfile", .{}), - } - } - } - - return; - } - - var save_node: *Progress.Node = undefined; - - if (log_level.showProgress()) { - this.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; - save_node = this.progress.start(ProgressStrings.save(), 0); - save_node.activate(); - - this.progress.refresh(); - } - - this.lockfile.saveToDisk(load_result, &this.options); - - // delete binary lockfile if saving text lockfile - if (save_format == .text and load_result.loadedFromBinaryLockfile()) { - _ = bun.sys.unlinkat(FD.cwd(), comptime bun.OSPathLiteral("bun.lockb")); - } - - if (comptime Environment.allow_assert) { - if (load_result.* != .not_found) { - if (load_result.loadedFromTextLockfile()) { - if (!try this.lockfile.eql(lockfile_before_install, packages_len_before_install, this.allocator)) { - Output.panic("Lockfile non-deterministic after saving", .{}); - } - } else { - if (this.lockfile.hasMetaHashChanged(false, packages_len_before_install) catch false) { - Output.panic("Lockfile metahash non-deterministic after saving", .{}); - } - } - } - } - - if (log_level.showProgress()) { - save_node.end(); - this.progress.refresh(); - this.progress.root.end(); - this.progress = .{}; - } else if (log_level != .silent) { - Output.prettyErrorln("Saved lockfile", .{}); - Output.flush(); - } - } - - fn printBlockedPackagesInfo(summary: *const PackageInstall.Summary, global: bool) void { - const packages_count = summary.packages_with_blocked_scripts.count(); - var scripts_count: usize = 0; - for (summary.packages_with_blocked_scripts.values()) |count| scripts_count += count; - - if (comptime Environment.allow_assert) { - // if packages_count is greater than 0, scripts_count must also be greater than 0. - bun.assert(packages_count == 0 or scripts_count > 0); - // if scripts_count is 1, it's only possible for packages_count to be 1. - bun.assert(scripts_count != 1 or packages_count == 1); - } - - if (packages_count > 0) { - Output.prettyln("\n\nBlocked {d} postinstall{s}. Run `bun pm {s}untrusted` for details.\n", .{ - scripts_count, - if (scripts_count > 1) "s" else "", - if (global) "-g " else "", - }); - } else { - Output.pretty("\n", .{}); - } - } - - pub fn verifyResolutions(this: *PackageManager, log_level: PackageManager.Options.LogLevel) void { - const lockfile = this.lockfile; - const resolutions_lists: []const Lockfile.DependencyIDSlice = lockfile.packages.items(.resolutions); - const dependency_lists: []const Lockfile.DependencySlice = lockfile.packages.items(.dependencies); - const pkg_resolutions = lockfile.packages.items(.resolution); - const dependencies_buffer = lockfile.buffers.dependencies.items; - const resolutions_buffer = lockfile.buffers.resolutions.items; - const end: PackageID = @truncate(lockfile.packages.len); - - var any_failed = false; - const string_buf = lockfile.buffers.string_bytes.items; - - for (resolutions_lists, dependency_lists, 0..) |resolution_list, dependency_list, parent_id| { - for (resolution_list.get(resolutions_buffer), dependency_list.get(dependencies_buffer)) |package_id, failed_dep| { - if (package_id < end) continue; - - // TODO lockfile rewrite: remove this and make non-optional peer dependencies error if they did not resolve. - // Need to keep this for now because old lockfiles might have a peer dependency without the optional flag set. 
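For context on the loop above and the peer short-circuit just below: "failed to resolve" is encoded purely as a sentinel, since every successfully resolved id indexes into `lockfile.packages`, and anything at or past `end` (including `invalid_package_id`) never resolved. A reduced model of that check, with hypothetical types standing in for Bun's:

```zig
const std = @import("std");

const invalid_package_id = std.math.maxInt(u32);

fn countUnresolved(resolutions: []const u32, is_optional: []const bool, package_count: u32) usize {
    var failed: usize = 0;
    for (resolutions, is_optional) |package_id, optional| {
        // A resolved id always indexes into the package list.
        if (package_id < package_count) continue;
        // Optional dependencies are allowed to fail.
        if (optional) continue;
        failed += 1;
    }
    return failed;
}

pub fn main() void {
    const resolutions = [_]u32{ 0, 2, invalid_package_id, 7 };
    const is_optional = [_]bool{ false, false, true, false };
    std.debug.print("{d} unresolved\n", .{countUnresolved(&resolutions, &is_optional, 5)});
}
```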
- if (failed_dep.behavior.isPeer()) continue; - - const features = switch (pkg_resolutions[parent_id].tag) { - .root, .workspace, .folder => this.options.local_package_features, - else => this.options.remote_package_features, - }; - // even if optional dependencies are enabled, it's still allowed to fail - if (failed_dep.behavior.optional or !failed_dep.behavior.isEnabled(features)) continue; - - if (log_level != .silent) { - if (failed_dep.name.isEmpty() or strings.eqlLong(failed_dep.name.slice(string_buf), failed_dep.version.literal.slice(string_buf), true)) { - Output.errGeneric("{} failed to resolve", .{ - failed_dep.version.literal.fmt(string_buf), - }); - } else { - Output.errGeneric("{s}@{} failed to resolve", .{ - failed_dep.name.slice(string_buf), - failed_dep.version.literal.fmt(string_buf), - }); - } - } - // track this so we can log each failure instead of just the first - any_failed = true; - } - } - - if (any_failed) this.crash(); - } - - const EnqueueTarballForDownloadError = NetworkTask.ForTarballError; - - pub fn enqueueTarballForDownload( - this: *PackageManager, - dependency_id: DependencyID, - package_id: PackageID, - url: string, - task_context: TaskCallbackContext, - patch_name_and_version_hash: ?u64, - ) EnqueueTarballForDownloadError!void { - const task_id = Task.Id.forTarball(url); - var task_queue = try this.task_queue.getOrPut(this.allocator, task_id); - if (!task_queue.found_existing) { - task_queue.value_ptr.* = .{}; - } - - try task_queue.value_ptr.append( - this.allocator, - task_context, - ); - - if (task_queue.found_existing) return; - - if (try this.generateNetworkTaskForTarball( - task_id, - url, - this.lockfile.buffers.dependencies.items[dependency_id].behavior.isRequired(), - dependency_id, - this.lockfile.packages.get(package_id), - patch_name_and_version_hash, - .no_authorization, - )) |task| { - task.schedule(&this.network_tarball_batch); - if (this.network_tarball_batch.len > 0) { - _ = this.scheduleTasks(); - } - } - } - - pub fn enqueueTarballForReading( - this: *PackageManager, - dependency_id: DependencyID, - alias: string, - resolution: *const Resolution, - task_context: TaskCallbackContext, - ) void { - const path = this.lockfile.str(&resolution.value.local_tarball); - const task_id = Task.Id.forTarball(path); - var task_queue = this.task_queue.getOrPut(this.allocator, task_id) catch unreachable; - if (!task_queue.found_existing) { - task_queue.value_ptr.* = .{}; - } - - task_queue.value_ptr.append( - this.allocator, - task_context, - ) catch unreachable; - - if (task_queue.found_existing) return; - - this.task_batch.push(ThreadPool.Batch.from(this.enqueueLocalTarball( - task_id, - dependency_id, - alias, - path, - resolution.*, - ))); - } - - pub fn enqueueGitForCheckout( - this: *PackageManager, - dependency_id: DependencyID, - alias: string, - resolution: *const Resolution, - task_context: TaskCallbackContext, - patch_name_and_version_hash: ?u64, - ) void { - const repository = &resolution.value.git; - const url = this.lockfile.str(&repository.repo); - const clone_id = Task.Id.forGitClone(url); - const resolved = this.lockfile.str(&repository.resolved); - const checkout_id = Task.Id.forGitCheckout(url, resolved); - var checkout_queue = this.task_queue.getOrPut(this.allocator, checkout_id) catch unreachable; - if (!checkout_queue.found_existing) { - checkout_queue.value_ptr.* = .{}; - } - - checkout_queue.value_ptr.append( - this.allocator, - task_context, - ) catch unreachable; - - if (checkout_queue.found_existing) return; - - if 
(this.git_repositories.get(clone_id)) |repo_fd| { - this.task_batch.push(ThreadPool.Batch.from(this.enqueueGitCheckout(checkout_id, repo_fd, dependency_id, alias, resolution.*, resolved, patch_name_and_version_hash))); - } else { - var clone_queue = this.task_queue.getOrPut(this.allocator, clone_id) catch unreachable; - if (!clone_queue.found_existing) { - clone_queue.value_ptr.* = .{}; - } - - clone_queue.value_ptr.append( - this.allocator, - .{ .dependency = dependency_id }, - ) catch unreachable; - - if (clone_queue.found_existing) return; - - this.task_batch.push(ThreadPool.Batch.from(this.enqueueGitClone( - clone_id, - alias, - repository, - dependency_id, - &this.lockfile.buffers.dependencies.items[dependency_id], - resolution, - null, - ))); - } - } - - const EnqueuePackageForDownloadError = NetworkTask.ForTarballError; - - pub fn enqueuePackageForDownload( - this: *PackageManager, - name: []const u8, - dependency_id: DependencyID, - package_id: PackageID, - version: bun.Semver.Version, - url: []const u8, - task_context: TaskCallbackContext, - patch_name_and_version_hash: ?u64, - ) EnqueuePackageForDownloadError!void { - const task_id = Task.Id.forNPMPackage(name, version); - var task_queue = try this.task_queue.getOrPut(this.allocator, task_id); - if (!task_queue.found_existing) { - task_queue.value_ptr.* = .{}; - } - - try task_queue.value_ptr.append( - this.allocator, - task_context, - ); - - if (task_queue.found_existing) return; - - const is_required = this.lockfile.buffers.dependencies.items[dependency_id].behavior.isRequired(); - - if (try this.generateNetworkTaskForTarball( - task_id, - url, - is_required, - dependency_id, - this.lockfile.packages.get(package_id), - patch_name_and_version_hash, - .allow_authorization, - )) |task| { - task.schedule(&this.network_tarball_batch); - if (this.network_tarball_batch.len > 0) { - _ = this.scheduleTasks(); - } - } - } - - pub fn spawnPackageLifecycleScripts( - this: *PackageManager, - ctx: Command.Context, - list: Lockfile.Package.Scripts.List, - optional: bool, - foreground: bool, - ) !void { - const log_level = this.options.log_level; - var any_scripts = false; - for (list.items) |maybe_item| { - if (maybe_item != null) { - any_scripts = true; - break; - } - } - if (!any_scripts) { - return; - } - - try this.ensureTempNodeGypScript(); - - const cwd = list.cwd; - const this_transpiler = try this.configureEnvForScripts(ctx, log_level); - const original_path = this_transpiler.env.get("PATH") orelse ""; - - var PATH = try std.ArrayList(u8).initCapacity(bun.default_allocator, original_path.len + 1 + "node_modules/.bin".len + cwd.len + 1); - var current_dir: ?*DirInfo = this_transpiler.resolver.readDirInfo(cwd) catch null; - bun.assert(current_dir != null); - while (current_dir) |dir| { - if (PATH.items.len > 0 and PATH.items[PATH.items.len - 1] != std.fs.path.delimiter) { - try PATH.append(std.fs.path.delimiter); - } - try PATH.appendSlice(strings.withoutTrailingSlash(dir.abs_path)); - if (!(dir.abs_path.len == 1 and dir.abs_path[0] == std.fs.path.sep)) { - try PATH.append(std.fs.path.sep); - } - try PATH.appendSlice(this.options.bin_path); - current_dir = dir.getParent(); - } - - if (original_path.len > 0) { - if (PATH.items.len > 0 and PATH.items[PATH.items.len - 1] != std.fs.path.delimiter) { - try PATH.append(std.fs.path.delimiter); - } - - try PATH.appendSlice(original_path); - } - - this_transpiler.env.map.put("PATH", PATH.items) catch unreachable; - - const envp = try 
this_transpiler.env.map.createNullDelimitedEnvMap(this.allocator); - try this_transpiler.env.map.put("PATH", original_path); - PATH.deinit(); - - try LifecycleScriptSubprocess.spawnPackageScripts(this, list, envp, optional, log_level, foreground); - } -}; - -const Package = Lockfile.Package; pub const PackageManifestError = error{ PackageManifestHTTP400, @@ -10195,4 +237,47 @@ pub const PackageManifestError = error{ PackageManifestHTTP5xx, }; +// @sortImports + +pub const ExtractTarball = @import("./extract_tarball.zig"); +pub const NetworkTask = @import("NetworkTask.zig"); +pub const Npm = @import("./npm.zig"); +pub const PackageManager = @import("PackageManager.zig"); +pub const PackageManifestMap = @import("PackageManifestMap.zig"); +pub const Task = @import("PackageManagerTask.zig"); +pub const TextLockfile = @import("./lockfile/bun.lock.zig"); +const std = @import("std"); +pub const Bin = @import("./bin.zig").Bin; +pub const FolderResolution = @import("./resolvers/folder_resolver.zig").FolderResolution; pub const LifecycleScriptSubprocess = @import("./lifecycle_script_runner.zig").LifecycleScriptSubprocess; +pub const PackageInstall = @import("./PackageInstall.zig").PackageInstall; +pub const Repository = @import("./repository.zig").Repository; +pub const Resolution = @import("./resolution.zig").Resolution; +pub const Store = @import("./isolated_install/Store.zig").Store; + +pub const ArrayIdentityContext = @import("../identity_context.zig").ArrayIdentityContext; +pub const IdentityContext = @import("../identity_context.zig").IdentityContext; + +pub const external = @import("./ExternalSlice.zig"); +pub const ExternalPackageNameHashList = external.ExternalPackageNameHashList; +pub const ExternalSlice = external.ExternalSlice; +pub const ExternalStringList = external.ExternalStringList; +pub const ExternalStringMap = external.ExternalStringMap; +pub const VersionSlice = external.VersionSlice; + +pub const Dependency = @import("./dependency.zig"); +pub const Behavior = @import("./dependency.zig").Behavior; + +pub const Lockfile = @import("./lockfile.zig"); +pub const PatchedDep = Lockfile.PatchedDep; + +pub const patch = @import("./patch_install.zig"); +pub const PatchTask = patch.PatchTask; + +const bun = @import("bun"); +const JSAst = bun.JSAst; +const default_allocator = bun.default_allocator; +const string = bun.string; + +const Semver = bun.Semver; +const String = Semver.String; diff --git a/src/install/isolated_install.zig b/src/install/isolated_install.zig new file mode 100644 index 0000000000..5052fa7a50 --- /dev/null +++ b/src/install/isolated_install.zig @@ -0,0 +1,985 @@ +const log = Output.scoped(.IsolatedInstall, false); + +pub fn installIsolatedPackages( + manager: *PackageManager, + command_ctx: Command.Context, + install_root_dependencies: bool, + workspace_filters: []const WorkspaceFilter, +) OOM!PackageInstall.Summary { + bun.Analytics.Features.isolated_bun_install += 1; + + const lockfile = manager.lockfile; + + const store: Store = store: { + var timer = std.time.Timer.start() catch unreachable; + const pkgs = lockfile.packages.slice(); + const pkg_dependency_slices = pkgs.items(.dependencies); + const pkg_resolutions = pkgs.items(.resolution); + const pkg_names = pkgs.items(.name); + + const resolutions = lockfile.buffers.resolutions.items; + const dependencies = lockfile.buffers.dependencies.items; + const string_buf = lockfile.buffers.string_bytes.items; + + var nodes: Store.Node.List = .empty; + + const QueuedNode = struct { + parent_id: Store.Node.Id, + dep_id: 
DependencyID, + pkg_id: PackageID, + }; + + var node_queue: std.fifo.LinearFifo(QueuedNode, .Dynamic) = .init(lockfile.allocator); + defer node_queue.deinit(); + + try node_queue.writeItem(.{ + .parent_id = .invalid, + .dep_id = invalid_dependency_id, + .pkg_id = 0, + }); + + var dep_ids_sort_buf: std.ArrayListUnmanaged(DependencyID) = .empty; + defer dep_ids_sort_buf.deinit(lockfile.allocator); + + // Used by leaves and linked dependencies. They can be deduplicated early + // because peers won't change them. + // + // In the pnpm repo without this map: 772,471 nodes + // and with this map: 314,022 nodes + var early_dedupe: std.AutoHashMapUnmanaged(PackageID, Store.Node.Id) = .empty; + defer early_dedupe.deinit(lockfile.allocator); + + var peer_dep_ids: std.ArrayListUnmanaged(DependencyID) = .empty; + defer peer_dep_ids.deinit(lockfile.allocator); + + var visited_parent_node_ids: std.ArrayListUnmanaged(Store.Node.Id) = .empty; + defer visited_parent_node_ids.deinit(lockfile.allocator); + + // First pass: create full dependency tree with resolved peers + next_node: while (node_queue.readItem()) |entry| { + { + // check for cycles + const nodes_slice = nodes.slice(); + const node_pkg_ids = nodes_slice.items(.pkg_id); + const node_parent_ids = nodes_slice.items(.parent_id); + const node_nodes = nodes_slice.items(.nodes); + + var curr_id = entry.parent_id; + while (curr_id != .invalid) { + if (node_pkg_ids[curr_id.get()] == entry.pkg_id) { + // skip the new node, and add the previously added node to parent so it appears in + // 'node_modules/.bun/parent@version/node_modules' + node_nodes[entry.parent_id.get()].appendAssumeCapacity(curr_id); + continue :next_node; + } + curr_id = node_parent_ids[curr_id.get()]; + } + } + + const node_id: Store.Node.Id = .from(@intCast(nodes.len)); + const pkg_deps = pkg_dependency_slices[entry.pkg_id]; + + var skip_dependencies_of_workspace_node = false; + if (entry.dep_id != invalid_dependency_id) { + const entry_dep = dependencies[entry.dep_id]; + if (pkg_deps.len == 0 or entry_dep.isWorkspaceDep()) dont_dedupe: { + const dedupe_entry = try early_dedupe.getOrPut(lockfile.allocator, entry.pkg_id); + if (dedupe_entry.found_existing) { + const dedupe_node_id = dedupe_entry.value_ptr.*; + + const nodes_slice = nodes.slice(); + const node_nodes = nodes_slice.items(.nodes); + const node_dep_ids = nodes_slice.items(.dep_id); + + const dedupe_dep_id = node_dep_ids[dedupe_node_id.get()]; + const dedupe_dep = dependencies[dedupe_dep_id]; + + if (dedupe_dep.name_hash != entry_dep.name_hash) { + break :dont_dedupe; + } + + if (dedupe_dep.isWorkspaceDep() and entry_dep.isWorkspaceDep()) { + if (dedupe_dep.behavior.isWorkspaceOnly() != entry_dep.behavior.isWorkspaceOnly()) { + // only attach the dependencies to one of the workspaces + skip_dependencies_of_workspace_node = true; + break :dont_dedupe; + } + } + + node_nodes[entry.parent_id.get()].appendAssumeCapacity(dedupe_node_id); + continue; + } + + dedupe_entry.value_ptr.* = node_id; + } + } + + try nodes.append(lockfile.allocator, .{ + .pkg_id = entry.pkg_id, + .dep_id = entry.dep_id, + .parent_id = entry.parent_id, + .nodes = if (skip_dependencies_of_workspace_node) .empty else try .initCapacity(lockfile.allocator, pkg_deps.len), + .dependencies = if (skip_dependencies_of_workspace_node) .empty else try .initCapacity(lockfile.allocator, pkg_deps.len), + }); + + const nodes_slice = nodes.slice(); + const node_parent_ids = nodes_slice.items(.parent_id); + const node_dependencies = nodes_slice.items(.dependencies); + 
const node_peers = nodes_slice.items(.peers);
+            const node_nodes = nodes_slice.items(.nodes);
+
+            if (entry.parent_id.tryGet()) |parent_id| {
+                node_nodes[parent_id].appendAssumeCapacity(node_id);
+            }
+
+            if (skip_dependencies_of_workspace_node) {
+                continue;
+            }
+
+            dep_ids_sort_buf.clearRetainingCapacity();
+            try dep_ids_sort_buf.ensureUnusedCapacity(lockfile.allocator, pkg_deps.len);
+            for (pkg_deps.begin()..pkg_deps.end()) |_dep_id| {
+                const dep_id: DependencyID = @intCast(_dep_id);
+                dep_ids_sort_buf.appendAssumeCapacity(dep_id);
+            }
+
+            // TODO: make this sort in an order that allows peers to be resolved last
+            // and devDependency handling to match `hoistDependency`
+            std.sort.pdq(
+                DependencyID,
+                dep_ids_sort_buf.items,
+                Lockfile.DepSorter{
+                    .lockfile = lockfile,
+                },
+                Lockfile.DepSorter.isLessThan,
+            );
+
+            peer_dep_ids.clearRetainingCapacity();
+            for (dep_ids_sort_buf.items) |dep_id| {
+                if (Tree.isFilteredDependencyOrWorkspace(
+                    dep_id,
+                    entry.pkg_id,
+                    workspace_filters,
+                    install_root_dependencies,
+                    manager,
+                    lockfile,
+                )) {
+                    continue;
+                }
+
+                const pkg_id = resolutions[dep_id];
+                const dep = dependencies[dep_id];
+
+                // TODO: handle duplicate dependencies. should be similar logic
+                // to what we have for dev dependencies in `hoistDependency`
+
+                if (!dep.behavior.isPeer()) {
+                    // simple case:
+                    // - add it as a dependency
+                    // - queue it
+                    node_dependencies[node_id.get()].appendAssumeCapacity(.{ .dep_id = dep_id, .pkg_id = pkg_id });
+                    try node_queue.writeItem(.{
+                        .parent_id = node_id,
+                        .dep_id = dep_id,
+                        .pkg_id = pkg_id,
+                    });
+                    continue;
+                }
+
+                try peer_dep_ids.append(lockfile.allocator, dep_id);
+            }
+
+            next_peer: for (peer_dep_ids.items) |peer_dep_id| {
+                const resolved_pkg_id, const auto_installed = resolved_pkg_id: {
+
+                    // Go through the peer's parents looking for a package with the same name.
+                    // If none is found, use the current best version. Parents visited must have
+                    // the package id for the chosen peer marked as a transitive peer. Nodes
+                    // are deduplicated only if their package id and their transitive peer package
+                    // ids are equal.
+                    const peer_dep = dependencies[peer_dep_id];
+
+                    // TODO: double check this
+                    // Start with the current package. A package
+                    // can satisfy its own peers.
+                    var curr_id = node_id;
+
+                    visited_parent_node_ids.clearRetainingCapacity();
+                    while (curr_id != .invalid) {
+                        for (node_dependencies[curr_id.get()].items) |ids| {
+                            const dep = dependencies[ids.dep_id];
+
+                            if (dep.name_hash != peer_dep.name_hash) {
+                                continue;
+                            }
+
+                            const res = pkg_resolutions[ids.pkg_id];
+
+                            if (peer_dep.version.tag != .npm or res.tag != .npm) {
+                                // TODO: print a warning for this? we don't have a version
+                                // to compare against to say whether this satisfies or not.
+                                break :resolved_pkg_id .{ ids.pkg_id, false };
+                            }
+
+                            const peer_dep_version = peer_dep.version.value.npm.version;
+                            const res_version = res.value.npm.version;
+
+                            if (!peer_dep_version.satisfies(res_version, string_buf, string_buf)) {
+                                // TODO: add warning!
+                            }
+
+                            break :resolved_pkg_id .{ ids.pkg_id, false };
+                        }
+
+                        const curr_peers = node_peers[curr_id.get()];
+                        for (curr_peers.list.items) |ids| {
+                            const transitive_peer_dep = dependencies[ids.dep_id];
+
+                            if (transitive_peer_dep.name_hash != peer_dep.name_hash) {
+                                continue;
+                            }
+
+                            // A transitive peer with the same name has already passed
+                            // through this node.
+
+                            if (!ids.auto_installed) {
+                                // The resolution was found here or above. Choose the same
+                                // peer resolution. No need to mark this node or above.
+
+                                // TODO: add warning if not satisfies()!
+                                break :resolved_pkg_id .{ ids.pkg_id, false };
+                            }
+
+                            // The earlier search didn't find a matching name, so this
+                            // transitive peer was auto-installed from somewhere this peer
+                            // can't reach. Choose the best version. Only mark all parents
+                            // if the resolution is different from this transitive peer.
+
+                            if (peer_dep.behavior.isOptionalPeer()) {
+                                // exclude it
+                                continue :next_peer;
+                            }
+
+                            const best_version = resolutions[peer_dep_id];
+
+                            if (best_version == ids.pkg_id) {
+                                break :resolved_pkg_id .{ ids.pkg_id, true };
+                            }
+
+                            // add the remaining parent ids
+                            while (curr_id != .invalid) {
+                                try visited_parent_node_ids.append(lockfile.allocator, curr_id);
+                                curr_id = node_parent_ids[curr_id.get()];
+                            }
+
+                            break :resolved_pkg_id .{ best_version, true };
+                        }
+
+                        // TODO: prevent marking workspace and symlink deps with transitive peers
+
+                        // add to visited parents after searching for a peer resolution.
+                        // if a node resolves a transitive peer, it can still be deduplicated
+                        try visited_parent_node_ids.append(lockfile.allocator, curr_id);
+                        curr_id = node_parent_ids[curr_id.get()];
+                    }
+
+                    if (peer_dep.behavior.isOptionalPeer()) {
+                        // exclude it
+                        continue;
+                    }
+
+                    // choose the current best version
+                    break :resolved_pkg_id .{ resolutions[peer_dep_id], true };
+                };
+
+                bun.debugAssert(resolved_pkg_id != invalid_package_id);
+
+                for (visited_parent_node_ids.items) |visited_parent_id| {
+                    const ctx: Store.Node.TransitivePeer.OrderedArraySetCtx = .{
+                        .string_buf = string_buf,
+                        .pkg_names = pkg_names,
+                    };
+                    const peer: Store.Node.TransitivePeer = .{
+                        .dep_id = peer_dep_id,
+                        .pkg_id = resolved_pkg_id,
+                        .auto_installed = auto_installed,
+                    };
+                    try node_peers[visited_parent_id.get()].insert(lockfile.allocator, peer, &ctx);
+                }
+
+                if (visited_parent_node_ids.items.len != 0) {
+                    // visited parents length == 0 means the node satisfied its own
+                    // peer. don't queue.
+                    node_dependencies[node_id.get()].appendAssumeCapacity(.{ .dep_id = peer_dep_id, .pkg_id = resolved_pkg_id });
+                    try node_queue.writeItem(.{
+                        .parent_id = node_id,
+                        .dep_id = peer_dep_id,
+                        .pkg_id = resolved_pkg_id,
+                    });
+                }
+            }
+        }
+
+        if (manager.options.log_level.isVerbose()) {
+            const full_tree_end = timer.read();
+            timer.reset();
+            Output.prettyErrorln("Resolved peers [{}]", .{bun.fmt.fmtDurationOneDecimal(full_tree_end)});
+        }
+
+        const DedupeInfo = struct {
+            entry_id: Store.Entry.Id,
+            dep_id: DependencyID,
+            peers: Store.OrderedArraySet(Store.Node.TransitivePeer, Store.Node.TransitivePeer.OrderedArraySetCtx),
+        };
+
+        var dedupe: std.AutoHashMapUnmanaged(PackageID, std.ArrayListUnmanaged(DedupeInfo)) = .empty;
+        defer dedupe.deinit(lockfile.allocator);
+
+        var res_fmt_buf: std.ArrayList(u8) = .init(lockfile.allocator);
+        defer res_fmt_buf.deinit();
+
+        const nodes_slice = nodes.slice();
+        const node_pkg_ids = nodes_slice.items(.pkg_id);
+        const node_dep_ids = nodes_slice.items(.dep_id);
+        const node_peers: []const Store.Node.Peers = nodes_slice.items(.peers);
+        const node_nodes = nodes_slice.items(.nodes);
+
+        var store: Store.Entry.List = .empty;
+
+        const QueuedEntry = struct {
+            node_id: Store.Node.Id,
+            entry_parent_id: Store.Entry.Id,
+        };
+        var entry_queue: std.fifo.LinearFifo(QueuedEntry, .Dynamic) = .init(lockfile.allocator);
+        defer entry_queue.deinit();
+
+        try entry_queue.writeItem(.{
+            .node_id = .from(0),
+            .entry_parent_id = .invalid,
+        });
+
+        // Second pass: Deduplicate nodes when the pkg_id and peer set match an existing entry.
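+        //
+        // Illustration (hypothetical packages): if `lib@1.0.0` appears twice in the
+        // tree, once with its peer resolved to `react@18.0.0` and once to
+        // `react@17.0.0`, the peer sets differ and two separate store entries are
+        // created. If both occurrences resolve `react@18.0.0`, the second one
+        // reuses the first entry and only gains an additional parent edge.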
+ next_entry: while (entry_queue.readItem()) |entry| { + const pkg_id = node_pkg_ids[entry.node_id.get()]; + + const dedupe_entry = try dedupe.getOrPut(lockfile.allocator, pkg_id); + if (!dedupe_entry.found_existing) { + dedupe_entry.value_ptr.* = .{}; + } else { + const curr_peers = node_peers[entry.node_id.get()]; + const curr_dep_id = node_dep_ids[entry.node_id.get()]; + + for (dedupe_entry.value_ptr.items) |info| { + if (info.dep_id != invalid_dependency_id and curr_dep_id != invalid_dependency_id) { + const curr_dep = dependencies[curr_dep_id]; + const existing_dep = dependencies[info.dep_id]; + + if (existing_dep.isWorkspaceDep() and curr_dep.isWorkspaceDep()) { + if (existing_dep.behavior.isWorkspaceOnly() != curr_dep.behavior.isWorkspaceOnly()) { + continue; + } + } + } + + const eql_ctx: Store.Node.TransitivePeer.OrderedArraySetCtx = .{ + .string_buf = string_buf, + .pkg_names = pkg_names, + }; + + if (info.peers.eql(&curr_peers, &eql_ctx)) { + // dedupe! depend on the already created entry + + const entries = store.slice(); + const entry_dependencies = entries.items(.dependencies); + const entry_parents = entries.items(.parents); + + var parents = &entry_parents[info.entry_id.get()]; + + if (curr_dep_id != invalid_dependency_id and dependencies[curr_dep_id].behavior.isWorkspaceOnly()) { + try parents.append(lockfile.allocator, entry.entry_parent_id); + continue :next_entry; + } + const ctx: Store.Entry.DependenciesOrderedArraySetCtx = .{ + .string_buf = string_buf, + .dependencies = dependencies, + }; + try entry_dependencies[entry.entry_parent_id.get()].insert( + lockfile.allocator, + .{ .entry_id = info.entry_id, .dep_id = curr_dep_id }, + &ctx, + ); + try parents.append(lockfile.allocator, entry.entry_parent_id); + continue :next_entry; + } + } + + // nothing matched - create a new entry + } + + const new_entry_peer_hash: Store.Entry.PeerHash = peer_hash: { + const peers = node_peers[entry.node_id.get()]; + if (peers.len() == 0) { + break :peer_hash .none; + } + var hasher = bun.Wyhash11.init(0); + for (peers.slice()) |peer_ids| { + const pkg_name = pkg_names[peer_ids.pkg_id]; + hasher.update(pkg_name.slice(string_buf)); + const pkg_res = pkg_resolutions[peer_ids.pkg_id]; + res_fmt_buf.clearRetainingCapacity(); + try res_fmt_buf.writer().print("{}", .{pkg_res.fmt(string_buf, .posix)}); + hasher.update(res_fmt_buf.items); + } + break :peer_hash .from(hasher.final()); + }; + + const new_entry_dep_id = node_dep_ids[entry.node_id.get()]; + + const new_entry_is_root = new_entry_dep_id == invalid_dependency_id; + const new_entry_is_workspace = !new_entry_is_root and dependencies[new_entry_dep_id].isWorkspaceDep(); + + const new_entry_dependencies: Store.Entry.Dependencies = if (dedupe_entry.found_existing and new_entry_is_workspace) + .empty + else + try .initCapacity(lockfile.allocator, node_nodes[entry.node_id.get()].items.len); + + var new_entry_parents: std.ArrayListUnmanaged(Store.Entry.Id) = try .initCapacity(lockfile.allocator, 1); + new_entry_parents.appendAssumeCapacity(entry.entry_parent_id); + + const new_entry: Store.Entry = .{ + .node_id = entry.node_id, + .dependencies = new_entry_dependencies, + .parents = new_entry_parents, + .peer_hash = new_entry_peer_hash, + }; + + const new_entry_id: Store.Entry.Id = .from(@intCast(store.len)); + try store.append(lockfile.allocator, new_entry); + + if (entry.entry_parent_id.tryGet()) |entry_parent_id| skip_adding_dependency: { + if (new_entry_dep_id != invalid_dependency_id and 
dependencies[new_entry_dep_id].behavior.isWorkspaceOnly()) { + // skip implicit workspace dependencies on the root. + break :skip_adding_dependency; + } + + const entries = store.slice(); + const entry_dependencies = entries.items(.dependencies); + const ctx: Store.Entry.DependenciesOrderedArraySetCtx = .{ + .string_buf = string_buf, + .dependencies = dependencies, + }; + try entry_dependencies[entry_parent_id].insert( + lockfile.allocator, + .{ .entry_id = new_entry_id, .dep_id = new_entry_dep_id }, + &ctx, + ); + } + + try dedupe_entry.value_ptr.append(lockfile.allocator, .{ + .entry_id = new_entry_id, + .dep_id = new_entry_dep_id, + .peers = node_peers[entry.node_id.get()], + }); + + for (node_nodes[entry.node_id.get()].items) |node_id| { + try entry_queue.writeItem(.{ + .node_id = node_id, + .entry_parent_id = new_entry_id, + }); + } + } + + if (manager.options.log_level.isVerbose()) { + const dedupe_end = timer.read(); + Output.prettyErrorln("Created store [{}]", .{bun.fmt.fmtDurationOneDecimal(dedupe_end)}); + } + + break :store .{ + .entries = store, + .nodes = nodes, + }; + }; + + const cwd = FD.cwd(); + + const root_node_modules_dir, const is_new_root_node_modules, const bun_modules_dir, const is_new_bun_modules = root_dirs: { + const node_modules_path = bun.OSPathLiteral("node_modules"); + const bun_modules_path = bun.OSPathLiteral("node_modules/" ++ Store.modules_dir_name); + const existing_root_node_modules_dir = sys.openatOSPath(cwd, node_modules_path, bun.O.DIRECTORY | bun.O.RDONLY, 0o755).unwrap() catch { + sys.mkdirat(cwd, node_modules_path, 0o755).unwrap() catch |err| { + Output.err(err, "failed to create the './node_modules' directory", .{}); + Global.exit(1); + }; + + sys.mkdirat(cwd, bun_modules_path, 0o755).unwrap() catch |err| { + Output.err(err, "failed to create the './node_modules/.bun' directory", .{}); + Global.exit(1); + }; + + const new_root_node_modules_dir = sys.openatOSPath(cwd, node_modules_path, bun.O.DIRECTORY | bun.O.RDONLY, 0o755).unwrap() catch |err| { + Output.err(err, "failed to open the './node_modules' directory", .{}); + Global.exit(1); + }; + + const new_bun_modules_dir = sys.openatOSPath(cwd, bun_modules_path, bun.O.DIRECTORY | bun.O.RDONLY, 0o755).unwrap() catch |err| { + Output.err(err, "failed to open the './node_modules/.bun' directory", .{}); + Global.exit(1); + }; + + break :root_dirs .{ + new_root_node_modules_dir, + true, + new_bun_modules_dir, + true, + }; + }; + + const existing_bun_modules_dir = sys.openatOSPath(cwd, bun_modules_path, bun.O.DIRECTORY | bun.O.RDONLY, 0o755).unwrap() catch { + sys.mkdirat(cwd, bun_modules_path, 0o755).unwrap() catch |err| { + Output.err(err, "failed to create the './node_modules/.bun' directory", .{}); + Global.exit(1); + }; + + const new_bun_modules_dir = sys.openatOSPath(cwd, bun_modules_path, bun.O.DIRECTORY | bun.O.RDONLY, 0o755).unwrap() catch |err| { + Output.err(err, "failed to open the './node_modules/.bun' directory", .{}); + Global.exit(1); + }; + + break :root_dirs .{ + existing_root_node_modules_dir, + false, + new_bun_modules_dir, + true, + }; + }; + + break :root_dirs .{ + existing_root_node_modules_dir, + false, + existing_bun_modules_dir, + false, + }; + }; + _ = root_node_modules_dir; + _ = is_new_root_node_modules; + _ = bun_modules_dir; + // _ = is_new_bun_modules; + + { + var root_node: *Progress.Node = undefined; + // var download_node: Progress.Node = undefined; + var install_node: Progress.Node = undefined; + var scripts_node: Progress.Node = undefined; + var progress = 
&manager.progress; + + if (manager.options.log_level.showProgress()) { + progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; + root_node = progress.start("", 0); + // download_node = root_node.start(ProgressStrings.download(), 0); + install_node = root_node.start(ProgressStrings.install(), store.entries.len); + scripts_node = root_node.start(ProgressStrings.script(), 0); + + manager.downloads_node = null; + manager.scripts_node = &scripts_node; + } + + const nodes_slice = store.nodes.slice(); + const node_pkg_ids = nodes_slice.items(.pkg_id); + const node_dep_ids = nodes_slice.items(.dep_id); + + const entries = store.entries.slice(); + const entry_node_ids = entries.items(.node_id); + const entry_steps = entries.items(.step); + const entry_dependencies = entries.items(.dependencies); + + const string_buf = lockfile.buffers.string_bytes.items; + + const pkgs = lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + const pkg_name_hashes = pkgs.items(.name_hash); + const pkg_resolutions = pkgs.items(.resolution); + + var seen_entry_ids: std.AutoHashMapUnmanaged(Store.Entry.Id, void) = .empty; + defer seen_entry_ids.deinit(lockfile.allocator); + try seen_entry_ids.ensureTotalCapacity(lockfile.allocator, @intCast(store.entries.len)); + + // TODO: delete + var seen_workspace_ids: std.AutoHashMapUnmanaged(PackageID, void) = .empty; + defer seen_workspace_ids.deinit(lockfile.allocator); + + var installer: Store.Installer = .{ + .lockfile = lockfile, + .manager = manager, + .command_ctx = command_ctx, + .installed = try .initEmpty(manager.allocator, lockfile.packages.len), + .install_node = if (manager.options.log_level.showProgress()) &install_node else null, + .scripts_node = if (manager.options.log_level.showProgress()) &scripts_node else null, + .store = &store, + .preallocated_tasks = .init(bun.default_allocator), + .trusted_dependencies_mutex = .{}, + .trusted_dependencies_from_update_requests = manager.findTrustedDependenciesFromUpdateRequests(), + }; + + // add the pending task count upfront + _ = manager.incrementPendingTasks(@intCast(store.entries.len)); + + for (0..store.entries.len) |_entry_id| { + const entry_id: Store.Entry.Id = .from(@intCast(_entry_id)); + + const node_id = entry_node_ids[entry_id.get()]; + const pkg_id = node_pkg_ids[node_id.get()]; + + const pkg_name = pkg_names[pkg_id]; + const pkg_name_hash = pkg_name_hashes[pkg_id]; + const pkg_res: Resolution = pkg_resolutions[pkg_id]; + + switch (pkg_res.tag) { + else => { + // this is `uninitialized` or `single_file_module`. + bun.debugAssert(false); + entry_steps[entry_id.get()].store(.done, .monotonic); + installer.onTaskComplete(entry_id, .skipped); + continue; + }, + .root => { + if (entry_id == .root) { + entry_steps[entry_id.get()].store(.symlink_dependencies, .monotonic); + installer.startTask(entry_id); + continue; + } + entry_steps[entry_id.get()].store(.done, .monotonic); + installer.onTaskComplete(entry_id, .skipped); + continue; + }, + .workspace => { + // if injected=true this might be false + if (!(try seen_workspace_ids.getOrPut(lockfile.allocator, pkg_id)).found_existing) { + entry_steps[entry_id.get()].store(.symlink_dependencies, .monotonic); + installer.startTask(entry_id); + continue; + } + entry_steps[entry_id.get()].store(.done, .monotonic); + installer.onTaskComplete(entry_id, .success); + continue; + }, + .symlink => { + // no installation required, will only need to be linked to packages that depend on it. 
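+                    // Dependents will later resolve the link target through `appendStorePath`,
+                    // which for `symlink` resolutions points at `manager.globalLinkDirPath()`
+                    // rather than at the store.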
+ bun.debugAssert(entry_dependencies[entry_id.get()].list.items.len == 0); + entry_steps[entry_id.get()].store(.done, .monotonic); + installer.onTaskComplete(entry_id, .skipped); + continue; + }, + .folder => { + // folders are always hardlinked to keep them up-to-date + installer.startTask(entry_id); + continue; + }, + + inline .npm, + .git, + .github, + .local_tarball, + .remote_tarball, + => |pkg_res_tag| { + const patch_info = try installer.packagePatchInfo(pkg_name, pkg_name_hash, &pkg_res); + + const needs_install = + manager.options.enable.force_install or + is_new_bun_modules or + patch_info == .remove or + needs_install: { + var store_path: bun.AbsPath(.{}) = .initTopLevelDir(); + defer store_path.deinit(); + installer.appendStorePath(&store_path, entry_id); + const exists = sys.existsZ(store_path.sliceZ()); + + break :needs_install switch (patch_info) { + .none => !exists, + // checked above + .remove => unreachable, + .patch => |patch| { + var hash_buf: install.BuntagHashBuf = undefined; + const hash = install.buntaghashbuf_make(&hash_buf, patch.contents_hash); + var patch_tag_path: bun.AbsPath(.{}) = .initTopLevelDir(); + defer patch_tag_path.deinit(); + installer.appendStorePath(&patch_tag_path, entry_id); + patch_tag_path.append(hash); + break :needs_install !sys.existsZ(patch_tag_path.sliceZ()); + }, + }; + }; + + if (!needs_install) { + entry_steps[entry_id.get()].store(.done, .monotonic); + installer.onTaskComplete(entry_id, .skipped); + continue; + } + + var pkg_cache_dir_subpath: bun.RelPath(.{ .sep = .auto }) = .from(switch (pkg_res_tag) { + .npm => manager.cachedNPMPackageFolderName(pkg_name.slice(string_buf), pkg_res.value.npm.version, patch_info.contentsHash()), + .git => manager.cachedGitFolderName(&pkg_res.value.git, patch_info.contentsHash()), + .github => manager.cachedGitHubFolderName(&pkg_res.value.github, patch_info.contentsHash()), + .local_tarball => manager.cachedTarballFolderName(pkg_res.value.local_tarball, patch_info.contentsHash()), + .remote_tarball => manager.cachedTarballFolderName(pkg_res.value.remote_tarball, patch_info.contentsHash()), + + else => comptime unreachable, + }); + defer pkg_cache_dir_subpath.deinit(); + + const cache_dir, const cache_dir_path = manager.getCacheDirectoryAndAbsPath(); + defer cache_dir_path.deinit(); + + const missing_from_cache = switch (manager.getPreinstallState(pkg_id)) { + .done => false, + else => missing_from_cache: { + if (patch_info == .none) { + const exists = switch (pkg_res_tag) { + .npm => exists: { + var cache_dir_path_save = pkg_cache_dir_subpath.save(); + defer cache_dir_path_save.restore(); + pkg_cache_dir_subpath.append("package.json"); + break :exists sys.existsAt(cache_dir, pkg_cache_dir_subpath.sliceZ()); + }, + else => sys.directoryExistsAt(cache_dir, pkg_cache_dir_subpath.sliceZ()).unwrapOr(false), + }; + if (exists) { + manager.setPreinstallState(pkg_id, installer.lockfile, .done); + } + break :missing_from_cache !exists; + } + + // TODO: why does this look like it will never work? 
+ break :missing_from_cache true; + }, + }; + + if (!missing_from_cache) { + installer.startTask(entry_id); + continue; + } + + const ctx: install.TaskCallbackContext = .{ + .isolated_package_install_context = entry_id, + }; + + const dep_id = node_dep_ids[node_id.get()]; + const dep = lockfile.buffers.dependencies.items[dep_id]; + switch (pkg_res_tag) { + .npm => { + manager.enqueuePackageForDownload( + pkg_name.slice(string_buf), + dep_id, + pkg_id, + pkg_res.value.npm.version, + pkg_res.value.npm.url.slice(string_buf), + ctx, + patch_info.nameAndVersionHash(), + ) catch |err| switch (err) { + error.OutOfMemory => |oom| return oom, + error.InvalidURL => { + Output.err(err, "failed to enqueue package for download: {s}@{}", .{ + pkg_name.slice(string_buf), + pkg_res.fmt(string_buf, .auto), + }); + Output.flush(); + if (manager.options.enable.fail_early) { + Global.exit(1); + } + entry_steps[entry_id.get()].store(.done, .monotonic); + installer.onTaskComplete(entry_id, .fail); + continue; + }, + }; + }, + .git => { + manager.enqueueGitForCheckout( + dep_id, + dep.name.slice(string_buf), + &pkg_res, + ctx, + patch_info.nameAndVersionHash(), + ); + }, + .github => { + const url = manager.allocGitHubURL(&pkg_res.value.git); + defer manager.allocator.free(url); + manager.enqueueTarballForDownload( + dep_id, + pkg_id, + url, + ctx, + patch_info.nameAndVersionHash(), + ) catch |err| switch (err) { + error.OutOfMemory => bun.outOfMemory(), + error.InvalidURL => { + Output.err(err, "failed to enqueue github package for download: {s}@{}", .{ + pkg_name.slice(string_buf), + pkg_res.fmt(string_buf, .auto), + }); + Output.flush(); + if (manager.options.enable.fail_early) { + Global.exit(1); + } + entry_steps[entry_id.get()].store(.done, .monotonic); + installer.onTaskComplete(entry_id, .fail); + continue; + }, + }; + }, + .local_tarball => { + manager.enqueueTarballForReading( + dep_id, + dep.name.slice(string_buf), + &pkg_res, + ctx, + ); + }, + .remote_tarball => { + manager.enqueueTarballForDownload( + dep_id, + pkg_id, + pkg_res.value.remote_tarball.slice(string_buf), + ctx, + patch_info.nameAndVersionHash(), + ) catch |err| switch (err) { + error.OutOfMemory => bun.outOfMemory(), + error.InvalidURL => { + Output.err(err, "failed to enqueue tarball for download: {s}@{}", .{ + pkg_name.slice(string_buf), + pkg_res.fmt(string_buf, .auto), + }); + Output.flush(); + if (manager.options.enable.fail_early) { + Global.exit(1); + } + entry_steps[entry_id.get()].store(.done, .monotonic); + installer.onTaskComplete(entry_id, .fail); + continue; + }, + }; + }, + else => comptime unreachable, + } + }, + } + } + + if (manager.pendingTaskCount() > 0) { + const Wait = struct { + installer: *Store.Installer, + manager: *PackageManager, + err: ?anyerror = null, + + pub fn isDone(wait: *@This()) bool { + wait.manager.runTasks( + *Store.Installer, + wait.installer, + .{ + .onExtract = Store.Installer.onPackageExtracted, + .onResolve = {}, + .onPackageManifestError = {}, + .onPackageDownloadError = {}, + }, + true, + wait.manager.options.log_level, + ) catch |err| { + wait.err = err; + return true; + }; + + return wait.manager.pendingTaskCount() == 0; + } + }; + + var wait: Wait = .{ + .manager = manager, + .installer = &installer, + }; + + manager.sleepUntil(&wait, &Wait.isDone); + + if (wait.err) |err| { + Output.err(err, "failed to install packages", .{}); + Global.exit(1); + } + } + + if (manager.options.log_level.showProgress()) { + progress.root.end(); + progress.* = .{}; + } + + if (comptime 
Environment.ci_assert) { + var done = true; + next_entry: for (store.entries.items(.step), 0..) |entry_step, _entry_id| { + const entry_id: Store.Entry.Id = .from(@intCast(_entry_id)); + const step = entry_step.load(.monotonic); + + if (step == .done) { + continue; + } + + done = false; + + log("entry not done: {d}, {s}\n", .{ entry_id, @tagName(step) }); + + const deps = store.entries.items(.dependencies)[entry_id.get()]; + for (deps.slice()) |dep| { + const dep_step = entry_steps[dep.entry_id.get()].load(.monotonic); + if (dep_step != .done) { + log(", parents:\n - ", .{}); + const parent_ids = Store.Entry.debugGatherAllParents(entry_id, installer.store); + for (parent_ids) |parent_id| { + if (parent_id == .root) { + log("root ", .{}); + } else { + log("{d} ", .{parent_id.get()}); + } + } + + log("\n", .{}); + continue :next_entry; + } + } + + log(" and is able to run\n", .{}); + } + + bun.debugAssert(done); + } + + installer.summary.successfully_installed = installer.installed; + + return installer.summary; + } +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const FD = bun.FD; +const Global = bun.Global; +const OOM = bun.OOM; +const Output = bun.Output; +const Progress = bun.Progress; +const sys = bun.sys; +const Command = bun.CLI.Command; + +const install = bun.install; +const DependencyID = install.DependencyID; +const PackageID = install.PackageID; +const PackageInstall = install.PackageInstall; +const Resolution = install.Resolution; +const Store = install.Store; +const invalid_dependency_id = install.invalid_dependency_id; +const invalid_package_id = install.invalid_package_id; + +const Lockfile = install.Lockfile; +const Tree = Lockfile.Tree; + +const PackageManager = install.PackageManager; +const ProgressStrings = PackageManager.ProgressStrings; +const WorkspaceFilter = PackageManager.WorkspaceFilter; diff --git a/src/install/isolated_install/Hardlinker.zig b/src/install/isolated_install/Hardlinker.zig new file mode 100644 index 0000000000..f3b2a968f9 --- /dev/null +++ b/src/install/isolated_install/Hardlinker.zig @@ -0,0 +1,128 @@ +pub const Hardlinker = struct { + src_dir: FD, + src: bun.AbsPath(.{ .sep = .auto, .unit = .os }), + dest: bun.RelPath(.{ .sep = .auto, .unit = .os }), + + pub fn link(this: *Hardlinker, skip_dirnames: []const bun.OSPathSlice) OOM!sys.Maybe(void) { + var walker: Walker = try .walk( + this.src_dir, + bun.default_allocator, + &.{}, + skip_dirnames, + ); + defer walker.deinit(); + + if (comptime Environment.isWindows) { + while (switch (walker.next()) { + .result => |res| res, + .err => |err| return .initErr(err), + }) |entry| { + var src_save = this.src.save(); + defer src_save.restore(); + + this.src.append(entry.path); + + var dest_save = this.dest.save(); + defer dest_save.restore(); + + this.dest.append(entry.path); + + switch (entry.kind) { + .directory => { + FD.cwd().makePath(u16, this.dest.sliceZ()) catch {}; + }, + .file => { + switch (sys.link(u16, this.src.sliceZ(), this.dest.sliceZ())) { + .result => {}, + .err => |link_err1| switch (link_err1.getErrno()) { + .UV_EEXIST, + .EXIST, + => { + _ = sys.unlinkW(this.dest.sliceZ()); + switch (sys.link(u16, this.src.sliceZ(), this.dest.sliceZ())) { + .result => {}, + .err => |link_err2| return .initErr(link_err2), + } + }, + .UV_ENOENT, + .NOENT, + => { + const dest_parent = this.dest.dirname() orelse { + return .initErr(link_err1); + }; + + FD.cwd().makePath(u16, dest_parent) catch {}; + switch (sys.link(u16, 
this.src.sliceZ(), this.dest.sliceZ())) { + .result => {}, + .err => |link_err2| return .initErr(link_err2), + } + }, + else => return .initErr(link_err1), + }, + } + }, + else => {}, + } + } + + return .success; + } + + while (switch (walker.next()) { + .result => |res| res, + .err => |err| return .initErr(err), + }) |entry| { + var dest_save = this.dest.save(); + defer dest_save.restore(); + + this.dest.append(entry.path); + + switch (entry.kind) { + .directory => { + FD.cwd().makePath(u8, this.dest.sliceZ()) catch {}; + }, + .file => { + switch (sys.linkatZ(entry.dir, entry.basename, FD.cwd(), this.dest.sliceZ())) { + .result => {}, + .err => |link_err1| { + switch (link_err1.getErrno()) { + .EXIST => { + FD.cwd().deleteTree(this.dest.slice()) catch {}; + switch (sys.linkatZ(entry.dir, entry.basename, FD.cwd(), this.dest.sliceZ())) { + .result => {}, + .err => |link_err2| return .initErr(link_err2), + } + }, + .NOENT => { + const dest_parent = this.dest.dirname() orelse { + return .initErr(link_err1); + }; + + FD.cwd().makePath(u8, dest_parent) catch {}; + switch (sys.linkatZ(entry.dir, entry.basename, FD.cwd(), this.dest.sliceZ())) { + .result => {}, + .err => |link_err2| return .initErr(link_err2), + } + }, + else => return .initErr(link_err1), + } + }, + } + }, + else => {}, + } + } + + return .success; + } +}; + +// @sortImports + +const Walker = @import("../../walker_skippable.zig"); + +const bun = @import("bun"); +const Environment = bun.Environment; +const FD = bun.FD; +const OOM = bun.OOM; +const sys = bun.sys; diff --git a/src/install/isolated_install/Installer.zig b/src/install/isolated_install/Installer.zig new file mode 100644 index 0000000000..2209558e77 --- /dev/null +++ b/src/install/isolated_install/Installer.zig @@ -0,0 +1,1139 @@ +pub const Installer = struct { + trusted_dependencies_mutex: bun.Mutex, + // this is not const for `lockfile.trusted_dependencies` + lockfile: *Lockfile, + + summary: PackageInstall.Summary = .{ .successfully_installed = .empty }, + installed: Bitset, + install_node: ?*Progress.Node, + scripts_node: ?*Progress.Node, + + manager: *PackageManager, + command_ctx: Command.Context, + + store: *const Store, + + tasks: bun.UnboundedQueue(Task, .next) = .{}, + preallocated_tasks: Task.Preallocated, + + trusted_dependencies_from_update_requests: std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void), + + pub fn deinit(this: *const Installer) void { + this.trusted_dependencies_from_update_requests.deinit(this.lockfile.allocator); + } + + pub fn startTask(this: *Installer, entry_id: Store.Entry.Id) void { + const task = this.preallocated_tasks.get(); + + task.* = .{ + .entry_id = entry_id, + .installer = this, + }; + + this.manager.thread_pool.schedule(.from(&task.task)); + } + + pub fn onPackageExtracted(this: *Installer, task_id: install.Task.Id) void { + if (this.manager.task_queue.fetchRemove(task_id)) |removed| { + for (removed.value.items) |install_ctx| { + const entry_id = install_ctx.isolated_package_install_context; + this.startTask(entry_id); + } + } + } + + pub fn onTaskFail(this: *Installer, entry_id: Store.Entry.Id, err: Task.Error) void { + const string_buf = this.lockfile.buffers.string_bytes.items; + + const entries = this.store.entries.slice(); + const entry_node_ids = entries.items(.node_id); + + const nodes = this.store.nodes.slice(); + const node_pkg_ids = nodes.items(.pkg_id); + + const pkgs = this.lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + const pkg_resolutions = pkgs.items(.resolution); + + const 
node_id = entry_node_ids[entry_id.get()];
+        const pkg_id = node_pkg_ids[node_id.get()];
+
+        const pkg_name = pkg_names[pkg_id];
+        const pkg_res = pkg_resolutions[pkg_id];
+
+        switch (err) {
+            .link_package => |link_err| {
+                Output.err(link_err, "failed to link package: {s}@{}", .{
+                    pkg_name.slice(string_buf),
+                    pkg_res.fmt(string_buf, .auto),
+                });
+            },
+            .symlink_dependencies => |symlink_err| {
+                Output.err(symlink_err, "failed to symlink dependencies for package: {s}@{}", .{
+                    pkg_name.slice(string_buf),
+                    pkg_res.fmt(string_buf, .auto),
+                });
+            },
+            else => {},
+        }
+        Output.flush();
+
+        // attempt deleting the package so the next install will install it again
+        switch (pkg_res.tag) {
+            .uninitialized,
+            .single_file_module,
+            .root,
+            .workspace,
+            .symlink,
+            => {},
+
+            _ => {},
+
+            // to be safe make sure we only delete packages in the store
+            .npm,
+            .git,
+            .github,
+            .local_tarball,
+            .remote_tarball,
+            .folder,
+            => {
+                var store_path: bun.RelPath(.{ .sep = .auto }) = .init();
+                defer store_path.deinit();
+
+                store_path.appendFmt("node_modules/{}", .{
+                    Store.Entry.fmtStorePath(entry_id, this.store, this.lockfile),
+                });
+
+                _ = sys.unlink(store_path.sliceZ());
+            },
+        }
+
+        if (this.manager.options.enable.fail_early) {
+            Global.exit(1);
+        }
+
+        this.summary.fail += 1;
+
+        this.decrementPendingTasks(entry_id);
+        this.resumeUnblockedTasks();
+    }
+
+    pub fn decrementPendingTasks(this: *Installer, entry_id: Store.Entry.Id) void {
+        _ = entry_id;
+        this.manager.decrementPendingTasks();
+    }
+
+    pub fn onTaskBlocked(this: *Installer, entry_id: Store.Entry.Id) void {
+
+        // race: a task decides it is blocked because one of its dependencies has not finished.
+        // before the task can mark itself as blocked, the dependency finishes its install,
+        // causing the task to never finish because resumeUnblockedTasks is called before
+        // its state is set to blocked.
+        //
+        // fix: check if the task is unblocked after the task returns blocked, and only set/unset
+        // blocked from the main thread.
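+        //
+        // A hypothetical interleaving the re-check guards against:
+        //   worker A: sees dependency B is not `.done` and returns `.blocked`
+        //   worker B: finishes B; the main thread runs resumeUnblockedTasks(),
+        //             which skips A because A is not marked `.blocked` yet
+        //   main:     marks A `.blocked`; nothing is left to wake it
+        // Re-running isTaskUnblocked() here, on the main thread, closes that window.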
+
+        var parent_dedupe: std.AutoArrayHashMap(Store.Entry.Id, void) = .init(bun.default_allocator);
+        defer parent_dedupe.deinit();
+
+        if (this.isTaskUnblocked(entry_id, &parent_dedupe)) {
+            this.store.entries.items(.step)[entry_id.get()].store(.symlink_dependency_binaries, .monotonic);
+            this.startTask(entry_id);
+            return;
+        }
+
+        this.store.entries.items(.step)[entry_id.get()].store(.blocked, .monotonic);
+    }
+
+    fn isTaskUnblocked(this: *Installer, entry_id: Store.Entry.Id, parent_dedupe: *std.AutoArrayHashMap(Store.Entry.Id, void)) bool {
+        const entries = this.store.entries.slice();
+        const entry_deps = entries.items(.dependencies);
+        const entry_steps = entries.items(.step);
+
+        const deps = entry_deps[entry_id.get()];
+        for (deps.slice()) |dep| {
+            if (entry_steps[dep.entry_id.get()].load(.monotonic) != .done) {
+                parent_dedupe.clearRetainingCapacity();
+                if (this.store.isCycle(entry_id, dep.entry_id, parent_dedupe)) {
+                    continue;
+                }
+
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    pub fn onTaskComplete(this: *Installer, entry_id: Store.Entry.Id, state: enum { success, skipped, fail }) void {
+        if (comptime Environment.ci_assert) {
+            bun.assertWithLocation(this.store.entries.items(.step)[entry_id.get()].load(.monotonic) == .done, @src());
+        }
+
+        this.decrementPendingTasks(entry_id);
+        this.resumeUnblockedTasks();
+
+        if (this.install_node) |node| {
+            node.completeOne();
+        }
+
+        switch (state) {
+            .success => {
+                this.summary.success += 1;
+            },
+            .skipped => {
+                this.summary.skipped += 1;
+            },
+            .fail => {
+                this.summary.fail += 1;
+                return;
+            },
+        }
+
+        const pkg_id = pkg_id: {
+            if (entry_id == .root) {
+                return;
+            }
+
+            const node_id = this.store.entries.items(.node_id)[entry_id.get()];
+            const nodes = this.store.nodes.slice();
+
+            const dep_id = nodes.items(.dep_id)[node_id.get()];
+
+            if (dep_id == invalid_dependency_id) {
+                // should be covered by `entry_id == .root` above, but
+                // just in case
+                return;
+            }
+
+            const dep = this.lockfile.buffers.dependencies.items[dep_id];
+
+            if (dep.behavior.isWorkspaceOnly()) {
+                return;
+            }
+
+            break :pkg_id nodes.items(.pkg_id)[node_id.get()];
+        };
+
+        const is_duplicate = this.installed.isSet(pkg_id);
+        this.summary.success += @intFromBool(!is_duplicate);
+        this.installed.set(pkg_id);
+    }
+
+    // This function runs only on the main thread. The installer task threads
+    // will be changing values in `entry_step`, but the blocked state is only
+    // set on the main thread, allowing the code between
+    // `entry_steps[entry_id.get()].load(.monotonic)`
+    // and
+    // `entry_steps[entry_id.get()].store(.symlink_dependency_binaries, .monotonic)`
+    // to run without a worker thread changing the step underneath it.
+    pub fn resumeUnblockedTasks(this: *Installer) void {
+        const entries = this.store.entries.slice();
+        const entry_steps = entries.items(.step);
+
+        var parent_dedupe: std.AutoArrayHashMap(Store.Entry.Id, void) = .init(bun.default_allocator);
+        defer parent_dedupe.deinit();
+
+        for (0..this.store.entries.len) |_entry_id| {
+            const entry_id: Store.Entry.Id = .from(@intCast(_entry_id));
+
+            const entry_step = entry_steps[entry_id.get()].load(.monotonic);
+            if (entry_step != .blocked) {
+                continue;
+            }
+
+            if (!this.isTaskUnblocked(entry_id, &parent_dedupe)) {
+                continue;
+            }
+
+            entry_steps[entry_id.get()].store(.symlink_dependency_binaries, .monotonic);
+            this.startTask(entry_id);
+        }
+    }
+
+    pub const Task = struct {
+        const Preallocated = bun.HiveArray(Task, 128).Fallback;
+
+        entry_id: Store.Entry.Id,
+        installer: *Installer,
+
+        task: ThreadPool.Task = .{ .callback = &callback },
+        next: ?*Task = null,
+
+        result: Result = .none,
+
+        const Result = union(enum) {
+            none,
+            err: Error,
+            blocked,
+            done,
+        };
+
+        const Error = union(Step) {
+            link_package: sys.Error,
+            symlink_dependencies: sys.Error,
+            check_if_blocked,
+            symlink_dependency_binaries,
+            run_preinstall: anyerror,
+            binaries: anyerror,
+            @"run (post)install and (pre/post)prepare": anyerror,
+            done,
+            blocked,
+
+            pub fn clone(this: *const Error, allocator: std.mem.Allocator) Error {
+                return switch (this.*) {
+                    .link_package => |err| .{ .link_package = err.clone(allocator) },
+                    .symlink_dependencies => |err| .{ .symlink_dependencies = err.clone(allocator) },
+                    .check_if_blocked => .check_if_blocked,
+                    .symlink_dependency_binaries => .symlink_dependency_binaries,
+                    .run_preinstall => |err| .{ .run_preinstall = err },
+                    .binaries => |err| .{ .binaries = err },
+                    .@"run (post)install and (pre/post)prepare" => |err| .{ .@"run (post)install and (pre/post)prepare" = err },
+                    .done => .done,
+                    .blocked => .blocked,
+                };
+            }
+        };
+
+        pub const Step = enum(u8) {
+            link_package,
+            symlink_dependencies,
+
+            check_if_blocked,
+
+            // blocked can only happen here
+
+            symlink_dependency_binaries,
+            run_preinstall,
+
+            // pause here while preinstall runs
+
+            binaries,
+            @"run (post)install and (pre/post)prepare",
+
+            // pause again while remaining scripts run.
+ + done, + + // only the main thread sets blocked, and only the main thread + // sets a blocked task to symlink_dependency_binaries + blocked, + }; + + fn nextStep(this: *Task, comptime current_step: Step) Step { + const next_step: Step = switch (comptime current_step) { + .link_package => .symlink_dependencies, + .symlink_dependencies => .check_if_blocked, + .check_if_blocked => .symlink_dependency_binaries, + .symlink_dependency_binaries => .run_preinstall, + .run_preinstall => .binaries, + .binaries => .@"run (post)install and (pre/post)prepare", + .@"run (post)install and (pre/post)prepare" => .done, + + .done, + .blocked, + => @compileError("unexpected step"), + }; + + this.installer.store.entries.items(.step)[this.entry_id.get()].store(next_step, .monotonic); + + return next_step; + } + + const Yield = union(enum) { + yield, + done, + blocked, + fail: Error, + + pub fn failure(e: Error) Yield { + return .{ .fail = e }; + } + }; + + fn run(this: *Task) OOM!Yield { + const installer = this.installer; + const manager = installer.manager; + const lockfile = installer.lockfile; + + const pkgs = installer.lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + const pkg_name_hashes = pkgs.items(.name_hash); + const pkg_resolutions = pkgs.items(.resolution); + const pkg_bins = pkgs.items(.bin); + const pkg_script_lists = pkgs.items(.scripts); + + const entries = installer.store.entries.slice(); + const entry_node_ids = entries.items(.node_id); + const entry_dependencies = entries.items(.dependencies); + const entry_steps = entries.items(.step); + const entry_scripts = entries.items(.scripts); + + const nodes = installer.store.nodes.slice(); + const node_pkg_ids = nodes.items(.pkg_id); + const node_dep_ids = nodes.items(.dep_id); + + const node_id = entry_node_ids[this.entry_id.get()]; + const pkg_id = node_pkg_ids[node_id.get()]; + const dep_id = node_dep_ids[node_id.get()]; + + const pkg_name = pkg_names[pkg_id]; + const pkg_name_hash = pkg_name_hashes[pkg_id]; + const pkg_res = pkg_resolutions[pkg_id]; + + return next_step: switch (entry_steps[this.entry_id.get()].load(.monotonic)) { + inline .link_package => |current_step| { + const string_buf = lockfile.buffers.string_bytes.items; + + var pkg_cache_dir_subpath: bun.RelPath(.{ .sep = .auto }) = .from(switch (pkg_res.tag) { + else => |tag| pkg_cache_dir_subpath: { + const patch_info = try installer.packagePatchInfo( + pkg_name, + pkg_name_hash, + &pkg_res, + ); + + break :pkg_cache_dir_subpath switch (tag) { + .npm => manager.cachedNPMPackageFolderName(pkg_name.slice(string_buf), pkg_res.value.npm.version, patch_info.contentsHash()), + .git => manager.cachedGitFolderName(&pkg_res.value.git, patch_info.contentsHash()), + .github => manager.cachedGitHubFolderName(&pkg_res.value.github, patch_info.contentsHash()), + .local_tarball => manager.cachedTarballFolderName(pkg_res.value.local_tarball, patch_info.contentsHash()), + .remote_tarball => manager.cachedTarballFolderName(pkg_res.value.remote_tarball, patch_info.contentsHash()), + + else => { + if (comptime Environment.ci_assert) { + bun.assertWithLocation(false, @src()); + } + + continue :next_step this.nextStep(current_step); + }, + }; + }, + + .folder => { + // the folder does not exist in the cache + const folder_dir = switch (bun.openDirForIteration(FD.cwd(), pkg_res.value.folder.slice(string_buf))) { + .result => |fd| fd, + .err => |err| return .failure(.{ .link_package = err }), + }; + defer folder_dir.close(); + + var src: bun.AbsPath(.{ .unit = .os, .sep = .auto }) = 
.initTopLevelDir(); + defer src.deinit(); + src.append(pkg_res.value.folder.slice(string_buf)); + + var dest: bun.RelPath(.{ .unit = .os, .sep = .auto }) = .init(); + defer dest.deinit(); + + installer.appendStorePath(&dest, this.entry_id); + + var hardlinker: Hardlinker = .{ + .src_dir = folder_dir, + .src = src, + .dest = dest, + }; + + switch (try hardlinker.link(&.{comptime bun.OSPathLiteral("node_modules")})) { + .result => {}, + .err => |err| return .failure(.{ .link_package = err }), + } + + continue :next_step this.nextStep(current_step); + }, + }); + defer pkg_cache_dir_subpath.deinit(); + + const cache_dir, const cache_dir_path = manager.getCacheDirectoryAndAbsPath(); + defer cache_dir_path.deinit(); + + var dest_subpath: bun.RelPath(.{ .sep = .auto, .unit = .os }) = .init(); + defer dest_subpath.deinit(); + + installer.appendStorePath(&dest_subpath, this.entry_id); + + // link the package + if (comptime Environment.isMac) { + if (install.PackageInstall.supported_method == .clonefile) hardlink_fallback: { + switch (sys.clonefileat(cache_dir, pkg_cache_dir_subpath.sliceZ(), FD.cwd(), dest_subpath.sliceZ())) { + .result => { + // success! move to next step + continue :next_step this.nextStep(current_step); + }, + .err => |clonefile_err1| { + switch (clonefile_err1.getErrno()) { + .XDEV => break :hardlink_fallback, + .OPNOTSUPP => break :hardlink_fallback, + .NOENT => { + const parent_dest_dir = std.fs.path.dirname(dest_subpath.slice()) orelse { + return .failure(.{ .link_package = clonefile_err1 }); + }; + + FD.cwd().makePath(u8, parent_dest_dir) catch {}; + + switch (sys.clonefileat(cache_dir, pkg_cache_dir_subpath.sliceZ(), FD.cwd(), dest_subpath.sliceZ())) { + .result => { + continue :next_step this.nextStep(current_step); + }, + .err => |clonefile_err2| { + return .failure(.{ .link_package = clonefile_err2 }); + }, + } + }, + else => { + break :hardlink_fallback; + }, + } + }, + } + } + } + + const cached_package_dir = cached_package_dir: { + if (comptime Environment.isWindows) { + break :cached_package_dir switch (sys.openDirAtWindowsA( + cache_dir, + pkg_cache_dir_subpath.slice(), + .{ .iterable = true, .can_rename_or_delete = false, .read_only = true }, + )) { + .result => |dir_fd| dir_fd, + .err => |err| { + return .failure(.{ .link_package = err }); + }, + }; + } + break :cached_package_dir switch (sys.openat( + cache_dir, + pkg_cache_dir_subpath.sliceZ(), + bun.O.DIRECTORY | bun.O.CLOEXEC | bun.O.RDONLY, + 0, + )) { + .result => |fd| fd, + .err => |err| { + return .failure(.{ .link_package = err }); + }, + }; + }; + defer cached_package_dir.close(); + + var src: bun.AbsPath(.{ .sep = .auto, .unit = .os }) = .from(cache_dir_path.slice()); + defer src.deinit(); + src.append(pkg_cache_dir_subpath.slice()); + + var hardlinker: Hardlinker = .{ + .src_dir = cached_package_dir, + .src = src, + .dest = dest_subpath, + }; + + switch (try hardlinker.link(&.{})) { + .result => {}, + .err => |err| return .failure(.{ .link_package = err }), + } + + continue :next_step this.nextStep(current_step); + }, + inline .symlink_dependencies => |current_step| { + const string_buf = lockfile.buffers.string_bytes.items; + const dependencies = lockfile.buffers.dependencies.items; + + for (entry_dependencies[this.entry_id.get()].slice()) |dep| { + const dep_node_id = entry_node_ids[dep.entry_id.get()]; + const dep_dep_id = node_dep_ids[dep_node_id.get()]; + const dep_name = dependencies[dep_dep_id].name; + + var dest: bun.Path(.{ .sep = .auto }) = .initTopLevelDir(); + defer dest.deinit(); + + 
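+                        // Shape of the link being created (hypothetical names): for an
+                        // entry `foo@1.2.3` depending on `bar@2.0.0`, `dest` becomes
+                        //   node_modules/.bun/foo@1.2.3/node_modules/bar
+                        // and the relative `target` works out to
+                        //   ../../bar@2.0.0/node_modules/bar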
installer.appendStoreNodeModulesPath(&dest, this.entry_id); + dest.append(dep_name.slice(string_buf)); + + var dep_store_path: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); + defer dep_store_path.deinit(); + + installer.appendStorePath(&dep_store_path, dep.entry_id); + + const target = target: { + var dest_save = dest.save(); + defer dest_save.restore(); + + dest.undo(1); + break :target dest.relative(&dep_store_path); + }; + defer target.deinit(); + + const symlinker: Symlinker = .{ + .dest = dest, + .target = target, + .fallback_junction_target = dep_store_path, + }; + + const link_strategy: Symlinker.Strategy = if (pkg_res.tag == .root or pkg_res.tag == .workspace) + // root and workspace packages ensure their dependency symlinks + // exist unconditionally. To make sure it's fast, first readlink + // then create the symlink if necessary + .expect_existing + else + .expect_missing; + + switch (symlinker.ensureSymlink(link_strategy)) { + .result => {}, + .err => |err| { + return .failure(.{ .symlink_dependencies = err }); + }, + } + } + continue :next_step this.nextStep(current_step); + }, + inline .check_if_blocked => |current_step| { + // preinstall scripts need to run before binaries can be linked. Block here if any dependencies + // of this entry are not finished. Do not count cycles towards blocking. + + var parent_dedupe: std.AutoArrayHashMap(Store.Entry.Id, void) = .init(bun.default_allocator); + defer parent_dedupe.deinit(); + + if (!installer.isTaskUnblocked(this.entry_id, &parent_dedupe)) { + return .blocked; + } + + continue :next_step this.nextStep(current_step); + }, + inline .symlink_dependency_binaries => |current_step| { + installer.linkDependencyBins(this.entry_id) catch |err| { + return .failure(.{ .binaries = err }); + }; + + switch (pkg_res.tag) { + .uninitialized, + .root, + .workspace, + .folder, + .symlink, + .single_file_module, + => {}, + + _ => {}, + + .npm, + .git, + .github, + .local_tarball, + .remote_tarball, + => { + const string_buf = lockfile.buffers.string_bytes.items; + + var hidden_hoisted_node_modules: bun.Path(.{ .sep = .auto }) = .init(); + defer hidden_hoisted_node_modules.deinit(); + + hidden_hoisted_node_modules.append( + "node_modules" ++ std.fs.path.sep_str ++ ".bun" ++ std.fs.path.sep_str ++ "node_modules", + ); + hidden_hoisted_node_modules.append(pkg_name.slice(installer.lockfile.buffers.string_bytes.items)); + + var target: bun.RelPath(.{ .sep = .auto }) = .init(); + defer target.deinit(); + + target.append(".."); + if (strings.containsChar(pkg_name.slice(installer.lockfile.buffers.string_bytes.items), '/')) { + target.append(".."); + } + + target.appendFmt("{}/node_modules/{s}", .{ + Store.Entry.fmtStorePath(this.entry_id, installer.store, installer.lockfile), + pkg_name.slice(string_buf), + }); + + var full_target: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); + defer full_target.deinit(); + + installer.appendStorePath(&full_target, this.entry_id); + + const symlinker: Symlinker = .{ + .dest = hidden_hoisted_node_modules, + .target = target, + .fallback_junction_target = full_target, + }; + _ = symlinker.ensureSymlink(.ignore_failure); + }, + } + + continue :next_step this.nextStep(current_step); + }, + inline .run_preinstall => |current_step| { + if (!installer.manager.options.do.run_scripts or this.entry_id == .root) { + continue :next_step this.nextStep(current_step); + } + + const string_buf = installer.lockfile.buffers.string_bytes.items; + + const dep = installer.lockfile.buffers.dependencies.items[dep_id]; + const 
truncated_dep_name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); + + const is_trusted, const is_trusted_through_update_request = brk: { + if (installer.trusted_dependencies_from_update_requests.contains(truncated_dep_name_hash)) { + break :brk .{ true, true }; + } + if (installer.lockfile.hasTrustedDependency(dep.name.slice(string_buf))) { + break :brk .{ true, false }; + } + break :brk .{ false, false }; + }; + + var pkg_cwd: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); + defer pkg_cwd.deinit(); + + installer.appendStorePath(&pkg_cwd, this.entry_id); + + if (pkg_res.tag != .root and (pkg_res.tag == .workspace or is_trusted)) { + const pkg_scripts: *Package.Scripts = &pkg_script_lists[pkg_id]; + + var log = bun.logger.Log.init(bun.default_allocator); + defer log.deinit(); + + const scripts_list = pkg_scripts.getList( + &log, + installer.lockfile, + &pkg_cwd, + dep.name.slice(string_buf), + &pkg_res, + ) catch |err| { + return .failure(.{ .run_preinstall = err }); + }; + + if (scripts_list) |list| { + entry_scripts[this.entry_id.get()] = bun.create(bun.default_allocator, Package.Scripts.List, list); + + if (is_trusted_through_update_request) { + const trusted_dep_to_add = try installer.manager.allocator.dupe(u8, dep.name.slice(string_buf)); + + installer.trusted_dependencies_mutex.lock(); + defer installer.trusted_dependencies_mutex.unlock(); + + try installer.manager.trusted_deps_to_add_to_package_json.append( + installer.manager.allocator, + trusted_dep_to_add, + ); + if (installer.lockfile.trusted_dependencies == null) { + installer.lockfile.trusted_dependencies = .{}; + } + try installer.lockfile.trusted_dependencies.?.put(installer.manager.allocator, truncated_dep_name_hash, {}); + } + + if (list.first_index != 0) { + // has scripts but not a preinstall + continue :next_step this.nextStep(current_step); + } + + installer.manager.spawnPackageLifecycleScripts( + installer.command_ctx, + list, + dep.behavior.optional, + false, + .{ + .entry_id = this.entry_id, + .installer = installer, + }, + ) catch |err| { + return .failure(.{ .run_preinstall = err }); + }; + + return .yield; + } + } + + continue :next_step this.nextStep(current_step); + }, + inline .binaries => |current_step| { + if (this.entry_id == .root) { + continue :next_step this.nextStep(current_step); + } + + const bin = pkg_bins[pkg_id]; + if (bin.tag == .none) { + continue :next_step this.nextStep(current_step); + } + + const string_buf = installer.lockfile.buffers.string_bytes.items; + const dependencies = installer.lockfile.buffers.dependencies.items; + + const dep_name = dependencies[dep_id].name.slice(string_buf); + + const abs_target_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(abs_target_buf); + const abs_dest_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(abs_dest_buf); + const rel_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(rel_buf); + + var seen: bun.StringHashMap(void) = .init(bun.default_allocator); + defer seen.deinit(); + + var node_modules_path: bun.AbsPath(.{}) = .initTopLevelDir(); + defer node_modules_path.deinit(); + + installer.appendStoreNodeModulesPath(&node_modules_path, this.entry_id); + + var bin_linker: Bin.Linker = .{ + .bin = bin, + .global_bin_path = installer.manager.options.bin_path, + .package_name = strings.StringOrTinyString.init(dep_name), + .string_buf = string_buf, + .extern_string_buf = installer.lockfile.buffers.extern_strings.items, + .seen = &seen, + .node_modules_path = &node_modules_path, + 
.abs_target_buf = abs_target_buf,
+                    .abs_dest_buf = abs_dest_buf,
+                    .rel_buf = rel_buf,
+                };
+
+                bin_linker.link(false);
+
+                if (bin_linker.err) |err| {
+                    return .failure(.{ .binaries = err });
+                }
+
+                continue :next_step this.nextStep(current_step);
+            },
+            inline .@"run (post)install and (pre/post)prepare" => |current_step| {
+                if (!installer.manager.options.do.run_scripts or this.entry_id == .root) {
+                    continue :next_step this.nextStep(current_step);
+                }
+
+                var list = entry_scripts[this.entry_id.get()] orelse {
+                    continue :next_step this.nextStep(current_step);
+                };
+
+                if (list.first_index == 0) {
+                    for (list.items[1..], 1..) |item, i| {
+                        if (item != null) {
+                            list.first_index = @intCast(i);
+                            break;
+                        }
+                    }
+                }
+
+                if (list.first_index == 0) {
+                    continue :next_step this.nextStep(current_step);
+                }
+
+                const dep = installer.lockfile.buffers.dependencies.items[dep_id];
+
+                installer.manager.spawnPackageLifecycleScripts(
+                    installer.command_ctx,
+                    list.*,
+                    dep.behavior.optional,
+                    false,
+                    .{
+                        .entry_id = this.entry_id,
+                        .installer = installer,
+                    },
+                ) catch |err| {
+                    return .failure(.{ .@"run (post)install and (pre/post)prepare" = err });
+                };
+
+                // when these scripts finish, the package install will be
+                // complete. the task does not have any more work to do,
+                // so it does not return to the thread pool.
+
+                return .yield;
+            },
+
+            .done => {
+                return .done;
+            },
+
+            .blocked => {
+                bun.debugAssert(false);
+                return .yield;
+            },
+        };
+    }
+
+    pub fn callback(task: *ThreadPool.Task) void {
+        const this: *Task = @fieldParentPtr("task", task);
+
+        const res = this.run() catch |err| switch (err) {
+            error.OutOfMemory => bun.outOfMemory(),
+        };
+
+        switch (res) {
+            .yield => {},
+            .done => {
+                if (comptime Environment.ci_assert) {
+                    bun.assertWithLocation(this.installer.store.entries.items(.step)[this.entry_id.get()].load(.monotonic) == .done, @src());
+                }
+                this.result = .done;
+                this.installer.tasks.push(this);
+                this.installer.manager.wake();
+            },
+            .blocked => {
+                if (comptime Environment.ci_assert) {
+                    bun.assertWithLocation(this.installer.store.entries.items(.step)[this.entry_id.get()].load(.monotonic) == .check_if_blocked, @src());
+                }
+                this.result = .blocked;
+                this.installer.tasks.push(this);
+                this.installer.manager.wake();
+            },
+            .fail => |err| {
+                if (comptime Environment.ci_assert) {
+                    bun.assertWithLocation(this.installer.store.entries.items(.step)[this.entry_id.get()].load(.monotonic) != .done, @src());
+                }
+                this.installer.store.entries.items(.step)[this.entry_id.get()].store(.done, .monotonic);
+                this.result = .{ .err = err.clone(bun.default_allocator) };
+                this.installer.tasks.push(this);
+                this.installer.manager.wake();
+            },
+        }
+    }
+    };
+
+    const PatchInfo = union(enum) {
+        none,
+        remove: struct {
+            name_and_version_hash: u64,
+        },
+        patch: struct {
+            name_and_version_hash: u64,
+            patch_path: string,
+            contents_hash: u64,
+        },
+
+        pub fn contentsHash(this: *const @This()) ?u64 {
+            return switch (this.*) {
+                .none, .remove => null,
+                .patch => |patch| patch.contents_hash,
+            };
+        }
+
+        pub fn nameAndVersionHash(this: *const @This()) ?u64 {
+            return switch (this.*) {
+                .none, .remove => null,
+                .patch => |patch| patch.name_and_version_hash,
+            };
+        }
+    };
+
+    pub fn packagePatchInfo(
+        this: *Installer,
+        pkg_name: String,
+        pkg_name_hash: PackageNameHash,
+        pkg_res: *const Resolution,
+    ) OOM!PatchInfo {
+        if (this.lockfile.patched_dependencies.entries.len == 0 and this.manager.patched_dependencies_to_remove.entries.len == 0) {
+            return .none;
+        }
+
+        const
string_buf = this.lockfile.buffers.string_bytes.items; + + var version_buf: std.ArrayListUnmanaged(u8) = .empty; + defer version_buf.deinit(bun.default_allocator); + + var writer = version_buf.writer(this.lockfile.allocator); + try writer.print("{s}@", .{pkg_name.slice(string_buf)}); + + switch (pkg_res.tag) { + .workspace => { + if (this.lockfile.workspace_versions.get(pkg_name_hash)) |workspace_version| { + try writer.print("{}", .{workspace_version.fmt(string_buf)}); + } + }, + else => { + try writer.print("{}", .{pkg_res.fmt(string_buf, .posix)}); + }, + } + + const name_and_version_hash = String.Builder.stringHash(version_buf.items); + + if (this.lockfile.patched_dependencies.get(name_and_version_hash)) |patch| { + return .{ + .patch = .{ + .name_and_version_hash = name_and_version_hash, + .patch_path = patch.path.slice(string_buf), + .contents_hash = patch.patchfileHash().?, + }, + }; + } + + if (this.manager.patched_dependencies_to_remove.contains(name_and_version_hash)) { + return .{ + .remove = .{ + .name_and_version_hash = name_and_version_hash, + }, + }; + } + + return .none; + } + + pub fn linkDependencyBins(this: *const Installer, parent_entry_id: Store.Entry.Id) !void { + const lockfile = this.lockfile; + const store = this.store; + + const string_buf = lockfile.buffers.string_bytes.items; + const extern_string_buf = lockfile.buffers.extern_strings.items; + + const entries = store.entries.slice(); + const entry_node_ids = entries.items(.node_id); + const entry_deps = entries.items(.dependencies); + + const nodes = store.nodes.slice(); + const node_pkg_ids = nodes.items(.pkg_id); + const node_dep_ids = nodes.items(.dep_id); + + const pkgs = lockfile.packages.slice(); + const pkg_bins = pkgs.items(.bin); + + const link_target_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(link_target_buf); + const link_dest_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(link_dest_buf); + const link_rel_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(link_rel_buf); + + var seen: bun.StringHashMap(void) = .init(bun.default_allocator); + defer seen.deinit(); + + var node_modules_path: bun.AbsPath(.{}) = .initTopLevelDir(); + defer node_modules_path.deinit(); + + this.appendStoreNodeModulesPath(&node_modules_path, parent_entry_id); + + for (entry_deps[parent_entry_id.get()].slice()) |dep| { + const node_id = entry_node_ids[dep.entry_id.get()]; + const dep_id = node_dep_ids[node_id.get()]; + const pkg_id = node_pkg_ids[node_id.get()]; + const bin = pkg_bins[pkg_id]; + if (bin.tag == .none) { + continue; + } + + const alias = lockfile.buffers.dependencies.items[dep_id].name; + + var bin_linker: Bin.Linker = .{ + .bin = bin, + .global_bin_path = this.manager.options.bin_path, + .package_name = strings.StringOrTinyString.init(alias.slice(string_buf)), + .string_buf = string_buf, + .extern_string_buf = extern_string_buf, + .seen = &seen, + .node_modules_path = &node_modules_path, + .abs_target_buf = link_target_buf, + .abs_dest_buf = link_dest_buf, + .rel_buf = link_rel_buf, + }; + + bin_linker.link(false); + + if (bin_linker.err) |err| { + return err; + } + } + } + + pub fn appendStoreNodeModulesPath(this: *const Installer, buf: anytype, entry_id: Store.Entry.Id) void { + const string_buf = this.lockfile.buffers.string_bytes.items; + + const entries = this.store.entries.slice(); + const entry_node_ids = entries.items(.node_id); + + const nodes = this.store.nodes.slice(); + const node_pkg_ids = nodes.items(.pkg_id); + + const pkgs = 
this.lockfile.packages.slice(); + const pkg_resolutions = pkgs.items(.resolution); + + const node_id = entry_node_ids[entry_id.get()]; + const pkg_id = node_pkg_ids[node_id.get()]; + const pkg_res = pkg_resolutions[pkg_id]; + + switch (pkg_res.tag) { + .root => { + buf.append("node_modules"); + }, + .workspace => { + buf.append(pkg_res.value.workspace.slice(string_buf)); + buf.append("node_modules"); + }, + else => { + buf.appendFmt("node_modules/" ++ Store.modules_dir_name ++ "/{}/node_modules", .{ + Store.Entry.fmtStorePath(entry_id, this.store, this.lockfile), + }); + }, + } + } + + pub fn appendStorePath(this: *const Installer, buf: anytype, entry_id: Store.Entry.Id) void { + const string_buf = this.lockfile.buffers.string_bytes.items; + + const entries = this.store.entries.slice(); + const entry_node_ids = entries.items(.node_id); + + const nodes = this.store.nodes.slice(); + const node_pkg_ids = nodes.items(.pkg_id); + // const node_peers = nodes.items(.peers); + + const pkgs = this.lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + const pkg_resolutions = pkgs.items(.resolution); + + const node_id = entry_node_ids[entry_id.get()]; + // const peers = node_peers[node_id.get()]; + const pkg_id = node_pkg_ids[node_id.get()]; + const pkg_res = pkg_resolutions[pkg_id]; + + switch (pkg_res.tag) { + .root => {}, + .workspace => { + buf.append(pkg_res.value.workspace.slice(string_buf)); + }, + .symlink => { + const symlink_dir_path = this.manager.globalLinkDirPath(); + + buf.clear(); + buf.append(symlink_dir_path); + buf.append(pkg_res.value.symlink.slice(string_buf)); + }, + else => { + const pkg_name = pkg_names[pkg_id]; + buf.append("node_modules/" ++ Store.modules_dir_name); + buf.appendFmt("{}", .{ + Store.Entry.fmtStorePath(entry_id, this.store, this.lockfile), + }); + buf.append("node_modules"); + buf.append(pkg_name.slice(string_buf)); + }, + } + } +}; + +// @sortImports + +const std = @import("std"); +const Hardlinker = @import("./Hardlinker.zig").Hardlinker; +const Symlinker = @import("./Symlinker.zig").Symlinker; + +const bun = @import("bun"); +const Environment = bun.Environment; +const FD = bun.FD; +const Global = bun.Global; +const OOM = bun.OOM; +const Output = bun.Output; +const Progress = bun.Progress; +const ThreadPool = bun.ThreadPool; +const string = bun.string; +const strings = bun.strings; +const sys = bun.sys; +const Bitset = bun.bit_set.DynamicBitSetUnmanaged; +const Command = bun.CLI.Command; +const String = bun.Semver.String; + +const install = bun.install; +const Bin = install.Bin; +const PackageInstall = install.PackageInstall; +const PackageManager = install.PackageManager; +const PackageNameHash = install.PackageNameHash; +const Resolution = install.Resolution; +const Store = install.Store; +const TruncatedPackageNameHash = install.TruncatedPackageNameHash; +const invalid_dependency_id = install.invalid_dependency_id; + +const Lockfile = install.Lockfile; +const Package = Lockfile.Package; diff --git a/src/install/isolated_install/Store.zig b/src/install/isolated_install/Store.zig new file mode 100644 index 0000000000..c3dea7ad9f --- /dev/null +++ b/src/install/isolated_install/Store.zig @@ -0,0 +1,548 @@ +const Ids = struct { + dep_id: DependencyID, + pkg_id: PackageID, +}; + +pub const Store = struct { + entries: Entry.List, + nodes: Node.List, + + const log = Output.scoped(.Store, false); + + pub const modules_dir_name = ".bun"; + + fn NewId(comptime T: type) type { + return enum(u32) { + comptime { + _ = T; + } + + root = 0, + invalid = max, 
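+ // every remaining u32 is an ordinary id; `max` is reserved for the `invalid` sentinel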
+ _, + + const max = std.math.maxInt(u32); + + pub fn from(id: u32) @This() { + bun.debugAssert(id != max); + return @enumFromInt(id); + } + + pub fn get(id: @This()) u32 { + bun.debugAssert(id != .invalid); + return @intFromEnum(id); + } + + pub fn tryGet(id: @This()) ?u32 { + return if (id == .invalid) null else @intFromEnum(id); + } + + pub fn getOr(id: @This(), default: u32) u32 { + return if (id == .invalid) default else @intFromEnum(id); + } + }; + } + + comptime { + bun.assert(NewId(Entry) != NewId(Node)); + bun.assert(NewId(Entry) == NewId(Entry)); + } + + pub const Installer = @import("./Installer.zig").Installer; + + pub fn isCycle(this: *const Store, id: Entry.Id, maybe_parent_id: Entry.Id, parent_dedupe: *std.AutoArrayHashMap(Entry.Id, void)) bool { + var i: usize = 0; + var len: usize = 0; + + const entry_parents = this.entries.items(.parents); + + for (entry_parents[id.get()].items) |parent_id| { + if (parent_id == .invalid) { + continue; + } + if (parent_id == maybe_parent_id) { + return true; + } + parent_dedupe.put(parent_id, {}) catch bun.outOfMemory(); + } + + len = parent_dedupe.count(); + while (i < len) { + for (entry_parents[parent_dedupe.keys()[i].get()].items) |parent_id| { + if (parent_id == .invalid) { + continue; + } + if (parent_id == maybe_parent_id) { + return true; + } + parent_dedupe.put(parent_id, {}) catch bun.outOfMemory(); + len = parent_dedupe.count(); + } + i += 1; + } + + return false; + } + + // A unique entry in the store. Its store path looks like: + // './node_modules/.bun/name@version/node_modules/name' + // or, if peers are involved: + // './node_modules/.bun/name@version_peer1@version+peer2@version/node_modules/name' + // + // Entries are created for workspaces (including the root), but only in memory. If + // a module depends on a workspace, a symlink is created pointing outside the store + // directory to the workspace.
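+ // For example (illustrative), a dependency on `foo@1.2.3` is linked at: + // './node_modules/.bun/foo@1.2.3/node_modules/foo' + // Note: `StorePathFormatter` below encodes the resolved peer set as a lowercase + // hex hash suffix ('foo@1.2.3+<peer_hash>') rather than spelling out each peer.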
+ pub const Entry = struct { + // Used to get dependency name for destination path and peers + // for store path + node_id: Node.Id, + // parent_id: Id, + dependencies: Dependencies, + parents: std.ArrayListUnmanaged(Id) = .empty, + step: std.atomic.Value(Installer.Task.Step) = .init(.link_package), + + peer_hash: PeerHash, + + scripts: ?*Package.Scripts.List = null, + + pub const PeerHash = enum(u64) { + none = 0, + _, + + pub fn from(int: u64) @This() { + return @enumFromInt(int); + } + + pub fn cast(this: @This()) u64 { + return @intFromEnum(this); + } + }; + + const StorePathFormatter = struct { + entry_id: Id, + store: *const Store, + lockfile: *const Lockfile, + + pub fn format(this: @This(), comptime _: string, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { + const store = this.store; + const entries = store.entries.slice(); + const entry_peer_hashes = entries.items(.peer_hash); + const entry_node_ids = entries.items(.node_id); + + const peer_hash = entry_peer_hashes[this.entry_id.get()]; + const node_id = entry_node_ids[this.entry_id.get()]; + const pkg_id = store.nodes.items(.pkg_id)[node_id.get()]; + + const string_buf = this.lockfile.buffers.string_bytes.items; + + const pkgs = this.lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + const pkg_resolutions = pkgs.items(.resolution); + + const pkg_name = pkg_names[pkg_id]; + const pkg_res = pkg_resolutions[pkg_id]; + + switch (pkg_res.tag) { + .folder => { + try writer.print("{}@file+{}", .{ + pkg_name.fmtStorePath(string_buf), + pkg_res.value.folder.fmtStorePath(string_buf), + }); + }, + else => { + try writer.print("{}@{}", .{ + pkg_name.fmtStorePath(string_buf), + pkg_res.fmtStorePath(string_buf), + }); + }, + } + + if (peer_hash != .none) { + try writer.print("+{}", .{ + bun.fmt.hexIntLower(peer_hash.cast()), + }); + } + } + }; + + pub fn fmtStorePath(entry_id: Id, store: *const Store, lockfile: *const Lockfile) StorePathFormatter { + return .{ .entry_id = entry_id, .store = store, .lockfile = lockfile }; + } + + pub fn debugGatherAllParents(entry_id: Id, store: *const Store) []const Id { + var i: usize = 0; + var len: usize = 0; + + const entry_parents = store.entries.items(.parents); + + var parents: std.AutoArrayHashMapUnmanaged(Entry.Id, void) = .empty; + // defer parents.deinit(bun.default_allocator); + + for (entry_parents[entry_id.get()].items) |parent_id| { + if (parent_id == .invalid) { + continue; + } + parents.put(bun.default_allocator, parent_id, {}) catch bun.outOfMemory(); + } + + len = parents.count(); + while (i < len) { + for (entry_parents[parents.entries.items(.key)[i].get()].items) |parent_id| { + if (parent_id == .invalid) { + continue; + } + parents.put(bun.default_allocator, parent_id, {}) catch bun.outOfMemory(); + len = parents.count(); + } + i += 1; + } + + return parents.keys(); + } + + pub const List = bun.MultiArrayList(Entry); + + const DependenciesItem = struct { + entry_id: Id, + + // TODO: this can be removed, and instead dep_id can be retrieved through: + // entry_id -> node_id -> node_dep_ids + dep_id: DependencyID, + }; + pub const Dependencies = OrderedArraySet(DependenciesItem, DependenciesOrderedArraySetCtx); + + pub const DependenciesOrderedArraySetCtx = struct { + string_buf: string, + dependencies: []const Dependency, + + pub fn eql(ctx: *const DependenciesOrderedArraySetCtx, l_item: DependenciesItem, r_item: DependenciesItem) bool { + if (l_item.entry_id != r_item.entry_id) { + return false; + } + + const dependencies = ctx.dependencies; + 
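+ // edges are duplicates when they reach the same entry through a dependency with the same name hash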
const l_dep = dependencies[l_item.dep_id]; + const r_dep = dependencies[r_item.dep_id]; + + return l_dep.name_hash == r_dep.name_hash; + } + + pub fn order(ctx: *const DependenciesOrderedArraySetCtx, l: DependenciesItem, r: DependenciesItem) std.math.Order { + const dependencies = ctx.dependencies; + const l_dep = dependencies[l.dep_id]; + const r_dep = dependencies[r.dep_id]; + + if (l.entry_id == r.entry_id and l_dep.name_hash == r_dep.name_hash) { + return .eq; + } + + // TODO: why are we ordering invalid ids like this? + if (l.entry_id == .invalid) { + if (r.entry_id == .invalid) { + return .eq; + } + return .lt; + } else if (r.entry_id == .invalid) { + if (l.entry_id == .invalid) { + return .eq; + } + return .gt; + } + + const string_buf = ctx.string_buf; + const l_dep_name = l_dep.name; + const r_dep_name = r_dep.name; + + return l_dep_name.order(&r_dep_name, string_buf, string_buf); + } + }; + + pub const Id = NewId(Entry); + + pub fn debugPrintList(list: *const List, lockfile: *Lockfile) void { + const string_buf = lockfile.buffers.string_bytes.items; + + const pkgs = lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + const pkg_resolutions = pkgs.items(.resolution); + + for (0..list.len) |entry_id| { + const entry = list.get(entry_id); + const entry_pkg_name = pkg_names[entry.pkg_id].slice(string_buf); + log( + \\entry ({d}): '{s}@{}' + \\ dep_name: {s} + \\ pkg_id: {d} + \\ parent_id: {} + \\ + , .{ + entry_id, + entry_pkg_name, + pkg_resolutions[entry.pkg_id].fmt(string_buf, .posix), + entry.dep_name.slice(string_buf), + entry.pkg_id, + entry.parent_id, + }); + + log(" dependencies ({d}):\n", .{entry.dependencies.items.len}); + for (entry.dependencies.items) |dep_entry_id| { + const dep_entry = list.get(dep_entry_id.get()); + log(" {s}@{}\n", .{ + pkg_names[dep_entry.pkg_id].slice(string_buf), + pkg_resolutions[dep_entry.pkg_id].fmt(string_buf, .posix), + }); + } + } + } + }; + + pub fn OrderedArraySet(comptime T: type, comptime Ctx: type) type { + return struct { + list: std.ArrayListUnmanaged(T) = .empty, + + pub const empty: @This() = .{}; + + pub fn initCapacity(allocator: std.mem.Allocator, n: usize) OOM!@This() { + const list: std.ArrayListUnmanaged(T) = try .initCapacity(allocator, n); + return .{ .list = list }; + } + + pub fn deinit(this: *@This(), allocator: std.mem.Allocator) void { + this.list.deinit(allocator); + } + + pub fn slice(this: *const @This()) []const T { + return this.list.items; + } + + pub fn len(this: *const @This()) usize { + return this.list.items.len; + } + + pub fn eql(l: *const @This(), r: *const @This(), ctx: *const Ctx) bool { + if (l.list.items.len != r.list.items.len) { + return false; + } + + for (l.list.items, r.list.items) |l_item, r_item| { + if (!ctx.eql(l_item, r_item)) { + return false; + } + } + + return true; + } + + pub fn insert(this: *@This(), allocator: std.mem.Allocator, new: T, ctx: *const Ctx) OOM!void { + for (0..this.list.items.len) |i| { + const existing = this.list.items[i]; + if (ctx.eql(new, existing)) { + return; + } + + const order = ctx.order(new, existing); + + if (order == .eq) { + return; + } + + if (order == .lt) { + try this.list.insert(allocator, i, new); + return; + } + } + + try this.list.append(allocator, new); + } + + pub fn insertAssumeCapacity(this: *@This(), new: T, ctx: *const Ctx) void { + for (0..this.list.items.len) |i| { + const existing = this.list.items[i]; + if (ctx.eql(new, existing)) { + return; + } + + const order = ctx.order(new, existing); + + if (order == .eq) { + return; + } + + if (order == .lt) {
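+ // `existing` is the first element ordered after `new`; insert before it to keep the list sorted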
this.list.insertAssumeCapacity(i, new); + return; + } + } + + this.list.appendAssumeCapacity(new); + } + }; + } + + // A node used to represent the full dependency tree. Uniqueness is determined + // from `pkg_id` and `peers` + pub const Node = struct { + dep_id: DependencyID, + pkg_id: PackageID, + parent_id: Id, + + dependencies: std.ArrayListUnmanaged(Ids) = .empty, + peers: Peers = .empty, + + // each node in this list becomes a symlink in the package's node_modules + nodes: std.ArrayListUnmanaged(Id) = .empty, + + pub const Peers = OrderedArraySet(TransitivePeer, TransitivePeer.OrderedArraySetCtx); + + pub const TransitivePeer = struct { + dep_id: DependencyID, + pkg_id: PackageID, + auto_installed: bool, + + pub const OrderedArraySetCtx = struct { + string_buf: string, + pkg_names: []const String, + + pub fn eql(ctx: *const OrderedArraySetCtx, l_item: TransitivePeer, r_item: TransitivePeer) bool { + _ = ctx; + return l_item.pkg_id == r_item.pkg_id; + } + + pub fn order(ctx: *const OrderedArraySetCtx, l: TransitivePeer, r: TransitivePeer) std.math.Order { + const l_pkg_id = l.pkg_id; + const r_pkg_id = r.pkg_id; + if (l_pkg_id == r_pkg_id) { + return .eq; + } + + const string_buf = ctx.string_buf; + const pkg_names = ctx.pkg_names; + const l_pkg_name = pkg_names[l_pkg_id]; + const r_pkg_name = pkg_names[r_pkg_id]; + + return l_pkg_name.order(&r_pkg_name, string_buf, string_buf); + } + }; + }; + + pub const List = bun.MultiArrayList(Node); + + pub fn deinitList(list: *const List, allocator: std.mem.Allocator) void { + list.deinit(allocator); + } + + pub fn debugPrint(this: *const Node, id: Id, lockfile: *const Lockfile) void { + const pkgs = lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + const pkg_resolutions = pkgs.items(.resolution); + + const string_buf = lockfile.buffers.string_bytes.items; + const deps = lockfile.buffers.dependencies.items; + + const dep_name = if (this.dep_id == invalid_dependency_id) "root" else deps[this.dep_id].name.slice(string_buf); + const dep_version = if (this.dep_id == invalid_dependency_id) "root" else deps[this.dep_id].version.literal.slice(string_buf); + + log( + \\node({d}) + \\ deps: {s}@{s} + \\ res: {s}@{} + \\ + , .{ + id, + dep_name, + dep_version, + pkg_names[this.pkg_id].slice(string_buf), + pkg_resolutions[this.pkg_id].fmt(string_buf, .posix), + }); + } + + pub const Id = NewId(Node); + + pub fn debugPrintList(list: *const List, lockfile: *const Lockfile) void { + const string_buf = lockfile.buffers.string_bytes.items; + const dependencies = lockfile.buffers.dependencies.items; + + const pkgs = lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + const pkg_resolutions = pkgs.items(.resolution); + + for (0..list.len) |node_id| { + const node = list.get(node_id); + const node_pkg_name = pkg_names[node.pkg_id].slice(string_buf); + log( + \\node ({d}): '{s}' + \\ dep_id: {d} + \\ pkg_id: {d} + \\ parent_id: {} + \\ + , .{ + node_id, + node_pkg_name, + node.dep_id, + node.pkg_id, + node.parent_id, + }); + + log(" dependencies ({d}):\n", .{node.dependencies.items.len}); + for (node.dependencies.items) |ids| { + const dep = dependencies[ids.dep_id]; + const dep_name = dep.name.slice(string_buf); + + const pkg_name = pkg_names[ids.pkg_id].slice(string_buf); + const pkg_res = pkg_resolutions[ids.pkg_id]; + + log(" {s}@{} ({s}@{s})\n", .{ + pkg_name, + pkg_res.fmt(string_buf, .posix), + dep_name, + dep.version.literal.slice(string_buf), + }); + } + + log(" nodes ({d}): ", .{node.nodes.items.len}); + for 
(node.nodes.items, 0..) |id, i| { + log("{d}", .{id.get()}); + if (i != node.nodes.items.len - 1) { + log(",", .{}); + } + } + + log("\n peers ({d}):\n", .{node.peers.list.items.len}); + for (node.peers.list.items) |ids| { + const dep = dependencies[ids.dep_id]; + const dep_name = dep.name.slice(string_buf); + const pkg_name = pkg_names[ids.pkg_id].slice(string_buf); + const pkg_res = pkg_resolutions[ids.pkg_id]; + + log(" {s}@{} ({s}@{s})\n", .{ + pkg_name, + pkg_res.fmt(string_buf, .posix), + dep_name, + dep.version.literal.slice(string_buf), + }); + } + } + } + }; +}; + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const OOM = bun.OOM; +const Output = bun.Output; +const string = bun.string; + +const Semver = bun.Semver; +const String = Semver.String; + +const install = bun.install; +const Dependency = install.Dependency; +const DependencyID = install.DependencyID; +const PackageID = install.PackageID; +const invalid_dependency_id = install.invalid_dependency_id; + +const Lockfile = install.Lockfile; +const Package = Lockfile.Package; diff --git a/src/install/isolated_install/Symlinker.zig b/src/install/isolated_install/Symlinker.zig new file mode 100644 index 0000000000..48c4233b24 --- /dev/null +++ b/src/install/isolated_install/Symlinker.zig @@ -0,0 +1,115 @@ +pub const Symlinker = struct { + dest: bun.Path(.{ .sep = .auto }), + target: bun.RelPath(.{ .sep = .auto }), + fallback_junction_target: bun.AbsPath(.{ .sep = .auto }), + + pub fn symlink(this: *const @This()) sys.Maybe(void) { + if (comptime Environment.isWindows) { + return sys.symlinkOrJunction(this.dest.sliceZ(), this.target.sliceZ(), this.fallback_junction_target.sliceZ()); + } + return sys.symlink(this.target.sliceZ(), this.dest.sliceZ()); + } + + pub const Strategy = enum { + expect_existing, + expect_missing, + ignore_failure, + }; + + pub fn ensureSymlink( + this: *const @This(), + strategy: Strategy, + ) sys.Maybe(void) { + return switch (strategy) { + .ignore_failure => { + return switch (this.symlink()) { + .result => .success, + .err => |symlink_err| switch (symlink_err.getErrno()) { + .NOENT => { + const dest_parent = this.dest.dirname() orelse { + return .success; + }; + + FD.cwd().makePath(u8, dest_parent) catch {}; + _ = this.symlink(); + return .success; + }, + else => .success, + }, + }; + }, + .expect_missing => { + return switch (this.symlink()) { + .result => .success, + .err => |symlink_err1| switch (symlink_err1.getErrno()) { + .NOENT => { + const dest_parent = this.dest.dirname() orelse { + return .initErr(symlink_err1); + }; + + FD.cwd().makePath(u8, dest_parent) catch {}; + return this.symlink(); + }, + .EXIST => { + FD.cwd().deleteTree(this.dest.sliceZ()) catch {}; + return this.symlink(); + }, + else => .initErr(symlink_err1), + }, + }; + }, + .expect_existing => { + const current_link_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(current_link_buf); + var current_link: []const u8 = switch (sys.readlink(this.dest.sliceZ(), current_link_buf)) { + .result => |res| res, + .err => |readlink_err| return switch (readlink_err.getErrno()) { + .NOENT => switch (this.symlink()) { + .result => .success, + .err => |symlink_err| switch (symlink_err.getErrno()) { + .NOENT => { + const dest_parent = this.dest.dirname() orelse { + return .initErr(symlink_err); + }; + + FD.cwd().makePath(u8, dest_parent) catch {}; + return this.symlink(); + }, + else => .initErr(symlink_err), + }, + }, + else => { + FD.cwd().deleteTree(this.dest.sliceZ()) catch {}; + return 
this.symlink(); + }, + }, + }; + + // libuv adds a trailing slash to junctions. + current_link = strings.withoutTrailingSlash(current_link); + + if (strings.eqlLong(current_link, this.target.sliceZ(), true)) { + return .success; + } + + if (comptime Environment.isWindows) { + if (strings.eqlLong(current_link, this.fallback_junction_target.slice(), true)) { + return .success; + } + } + + // this existing link is pointing to the wrong package + _ = sys.unlink(this.dest.sliceZ()); + return this.symlink(); + }, + }; + } +}; + +// @sortImports + +const bun = @import("bun"); +const Environment = bun.Environment; +const FD = bun.FD; +const strings = bun.strings; +const sys = bun.sys; diff --git a/src/install/lifecycle_script_runner.zig b/src/install/lifecycle_script_runner.zig index 0ccb974c52..2b249771b8 100644 --- a/src/install/lifecycle_script_runner.zig +++ b/src/install/lifecycle_script_runner.zig @@ -8,6 +8,7 @@ const Global = bun.Global; const JSC = bun.JSC; const Timer = std.time.Timer; const string = bun.string; +const Store = bun.install.Store; const Process = bun.spawn.Process; const log = Output.scoped(.Script, false); @@ -24,6 +25,7 @@ pub const LifecycleScriptSubprocess = struct { has_called_process_exit: bool = false, manager: *PackageManager, envp: [:null]?[*:0]const u8, + shell_bin: ?[:0]const u8, timer: ?Timer = null, @@ -33,8 +35,15 @@ pub const LifecycleScriptSubprocess = struct { optional: bool = false, started_at: u64 = 0, + ctx: ?InstallCtx, + heap: bun.io.heap.IntrusiveField(LifecycleScriptSubprocess) = .{}, + pub const InstallCtx = struct { + entry_id: Store.Entry.Id, + installer: *Store.Installer, + }; + pub const List = bun.io.heap.Intrusive(LifecycleScriptSubprocess, *PackageManager, sortByStartedAt); fn sortByStartedAt(_: *PackageManager, a: *LifecycleScriptSubprocess, b: *LifecycleScriptSubprocess) bool { @@ -94,9 +103,6 @@ pub const LifecycleScriptSubprocess = struct { this.handleExit(process.status); } - // This is only used on the main thread. - var cwd_z_buf: bun.PathBuffer = undefined; - fn resetOutputFlags(output: *OutputReader, fd: bun.FileDescriptor) void { output.flags.nonblocking = true; output.flags.socket = true; @@ -139,7 +145,6 @@ pub const LifecycleScriptSubprocess = struct { const manager = this.manager; const original_script = this.scripts.items[next_script_index].?; const cwd = this.scripts.cwd; - const env = manager.env; this.stdout.setParent(this); this.stderr.setParent(this); @@ -148,11 +153,9 @@ pub const LifecycleScriptSubprocess = struct { this.current_script_index = next_script_index; this.has_called_process_exit = false; - const shell_bin = if (Environment.isWindows) null else bun.CLI.RunCommand.findShell(env.get("PATH") orelse "", cwd) orelse null; - - var copy_script = try std.ArrayList(u8).initCapacity(manager.allocator, original_script.script.len + 1); + var copy_script = try std.ArrayList(u8).initCapacity(manager.allocator, original_script.len + 1); defer copy_script.deinit(); - try bun.CLI.RunCommand.replacePackageManagerRun(©_script, original_script.script); + try bun.CLI.RunCommand.replacePackageManagerRun(©_script, original_script); try copy_script.append(0); const combined_script: [:0]u8 = copy_script.items[0 .. 
copy_script.items.len - 1 :0]; @@ -174,8 +177,8 @@ pub const LifecycleScriptSubprocess = struct { log("{s} - {s} $ {s}", .{ this.package_name, this.scriptName(), combined_script }); - var argv = if (shell_bin != null and !Environment.isWindows) [_]?[*:0]const u8{ - shell_bin.?, + var argv = if (this.shell_bin != null and !Environment.isWindows) [_]?[*:0]const u8{ + this.shell_bin.?, "-c", combined_script, null, @@ -347,6 +350,10 @@ pub const LifecycleScriptSubprocess = struct { if (exit.code > 0) { if (this.optional) { + if (this.ctx) |ctx| { + ctx.installer.store.entries.items(.step)[ctx.entry_id.get()].store(.done, .monotonic); + ctx.installer.onTaskComplete(ctx.entry_id, .fail); + } _ = this.manager.pending_lifecycle_script_tasks.fetchSub(1, .monotonic); this.deinitAndDeletePackage(); return; @@ -383,6 +390,21 @@ pub const LifecycleScriptSubprocess = struct { } } + if (this.ctx) |ctx| { + switch (this.current_script_index) { + // preinstall + 0 => { + const previous_step = ctx.installer.store.entries.items(.step)[ctx.entry_id.get()].swap(.binaries, .monotonic); + bun.debugAssert(previous_step == .run_preinstall); + ctx.installer.startTask(ctx.entry_id); + _ = this.manager.pending_lifecycle_script_tasks.fetchSub(1, .monotonic); + this.deinit(); + return; + }, + else => {}, + } + } + for (this.current_script_index + 1..Lockfile.Scripts.names.len) |new_script_index| { if (this.scripts.items[new_script_index] != null) { this.resetPolls(); @@ -403,6 +425,15 @@ pub const LifecycleScriptSubprocess = struct { }); } + if (this.ctx) |ctx| { + const previous_step = ctx.installer.store.entries.items(.step)[ctx.entry_id.get()].swap(.done, .monotonic); + if (comptime Environment.ci_assert) { + bun.assertWithLocation(this.current_script_index != 0, @src()); + bun.assertWithLocation(previous_step == .@"run (post)install and (pre/post)prepare", @src()); + } + ctx.installer.onTaskComplete(ctx.entry_id, .success); + } + // the last script finished _ = this.manager.pending_lifecycle_script_tasks.fetchSub(1, .monotonic); @@ -422,6 +453,10 @@ pub const LifecycleScriptSubprocess = struct { }, .err => |err| { if (this.optional) { + if (this.ctx) |ctx| { + ctx.installer.store.entries.items(.step)[ctx.entry_id.get()].store(.done, .monotonic); + ctx.installer.onTaskComplete(ctx.entry_id, .fail); + } _ = this.manager.pending_lifecycle_script_tasks.fetchSub(1, .monotonic); this.deinitAndDeletePackage(); return; @@ -506,17 +541,21 @@ pub const LifecycleScriptSubprocess = struct { manager: *PackageManager, list: Lockfile.Package.Scripts.List, envp: [:null]?[*:0]const u8, + shell_bin: ?[:0]const u8, optional: bool, log_level: PackageManager.Options.LogLevel, foreground: bool, + ctx: ?InstallCtx, ) !void { var lifecycle_subprocess = LifecycleScriptSubprocess.new(.{ .manager = manager, .envp = envp, + .shell_bin = shell_bin, .scripts = list, .package_name = list.package_name, .foreground = foreground, .optional = optional, + .ctx = ctx, }); if (log_level.isVerbose()) { diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 1bb7834bc4..209af71f1a 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -26,15 +26,52 @@ patched_dependencies: PatchedDependenciesMap = .{}, overrides: OverrideMap = .{}, catalogs: CatalogMap = .{}, +node_linker: NodeLinker = .auto, + +pub const NodeLinker = enum(u8) { + // If workspaces are used: isolated + // If not: hoisted + // Used when nodeLinker is absent from package.json/bun.lock/bun.lockb + auto, + + hoisted, + isolated, + + pub fn fromStr(input: 
string) ?NodeLinker { + if (strings.eqlComptime(input, "hoisted")) { + return .hoisted; + } + if (strings.eqlComptime(input, "isolated")) { + return .isolated; + } + return null; + } +}; + +pub const DepSorter = struct { + lockfile: *const Lockfile, + + pub fn isLessThan(sorter: @This(), l: DependencyID, r: DependencyID) bool { + const deps_buf = sorter.lockfile.buffers.dependencies.items; + const string_buf = sorter.lockfile.buffers.string_bytes.items; + + const l_dep = &deps_buf[l]; + const r_dep = &deps_buf[r]; + + return switch (l_dep.behavior.cmp(r_dep.behavior)) { + .lt => true, + .gt => false, + .eq => strings.order(l_dep.name.slice(string_buf), r_dep.name.slice(string_buf)) == .lt, + }; + } +}; + pub const Stream = std.io.FixedBufferStream([]u8); pub const default_filename = "bun.lockb"; pub const Scripts = struct { const MAX_PARALLEL_PROCESSES = 10; - pub const Entry = struct { - script: string, - }; - pub const Entries = std.ArrayListUnmanaged(Entry); + pub const Entries = std.ArrayListUnmanaged(string); pub const names = [_]string{ "preinstall", @@ -73,7 +110,7 @@ pub const Scripts = struct { inline for (Scripts.names) |hook| { const list = &@field(this, hook); for (list.items) |entry| { - allocator.free(entry.script); + allocator.free(entry); } list.deinit(allocator); } @@ -618,6 +655,8 @@ pub fn cleanWithLogger( try new.buffers.preallocate(old.buffers, old.allocator); try new.patched_dependencies.ensureTotalCapacity(old.allocator, old.patched_dependencies.entries.len); + new.node_linker = old.node_linker; + old.scratch.dependency_list_queue.head = 0; { @@ -873,8 +912,6 @@ pub fn hoist( const allocator = lockfile.allocator; var slice = lockfile.packages.slice(); - var path_buf: bun.PathBuffer = undefined; - var builder = Tree.Builder(method){ .name_hashes = slice.items(.name_hash), .queue = .init(allocator), @@ -885,7 +922,6 @@ pub fn hoist( .log = log, .lockfile = lockfile, .manager = manager, - .path_buf = &path_buf, .install_root_dependencies = install_root_dependencies, .workspace_filters = workspace_filters, }; @@ -895,7 +931,6 @@ pub fn hoist( Tree.invalid_id, method, &builder, - if (method == .filter) manager.options.log_level, ); // This goes breadth-first @@ -905,7 +940,6 @@ pub fn hoist( item.hoist_root_id, method, &builder, - if (method == .filter) manager.options.log_level, ); } @@ -1207,6 +1241,7 @@ pub fn initEmpty(this: *Lockfile, allocator: Allocator) void { .workspace_versions = .{}, .overrides = .{}, .catalogs = .{}, + .node_linker = .auto, .meta_hash = zero_hash, }; } @@ -1807,8 +1842,8 @@ pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool, packag inline for (comptime std.meta.fieldNames(Lockfile.Scripts)) |field_name| { const scripts = @field(this.scripts, field_name); for (scripts.items) |script| { - if (script.script.len > 0) { - string_builder.fmtCount("{s}: {s}\n", .{ field_name, script.script }); + if (script.len > 0) { + string_builder.fmtCount("{s}: {s}\n", .{ field_name, script }); has_scripts = true; } } @@ -1843,8 +1878,8 @@ pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool, packag inline for (comptime std.meta.fieldNames(Lockfile.Scripts)) |field_name| { const scripts = @field(this.scripts, field_name); for (scripts.items) |script| { - if (script.script.len > 0) { - _ = string_builder.fmt("{s}: {s}\n", .{ field_name, script.script }); + if (script.len > 0) { + _ = string_builder.fmt("{s}: {s}\n", .{ field_name, script }); } } } @@ -1952,17 +1987,17 @@ pub const default_trusted_dependencies = brk: { 
@compileError("default-trusted-dependencies.txt is too large, please increase 'max_default_trusted_dependencies' in lockfile.zig"); } - // just in case there's duplicates from truncating - if (map.has(dep)) @compileError("Duplicate hash due to u64 -> u32 truncation"); - - map.putAssumeCapacity(dep, {}); + const entry = map.getOrPutAssumeCapacity(dep); + if (entry.found_existing) { + @compileError("Duplicate trusted dependency: " ++ dep); + } } const final = map; break :brk &final; }; -pub fn hasTrustedDependency(this: *Lockfile, name: []const u8) bool { +pub fn hasTrustedDependency(this: *const Lockfile, name: []const u8) bool { if (this.trusted_dependencies) |trusted_dependencies| { const hash = @as(u32, @truncate(String.Builder.stringHash(name))); return trusted_dependencies.contains(hash); diff --git a/src/install/lockfile/Package.zig b/src/install/lockfile/Package.zig index 09e1ebb295..8edae3a083 100644 --- a/src/install/lockfile/Package.zig +++ b/src/install/lockfile/Package.zig @@ -527,6 +527,7 @@ pub const Package = extern struct { update: u32 = 0, overrides_changed: bool = false, catalogs_changed: bool = false, + node_linker_changed: bool = false, // bool for if this dependency should be added to lockfile trusted dependencies. // it is false when the new trusted dependency is coming from the default list. @@ -543,6 +544,7 @@ pub const Package = extern struct { pub inline fn hasDiffs(this: Summary) bool { return this.add > 0 or this.remove > 0 or this.update > 0 or this.overrides_changed or this.catalogs_changed or + this.node_linker_changed or this.added_trusted_dependencies.count() > 0 or this.removed_trusted_dependencies.count() > 0 or this.patched_dependencies_changed; @@ -658,6 +660,10 @@ pub const Package = extern struct { } } } + + if (from_lockfile.node_linker != to_lockfile.node_linker) { + summary.node_linker_changed = true; + } } trusted_dependencies: { @@ -1576,6 +1582,19 @@ pub const Package = extern struct { if (json.get("workspaces")) |workspaces_expr| { lockfile.catalogs.parseCount(lockfile, workspaces_expr, &string_builder); + + if (workspaces_expr.get("nodeLinker")) |node_linker_expr| { + if (!node_linker_expr.isString()) { + try log.addError(source, node_linker_expr.loc, "Expected one of \"isolated\" or \"hoisted\""); + return error.InvalidPackageJSON; + } + + const node_linker_str = node_linker_expr.data.e_string.slice(allocator); + lockfile.node_linker = Lockfile.NodeLinker.fromStr(node_linker_str) orelse { + try log.addError(source, node_linker_expr.loc, "Expected one of \"isolated\" or \"hoisted\""); + return error.InvalidPackageJSON; + }; + } } } diff --git a/src/install/lockfile/Package/Scripts.zig b/src/install/lockfile/Package/Scripts.zig index a93ce33020..516a8017e8 100644 --- a/src/install/lockfile/Package/Scripts.zig +++ b/src/install/lockfile/Package/Scripts.zig @@ -17,12 +17,22 @@ pub const Scripts = extern struct { } pub const List = struct { - items: [Lockfile.Scripts.names.len]?Lockfile.Scripts.Entry, + items: [Lockfile.Scripts.names.len]?string, first_index: u8, total: u8, cwd: stringZ, package_name: string, + pub fn initPreinstall(allocator: std.mem.Allocator, preinstall: string, cwd: string, package_name: string) @This() { + return .{ + .items = .{ allocator.dupe(u8, preinstall) catch bun.outOfMemory(), null, null, null, null, null }, + .first_index = 0, + .total = 1, + .cwd = allocator.dupeZ(u8, cwd) catch bun.outOfMemory(), + .package_name = allocator.dupe(u8, package_name) catch bun.outOfMemory(), + }; + } + pub fn printScripts( this: 
Package.Scripts.List, resolution: *const Resolution, @@ -51,28 +61,28 @@ pub const Scripts = extern struct { if (maybe_script) |script| { Output.pretty(fmt, .{ Lockfile.Scripts.names[script_index], - script.script, + script, }); } } } - pub fn first(this: Package.Scripts.List) Lockfile.Scripts.Entry { + pub fn first(this: Package.Scripts.List) string { if (comptime Environment.allow_assert) { assert(this.items[this.first_index] != null); } return this.items[this.first_index].?; } - pub fn deinit(this: Package.Scripts.List, allocator: std.mem.Allocator) void { - for (this.items) |maybe_item| { - if (maybe_item) |item| { - allocator.free(item.script); - } - } + // pub fn deinit(this: Package.Scripts.List, allocator: std.mem.Allocator) void { + // for (this.items) |maybe_item| { + // if (maybe_item) |item| { + // allocator.free(item); + // } + // } - allocator.free(this.cwd); - } + // allocator.free(this.cwd); + // } pub fn appendToLockfile(this: Package.Scripts.List, lockfile: *Lockfile) void { inline for (this.items, 0..) |maybe_script, i| { @@ -110,37 +120,31 @@ pub const Scripts = extern struct { pub fn getScriptEntries( this: *const Package.Scripts, - lockfile: *Lockfile, + lockfile: *const Lockfile, lockfile_buf: string, resolution_tag: Resolution.Tag, add_node_gyp_rebuild_script: bool, // return: first_index, total, entries - ) struct { i8, u8, [Lockfile.Scripts.names.len]?Lockfile.Scripts.Entry } { + ) struct { i8, u8, [Lockfile.Scripts.names.len]?string } { const allocator = lockfile.allocator; var script_index: u8 = 0; var first_script_index: i8 = -1; - var scripts: [6]?Lockfile.Scripts.Entry = .{null} ** 6; + var scripts: [6]?string = .{null} ** 6; var counter: u8 = 0; if (add_node_gyp_rebuild_script) { { script_index += 1; - const entry: Lockfile.Scripts.Entry = .{ - .script = allocator.dupe(u8, "node-gyp rebuild") catch unreachable, - }; if (first_script_index == -1) first_script_index = @intCast(script_index); - scripts[script_index] = entry; + scripts[script_index] = allocator.dupe(u8, "node-gyp rebuild") catch unreachable; script_index += 1; counter += 1; } // missing install and preinstall, only need to check postinstall if (!this.postinstall.isEmpty()) { - const entry: Lockfile.Scripts.Entry = .{ - .script = allocator.dupe(u8, this.preinstall.slice(lockfile_buf)) catch unreachable, - }; if (first_script_index == -1) first_script_index = @intCast(script_index); - scripts[script_index] = entry; + scripts[script_index] = allocator.dupe(u8, this.preinstall.slice(lockfile_buf)) catch unreachable; counter += 1; } script_index += 1; @@ -154,11 +158,8 @@ pub const Scripts = extern struct { inline for (install_scripts) |hook| { const script = @field(this, hook); if (!script.isEmpty()) { - const entry: Lockfile.Scripts.Entry = .{ - .script = allocator.dupe(u8, script.slice(lockfile_buf)) catch unreachable, - }; if (first_script_index == -1) first_script_index = @intCast(script_index); - scripts[script_index] = entry; + scripts[script_index] = allocator.dupe(u8, script.slice(lockfile_buf)) catch unreachable; counter += 1; } script_index += 1; @@ -176,11 +177,8 @@ pub const Scripts = extern struct { inline for (prepare_scripts) |hook| { const script = @field(this, hook); if (!script.isEmpty()) { - const entry: Lockfile.Scripts.Entry = .{ - .script = allocator.dupe(u8, script.slice(lockfile_buf)) catch unreachable, - }; if (first_script_index == -1) first_script_index = @intCast(script_index); - scripts[script_index] = entry; + scripts[script_index] = allocator.dupe(u8, 
script.slice(lockfile_buf)) catch unreachable; counter += 1; } script_index += 1; @@ -189,11 +187,8 @@ pub const Scripts = extern struct { .workspace => { script_index += 1; if (!this.prepare.isEmpty()) { - const entry: Lockfile.Scripts.Entry = .{ - .script = allocator.dupe(u8, this.prepare.slice(lockfile_buf)) catch unreachable, - }; if (first_script_index == -1) first_script_index = @intCast(script_index); - scripts[script_index] = entry; + scripts[script_index] = allocator.dupe(u8, this.prepare.slice(lockfile_buf)) catch unreachable; counter += 1; } script_index += 2; @@ -206,9 +201,9 @@ pub const Scripts = extern struct { pub fn createList( this: *const Package.Scripts, - lockfile: *Lockfile, + lockfile: *const Lockfile, lockfile_buf: []const u8, - cwd_: string, + cwd_: *bun.AbsPath(.{ .sep = .auto }), package_name: string, resolution_tag: Resolution.Tag, add_node_gyp_rebuild_script: bool, @@ -219,16 +214,10 @@ pub const Scripts = extern struct { var cwd_buf: if (Environment.isWindows) bun.PathBuffer else void = undefined; const cwd = if (comptime !Environment.isWindows) - cwd_ + cwd_.slice() else brk: { - @memcpy(cwd_buf[0..cwd_.len], cwd_); - cwd_buf[cwd_.len] = 0; - const cwd_handle = bun.openDirNoRenamingOrDeletingWindows(bun.invalid_fd, cwd_buf[0..cwd_.len :0]) catch break :brk cwd_; - - var buf: bun.WPathBuffer = undefined; - const new_cwd = bun.windows.GetFinalPathNameByHandle(cwd_handle.fd, .{}, &buf) catch break :brk cwd_; - - break :brk strings.convertUTF16toUTF8InBuffer(&cwd_buf, new_cwd) catch break :brk cwd_; + const cwd_handle = bun.openDirNoRenamingOrDeletingWindows(bun.invalid_fd, cwd_.sliceZ()) catch break :brk cwd_.slice(); + break :brk FD.fromStdDir(cwd_handle).getFdPath(&cwd_buf) catch break :brk cwd_.slice(); }; return .{ @@ -274,54 +263,36 @@ pub const Scripts = extern struct { pub fn getList( this: *Package.Scripts, log: *logger.Log, - lockfile: *Lockfile, - node_modules: *PackageManager.PackageInstaller.LazyPackageDestinationDir, - abs_node_modules_path: string, + lockfile: *const Lockfile, + folder_path: *bun.AbsPath(.{ .sep = .auto }), folder_name: string, resolution: *const Resolution, ) !?Package.Scripts.List { - var path_buf: [bun.MAX_PATH_BYTES * 2]u8 = undefined; if (this.hasAny()) { const add_node_gyp_rebuild_script = if (lockfile.hasTrustedDependency(folder_name) and this.install.isEmpty() and this.preinstall.isEmpty()) brk: { - const binding_dot_gyp_path = Path.joinAbsStringZ( - abs_node_modules_path, - &[_]string{ folder_name, "binding.gyp" }, - .auto, - ); + var save = folder_path.save(); + defer save.restore(); + folder_path.append("binding.gyp"); - break :brk bun.sys.exists(binding_dot_gyp_path); + break :brk bun.sys.exists(folder_path.slice()); } else false; - const cwd = Path.joinAbsStringBufZTrailingSlash( - abs_node_modules_path, - &path_buf, - &[_]string{folder_name}, - .auto, - ); - return this.createList( lockfile, lockfile.buffers.string_bytes.items, - cwd, + folder_path, folder_name, resolution.tag, add_node_gyp_rebuild_script, ); } else if (!this.filled) { - const abs_folder_path = Path.joinAbsStringBufZTrailingSlash( - abs_node_modules_path, - &path_buf, - &[_]string{folder_name}, - .auto, - ); return this.createFromPackageJSON( log, lockfile, - node_modules, - abs_folder_path, + folder_path, folder_name, resolution.tag, ); @@ -335,14 +306,16 @@ pub const Scripts = extern struct { allocator: std.mem.Allocator, string_builder: *Lockfile.StringBuilder, log: *logger.Log, - node_modules: 
*PackageManager.PackageInstaller.LazyPackageDestinationDir, - folder_name: string, + folder_path: *bun.AbsPath(.{ .sep = .auto }), ) !void { const json = brk: { + var save = folder_path.save(); + defer save.restore(); + folder_path.append("package.json"); + const json_src = brk2: { - const json_path = bun.path.joinZ([_]string{ folder_name, "package.json" }, .auto); - const buf = try bun.sys.File.readFrom(try node_modules.getDir(), json_path, allocator).unwrap(); - break :brk2 logger.Source.initPathString(json_path, buf); + const buf = try bun.sys.File.readFrom(bun.FD.cwd(), folder_path.sliceZ(), allocator).unwrap(); + break :brk2 logger.Source.initPathString(folder_path.slice(), buf); }; initializeStore(); @@ -362,9 +335,8 @@ pub const Scripts = extern struct { pub fn createFromPackageJSON( this: *Package.Scripts, log: *logger.Log, - lockfile: *Lockfile, - node_modules: *PackageManager.PackageInstaller.LazyPackageDestinationDir, - abs_folder_path: string, + lockfile: *const Lockfile, + folder_path: *bun.AbsPath(.{ .sep = .auto }), folder_name: string, resolution_tag: Resolution.Tag, ) !?Package.Scripts.List { @@ -372,22 +344,20 @@ pub const Scripts = extern struct { tmp.initEmpty(lockfile.allocator); defer tmp.deinit(); var builder = tmp.stringBuilder(); - try this.fillFromPackageJSON(lockfile.allocator, &builder, log, node_modules, folder_name); + try this.fillFromPackageJSON(lockfile.allocator, &builder, log, folder_path); const add_node_gyp_rebuild_script = if (this.install.isEmpty() and this.preinstall.isEmpty()) brk: { - const binding_dot_gyp_path = Path.joinAbsStringZ( - abs_folder_path, - &[_]string{"binding.gyp"}, - .auto, - ); + const save = folder_path.save(); + defer save.restore(); + folder_path.append("binding.gyp"); - break :brk bun.sys.exists(binding_dot_gyp_path); + break :brk bun.sys.exists(folder_path.slice()); } else false; return this.createList( lockfile, tmp.buffers.string_bytes.items, - abs_folder_path, + folder_path, folder_name, resolution_tag, add_node_gyp_rebuild_script, @@ -402,8 +372,6 @@ const JSAst = bun.JSAst; const JSON = bun.JSON; const Lockfile = install.Lockfile; const Output = bun.Output; -const PackageManager = install.PackageManager; -const Path = bun.path; const Resolution = bun.install.Resolution; const Semver = bun.Semver; const String = Semver.String; @@ -419,3 +387,4 @@ const stringZ = [:0]const u8; const strings = bun.strings; const Package = Lockfile.Package; const debug = Output.scoped(.Lockfile, true); +const FD = bun.FD; diff --git a/src/install/lockfile/Tree.zig b/src/install/lockfile/Tree.zig index f20ff9b013..46da478468 100644 --- a/src/install/lockfile/Tree.zig +++ b/src/install/lockfile/Tree.zig @@ -244,7 +244,6 @@ pub fn Builder(comptime method: BuilderMethod) type { sort_buf: std.ArrayListUnmanaged(DependencyID) = .{}, workspace_filters: if (method == .filter) []const WorkspaceFilter else void = if (method == .filter) &.{}, install_root_dependencies: if (method == .filter) bool else void, - path_buf: []u8, pub fn maybeReportError(this: *@This(), comptime fmt: string, args: anytype) void { this.log.addErrorFmt(null, logger.Loc.Empty, this.allocator, fmt, args) catch {}; @@ -316,13 +315,120 @@ pub fn Builder(comptime method: BuilderMethod) type { }; } +pub fn isFilteredDependencyOrWorkspace( + dep_id: DependencyID, + parent_pkg_id: PackageID, + workspace_filters: []const WorkspaceFilter, + install_root_dependencies: bool, + manager: *const PackageManager, + lockfile: *const Lockfile, +) bool { + const pkg_id = 
lockfile.buffers.resolutions.items[dep_id]; + if (pkg_id >= lockfile.packages.len) { + return true; + } + + const pkgs = lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + const pkg_metas = pkgs.items(.meta); + const pkg_resolutions = pkgs.items(.resolution); + + const dep = lockfile.buffers.dependencies.items[dep_id]; + const res = &pkg_resolutions[pkg_id]; + const parent_res = &pkg_resolutions[parent_pkg_id]; + + if (pkg_metas[pkg_id].isDisabled()) { + if (manager.options.log_level.isVerbose()) { + const meta = &pkg_metas[pkg_id]; + const name = lockfile.str(&pkg_names[pkg_id]); + if (!meta.os.isMatch() and !meta.arch.isMatch()) { + Output.prettyErrorln("Skip installing {s} - cpu & os mismatch", .{name}); + } else if (!meta.os.isMatch()) { + Output.prettyErrorln("Skip installing {s} - os mismatch", .{name}); + } else if (!meta.arch.isMatch()) { + Output.prettyErrorln("Skip installing {s} - cpu mismatch", .{name}); + } + } + return true; + } + + if (dep.behavior.isBundled()) { + return true; + } + + const dep_features = switch (parent_res.tag) { + .root, .workspace, .folder => manager.options.local_package_features, + else => manager.options.remote_package_features, + }; + + if (!dep.behavior.isEnabled(dep_features)) { + return true; + } + + // Filtering only applies to the root package dependencies. Also + // --filter has a different meaning if a new package is being installed. + if (manager.subcommand != .install or parent_pkg_id != 0) { + return false; + } + + if (!dep.behavior.isWorkspaceOnly()) { + if (!install_root_dependencies) { + return true; + } + + return false; + } + + var workspace_matched = workspace_filters.len == 0; + + for (workspace_filters) |filter| { + var filter_path: bun.AbsPath(.{ .sep = .posix }) = .initTopLevelDir(); + defer filter_path.deinit(); + + const pattern, const name_or_path = switch (filter) { + .all => { + workspace_matched = true; + continue; + }, + .name => |name_pattern| .{ + name_pattern, + pkg_names[pkg_id].slice(lockfile.buffers.string_bytes.items), + }, + .path => |path_pattern| path_pattern: { + if (res.tag != .workspace) { + return false; + } + + filter_path.join(&.{res.value.workspace.slice(lockfile.buffers.string_bytes.items)}); + + break :path_pattern .{ path_pattern, filter_path.slice() }; + }, + }; + + switch (bun.glob.match(undefined, pattern, name_or_path)) { + .match, .negate_match => workspace_matched = true, + + .negate_no_match => { + // always skip if a pattern specifically says "!" 
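+ // e.g. a '!foo' filter excludes foo even when another filter matches it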
+ workspace_matched = false; + break; + }, + + .no_match => { + // keep looking + }, + } + } + + return !workspace_matched; +} + pub fn processSubtree( this: *const Tree, dependency_id: DependencyID, hoist_root_id: Tree.Id, comptime method: BuilderMethod, builder: *Builder(method), - log_level: if (method == .filter) PackageManager.Options.LogLevel else void, ) SubtreeError!void { const parent_pkg_id = switch (dependency_id) { root_dep_id => 0, @@ -350,8 +456,6 @@ pub fn processSubtree( const pkgs = builder.lockfile.packages.slice(); const pkg_resolutions = pkgs.items(.resolution); - const pkg_metas = pkgs.items(.meta); - const pkg_names = pkgs.items(.name); builder.sort_buf.clearRetainingCapacity(); try builder.sort_buf.ensureUnusedCapacity(builder.allocator, resolution_list.len); @@ -360,31 +464,13 @@ pub fn processSubtree( builder.sort_buf.appendAssumeCapacity(@intCast(dep_id)); } - const DepSorter = struct { - lockfile: *const Lockfile, - - pub fn isLessThan(sorter: @This(), l: DependencyID, r: DependencyID) bool { - const deps_buf = sorter.lockfile.buffers.dependencies.items; - const string_buf = sorter.lockfile.buffers.string_bytes.items; - - const l_dep = deps_buf[l]; - const r_dep = deps_buf[r]; - - return switch (l_dep.behavior.cmp(r_dep.behavior)) { - .lt => true, - .gt => false, - .eq => strings.order(l_dep.name.slice(string_buf), r_dep.name.slice(string_buf)) == .lt, - }; - } - }; - std.sort.pdq( DependencyID, builder.sort_buf.items, - DepSorter{ + Lockfile.DepSorter{ .lockfile = builder.lockfile, }, - DepSorter.isLessThan, + Lockfile.DepSorter.isLessThan, ); for (builder.sort_buf.items) |dep_id| { @@ -394,101 +480,16 @@ pub fn processSubtree( // filter out disabled dependencies if (comptime method == .filter) { - if (builder.lockfile.isResolvedDependencyDisabled( + if (isFilteredDependencyOrWorkspace( dep_id, - switch (pkg_resolutions[parent_pkg_id].tag) { - .root, .workspace, .folder => builder.manager.options.local_package_features, - else => builder.manager.options.remote_package_features, - }, - &pkg_metas[pkg_id], + parent_pkg_id, + builder.workspace_filters, + builder.install_root_dependencies, + builder.manager, + builder.lockfile, )) { - if (log_level.isVerbose()) { - const meta = &pkg_metas[pkg_id]; - const name = builder.lockfile.str(&pkg_names[pkg_id]); - if (!meta.os.isMatch() and !meta.arch.isMatch()) { - Output.prettyErrorln("Skip installing '{s}' cpu & os mismatch", .{name}); - } else if (!meta.os.isMatch()) { - Output.prettyErrorln("Skip installing '{s}' os mismatch", .{name}); - } else if (!meta.arch.isMatch()) { - Output.prettyErrorln("Skip installing '{s}' cpu mismatch", .{name}); - } - } - continue; } - - if (builder.manager.subcommand == .install) dont_skip: { - // only do this when parent is root. 
workspaces are always dependencies of the root - // package, and the root package is always called with `processSubtree` - if (parent_pkg_id == 0 and builder.workspace_filters.len > 0) { - if (!builder.dependencies[dep_id].behavior.isWorkspaceOnly()) { - if (builder.install_root_dependencies) { - break :dont_skip; - } - - continue; - } - - var match = false; - - for (builder.workspace_filters) |workspace_filter| { - const res_id = builder.resolutions[dep_id]; - - const pattern, const path_or_name = switch (workspace_filter) { - .name => |pattern| .{ pattern, pkg_names[res_id].slice(builder.buf()) }, - - .path => |pattern| path: { - const res = &pkg_resolutions[res_id]; - if (res.tag != .workspace) { - break :dont_skip; - } - const res_path = res.value.workspace.slice(builder.buf()); - - // occupy `builder.path_buf` - var abs_res_path = strings.withoutTrailingSlash(bun.path.joinAbsStringBuf( - FileSystem.instance.top_level_dir, - builder.path_buf, - &.{res_path}, - .auto, - )); - - if (comptime Environment.isWindows) { - abs_res_path = abs_res_path[Path.windowsVolumeNameLen(abs_res_path)[0]..]; - Path.dangerouslyConvertPathToPosixInPlace(u8, builder.path_buf[0..abs_res_path.len]); - } - - break :path .{ - pattern, - abs_res_path, - }; - }, - - .all => { - match = true; - continue; - }, - }; - - switch (bun.glob.walk.matchImpl(builder.allocator, pattern, path_or_name)) { - .match, .negate_match => match = true, - - .negate_no_match => { - // always skip if a pattern specifically says "!" - match = false; - break; - }, - - .no_match => { - // keep current - }, - } - } - - if (!match) { - continue; - } - } - } } const hoisted: HoistDependencyResult = hoisted: { @@ -646,7 +647,6 @@ const DependencyID = install.DependencyID; const DependencyIDList = Lockfile.DependencyIDList; const Environment = bun.Environment; const ExternalSlice = Lockfile.ExternalSlice; -const FileSystem = bun.fs.FileSystem; const Lockfile = install.Lockfile; const OOM = bun.OOM; const Output = bun.Output; @@ -666,7 +666,6 @@ const invalid_package_id = install.invalid_package_id; const logger = bun.logger; const string = []const u8; const stringZ = bun.stringZ; -const strings = bun.strings; const z_allocator = bun.z_allocator; const bun = @import("bun"); diff --git a/src/install/lockfile/bun.lock.zig b/src/install/lockfile/bun.lock.zig index 7b782d5f69..db50bdf193 100644 --- a/src/install/lockfile/bun.lock.zig +++ b/src/install/lockfile/bun.lock.zig @@ -91,6 +91,14 @@ pub const Stringifier = struct { try writer.print("\"lockfileVersion\": {d},\n", .{@intFromEnum(Version.current)}); try writeIndent(writer, indent); + if (lockfile.node_linker != .auto) { + try writer.print( + \\"nodeLinker": "{s}", + \\ + , .{@tagName(lockfile.node_linker)}); + try writeIndent(writer, indent); + } + try writer.writeAll("\"workspaces\": {\n"); try incIndent(writer, indent); { @@ -1002,6 +1010,7 @@ const ParseError = OOM || error{ InvalidOverridesObject, InvalidCatalogObject, InvalidCatalogsObject, + InvalidNodeLinkerValue, InvalidDependencyName, InvalidDependencyVersion, InvalidPackageResolution, @@ -1344,7 +1353,7 @@ pub fn parseIntoBinaryLockfile( if (!key.isString() or key.data.e_string.len() == 0) { try log.addError(source, key.loc, "Expected a non-empty string"); - return error.InvalidCatalogObject; + return error.InvalidCatalogsObject; } const dep_name_str = key.asString(allocator).?; @@ -1353,7 +1362,7 @@ pub fn parseIntoBinaryLockfile( if (!value.isString()) { try log.addError(source, value.loc, "Expected a string"); - return 
error.InvalidCatalogObject; + return error.InvalidCatalogsObject; } const version_str = value.asString(allocator).?; @@ -1374,7 +1383,7 @@ pub fn parseIntoBinaryLockfile( manager, ) orelse { try log.addError(source, value.loc, "Invalid catalog version"); - return error.InvalidCatalogObject; + return error.InvalidCatalogsObject; }, }; @@ -1386,7 +1395,7 @@ pub fn parseIntoBinaryLockfile( if (entry.found_existing) { try log.addError(source, key.loc, "Duplicate catalog entry"); - return error.InvalidCatalogObject; + return error.InvalidCatalogsObject; } entry.value_ptr.* = dep; @@ -1394,6 +1403,21 @@ pub fn parseIntoBinaryLockfile( } } + if (root.get("nodeLinker")) |node_linker_expr| { + if (!node_linker_expr.isString()) { + try log.addError(source, node_linker_expr.loc, "Expected a string"); + return error.InvalidNodeLinkerValue; + } + + const node_linker_str = node_linker_expr.data.e_string.slice(allocator); + lockfile.node_linker = BinaryLockfile.NodeLinker.fromStr(node_linker_str) orelse { + try log.addError(source, node_linker_expr.loc, "Expected one of \"isolated\" or \"hoisted\""); + return error.InvalidNodeLinkerValue; + }; + } else { + lockfile.node_linker = .auto; + } + const workspaces_obj = root.getObject("workspaces") orelse { try log.addError(source, root.loc, "Missing a workspaces object property"); return error.InvalidWorkspaceObject; diff --git a/src/install/lockfile/bun.lockb.zig b/src/install/lockfile/bun.lockb.zig index 8f546724f4..29f46c22eb 100644 --- a/src/install/lockfile/bun.lockb.zig +++ b/src/install/lockfile/bun.lockb.zig @@ -7,6 +7,7 @@ const has_trusted_dependencies_tag: u64 = @bitCast(@as([8]u8, "tRuStEDd".*)); const has_empty_trusted_dependencies_tag: u64 = @bitCast(@as([8]u8, "eMpTrUsT".*)); const has_overrides_tag: u64 = @bitCast(@as([8]u8, "oVeRriDs".*)); const has_catalogs_tag: u64 = @bitCast(@as([8]u8, "cAtAlOgS".*)); +const has_node_linker_tag: u64 = @bitCast(@as([8]u8, "nOdLiNkR".*)); pub fn save(this: *Lockfile, verbose_log: bool, bytes: *std.ArrayList(u8), total_size: *usize, end_pos: *usize) !void { @@ -244,6 +245,11 @@ pub fn save(this: *Lockfile, verbose_log: bool, bytes: *std.ArrayList(u8), total } } + if (this.node_linker != .auto) { + try writer.writeAll(std.mem.asBytes(&has_node_linker_tag)); + try writer.writeInt(u8, @intFromEnum(this.node_linker), .little); + } + total_size.* = try stream.getPos(); try writer.writeAll(&alignment_bytes_to_repeat_buffer); @@ -520,6 +526,21 @@ pub fn load( } } + { + lockfile.node_linker = .auto; + + const remaining_in_buffer = total_buffer_size -| stream.pos; + + if (remaining_in_buffer > 8 and total_buffer_size <= stream.buffer.len) { + const next_num = try reader.readInt(u64, .little); + if (next_num == has_node_linker_tag) { + lockfile.node_linker = try reader.readEnum(Lockfile.NodeLinker, .little); + } else { + stream.pos -= 8; + } + } + } + lockfile.scratch = Lockfile.Scratch.init(allocator); lockfile.package_index = PackageIndex.Map.initContext(allocator, .{}); lockfile.string_pool = StringPool.init(allocator); diff --git a/src/install/npm.zig b/src/install/npm.zig index d333ea5c20..b4fd767b1b 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -724,8 +724,8 @@ pub const OperatingSystem = enum(u16) { pub fn jsFunctionOperatingSystemIsMatch(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const args = callframe.arguments_old(1); var operating_system = negatable(.none); - var iter = args.ptr[0].arrayIterator(globalObject); - while (iter.next()) |item| { + var 
iter = try args.ptr[0].arrayIterator(globalObject); + while (try iter.next()) |item| { const slice = try item.toSlice(globalObject, bun.default_allocator); defer slice.deinit(); operating_system.apply(slice.slice()); @@ -841,8 +841,8 @@ pub const Architecture = enum(u16) { pub fn jsFunctionArchitectureIsMatch(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const args = callframe.arguments_old(1); var architecture = negatable(.none); - var iter = args.ptr[0].arrayIterator(globalObject); - while (iter.next()) |item| { + var iter = try args.ptr[0].arrayIterator(globalObject); + while (try iter.next()) |item| { const slice = try item.toSlice(globalObject, bun.default_allocator); defer slice.deinit(); architecture.apply(slice.slice()); @@ -1572,7 +1572,14 @@ pub const PackageManifest = struct { source, log, arena.allocator(), - ) catch return null; + ) catch { + // don't use the arena memory! + var cloned_log: logger.Log = .init(bun.default_allocator); + try log.cloneToWithRecycled(&cloned_log, true); + log.* = cloned_log; + + return null; + }; if (json.asProperty("error")) |error_q| { if (error_q.expr.asString(allocator)) |err| { diff --git a/src/install/patch_install.zig b/src/install/patch_install.zig index 20463990ae..7723582ab6 100644 --- a/src/install/patch_install.zig +++ b/src/install/patch_install.zig @@ -84,7 +84,7 @@ pub const PatchTask = struct { cache_dir_subpath_without_patch_hash: stringZ, /// this is non-null if this was called before a Task, for example extracting - task_id: ?Task.Id.Type = null, + task_id: ?Task.Id = null, install_context: ?struct { dependency_id: DependencyID, tree_id: Lockfile.Tree.Id, @@ -324,7 +324,7 @@ pub const PatchTask = struct { .cache_dir_subpath = this.callback.apply.cache_dir_subpath_without_patch_hash, .destination_dir_subpath = tempdir_name, .destination_dir_subpath_buf = tmpname_buf[0..], - .patch = .{}, + .patch = null, .progress = null, .package_name = pkg_name, .package_version = resolution_label, diff --git a/src/install/repository.zig b/src/install/repository.zig index a41a81af60..09f8953c76 100644 --- a/src/install/repository.zig +++ b/src/install/repository.zig @@ -293,11 +293,52 @@ pub const Repository = extern struct { return lhs.resolved.eql(rhs.resolved, lhs_buf, rhs_buf); } - pub fn formatAs(this: *const Repository, label: string, buf: []const u8, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) !void { + pub fn formatAs(this: *const Repository, label: string, buf: []const u8, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { const formatter = Formatter{ .label = label, .repository = this, .buf = buf }; return try formatter.format(layout, opts, writer); } + pub fn fmtStorePath(this: *const Repository, label: string, string_buf: string) StorePathFormatter { + return .{ + .repo = this, + .label = label, + .string_buf = string_buf, + }; + } + + pub const StorePathFormatter = struct { + repo: *const Repository, + label: string, + string_buf: string, + + pub fn format(this: StorePathFormatter, comptime _: string, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { + try writer.print("{}", .{Install.fmtStorePath(this.label)}); + + if (!this.repo.owner.isEmpty()) { + try writer.print("{}", .{this.repo.owner.fmtStorePath(this.string_buf)}); + // try writer.writeByte(if (this.opts.replace_slashes) '+' else '/'); + try writer.writeByte('+'); + } else if 
(Dependency.isSCPLikePath(this.repo.repo.slice(this.string_buf))) { + // try writer.print("ssh:{s}", .{if (this.opts.replace_slashes) "++" else "//"}); + try writer.writeAll("ssh:++"); + } + + try writer.print("{}", .{this.repo.repo.fmtStorePath(this.string_buf)}); + + if (!this.repo.resolved.isEmpty()) { + try writer.writeByte('+'); // this would be '#' but it's not valid on windows + var resolved = this.repo.resolved.slice(this.string_buf); + if (strings.lastIndexOfChar(resolved, '-')) |i| { + resolved = resolved[i + 1 ..]; + } + try writer.print("{}", .{Install.fmtStorePath(resolved)}); + } else if (!this.repo.committish.isEmpty()) { + try writer.writeByte('+'); // this would be '#' but it's not valid on windows + try writer.print("{}", .{this.repo.committish.fmtStorePath(this.string_buf)}); + } + } + }; + pub fn fmt(this: *const Repository, label: string, buf: []const u8) Formatter { return .{ .repository = this, @@ -310,7 +351,7 @@ pub const Repository = extern struct { label: []const u8 = "", buf: []const u8, repository: *const Repository, - pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { if (comptime Environment.allow_assert) bun.assert(formatter.label.len > 0); try writer.writeAll(formatter.label); @@ -458,14 +499,14 @@ pub const Repository = extern struct { env: DotEnv.Map, log: *logger.Log, cache_dir: std.fs.Dir, - task_id: u64, + task_id: Install.Task.Id, name: string, url: string, attempt: u8, ) !std.fs.Dir { bun.Analytics.Features.git_dependencies += 1; const folder_name = try std.fmt.bufPrintZ(&folder_name_buf, "{any}.git", .{ - bun.fmt.hexIntLower(task_id), + bun.fmt.hexIntLower(task_id.get()), }); return if (cache_dir.openDirZ(folder_name, .{})) |dir| fetch: { @@ -523,10 +564,10 @@ pub const Repository = extern struct { repo_dir: std.fs.Dir, name: string, committish: string, - task_id: u64, + task_id: Install.Task.Id, ) !string { const path = Path.joinAbsString(PackageManager.get().cache_directory_path, &.{try std.fmt.bufPrint(&folder_name_buf, "{any}.git", .{ - bun.fmt.hexIntLower(task_id), + bun.fmt.hexIntLower(task_id.get()), })}, .auto); _ = repo_dir; diff --git a/src/install/resolution.zig b/src/install/resolution.zig index 50fe0d936e..b30775c6aa 100644 --- a/src/install/resolution.zig +++ b/src/install/resolution.zig @@ -189,6 +189,37 @@ pub const Resolution = extern struct { }; } + const StorePathFormatter = struct { + res: *const Resolution, + string_buf: string, + // opts: String.StorePathFormatter.Options, + + pub fn format(this: StorePathFormatter, comptime _: string, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { + const string_buf = this.string_buf; + const res = this.res.value; + switch (this.res.tag) { + .root => try writer.writeAll("root"), + .npm => try writer.print("{}", .{res.npm.version.fmt(string_buf)}), + .local_tarball => try writer.print("{}", .{res.local_tarball.fmtStorePath(string_buf)}), + .remote_tarball => try writer.print("{}", .{res.remote_tarball.fmtStorePath(string_buf)}), + .folder => try writer.print("{}", .{res.folder.fmtStorePath(string_buf)}), + .git => try writer.print("{}", .{res.git.fmtStorePath("git+", string_buf)}), + .github => try writer.print("{}", .{res.github.fmtStorePath("github+", string_buf)}), + .workspace => try writer.print("{}", .{res.workspace.fmtStorePath(string_buf)}), + .symlink => try 
writer.print("{}", .{res.symlink.fmtStorePath(string_buf)}), + .single_file_module => try writer.print("{}", .{res.single_file_module.fmtStorePath(string_buf)}), + else => {}, + } + } + }; + + pub fn fmtStorePath(this: *const Resolution, string_buf: string) StorePathFormatter { + return .{ + .res = this, + .string_buf = string_buf, + }; + } + pub fn fmtURL(this: *const Resolution, string_bytes: []const u8) URLFormatter { return URLFormatter{ .resolution = this, .buf = string_bytes }; } @@ -257,7 +288,7 @@ pub const Resolution = extern struct { buf: []const u8, - pub fn format(formatter: URLFormatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) !void { + pub fn format(formatter: URLFormatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { const buf = formatter.buf; const value = formatter.resolution.value; switch (formatter.resolution.tag) { @@ -280,7 +311,7 @@ pub const Resolution = extern struct { buf: []const u8, path_sep: bun.fmt.PathFormatOptions.Sep, - pub fn format(formatter: Formatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) !void { + pub fn format(formatter: Formatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { const buf = formatter.buf; const value = formatter.resolution.value; switch (formatter.resolution.tag) { diff --git a/src/install/windows-shim/bun_shim_impl.zig b/src/install/windows-shim/bun_shim_impl.zig index e3359adfac..1beb26b780 100644 --- a/src/install/windows-shim/bun_shim_impl.zig +++ b/src/install/windows-shim/bun_shim_impl.zig @@ -203,7 +203,7 @@ const FailReason = enum { .InterpreterNotFoundBun => \\Please run the following command, or double check %PATH% is right. 
\\ - \\ powershell -c "irm bun.sh/install.ps1|iex" + \\ powershell -c "irm bun.com/install.ps1|iex" \\ \\ , diff --git a/src/io/io.zig b/src/io/io.zig index 1aba6f7775..69c6dae204 100644 --- a/src/io/io.zig +++ b/src/io/io.zig @@ -11,6 +11,9 @@ const Environment = bun.Environment; pub const heap = @import("./heap.zig"); const JSC = bun.JSC; +pub const openForWriting = @import("./openForWriting.zig").openForWriting; +pub const openForWritingImpl = @import("./openForWriting.zig").openForWritingImpl; + const log = bun.Output.scoped(.loop, false); const posix = std.posix; diff --git a/src/io/openForWriting.zig b/src/io/openForWriting.zig new file mode 100644 index 0000000000..700d329939 --- /dev/null +++ b/src/io/openForWriting.zig @@ -0,0 +1,139 @@ +pub fn openForWriting( + dir: bun.FileDescriptor, + input_path: anytype, + input_flags: i32, + mode: bun.Mode, + pollable: *bool, + is_socket: *bool, + force_sync: bool, + out_nonblocking: *bool, + comptime Ctx: type, + ctx: Ctx, + comptime onForceSyncOrIsaTTY: *const fn (Ctx) void, + comptime isPollable: *const fn (mode: bun.Mode) bool, +) JSC.Maybe(bun.FileDescriptor) { + return openForWritingImpl( + dir, + input_path, + input_flags, + mode, + pollable, + is_socket, + force_sync, + out_nonblocking, + Ctx, + ctx, + onForceSyncOrIsaTTY, + isPollable, + bun.sys.openat, + ); +} + +pub fn openForWritingImpl( + dir: bun.FileDescriptor, + input_path: anytype, + input_flags: i32, + mode: bun.Mode, + pollable: *bool, + is_socket: *bool, + force_sync: bool, + out_nonblocking: *bool, + comptime Ctx: type, + ctx: Ctx, + comptime onForceSyncOrIsaTTY: *const fn (Ctx) void, + comptime isPollable: *const fn (mode: bun.Mode) bool, + comptime openat: *const fn (dir: bun.FileDescriptor, path: [:0]const u8, flags: i32, mode: bun.Mode) JSC.Maybe(bun.FileDescriptor), +) JSC.Maybe(bun.FileDescriptor) { + const PathT = @TypeOf(input_path); + if (PathT != bun.webcore.PathOrFileDescriptor and PathT != [:0]const u8 and PathT != [:0]u8) { + @compileError("Only string or PathOrFileDescriptor is supported but got: " ++ @typeName(PathT)); + } + + // TODO: this should be concurrent. + var isatty = false; + var is_nonblocking = false; + const result = + switch (PathT) { + bun.webcore.PathOrFileDescriptor => switch (input_path) { + .path => |path| brk: { + is_nonblocking = true; + break :brk bun.sys.openatA(dir, path.slice(), input_flags, mode); + }, + .fd => |fd_| brk: { + const duped = bun.sys.dupWithFlags(fd_, 0); + + break :brk duped; + }, + }, + [:0]const u8, [:0]u8 => openat(dir, input_path, input_flags, mode), + else => unreachable, + }; + const fd = switch (result) { + .err => |err| return .{ .err = err }, + .result => |fd| fd, + }; + + if (comptime Environment.isPosix) { + switch (bun.sys.fstat(fd)) { + .err => |err| { + fd.close(); + return .{ .err = err }; + }, + .result => |stat| { + // pollable.* = bun.sys.isPollable(stat.mode); + pollable.* = isPollable(stat.mode); + if (!pollable.*) { + isatty = std.posix.isatty(fd.native()); + } + + if (isatty) { + pollable.* = true; + } + + is_socket.* = std.posix.S.ISSOCK(stat.mode); + + if (force_sync or isatty) { + // Prevents interleaved or dropped stdout/stderr output for terminals. + // As noted in the following reference, local TTYs tend to be quite fast and + // this behavior has become expected due historical functionality on OS X, + // even though it was originally intended to change in v1.0.2 (Libuv 1.2.1). 
+ // Ref: https://github.com/nodejs/node/pull/1771#issuecomment-119351671 + _ = bun.sys.updateNonblocking(fd, false); + is_nonblocking = false; + // this.force_sync = true; + // this.writer.force_sync = true; + onForceSyncOrIsaTTY(ctx); + } else if (!is_nonblocking) { + const flags = switch (bun.sys.getFcntlFlags(fd)) { + .result => |flags| flags, + .err => |err| { + fd.close(); + return .{ .err = err }; + }, + }; + is_nonblocking = (flags & @as(@TypeOf(flags), bun.O.NONBLOCK)) != 0; + + if (!is_nonblocking) { + if (bun.sys.setNonblocking(fd) == .result) { + is_nonblocking = true; + } + } + } + + out_nonblocking.* = is_nonblocking and pollable.*; + }, + } + + return .{ .result = fd }; + } + + if (comptime Environment.isWindows) { + pollable.* = (bun.windows.GetFileType(fd.cast()) & bun.windows.FILE_TYPE_PIPE) != 0 and !force_sync; + return .{ .result = fd }; + } +} + +const std = @import("std"); +const bun = @import("bun"); +const Environment = bun.Environment; +const JSC = bun.JSC; diff --git a/src/js/builtins.d.ts b/src/js/builtins.d.ts index 987ba63f00..d7986dab85 100644 --- a/src/js/builtins.d.ts +++ b/src/js/builtins.d.ts @@ -344,6 +344,7 @@ declare function $addEventListener(): TODO; declare function $appendFromJS(): TODO; declare function $argv(): TODO; declare function $assignToStream(): TODO; +declare function $assignStreamIntoResumableSink(): TODO; declare function $associatedReadableByteStreamController(): TODO; declare function $autoAllocateChunkSize(): TODO; declare function $backpressure(): TODO; diff --git a/src/js/builtins/Bake.ts b/src/js/builtins/Bake.ts index 9faf064bf8..bfe3cdfcb4 100644 --- a/src/js/builtins/Bake.ts +++ b/src/js/builtins/Bake.ts @@ -128,13 +128,20 @@ export function renderRoutesForProdStatic( pageModule, layouts, }); + let result; if (paramGetter[Symbol.asyncIterator] != undefined) { for await (const params of paramGetter) { - callRouteGenerator(type, i, layouts, pageModule, params); + result = callRouteGenerator(type, i, layouts, pageModule, params); + if ($isPromise(result) && $isPromisePending(result)) { + await result; + } } } else if (paramGetter[Symbol.iterator] != undefined) { for (const params of paramGetter) { - callRouteGenerator(type, i, layouts, pageModule, params); + result = callRouteGenerator(type, i, layouts, pageModule, params); + if ($isPromise(result) && $isPromisePending(result)) { + await result; + } } } else { await Promise.all( diff --git a/src/js/builtins/ReadableStreamInternals.ts b/src/js/builtins/ReadableStreamInternals.ts index e5a9f9cf06..5cb7316330 100644 --- a/src/js/builtins/ReadableStreamInternals.ts +++ b/src/js/builtins/ReadableStreamInternals.ts @@ -758,6 +758,123 @@ export function assignToStream(stream, sink) { return $readStreamIntoSink(stream, sink, true); } +$linkTimeConstant; +export function assignStreamIntoResumableSink(stream, sink) { + const highWaterMark = $getByIdDirectPrivate(stream, "highWaterMark") || 0; + let error: Error | null = null; + let reading = false; + let closed = false; + let reader: ReadableStreamDefaultReader | undefined; + + function releaseReader() { + if (reader) { + try { + reader.releaseLock(); + } catch {} + reader = undefined; + } + sink = undefined; + if (stream) { + var streamState = $getByIdDirectPrivate(stream, "state"); + // make it easy for this to be GC'd + // but don't do property transitions + var readableStreamController = $getByIdDirectPrivate(stream, "readableStreamController"); + if (readableStreamController) { + if ($getByIdDirectPrivate(readableStreamController, 
"underlyingSource")) + $putByIdDirectPrivate(readableStreamController, "underlyingSource", null); + if ($getByIdDirectPrivate(readableStreamController, "controlledReadableStream")) + $putByIdDirectPrivate(readableStreamController, "controlledReadableStream", null); + + $putByIdDirectPrivate(stream, "readableStreamController", null); + if ($getByIdDirectPrivate(stream, "underlyingSource")) $putByIdDirectPrivate(stream, "underlyingSource", null); + readableStreamController = undefined; + } + + if (stream && !error && streamState !== $streamClosed && streamState !== $streamErrored) { + $readableStreamCloseIfPossible(stream); + } + stream = undefined; + } + } + function endSink(...args: any[]) { + try { + sink?.end(...args); + } catch {} // should never throw + releaseReader(); + } + + try { + // always call start even if reader throws + + sink.start({ highWaterMark }); + + reader = stream.getReader(); + + async function drainReaderIntoSink() { + if (error || closed || reading) return; + reading = true; + + try { + while (true) { + var { value, done } = await reader!.read(); + if (closed) break; + + if (done) { + closed = true; + // lets cover just in case we have a value when done is true + // this shouldn't happen but just in case + if (value) { + sink.write(value); + } + // clean end + return endSink(); + } + + if (value) { + // write returns false under backpressure + if (!sink.write(value)) { + break; + } + } + } + } catch (e: any) { + error = e; + closed = true; + try { + const prom = stream?.cancel(e); + if ($isPromise(prom)) { + $markPromiseAsHandled(prom); + } + } catch {} + // end with the error NT so we can simplify the flow to only listen to end + queueMicrotask(endSink.bind(null, e)); + } finally { + reading = false; + } + } + + function cancelStream(reason: Error | null) { + if (closed) return; + let wasClosed = closed; + closed = true; + if (stream && !error && !wasClosed && stream.$state !== $streamClosed) { + $readableStreamCancel(stream, reason); + } + releaseReader(); + } + // drain is called when the backpressure is release so we can continue draining + // cancel is called if closed or errored by the other side + sink.setHandlers(drainReaderIntoSink, cancelStream); + + drainReaderIntoSink(); + } catch (e: any) { + error = e; + closed = true; + // end with the error + queueMicrotask(endSink.bind(null, e)); + } +} + export async function readStreamIntoSink(stream: ReadableStream, sink, isNative) { var didClose = false; var didThrow = false; @@ -842,8 +959,8 @@ export async function readStreamIntoSink(stream: ReadableStream, sink, isNative) reader = undefined; } sink = undefined; - var streamState = $getByIdDirectPrivate(stream, "state"); if (stream) { + var streamState = $getByIdDirectPrivate(stream, "state"); // make it easy for this to be GC'd // but don't do property transitions var readableStreamController = $getByIdDirectPrivate(stream, "readableStreamController"); @@ -858,7 +975,7 @@ export async function readStreamIntoSink(stream: ReadableStream, sink, isNative) readableStreamController = undefined; } - if (!didThrow && streamState !== $streamClosed && streamState !== $streamErrored) { + if (stream && !didThrow && streamState !== $streamClosed && streamState !== $streamErrored) { $readableStreamCloseIfPossible(stream); } stream = undefined; @@ -1099,6 +1216,7 @@ export function onCloseDirectStream(reason) { export function onFlushDirectStream() { var stream = this.$controlledReadableStream; + if (!stream) return; var reader = $getByIdDirectPrivate(stream, "reader"); if 
(!reader || !$isReadableStreamDefaultReader(reader)) { return; diff --git a/src/js/bun/sql.ts b/src/js/bun/sql.ts index dd88169c35..84aa23211e 100644 --- a/src/js/bun/sql.ts +++ b/src/js/bun/sql.ts @@ -280,13 +280,13 @@ function normalizeQuery(strings, values, binding_idx = 1) { binding_values.push(sub_values[j]); } binding_idx += sub_values.length; - } else if (value instanceof SQLArrayParameter) { + } else if (value instanceof SQLHelper) { const command = detectCommand(query); // only selectIn, insert, update, updateSet are allowed if (command === SQLCommand.none || command === SQLCommand.where) { - throw new SyntaxError("Helper are only allowed for INSERT, UPDATE and WHERE IN commands"); + throw new SyntaxError("Helpers are only allowed for INSERT, UPDATE and WHERE IN commands"); } - const { columns, value: items } = value as SQLArrayParameter; + const { columns, value: items } = value as SQLHelper; const columnCount = columns.length; if (columnCount === 0 && command !== SQLCommand.whereIn) { throw new SyntaxError(`Cannot ${commandToString(command)} with no columns`); } @@ -1300,7 +1300,7 @@ function doCreateQuery(strings, values, allowUnsafeTransaction, poolSize, bigint return createQuery(sqlString, final_values, new SQLResultArray(), undefined, !!bigint, !!simple); } -class SQLArrayParameter { +class SQLHelper { value: any; columns: string[]; constructor(value, keys) { @@ -1339,7 +1339,7 @@ function decodeIfValid(value) { } return null; } -function loadOptions(o) { +function loadOptions(o: Bun.SQL.Options) { var hostname, port, username, @@ -1453,6 +1453,8 @@ function loadOptions(o) { idleTimeout ??= o.idle_timeout; connectionTimeout ??= o.connectionTimeout; connectionTimeout ??= o.connection_timeout; + connectionTimeout ??= o.connectTimeout; + connectionTimeout ??= o.connect_timeout; maxLifetime ??= o.maxLifetime; maxLifetime ??= o.max_lifetime; bigint ??= o.bigint; @@ -1746,14 +1748,10 @@ function SQL(o, e = {}) { if ($isArray(strings)) { // detect if this is a tagged template if (!$isArray((strings as unknown as TemplateStringsArray).raw)) { - return new SQLArrayParameter(strings, values); + return new SQLHelper(strings, values); } - } else if ( - typeof strings === "object" && - !(strings instanceof Query) && - !(strings instanceof SQLArrayParameter) - ) { - return new SQLArrayParameter([strings], values); + } else if (typeof strings === "object" && !(strings instanceof Query) && !(strings instanceof SQLHelper)) { + return new SQLHelper([strings], values); } // we use the same code path as the transaction sql return queryFromTransaction(strings, values, pooledConnection, state.queries); @@ -2079,14 +2077,10 @@ function SQL(o, e = {}) { if ($isArray(strings)) { // detect if this is a tagged template if (!$isArray((strings as unknown as TemplateStringsArray).raw)) { - return new SQLArrayParameter(strings, values); + return new SQLHelper(strings, values); } - } else if ( - typeof strings === "object" && - !(strings instanceof Query) && - !(strings instanceof SQLArrayParameter) - ) { - return new SQLArrayParameter([strings], values); + } else if (typeof strings === "object" && !(strings instanceof Query) && !(strings instanceof SQLHelper)) { + return new SQLHelper([strings], values); } return queryFromTransaction(strings, values, pooledConnection, state.queries); @@ -2313,10 +2307,10 @@ function SQL(o, e = {}) { if ($isArray(strings)) { // detect if this is a tagged template if (!$isArray((strings as unknown as TemplateStringsArray).raw)) { - return new 
SQLHelper(strings, values); } - } else if (typeof strings === "object" && !(strings instanceof Query) && !(strings instanceof SQLArrayParameter)) { - return new SQLArrayParameter([strings], values); + } else if (typeof strings === "object" && !(strings instanceof Query) && !(strings instanceof SQLHelper)) { + return new SQLHelper([strings], values); } return queryFromPool(strings, values); diff --git a/src/js/internal/fs/glob.ts b/src/js/internal/fs/glob.ts index 294313fcbc..d78b419bc6 100644 --- a/src/js/internal/fs/glob.ts +++ b/src/js/internal/fs/glob.ts @@ -17,9 +17,9 @@ interface ExtendedGlobOptions extends GlobScanOptions { exclude(ent: string): boolean; } -async function* glob(pattern: string | string[], options: GlobOptions): AsyncGenerator { +async function* glob(pattern: string | string[], options?: GlobOptions): AsyncGenerator { pattern = validatePattern(pattern); - const globOptions = mapOptions(options); + const globOptions = mapOptions(options || {}); let it = new Bun.Glob(pattern).scan(globOptions); const exclude = globOptions.exclude; @@ -29,9 +29,9 @@ async function* glob(pattern: string | string[], options: GlobOptions): AsyncGen } } -function* globSync(pattern: string | string[], options: GlobOptions): Generator { +function* globSync(pattern: string | string[], options?: GlobOptions): Generator { pattern = validatePattern(pattern); - const globOptions = mapOptions(options); + const globOptions = mapOptions(options || {}); const g = new Bun.Glob(pattern); const exclude = globOptions.exclude; for (const ent of g.scanSync(globOptions)) { @@ -65,6 +65,8 @@ function mapOptions(options: GlobOptions): ExtendedGlobOptions { cwd: options?.cwd ?? process.cwd(), // https://github.com/nodejs/node/blob/a9546024975d0bfb0a8ae47da323b10fb5cbb88b/lib/internal/fs/glob.js#L655 followSymlinks: true, + // https://github.com/oven-sh/bun/issues/20507 + onlyFiles: false, exclude, }; } diff --git a/src/js/internal/streams/native-readable.ts b/src/js/internal/streams/native-readable.ts index 167f35644c..ba4aa326c8 100644 --- a/src/js/internal/streams/native-readable.ts +++ b/src/js/internal/streams/native-readable.ts @@ -19,7 +19,8 @@ const kRemainingChunk = Symbol("remainingChunk"); const MIN_BUFFER_SIZE = 512; let dynamicallyAdjustChunkSize = (_?) 
=> ( - (_ = process.env.BUN_DISABLE_DYNAMIC_CHUNK_SIZE !== "1"), (dynamicallyAdjustChunkSize = () => _) + (_ = process.env.BUN_DISABLE_DYNAMIC_CHUNK_SIZE !== "1"), + (dynamicallyAdjustChunkSize = () => _) ); type NativeReadable = typeof import("node:stream").Readable & diff --git a/src/js/internal/validators.ts b/src/js/internal/validators.ts index 4d91f4563c..f10b6c3052 100644 --- a/src/js/internal/validators.ts +++ b/src/js/internal/validators.ts @@ -65,12 +65,30 @@ function validateLinkHeaderValue(hints) { ); } +function validateString(value, name) { + if (typeof value !== "string") throw $ERR_INVALID_ARG_TYPE(name, "string", value); +} + +function validateFunction(value, name) { + if (typeof value !== "function") throw $ERR_INVALID_ARG_TYPE(name, "function", value); +} + +function validateBoolean(value, name) { + if (typeof value !== "boolean") throw $ERR_INVALID_ARG_TYPE(name, "boolean", value); +} + +function validateUndefined(value, name) { + if (value !== undefined) throw $ERR_INVALID_ARG_TYPE(name, "undefined", value); +} + function validateInternalField(object, fieldKey, className) { if (typeof object !== "object" || object === null || !ObjectPrototypeHasOwnProperty.$call(object, fieldKey)) { throw $ERR_INVALID_ARG_TYPE("this", className, object); } } + hideFromStack(validateLinkHeaderValue, validateInternalField); +hideFromStack(validateString, validateFunction, validateBoolean, validateUndefined); export default { /** (value, name) */ @@ -82,15 +100,15 @@ export default { /** `(value, name, min, max)` */ validateNumber: $newCppFunction("NodeValidator.cpp", "jsFunction_validateNumber", 0), /** `(value, name)` */ - validateString: $newCppFunction("NodeValidator.cpp", "jsFunction_validateString", 0), + validateString, /** `(number, name)` */ validateFiniteNumber: $newCppFunction("NodeValidator.cpp", "jsFunction_validateFiniteNumber", 0), /** `(number, name, lower, upper, def)` */ checkRangesOrGetDefault: $newCppFunction("NodeValidator.cpp", "jsFunction_checkRangesOrGetDefault", 0), /** `(value, name)` */ - validateFunction: $newCppFunction("NodeValidator.cpp", "jsFunction_validateFunction", 0), + validateFunction, /** `(value, name)` */ - validateBoolean: $newCppFunction("NodeValidator.cpp", "jsFunction_validateBoolean", 0), + validateBoolean, /** `(port, name = 'Port', allowZero = true)` */ validatePort: $newCppFunction("NodeValidator.cpp", "jsFunction_validatePort", 0), /** `(signal, name)` */ @@ -108,7 +126,7 @@ export default { /** `(value, name)` */ validatePlainFunction: $newCppFunction("NodeValidator.cpp", "jsFunction_validatePlainFunction", 0), /** `(value, name)` */ - validateUndefined: $newCppFunction("NodeValidator.cpp", "jsFunction_validateUndefined", 0), + validateUndefined, /** `(buffer, name = 'buffer')` */ validateBuffer: $newCppFunction("NodeValidator.cpp", "jsFunction_validateBuffer", 0), /** `(value, name, oneOf)` */ diff --git a/src/js/node/_http_client.ts b/src/js/node/_http_client.ts index 7402d1d428..d6299c2b80 100644 --- a/src/js/node/_http_client.ts +++ b/src/js/node/_http_client.ts @@ -183,7 +183,13 @@ function ClientRequest(input, options, cb) { }; this.flushHeaders = function () { - send(); + if (!fetching) { + this[kAbortController] ??= new AbortController(); + this[kAbortController].signal.addEventListener("abort", onAbort, { + once: true, + }); + startFetch(); + } }; this.destroy = function (err?: Error) { @@ -543,7 +549,7 @@ function ClientRequest(input, options, cb) { const send = () => { this.finished = true; - this[kAbortController] = new 
AbortController(); + this[kAbortController] ??= new AbortController(); this[kAbortController].signal.addEventListener("abort", onAbort, { once: true }); var body = this[kBodyChunks] && this[kBodyChunks].length > 1 ? new Blob(this[kBodyChunks]) : this[kBodyChunks]?.[0]; diff --git a/src/js/node/child_process.ts b/src/js/node/child_process.ts index 7ea8e20ec0..c144e6281a 100644 --- a/src/js/node/child_process.ts +++ b/src/js/node/child_process.ts @@ -30,6 +30,8 @@ const ArrayPrototypeFilter = Array.prototype.filter; const ArrayPrototypeSort = Array.prototype.sort; const StringPrototypeToUpperCase = String.prototype.toUpperCase; const ArrayPrototypePush = Array.prototype.push; +const ArrayPrototypeLastIndexOf = Array.prototype.lastIndexOf; +const ArrayPrototypeSplice = Array.prototype.splice; var ArrayBufferIsView = ArrayBuffer.isView; @@ -270,12 +272,13 @@ function execFile(file, args, options, callback) { // merge chunks let stdout; let stderr; - if (child.stdout?.readableEncoding) { + if (encoding || child.stdout?.readableEncoding) { stdout = ArrayPrototypeJoin.$call(_stdout, ""); } else { stdout = BufferConcat(_stdout); } - if (child.stderr?.readableEncoding) { + + if (encoding || child.stderr?.readableEncoding) { stderr = ArrayPrototypeJoin.$call(_stderr, ""); } else { stderr = BufferConcat(_stderr); @@ -735,19 +738,19 @@ function fork(modulePath, args = [], options) { validateArgumentNullCheck(options.execPath, "options.execPath"); // Prepare arguments for fork: - // execArgv = options.execArgv || process.execArgv; - // validateArgumentsNullCheck(execArgv, "options.execArgv"); + let execArgv = options.execArgv || process.execArgv; + validateArgumentsNullCheck(execArgv, "options.execArgv"); - // if (execArgv === process.execArgv && process._eval != null) { - // const index = ArrayPrototypeLastIndexOf.$call(execArgv, process._eval); - // if (index > 0) { - // // Remove the -e switch to avoid fork bombing ourselves. - // execArgv = ArrayPrototypeSlice.$call(execArgv); - // ArrayPrototypeSplice.$call(execArgv, index - 1, 2); - // } - // } + if (execArgv === process.execArgv && process._eval != null) { + const index = ArrayPrototypeLastIndexOf.$call(execArgv, process._eval); + if (index > 0) { + // Remove the -e switch to avoid fork bombing ourselves. 
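 + // (e.g. a script started with `-e` that calls fork() would otherwise pass
 + // the same `-e <script>` to each child, and every child would fork again)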
+ execArgv = ArrayPrototypeSlice.$call(execArgv); + ArrayPrototypeSplice.$call(execArgv, index - 1, 2); + } + } - args = [/*...execArgv,*/ modulePath, ...args]; + args = [...execArgv, modulePath, ...args]; if (typeof options.stdio === "string") { options.stdio = stdioStringToArray(options.stdio, "ipc"); diff --git a/src/js/node/fs.ts b/src/js/node/fs.ts index 2885c6471b..4ca4cbffbb 100644 --- a/src/js/node/fs.ts +++ b/src/js/node/fs.ts @@ -115,6 +115,10 @@ function openAsBlob(path, options) { return Promise.$resolve(Bun.file(path, options)); } +function emitStop(self: StatWatcher) { + self.emit("stop"); +} + class StatWatcher extends EventEmitter { _handle: StatWatcherHandle | null; @@ -131,7 +135,11 @@ class StatWatcher extends EventEmitter { start() {} stop() { - this._handle?.close(); + if (!this._handle) return; + + process.nextTick(emitStop, this); + + this._handle.close(); this._handle = null; } @@ -649,7 +657,7 @@ function watchFile(filename, options, listener) { } if (typeof listener !== "function") { - throw new TypeError("listener must be a function"); + throw $ERR_INVALID_ARG_TYPE("listener", "function", listener); } var stat = statWatchers.get(filename); diff --git a/src/js/node/net.ts b/src/js/node/net.ts index 456ec13099..48e55c8cdb 100644 --- a/src/js/node/net.ts +++ b/src/js/node/net.ts @@ -31,7 +31,7 @@ import type { Socket, SocketHandler, SocketListener } from "bun"; import type { Server as NetServer, Socket as NetSocket, ServerOpts } from "node:net"; import type { TLSSocket } from "node:tls"; const { kTimeout, getTimerDuration } = require("internal/timers"); -const { validateFunction, validateNumber, validateAbortSignal, validatePort, validateBoolean, validateInt32 } = require("internal/validators"); // prettier-ignore +const { validateFunction, validateNumber, validateAbortSignal, validatePort, validateBoolean, validateInt32, validateString } = require("internal/validators"); // prettier-ignore const { NodeAggregateError, ErrnoException } = require("internal/shared"); const ArrayPrototypeIncludes = Array.prototype.includes; @@ -412,7 +412,7 @@ const ServerHandlers: SocketHandler = { if (typeof connectionListener === "function") { this.pauseOnConnect = pauseOnConnect; if (!isTLS) { - connectionListener.$call(self, _socket); + self.prependOnceListener("connection", connectionListener); } } self.emit("connection", _socket); @@ -457,7 +457,7 @@ const ServerHandlers: SocketHandler = { } const connectionListener = server[bunSocketServerOptions]?.connectionListener; if (typeof connectionListener === "function") { - connectionListener.$call(server, self); + server.prependOnceListener("secureConnection", connectionListener); } server.emit("secureConnection", self); // after the secureConnection event we emit secure and secureConnect @@ -625,7 +625,6 @@ const SocketHandlers2: SocketHandler { - const m: any = importModuleDynamically.$apply(this, args); + const m: any = await importModuleDynamically.$apply(this, args); if (isModuleNamespaceObject(m)) { return m; } diff --git a/src/js/node/wasi.ts b/src/js/node/wasi.ts index 342e082fb3..a0ae57df5c 100644 --- a/src/js/node/wasi.ts +++ b/src/js/node/wasi.ts @@ -12,7 +12,7 @@ var __getOwnPropNames = Object.getOwnPropertyNames; var __commonJS = (cb, mod: typeof module | undefined = undefined) => function __require2() { - return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports; + return (mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports); }; // 
node_modules/wasi-js/dist/types.js diff --git a/src/js/private.d.ts b/src/js/private.d.ts index 9159a0afa0..83085b2656 100644 --- a/src/js/private.d.ts +++ b/src/js/private.d.ts @@ -217,7 +217,7 @@ declare function $newZigFunction any>( /** * Retrieves a handle to a function defined in Zig or C++, defined in a * `.bind.ts` file. For more information on how to define bindgen functions, see - * [bindgen's documentation](https://bun.sh/docs/project/bindgen). + * [bindgen's documentation](https://bun.com/docs/project/bindgen). * @param filename - The basename of the `.bind.ts` file. * @param symbol - The name of the function to call. */ diff --git a/src/js_ast.zig b/src/js_ast.zig index b2f5021920..11cbcc9ba6 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -1,28 +1,3 @@ -const std = @import("std"); -const logger = bun.logger; -const Runtime = @import("runtime.zig").Runtime; -const bun = @import("bun"); -const string = bun.string; -const Output = bun.Output; -const Environment = bun.Environment; -const strings = bun.strings; -const MutableString = bun.MutableString; -const stringZ = bun.stringZ; -const default_allocator = bun.default_allocator; - -pub const Ref = @import("ast/base.zig").Ref; -pub const Index = @import("ast/base.zig").Index; -const RefHashCtx = @import("ast/base.zig").RefHashCtx; -const ImportRecord = @import("import_record.zig").ImportRecord; -const JSC = bun.JSC; -const JSONParser = bun.JSON; -const ComptimeStringMap = bun.ComptimeStringMap; -const JSPrinter = @import("./js_printer.zig"); -const js_lexer = @import("./js_lexer.zig"); -const TypeScript = @import("./js_parser.zig").TypeScript; -const MimeType = bun.http.MimeType; -const OOM = bun.OOM; -const Loader = bun.options.Loader; /// This is the index to the automatically-generated part containing code that /// calls "__export(exports, { ... getters ... })". This is used to generate /// getters on an exports object for ES6 export statements, and is both for @@ -30,166 +5,6 @@ const Loader = bun.options.Loader; /// although it may contain no statements if there is nothing to export. pub const namespace_export_part_index = 0; -/// This "Store" is a specialized memory allocation strategy very similar to an -/// arena, used for allocating expression and statement nodes during JavaScript -/// parsing and visiting. Allocations are grouped into large blocks, where each -/// block is treated as a fixed-buffer allocator. When a block runs out of -/// space, a new one is created; all blocks are joined as a linked list. -/// -/// Similarly to an arena, you can call .reset() to reset state, reusing memory -/// across operations. 
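-///
-/// A minimal usage sketch (hypothetical, for illustration only):
-///
-///     const Store = NewStore(&.{ E.Number, E.Array }, 512);
-///     const store = Store.init();
-///     const num = store.append(E.Number, .{ .value = 1.0 });
-///     store.reset(); // every block becomes reusable for the next parse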
-pub fn NewStore(comptime types: []const type, comptime count: usize) type { - const largest_size, const largest_align = brk: { - var largest_size = 0; - var largest_align = 1; - for (types) |T| { - if (@sizeOf(T) == 0) { - @compileError("NewStore does not support 0 size type: " ++ @typeName(T)); - } - largest_size = @max(@sizeOf(T), largest_size); - largest_align = @max(@alignOf(T), largest_align); - } - break :brk .{ largest_size, largest_align }; - }; - - const backing_allocator = bun.default_allocator; - - const log = Output.scoped(.Store, true); - - return struct { - const Store = @This(); - - current: *Block, - debug_lock: std.debug.SafetyLock = .{}, - - pub const Block = struct { - pub const size = largest_size * count * 2; - pub const Size = std.math.IntFittingRange(0, size + largest_size); - - buffer: [size]u8 align(largest_align) = undefined, - bytes_used: Size = 0, - next: ?*Block = null, - - pub fn tryAlloc(block: *Block, comptime T: type) ?*T { - const start = std.mem.alignForward(usize, block.bytes_used, @alignOf(T)); - if (start + @sizeOf(T) > block.buffer.len) return null; - defer block.bytes_used = @intCast(start + @sizeOf(T)); - - // it's simpler to use @ptrCast, but as a sanity check, we also - // try to compute the slice. Zig will report an out of bounds - // panic if the null detection logic above is wrong - if (Environment.isDebug) { - _ = block.buffer[block.bytes_used..][0..@sizeOf(T)]; - } - - return @alignCast(@ptrCast(&block.buffer[start])); - } - }; - - const PreAlloc = struct { - metadata: Store, - first_block: Block, - }; - - pub fn firstBlock(store: *Store) *Block { - return &@as(*PreAlloc, @fieldParentPtr("metadata", store)).first_block; - } - - pub fn init() *Store { - log("init", .{}); - const prealloc = backing_allocator.create(PreAlloc) catch bun.outOfMemory(); - - prealloc.first_block.bytes_used = 0; - prealloc.first_block.next = null; - - prealloc.metadata = .{ - .current = &prealloc.first_block, - }; - - return &prealloc.metadata; - } - - pub fn deinit(store: *Store) void { - log("deinit", .{}); - var it = store.firstBlock().next; // do not free `store.head` - while (it) |next| { - if (Environment.isDebug or Environment.enable_asan) - @memset(next.buffer, undefined); - it = next.next; - backing_allocator.destroy(next); - } - - const prealloc: PreAlloc = @fieldParentPtr("metadata", store); - bun.assert(&prealloc.first_block == store.head); - backing_allocator.destroy(prealloc); - } - - pub fn reset(store: *Store) void { - log("reset", .{}); - - if (Environment.isDebug or Environment.enable_asan) { - var it: ?*Block = store.firstBlock(); - while (it) |next| : (it = next.next) { - next.bytes_used = undefined; - @memset(&next.buffer, undefined); - } - } - - store.current = store.firstBlock(); - store.current.bytes_used = 0; - } - - fn allocate(store: *Store, comptime T: type) *T { - comptime bun.assert(@sizeOf(T) > 0); // don't allocate! 
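- // Reject types the Store was not declared with, then bump-allocate from
- // the current block, chaining a new block only when the current one is full.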
- comptime if (!supportsType(T)) { - @compileError("Store does not know about type: " ++ @typeName(T)); - }; - - if (store.current.tryAlloc(T)) |ptr| - return ptr; - - // a new block is needed - const next_block = if (store.current.next) |next| brk: { - next.bytes_used = 0; - break :brk next; - } else brk: { - const new_block = backing_allocator.create(Block) catch - bun.outOfMemory(); - new_block.next = null; - new_block.bytes_used = 0; - store.current.next = new_block; - break :brk new_block; - }; - - store.current = next_block; - - return next_block.tryAlloc(T) orelse - unreachable; // newly initialized blocks must have enough space for at least one - } - - pub inline fn append(store: *Store, comptime T: type, data: T) *T { - const ptr = store.allocate(T); - if (Environment.isDebug) { - log("append({s}) -> 0x{x}", .{ bun.meta.typeName(T), @intFromPtr(ptr) }); - } - ptr.* = data; - return ptr; - } - - pub fn lock(store: *Store) void { - store.debug_lock.lock(); - } - - pub fn unlock(store: *Store) void { - store.debug_lock.unlock(); - } - - fn supportsType(T: type) bool { - return std.mem.indexOfScalar(type, types, T) != null; - } - }; -} - // There are three types. // 1. Expr (expression) // 2. Stmt (statement) @@ -217,10 +32,6 @@ pub fn NewStore(comptime types: []const type, comptime count: usize) type { // But it could also be better memory locality due to smaller in-memory size (more likely to hit the cache) // only benchmarks will provide an answer! // But we must have pointers somewhere in here because can't have types that contain themselves -pub const BindingNodeIndex = Binding; -pub const StmtNodeIndex = Stmt; -pub const ExprNodeIndex = Expr; -pub const BabyList = bun.BabyList; /// Slice that stores capacity and length in the same space as a regular slice. 
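/// (BabyList packs a pointer with 32-bit length and capacity fields, so the
/// list fits in the same footprint as a plain `[]Expr`.)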
pub const ExprNodeList = BabyList(Expr); @@ -304,246 +115,6 @@ pub const Flags = struct { }; }; -pub const Binding = struct { - loc: logger.Loc, - data: B, - - const Serializable = struct { - type: Tag, - object: string, - value: B, - loc: logger.Loc, - }; - - pub fn jsonStringify(self: *const @This(), writer: anytype) !void { - return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "binding", .value = self.data, .loc = self.loc }); - } - - pub fn ToExpr(comptime expr_type: type, comptime func_type: anytype) type { - const ExprType = expr_type; - return struct { - context: *ExprType, - allocator: std.mem.Allocator, - pub const Context = @This(); - - pub fn wrapIdentifier(ctx: *const Context, loc: logger.Loc, ref: Ref) Expr { - return func_type(ctx.context, loc, ref); - } - - pub fn init(context: *ExprType) Context { - return Context{ .context = context, .allocator = context.allocator }; - } - }; - } - - pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr { - const loc = binding.loc; - - switch (binding.data) { - .b_missing => { - return Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = loc }; - }, - .b_identifier => |b| { - return wrapper.wrapIdentifier(loc, b.ref); - }, - .b_array => |b| { - var exprs = wrapper.allocator.alloc(Expr, b.items.len) catch unreachable; - var i: usize = 0; - while (i < exprs.len) : (i += 1) { - const item = b.items[i]; - exprs[i] = convert: { - const expr = toExpr(&item.binding, wrapper); - if (b.has_spread and i == exprs.len - 1) { - break :convert Expr.init(E.Spread, E.Spread{ .value = expr }, expr.loc); - } else if (item.default_value) |default| { - break :convert Expr.assign(expr, default); - } else { - break :convert expr; - } - }; - } - - return Expr.init(E.Array, E.Array{ .items = ExprNodeList.init(exprs), .is_single_line = b.is_single_line }, loc); - }, - .b_object => |b| { - const properties = wrapper - .allocator - .alloc(G.Property, b.properties.len) catch unreachable; - for (properties, b.properties) |*property, item| { - property.* = .{ - .flags = item.flags, - .key = item.key, - .kind = if (item.flags.contains(.is_spread)) - .spread - else - .normal, - .value = toExpr(&item.value, wrapper), - .initializer = item.default_value, - }; - } - return Expr.init( - E.Object, - E.Object{ - .properties = G.Property.List.init(properties), - .is_single_line = b.is_single_line, - }, - loc, - ); - }, - } - } - - pub const Tag = enum(u5) { - b_identifier, - b_array, - b_object, - b_missing, - - pub fn jsonStringify(self: @This(), writer: anytype) !void { - return try writer.write(@tagName(self)); - } - }; - - pub var icount: usize = 0; - - pub fn init(t: anytype, loc: logger.Loc) Binding { - icount += 1; - switch (@TypeOf(t)) { - *B.Identifier => { - return Binding{ .loc = loc, .data = B{ .b_identifier = t } }; - }, - *B.Array => { - return Binding{ .loc = loc, .data = B{ .b_array = t } }; - }, - *B.Object => { - return Binding{ .loc = loc, .data = B{ .b_object = t } }; - }, - B.Missing => { - return Binding{ .loc = loc, .data = B{ .b_missing = t } }; - }, - else => { - @compileError("Invalid type passed to Binding.init"); - }, - } - } - - pub fn alloc(allocator: std.mem.Allocator, t: anytype, loc: logger.Loc) Binding { - icount += 1; - switch (@TypeOf(t)) { - B.Identifier => { - const data = allocator.create(B.Identifier) catch unreachable; - data.* = t; - return Binding{ .loc = loc, .data = B{ .b_identifier = data } }; - }, - B.Array => { - const data = allocator.create(B.Array) catch unreachable; - data.* = t; - 
return Binding{ .loc = loc, .data = B{ .b_array = data } }; - }, - B.Object => { - const data = allocator.create(B.Object) catch unreachable; - data.* = t; - return Binding{ .loc = loc, .data = B{ .b_object = data } }; - }, - B.Missing => { - return Binding{ .loc = loc, .data = B{ .b_missing = .{} } }; - }, - else => { - @compileError("Invalid type passed to Binding.alloc"); - }, - } - } -}; - -/// B is for Binding! Bindings are on the left side of variable -/// declarations (s_local), which is how destructuring assignments -/// are represented in memory. Consider a basic example. -/// -/// let hello = world; -/// ^ ^ -/// | E.Identifier -/// B.Identifier -/// -/// Bindings can be nested -/// -/// B.Array -/// | B.Identifier -/// | | -/// let { foo: [ bar ] } = ... -/// ---------------- -/// B.Object -pub const B = union(Binding.Tag) { - // let x = ... - b_identifier: *B.Identifier, - // let [a, b] = ... - b_array: *B.Array, - // let { a, b: c } = ... - b_object: *B.Object, - // this is used to represent array holes - b_missing: B.Missing, - - pub const Identifier = struct { - ref: Ref, - }; - - pub const Property = struct { - flags: Flags.Property.Set = Flags.Property.None, - key: ExprNodeIndex, - value: Binding, - default_value: ?Expr = null, - }; - - pub const Object = struct { - properties: []B.Property, - is_single_line: bool = false, - - pub const Property = B.Property; - }; - - pub const Array = struct { - items: []ArrayBinding, - has_spread: bool = false, - is_single_line: bool = false, - - pub const Item = ArrayBinding; - }; - - pub const Missing = struct {}; - - /// This hash function is currently only used for React Fast Refresh transform. - /// This doesn't include the `is_single_line` properties, as they only affect whitespace. - pub fn writeToHasher(b: B, hasher: anytype, symbol_table: anytype) void { - switch (b) { - .b_identifier => |id| { - const original_name = id.ref.getSymbol(symbol_table).original_name; - writeAnyToHasher(hasher, .{ std.meta.activeTag(b), original_name.len }); - }, - .b_array => |array| { - writeAnyToHasher(hasher, .{ std.meta.activeTag(b), array.has_spread, array.items.len }); - for (array.items) |item| { - writeAnyToHasher(hasher, .{item.default_value != null}); - if (item.default_value) |default| { - default.data.writeToHasher(hasher, symbol_table); - } - item.binding.data.writeToHasher(hasher, symbol_table); - } - }, - .b_object => |object| { - writeAnyToHasher(hasher, .{ std.meta.activeTag(b), object.properties.len }); - for (object.properties) |property| { - writeAnyToHasher(hasher, .{ property.default_value != null, property.flags }); - if (property.default_value) |default| { - default.data.writeToHasher(hasher, symbol_table); - } - property.key.data.writeToHasher(hasher, symbol_table); - property.value.data.writeToHasher(hasher, symbol_table); - } - }, - .b_missing => {}, - } - } -}; - pub const ClauseItem = struct { alias: string, alias_loc: logger.Loc = logger.Loc.Empty, @@ -572,134 +143,6 @@ pub const SlotCounts = struct { } }; -const char_freq_count = 64; -pub const CharAndCount = struct { - char: u8 = 0, - count: i32 = 0, - index: usize = 0, - - pub const Array = [char_freq_count]CharAndCount; - - pub fn lessThan(_: void, a: CharAndCount, b: CharAndCount) bool { - if (a.count != b.count) { - return a.count > b.count; - } - - if (a.index != b.index) { - return a.index < b.index; - } - - return a.char < b.char; - } -}; - -pub const CharFreq = struct { - const Vector = @Vector(char_freq_count, i32); - const Buffer = [char_freq_count]i32; - - 
freqs: Buffer align(1) = undefined, - - const scan_big_chunk_size = 32; - pub fn scan(this: *CharFreq, text: string, delta: i32) void { - if (delta == 0) - return; - - if (text.len < scan_big_chunk_size) { - scanSmall(&this.freqs, text, delta); - } else { - scanBig(&this.freqs, text, delta); - } - } - - fn scanBig(out: *align(1) Buffer, text: string, delta: i32) void { - // https://zig.godbolt.org/z/P5dPojWGK - var freqs = out.*; - defer out.* = freqs; - var deltas: [256]i32 = [_]i32{0} ** 256; - var remain = text; - - bun.assert(remain.len >= scan_big_chunk_size); - - const unrolled = remain.len - (remain.len % scan_big_chunk_size); - const remain_end = remain.ptr + unrolled; - var unrolled_ptr = remain.ptr; - remain = remain[unrolled..]; - - while (unrolled_ptr != remain_end) : (unrolled_ptr += scan_big_chunk_size) { - const chunk = unrolled_ptr[0..scan_big_chunk_size].*; - inline for (0..scan_big_chunk_size) |i| { - deltas[@as(usize, chunk[i])] += delta; - } - } - - for (remain) |c| { - deltas[@as(usize, c)] += delta; - } - - freqs[0..26].* = deltas['a' .. 'a' + 26].*; - freqs[26 .. 26 * 2].* = deltas['A' .. 'A' + 26].*; - freqs[26 * 2 .. 62].* = deltas['0' .. '0' + 10].*; - freqs[62] = deltas['_']; - freqs[63] = deltas['$']; - } - - fn scanSmall(out: *align(1) Buffer, text: string, delta: i32) void { - var freqs: [char_freq_count]i32 = out.*; - defer out.* = freqs; - - for (text) |c| { - const i: usize = switch (c) { - 'a'...'z' => @as(usize, @intCast(c)) - 'a', - 'A'...'Z' => @as(usize, @intCast(c)) - ('A' - 26), - '0'...'9' => @as(usize, @intCast(c)) + (53 - '0'), - '_' => 62, - '$' => 63, - else => continue, - }; - freqs[i] += delta; - } - } - - pub fn include(this: *CharFreq, other: CharFreq) void { - // https://zig.godbolt.org/z/Mq8eK6K9s - const left: @Vector(char_freq_count, i32) = this.freqs; - const right: @Vector(char_freq_count, i32) = other.freqs; - - this.freqs = left + right; - } - - pub fn compile(this: *const CharFreq, allocator: std.mem.Allocator) NameMinifier { - const array: CharAndCount.Array = brk: { - var _array: CharAndCount.Array = undefined; - - for (&_array, NameMinifier.default_tail, this.freqs, 0..) 
|*dest, char, freq, i| { - dest.* = CharAndCount{ - .char = char, - .index = i, - .count = freq, - }; - } - - std.sort.pdq(CharAndCount, &_array, {}, CharAndCount.lessThan); - - break :brk _array; - }; - - var minifier = NameMinifier.init(allocator); - minifier.head.ensureTotalCapacityPrecise(NameMinifier.default_head.len) catch unreachable; - minifier.tail.ensureTotalCapacityPrecise(NameMinifier.default_tail.len) catch unreachable; - // TODO: investigate counting number of < 0 and > 0 and pre-allocating - for (array) |item| { - if (item.char < '0' or item.char > '9') { - minifier.head.append(item.char) catch unreachable; - } - minifier.tail.append(item.char) catch unreachable; - } - - return minifier; - } -}; - pub const NameMinifier = struct { head: std.ArrayList(u8), tail: std.ArrayList(u8), @@ -747,695 +190,6 @@ pub const NameMinifier = struct { } }; -pub const G = struct { - pub const Decl = struct { - binding: BindingNodeIndex, - value: ?ExprNodeIndex = null, - - pub const List = BabyList(Decl); - }; - - pub const NamespaceAlias = struct { - namespace_ref: Ref, - alias: string, - - was_originally_property_access: bool = false, - - import_record_index: u32 = std.math.maxInt(u32), - }; - - pub const ExportStarAlias = struct { - loc: logger.Loc, - - // Although this alias name starts off as being the same as the statement's - // namespace symbol, it may diverge if the namespace symbol name is minified. - // The original alias name is preserved here to avoid this scenario. - original_name: string, - }; - - pub const Class = struct { - class_keyword: logger.Range = logger.Range.None, - ts_decorators: ExprNodeList = ExprNodeList{}, - class_name: ?LocRef = null, - extends: ?ExprNodeIndex = null, - body_loc: logger.Loc = logger.Loc.Empty, - close_brace_loc: logger.Loc = logger.Loc.Empty, - properties: []Property = &([_]Property{}), - has_decorators: bool = false, - - pub fn canBeMoved(this: *const Class) bool { - if (this.extends != null) - return false; - - if (this.has_decorators) { - return false; - } - - for (this.properties) |property| { - if (property.kind == .class_static_block) - return false; - - const flags = property.flags; - if (flags.contains(.is_computed) or flags.contains(.is_spread)) { - return false; - } - - if (property.kind == .normal) { - if (flags.contains(.is_static)) { - for ([2]?Expr{ property.value, property.initializer }) |val_| { - if (val_) |val| { - switch (val.data) { - .e_arrow, .e_function => {}, - else => { - if (!val.canBeMoved()) { - return false; - } - }, - } - } - } - } - } - } - - return true; - } - }; - - // invalid shadowing if left as Comment - pub const Comment = struct { loc: logger.Loc, text: string }; - - pub const ClassStaticBlock = struct { - stmts: BabyList(Stmt) = .{}, - loc: logger.Loc, - }; - - pub const Property = struct { - /// This is used when parsing a pattern that uses default values: - /// - /// [a = 1] = []; - /// ({a = 1} = {}); - /// - /// It's also used for class fields: - /// - /// class Foo { a = 1 } - /// - initializer: ?ExprNodeIndex = null, - kind: Kind = .normal, - flags: Flags.Property.Set = Flags.Property.None, - - class_static_block: ?*ClassStaticBlock = null, - ts_decorators: ExprNodeList = .{}, - // Key is optional for spread - key: ?ExprNodeIndex = null, - - // This is omitted for class fields - value: ?ExprNodeIndex = null, - - ts_metadata: TypeScript.Metadata = .m_none, - - pub const List = BabyList(Property); - - pub fn deepClone(this: *const Property, allocator: std.mem.Allocator) !Property { - var 
class_static_block: ?*ClassStaticBlock = null; - if (this.class_static_block != null) { - class_static_block = bun.create(allocator, ClassStaticBlock, .{ - .loc = this.class_static_block.?.loc, - .stmts = try this.class_static_block.?.stmts.clone(allocator), - }); - } - return .{ - .initializer = if (this.initializer) |init| try init.deepClone(allocator) else null, - .kind = this.kind, - .flags = this.flags, - .class_static_block = class_static_block, - .ts_decorators = try this.ts_decorators.deepClone(allocator), - .key = if (this.key) |key| try key.deepClone(allocator) else null, - .value = if (this.value) |value| try value.deepClone(allocator) else null, - .ts_metadata = this.ts_metadata, - }; - } - - pub const Kind = enum(u3) { - normal, - get, - set, - spread, - declare, - abstract, - class_static_block, - - pub fn jsonStringify(self: @This(), writer: anytype) !void { - return try writer.write(@tagName(self)); - } - }; - }; - - pub const FnBody = struct { - loc: logger.Loc, - stmts: StmtNodeList, - - pub fn initReturnExpr(allocator: std.mem.Allocator, expr: Expr) !FnBody { - return .{ - .stmts = try allocator.dupe(Stmt, &.{Stmt.alloc(S.Return, .{ - .value = expr, - }, expr.loc)}), - .loc = expr.loc, - }; - } - }; - - pub const Fn = struct { - name: ?LocRef = null, - open_parens_loc: logger.Loc = logger.Loc.Empty, - args: []Arg = &.{}, - // This was originally nullable, but doing so I believe caused a miscompilation - // Specifically, the body was always null. - body: FnBody = .{ .loc = logger.Loc.Empty, .stmts = &.{} }, - arguments_ref: ?Ref = null, - - flags: Flags.Function.Set = Flags.Function.None, - - return_ts_metadata: TypeScript.Metadata = .m_none, - - pub fn deepClone(this: *const Fn, allocator: std.mem.Allocator) !Fn { - const args = try allocator.alloc(Arg, this.args.len); - for (0..args.len) |i| { - args[i] = try this.args[i].deepClone(allocator); - } - return .{ - .name = this.name, - .open_parens_loc = this.open_parens_loc, - .args = args, - .body = .{ - .loc = this.body.loc, - .stmts = this.body.stmts, - }, - .arguments_ref = this.arguments_ref, - .flags = this.flags, - .return_ts_metadata = this.return_ts_metadata, - }; - } - }; - pub const Arg = struct { - ts_decorators: ExprNodeList = ExprNodeList{}, - binding: BindingNodeIndex, - default: ?ExprNodeIndex = null, - - // "constructor(public x: boolean) {}" - is_typescript_ctor_field: bool = false, - - ts_metadata: TypeScript.Metadata = .m_none, - - pub fn deepClone(this: *const Arg, allocator: std.mem.Allocator) !Arg { - return .{ - .ts_decorators = try this.ts_decorators.deepClone(allocator), - .binding = this.binding, - .default = if (this.default) |d| try d.deepClone(allocator) else null, - .is_typescript_ctor_field = this.is_typescript_ctor_field, - .ts_metadata = this.ts_metadata, - }; - } - }; -}; - -pub const Symbol = struct { - /// This is the name that came from the parser. Printed names may be renamed - /// during minification or to avoid name collisions. Do not use the original - /// name during printing. - original_name: []const u8, - - /// This is used for symbols that represent items in the import clause of an - /// ES6 import statement. These should always be referenced by EImportIdentifier - /// instead of an EIdentifier. When this is present, the expression should - /// be printed as a property access off the namespace instead of as a bare - /// identifier. - /// - /// For correctness, this must be stored on the symbol instead of indirectly - /// associated with the Ref for the symbol somehow. 
In ES6 "flat bundling" - /// mode, re-exported symbols are collapsed using MergeSymbols() and renamed - /// symbols from other files that end up at this symbol must be able to tell - /// if it has a namespace alias. - namespace_alias: ?G.NamespaceAlias = null, - - /// Used by the parser for single pass parsing. - link: Ref = Ref.None, - - /// An estimate of the number of uses of this symbol. This is used to detect - /// whether a symbol is used or not. For example, TypeScript imports that are - /// unused must be removed because they are probably type-only imports. This - /// is an estimate and may not be completely accurate due to oversights in the - /// code. But it should always be non-zero when the symbol is used. - use_count_estimate: u32 = 0, - - /// This is for generating cross-chunk imports and exports for code splitting. - /// - /// Do not use this directly. Use `chunkIndex()` instead. - chunk_index: u32 = invalid_chunk_index, - - /// This is used for minification. Symbols that are declared in sibling scopes - /// can share a name. A good heuristic (from Google Closure Compiler) is to - /// assign names to symbols from sibling scopes in declaration order. That way - /// local variable names are reused in each global function like this, which - /// improves gzip compression: - /// - /// function x(a, b) { ... } - /// function y(a, b, c) { ... } - /// - /// The parser fills this in for symbols inside nested scopes. There are three - /// slot namespaces: regular symbols, label symbols, and private symbols. - /// - /// Do not use this directly. Use `nestedScopeSlot()` instead. - nested_scope_slot: u32 = invalid_nested_scope_slot, - - did_keep_name: bool = true, - - must_start_with_capital_letter_for_jsx: bool = false, - - /// The kind of symbol. This is used to determine how to print the symbol - /// and how to deal with conflicts, renaming, etc. - kind: Kind = Kind.other, - - /// Certain symbols must not be renamed or minified. For example, the - /// "arguments" variable is declared by the runtime for every function. - /// Renaming can also break any identifier used inside a "with" statement. - must_not_be_renamed: bool = false, - - /// We automatically generate import items for property accesses off of - /// namespace imports. This lets us remove the expensive namespace imports - /// while bundling in many cases, replacing them with a cheap import item - /// instead: - /// - /// import * as ns from 'path' - /// ns.foo() - /// - /// That can often be replaced by this, which avoids needing the namespace: - /// - /// import {foo} from 'path' - /// foo() - /// - /// However, if the import is actually missing then we don't want to report a - /// compile-time error like we do for real import items. This status lets us - /// avoid this. We also need to be able to replace such import items with - /// undefined, which this status is also used for. - import_item_status: ImportItemStatus = ImportItemStatus.none, - - /// --- Not actually used yet ----------------------------------------------- - /// Sometimes we lower private symbols even if they are supported. For example, - /// consider the following TypeScript code: - /// - /// class Foo { - /// #foo = 123 - /// bar = this.#foo - /// } - /// - /// If "useDefineForClassFields: false" is set in "tsconfig.json", then "bar" - /// must use assignment semantics instead of define semantics. 
We can compile - /// that to this code: - /// - /// class Foo { - /// constructor() { - /// this.#foo = 123; - /// this.bar = this.#foo; - /// } - /// #foo; - /// } - /// - /// However, we can't do the same for static fields: - /// - /// class Foo { - /// static #foo = 123 - /// static bar = this.#foo - /// } - /// - /// Compiling these static fields to something like this would be invalid: - /// - /// class Foo { - /// static #foo; - /// } - /// Foo.#foo = 123; - /// Foo.bar = Foo.#foo; - /// - /// Thus "#foo" must be lowered even though it's supported. Another case is - /// when we're converting top-level class declarations to class expressions - /// to avoid the TDZ and the class shadowing symbol is referenced within the - /// class body: - /// - /// class Foo { - /// static #foo = Foo - /// } - /// - /// This cannot be converted into something like this: - /// - /// var Foo = class { - /// static #foo; - /// }; - /// Foo.#foo = Foo; - /// - /// --- Not actually used yet ----------------------------------------------- - private_symbol_must_be_lowered: bool = false, - - remove_overwritten_function_declaration: bool = false, - - /// Used in HMR to decide when live binding code is needed. - has_been_assigned_to: bool = false, - - comptime { - bun.assert_eql(@sizeOf(Symbol), 88); - bun.assert_eql(@alignOf(Symbol), @alignOf([]const u8)); - } - - const invalid_chunk_index = std.math.maxInt(u32); - pub const invalid_nested_scope_slot = std.math.maxInt(u32); - - pub const SlotNamespace = enum { - must_not_be_renamed, - default, - label, - private_name, - mangled_prop, - - pub const CountsArray = std.EnumArray(SlotNamespace, u32); - }; - - /// This is for generating cross-chunk imports and exports for code splitting. - pub inline fn chunkIndex(this: *const Symbol) ?u32 { - const i = this.chunk_index; - return if (i == invalid_chunk_index) null else i; - } - - pub inline fn nestedScopeSlot(this: *const Symbol) ?u32 { - const i = this.nested_scope_slot; - return if (i == invalid_nested_scope_slot) null else i; - } - - pub fn slotNamespace(this: *const Symbol) SlotNamespace { - const kind = this.kind; - - if (kind == .unbound or this.must_not_be_renamed) { - return .must_not_be_renamed; - } - - if (kind.isPrivate()) { - return .private_name; - } - - return switch (kind) { - // .mangled_prop => .mangled_prop, - .label => .label, - else => .default, - }; - } - - pub inline fn hasLink(this: *const Symbol) bool { - return this.link.tag != .invalid; - } - - pub const Kind = enum { - /// An unbound symbol is one that isn't declared in the file it's referenced - /// in. For example, using "window" without declaring it will be unbound. - unbound, - - /// This has special merging behavior. You're allowed to re-declare these - /// symbols more than once in the same scope. These symbols are also hoisted - /// out of the scope they are declared in to the closest containing function - /// or module scope. These are the symbols with this kind: - /// - /// - Function arguments - /// - Function statements - /// - Variables declared using "var" - hoisted, - hoisted_function, - - /// There's a weird special case where catch variables declared using a simple - /// identifier (i.e. 
not a binding pattern) block hoisted variables instead of - /// becoming an error: - /// - /// var e = 0; - /// try { throw 1 } catch (e) { - /// print(e) // 1 - /// var e = 2 - /// print(e) // 2 - /// } - /// print(e) // 0 (since the hoisting stops at the catch block boundary) - /// - /// However, other forms are still a syntax error: - /// - /// try {} catch (e) { let e } - /// try {} catch ({e}) { var e } - /// - /// This symbol is for handling this weird special case. - catch_identifier, - - /// Generator and async functions are not hoisted, but still have special - /// properties such as being able to overwrite previous functions with the - /// same name - generator_or_async_function, - - /// This is the special "arguments" variable inside functions - arguments, - - /// Classes can merge with TypeScript namespaces. - class, - - /// A class-private identifier (i.e. "#foo"). - private_field, - private_method, - private_get, - private_set, - private_get_set_pair, - private_static_field, - private_static_method, - private_static_get, - private_static_set, - private_static_get_set_pair, - - /// Labels are in their own namespace - label, - - /// TypeScript enums can merge with TypeScript namespaces and other TypeScript - /// enums. - ts_enum, - - /// TypeScript namespaces can merge with classes, functions, TypeScript enums, - /// and other TypeScript namespaces. - ts_namespace, - - /// In TypeScript, imports are allowed to silently collide with symbols within - /// the module. Presumably this is because the imports may be type-only. - /// Import statement namespace references should NOT have this set. - import, - - /// Assigning to a "const" symbol will throw a TypeError at runtime - constant, - - // CSS identifiers that are renamed to be unique to the file they are in - local_css, - - /// This annotates all other symbols that don't have special behavior. - other, - - pub fn jsonStringify(self: @This(), writer: anytype) !void { - return try writer.write(@tagName(self)); - } - - pub inline fn isPrivate(kind: Symbol.Kind) bool { - return @intFromEnum(kind) >= @intFromEnum(Symbol.Kind.private_field) and @intFromEnum(kind) <= @intFromEnum(Symbol.Kind.private_static_get_set_pair); - } - - pub inline fn isHoisted(kind: Symbol.Kind) bool { - return switch (kind) { - .hoisted, .hoisted_function => true, - else => false, - }; - } - - pub inline fn isHoistedOrFunction(kind: Symbol.Kind) bool { - return switch (kind) { - .hoisted, .hoisted_function, .generator_or_async_function => true, - else => false, - }; - } - - pub inline fn isFunction(kind: Symbol.Kind) bool { - return switch (kind) { - .hoisted_function, .generator_or_async_function => true, - else => false, - }; - } - }; - - pub const isKindPrivate = Symbol.Kind.isPrivate; - pub const isKindHoisted = Symbol.Kind.isHoisted; - pub const isKindHoistedOrFunction = Symbol.Kind.isHoistedOrFunction; - pub const isKindFunction = Symbol.Kind.isFunction; - - pub const Use = struct { - count_estimate: u32 = 0, - }; - - pub const List = BabyList(Symbol); - pub const NestedList = BabyList(List); - - pub fn mergeContentsWith(this: *Symbol, old: *Symbol) void { - this.use_count_estimate += old.use_count_estimate; - if (old.must_not_be_renamed) { - this.original_name = old.original_name; - this.must_not_be_renamed = true; - } - - // TODO: MustStartWithCapitalLetterForJSX - } - - pub const Map = struct { - // This could be represented as a "map[Ref]Symbol" but a two-level array was - // more efficient in profiles. 
This appears to be because it doesn't involve - // a hash. This representation also makes it trivial to quickly merge symbol - // maps from multiple files together. Each file only generates symbols in a - // single inner array, so you can join the maps together by just making a - // single outer array containing all of the inner arrays. See the comment on - // "Ref" for more detail. - symbols_for_source: NestedList = .{}, - - pub fn dump(this: Map) void { - defer Output.flush(); - for (this.symbols_for_source.slice(), 0..) |symbols, i| { - Output.prettyln("\n\n-- Source ID: {d} ({d} symbols) --\n\n", .{ i, symbols.len }); - for (symbols.slice(), 0..) |symbol, inner_index| { - Output.prettyln( - " name: {s}\n tag: {s}\n {any}\n", - .{ - symbol.original_name, @tagName(symbol.kind), - if (symbol.hasLink()) symbol.link else Ref{ - .source_index = @truncate(i), - .inner_index = @truncate(inner_index), - .tag = .symbol, - }, - }, - ); - } - } - } - - pub fn assignChunkIndex(this: *Map, decls_: DeclaredSymbol.List, chunk_index: u32) void { - const Iterator = struct { - map: *Map, - chunk_index: u32, - - pub fn next(self: @This(), ref: Ref) void { - var symbol = self.map.get(ref).?; - symbol.chunk_index = self.chunk_index; - } - }; - var decls = decls_; - - DeclaredSymbol.forEachTopLevelSymbol(&decls, Iterator{ .map = this, .chunk_index = chunk_index }, Iterator.next); - } - - pub fn merge(this: *Map, old: Ref, new: Ref) Ref { - if (old.eql(new)) { - return new; - } - - var old_symbol = this.get(old).?; - if (old_symbol.hasLink()) { - const old_link = old_symbol.link; - old_symbol.link = this.merge(old_link, new); - return old_symbol.link; - } - - var new_symbol = this.get(new).?; - - if (new_symbol.hasLink()) { - const new_link = new_symbol.link; - new_symbol.link = this.merge(old, new_link); - return new_symbol.link; - } - - old_symbol.link = new; - new_symbol.mergeContentsWith(old_symbol); - return new; - } - - pub fn get(self: *const Map, ref: Ref) ?*Symbol { - if (Ref.isSourceIndexNull(ref.sourceIndex()) or ref.isSourceContentsSlice()) { - return null; - } - - return self.symbols_for_source.at(ref.sourceIndex()).mut(ref.innerIndex()); - } - - pub fn getConst(self: *const Map, ref: Ref) ?*const Symbol { - if (Ref.isSourceIndexNull(ref.sourceIndex()) or ref.isSourceContentsSlice()) { - return null; - } - - return self.symbols_for_source.at(ref.sourceIndex()).at(ref.innerIndex()); - } - - pub fn init(sourceCount: usize, allocator: std.mem.Allocator) !Map { - const symbols_for_source: NestedList = NestedList.init(try allocator.alloc([]Symbol, sourceCount)); - return Map{ .symbols_for_source = symbols_for_source }; - } - - pub fn initWithOneList(list: List) Map { - const baby_list = BabyList(List).init((&list)[0..1]); - return initList(baby_list); - } - - pub fn initList(list: NestedList) Map { - return Map{ .symbols_for_source = list }; - } - - pub fn getWithLink(symbols: *const Map, ref: Ref) ?*Symbol { - var symbol: *Symbol = symbols.get(ref) orelse return null; - if (symbol.hasLink()) { - return symbols.get(symbol.link) orelse symbol; - } - return symbol; - } - - pub fn getWithLinkConst(symbols: *Map, ref: Ref) ?*const Symbol { - var symbol: *const Symbol = symbols.getConst(ref) orelse return null; - if (symbol.hasLink()) { - return symbols.getConst(symbol.link) orelse symbol; - } - return symbol; - } - - pub fn followAll(symbols: *Map) void { - const trace = bun.perf.trace("Symbols.followAll"); - defer trace.end(); - for (symbols.symbols_for_source.slice()) |list| { - for (list.slice())
|*symbol| { - if (!symbol.hasLink()) continue; - symbol.link = follow(symbols, symbol.link); - } - } - } - - /// Equivalent to followSymbols in esbuild - pub fn follow(symbols: *const Map, ref: Ref) Ref { - var symbol = symbols.get(ref) orelse return ref; - if (!symbol.hasLink()) { - return ref; - } - - const link = follow(symbols, symbol.link); - - if (!symbol.link.eql(link)) { - symbol.link = link; - } - - return link; - } - }; - - pub inline fn isHoisted(self: *const Symbol) bool { - return Symbol.isKindHoisted(self.kind); - } -}; - pub const OptionalChain = enum(u1) { /// "a?.b" start, @@ -1449,5023 +203,6 @@ pub const OptionalChain = enum(u1) { } }; -pub const E = struct { - /// This represents an internal property name that can be mangled. The symbol - /// referenced by this expression should be a "SymbolMangledProp" symbol. - pub const NameOfSymbol = struct { - ref: Ref = Ref.None, - - /// If true, a preceding comment contains "@__KEY__" - /// - /// Currently not used - has_property_key_comment: bool = false, - }; - - pub const Array = struct { - items: ExprNodeList = ExprNodeList{}, - comma_after_spread: ?logger.Loc = null, - is_single_line: bool = false, - is_parenthesized: bool = false, - was_originally_macro: bool = false, - close_bracket_loc: logger.Loc = logger.Loc.Empty, - - pub fn push(this: *Array, allocator: std.mem.Allocator, item: Expr) !void { - try this.items.push(allocator, item); - } - - pub inline fn slice(this: Array) []Expr { - return this.items.slice(); - } - - pub fn inlineSpreadOfArrayLiterals( - this: *Array, - allocator: std.mem.Allocator, - estimated_count: usize, - ) !ExprNodeList { - var out = try allocator.alloc( - Expr, - // This over-allocates a little but it's fine - estimated_count + @as(usize, this.items.len), - ); - var remain = out; - for (this.items.slice()) |item| { - switch (item.data) { - .e_spread => |val| { - if (val.value.data == .e_array) { - for (val.value.data.e_array.items.slice()) |inner_item| { - if (inner_item.data == .e_missing) { - remain[0] = Expr.init(E.Undefined, .{}, inner_item.loc); - remain = remain[1..]; - } else { - remain[0] = inner_item; - remain = remain[1..]; - } - } - - // skip empty arrays - // don't include the inlined spread. - continue; - } - // non-arrays are kept in - }, - else => {}, - } - - remain[0] = item; - remain = remain[1..]; - } - - return ExprNodeList.init(out[0 .. out.len - remain.len]); - } - - pub fn toJS(this: @This(), allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue { - const items = this.items.slice(); - var array = try JSC.JSValue.createEmptyArray(globalObject, items.len); - array.protect(); - defer array.unprotect(); - for (items, 0..) 
|expr, j| { - array.putIndex(globalObject, @as(u32, @truncate(j)), try expr.data.toJS(allocator, globalObject)); - } - - return array; - } - - /// Assumes each item in the array is a string - pub fn alphabetizeStrings(this: *Array) void { - if (comptime Environment.allow_assert) { - for (this.items.slice()) |item| { - bun.assert(item.data == .e_string); - } - } - std.sort.pdq(Expr, this.items.slice(), {}, Sorter.isLessThan); - } - - const Sorter = struct { - pub fn isLessThan(ctx: void, lhs: Expr, rhs: Expr) bool { - return strings.cmpStringsAsc(ctx, lhs.data.e_string.data, rhs.data.e_string.data); - } - }; - }; - - pub const Unary = struct { - op: Op.Code, - value: ExprNodeIndex, - }; - - pub const Binary = struct { - left: ExprNodeIndex, - right: ExprNodeIndex, - op: Op.Code, - }; - - pub const Boolean = struct { - value: bool, - pub fn toJS(this: @This(), ctx: *JSC.JSGlobalObject) JSC.C.JSValueRef { - return JSC.C.JSValueMakeBoolean(ctx, this.value); - } - }; - pub const Super = struct {}; - pub const Null = struct {}; - pub const This = struct {}; - pub const Undefined = struct {}; - pub const New = struct { - target: ExprNodeIndex, - args: ExprNodeList = ExprNodeList{}, - - // True if there is a comment containing "@__PURE__" or "#__PURE__" preceding - // this call expression. See the comment inside ECall for more details. - can_be_unwrapped_if_unused: bool = false, - - close_parens_loc: logger.Loc, - }; - pub const NewTarget = struct { - range: logger.Range, - }; - pub const ImportMeta = struct {}; - pub const ImportMetaMain = struct { - /// If we want to print `!import.meta.main`, set this flag to true - /// instead of wrapping in a unary not. This way, the printer can easily - /// print `require.main != module` instead of `!(require.main == module)` - inverted: bool = false, - }; - - pub const Special = union(enum) { - /// emits `exports` or `module.exports` depending on `commonjs_named_exports_deoptimized` - module_exports, - /// `import.meta.hot` - hot_enabled, - /// Acts as .e_undefined, but allows property accesses to the rest of the HMR API. - hot_disabled, - /// `import.meta.hot.data` when HMR is enabled. Not reachable when it is disabled. - hot_data, - /// `import.meta.hot.accept` when HMR is enabled. Truthy. - hot_accept, - /// Converted from `hot_accept` to this in js_parser.zig when it is - /// passed strings. Printed as `hmr.hot.acceptSpecifiers` - hot_accept_visited, - /// Prints the resolved specifier string for an import record. - resolved_specifier_string: ImportRecord.Index, - }; - - pub const Call = struct { - // Node: - target: ExprNodeIndex, - args: ExprNodeList = ExprNodeList{}, - optional_chain: ?OptionalChain = null, - is_direct_eval: bool = false, - close_paren_loc: logger.Loc = logger.Loc.Empty, - - // True if there is a comment containing "@__PURE__" or "#__PURE__" preceding - // this call expression. This is an annotation used for tree shaking, and - // means that the call can be removed if it's unused. It does not mean the - // call is pure (e.g. it may still return something different if called twice). - // - // Note that the arguments are not considered to be part of the call. If the - // call itself is removed due to this annotation, the arguments must remain - // if they have side effects. 
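To make the `@__PURE__` contract described above concrete, here is an illustrative JavaScript example (not part of this diff; the names are hypothetical):

```ts
// The call is annotated as pure, so it may be dropped when `widget` is unused.
const widget = /* @__PURE__ */ createWidget(logStartup());

// A correct minifier removes the annotated call itself but keeps the
// argument's side effect, leaving only:
logStartup();
```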
- can_be_unwrapped_if_unused: bool = false, - - // Used when printing to generate the source prop on the fly - was_jsx_element: bool = false, - - pub fn hasSameFlagsAs(a: *Call, b: *Call) bool { - return (a.optional_chain == b.optional_chain and - a.is_direct_eval == b.is_direct_eval and - a.can_be_unwrapped_if_unused == b.can_be_unwrapped_if_unused); - } - }; - - pub const Dot = struct { - // target is Node - target: ExprNodeIndex, - name: string, - name_loc: logger.Loc, - optional_chain: ?OptionalChain = null, - - // If true, this property access is known to be free of side-effects. That - // means it can be removed if the resulting value isn't used. - can_be_removed_if_unused: bool = false, - - // If true, this property access is a function that, when called, can be - // unwrapped if the resulting value is unused. Unwrapping means discarding - // the call target but keeping any arguments with side effects. - call_can_be_unwrapped_if_unused: bool = false, - - pub fn hasSameFlagsAs(a: *Dot, b: *Dot) bool { - return (a.optional_chain == b.optional_chain and - a.can_be_removed_if_unused == b.can_be_removed_if_unused and a.call_can_be_unwrapped_if_unused == b.call_can_be_unwrapped_if_unused); - } - }; - - pub const Index = struct { - index: ExprNodeIndex, - target: ExprNodeIndex, - optional_chain: ?OptionalChain = null, - - pub fn hasSameFlagsAs(a: *E.Index, b: *E.Index) bool { - return (a.optional_chain == b.optional_chain); - } - }; - - pub const Arrow = struct { - args: []G.Arg = &[_]G.Arg{}, - body: G.FnBody, - - is_async: bool = false, - has_rest_arg: bool = false, - prefer_expr: bool = false, // Use shorthand if true and "Body" is a single return statement - - pub const noop_return_undefined: Arrow = .{ - .args = &.{}, - .body = .{ - .loc = .Empty, - .stmts = &.{}, - }, - }; - }; - - pub const Function = struct { func: G.Fn }; - - pub const Identifier = struct { - ref: Ref = Ref.None, - - // If we're inside a "with" statement, this identifier may be a property - // access. In that case it would be incorrect to remove this identifier since - // the property access may be a getter or setter with side effects. - must_keep_due_to_with_stmt: bool = false, - - // If true, this identifier is known to not have a side effect (i.e. to not - // throw an exception) when referenced. If false, this identifier may or - // may not have side effects when referenced. This is used to allow the removal - // of known globals such as "Object" if they aren't used. - can_be_removed_if_unused: bool = false, - - // If true, this identifier represents a function that, when called, can be - // unwrapped if the resulting value is unused. Unwrapping means discarding - // the call target but keeping any arguments with side effects. - call_can_be_unwrapped_if_unused: bool = false, - - pub inline fn init(ref: Ref) Identifier { - return Identifier{ - .ref = ref, - .must_keep_due_to_with_stmt = false, - .can_be_removed_if_unused = false, - .call_can_be_unwrapped_if_unused = false, - }; - } - }; - - /// This is similar to an `Identifier` but it represents a reference to an ES6 - /// import item. - /// - /// Depending on how the code is linked, the file containing this EImportIdentifier - /// may or may not be in the same module group as the file it was imported from.
- /// - /// If it's the same module group then we can just merge the import item symbol - /// with the corresponding symbol that was imported, effectively renaming them - /// to be the same thing and statically binding them together. - /// - /// But if it's a different module group, then the import must be dynamically - /// evaluated using a property access off the corresponding namespace symbol, - /// which represents the result of a require() call. - /// - /// It's stored as a separate type so it's not easy to confuse with a plain - /// identifier. For example, it'd be bad if code trying to convert "{x: x}" into - /// "{x}" shorthand syntax wasn't aware that the "x" in this case is actually - /// "{x: importedNamespace.x}". This separate type forces code to opt-in to - /// doing this instead of opt-out. - pub const ImportIdentifier = struct { - ref: Ref = Ref.None, - - /// If true, this was originally an identifier expression such as "foo". If - /// false, this could potentially have been a member access expression such - /// as "ns.foo" off of an imported namespace object. - was_originally_identifier: bool = false, - }; - - /// This is a dot expression on exports, such as `exports.`. It is given - /// its own AST node to allow CommonJS unwrapping, in which this can just be - /// the identifier in the Ref - pub const CommonJSExportIdentifier = struct { - ref: Ref = Ref.None, - base: Base = .exports, - - /// The original variant of the dot expression must be known so that in the case that we - /// - fail to convert this to ESM - /// - ALSO see an assignment to `module.exports` (commonjs_module_exports_assigned_deoptimized) - /// It must be known if `exports` or `module.exports` was written in source - /// code, as the distinction will alter behavior. The fixup happens in the printer when - /// printing this node. - pub const Base = enum { - exports, - module_dot_exports, - }; - }; - - // This is similar to EIdentifier but it represents class-private fields and - // methods. It can be used where computed properties can be used, such as - // EIndex and Property. - pub const PrivateIdentifier = struct { - ref: Ref, - }; - - /// In development mode, the new JSX transform has a few special props - /// - `React.jsxDEV(type, arguments, key, isStaticChildren, source, self)` - /// - `arguments`: - /// ```{ ...props, children: children, }``` - /// - `source`: https://github.com/babel/babel/blob/ef87648f3f05ccc393f89dea7d4c7c57abf398ce/packages/babel-plugin-transform-react-jsx-source/src/index.js#L24-L48 - /// ```{ - /// fileName: string | null, - /// columnNumber: number | null, - /// lineNumber: number | null, - /// }``` - /// - `children`: - /// - static children? the function is React.jsxsDEV, "jsxs" instead of "jsx" - /// - one child? the function is React.jsxDEV, - /// - no children? the function is React.jsxDEV and children is an empty array. - /// `isStaticChildren`: https://github.com/facebook/react/blob/4ca62cac45c288878d2532e5056981d177f9fdac/packages/react/src/jsx/ReactJSXElementValidator.js#L369-L384 - /// This flag means children is an array of JSX element literals. - /// The documentation on this is sparse, but it appears that - /// React just calls Object.freeze on the children array. - /// Object.freeze, historically, is quite a bit slower[0] than just not doing that. - /// Given that...I am choosing to always pass "false" to this. - /// This also skips extra state that we'd need to track. - /// If React Fast Refresh ends up using this later, then we can revisit this decision.
- /// [0]: https://github.com/automerge/automerge/issues/177 - pub const JSXElement = struct { - /// JSX tag name - ///
              => E.String.init("div") - /// => E.Identifier{.ref = symbolPointingToMyComponent } - /// null represents a fragment - tag: ?ExprNodeIndex = null, - - /// JSX props - properties: G.Property.List = G.Property.List{}, - - /// JSX element children
              {this_is_a_child_element}
              - children: ExprNodeList = ExprNodeList{}, - - // needed to make sure parse and visit happen in the same order - key_prop_index: i32 = -1, - - flags: Flags.JSXElement.Bitset = Flags.JSXElement.Bitset{}, - - close_tag_loc: logger.Loc = logger.Loc.Empty, - - pub const SpecialProp = enum { - __self, // old react transform used this as a prop - __source, - key, - ref, - any, - - pub const Map = ComptimeStringMap(SpecialProp, .{ - .{ "__self", .__self }, - .{ "__source", .__source }, - .{ "key", .key }, - .{ "ref", .ref }, - }); - }; - }; - - pub const Missing = struct { - pub fn jsonStringify(_: *const @This(), writer: anytype) !void { - return try writer.write(null); - } - }; - - pub const Number = struct { - value: f64, - - const double_digit = [_]string{ "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "60", "61", "62", "63", "64", "65", "66", "67", "68", "69", "70", "71", "72", "73", "74", "75", "76", "77", "78", "79", "80", "81", "82", "83", "84", "85", "86", "87", "88", "89", "90", "91", "92", "93", "94", "95", "96", "97", "98", "99", "100" }; - const neg_double_digit = [_]string{ "-0", "-1", "-2", "-3", "-4", "-5", "-6", "-7", "-8", "-9", "-10", "-11", "-12", "-13", "-14", "-15", "-16", "-17", "-18", "-19", "-20", "-21", "-22", "-23", "-24", "-25", "-26", "-27", "-28", "-29", "-30", "-31", "-32", "-33", "-34", "-35", "-36", "-37", "-38", "-39", "-40", "-41", "-42", "-43", "-44", "-45", "-46", "-47", "-48", "-49", "-50", "-51", "-52", "-53", "-54", "-55", "-56", "-57", "-58", "-59", "-60", "-61", "-62", "-63", "-64", "-65", "-66", "-67", "-68", "-69", "-70", "-71", "-72", "-73", "-74", "-75", "-76", "-77", "-78", "-79", "-80", "-81", "-82", "-83", "-84", "-85", "-86", "-87", "-88", "-89", "-90", "-91", "-92", "-93", "-94", "-95", "-96", "-97", "-98", "-99", "-100" }; - - /// String concatenation with numbers is required by the TypeScript compiler for - /// "constant expression" handling in enums. We can match the behavior of a JS VM - /// by calling out to the APIs in WebKit which are responsible for this operation. 
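As a quick illustration of why this conversion must match engine behavior exactly (illustrative, not from this diff), the printed text has to agree with JavaScript's ToString for every double:

```ts
// Shortest round-trip formatting, exactly as a JS VM prints doubles:
console.log(String(100));        // "100" (small integers hit the lookup tables above)
console.log(String(0.1 + 0.2));  // "0.30000000000000004"
console.log(String(1e21));       // "1e+21"
console.log(String(NaN));        // "NaN"
console.log(String(-1 / 0));     // "-Infinity"
```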
- /// - /// This can return `null` in wasm builds to avoid linking JSC - pub fn toString(this: Number, allocator: std.mem.Allocator) ?string { - return toStringFromF64(this.value, allocator); - } - - pub fn toStringFromF64(value: f64, allocator: std.mem.Allocator) ?string { - if (value == @trunc(value) and (value < std.math.maxInt(i32) and value > std.math.minInt(i32))) { - const int_value = @as(i64, @intFromFloat(value)); - const abs = @as(u64, @intCast(@abs(int_value))); - - // do not allocate for a small set of constant numbers: -100 through 100 - if (abs < double_digit.len) { - return if (int_value < 0) - neg_double_digit[abs] - else - double_digit[abs]; - } - - return std.fmt.allocPrint(allocator, "{d}", .{@as(i32, @intCast(int_value))}) catch return null; - } - - if (std.math.isNan(value)) { - return "NaN"; - } - - if (std.math.isNegativeInf(value)) { - return "-Infinity"; - } - - if (std.math.isInf(value)) { - return "Infinity"; - } - - if (Environment.isNative) { - var buf: [124]u8 = undefined; - return allocator.dupe(u8, bun.fmt.FormatDouble.dtoa(&buf, value)) catch bun.outOfMemory(); - } else { - // do not attempt to implement the spec here, it would be error prone. - } - - return null; - } - - pub inline fn toU64(self: Number) u64 { - return self.to(u64); - } - - pub inline fn toUsize(self: Number) usize { - return self.to(usize); - } - - pub inline fn toU32(self: Number) u32 { - return self.to(u32); - } - - pub inline fn toU16(self: Number) u16 { - return self.to(u16); - } - - pub fn to(self: Number, comptime T: type) T { - return @as(T, @intFromFloat(@min(@max(@trunc(self.value), 0), comptime @min(std.math.floatMax(f64), std.math.maxInt(T))))); - } - - pub fn jsonStringify(self: *const Number, writer: anytype) !void { - return try writer.write(self.value); - } - - pub fn toJS(this: @This()) JSC.JSValue { - return JSC.JSValue.jsNumber(this.value); - } - }; - - pub const BigInt = struct { - value: string, - - pub var empty = BigInt{ .value = "" }; - - pub fn jsonStringify(self: *const @This(), writer: anytype) !void { - return try writer.write(self.value); - } - - pub fn toJS(_: @This()) JSC.JSValue { - // TODO: - return JSC.JSValue.jsNumber(0); - } - }; - - pub const Object = struct { - properties: G.Property.List = G.Property.List{}, - comma_after_spread: ?logger.Loc = null, - is_single_line: bool = false, - is_parenthesized: bool = false, - was_originally_macro: bool = false, - - close_brace_loc: logger.Loc = logger.Loc.Empty, - - // used in TOML parser to merge properties - pub const Rope = struct { - head: Expr, - next: ?*Rope = null, - pub fn append(this: *Rope, expr: Expr, allocator: std.mem.Allocator) OOM!*Rope { - if (this.next) |next| { - return try next.append(expr, allocator); - } - - const rope = try allocator.create(Rope); - rope.* = .{ .head = expr }; - this.next = rope; - return rope; - } - }; - - pub fn get(self: *const Object, key: string) ?Expr { - return if (asProperty(self, key)) |query| query.expr else @as(?Expr, null); - } - - pub fn toJS(this: *Object, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue { - var obj = JSC.JSValue.createEmptyObject(globalObject, this.properties.len); - obj.protect(); - defer obj.unprotect(); - const props: []const G.Property = this.properties.slice(); - for (props) |prop| { - if (prop.kind != .normal or prop.class_static_block != null or prop.key == null or prop.key.?.data != .e_string or prop.value == null) { - return error.@"Cannot convert argument type to JS"; - } - var key = 
prop.key.?.data.e_string.toZigString(allocator); - obj.put(globalObject, &key, try prop.value.?.toJS(allocator, globalObject)); - } - - return obj; - } - - pub fn put(self: *Object, allocator: std.mem.Allocator, key: string, expr: Expr) !void { - if (asProperty(self, key)) |query| { - self.properties.ptr[query.i].value = expr; - } else { - try self.properties.push(allocator, .{ - .key = Expr.init(E.String, E.String.init(key), expr.loc), - .value = expr, - }); - } - } - - pub fn putString(self: *Object, allocator: std.mem.Allocator, key: string, value: string) !void { - return try put(self, allocator, key, Expr.init(E.String, E.String.init(value), logger.Loc.Empty)); - } - - pub const SetError = error{ OutOfMemory, Clobber }; - - pub fn set(self: *const Object, key: Expr, allocator: std.mem.Allocator, value: Expr) SetError!void { - if (self.hasProperty(key.data.e_string.data)) return error.Clobber; - try self.properties.push(allocator, .{ - .key = key, - .value = value, - }); - } - - pub const RopeQuery = struct { - expr: Expr, - rope: *const Rope, - }; - - // this is terribly, shamefully slow - pub fn setRope(self: *Object, rope: *const Rope, allocator: std.mem.Allocator, value: Expr) SetError!void { - if (self.get(rope.head.data.e_string.data)) |existing| { - switch (existing.data) { - .e_array => |array| { - if (rope.next == null) { - try array.push(allocator, value); - return; - } - - if (array.items.last()) |last| { - if (last.data != .e_object) { - return error.Clobber; - } - - try last.data.e_object.setRope(rope.next.?, allocator, value); - return; - } - - try array.push(allocator, value); - return; - }, - .e_object => |object| { - if (rope.next != null) { - try object.setRope(rope.next.?, allocator, value); - return; - } - - return error.Clobber; - }, - else => { - return error.Clobber; - }, - } - } - - var value_ = value; - if (rope.next) |next| { - var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc); - try obj.data.e_object.setRope(next, allocator, value); - value_ = obj; - } - - try self.properties.push(allocator, .{ - .key = rope.head, - .value = value_, - }); - } - - pub fn getOrPutObject(self: *Object, rope: *const Rope, allocator: std.mem.Allocator) SetError!Expr { - if (self.get(rope.head.data.e_string.data)) |existing| { - switch (existing.data) { - .e_array => |array| { - if (rope.next == null) { - return error.Clobber; - } - - if (array.items.last()) |last| { - if (last.data != .e_object) { - return error.Clobber; - } - - return try last.data.e_object.getOrPutObject(rope.next.?, allocator); - } - - return error.Clobber; - }, - .e_object => |object| { - if (rope.next != null) { - return try object.getOrPutObject(rope.next.?, allocator); - } - - // success - return existing; - }, - else => { - return error.Clobber; - }, - } - } - - if (rope.next) |next| { - var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc); - const out = try obj.data.e_object.getOrPutObject(next, allocator); - try self.properties.push(allocator, .{ - .key = rope.head, - .value = obj, - }); - return out; - } - - const out = Expr.init(E.Object, E.Object{}, rope.head.loc); - try self.properties.push(allocator, .{ - .key = rope.head, - .value = out, - }); - return out; - } - - pub fn getOrPutArray(self: *Object, rope: *const Rope, allocator: std.mem.Allocator) SetError!Expr { - if (self.get(rope.head.data.e_string.data)) |existing| { - switch (existing.data) { - .e_array => |array| { - if (rope.next == null) { - return existing; - } - - if (array.items.last()) 
|last| { - if (last.data != .e_object) { - return error.Clobber; - } - - return try last.data.e_object.getOrPutArray(rope.next.?, allocator); - } - - return error.Clobber; - }, - .e_object => |object| { - if (rope.next == null) { - return error.Clobber; - } - - return try object.getOrPutArray(rope.next.?, allocator); - }, - else => { - return error.Clobber; - }, - } - } - - if (rope.next) |next| { - var obj = Expr.init(E.Object, E.Object{ .properties = .{} }, rope.head.loc); - const out = try obj.data.e_object.getOrPutArray(next, allocator); - try self.properties.push(allocator, .{ - .key = rope.head, - .value = obj, - }); - return out; - } - - const out = Expr.init(E.Array, E.Array{}, rope.head.loc); - try self.properties.push(allocator, .{ - .key = rope.head, - .value = out, - }); - return out; - } - - pub fn hasProperty(obj: *const Object, name: string) bool { - for (obj.properties.slice()) |prop| { - const key = prop.key orelse continue; - if (std.meta.activeTag(key.data) != .e_string) continue; - if (key.data.e_string.eql(string, name)) return true; - } - return false; - } - - pub fn asProperty(obj: *const Object, name: string) ?Expr.Query { - for (obj.properties.slice(), 0..) |prop, i| { - const value = prop.value orelse continue; - const key = prop.key orelse continue; - if (std.meta.activeTag(key.data) != .e_string) continue; - const key_str = key.data.e_string; - if (key_str.eql(string, name)) { - return Expr.Query{ - .expr = value, - .loc = key.loc, - .i = @as(u32, @truncate(i)), - }; - } - } - - return null; - } - - /// Assumes each key in the property is a string - pub fn alphabetizeProperties(this: *Object) void { - if (comptime Environment.isDebug) { - for (this.properties.slice()) |prop| { - bun.assert(prop.key.?.data == .e_string); - } - } - std.sort.pdq(G.Property, this.properties.slice(), {}, Sorter.isLessThan); - } - - pub fn packageJSONSort(this: *Object) void { - std.sort.pdq(G.Property, this.properties.slice(), {}, PackageJSONSort.Fields.isLessThan); - } - - const PackageJSONSort = struct { - const Fields = enum(u8) { - name = 0, - version = 1, - author = 2, - repository = 3, - config = 4, - main = 5, - module = 6, - dependencies = 7, - devDependencies = 8, - optionalDependencies = 9, - peerDependencies = 10, - exports = 11, - __fake = 12, - - pub const Map = ComptimeStringMap(Fields, .{ - .{ "name", Fields.name }, - .{ "version", Fields.version }, - .{ "author", Fields.author }, - .{ "repository", Fields.repository }, - .{ "config", Fields.config }, - .{ "main", Fields.main }, - .{ "module", Fields.module }, - .{ "dependencies", Fields.dependencies }, - .{ "devDependencies", Fields.devDependencies }, - .{ "optionalDependencies", Fields.optionalDependencies }, - .{ "peerDependencies", Fields.peerDependencies }, - .{ "exports", Fields.exports }, - }); - - pub fn isLessThan(ctx: void, lhs: G.Property, rhs: G.Property) bool { - var lhs_key_size: u8 = @intFromEnum(Fields.__fake); - var rhs_key_size: u8 = @intFromEnum(Fields.__fake); - - if (lhs.key != null and lhs.key.?.data == .e_string) { - lhs_key_size = @intFromEnum(Map.get(lhs.key.?.data.e_string.data) orelse Fields.__fake); - } - - if (rhs.key != null and rhs.key.?.data == .e_string) { - rhs_key_size = @intFromEnum(Map.get(rhs.key.?.data.e_string.data) orelse Fields.__fake); - } - - return switch (std.math.order(lhs_key_size, rhs_key_size)) { - .lt => true, - .gt => false, - .eq => strings.cmpStringsAsc(ctx, lhs.key.?.data.e_string.data, rhs.key.?.data.e_string.data), - }; - } - }; - }; - - const Sorter = struct { - 
pub fn isLessThan(ctx: void, lhs: G.Property, rhs: G.Property) bool { - return strings.cmpStringsAsc(ctx, lhs.key.?.data.e_string.data, rhs.key.?.data.e_string.data); - } - }; - }; - - pub const Spread = struct { value: ExprNodeIndex }; - - /// JavaScript string literal type - pub const String = struct { - // A version of this where `utf8` and `value` are stored in a packed union, with len as a single u32 was attempted. - // It did not improve benchmarks. Neither did converting this from a heap-allocated type to a stack-allocated type. - // TODO: change this to *const anyopaque and change all uses to either .slice8() or .slice16() - data: []const u8 = "", - prefer_template: bool = false, - - // A very simple rope implementation - // We only use this for string folding, so this is kind of overkill - // We don't need to deal with substrings - next: ?*String = null, - end: ?*String = null, - rope_len: u32 = 0, - is_utf16: bool = false, - - pub fn isIdentifier(this: *String, allocator: std.mem.Allocator) bool { - if (!this.isUTF8()) { - return bun.js_lexer.isIdentifierUTF16(this.slice16()); - } - - return bun.js_lexer.isIdentifier(this.slice(allocator)); - } - - pub const class = E.String{ .data = "class" }; - - pub fn push(this: *String, other: *String) void { - bun.assert(this.isUTF8()); - bun.assert(other.isUTF8()); - - if (other.rope_len == 0) { - other.rope_len = @truncate(other.data.len); - } - - if (this.rope_len == 0) { - this.rope_len = @truncate(this.data.len); - } - - this.rope_len += other.rope_len; - if (this.next == null) { - this.next = other; - this.end = other; - } else { - var end = this.end.?; - while (end.next != null) end = end.end.?; - end.next = other; - this.end = other; - } - } - - /// Cloning the rope string is rarely needed, see `foldStringAddition`'s - /// comments and the 'edgecase/EnumInliningRopeStringPoison' test - pub fn cloneRopeNodes(s: String) String { - var root = s; - - if (root.next != null) { - var current: ?*String = &root; - while (true) { - const node = current.?; - if (node.next) |next| { - node.next = Expr.Data.Store.append(String, next.*); - current = node.next; - } else { - root.end = node; - break; - } - } - } - - return root; - } - - pub fn toUTF8(this: *String, allocator: std.mem.Allocator) !void { - if (!this.is_utf16) return; - this.data = try strings.toUTF8Alloc(allocator, this.slice16()); - this.is_utf16 = false; - } - - pub fn init(value: anytype) String { - const Value = @TypeOf(value); - if (Value == []u16 or Value == []const u16) { - return .{ - .data = @as([*]const u8, @ptrCast(value.ptr))[0..value.len], - .is_utf16 = true, - }; - } - - return .{ .data = value }; - } - - /// E.String containing non-ascii characters may not fully work. - /// https://github.com/oven-sh/bun/issues/11963 - /// More investigation is needed. 
- pub fn initReEncodeUTF8(utf8: []const u8, allocator: std.mem.Allocator) String { - return if (bun.strings.isAllASCII(utf8)) - init(utf8) - else - init(bun.strings.toUTF16AllocForReal(allocator, utf8, false, false) catch bun.outOfMemory()); - } - - pub fn slice8(this: *const String) []const u8 { - bun.assert(!this.is_utf16); - return this.data; - } - - pub fn slice16(this: *const String) []const u16 { - bun.assert(this.is_utf16); - return @as([*]const u16, @ptrCast(@alignCast(this.data.ptr)))[0..this.data.len]; - } - - pub fn resolveRopeIfNeeded(this: *String, allocator: std.mem.Allocator) void { - if (this.next == null or !this.isUTF8()) return; - var bytes = std.ArrayList(u8).initCapacity(allocator, this.rope_len) catch bun.outOfMemory(); - - bytes.appendSliceAssumeCapacity(this.data); - var str = this.next; - while (str) |part| { - bytes.appendSlice(part.data) catch bun.outOfMemory(); - str = part.next; - } - this.data = bytes.items; - this.next = null; - } - - pub fn slice(this: *String, allocator: std.mem.Allocator) []const u8 { - this.resolveRopeIfNeeded(allocator); - return this.string(allocator) catch bun.outOfMemory(); - } - - pub var empty = String{}; - pub var @"true" = String{ .data = "true" }; - pub var @"false" = String{ .data = "false" }; - pub var @"null" = String{ .data = "null" }; - pub var @"undefined" = String{ .data = "undefined" }; - - pub fn clone(str: *const String, allocator: std.mem.Allocator) !String { - return String{ - .data = try allocator.dupe(u8, str.data), - .prefer_template = str.prefer_template, - .is_utf16 = !str.isUTF8(), - }; - } - - pub fn cloneSliceIfNecessary(str: *const String, allocator: std.mem.Allocator) !bun.string { - if (str.isUTF8()) { - return allocator.dupe(u8, str.string(allocator) catch unreachable); - } - - return str.string(allocator); - } - - pub fn javascriptLength(s: *const String) ?u32 { - if (s.rope_len > 0) { - // We only support ascii ropes for now - return s.rope_len; - } - - if (s.isUTF8()) { - if (!strings.isAllASCII(s.data)) { - return null; - } - return @truncate(s.data.len); - } - - return @truncate(s.slice16().len); - } - - pub inline fn len(s: *const String) usize { - return if (s.rope_len > 0) s.rope_len else s.data.len; - } - - pub inline fn isUTF8(s: *const String) bool { - return !s.is_utf16; - } - - pub inline fn isBlank(s: *const String) bool { - return s.len() == 0; - } - - pub inline fn isPresent(s: *const String) bool { - return s.len() > 0; - } - - pub fn eql(s: *const String, comptime _t: type, other: anytype) bool { - if (s.isUTF8()) { - switch (_t) { - @This() => { - if (other.isUTF8()) { - return strings.eqlLong(s.data, other.data, true); - } else { - return strings.utf16EqlString(other.slice16(), s.data); - } - }, - bun.string => { - return strings.eqlLong(s.data, other, true); - }, - []u16, []const u16 => { - return strings.utf16EqlString(other, s.data); - }, - else => { - @compileError("Invalid type"); - }, - } - } else { - switch (_t) { - @This() => { - if (other.isUTF8()) { - return strings.utf16EqlString(s.slice16(), other.data); - } else { - return std.mem.eql(u16, other.slice16(), s.slice16()); - } - }, - bun.string => { - return strings.utf16EqlString(s.slice16(), other); - }, - []u16, []const u16 => { - return std.mem.eql(u16, other.slice16(), s.slice16()); - }, - else => { - @compileError("Invalid type"); - }, - } - } - } - - pub fn eqlComptime(s: *const String, comptime value: []const u8) bool { - bun.assert(s.next == null); - return if (s.isUTF8()) - strings.eqlComptime(s.data, value) - else - 
strings.eqlComptimeUTF16(s.slice16(), value); - } - - pub fn hasPrefixComptime(s: *const String, comptime value: anytype) bool { - if (s.data.len < value.len) - return false; - - return if (s.isUTF8()) - strings.eqlComptime(s.data[0..value.len], value) - else - strings.eqlComptimeUTF16(s.slice16()[0..value.len], value); - } - - pub fn string(s: *const String, allocator: std.mem.Allocator) OOM!bun.string { - if (s.isUTF8()) { - return s.data; - } else { - return strings.toUTF8Alloc(allocator, s.slice16()); - } - } - - pub fn stringZ(s: *const String, allocator: std.mem.Allocator) OOM!bun.stringZ { - if (s.isUTF8()) { - return allocator.dupeZ(u8, s.data); - } else { - return strings.toUTF8AllocZ(allocator, s.slice16()); - } - } - - pub fn stringCloned(s: *const String, allocator: std.mem.Allocator) OOM!bun.string { - if (s.isUTF8()) { - return allocator.dupe(u8, s.data); - } else { - return strings.toUTF8Alloc(allocator, s.slice16()); - } - } - - pub fn hash(s: *const String) u64 { - if (s.isBlank()) return 0; - - if (s.isUTF8()) { - // hash utf-8 - return bun.hash(s.data); - } else { - // hash utf-16 - return bun.hash(@as([*]const u8, @ptrCast(s.slice16().ptr))[0 .. s.slice16().len * 2]); - } - } - - pub fn toJS(s: *String, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) !JSC.JSValue { - s.resolveRopeIfNeeded(allocator); - if (!s.isPresent()) { - var emp = bun.String.empty; - return emp.toJS(globalObject); - } - - if (s.isUTF8()) { - if (try strings.toUTF16Alloc(allocator, s.slice8(), false, false)) |utf16| { - var out, const chars = bun.String.createUninitialized(.utf16, utf16.len); - @memcpy(chars, utf16); - return out.transferToJS(globalObject); - } else { - var out, const chars = bun.String.createUninitialized(.latin1, s.slice8().len); - @memcpy(chars, s.slice8()); - return out.transferToJS(globalObject); - } - } else { - var out, const chars = bun.String.createUninitialized(.utf16, s.slice16().len); - @memcpy(chars, s.slice16()); - return out.transferToJS(globalObject); - } - } - - pub fn toZigString(s: *String, allocator: std.mem.Allocator) JSC.ZigString { - if (s.isUTF8()) { - return JSC.ZigString.fromUTF8(s.slice(allocator)); - } else { - return JSC.ZigString.initUTF16(s.slice16()); - } - } - - pub fn format(s: String, comptime fmt: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - comptime bun.assert(fmt.len == 0); - - try writer.writeAll("E.String"); - if (s.next == null) { - try writer.writeAll("("); - if (s.isUTF8()) { - try writer.print("\"{s}\"", .{s.data}); - } else { - try writer.print("\"{}\"", .{bun.fmt.utf16(s.slice16())}); - } - try writer.writeAll(")"); - } else { - try writer.writeAll("(rope: ["); - var it: ?*const String = &s; - while (it) |part| { - if (part.isUTF8()) { - try writer.print("\"{s}\"", .{part.data}); - } else { - try writer.print("\"{}\"", .{bun.fmt.utf16(part.slice16())}); - } - it = part.next; - if (it != null) try writer.writeAll(" "); - } - try writer.writeAll("])"); - } - } - - pub fn jsonStringify(s: *const String, writer: anytype) !void { - var buf = [_]u8{0} ** 4096; - var i: usize = 0; - for (s.slice16()) |char| { - buf[i] = @as(u8, @intCast(char)); - i += 1; - if (i >= 4096) { - break; - } - } - - return try writer.write(buf[0..i]); - } - }; - - // value is in the Node - pub const TemplatePart = struct { - value: ExprNodeIndex, - tail_loc: logger.Loc, - tail: Template.Contents, - }; - - pub const Template = struct { - tag: ?ExprNodeIndex = null, - parts: []TemplatePart = &.{}, - head: Contents, - - pub const 
Contents = union(Tag) { - cooked: E.String, - raw: string, - - const Tag = enum { - cooked, - raw, - }; - - pub fn isUTF8(contents: Contents) bool { - return contents == .cooked and contents.cooked.isUTF8(); - } - }; - - /// "`a${'b'}c`" => "`abc`" - pub fn fold( - this: *Template, - allocator: std.mem.Allocator, - loc: logger.Loc, - ) Expr { - if (this.tag != null or (this.head == .cooked and !this.head.cooked.isUTF8())) { - // we only fold utf-8/ascii for now - return Expr{ - .data = .{ .e_template = this }, - .loc = loc, - }; - } - - bun.assert(this.head == .cooked); - - if (this.parts.len == 0) { - return Expr.init(E.String, this.head.cooked, loc); - } - - var parts = std.ArrayList(TemplatePart).initCapacity(allocator, this.parts.len) catch unreachable; - var head = Expr.init(E.String, this.head.cooked, loc); - for (this.parts) |part_src| { - var part = part_src; - bun.assert(part.tail == .cooked); - - part.value = part.value.unwrapInlined(); - - switch (part.value.data) { - .e_number => { - if (part.value.data.e_number.toString(allocator)) |s| { - part.value = Expr.init(E.String, E.String.init(s), part.value.loc); - } - }, - .e_null => { - part.value = Expr.init(E.String, E.String.init("null"), part.value.loc); - }, - .e_boolean => { - part.value = Expr.init(E.String, E.String.init(if (part.value.data.e_boolean.value) - "true" - else - "false"), part.value.loc); - }, - .e_undefined => { - part.value = Expr.init(E.String, E.String.init("undefined"), part.value.loc); - }, - .e_big_int => |value| { - part.value = Expr.init(E.String, E.String.init(value.value), part.value.loc); - }, - else => {}, - } - - if (part.value.data == .e_string and part.tail.cooked.isUTF8() and part.value.data.e_string.isUTF8()) { - if (parts.items.len == 0) { - if (part.value.data.e_string.len() > 0) { - head.data.e_string.push(Expr.init(E.String, part.value.data.e_string.*, logger.Loc.Empty).data.e_string); - } - - if (part.tail.cooked.len() > 0) { - head.data.e_string.push(Expr.init(E.String, part.tail.cooked, part.tail_loc).data.e_string); - } - - continue; - } else { - var prev_part = &parts.items[parts.items.len - 1]; - bun.assert(prev_part.tail == .cooked); - - if (prev_part.tail.cooked.isUTF8()) { - if (part.value.data.e_string.len() > 0) { - prev_part.tail.cooked.push(Expr.init(E.String, part.value.data.e_string.*, logger.Loc.Empty).data.e_string); - } - - if (part.tail.cooked.len() > 0) { - prev_part.tail.cooked.push(Expr.init(E.String, part.tail.cooked, part.tail_loc).data.e_string); - } - } else { - parts.appendAssumeCapacity(part); - } - } - } else { - parts.appendAssumeCapacity(part); - } - } - - if (parts.items.len == 0) { - parts.deinit(); - head.data.e_string.resolveRopeIfNeeded(allocator); - return head; - } - - return Expr.init(E.Template, .{ - .tag = null, - .parts = parts.items, - .head = .{ .cooked = head.data.e_string.* }, - }, loc); - } - }; - - pub const RegExp = struct { - value: string, - - // This exists for JavaScript bindings - // The RegExp constructor expects flags as a second argument. - // We want to avoid re-lexing the flags, so we store them here. 
- // This is the index of the first character in a flag, not the "/" - // /foo/gim - // ^ - flags_offset: ?u16 = null, - - pub var empty = RegExp{ .value = "" }; - - pub fn pattern(this: RegExp) string { - - // rewind until we reach the /foo/gim - // ^ - // should only ever be a single character - // but we're being cautious - if (this.flags_offset) |i_| { - var i = i_; - while (i > 0 and this.value[i] != '/') { - i -= 1; - } - - return std.mem.trim(u8, this.value[0..i], "/"); - } - - return std.mem.trim(u8, this.value, "/"); - } - - pub fn flags(this: RegExp) string { - // rewind until we reach the /foo/gim - // ^ - // should only ever be a single character - // but we're being cautious - if (this.flags_offset) |i| { - return this.value[i..]; - } - - return ""; - } - - pub fn jsonStringify(self: *const RegExp, writer: anytype) !void { - return try writer.write(self.value); - } - }; - - pub const Class = G.Class; - - pub const Await = struct { - value: ExprNodeIndex, - }; - - pub const Yield = struct { - value: ?ExprNodeIndex = null, - is_star: bool = false, - }; - - pub const If = struct { - test_: ExprNodeIndex, - yes: ExprNodeIndex, - no: ExprNodeIndex, - }; - - pub const RequireString = struct { - import_record_index: u32 = 0, - - unwrapped_id: u32 = std.math.maxInt(u32), - }; - - pub const RequireResolveString = struct { - import_record_index: u32, - - // close_paren_loc: logger.Loc = logger.Loc.Empty, - }; - - pub const InlinedEnum = struct { - value: ExprNodeIndex, - comment: string, - }; - - pub const Import = struct { - expr: ExprNodeIndex, - options: ExprNodeIndex = Expr.empty, - import_record_index: u32, - - /// TODO: - /// Comments inside "import()" expressions have special meaning for Webpack. - /// Preserving comments inside these expressions makes it possible to use - /// esbuild as a TypeScript-to-JavaScript frontend for Webpack to improve - /// performance. We intentionally do not interpret these comments in esbuild - /// because esbuild is not Webpack. But we do preserve them since doing so is - /// harmless, easy to maintain, and useful to people. See the Webpack docs for - /// more info: https://webpack.js.org/api/module-methods/#magic-comments. 
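For reference, the Webpack magic comments mentioned above look like this in practice (illustrative usage, not part of this diff):

```ts
// Webpack reads these comments to name and prefetch the async chunk;
// bundlers that ignore them can still safely preserve them.
const editor = await import(
  /* webpackChunkName: "editor" */
  /* webpackPrefetch: true */
  "./editor.js"
);
```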
- // leading_interior_comments: []G.Comment = &([_]G.Comment{}), - - pub fn isImportRecordNull(this: *const Import) bool { - return this.import_record_index == std.math.maxInt(u32); - } - - pub fn importRecordLoader(import: *const Import) ?bun.options.Loader { - // This logic is duplicated in js_printer.zig fn parsePath() - const obj = import.options.data.as(.e_object) orelse - return null; - const with = obj.get("with") orelse obj.get("assert") orelse - return null; - const with_obj = with.data.as(.e_object) orelse - return null; - const str = (with_obj.get("type") orelse - return null).data.as(.e_string) orelse - return null; - - if (!str.is_utf16) if (bun.options.Loader.fromString(str.data)) |loader| { - if (loader == .sqlite) { - const embed = with_obj.get("embed") orelse return loader; - const embed_str = embed.data.as(.e_string) orelse return loader; - if (embed_str.eqlComptime("true")) { - return .sqlite_embedded; - } - } - return loader; - }; - - return null; - } - }; -}; - -pub const Stmt = struct { - loc: logger.Loc, - data: Data, - - pub const Batcher = NewBatcher(Stmt); - - pub fn assign(a: Expr, b: Expr) Stmt { - return Stmt.alloc( - S.SExpr, - S.SExpr{ - .value = Expr.assign(a, b), - }, - a.loc, - ); - } - - const Serializable = struct { - type: Tag, - object: string, - value: Data, - loc: logger.Loc, - }; - - pub fn jsonStringify(self: *const Stmt, writer: anytype) !void { - return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "stmt", .value = self.data, .loc = self.loc }); - } - - pub fn isTypeScript(self: *Stmt) bool { - return @as(Stmt.Tag, self.data) == .s_type_script; - } - - pub fn isSuperCall(self: Stmt) bool { - return self.data == .s_expr and self.data.s_expr.value.data == .e_call and self.data.s_expr.value.data.e_call.target.data == .e_super; - } - - pub fn isMissingExpr(self: Stmt) bool { - return self.data == .s_expr and self.data.s_expr.value.data == .e_missing; - } - - pub fn empty() Stmt { - return Stmt{ .data = .{ .s_empty = None }, .loc = logger.Loc{} }; - } - - pub fn toEmpty(this: Stmt) Stmt { - return .{ - .data = .{ - .s_empty = None, - }, - .loc = this.loc, - }; - } - - const None = S.Empty{}; - - pub var icount: usize = 0; - pub fn init(comptime StatementType: type, origData: *StatementType, loc: logger.Loc) Stmt { - icount += 1; - - return switch (comptime StatementType) { - S.Empty => Stmt{ .loc = loc, .data = Data{ .s_empty = S.Empty{} } }, - S.Block => Stmt.comptime_init("s_block", S.Block, origData, loc), - S.Break => Stmt.comptime_init("s_break", S.Break, origData, loc), - S.Class => Stmt.comptime_init("s_class", S.Class, origData, loc), - S.Comment => Stmt.comptime_init("s_comment", S.Comment, origData, loc), - S.Continue => Stmt.comptime_init("s_continue", S.Continue, origData, loc), - S.Debugger => Stmt.comptime_init("s_debugger", S.Debugger, origData, loc), - S.Directive => Stmt.comptime_init("s_directive", S.Directive, origData, loc), - S.DoWhile => Stmt.comptime_init("s_do_while", S.DoWhile, origData, loc), - S.Enum => Stmt.comptime_init("s_enum", S.Enum, origData, loc), - S.ExportClause => Stmt.comptime_init("s_export_clause", S.ExportClause, origData, loc), - S.ExportDefault => Stmt.comptime_init("s_export_default", S.ExportDefault, origData, loc), - S.ExportEquals => Stmt.comptime_init("s_export_equals", S.ExportEquals, origData, loc), - S.ExportFrom => Stmt.comptime_init("s_export_from", S.ExportFrom, origData, loc), - S.ExportStar => Stmt.comptime_init("s_export_star", S.ExportStar, origData, loc), - 
S.SExpr => Stmt.comptime_init("s_expr", S.SExpr, origData, loc), - S.ForIn => Stmt.comptime_init("s_for_in", S.ForIn, origData, loc), - S.ForOf => Stmt.comptime_init("s_for_of", S.ForOf, origData, loc), - S.For => Stmt.comptime_init("s_for", S.For, origData, loc), - S.Function => Stmt.comptime_init("s_function", S.Function, origData, loc), - S.If => Stmt.comptime_init("s_if", S.If, origData, loc), - S.Import => Stmt.comptime_init("s_import", S.Import, origData, loc), - S.Label => Stmt.comptime_init("s_label", S.Label, origData, loc), - S.Local => Stmt.comptime_init("s_local", S.Local, origData, loc), - S.Namespace => Stmt.comptime_init("s_namespace", S.Namespace, origData, loc), - S.Return => Stmt.comptime_init("s_return", S.Return, origData, loc), - S.Switch => Stmt.comptime_init("s_switch", S.Switch, origData, loc), - S.Throw => Stmt.comptime_init("s_throw", S.Throw, origData, loc), - S.Try => Stmt.comptime_init("s_try", S.Try, origData, loc), - S.TypeScript => Stmt.comptime_init("s_type_script", S.TypeScript, origData, loc), - S.While => Stmt.comptime_init("s_while", S.While, origData, loc), - S.With => Stmt.comptime_init("s_with", S.With, origData, loc), - else => @compileError("Invalid type in Stmt.init"), - }; - } - inline fn comptime_alloc(comptime tag_name: string, comptime typename: type, origData: anytype, loc: logger.Loc) Stmt { - return Stmt{ - .loc = loc, - .data = @unionInit( - Data, - tag_name, - Data.Store.append( - typename, - origData, - ), - ), - }; - } - - fn allocateData(allocator: std.mem.Allocator, comptime tag_name: string, comptime typename: type, origData: anytype, loc: logger.Loc) Stmt { - const value = allocator.create(@TypeOf(origData)) catch unreachable; - value.* = origData; - - return comptime_init(tag_name, *typename, value, loc); - } - - inline fn comptime_init(comptime tag_name: string, comptime TypeName: type, origData: TypeName, loc: logger.Loc) Stmt { - return Stmt{ .loc = loc, .data = @unionInit(Data, tag_name, origData) }; - } - - pub fn alloc(comptime StatementData: type, origData: StatementData, loc: logger.Loc) Stmt { - Stmt.Data.Store.assert(); - - icount += 1; - return switch (StatementData) { - S.Block => Stmt.comptime_alloc("s_block", S.Block, origData, loc), - S.Break => Stmt.comptime_alloc("s_break", S.Break, origData, loc), - S.Class => Stmt.comptime_alloc("s_class", S.Class, origData, loc), - S.Comment => Stmt.comptime_alloc("s_comment", S.Comment, origData, loc), - S.Continue => Stmt.comptime_alloc("s_continue", S.Continue, origData, loc), - S.Debugger => Stmt{ .loc = loc, .data = .{ .s_debugger = origData } }, - S.Directive => Stmt.comptime_alloc("s_directive", S.Directive, origData, loc), - S.DoWhile => Stmt.comptime_alloc("s_do_while", S.DoWhile, origData, loc), - S.Empty => Stmt{ .loc = loc, .data = Data{ .s_empty = S.Empty{} } }, - S.Enum => Stmt.comptime_alloc("s_enum", S.Enum, origData, loc), - S.ExportClause => Stmt.comptime_alloc("s_export_clause", S.ExportClause, origData, loc), - S.ExportDefault => Stmt.comptime_alloc("s_export_default", S.ExportDefault, origData, loc), - S.ExportEquals => Stmt.comptime_alloc("s_export_equals", S.ExportEquals, origData, loc), - S.ExportFrom => Stmt.comptime_alloc("s_export_from", S.ExportFrom, origData, loc), - S.ExportStar => Stmt.comptime_alloc("s_export_star", S.ExportStar, origData, loc), - S.SExpr => Stmt.comptime_alloc("s_expr", S.SExpr, origData, loc), - S.ForIn => Stmt.comptime_alloc("s_for_in", S.ForIn, origData, loc), - S.ForOf => Stmt.comptime_alloc("s_for_of", S.ForOf, origData, 
loc), - S.For => Stmt.comptime_alloc("s_for", S.For, origData, loc), - S.Function => Stmt.comptime_alloc("s_function", S.Function, origData, loc), - S.If => Stmt.comptime_alloc("s_if", S.If, origData, loc), - S.Import => Stmt.comptime_alloc("s_import", S.Import, origData, loc), - S.Label => Stmt.comptime_alloc("s_label", S.Label, origData, loc), - S.Local => Stmt.comptime_alloc("s_local", S.Local, origData, loc), - S.Namespace => Stmt.comptime_alloc("s_namespace", S.Namespace, origData, loc), - S.Return => Stmt.comptime_alloc("s_return", S.Return, origData, loc), - S.Switch => Stmt.comptime_alloc("s_switch", S.Switch, origData, loc), - S.Throw => Stmt.comptime_alloc("s_throw", S.Throw, origData, loc), - S.Try => Stmt.comptime_alloc("s_try", S.Try, origData, loc), - S.TypeScript => Stmt{ .loc = loc, .data = Data{ .s_type_script = S.TypeScript{} } }, - S.While => Stmt.comptime_alloc("s_while", S.While, origData, loc), - S.With => Stmt.comptime_alloc("s_with", S.With, origData, loc), - else => @compileError("Invalid type in Stmt.init"), - }; - } - - pub const Disabler = bun.DebugOnlyDisabler(@This()); - - /// When the lifetime of an Stmt.Data's pointer must exist longer than reset() is called, use this function. - /// Be careful to free the memory (or use an allocator that does it for you) - /// Also, prefer Stmt.init or Stmt.alloc when possible. This will be slower. - pub fn allocate(allocator: std.mem.Allocator, comptime StatementData: type, origData: StatementData, loc: logger.Loc) Stmt { - Stmt.Data.Store.assert(); - - icount += 1; - return switch (StatementData) { - S.Block => Stmt.allocateData(allocator, "s_block", S.Block, origData, loc), - S.Break => Stmt.allocateData(allocator, "s_break", S.Break, origData, loc), - S.Class => Stmt.allocateData(allocator, "s_class", S.Class, origData, loc), - S.Comment => Stmt.allocateData(allocator, "s_comment", S.Comment, origData, loc), - S.Continue => Stmt.allocateData(allocator, "s_continue", S.Continue, origData, loc), - S.Debugger => Stmt{ .loc = loc, .data = .{ .s_debugger = origData } }, - S.Directive => Stmt.allocateData(allocator, "s_directive", S.Directive, origData, loc), - S.DoWhile => Stmt.allocateData(allocator, "s_do_while", S.DoWhile, origData, loc), - S.Empty => Stmt{ .loc = loc, .data = Data{ .s_empty = S.Empty{} } }, - S.Enum => Stmt.allocateData(allocator, "s_enum", S.Enum, origData, loc), - S.ExportClause => Stmt.allocateData(allocator, "s_export_clause", S.ExportClause, origData, loc), - S.ExportDefault => Stmt.allocateData(allocator, "s_export_default", S.ExportDefault, origData, loc), - S.ExportEquals => Stmt.allocateData(allocator, "s_export_equals", S.ExportEquals, origData, loc), - S.ExportFrom => Stmt.allocateData(allocator, "s_export_from", S.ExportFrom, origData, loc), - S.ExportStar => Stmt.allocateData(allocator, "s_export_star", S.ExportStar, origData, loc), - S.SExpr => Stmt.allocateData(allocator, "s_expr", S.SExpr, origData, loc), - S.ForIn => Stmt.allocateData(allocator, "s_for_in", S.ForIn, origData, loc), - S.ForOf => Stmt.allocateData(allocator, "s_for_of", S.ForOf, origData, loc), - S.For => Stmt.allocateData(allocator, "s_for", S.For, origData, loc), - S.Function => Stmt.allocateData(allocator, "s_function", S.Function, origData, loc), - S.If => Stmt.allocateData(allocator, "s_if", S.If, origData, loc), - S.Import => Stmt.allocateData(allocator, "s_import", S.Import, origData, loc), - S.Label => Stmt.allocateData(allocator, "s_label", S.Label, origData, loc), - S.Local => Stmt.allocateData(allocator, 
"s_local", S.Local, origData, loc), - S.Namespace => Stmt.allocateData(allocator, "s_namespace", S.Namespace, origData, loc), - S.Return => Stmt.allocateData(allocator, "s_return", S.Return, origData, loc), - S.Switch => Stmt.allocateData(allocator, "s_switch", S.Switch, origData, loc), - S.Throw => Stmt.allocateData(allocator, "s_throw", S.Throw, origData, loc), - S.Try => Stmt.allocateData(allocator, "s_try", S.Try, origData, loc), - S.TypeScript => Stmt{ .loc = loc, .data = Data{ .s_type_script = S.TypeScript{} } }, - S.While => Stmt.allocateData(allocator, "s_while", S.While, origData, loc), - S.With => Stmt.allocateData(allocator, "s_with", S.With, origData, loc), - else => @compileError("Invalid type in Stmt.init"), - }; - } - - pub fn allocateExpr(allocator: std.mem.Allocator, expr: Expr) Stmt { - return Stmt.allocate(allocator, S.SExpr, S.SExpr{ .value = expr }, expr.loc); - } - - pub const Tag = enum { - s_block, - s_break, - s_class, - s_comment, - s_continue, - s_directive, - s_do_while, - s_enum, - s_export_clause, - s_export_default, - s_export_equals, - s_export_from, - s_export_star, - s_expr, - s_for_in, - s_for_of, - s_for, - s_function, - s_if, - s_import, - s_label, - s_local, - s_namespace, - s_return, - s_switch, - s_throw, - s_try, - s_while, - s_with, - s_type_script, - s_empty, - s_debugger, - s_lazy_export, - - pub fn jsonStringify(self: @This(), writer: anytype) !void { - return try writer.write(@tagName(self)); - } - - pub fn isExportLike(tag: Tag) bool { - return switch (tag) { - .s_export_clause, .s_export_default, .s_export_equals, .s_export_from, .s_export_star, .s_empty => true, - else => false, - }; - } - }; - - pub const Data = union(Tag) { - s_block: *S.Block, - s_break: *S.Break, - s_class: *S.Class, - s_comment: *S.Comment, - s_continue: *S.Continue, - s_directive: *S.Directive, - s_do_while: *S.DoWhile, - s_enum: *S.Enum, - s_export_clause: *S.ExportClause, - s_export_default: *S.ExportDefault, - s_export_equals: *S.ExportEquals, - s_export_from: *S.ExportFrom, - s_export_star: *S.ExportStar, - s_expr: *S.SExpr, - s_for_in: *S.ForIn, - s_for_of: *S.ForOf, - s_for: *S.For, - s_function: *S.Function, - s_if: *S.If, - s_import: *S.Import, - s_label: *S.Label, - s_local: *S.Local, - s_namespace: *S.Namespace, - s_return: *S.Return, - s_switch: *S.Switch, - s_throw: *S.Throw, - s_try: *S.Try, - s_while: *S.While, - s_with: *S.With, - - s_type_script: S.TypeScript, - s_empty: S.Empty, // special case, its a zero value type - s_debugger: S.Debugger, - - s_lazy_export: *Expr.Data, - - comptime { - if (@sizeOf(Stmt) > 24) { - @compileLog("Expected Stmt to be <= 24 bytes, but it is", @sizeOf(Stmt), " bytes"); - } - } - - pub const Store = struct { - const StoreType = NewStore(&.{ - S.Block, - S.Break, - S.Class, - S.Comment, - S.Continue, - S.Directive, - S.DoWhile, - S.Enum, - S.ExportClause, - S.ExportDefault, - S.ExportEquals, - S.ExportFrom, - S.ExportStar, - S.SExpr, - S.ForIn, - S.ForOf, - S.For, - S.Function, - S.If, - S.Import, - S.Label, - S.Local, - S.Namespace, - S.Return, - S.Switch, - S.Throw, - S.Try, - S.While, - S.With, - }, 128); - - pub threadlocal var instance: ?*StoreType = null; - pub threadlocal var memory_allocator: ?*ASTMemoryAllocator = null; - pub threadlocal var disable_reset = false; - - pub fn create() void { - if (instance != null or memory_allocator != null) { - return; - } - - instance = StoreType.init(); - } - - /// create || reset - pub fn begin() void { - if (memory_allocator != null) return; - if (instance == null) { - 
create(); - return; - } - - if (!disable_reset) - instance.?.reset(); - } - - pub fn reset() void { - if (disable_reset or memory_allocator != null) return; - instance.?.reset(); - } - - pub fn deinit() void { - if (instance == null or memory_allocator != null) return; - instance.?.deinit(); - instance = null; - } - - pub inline fn assert() void { - if (comptime Environment.allow_assert) { - if (instance == null and memory_allocator == null) - bun.unreachablePanic("Store must be init'd", .{}); - } - } - - pub fn append(comptime T: type, value: T) *T { - if (memory_allocator) |allocator| { - return allocator.append(T, value); - } - - Disabler.assert(); - return instance.?.append(T, value); - } - }; - }; - - pub fn StoredData(tag: Tag) type { - const T = @FieldType(Data, tag); - return switch (@typeInfo(T)) { - .pointer => |ptr| ptr.child, - else => T, - }; - } - - pub fn caresAboutScope(self: *Stmt) bool { - return switch (self.data) { - .s_block, .s_empty, .s_debugger, .s_expr, .s_if, .s_for, .s_for_in, .s_for_of, .s_do_while, .s_while, .s_with, .s_try, .s_switch, .s_return, .s_throw, .s_break, .s_continue, .s_directive => { - return false; - }, - - .s_local => |local| { - return local.kind != .k_var; - }, - else => { - return true; - }, - }; - } -}; - -pub const Expr = struct { - loc: logger.Loc, - data: Data, - - pub const empty = Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = logger.Loc.Empty }; - - pub fn isAnonymousNamed(expr: Expr) bool { - return switch (expr.data) { - .e_arrow => true, - .e_function => |func| func.func.name == null, - .e_class => |class| class.class_name == null, - else => false, - }; - } - - pub fn clone(this: Expr, allocator: std.mem.Allocator) !Expr { - return .{ - .loc = this.loc, - .data = try this.data.clone(allocator), - }; - } - - pub fn deepClone(this: Expr, allocator: std.mem.Allocator) anyerror!Expr { - return .{ - .loc = this.loc, - .data = try this.data.deepClone(allocator), - }; - } - - pub fn wrapInArrow(this: Expr, allocator: std.mem.Allocator) !Expr { - var stmts = try allocator.alloc(Stmt, 1); - stmts[0] = Stmt.alloc(S.Return, S.Return{ .value = this }, this.loc); - - return Expr.init(E.Arrow, E.Arrow{ - .args = &.{}, - .body = .{ - .loc = this.loc, - .stmts = stmts, - }, - }, this.loc); - } - - pub fn canBeInlinedFromPropertyAccess(this: Expr) bool { - return switch (this.data) { - // if the array has a spread we must keep it - // https://github.com/oven-sh/bun/issues/2594 - .e_spread => false, - - .e_missing => false, - else => true, - }; - } - - pub fn canBeConstValue(this: Expr) bool { - return this.data.canBeConstValue(); - } - - pub fn canBeMoved(expr: Expr) bool { - return expr.data.canBeMoved(); - } - - pub fn unwrapInlined(expr: Expr) Expr { - if (expr.data.as(.e_inlined_enum)) |inlined| return inlined.value; - return expr; - } - - pub fn fromBlob( - blob: *const JSC.WebCore.Blob, - allocator: std.mem.Allocator, - mime_type_: ?MimeType, - log: *logger.Log, - loc: logger.Loc, - ) !Expr { - const bytes = blob.sharedView(); - - const mime_type = mime_type_ orelse MimeType.init(blob.content_type, null, null); - - if (mime_type.category == .json) { - const source = &logger.Source.initPathString("fetch.json", bytes); - var out_expr = JSONParser.parseForMacro(source, log, allocator) catch { - return error.MacroFailed; - }; - out_expr.loc = loc; - - switch (out_expr.data) { - .e_object => { - out_expr.data.e_object.was_originally_macro = true; - }, - .e_array => { - out_expr.data.e_array.was_originally_macro = true; - }, - else => {}, - } - 
- return out_expr; - } - - if (mime_type.category.isTextLike()) { - var output = MutableString.initEmpty(allocator); - output = try JSPrinter.quoteForJSON(bytes, output, true); - var list = output.toOwnedSlice(); - // remove the quotes - if (list.len > 0) { - list = list[1 .. list.len - 1]; - } - return Expr.init(E.String, E.String.init(list), loc); - } - - return Expr.init( - E.String, - E.String{ - .data = try JSC.ZigString.init(bytes).toBase64DataURL(allocator), - }, - loc, - ); - } - - pub inline fn initIdentifier(ref: Ref, loc: logger.Loc) Expr { - return Expr{ - .loc = loc, - .data = .{ - .e_identifier = E.Identifier.init(ref), - }, - }; - } - - pub fn toEmpty(expr: Expr) Expr { - return Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = expr.loc }; - } - pub fn isEmpty(expr: Expr) bool { - return expr.data == .e_missing; - } - pub const Query = struct { expr: Expr, loc: logger.Loc, i: u32 = 0 }; - - pub fn hasAnyPropertyNamed(expr: *const Expr, comptime names: []const string) bool { - if (std.meta.activeTag(expr.data) != .e_object) return false; - const obj = expr.data.e_object; - if (obj.properties.len == 0) return false; - - for (obj.properties.slice()) |prop| { - if (prop.value == null) continue; - const key = prop.key orelse continue; - if (std.meta.activeTag(key.data) != .e_string) continue; - const key_str = key.data.e_string; - if (strings.eqlAnyComptime(key_str.data, names)) return true; - } - - return false; - } - - pub fn toJS(this: Expr, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue { - return this.data.toJS(allocator, globalObject); - } - - pub inline fn isArray(this: *const Expr) bool { - return this.data == .e_array; - } - - pub inline fn isObject(this: *const Expr) bool { - return this.data == .e_object; - } - - pub fn get(expr: *const Expr, name: string) ?Expr { - return if (asProperty(expr, name)) |query| query.expr else null; - } - - /// Only use this for pretty-printing JSON. Do not use in transpiler. - /// - /// This does not handle edgecases like `-1` or stringifying arbitrary property lookups. - pub fn getByIndex(expr: *const Expr, index: u32, index_str: string, allocator: std.mem.Allocator) ?Expr { - switch (expr.data) { - .e_array => |array| { - if (index >= array.items.len) return null; - return array.items.slice()[index]; - }, - .e_object => |object| { - for (object.properties.sliceConst()) |*prop| { - const key = &(prop.key orelse continue); - switch (key.data) { - .e_string => |str| { - if (str.eql(string, index_str)) { - return prop.value; - } - }, - .e_number => |num| { - if (num.toU32() == index) { - return prop.value; - } - }, - else => {}, - } - } - - return null; - }, - .e_string => |str| { - if (str.len() > index) { - var slice = str.slice(allocator); - // TODO: this is not correct since .length refers to UTF-16 code units and not UTF-8 bytes - // However, since this is only used in the JSON prettifier for `bun pm view`, it's not a blocker for shipping. - if (slice.len > index) { - return Expr.init(E.String, .{ .data = slice[index..][0..1] }, expr.loc); - } - } - }, - else => {}, - } - - return null; - } - - /// This supports lookups like: - /// - `foo` - /// - `foo.bar` - /// - `foo[123]` - /// - `foo[123].bar` - /// - `foo[123].bar[456]` - /// - `foo[123].bar[456].baz` - /// - `foo[123].bar[456].baz.qux` // etc. - /// - /// This is not intended for use by the transpiler, instead by pretty printing JSON. 
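- /// For example, on a parsed package.json, looking up "workspaces.packages[0]"
- /// returns the first workspace entry (this is the lookup behind the JSON
- /// pretty-printing for `bun pm view` mentioned above).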
- pub fn getPathMayBeIndex(expr: *const Expr, name: string) ?Expr { - if (name.len == 0) { - return null; - } - - if (strings.indexOfAny(name, "[.")) |idx| { - switch (name[idx]) { - '[' => { - const end_idx = strings.indexOfChar(name, ']') orelse return null; - var base_expr = expr; - if (idx > 0) { - const key = name[0..idx]; - base_expr = &(base_expr.get(key) orelse return null); - } - - const index_str = name[idx + 1 .. end_idx]; - const index = std.fmt.parseInt(u32, index_str, 10) catch return null; - const rest = if (name.len > end_idx) name[end_idx + 1 ..] else ""; - const result = &(base_expr.getByIndex(index, index_str, bun.default_allocator) orelse return null); - if (rest.len > 0) return result.getPathMayBeIndex(rest); - return result.*; - }, - '.' => { - const key = name[0..idx]; - const sub_expr = &(expr.get(key) orelse return null); - const subpath = if (name.len > idx) name[idx + 1 ..] else ""; - if (subpath.len > 0) { - return sub_expr.getPathMayBeIndex(subpath); - } - - return sub_expr.*; - }, - else => unreachable, - } - } - - return expr.get(name); - } - - /// Don't use this if you care about performance. - /// - /// Sets the value of a property, creating it if it doesn't exist. - /// `expr` must be an object. - pub fn set(expr: *Expr, allocator: std.mem.Allocator, name: string, value: Expr) OOM!void { - bun.assertWithLocation(expr.isObject(), @src()); - for (0..expr.data.e_object.properties.len) |i| { - const prop = &expr.data.e_object.properties.ptr[i]; - const key = prop.key orelse continue; - if (std.meta.activeTag(key.data) != .e_string) continue; - if (key.data.e_string.eql(string, name)) { - prop.value = value; - return; - } - } - - var new_props = expr.data.e_object.properties.listManaged(allocator); - try new_props.append(.{ - .key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty), - .value = value, - }); - - expr.data.e_object.properties = BabyList(G.Property).fromList(new_props); - } - - /// Don't use this if you care about performance. - /// - /// Sets the value of a property to a string, creating it if it doesn't exist. - /// `expr` must be an object. 
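- /// For example, `try expr.setString(allocator, "version", "1.2.3")` overwrites
- /// an existing "version" property, or appends one to the object's property list.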
- pub fn setString(expr: *Expr, allocator: std.mem.Allocator, name: string, value: string) OOM!void { - bun.assertWithLocation(expr.isObject(), @src()); - for (0..expr.data.e_object.properties.len) |i| { - const prop = &expr.data.e_object.properties.ptr[i]; - const key = prop.key orelse continue; - if (std.meta.activeTag(key.data) != .e_string) continue; - if (key.data.e_string.eql(string, name)) { - prop.value = Expr.init(E.String, .{ .data = value }, logger.Loc.Empty); - return; - } - } - - var new_props = expr.data.e_object.properties.listManaged(allocator); - try new_props.append(.{ - .key = Expr.init(E.String, .{ .data = name }, logger.Loc.Empty), - .value = Expr.init(E.String, .{ .data = value }, logger.Loc.Empty), - }); - - expr.data.e_object.properties = BabyList(G.Property).fromList(new_props); - } - - pub fn getObject(expr: *const Expr, name: string) ?Expr { - if (expr.asProperty(name)) |query| { - if (query.expr.isObject()) { - return query.expr; - } - } - return null; - } - - pub fn getString(expr: *const Expr, allocator: std.mem.Allocator, name: string) OOM!?struct { string, logger.Loc } { - if (asProperty(expr, name)) |q| { - if (q.expr.asString(allocator)) |str| { - return .{ - str, - q.expr.loc, - }; - } - } - return null; - } - - pub fn getNumber(expr: *const Expr, name: string) ?struct { f64, logger.Loc } { - if (asProperty(expr, name)) |q| { - if (q.expr.asNumber()) |num| { - return .{ - num, - q.expr.loc, - }; - } - } - return null; - } - - pub fn getStringCloned(expr: *const Expr, allocator: std.mem.Allocator, name: string) OOM!?string { - return if (asProperty(expr, name)) |q| q.expr.asStringCloned(allocator) else null; - } - - pub fn getStringClonedZ(expr: *const Expr, allocator: std.mem.Allocator, name: string) OOM!?stringZ { - return if (asProperty(expr, name)) |q| q.expr.asStringZ(allocator) else null; - } - - pub fn getArray(expr: *const Expr, name: string) ?ArrayIterator { - return if (asProperty(expr, name)) |q| q.expr.asArray() else null; - } - - pub fn getRope(self: *const Expr, rope: *const E.Object.Rope) ?E.Object.RopeQuery { - if (self.get(rope.head.data.e_string.data)) |existing| { - switch (existing.data) { - .e_array => |array| { - if (rope.next) |next| { - if (array.items.last()) |end| { - return end.getRope(next); - } - } - - return E.Object.RopeQuery{ - .expr = existing, - .rope = rope, - }; - }, - .e_object => { - if (rope.next) |next| { - if (existing.getRope(next)) |end| { - return end; - } - } - - return E.Object.RopeQuery{ - .expr = existing, - .rope = rope, - }; - }, - else => return E.Object.RopeQuery{ - .expr = existing, - .rope = rope, - }, - } - } - - return null; - } - - // Making this comptime bloats the binary and doesn't seem to impact runtime performance. 
- pub fn asProperty(expr: *const Expr, name: string) ?Query { - if (std.meta.activeTag(expr.data) != .e_object) return null; - const obj = expr.data.e_object; - if (obj.properties.len == 0) return null; - - return obj.asProperty(name); - } - - pub fn asPropertyStringMap(expr: *const Expr, name: string, allocator: std.mem.Allocator) ?*bun.StringArrayHashMap(string) { - if (std.meta.activeTag(expr.data) != .e_object) return null; - const obj_ = expr.data.e_object; - if (obj_.properties.len == 0) return null; - const query = obj_.asProperty(name) orelse return null; - if (query.expr.data != .e_object) return null; - - const obj = query.expr.data.e_object; - var count: usize = 0; - for (obj.properties.slice()) |prop| { - const key = prop.key.?.asString(allocator) orelse continue; - const value = prop.value.?.asString(allocator) orelse continue; - count += @as(usize, @intFromBool(key.len > 0 and value.len > 0)); - } - - if (count == 0) return null; - var map = bun.StringArrayHashMap(string).init(allocator); - map.ensureUnusedCapacity(count) catch return null; - - for (obj.properties.slice()) |prop| { - const key = prop.key.?.asString(allocator) orelse continue; - const value = prop.value.?.asString(allocator) orelse continue; - - if (!(key.len > 0 and value.len > 0)) continue; - - map.putAssumeCapacity(key, value); - } - - const ptr = allocator.create(bun.StringArrayHashMap(string)) catch unreachable; - ptr.* = map; - return ptr; - } - - pub const ArrayIterator = struct { - array: *const E.Array, - index: u32, - - pub fn next(this: *ArrayIterator) ?Expr { - if (this.index >= this.array.items.len) { - return null; - } - defer this.index += 1; - return this.array.items.ptr[this.index]; - } - }; - - pub fn asArray(expr: *const Expr) ?ArrayIterator { - if (std.meta.activeTag(expr.data) != .e_array) return null; - const array = expr.data.e_array; - if (array.items.len == 0) return null; - - return ArrayIterator{ .array = array, .index = 0 }; - } - - pub inline fn asUtf8StringLiteral(expr: *const Expr) ?string { - if (expr.data == .e_string) { - bun.debugAssert(expr.data.e_string.next == null); - return expr.data.e_string.data; - } - return null; - } - - pub inline fn asStringLiteral(expr: *const Expr, allocator: std.mem.Allocator) ?string { - if (std.meta.activeTag(expr.data) != .e_string) return null; - return expr.data.e_string.string(allocator) catch null; - } - - pub inline fn isString(expr: *const Expr) bool { - return switch (expr.data) { - .e_string => true, - else => false, - }; - } - - pub inline fn asString(expr: *const Expr, allocator: std.mem.Allocator) ?string { - switch (expr.data) { - .e_string => |str| return str.string(allocator) catch bun.outOfMemory(), - else => return null, - } - } - pub inline fn asStringHash(expr: *const Expr, allocator: std.mem.Allocator, comptime hash_fn: *const fn (buf: []const u8) callconv(.Inline) u64) OOM!?u64 { - switch (expr.data) { - .e_string => |str| { - if (str.isUTF8()) return hash_fn(str.data); - const utf8_str = try str.string(allocator); - defer allocator.free(utf8_str); - return hash_fn(utf8_str); - }, - else => return null, - } - } - - pub inline fn asStringCloned(expr: *const Expr, allocator: std.mem.Allocator) OOM!?string { - switch (expr.data) { - .e_string => |str| return try str.stringCloned(allocator), - else => return null, - } - } - - pub inline fn asStringZ(expr: *const Expr, allocator: std.mem.Allocator) OOM!?stringZ { - switch (expr.data) { - .e_string => |str| return try str.stringZ(allocator), - else => return null, - } - } - - 
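- /// Returns the value of a boolean literal, or null if the expression is not `e_boolean`.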
pub fn asBool( - expr: *const Expr, - ) ?bool { - if (std.meta.activeTag(expr.data) != .e_boolean) return null; - - return expr.data.e_boolean.value; - } - - pub fn asNumber(expr: *const Expr) ?f64 { - if (expr.data != .e_number) return null; - - return expr.data.e_number.value; - } - - pub const EFlags = enum { none, ts_decorator }; - - const Serializable = struct { - type: Tag, - object: string, - value: Data, - loc: logger.Loc, - }; - - pub fn isMissing(a: *const Expr) bool { - return std.meta.activeTag(a.data) == Expr.Tag.e_missing; - } - - // The goal of this function is to "rotate" the AST if it's possible to use the - // left-associative property of the operator to avoid unnecessary parentheses. - // - // When using this, make absolutely sure that the operator is actually - // associative. For example, the "-" operator is not associative for - // floating-point numbers. - pub fn joinWithLeftAssociativeOp( - comptime op: Op.Code, - a: Expr, - b: Expr, - allocator: std.mem.Allocator, - ) Expr { - // "(a, b) op c" => "a, b op c" - switch (a.data) { - .e_binary => |comma| { - if (comma.op == .bin_comma) { - comma.right = joinWithLeftAssociativeOp(op, comma.right, b, allocator); - } - }, - else => {}, - } - - // "a op (b op c)" => "(a op b) op c" - // "a op (b op (c op d))" => "((a op b) op c) op d" - switch (b.data) { - .e_binary => |binary| { - if (binary.op == op) { - return joinWithLeftAssociativeOp( - op, - joinWithLeftAssociativeOp(op, a, binary.left, allocator), - binary.right, - allocator, - ); - } - }, - else => {}, - } - - // "a op b" => "a op b" - // "(a op b) op c" => "(a op b) op c" - return Expr.init(E.Binary, E.Binary{ .op = op, .left = a, .right = b }, a.loc); - } - - pub fn joinWithComma(a: Expr, b: Expr, _: std.mem.Allocator) Expr { - if (a.isMissing()) { - return b; - } - - if (b.isMissing()) { - return a; - } - - return Expr.init(E.Binary, E.Binary{ .op = .bin_comma, .left = a, .right = b }, a.loc); - } - - pub fn joinAllWithComma(all: []Expr, allocator: std.mem.Allocator) Expr { - bun.assert(all.len > 0); - switch (all.len) { - 1 => { - return all[0]; - }, - 2 => { - return Expr.joinWithComma(all[0], all[1], allocator); - }, - else => { - var expr = all[0]; - for (1..all.len) |i| { - expr = Expr.joinWithComma(expr, all[i], allocator); - } - return expr; - }, - } - } - - pub fn joinAllWithCommaCallback(all: []Expr, comptime Context: type, ctx: Context, comptime callback: (fn (ctx: anytype, expr: Expr) ?Expr), allocator: std.mem.Allocator) ?Expr { - switch (all.len) { - 0 => return null, - 1 => { - return callback(ctx, all[0]); - }, - 2 => { - return Expr.joinWithComma( - callback(ctx, all[0]) orelse Expr{ - .data = .{ .e_missing = .{} }, - .loc = all[0].loc, - }, - callback(ctx, all[1]) orelse Expr{ - .data = .{ .e_missing = .{} }, - .loc = all[1].loc, - }, - allocator, - ); - }, - else => { - var i: usize = 1; - var expr = callback(ctx, all[0]) orelse Expr{ - .data = .{ .e_missing = .{} }, - .loc = all[0].loc, - }; - - while (i < all.len) : (i += 1) { - expr = Expr.joinWithComma(expr, callback(ctx, all[i]) orelse Expr{ - .data = .{ .e_missing = .{} }, - .loc = all[i].loc, - }, allocator); - } - - return expr; - }, - } - } - - pub fn jsonStringify(self: *const @This(), writer: anytype) !void { - return try writer.write(Serializable{ .type = std.meta.activeTag(self.data), .object = "expr", .value = self.data, .loc = self.loc }); - } - - pub fn extractNumericValues(left: Expr.Data, right: Expr.Data) ?[2]f64 { - return .{ - left.extractNumericValue() orelse return null, 
- right.extractNumericValue() orelse return null, - }; - } - - pub var icount: usize = 0; - - // We don't need to dynamically allocate booleans - var true_bool = E.Boolean{ .value = true }; - var false_bool = E.Boolean{ .value = false }; - var bool_values = [_]*E.Boolean{ &false_bool, &true_bool }; - - /// When the lifetime of an Expr.Data's pointer must exist longer than reset() is called, use this function. - /// Be careful to free the memory (or use an allocator that does it for you) - /// Also, prefer Expr.init or Expr.alloc when possible. This will be slower. - pub fn allocate(allocator: std.mem.Allocator, comptime Type: type, st: Type, loc: logger.Loc) Expr { - icount += 1; - Data.Store.assert(); - - switch (Type) { - E.Array => { - return Expr{ - .loc = loc, - .data = Data{ - .e_array = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Class => { - return Expr{ - .loc = loc, - .data = Data{ - .e_class = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Unary => { - return Expr{ - .loc = loc, - .data = Data{ - .e_unary = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Binary => { - return Expr{ - .loc = loc, - .data = Data{ - .e_binary = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.This => { - return Expr{ - .loc = loc, - .data = Data{ - .e_this = st, - }, - }; - }, - E.Boolean => { - return Expr{ - .loc = loc, - .data = Data{ - .e_boolean = st, - }, - }; - }, - E.Super => { - return Expr{ - .loc = loc, - .data = Data{ - .e_super = st, - }, - }; - }, - E.Null => { - return Expr{ - .loc = loc, - .data = Data{ - .e_null = st, - }, - }; - }, - E.Undefined => { - return Expr{ - .loc = loc, - .data = Data{ - .e_undefined = st, - }, - }; - }, - E.New => { - return Expr{ - .loc = loc, - .data = Data{ - .e_new = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.NewTarget => { - return Expr{ - .loc = loc, - .data = Data{ - .e_new_target = st, - }, - }; - }, - E.Function => { - return Expr{ - .loc = loc, - .data = Data{ - .e_function = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.ImportMeta => { - return Expr{ - .loc = loc, - .data = Data{ - .e_import_meta = st, - }, - }; - }, - E.Call => { - return Expr{ - .loc = loc, - .data = Data{ - .e_call = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Dot => { - return Expr{ - .loc = loc, - .data = Data{ - .e_dot = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Index => { - return Expr{ - .loc = loc, - .data = Data{ - .e_index = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Arrow => { - return Expr{ - .loc = loc, - .data = Data{ - .e_arrow = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Identifier => { - return Expr{ - .loc = loc, - .data = Data{ - .e_identifier = E.Identifier{ - .ref = st.ref, - .must_keep_due_to_with_stmt = st.must_keep_due_to_with_stmt, - .can_be_removed_if_unused = st.can_be_removed_if_unused, - 
.call_can_be_unwrapped_if_unused = st.call_can_be_unwrapped_if_unused, - }, - }, - }; - }, - E.ImportIdentifier => { - return Expr{ - .loc = loc, - .data = Data{ - .e_import_identifier = .{ - .ref = st.ref, - .was_originally_identifier = st.was_originally_identifier, - }, - }, - }; - }, - E.CommonJSExportIdentifier => { - return Expr{ - .loc = loc, - .data = Data{ - .e_commonjs_export_identifier = .{ - .ref = st.ref, - }, - }, - }; - }, - - E.PrivateIdentifier => { - return Expr{ - .loc = loc, - .data = Data{ - .e_private_identifier = st, - }, - }; - }, - E.JSXElement => { - return Expr{ - .loc = loc, - .data = Data{ - .e_jsx_element = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Missing => { - return Expr{ .loc = loc, .data = Data{ .e_missing = E.Missing{} } }; - }, - E.Number => { - return Expr{ - .loc = loc, - .data = Data{ - .e_number = st, - }, - }; - }, - E.BigInt => { - return Expr{ - .loc = loc, - .data = Data{ - .e_big_int = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Object => { - return Expr{ - .loc = loc, - .data = Data{ - .e_object = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Spread => { - return Expr{ - .loc = loc, - .data = Data{ - .e_spread = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.String => { - if (comptime Environment.isDebug) { - // Sanity check: assert string is not a null ptr - if (st.data.len > 0 and st.isUTF8()) { - bun.assert(@intFromPtr(st.data.ptr) > 0); - } - } - return Expr{ - .loc = loc, - .data = Data{ - .e_string = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - - E.Template => { - return Expr{ - .loc = loc, - .data = Data{ - .e_template = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.RegExp => { - return Expr{ - .loc = loc, - .data = Data{ - .e_reg_exp = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Await => { - return Expr{ - .loc = loc, - .data = Data{ - .e_await = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.Yield => { - return Expr{ - .loc = loc, - .data = Data{ - .e_yield = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.If => { - return Expr{ - .loc = loc, - .data = Data{ - .e_if = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.RequireResolveString => { - return Expr{ - .loc = loc, - .data = Data{ - .e_require_resolve_string = st, - }, - }; - }, - E.Import => { - return Expr{ - .loc = loc, - .data = Data{ - .e_import = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st; - break :brk item; - }, - }, - }; - }, - E.RequireString => { - return Expr{ - .loc = loc, - .data = Data{ - .e_require_string = st, - }, - }; - }, - *E.String => { - return Expr{ - .loc = loc, - .data = Data{ - .e_string = brk: { - const item = allocator.create(Type) catch unreachable; - item.* = st.*; - break :brk item; - }, - }, - }; - }, - - else => { - @compileError("Invalid type passed to Expr.init: " 
++ @typeName(Type)); - }, - } - } - - pub const Disabler = bun.DebugOnlyDisabler(@This()); - - pub fn init(comptime Type: type, st: Type, loc: logger.Loc) Expr { - icount += 1; - Data.Store.assert(); - - switch (Type) { - E.NameOfSymbol => { - return Expr{ - .loc = loc, - .data = Data{ - .e_name_of_symbol = Data.Store.append(E.NameOfSymbol, st), - }, - }; - }, - E.Array => { - return Expr{ - .loc = loc, - .data = Data{ - .e_array = Data.Store.append(Type, st), - }, - }; - }, - E.Class => { - return Expr{ - .loc = loc, - .data = Data{ - .e_class = Data.Store.append(Type, st), - }, - }; - }, - E.Unary => { - return Expr{ - .loc = loc, - .data = Data{ - .e_unary = Data.Store.append(Type, st), - }, - }; - }, - E.Binary => { - return Expr{ - .loc = loc, - .data = Data{ - .e_binary = Data.Store.append(Type, st), - }, - }; - }, - E.This => { - return Expr{ - .loc = loc, - .data = Data{ - .e_this = st, - }, - }; - }, - E.Boolean => { - return Expr{ - .loc = loc, - .data = Data{ - .e_boolean = st, - }, - }; - }, - E.Super => { - return Expr{ - .loc = loc, - .data = Data{ - .e_super = st, - }, - }; - }, - E.Null => { - return Expr{ - .loc = loc, - .data = Data{ - .e_null = st, - }, - }; - }, - E.Undefined => { - return Expr{ - .loc = loc, - .data = Data{ - .e_undefined = st, - }, - }; - }, - E.New => { - return Expr{ - .loc = loc, - .data = Data{ - .e_new = Data.Store.append(Type, st), - }, - }; - }, - E.NewTarget => { - return Expr{ - .loc = loc, - .data = Data{ - .e_new_target = st, - }, - }; - }, - E.Function => { - return Expr{ - .loc = loc, - .data = Data{ - .e_function = Data.Store.append(Type, st), - }, - }; - }, - E.ImportMeta => { - return Expr{ - .loc = loc, - .data = Data{ - .e_import_meta = st, - }, - }; - }, - E.Call => { - return Expr{ - .loc = loc, - .data = Data{ - .e_call = Data.Store.append(Type, st), - }, - }; - }, - E.Dot => { - return Expr{ - .loc = loc, - .data = Data{ - .e_dot = Data.Store.append(Type, st), - }, - }; - }, - E.Index => { - return Expr{ - .loc = loc, - .data = Data{ - .e_index = Data.Store.append(Type, st), - }, - }; - }, - E.Arrow => { - return Expr{ - .loc = loc, - .data = Data{ - .e_arrow = Data.Store.append(Type, st), - }, - }; - }, - E.Identifier => { - return Expr{ - .loc = loc, - .data = Data{ - .e_identifier = E.Identifier{ - .ref = st.ref, - .must_keep_due_to_with_stmt = st.must_keep_due_to_with_stmt, - .can_be_removed_if_unused = st.can_be_removed_if_unused, - .call_can_be_unwrapped_if_unused = st.call_can_be_unwrapped_if_unused, - }, - }, - }; - }, - E.ImportIdentifier => { - return Expr{ - .loc = loc, - .data = Data{ - .e_import_identifier = .{ - .ref = st.ref, - .was_originally_identifier = st.was_originally_identifier, - }, - }, - }; - }, - E.CommonJSExportIdentifier => { - return Expr{ - .loc = loc, - .data = Data{ - .e_commonjs_export_identifier = .{ - .ref = st.ref, - .base = st.base, - }, - }, - }; - }, - E.PrivateIdentifier => { - return Expr{ - .loc = loc, - .data = Data{ - .e_private_identifier = st, - }, - }; - }, - E.JSXElement => { - return Expr{ - .loc = loc, - .data = Data{ - .e_jsx_element = Data.Store.append(Type, st), - }, - }; - }, - E.Missing => { - return Expr{ .loc = loc, .data = Data{ .e_missing = E.Missing{} } }; - }, - E.Number => { - return Expr{ - .loc = loc, - .data = Data{ - .e_number = st, - }, - }; - }, - E.BigInt => { - return Expr{ - .loc = loc, - .data = Data{ - .e_big_int = Data.Store.append(Type, st), - }, - }; - }, - E.Object => { - return Expr{ - .loc = loc, - .data = Data{ - .e_object = Data.Store.append(Type, 
st), - }, - }; - }, - E.Spread => { - return Expr{ - .loc = loc, - .data = Data{ - .e_spread = Data.Store.append(Type, st), - }, - }; - }, - E.String => { - if (comptime Environment.isDebug) { - // Sanity check: assert string is not a null ptr - if (st.data.len > 0 and st.isUTF8()) { - bun.assert(@intFromPtr(st.data.ptr) > 0); - } - } - return Expr{ - .loc = loc, - .data = Data{ - .e_string = Data.Store.append(Type, st), - }, - }; - }, - - E.Template => { - return Expr{ - .loc = loc, - .data = Data{ - .e_template = Data.Store.append(Type, st), - }, - }; - }, - E.RegExp => { - return Expr{ - .loc = loc, - .data = Data{ - .e_reg_exp = Data.Store.append(Type, st), - }, - }; - }, - E.Await => { - return Expr{ - .loc = loc, - .data = Data{ - .e_await = Data.Store.append(Type, st), - }, - }; - }, - E.Yield => { - return Expr{ - .loc = loc, - .data = Data{ - .e_yield = Data.Store.append(Type, st), - }, - }; - }, - E.If => { - return Expr{ - .loc = loc, - .data = Data{ - .e_if = Data.Store.append(Type, st), - }, - }; - }, - E.RequireResolveString => { - return Expr{ - .loc = loc, - .data = Data{ - .e_require_resolve_string = st, - }, - }; - }, - E.Import => { - return Expr{ - .loc = loc, - .data = Data{ - .e_import = Data.Store.append(Type, st), - }, - }; - }, - E.RequireString => { - return Expr{ - .loc = loc, - .data = Data{ - .e_require_string = st, - }, - }; - }, - *E.String => { - return Expr{ - .loc = loc, - .data = Data{ - .e_string = Data.Store.append(@TypeOf(st.*), st.*), - }, - }; - }, - E.InlinedEnum => return .{ .loc = loc, .data = .{ - .e_inlined_enum = Data.Store.append(@TypeOf(st), st), - } }, - - else => { - @compileError("Invalid type passed to Expr.init: " ++ @typeName(Type)); - }, - } - } - - pub fn isPrimitiveLiteral(this: Expr) bool { - return @as(Tag, this.data).isPrimitiveLiteral(); - } - - pub fn isRef(this: Expr, ref: Ref) bool { - return switch (this.data) { - .e_import_identifier => |import_identifier| import_identifier.ref.eql(ref), - .e_identifier => |ident| ident.ref.eql(ref), - else => false, - }; - } - - pub const Tag = enum { - e_array, - e_unary, - e_binary, - e_class, - e_new, - e_function, - e_call, - e_dot, - e_index, - e_arrow, - e_jsx_element, - e_object, - e_spread, - e_template, - e_reg_exp, - e_await, - e_yield, - e_if, - e_import, - e_identifier, - e_import_identifier, - e_private_identifier, - e_commonjs_export_identifier, - e_boolean, - e_number, - e_big_int, - e_string, - e_require_string, - e_require_resolve_string, - e_require_call_target, - e_require_resolve_call_target, - e_missing, - e_this, - e_super, - e_null, - e_undefined, - e_new_target, - e_import_meta, - e_import_meta_main, - e_require_main, - e_special, - e_inlined_enum, - e_name_of_symbol, - - // object, regex and array may have had side effects - pub fn isPrimitiveLiteral(tag: Tag) bool { - return switch (tag) { - .e_null, .e_undefined, .e_string, .e_boolean, .e_number, .e_big_int => true, - else => false, - }; - } - - pub fn typeof(tag: Tag) ?string { - return switch (tag) { - .e_array, .e_object, .e_null, .e_reg_exp => "object", - .e_undefined => "undefined", - .e_boolean => "boolean", - .e_number => "number", - .e_big_int => "bigint", - .e_string => "string", - .e_class, .e_function, .e_arrow => "function", - else => null, - }; - } - - pub fn format(tag: Tag, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - try switch (tag) { - .e_string => writer.writeAll("string"), - .e_array => writer.writeAll("array"), - .e_unary => writer.writeAll("unary"), - .e_binary 
=> writer.writeAll("binary"), - .e_boolean => writer.writeAll("boolean"), - .e_super => writer.writeAll("super"), - .e_null => writer.writeAll("null"), - .e_undefined => writer.writeAll("undefined"), - .e_new => writer.writeAll("new"), - .e_function => writer.writeAll("function"), - .e_new_target => writer.writeAll("new target"), - .e_import_meta => writer.writeAll("import.meta"), - .e_call => writer.writeAll("call"), - .e_dot => writer.writeAll("dot"), - .e_index => writer.writeAll("index"), - .e_arrow => writer.writeAll("arrow"), - .e_identifier => writer.writeAll("identifier"), - .e_import_identifier => writer.writeAll("import identifier"), - .e_private_identifier => writer.writeAll("#privateIdentifier"), - .e_jsx_element => writer.writeAll(""), - .e_missing => writer.writeAll(""), - .e_number => writer.writeAll("number"), - .e_big_int => writer.writeAll("BigInt"), - .e_object => writer.writeAll("object"), - .e_spread => writer.writeAll("..."), - .e_template => writer.writeAll("template"), - .e_reg_exp => writer.writeAll("regexp"), - .e_await => writer.writeAll("await"), - .e_yield => writer.writeAll("yield"), - .e_if => writer.writeAll("if"), - .e_require_resolve_string => writer.writeAll("require_or_require_resolve"), - .e_import => writer.writeAll("import"), - .e_this => writer.writeAll("this"), - .e_class => writer.writeAll("class"), - .e_require_string => writer.writeAll("require"), - else => writer.writeAll(@tagName(tag)), - }; - } - - pub fn jsonStringify(self: @This(), writer: anytype) !void { - return try writer.write(@tagName(self)); - } - - pub fn isArray(self: Tag) bool { - switch (self) { - .e_array => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isUnary(self: Tag) bool { - switch (self) { - .e_unary => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isBinary(self: Tag) bool { - switch (self) { - .e_binary => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isThis(self: Tag) bool { - switch (self) { - .e_this => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isClass(self: Tag) bool { - switch (self) { - .e_class => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isBoolean(self: Tag) bool { - switch (self) { - .e_boolean => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isSuper(self: Tag) bool { - switch (self) { - .e_super => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isNull(self: Tag) bool { - switch (self) { - .e_null => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isUndefined(self: Tag) bool { - switch (self) { - .e_undefined => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isNew(self: Tag) bool { - switch (self) { - .e_new => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isNewTarget(self: Tag) bool { - switch (self) { - .e_new_target => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isFunction(self: Tag) bool { - switch (self) { - .e_function => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isImportMeta(self: Tag) bool { - switch (self) { - .e_import_meta => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isCall(self: Tag) bool { - switch (self) { - .e_call => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isDot(self: Tag) bool { - switch (self) { - .e_dot => { - return true; - }, - else => { - return false; - }, - } - } 
- pub fn isIndex(self: Tag) bool { - switch (self) { - .e_index => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isArrow(self: Tag) bool { - switch (self) { - .e_arrow => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isIdentifier(self: Tag) bool { - switch (self) { - .e_identifier => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isImportIdentifier(self: Tag) bool { - switch (self) { - .e_import_identifier => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isPrivateIdentifier(self: Tag) bool { - switch (self) { - .e_private_identifier => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isJsxElement(self: Tag) bool { - switch (self) { - .e_jsx_element => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isMissing(self: Tag) bool { - switch (self) { - .e_missing => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isNumber(self: Tag) bool { - switch (self) { - .e_number => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isBigInt(self: Tag) bool { - switch (self) { - .e_big_int => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isObject(self: Tag) bool { - switch (self) { - .e_object => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isSpread(self: Tag) bool { - switch (self) { - .e_spread => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isString(self: Tag) bool { - switch (self) { - .e_string => { - return true; - }, - else => { - return false; - }, - } - } - - pub fn isTemplate(self: Tag) bool { - switch (self) { - .e_template => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isRegExp(self: Tag) bool { - switch (self) { - .e_reg_exp => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isAwait(self: Tag) bool { - switch (self) { - .e_await => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isYield(self: Tag) bool { - switch (self) { - .e_yield => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isIf(self: Tag) bool { - switch (self) { - .e_if => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isRequireResolveString(self: Tag) bool { - switch (self) { - .e_require_resolve_string => { - return true; - }, - else => { - return false; - }, - } - } - pub fn isImport(self: Tag) bool { - switch (self) { - .e_import => { - return true; - }, - else => { - return false; - }, - } - } - }; - - pub fn isBoolean(a: Expr) bool { - switch (a.data) { - .e_boolean => { - return true; - }, - - .e_if => |ex| { - return isBoolean(ex.yes) and isBoolean(ex.no); - }, - .e_unary => |ex| { - return ex.op == .un_not or ex.op == .un_delete; - }, - .e_binary => |ex| { - switch (ex.op) { - .bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne, .bin_lt, .bin_gt, .bin_le, .bin_ge, .bin_instanceof, .bin_in => { - return true; - }, - .bin_logical_or => { - return isBoolean(ex.left) and isBoolean(ex.right); - }, - .bin_logical_and => { - return isBoolean(ex.left) and isBoolean(ex.right); - }, - else => {}, - } - }, - else => {}, - } - - return false; - } - - pub fn assign(a: Expr, b: Expr) Expr { - return init(E.Binary, E.Binary{ - .op = .bin_assign, - .left = a, - .right = b, - }, a.loc); - } - pub inline fn at(expr: Expr, comptime Type: type, t: Type, _: std.mem.Allocator) Expr { - return init(Type, t, expr.loc); - } - - // Wraps the 
provided expression in the "!" prefix operator. The expression - // will potentially be simplified to avoid generating unnecessary extra "!" - // operators. For example, calling this with "!!x" will return "!x" instead - // of returning "!!!x". - pub fn not(expr: Expr, allocator: std.mem.Allocator) Expr { - return maybeSimplifyNot( - expr, - allocator, - ) orelse Expr.init( - E.Unary, - E.Unary{ - .op = .un_not, - .value = expr, - }, - expr.loc, - ); - } - - pub fn hasValueForThisInCall(expr: Expr) bool { - return switch (expr.data) { - .e_dot, .e_index => true, - else => false, - }; - } - - /// The given "expr" argument should be the operand of a "!" prefix operator - /// (i.e. the "x" in "!x"). This returns a simplified expression for the - /// whole operator (i.e. the "!x") if it can be simplified, or false if not. - /// It's separate from "Not()" above to avoid allocation on failure in case - /// that is undesired. - pub fn maybeSimplifyNot(expr: Expr, allocator: std.mem.Allocator) ?Expr { - switch (expr.data) { - .e_null, .e_undefined => { - return expr.at(E.Boolean, E.Boolean{ .value = true }, allocator); - }, - .e_boolean => |b| { - return expr.at(E.Boolean, E.Boolean{ .value = b.value }, allocator); - }, - .e_number => |n| { - return expr.at(E.Boolean, E.Boolean{ .value = (n.value == 0 or std.math.isNan(n.value)) }, allocator); - }, - .e_big_int => |b| { - return expr.at(E.Boolean, E.Boolean{ .value = strings.eqlComptime(b.value, "0") }, allocator); - }, - .e_function, - .e_arrow, - .e_reg_exp, - => { - return expr.at(E.Boolean, E.Boolean{ .value = false }, allocator); - }, - // "!!!a" => "!a" - .e_unary => |un| { - if (un.op == Op.Code.un_not and knownPrimitive(un.value) == .boolean) { - return un.value; - } - }, - .e_binary => |ex| { - // TODO: evaluate whether or not it is safe to do this mutation since it's modifying in-place. - // Make sure that these transformations are all safe for special values. - // For example, "!(a < b)" is not the same as "a >= b" if a and/or b are - // NaN (or undefined, or null, or possibly other problem cases too). 
- switch (ex.op) { - Op.Code.bin_loose_eq => { - // "!(a == b)" => "a != b" - ex.op = .bin_loose_ne; - return expr; - }, - Op.Code.bin_loose_ne => { - // "!(a != b)" => "a == b" - ex.op = .bin_loose_eq; - return expr; - }, - Op.Code.bin_strict_eq => { - // "!(a === b)" => "a !== b" - ex.op = .bin_strict_ne; - return expr; - }, - Op.Code.bin_strict_ne => { - // "!(a !== b)" => "a === b" - ex.op = .bin_strict_eq; - return expr; - }, - Op.Code.bin_comma => { - // "!(a, b)" => "a, !b" - ex.right = ex.right.not(allocator); - return expr; - }, - else => {}, - } - }, - .e_inlined_enum => |inlined| { - return maybeSimplifyNot(inlined.value, allocator); - }, - - else => {}, - } - - return null; - } - - pub fn toStringExprWithoutSideEffects(expr: Expr, allocator: std.mem.Allocator) ?Expr { - const unwrapped = expr.unwrapInlined(); - const slice = switch (unwrapped.data) { - .e_null => "null", - .e_string => return expr, - .e_undefined => "undefined", - .e_boolean => |data| if (data.value) "true" else "false", - .e_big_int => |bigint| bigint.value, - .e_number => |num| if (num.toString(allocator)) |str| - str - else - null, - .e_reg_exp => |regexp| regexp.value, - .e_dot => |dot| @as(?[]const u8, brk: { - // This is dumb but some JavaScript obfuscators use this to generate string literals - if (bun.strings.eqlComptime(dot.name, "constructor")) { - break :brk switch (dot.target.data) { - .e_string => "function String() { [native code] }", - .e_reg_exp => "function RegExp() { [native code] }", - else => null, - }; - } - break :brk null; - }), - else => null, - }; - return if (slice) |s| Expr.init(E.String, E.String.init(s), expr.loc) else null; - } - - pub fn isOptionalChain(self: *const @This()) bool { - return switch (self.data) { - .e_dot => self.data.e_dot.optional_chain != null, - .e_index => self.data.e_index.optional_chain != null, - .e_call => self.data.e_call.optional_chain != null, - else => false, - }; - } - - pub inline fn knownPrimitive(self: @This()) PrimitiveType { - return self.data.knownPrimitive(); - } - - pub const PrimitiveType = enum { - unknown, - mixed, - null, - undefined, - boolean, - number, - string, - bigint, - - pub const static = std.enums.EnumSet(PrimitiveType).init(.{ - .mixed = true, - .null = true, - .undefined = true, - .boolean = true, - .number = true, - .string = true, - // for our purposes, bigint is dynamic - // it is technically static though - // .@"bigint" = true, - }); - - pub inline fn isStatic(this: PrimitiveType) bool { - return static.contains(this); - } - - pub fn merge(left_known: PrimitiveType, right_known: PrimitiveType) PrimitiveType { - if (right_known == .unknown or left_known == .unknown) - return .unknown; - - return if (left_known == right_known) - left_known - else - .mixed; - } - }; - - pub const Data = union(Tag) { - e_array: *E.Array, - e_unary: *E.Unary, - e_binary: *E.Binary, - e_class: *E.Class, - - e_new: *E.New, - e_function: *E.Function, - e_call: *E.Call, - e_dot: *E.Dot, - e_index: *E.Index, - e_arrow: *E.Arrow, - - e_jsx_element: *E.JSXElement, - e_object: *E.Object, - e_spread: *E.Spread, - e_template: *E.Template, - e_reg_exp: *E.RegExp, - e_await: *E.Await, - e_yield: *E.Yield, - e_if: *E.If, - e_import: *E.Import, - - e_identifier: E.Identifier, - e_import_identifier: E.ImportIdentifier, - e_private_identifier: E.PrivateIdentifier, - e_commonjs_export_identifier: E.CommonJSExportIdentifier, - - e_boolean: E.Boolean, - e_number: E.Number, - e_big_int: *E.BigInt, - e_string: *E.String, - - e_require_string: E.RequireString, - 
e_require_resolve_string: E.RequireResolveString, - e_require_call_target, - e_require_resolve_call_target, - - e_missing: E.Missing, - e_this: E.This, - e_super: E.Super, - e_null: E.Null, - e_undefined: E.Undefined, - e_new_target: E.NewTarget, - e_import_meta: E.ImportMeta, - - e_import_meta_main: E.ImportMetaMain, - e_require_main, - - /// Covers some exotic AST node types under one namespace, since the - /// places this is found it all follows similar handling. - e_special: E.Special, - - e_inlined_enum: *E.InlinedEnum, - - e_name_of_symbol: *E.NameOfSymbol, - - comptime { - bun.assert_eql(@sizeOf(Data), 24); // Do not increase the size of Expr - } - - pub fn as(data: Data, comptime tag: Tag) ?@FieldType(Data, @tagName(tag)) { - return if (data == tag) @field(data, @tagName(tag)) else null; - } - - pub fn clone(this: Expr.Data, allocator: std.mem.Allocator) !Data { - return switch (this) { - .e_array => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_array))); - item.* = el.*; - return .{ .e_array = item }; - }, - .e_unary => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_unary))); - item.* = el.*; - return .{ .e_unary = item }; - }, - .e_binary => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_binary))); - item.* = el.*; - return .{ .e_binary = item }; - }, - .e_class => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_class))); - item.* = el.*; - return .{ .e_class = item }; - }, - .e_new => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_new))); - item.* = el.*; - return .{ .e_new = item }; - }, - .e_function => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_function))); - item.* = el.*; - return .{ .e_function = item }; - }, - .e_call => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_call))); - item.* = el.*; - return .{ .e_call = item }; - }, - .e_dot => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_dot))); - item.* = el.*; - return .{ .e_dot = item }; - }, - .e_index => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_index))); - item.* = el.*; - return .{ .e_index = item }; - }, - .e_arrow => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_arrow))); - item.* = el.*; - return .{ .e_arrow = item }; - }, - .e_jsx_element => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_jsx_element))); - item.* = el.*; - return .{ .e_jsx_element = item }; - }, - .e_object => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_object))); - item.* = el.*; - return .{ .e_object = item }; - }, - .e_spread => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_spread))); - item.* = el.*; - return .{ .e_spread = item }; - }, - .e_template => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_template))); - item.* = el.*; - return .{ .e_template = item }; - }, - .e_reg_exp => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_reg_exp))); - item.* = el.*; - return .{ .e_reg_exp = item }; - }, - .e_await => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_await))); - item.* = el.*; - return .{ .e_await = item }; - }, - .e_yield => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_yield))); - item.* = el.*; - return .{ .e_yield = item }; - }, - .e_if => |el| { - const item = try 
allocator.create(std.meta.Child(@TypeOf(this.e_if))); - item.* = el.*; - return .{ .e_if = item }; - }, - .e_import => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_import))); - item.* = el.*; - return .{ .e_import = item }; - }, - .e_big_int => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_big_int))); - item.* = el.*; - return .{ .e_big_int = item }; - }, - .e_string => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_string))); - item.* = el.*; - return .{ .e_string = item }; - }, - .e_inlined_enum => |el| { - const item = try allocator.create(std.meta.Child(@TypeOf(this.e_inlined_enum))); - item.* = el.*; - return .{ .e_inlined_enum = item }; - }, - else => this, - }; - } - - pub fn deepClone(this: Expr.Data, allocator: std.mem.Allocator) !Data { - return switch (this) { - .e_array => |el| { - const items = try el.items.deepClone(allocator); - const item = bun.create(allocator, E.Array, .{ - .items = items, - .comma_after_spread = el.comma_after_spread, - .was_originally_macro = el.was_originally_macro, - .is_single_line = el.is_single_line, - .is_parenthesized = el.is_parenthesized, - .close_bracket_loc = el.close_bracket_loc, - }); - return .{ .e_array = item }; - }, - .e_unary => |el| { - const item = bun.create(allocator, E.Unary, .{ - .op = el.op, - .value = try el.value.deepClone(allocator), - }); - return .{ .e_unary = item }; - }, - .e_binary => |el| { - const item = bun.create(allocator, E.Binary, .{ - .op = el.op, - .left = try el.left.deepClone(allocator), - .right = try el.right.deepClone(allocator), - }); - return .{ .e_binary = item }; - }, - .e_class => |el| { - const properties = try allocator.alloc(G.Property, el.properties.len); - for (el.properties, 0..) 
|prop, i| { - properties[i] = try prop.deepClone(allocator); - } - - const item = bun.create(allocator, E.Class, .{ - .class_keyword = el.class_keyword, - .ts_decorators = try el.ts_decorators.deepClone(allocator), - .class_name = el.class_name, - .extends = if (el.extends) |e| try e.deepClone(allocator) else null, - .body_loc = el.body_loc, - .close_brace_loc = el.close_brace_loc, - .properties = properties, - .has_decorators = el.has_decorators, - }); - return .{ .e_class = item }; - }, - .e_new => |el| { - const item = bun.create(allocator, E.New, .{ - .target = try el.target.deepClone(allocator), - .args = try el.args.deepClone(allocator), - .can_be_unwrapped_if_unused = el.can_be_unwrapped_if_unused, - .close_parens_loc = el.close_parens_loc, - }); - - return .{ .e_new = item }; - }, - .e_function => |el| { - const item = bun.create(allocator, E.Function, .{ - .func = try el.func.deepClone(allocator), - }); - return .{ .e_function = item }; - }, - .e_call => |el| { - const item = bun.create(allocator, E.Call, .{ - .target = try el.target.deepClone(allocator), - .args = try el.args.deepClone(allocator), - .optional_chain = el.optional_chain, - .is_direct_eval = el.is_direct_eval, - .close_paren_loc = el.close_paren_loc, - .can_be_unwrapped_if_unused = el.can_be_unwrapped_if_unused, - .was_jsx_element = el.was_jsx_element, - }); - return .{ .e_call = item }; - }, - .e_dot => |el| { - const item = bun.create(allocator, E.Dot, .{ - .target = try el.target.deepClone(allocator), - .name = el.name, - .name_loc = el.name_loc, - .optional_chain = el.optional_chain, - .can_be_removed_if_unused = el.can_be_removed_if_unused, - .call_can_be_unwrapped_if_unused = el.call_can_be_unwrapped_if_unused, - }); - return .{ .e_dot = item }; - }, - .e_index => |el| { - const item = bun.create(allocator, E.Index, .{ - .target = try el.target.deepClone(allocator), - .index = try el.index.deepClone(allocator), - .optional_chain = el.optional_chain, - }); - return .{ .e_index = item }; - }, - .e_arrow => |el| { - const args = try allocator.alloc(G.Arg, el.args.len); - for (0..args.len) |i| { - args[i] = try el.args[i].deepClone(allocator); - } - const item = bun.create(allocator, E.Arrow, .{ - .args = args, - .body = el.body, - .is_async = el.is_async, - .has_rest_arg = el.has_rest_arg, - .prefer_expr = el.prefer_expr, - }); - - return .{ .e_arrow = item }; - }, - .e_jsx_element => |el| { - const item = bun.create(allocator, E.JSXElement, .{ - .tag = if (el.tag) |tag| try tag.deepClone(allocator) else null, - .properties = try el.properties.deepClone(allocator), - .children = try el.children.deepClone(allocator), - .key_prop_index = el.key_prop_index, - .flags = el.flags, - .close_tag_loc = el.close_tag_loc, - }); - return .{ .e_jsx_element = item }; - }, - .e_object => |el| { - const item = bun.create(allocator, E.Object, .{ - .properties = try el.properties.deepClone(allocator), - .comma_after_spread = el.comma_after_spread, - .is_single_line = el.is_single_line, - .is_parenthesized = el.is_parenthesized, - .was_originally_macro = el.was_originally_macro, - .close_brace_loc = el.close_brace_loc, - }); - return .{ .e_object = item }; - }, - .e_spread => |el| { - const item = bun.create(allocator, E.Spread, .{ - .value = try el.value.deepClone(allocator), - }); - return .{ .e_spread = item }; - }, - .e_template => |el| { - const item = bun.create(allocator, E.Template, .{ - .tag = if (el.tag) |tag| try tag.deepClone(allocator) else null, - .parts = el.parts, - .head = el.head, - }); - return .{ .e_template = 
item }; - }, - .e_reg_exp => |el| { - const item = bun.create(allocator, E.RegExp, .{ - .value = el.value, - .flags_offset = el.flags_offset, - }); - return .{ .e_reg_exp = item }; - }, - .e_await => |el| { - const item = bun.create(allocator, E.Await, .{ - .value = try el.value.deepClone(allocator), - }); - return .{ .e_await = item }; - }, - .e_yield => |el| { - const item = bun.create(allocator, E.Yield, .{ - .value = if (el.value) |value| try value.deepClone(allocator) else null, - .is_star = el.is_star, - }); - return .{ .e_yield = item }; - }, - .e_if => |el| { - const item = bun.create(allocator, E.If, .{ - .test_ = try el.test_.deepClone(allocator), - .yes = try el.yes.deepClone(allocator), - .no = try el.no.deepClone(allocator), - }); - return .{ .e_if = item }; - }, - .e_import => |el| { - const item = bun.create(allocator, E.Import, .{ - .expr = try el.expr.deepClone(allocator), - .options = try el.options.deepClone(allocator), - .import_record_index = el.import_record_index, - }); - return .{ .e_import = item }; - }, - .e_big_int => |el| { - const item = bun.create(allocator, E.BigInt, .{ - .value = el.value, - }); - return .{ .e_big_int = item }; - }, - .e_string => |el| { - const item = bun.create(allocator, E.String, .{ - .data = el.data, - .prefer_template = el.prefer_template, - .next = el.next, - .end = el.end, - .rope_len = el.rope_len, - .is_utf16 = el.is_utf16, - }); - return .{ .e_string = item }; - }, - .e_inlined_enum => |el| { - const item = bun.create(allocator, E.InlinedEnum, .{ - .value = el.value, - .comment = el.comment, - }); - return .{ .e_inlined_enum = item }; - }, - else => this, - }; - } - - /// `hasher` should be something with 'pub fn update([]const u8) void'; - /// symbol table is passed to serialize `Ref` as an identifier names instead of a nondeterministic numbers - pub fn writeToHasher(this: Expr.Data, hasher: anytype, symbol_table: anytype) void { - writeAnyToHasher(hasher, std.meta.activeTag(this)); - switch (this) { - .e_name_of_symbol => |e| { - const symbol = e.ref.getSymbol(symbol_table); - hasher.update(symbol.original_name); - }, - .e_array => |e| { - writeAnyToHasher(hasher, .{ - e.is_single_line, - e.is_parenthesized, - e.was_originally_macro, - e.items.len, - }); - for (e.items.slice()) |item| { - item.data.writeToHasher(hasher, symbol_table); - } - }, - .e_unary => |e| { - writeAnyToHasher(hasher, .{e.op}); - e.value.data.writeToHasher(hasher, symbol_table); - }, - .e_binary => |e| { - writeAnyToHasher(hasher, .{e.op}); - e.left.data.writeToHasher(hasher, symbol_table); - e.right.data.writeToHasher(hasher, symbol_table); - }, - .e_class => |e| { - _ = e; // autofix - }, - inline .e_new, .e_call => |e| { - _ = e; // autofix - }, - .e_function => |e| { - _ = e; // autofix - }, - .e_dot => |e| { - writeAnyToHasher(hasher, .{ e.optional_chain, e.name.len }); - e.target.data.writeToHasher(hasher, symbol_table); - hasher.update(e.name); - }, - .e_index => |e| { - writeAnyToHasher(hasher, .{e.optional_chain}); - e.target.data.writeToHasher(hasher, symbol_table); - e.index.data.writeToHasher(hasher, symbol_table); - }, - .e_arrow => |e| { - _ = e; // autofix - }, - .e_jsx_element => |e| { - _ = e; // autofix - }, - .e_object => |e| { - _ = e; // autofix - }, - inline .e_spread, .e_await => |e| { - e.value.data.writeToHasher(hasher, symbol_table); - }, - inline .e_yield => |e| { - writeAnyToHasher(hasher, .{ e.is_star, e.value }); - if (e.value) |value| - value.data.writeToHasher(hasher, symbol_table); - }, - .e_template => |e| { - _ = e; // 
autofix - }, - .e_if => |e| { - _ = e; // autofix - }, - .e_import => |e| { - _ = e; // autofix - - }, - inline .e_identifier, - .e_import_identifier, - .e_private_identifier, - .e_commonjs_export_identifier, - => |e| { - const symbol = e.ref.getSymbol(symbol_table); - hasher.update(symbol.original_name); - }, - inline .e_boolean, .e_number => |e| { - writeAnyToHasher(hasher, e.value); - }, - inline .e_big_int, .e_reg_exp => |e| { - hasher.update(e.value); - }, - - .e_string => |e| { - var next: ?*E.String = e; - if (next) |current| { - if (current.isUTF8()) { - hasher.update(current.data); - } else { - hasher.update(bun.reinterpretSlice(u8, current.slice16())); - } - next = current.next; - hasher.update("\x00"); - } - }, - inline .e_require_string, .e_require_resolve_string => |e| { - writeAnyToHasher(hasher, e.import_record_index); // preferably, i'd like to write the filepath - }, - - .e_import_meta_main => |e| { - writeAnyToHasher(hasher, e.inverted); - }, - .e_inlined_enum => |e| { - // pretend there is no comment - e.value.data.writeToHasher(hasher, symbol_table); - }, - - // no data - .e_require_call_target, - .e_require_resolve_call_target, - .e_missing, - .e_this, - .e_super, - .e_null, - .e_undefined, - .e_new_target, - .e_require_main, - .e_import_meta, - .e_special, - => {}, - } - } - - /// "const values" here refers to expressions that can participate in constant - /// inlining, as they have no side effects on instantiation, and there would be - /// no observable difference if duplicated. This is a subset of canBeMoved() - pub fn canBeConstValue(this: Expr.Data) bool { - return switch (this) { - .e_number, - .e_boolean, - .e_null, - .e_undefined, - .e_inlined_enum, - => true, - .e_string => |str| str.next == null, - .e_array => |array| array.was_originally_macro, - .e_object => |object| object.was_originally_macro, - else => false, - }; - } - - /// Expressions that can be moved are those that do not have side - /// effects on their own. This is used to determine what can be moved - /// outside of a module wrapper (__esm/__commonJS). - pub fn canBeMoved(data: Expr.Data) bool { - return switch (data) { - // TODO: identifiers can be removed if unused, however code that - // moves expressions around sometimes does so incorrectly when - // doing destructures. 
test case: https://github.com/oven-sh/bun/issues/14027 - // .e_identifier => |id| id.can_be_removed_if_unused, - - .e_class => |class| class.canBeMoved(), - - .e_arrow, - .e_function, - - .e_number, - .e_boolean, - .e_null, - .e_undefined, - // .e_reg_exp, - .e_big_int, - .e_string, - .e_inlined_enum, - .e_import_meta, - => true, - - .e_template => |template| template.tag == null and template.parts.len == 0, - - .e_array => |array| array.was_originally_macro, - .e_object => |object| object.was_originally_macro, - - // TODO: experiment with allowing some e_binary, e_unary, e_if as movable - - else => false, - }; - } - - pub fn knownPrimitive(data: Expr.Data) PrimitiveType { - return switch (data) { - .e_big_int => .bigint, - .e_boolean => .boolean, - .e_null => .null, - .e_number => .number, - .e_string => .string, - .e_undefined => .undefined, - .e_template => if (data.e_template.tag == null) PrimitiveType.string else PrimitiveType.unknown, - .e_if => mergeKnownPrimitive(data.e_if.yes.data, data.e_if.no.data), - .e_binary => |binary| brk: { - switch (binary.op) { - .bin_strict_eq, - .bin_strict_ne, - .bin_loose_eq, - .bin_loose_ne, - .bin_lt, - .bin_gt, - .bin_le, - .bin_ge, - .bin_instanceof, - .bin_in, - => break :brk PrimitiveType.boolean, - .bin_logical_or, .bin_logical_and => break :brk binary.left.data.mergeKnownPrimitive(binary.right.data), - - .bin_nullish_coalescing => { - const left = binary.left.data.knownPrimitive(); - const right = binary.right.data.knownPrimitive(); - if (left == .null or left == .undefined) - break :brk right; - - if (left != .unknown) { - if (left != .mixed) - break :brk left; // Definitely not null or undefined - - if (right != .unknown) - break :brk PrimitiveType.mixed; // Definitely some kind of primitive - } - }, - - .bin_add => { - const left = binary.left.data.knownPrimitive(); - const right = binary.right.data.knownPrimitive(); - - if (left == .string or right == .string) - break :brk PrimitiveType.string; - - if (left == .bigint or right == .bigint) - break :brk PrimitiveType.bigint; - - if (switch (left) { - .unknown, .mixed, .bigint => false, - else => true, - } and switch (right) { - .unknown, .mixed, .bigint => false, - else => true, - }) - break :brk PrimitiveType.number; - - break :brk PrimitiveType.mixed; // Can be number or bigint or string (or an exception) - }, - - .bin_sub, - .bin_sub_assign, - .bin_mul, - .bin_mul_assign, - .bin_div, - .bin_div_assign, - .bin_rem, - .bin_rem_assign, - .bin_pow, - .bin_pow_assign, - .bin_bitwise_and, - .bin_bitwise_and_assign, - .bin_bitwise_or, - .bin_bitwise_or_assign, - .bin_bitwise_xor, - .bin_bitwise_xor_assign, - .bin_shl, - .bin_shl_assign, - .bin_shr, - .bin_shr_assign, - .bin_u_shr, - .bin_u_shr_assign, - => break :brk PrimitiveType.mixed, // Can be number or bigint (or an exception) - - .bin_assign, - .bin_comma, - => break :brk binary.right.data.knownPrimitive(), - - else => {}, - } - - break :brk PrimitiveType.unknown; - }, - - .e_unary => switch (data.e_unary.op) { - .un_void => PrimitiveType.undefined, - .un_typeof => PrimitiveType.string, - .un_not, .un_delete => PrimitiveType.boolean, - .un_pos => PrimitiveType.number, // Cannot be bigint because that throws an exception - .un_neg, .un_cpl => switch (data.e_unary.value.data.knownPrimitive()) { - .bigint => PrimitiveType.bigint, - .unknown, .mixed => PrimitiveType.mixed, - else => PrimitiveType.number, // Can be number or bigint - }, - .un_pre_dec, .un_pre_inc, .un_post_dec, .un_post_inc => PrimitiveType.mixed, // Can be number or bigint - 
- else => PrimitiveType.unknown, - }, - - .e_inlined_enum => |inlined| inlined.value.data.knownPrimitive(), - - else => PrimitiveType.unknown, - }; - } - - pub fn mergeKnownPrimitive(lhs: Expr.Data, rhs: Expr.Data) PrimitiveType { - return lhs.knownPrimitive().merge(rhs.knownPrimitive()); - } - - /// Returns true if the result of the "typeof" operator on this expression is - /// statically determined and this expression has no side effects (i.e. can be - /// removed without consequence). - pub inline fn toTypeof(data: Expr.Data) ?string { - return @as(Expr.Tag, data).typeof(); - } - - pub fn toNumber(data: Expr.Data) ?f64 { - return switch (data) { - .e_null => 0, - .e_undefined => std.math.nan(f64), - .e_string => |str| { - if (str.next != null) return null; - if (!str.isUTF8()) return null; - - // +'1' => 1 - return stringToEquivalentNumberValue(str.slice8()); - }, - .e_boolean => @as(f64, if (data.e_boolean.value) 1.0 else 0.0), - .e_number => data.e_number.value, - .e_inlined_enum => |inlined| switch (inlined.value.data) { - .e_number => |num| num.value, - .e_string => |str| { - if (str.next != null) return null; - if (!str.isUTF8()) return null; - - // +'1' => 1 - return stringToEquivalentNumberValue(str.slice8()); - }, - else => null, - }, - else => null, - }; - } - - pub fn toFiniteNumber(data: Expr.Data) ?f64 { - return switch (data) { - .e_boolean => @as(f64, if (data.e_boolean.value) 1.0 else 0.0), - .e_number => if (std.math.isFinite(data.e_number.value)) - data.e_number.value - else - null, - .e_inlined_enum => |inlined| switch (inlined.value.data) { - .e_number => |num| if (std.math.isFinite(num.value)) - num.value - else - null, - else => null, - }, - else => null, - }; - } - - pub fn extractNumericValue(data: Expr.Data) ?f64 { - return switch (data) { - .e_number => data.e_number.value, - .e_inlined_enum => |inlined| switch (inlined.value.data) { - .e_number => |num| num.value, - else => null, - }, - else => null, - }; - } - - pub const Equality = struct { - equal: bool = false, - ok: bool = false, - - /// This extra flag is unfortunately required for the case of visiting the expression - /// `require.main === module` (and any combination of !==, ==, !=, either ordering) - /// - /// We want to replace this with the dedicated import_meta_main node, which: - /// - Stops this module from having p.require_ref, allowing conversion to ESM - /// - Allows us to inline `import.meta.main`'s value, if it is known (bun build --compile) - is_require_main_and_module: bool = false, - - pub const @"true" = Equality{ .ok = true, .equal = true }; - pub const @"false" = Equality{ .ok = true, .equal = false }; - pub const unknown = Equality{ .ok = false }; - }; - - // Returns "equal, ok". If "ok" is false, then nothing is known about the two - // values. If "ok" is true, the equality or inequality of the two values is - // stored in "equal". 
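
The `eql` function that follows folds comparisons at compile time using this tri-state result: a comparison is only rewritten when both the outcome and its knowability are established. A minimal standalone sketch of one case (boolean literal vs. number literal) with toy types, not Bun's actual `Expr` machinery:

```zig
const std = @import("std");

const Equality = struct {
    equal: bool = false,
    ok: bool = false, // false => nothing is known statically
};

const Kind = enum { loose, strict };

/// Toy version of one `eql` case: a boolean literal compared against
/// a number literal, mirroring JavaScript's coercion rules.
fn eqlBoolNumber(lhs: bool, rhs: f64, kind: Kind) Equality {
    return switch (kind) {
        // "true === 1" and "false === 0" are always false.
        .strict => .{ .ok = true, .equal = false },
        // "true == 1" and "false == 0" are true under loose equality.
        .loose => .{ .ok = true, .equal = if (lhs) rhs == 1 else rhs == 0 },
    };
}

pub fn main() void {
    std.debug.print("true == 1  -> {}\n", .{eqlBoolNumber(true, 1, .loose).equal}); // true
    std.debug.print("true === 1 -> {}\n", .{eqlBoolNumber(true, 1, .strict).equal}); // false
}
```

The `ok = false` escape hatch is what keeps the folding sound: `"0x0" == 0` is true at runtime, so the string-vs-number case below only folds a few exact literals and otherwise reports `unknown`.
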
- pub fn eql( - left: Expr.Data, - right: Expr.Data, - p: anytype, - comptime kind: enum { loose, strict }, - ) Equality { - comptime bun.assert(@typeInfo(@TypeOf(p)).pointer.size == .one); // pass *Parser - - // https://dorey.github.io/JavaScript-Equality-Table/ - switch (left) { - .e_inlined_enum => |inlined| return inlined.value.data.eql(right, p, kind), - - .e_null, .e_undefined => { - const ok = switch (@as(Expr.Tag, right)) { - .e_null, .e_undefined => true, - else => @as(Expr.Tag, right).isPrimitiveLiteral(), - }; - - if (comptime kind == .loose) { - return .{ - .equal = switch (@as(Expr.Tag, right)) { - .e_null, .e_undefined => true, - else => false, - }, - .ok = ok, - }; - } - - return .{ - .equal = @as(Tag, right) == @as(Tag, left), - .ok = ok, - }; - }, - .e_boolean => |l| { - switch (right) { - .e_boolean => { - return .{ - .ok = true, - .equal = l.value == right.e_boolean.value, - }; - }, - .e_number => |num| { - if (comptime kind == .strict) { - // "true === 1" is false - // "false === 0" is false - return Equality.false; - } - - return .{ - .ok = true, - .equal = if (l.value) - num.value == 1 - else - num.value == 0, - }; - }, - .e_null, .e_undefined => { - return Equality.false; - }, - else => {}, - } - }, - .e_number => |l| { - switch (right) { - .e_number => |r| { - return .{ - .ok = true, - .equal = l.value == r.value, - }; - }, - .e_inlined_enum => |r| if (r.value.data == .e_number) { - return .{ - .ok = true, - .equal = l.value == r.value.data.e_number.value, - }; - }, - .e_boolean => |r| { - if (comptime kind == .loose) { - return .{ - .ok = true, - // "1 == true" is true - // "0 == false" is true - .equal = if (r.value) - l.value == 1 - else - l.value == 0, - }; - } - - // "1 === true" is false - // "0 === false" is false - return Equality.false; - }, - .e_null, .e_undefined => { - // "(not null or undefined) == undefined" is false - return Equality.false; - }, - else => {}, - } - }, - .e_big_int => |l| { - if (right == .e_big_int) { - if (strings.eqlLong(l.value, right.e_big_int.value, true)) { - return Equality.true; - } - - // 0x0000n == 0n is true - return .{ .ok = false }; - } else { - return .{ - .ok = switch (right) { - .e_null, .e_undefined => true, - else => false, - }, - .equal = false, - }; - } - }, - .e_string => |l| { - switch (right) { - .e_string => |r| { - r.resolveRopeIfNeeded(p.allocator); - l.resolveRopeIfNeeded(p.allocator); - return .{ - .ok = true, - .equal = r.eql(E.String, l), - }; - }, - .e_inlined_enum => |inlined| { - if (inlined.value.data == .e_string) { - const r = inlined.value.data.e_string; - - r.resolveRopeIfNeeded(p.allocator); - l.resolveRopeIfNeeded(p.allocator); - - return .{ - .ok = true, - .equal = r.eql(E.String, l), - }; - } - }, - .e_null, .e_undefined => { - return Equality.false; - }, - .e_number => |r| { - if (comptime kind == .loose) { - l.resolveRopeIfNeeded(p.allocator); - if (r.value == 0 and (l.isBlank() or l.eqlComptime("0"))) { - return Equality.true; - } - - if (r.value == 1 and l.eqlComptime("1")) { - return Equality.true; - } - - // the string could still equal 0 or 1 but it could be hex, binary, octal, ... - return Equality.unknown; - } else { - return Equality.false; - } - }, - - else => {}, - } - }, - - else => { - // Do not need to check left because e_require_main is - // always re-ordered to the right side. 
- if (right == .e_require_main) { - if (left.as(.e_identifier)) |id| { - if (id.ref.eql(p.module_ref)) return .{ - .ok = true, - .equal = true, - .is_require_main_and_module = true, - }; - } - } - }, - } - - return Equality.unknown; - } - - pub fn toJS(this: Data, allocator: std.mem.Allocator, globalObject: *JSC.JSGlobalObject) ToJSError!JSC.JSValue { - return switch (this) { - .e_array => |e| e.toJS(allocator, globalObject), - .e_object => |e| e.toJS(allocator, globalObject), - .e_string => |e| e.toJS(allocator, globalObject), - .e_null => JSC.JSValue.null, - .e_undefined => .js_undefined, - .e_boolean => |boolean| if (boolean.value) - JSC.JSValue.true - else - JSC.JSValue.false, - .e_number => |e| e.toJS(), - // .e_big_int => |e| e.toJS(ctx, exception), - - .e_inlined_enum => |inlined| inlined.value.data.toJS(allocator, globalObject), - - .e_identifier, - .e_import_identifier, - .e_private_identifier, - .e_commonjs_export_identifier, - => error.@"Cannot convert identifier to JS. Try a statically-known value", - - // brk: { - // // var node = try allocator.create(Macro.JSNode); - // // node.* = Macro.JSNode.initExpr(Expr{ .data = this, .loc = logger.Loc.Empty }); - // // break :brk JSC.JSValue.c(Macro.JSNode.Class.make(globalObject, node)); - // }, - - else => { - return error.@"Cannot convert argument type to JS"; - }, - }; - } - - pub const Store = struct { - const StoreType = NewStore(&.{ - E.NameOfSymbol, - E.Array, - E.Arrow, - E.Await, - E.BigInt, - E.Binary, - E.Call, - E.Class, - E.Dot, - E.Function, - E.If, - E.Import, - E.Index, - E.InlinedEnum, - E.JSXElement, - E.New, - E.Number, - E.Object, - E.PrivateIdentifier, - E.RegExp, - E.Spread, - E.String, - E.Template, - E.TemplatePart, - E.Unary, - E.Yield, - }, 512); - - pub threadlocal var instance: ?*StoreType = null; - pub threadlocal var memory_allocator: ?*ASTMemoryAllocator = null; - pub threadlocal var disable_reset = false; - - pub fn create() void { - if (instance != null or memory_allocator != null) { - return; - } - - instance = StoreType.init(); - } - - pub fn reset() void { - if (disable_reset or memory_allocator != null) return; - instance.?.reset(); - } - - pub fn deinit() void { - if (instance == null or memory_allocator != null) return; - instance.?.deinit(); - instance = null; - } - - pub inline fn assert() void { - if (comptime Environment.isDebug or Environment.enable_asan) { - if (instance == null and memory_allocator == null) - bun.unreachablePanic("Store must be init'd", .{}); - } - } - - /// create || reset - pub fn begin() void { - if (memory_allocator != null) return; - if (instance == null) { - create(); - return; - } - - if (!disable_reset) - instance.?.reset(); - } - - pub fn append(comptime T: type, value: T) *T { - if (memory_allocator) |allocator| { - return allocator.append(T, value); - } - - Disabler.assert(); - return instance.?.append(T, value); - } - }; - - pub inline fn isStringValue(self: Data) bool { - return @as(Expr.Tag, self) == .e_string; - } - }; - - pub fn StoredData(tag: Tag) type { - const T = @FieldType(Data, tag); - return switch (@typeInfo(T)) { - .pointer => |ptr| ptr.child, - else => T, - }; - } -}; - pub const EnumValue = struct { loc: logger.Loc, ref: Ref, @@ -6477,222 +214,6 @@ pub const EnumValue = struct { } }; -pub const S = struct { - pub const Block = struct { - stmts: StmtNodeList, - close_brace_loc: logger.Loc = logger.Loc.Empty, - }; - - pub const SExpr = struct { - value: ExprNodeIndex, - - // This is set to true for automatically-generated expressions that should - 
// not affect tree shaking. For example, calling a function from the runtime - // that doesn't have externally-visible side effects. - does_not_affect_tree_shaking: bool = false, - }; - - pub const Comment = struct { text: string }; - - pub const Directive = struct { - value: []const u8, - }; - - pub const ExportClause = struct { - items: []ClauseItem, - is_single_line: bool, - }; - - pub const Empty = struct {}; - - pub const ExportStar = struct { - namespace_ref: Ref, - alias: ?G.ExportStarAlias = null, - import_record_index: u32, - }; - - // This is an "export = value;" statement in TypeScript - pub const ExportEquals = struct { value: ExprNodeIndex }; - - pub const Label = struct { name: LocRef, stmt: StmtNodeIndex }; - - // This is a stand-in for a TypeScript type declaration - pub const TypeScript = struct {}; - - pub const Debugger = struct {}; - - pub const ExportFrom = struct { - items: []ClauseItem, - namespace_ref: Ref, - import_record_index: u32, - is_single_line: bool, - }; - - pub const ExportDefault = struct { - default_name: LocRef, // value may be a SFunction or SClass - value: StmtOrExpr, - - pub fn canBeMoved(self: *const ExportDefault) bool { - return switch (self.value) { - .expr => |e| e.canBeMoved(), - .stmt => |s| switch (s.data) { - .s_class => |class| class.class.canBeMoved(), - .s_function => true, - else => false, - }, - }; - } - }; - - pub const Enum = struct { - name: LocRef, - arg: Ref, - values: []EnumValue, - is_export: bool, - }; - - pub const Namespace = struct { - name: LocRef, - arg: Ref, - stmts: StmtNodeList, - is_export: bool, - }; - - pub const Function = struct { - func: G.Fn, - }; - - pub const Class = struct { class: G.Class, is_export: bool = false }; - - pub const If = struct { - test_: ExprNodeIndex, - yes: StmtNodeIndex, - no: ?StmtNodeIndex, - }; - - pub const For = struct { - // May be a SConst, SLet, SVar, or SExpr - init: ?StmtNodeIndex = null, - test_: ?ExprNodeIndex = null, - update: ?ExprNodeIndex = null, - body: StmtNodeIndex, - }; - - pub const ForIn = struct { - // May be a SConst, SLet, SVar, or SExpr - init: StmtNodeIndex, - value: ExprNodeIndex, - body: StmtNodeIndex, - }; - - pub const ForOf = struct { - is_await: bool = false, - // May be a SConst, SLet, SVar, or SExpr - init: StmtNodeIndex, - value: ExprNodeIndex, - body: StmtNodeIndex, - }; - - pub const DoWhile = struct { body: StmtNodeIndex, test_: ExprNodeIndex }; - - pub const While = struct { - test_: ExprNodeIndex, - body: StmtNodeIndex, - }; - - pub const With = struct { - value: ExprNodeIndex, - body: StmtNodeIndex, - body_loc: logger.Loc = logger.Loc.Empty, - }; - - pub const Try = struct { - body_loc: logger.Loc, - body: StmtNodeList, - - catch_: ?Catch = null, - finally: ?Finally = null, - }; - - pub const Switch = struct { - test_: ExprNodeIndex, - body_loc: logger.Loc, - cases: []Case, - }; - - // This object represents all of these types of import statements: - // - // import 'path' - // import {item1, item2} from 'path' - // import * as ns from 'path' - // import defaultItem, {item1, item2} from 'path' - // import defaultItem, * as ns from 'path' - // - // Many parts are optional and can be combined in different ways. The only - // restriction is that you cannot have both a clause and a star namespace. - pub const Import = struct { - // If this is a star import: This is a Ref for the namespace symbol. The Loc - // for the symbol is StarLoc. - // - // Otherwise: This is an auto-generated Ref for the namespace representing - // the imported file. 
In this case StarLoc is nil. The NamespaceRef is used - // when converting this module to a CommonJS module. - namespace_ref: Ref, - default_name: ?LocRef = null, - items: []ClauseItem = &.{}, - star_name_loc: ?logger.Loc = null, - import_record_index: u32, - is_single_line: bool = false, - }; - - pub const Return = struct { value: ?ExprNodeIndex = null }; - pub const Throw = struct { value: ExprNodeIndex }; - - pub const Local = struct { - kind: Kind = .k_var, - decls: G.Decl.List = .{}, - is_export: bool = false, - // The TypeScript compiler doesn't generate code for "import foo = bar" - // statements where the import is never used. - was_ts_import_equals: bool = false, - - was_commonjs_export: bool = false, - - pub fn canMergeWith(this: *const Local, other: *const Local) bool { - return this.kind == other.kind and this.is_export == other.is_export and - this.was_commonjs_export == other.was_commonjs_export; - } - - pub const Kind = enum { - k_var, - k_let, - k_const, - k_using, - k_await_using, - - pub fn jsonStringify(self: @This(), writer: anytype) !void { - return try writer.write(@tagName(self)); - } - - pub fn isUsing(self: Kind) bool { - return self == .k_using or self == .k_await_using; - } - - pub fn isReassignable(kind: Kind) bool { - return kind == .k_var or kind == .k_let; - } - }; - }; - - pub const Break = struct { - label: ?LocRef = null, - }; - - pub const Continue = struct { - label: ?LocRef = null, - }; -}; - pub const Catch = struct { loc: logger.Loc, binding: ?BindingNodeIndex = null, @@ -6707,412 +228,11 @@ pub const Finally = struct { pub const Case = struct { loc: logger.Loc, value: ?ExprNodeIndex, body: StmtNodeList }; -pub const Op = struct { - // If you add a new token, remember to add it to "Table" too - pub const Code = enum { - // Prefix - un_pos, // +expr - un_neg, // -expr - un_cpl, // ~expr - un_not, // !expr - un_void, - un_typeof, - un_delete, - - // Prefix update - un_pre_dec, - un_pre_inc, - - // Postfix update - un_post_dec, - un_post_inc, - - /// Left-associative - bin_add, - /// Left-associative - bin_sub, - /// Left-associative - bin_mul, - /// Left-associative - bin_div, - /// Left-associative - bin_rem, - /// Left-associative - bin_pow, - /// Left-associative - bin_lt, - /// Left-associative - bin_le, - /// Left-associative - bin_gt, - /// Left-associative - bin_ge, - /// Left-associative - bin_in, - /// Left-associative - bin_instanceof, - /// Left-associative - bin_shl, - /// Left-associative - bin_shr, - /// Left-associative - bin_u_shr, - /// Left-associative - bin_loose_eq, - /// Left-associative - bin_loose_ne, - /// Left-associative - bin_strict_eq, - /// Left-associative - bin_strict_ne, - /// Left-associative - bin_nullish_coalescing, - /// Left-associative - bin_logical_or, - /// Left-associative - bin_logical_and, - /// Left-associative - bin_bitwise_or, - /// Left-associative - bin_bitwise_and, - /// Left-associative - bin_bitwise_xor, - - /// Non-associative - bin_comma, - - /// Right-associative - bin_assign, - /// Right-associative - bin_add_assign, - /// Right-associative - bin_sub_assign, - /// Right-associative - bin_mul_assign, - /// Right-associative - bin_div_assign, - /// Right-associative - bin_rem_assign, - /// Right-associative - bin_pow_assign, - /// Right-associative - bin_shl_assign, - /// Right-associative - bin_shr_assign, - /// Right-associative - bin_u_shr_assign, - /// Right-associative - bin_bitwise_or_assign, - /// Right-associative - bin_bitwise_and_assign, - /// Right-associative - bin_bitwise_xor_assign, - 
/// Right-associative - bin_nullish_coalescing_assign, - /// Right-associative - bin_logical_or_assign, - /// Right-associative - bin_logical_and_assign, - - pub fn jsonStringify(self: @This(), writer: anytype) !void { - return try writer.write(@tagName(self)); - } - - pub fn unaryAssignTarget(code: Op.Code) AssignTarget { - if (@intFromEnum(code) >= - @intFromEnum(Op.Code.un_pre_dec) and @intFromEnum(code) <= - @intFromEnum(Op.Code.un_post_inc)) - { - return AssignTarget.update; - } - - return AssignTarget.none; - } - pub fn isLeftAssociative(code: Op.Code) bool { - return @intFromEnum(code) >= - @intFromEnum(Op.Code.bin_add) and - @intFromEnum(code) < @intFromEnum(Op.Code.bin_comma) and code != .bin_pow; - } - pub fn isRightAssociative(code: Op.Code) bool { - return @intFromEnum(code) >= @intFromEnum(Op.Code.bin_assign) or code == .bin_pow; - } - pub fn binaryAssignTarget(code: Op.Code) AssignTarget { - if (code == .bin_assign) { - return AssignTarget.replace; - } - - if (@intFromEnum(code) > @intFromEnum(Op.Code.bin_assign)) { - return AssignTarget.update; - } - - return AssignTarget.none; - } - - pub fn isPrefix(code: Op.Code) bool { - return @intFromEnum(code) < @intFromEnum(Op.Code.un_post_dec); - } - }; - - pub const Level = enum(u6) { - lowest, - comma, - spread, - yield, - assign, - conditional, - nullish_coalescing, - logical_or, - logical_and, - bitwise_or, - bitwise_xor, - bitwise_and, - equals, - compare, - shift, - add, - multiply, - exponentiation, - prefix, - postfix, - new, - call, - member, - - pub inline fn lt(self: Level, b: Level) bool { - return @intFromEnum(self) < @intFromEnum(b); - } - pub inline fn gt(self: Level, b: Level) bool { - return @intFromEnum(self) > @intFromEnum(b); - } - pub inline fn gte(self: Level, b: Level) bool { - return @intFromEnum(self) >= @intFromEnum(b); - } - pub inline fn lte(self: Level, b: Level) bool { - return @intFromEnum(self) <= @intFromEnum(b); - } - pub inline fn eql(self: Level, b: Level) bool { - return @intFromEnum(self) == @intFromEnum(b); - } - - pub inline fn sub(self: Level, i: anytype) Level { - return @as(Level, @enumFromInt(@intFromEnum(self) - i)); - } - - pub inline fn addF(self: Level, i: anytype) Level { - return @as(Level, @enumFromInt(@intFromEnum(self) + i)); - } - }; - - text: string, - level: Level, - is_keyword: bool = false, - - pub fn init(triple: anytype) Op { - return Op{ - .text = triple.@"0", - .level = triple.@"1", - .is_keyword = triple.@"2", - }; - } - - pub fn jsonStringify(self: *const @This(), writer: anytype) !void { - return try writer.write(self.text); - } - - pub const TableType: std.EnumArray(Op.Code, Op) = undefined; - pub const Table = brk: { - var table = std.EnumArray(Op.Code, Op).initUndefined(); - - // Prefix - table.set(Op.Code.un_pos, Op.init(.{ "+", Level.prefix, false })); - table.set(Op.Code.un_neg, Op.init(.{ "-", Level.prefix, false })); - table.set(Op.Code.un_cpl, Op.init(.{ "~", Level.prefix, false })); - table.set(Op.Code.un_not, Op.init(.{ "!", Level.prefix, false })); - table.set(Op.Code.un_void, Op.init(.{ "void", Level.prefix, true })); - table.set(Op.Code.un_typeof, Op.init(.{ "typeof", Level.prefix, true })); - table.set(Op.Code.un_delete, Op.init(.{ "delete", Level.prefix, true })); - - // Prefix update - table.set(Op.Code.un_pre_dec, Op.init(.{ "--", Level.prefix, false })); - table.set(Op.Code.un_pre_inc, Op.init(.{ "++", Level.prefix, false })); - - // Postfix update - table.set(Op.Code.un_post_dec, Op.init(.{ "--", Level.postfix, false })); - 
table.set(Op.Code.un_post_inc, Op.init(.{ "++", Level.postfix, false })); - - // Left-associative - table.set(Op.Code.bin_add, Op.init(.{ "+", Level.add, false })); - table.set(Op.Code.bin_sub, Op.init(.{ "-", Level.add, false })); - table.set(Op.Code.bin_mul, Op.init(.{ "*", Level.multiply, false })); - table.set(Op.Code.bin_div, Op.init(.{ "/", Level.multiply, false })); - table.set(Op.Code.bin_rem, Op.init(.{ "%", Level.multiply, false })); - table.set(Op.Code.bin_pow, Op.init(.{ "**", Level.exponentiation, false })); - table.set(Op.Code.bin_lt, Op.init(.{ "<", Level.compare, false })); - table.set(Op.Code.bin_le, Op.init(.{ "<=", Level.compare, false })); - table.set(Op.Code.bin_gt, Op.init(.{ ">", Level.compare, false })); - table.set(Op.Code.bin_ge, Op.init(.{ ">=", Level.compare, false })); - table.set(Op.Code.bin_in, Op.init(.{ "in", Level.compare, true })); - table.set(Op.Code.bin_instanceof, Op.init(.{ "instanceof", Level.compare, true })); - table.set(Op.Code.bin_shl, Op.init(.{ "<<", Level.shift, false })); - table.set(Op.Code.bin_shr, Op.init(.{ ">>", Level.shift, false })); - table.set(Op.Code.bin_u_shr, Op.init(.{ ">>>", Level.shift, false })); - table.set(Op.Code.bin_loose_eq, Op.init(.{ "==", Level.equals, false })); - table.set(Op.Code.bin_loose_ne, Op.init(.{ "!=", Level.equals, false })); - table.set(Op.Code.bin_strict_eq, Op.init(.{ "===", Level.equals, false })); - table.set(Op.Code.bin_strict_ne, Op.init(.{ "!==", Level.equals, false })); - table.set(Op.Code.bin_nullish_coalescing, Op.init(.{ "??", Level.nullish_coalescing, false })); - table.set(Op.Code.bin_logical_or, Op.init(.{ "||", Level.logical_or, false })); - table.set(Op.Code.bin_logical_and, Op.init(.{ "&&", Level.logical_and, false })); - table.set(Op.Code.bin_bitwise_or, Op.init(.{ "|", Level.bitwise_or, false })); - table.set(Op.Code.bin_bitwise_and, Op.init(.{ "&", Level.bitwise_and, false })); - table.set(Op.Code.bin_bitwise_xor, Op.init(.{ "^", Level.bitwise_xor, false })); - - // Non-associative - table.set(Op.Code.bin_comma, Op.init(.{ ",", Level.comma, false })); - - // Right-associative - table.set(Op.Code.bin_assign, Op.init(.{ "=", Level.assign, false })); - table.set(Op.Code.bin_add_assign, Op.init(.{ "+=", Level.assign, false })); - table.set(Op.Code.bin_sub_assign, Op.init(.{ "-=", Level.assign, false })); - table.set(Op.Code.bin_mul_assign, Op.init(.{ "*=", Level.assign, false })); - table.set(Op.Code.bin_div_assign, Op.init(.{ "/=", Level.assign, false })); - table.set(Op.Code.bin_rem_assign, Op.init(.{ "%=", Level.assign, false })); - table.set(Op.Code.bin_pow_assign, Op.init(.{ "**=", Level.assign, false })); - table.set(Op.Code.bin_shl_assign, Op.init(.{ "<<=", Level.assign, false })); - table.set(Op.Code.bin_shr_assign, Op.init(.{ ">>=", Level.assign, false })); - table.set(Op.Code.bin_u_shr_assign, Op.init(.{ ">>>=", Level.assign, false })); - table.set(Op.Code.bin_bitwise_or_assign, Op.init(.{ "|=", Level.assign, false })); - table.set(Op.Code.bin_bitwise_and_assign, Op.init(.{ "&=", Level.assign, false })); - table.set(Op.Code.bin_bitwise_xor_assign, Op.init(.{ "^=", Level.assign, false })); - table.set(Op.Code.bin_nullish_coalescing_assign, Op.init(.{ "??=", Level.assign, false })); - table.set(Op.Code.bin_logical_or_assign, Op.init(.{ "||=", Level.assign, false })); - table.set(Op.Code.bin_logical_and_assign, Op.init(.{ "&&=", Level.assign, false })); - - break :brk table; - }; -}; - pub const ArrayBinding = struct { binding: BindingNodeIndex, default_value: ?ExprNodeIndex = null, 
}; -pub const Ast = struct { - pub const TopLevelSymbolToParts = std.ArrayHashMapUnmanaged(Ref, BabyList(u32), Ref.ArrayHashCtx, false); - - approximate_newline_count: usize = 0, - has_lazy_export: bool = false, - runtime_imports: Runtime.Imports = .{}, - - nested_scope_slot_counts: SlotCounts = SlotCounts{}, - - runtime_import_record_id: ?u32 = null, - needs_runtime: bool = false, - // This is a list of CommonJS features. When a file uses CommonJS features, - // it's not a candidate for "flat bundling" and must be wrapped in its own - // closure. - has_top_level_return: bool = false, - uses_exports_ref: bool = false, - uses_module_ref: bool = false, - uses_require_ref: bool = false, - commonjs_module_exports_assigned_deoptimized: bool = false, - - force_cjs_to_esm: bool = false, - exports_kind: ExportsKind = ExportsKind.none, - - // This is a list of ES6 features. They are ranges instead of booleans so - // that they can be used in log messages. Check to see if "Len > 0". - import_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax or "import()" - export_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax - top_level_await_keyword: logger.Range = logger.Range.None, - - /// These are stored at the AST level instead of on individual AST nodes so - /// they can be manipulated efficiently without a full AST traversal - import_records: ImportRecord.List = .{}, - - hashbang: string = "", - directive: ?string = null, - parts: Part.List = Part.List{}, - // This list may be mutated later, so we should store the capacity - symbols: Symbol.List = Symbol.List{}, - module_scope: Scope = Scope{}, - char_freq: ?CharFreq = null, - exports_ref: Ref = Ref.None, - module_ref: Ref = Ref.None, - /// When using format .bake_internal_dev, this is the HMR variable instead - /// of the wrapper. This is because that format does not store module - /// wrappers in a variable. - wrapper_ref: Ref = Ref.None, - require_ref: Ref = Ref.None, - - // These are used when bundling. They are filled in during the parser pass - // since we already have to traverse the AST then anyway and the parser pass - // is conveniently fully parallelized. 
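
As the comment above notes, these maps are filled in while parsing, so the bundler never needs a second traversal to discover imports and exports. A sketch of that collection pattern with hypothetical stand-in types (`Ref` as a bare integer, a one-field `NamedExport`); Bun's real layouts differ:

```zig
const std = @import("std");

const Ref = u32; // stand-in; Bun's Ref packs source and inner indices
const NamedExport = struct { ref: Ref }; // hypothetical, simplified

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // The parser records each `export` as it walks the AST.
    var named_exports: std.StringArrayHashMapUnmanaged(NamedExport) = .{};
    defer named_exports.deinit(allocator);

    try named_exports.put(allocator, "foo", .{ .ref = 1 });
    try named_exports.put(allocator, "default", .{ .ref = 2 });

    for (named_exports.keys(), named_exports.values()) |name, exp| {
        std.debug.print("export {s} -> ref {d}\n", .{ name, exp.ref });
    }
}
```
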
- named_imports: NamedImports = .{}, - named_exports: NamedExports = .{}, - export_star_import_records: []u32 = &([_]u32{}), - - // allocator: std.mem.Allocator, - top_level_symbols_to_parts: TopLevelSymbolToParts = .{}, - - commonjs_named_exports: CommonJSNamedExports = .{}, - - redirect_import_record_index: ?u32 = null, - - /// Only populated when bundling - target: bun.options.Target = .browser, - // const_values: ConstValuesMap = .{}, - ts_enums: TsEnumsMap = .{}, - - /// Not to be confused with `commonjs_named_exports` - /// This is a list of named exports that may exist in a CommonJS module - /// We use this with `commonjs_at_runtime` to re-export CommonJS - has_commonjs_export_names: bool = false, - import_meta_ref: Ref = Ref.None, - - pub const CommonJSNamedExport = struct { - loc_ref: LocRef, - needs_decl: bool = true, - }; - pub const CommonJSNamedExports = bun.StringArrayHashMapUnmanaged(CommonJSNamedExport); - - pub const NamedImports = std.ArrayHashMapUnmanaged(Ref, NamedImport, RefHashCtx, true); - pub const NamedExports = bun.StringArrayHashMapUnmanaged(NamedExport); - pub const ConstValuesMap = std.ArrayHashMapUnmanaged(Ref, Expr, RefHashCtx, false); - pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged(InlinedEnumValue), RefHashCtx, false); - - pub fn fromParts(parts: []Part) Ast { - return Ast{ - .parts = Part.List.init(parts), - .runtime_imports = .{}, - }; - } - - pub fn initTest(parts: []Part) Ast { - return Ast{ - .parts = Part.List.init(parts), - .runtime_imports = .{}, - }; - } - - pub const empty = Ast{ .parts = Part.List{}, .runtime_imports = .{} }; - - pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void { - const opts = std.json.StringifyOptions{ .whitespace = std.json.StringifyOptions.Whitespace{ - .separator = true, - } }; - try std.json.stringify(self.parts, opts, stream); - } - - /// Do not call this if it wasn't globally allocated! - pub fn deinit(this: *Ast) void { - // TODO: assert mimalloc-owned memory - if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator); - if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator); - if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator); - } -}; - /// TLA => Top Level Await pub const TlaCheck = struct { depth: u32 = 0, @@ -7120,349 +240,11 @@ pub const TlaCheck = struct { import_record_index: Index.Int = Index.invalid.get(), }; -/// Like Ast but slimmer and for bundling only. 
-/// -/// On Linux, the hottest function in the bundler is: -/// src.multi_array_list.MultiArrayList(src.js_ast.Ast).ensureTotalCapacity -/// https://share.firefox.dev/3NNlRKt -/// -/// So we make a slimmer version of Ast for bundling that doesn't allocate as much memory -pub const BundledAst = struct { - approximate_newline_count: u32 = 0, - nested_scope_slot_counts: SlotCounts = .{}, - - exports_kind: ExportsKind = .none, - - /// These are stored at the AST level instead of on individual AST nodes so - /// they can be manipulated efficiently without a full AST traversal - import_records: ImportRecord.List = .{}, - - hashbang: string = "", - parts: Part.List = .{}, - css: ?*bun.css.BundlerStyleSheet = null, - url_for_css: []const u8 = "", - symbols: Symbol.List = .{}, - module_scope: Scope = .{}, - char_freq: CharFreq = undefined, - exports_ref: Ref = Ref.None, - module_ref: Ref = Ref.None, - wrapper_ref: Ref = Ref.None, - require_ref: Ref = Ref.None, - top_level_await_keyword: logger.Range, - tla_check: TlaCheck = .{}, - - // These are used when bundling. They are filled in during the parser pass - // since we already have to traverse the AST then anyway and the parser pass - // is conveniently fully parallelized. - named_imports: NamedImports = .{}, - named_exports: NamedExports = .{}, - export_star_import_records: []u32 = &.{}, - - top_level_symbols_to_parts: TopLevelSymbolToParts = .{}, - - commonjs_named_exports: CommonJSNamedExports = .{}, - - redirect_import_record_index: u32 = std.math.maxInt(u32), - - /// Only populated when bundling. When --server-components is passed, this - /// will be .browser when it is a client component, and the server's target - /// on the server. - target: bun.options.Target = .browser, - - // const_values: ConstValuesMap = .{}, - ts_enums: Ast.TsEnumsMap = .{}, - - flags: BundledAst.Flags = .{}, - - pub const NamedImports = Ast.NamedImports; - pub const NamedExports = Ast.NamedExports; - pub const TopLevelSymbolToParts = Ast.TopLevelSymbolToParts; - pub const CommonJSNamedExports = Ast.CommonJSNamedExports; - pub const ConstValuesMap = Ast.ConstValuesMap; - - pub const Flags = packed struct(u8) { - // This is a list of CommonJS features. When a file uses CommonJS features, - // it's not a candidate for "flat bundling" and must be wrapped in its own - // closure. - uses_exports_ref: bool = false, - uses_module_ref: bool = false, - // uses_require_ref: bool = false, - uses_export_keyword: bool = false, - has_char_freq: bool = false, - force_cjs_to_esm: bool = false, - has_lazy_export: bool = false, - commonjs_module_exports_assigned_deoptimized: bool = false, - has_explicit_use_strict_directive: bool = false, - }; - - pub const empty = BundledAst.init(Ast.empty); - - pub fn toAST(this: *const BundledAst) Ast { - return .{ - .approximate_newline_count = this.approximate_newline_count, - .nested_scope_slot_counts = this.nested_scope_slot_counts, - - .exports_kind = this.exports_kind, - - .import_records = this.import_records, - - .hashbang = this.hashbang, - .parts = this.parts, - // This list may be mutated later, so we should store the capacity - .symbols = this.symbols, - .module_scope = this.module_scope, - .char_freq = if (this.flags.has_char_freq) this.char_freq else null, - .exports_ref = this.exports_ref, - .module_ref = this.module_ref, - .wrapper_ref = this.wrapper_ref, - .require_ref = this.require_ref, - .top_level_await_keyword = this.top_level_await_keyword, - - // These are used when bundling. 
They are filled in during the parser pass - // since we already have to traverse the AST then anyway and the parser pass - // is conveniently fully parallelized. - .named_imports = this.named_imports, - .named_exports = this.named_exports, - .export_star_import_records = this.export_star_import_records, - - .top_level_symbols_to_parts = this.top_level_symbols_to_parts, - - .commonjs_named_exports = this.commonjs_named_exports, - - .redirect_import_record_index = this.redirect_import_record_index, - - .target = this.target, - - // .const_values = this.const_values, - .ts_enums = this.ts_enums, - - .uses_exports_ref = this.flags.uses_exports_ref, - .uses_module_ref = this.flags.uses_module_ref, - // .uses_require_ref = ast.uses_require_ref, - .export_keyword = .{ .len = if (this.flags.uses_export_keyword) 1 else 0, .loc = .{} }, - .force_cjs_to_esm = this.flags.force_cjs_to_esm, - .has_lazy_export = this.flags.has_lazy_export, - .commonjs_module_exports_assigned_deoptimized = this.flags.commonjs_module_exports_assigned_deoptimized, - .directive = if (this.flags.has_explicit_use_strict_directive) "use strict" else null, - }; - } - - pub fn init(ast: Ast) BundledAst { - return .{ - .approximate_newline_count = @as(u32, @truncate(ast.approximate_newline_count)), - .nested_scope_slot_counts = ast.nested_scope_slot_counts, - - .exports_kind = ast.exports_kind, - - .import_records = ast.import_records, - - .hashbang = ast.hashbang, - .parts = ast.parts, - // This list may be mutated later, so we should store the capacity - .symbols = ast.symbols, - .module_scope = ast.module_scope, - .char_freq = ast.char_freq orelse undefined, - .exports_ref = ast.exports_ref, - .module_ref = ast.module_ref, - .wrapper_ref = ast.wrapper_ref, - .require_ref = ast.require_ref, - .top_level_await_keyword = ast.top_level_await_keyword, - // These are used when bundling. They are filled in during the parser pass - // since we already have to traverse the AST then anyway and the parser pass - // is conveniently fully parallelized. - .named_imports = ast.named_imports, - .named_exports = ast.named_exports, - .export_star_import_records = ast.export_star_import_records, - - // .allocator = ast.allocator, - .top_level_symbols_to_parts = ast.top_level_symbols_to_parts, - - .commonjs_named_exports = ast.commonjs_named_exports, - - .redirect_import_record_index = ast.redirect_import_record_index orelse std.math.maxInt(u32), - - .target = ast.target, - - // .const_values = ast.const_values, - .ts_enums = ast.ts_enums, - - .flags = .{ - .uses_exports_ref = ast.uses_exports_ref, - .uses_module_ref = ast.uses_module_ref, - // .uses_require_ref = ast.uses_require_ref, - .uses_export_keyword = ast.export_keyword.len > 0, - .has_char_freq = ast.char_freq != null, - .force_cjs_to_esm = ast.force_cjs_to_esm, - .has_lazy_export = ast.has_lazy_export, - .commonjs_module_exports_assigned_deoptimized = ast.commonjs_module_exports_assigned_deoptimized, - .has_explicit_use_strict_directive = strings.eqlComptime(ast.directive orelse "", "use strict"), - }, - }; - } - - /// TODO: Move this from being done on all parse tasks into the start of the linker. This currently allocates base64 encoding for every small file loaded thing. 
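
The `addUrlForCss` function below inlines small CSS assets as `data:` URLs using exactly this prefix-then-encode layout. A standalone sketch using only `std.base64` (the helper name and sizes here are illustrative, not Bun's `bun.base64` API):

```zig
const std = @import("std");

/// Build "data:<mime>;base64,<payload>" in one allocation:
/// write the prefix first, then encode directly after it.
fn makeDataUrl(allocator: std.mem.Allocator, mime: []const u8, contents: []const u8) ![]u8 {
    const enc = std.base64.standard.Encoder;
    const prefix_len = "data:".len + mime.len + ";base64,".len;
    const out = try allocator.alloc(u8, prefix_len + enc.calcSize(contents.len));
    _ = try std.fmt.bufPrint(out[0..prefix_len], "data:{s};base64,", .{mime});
    _ = enc.encode(out[prefix_len..], contents);
    return out;
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    const url = try makeDataUrl(allocator, "text/css", "body{color:red}");
    defer allocator.free(url);
    std.debug.print("{s}\n", .{url}); // data:text/css;base64,Ym9keXtjb2xvcjpyZWR9
}
```
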
- pub fn addUrlForCss( - this: *BundledAst, - allocator: std.mem.Allocator, - source: *const logger.Source, - mime_type_: ?[]const u8, - unique_key: ?[]const u8, - ) void { - { - const mime_type = if (mime_type_) |m| m else MimeType.byExtension(bun.strings.trimLeadingChar(std.fs.path.extension(source.path.text), '.')).value; - const contents = source.contents; - // TODO: make this configurable - const COPY_THRESHOLD = 128 * 1024; // 128kb - const should_copy = contents.len >= COPY_THRESHOLD and unique_key != null; - if (should_copy) return; - this.url_for_css = url_for_css: { - - // Encode as base64 - const encode_len = bun.base64.encodeLen(contents); - const data_url_prefix_len = "data:".len + mime_type.len + ";base64,".len; - const total_buffer_len = data_url_prefix_len + encode_len; - var encoded = allocator.alloc(u8, total_buffer_len) catch bun.outOfMemory(); - _ = std.fmt.bufPrint(encoded[0..data_url_prefix_len], "data:{s};base64,", .{mime_type}) catch unreachable; - const len = bun.base64.encode(encoded[data_url_prefix_len..], contents); - break :url_for_css encoded[0 .. data_url_prefix_len + len]; - }; - } - } -}; - pub const Span = struct { text: string = "", range: logger.Range = .{}, }; -/// This is for TypeScript "enum" and "namespace" blocks. Each block can -/// potentially be instantiated multiple times. The exported members of each -/// block are merged into a single namespace while the non-exported code is -/// still scoped to just within that block: -/// -/// let x = 1; -/// namespace Foo { -/// let x = 2; -/// export let y = 3; -/// } -/// namespace Foo { -/// console.log(x); // 1 -/// console.log(y); // 3 -/// } -/// -/// Doing this also works inside an enum: -/// -/// enum Foo { -/// A = 3, -/// B = A + 1, -/// } -/// enum Foo { -/// C = A + 2, -/// } -/// console.log(Foo.B) // 4 -/// console.log(Foo.C) // 5 -/// -/// This is a form of identifier lookup that works differently than the -/// hierarchical scope-based identifier lookup in JavaScript. Lookup now needs -/// to search sibling scopes in addition to parent scopes. This is accomplished -/// by sharing the map of exported members between all matching sibling scopes. -pub const TSNamespaceScope = struct { - /// This is specific to this namespace block. It's the argument of the - /// immediately-invoked function expression that the namespace block is - /// compiled into: - /// - /// var ns; - /// (function (ns2) { - /// ns2.x = 123; - /// })(ns || (ns = {})); - /// - /// This variable is "ns2" in the above example. It's the symbol to use when - /// generating property accesses off of this namespace when it's in scope. - arg_ref: Ref, - - /// This is shared between all sibling namespace blocks - exported_members: *TSNamespaceMemberMap, - - /// This is a lazily-generated map of identifiers that actually represent - /// property accesses to this namespace's properties. For example: - /// - /// namespace x { - /// export let y = 123 - /// } - /// namespace x { - /// export let z = y - /// } - /// - /// This should be compiled into the following code: - /// - /// var x; - /// (function(x2) { - /// x2.y = 123; - /// })(x || (x = {})); - /// (function(x3) { - /// x3.z = x3.y; - /// })(x || (x = {})); - /// - /// When we try to find the symbol "y", we instead return one of these lazily - /// generated proxy symbols that represent the property access "x3.y". This - /// map is unique per namespace block because "x3" is the argument symbol that - /// is specific to that particular namespace block. 
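
A sketch of that lazily-populated map with toy types (`Ref` as a bare integer, a counter standing in for real symbol allocation); the cache lives per block because each proxy must reference that block's own argument symbol (the "x3" in the example above):

```zig
const std = @import("std");

const Ref = u32; // stand-in for Bun's Ref

/// Per-namespace-block cache: the first lookup of "y" creates a proxy
/// symbol meaning "<arg>.y"; later lookups in the same block reuse it.
const NamespaceBlock = struct {
    arg_ref: Ref, // the "x3"-style argument symbol for this block
    next_ref: *Ref,
    property_accesses: std.StringArrayHashMapUnmanaged(Ref) = .{},

    fn propertyAccessRef(self: *NamespaceBlock, allocator: std.mem.Allocator, name: []const u8) !Ref {
        const entry = try self.property_accesses.getOrPut(allocator, name);
        if (!entry.found_existing) {
            entry.value_ptr.* = self.next_ref.*;
            self.next_ref.* += 1;
        }
        return entry.value_ptr.*;
    }
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    var counter: Ref = 100;
    var block = NamespaceBlock{ .arg_ref = 3, .next_ref = &counter };
    defer block.property_accesses.deinit(allocator);

    const a = try block.propertyAccessRef(allocator, "y");
    const b = try block.propertyAccessRef(allocator, "y"); // cached
    std.debug.print("same proxy ref: {}\n", .{a == b}); // true
}
```
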
- property_accesses: bun.StringArrayHashMapUnmanaged(Ref) = .{}, - - /// Even though enums are like namespaces and both enums and namespaces allow - /// implicit references to properties of sibling scopes, they behave like - /// separate, er, namespaces. Implicit references only work namespace-to- - /// namespace and enum-to-enum. They do not work enum-to-namespace. And I'm - /// not sure what's supposed to happen for the namespace-to-enum case because - /// the compiler crashes: https://github.com/microsoft/TypeScript/issues/46891. - /// So basically these both work: - /// - /// enum a { b = 1 } - /// enum a { c = b } - /// - /// namespace x { export let y = 1 } - /// namespace x { export let z = y } - /// - /// This doesn't work: - /// - /// enum a { b = 1 } - /// namespace a { export let c = b } - /// - /// And this crashes the TypeScript compiler: - /// - /// namespace a { export let b = 1 } - /// enum a { c = b } - /// - /// Therefore we only allow enum/enum and namespace/namespace interactions. - is_enum_scope: bool, -}; - -pub const TSNamespaceMemberMap = bun.StringArrayHashMapUnmanaged(TSNamespaceMember); - -pub const TSNamespaceMember = struct { - loc: logger.Loc, - data: Data, - - pub const Data = union(enum) { - /// "namespace ns { export let it }" - property, - /// "namespace ns { export namespace it {} }" - namespace: *TSNamespaceMemberMap, - /// "enum ns { it }" - enum_number: f64, - /// "enum ns { it = 'it' }" - enum_string: *E.String, - /// "enum ns { it = something() }" - enum_property: void, - - pub fn isEnum(data: Data) bool { - return switch (data) { - inline else => |_, tag| comptime std.mem.startsWith(u8, @tagName(tag), "enum_"), - }; - } - }; -}; - /// Inlined enum values can only be numbers and strings /// This type special cases an encoding similar to JSValue, where nan-boxing is used /// to encode both a 64-bit pointer or a 64-bit float using 64 bits. @@ -7801,1327 +583,13 @@ pub const StrictModeKind = enum(u4) { } }; -pub const Scope = struct { - pub const MemberHashMap = bun.StringHashMapUnmanaged(Member); - - id: usize = 0, - kind: Kind = Kind.block, - parent: ?*Scope = null, - children: BabyList(*Scope) = .{}, - members: MemberHashMap = .{}, - generated: BabyList(Ref) = .{}, - - // This is used to store the ref of the label symbol for ScopeLabel scopes. - label_ref: ?Ref = null, - label_stmt_is_loop: bool = false, - - // If a scope contains a direct eval() expression, then none of the symbols - // inside that scope can be renamed. We conservatively assume that the - // evaluated code might reference anything that it has access to. 
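
One conservative way to enforce this is a subtree check at rename time; whether Bun checks at this exact point is an assumption of this toy sketch (simplified `Scope`, no symbol table). A symbol declared in a scope is only safe to rename if no scope at or below it contains a direct `eval`, since the evaluated code could reference it by its original name:

```zig
const std = @import("std");

const Scope = struct {
    contains_direct_eval: bool = false,
    children: []const *const Scope = &.{},
};

/// A symbol declared in `s` is visible to any direct eval() at or
/// below `s`, so renaming is only safe if the whole subtree is clean.
fn safeToRenameSymbolsIn(s: *const Scope) bool {
    if (s.contains_direct_eval) return false;
    for (s.children) |child| {
        if (!safeToRenameSymbolsIn(child)) return false;
    }
    return true;
}

pub fn main() void {
    const inner = Scope{ .contains_direct_eval = true }; // e.g. `function f() { eval(code) }`
    const outer = Scope{ .children = &.{&inner} };
    std.debug.print("{}\n", .{safeToRenameSymbolsIn(&outer)}); // false
}
```
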
- contains_direct_eval: bool = false, - - // This is to help forbid "arguments" inside class body scopes - forbid_arguments: bool = false, - - strict_mode: StrictModeKind = StrictModeKind.sloppy_mode, - - is_after_const_local_prefix: bool = false, - - // This will be non-null if this is a TypeScript "namespace" or "enum" - ts_namespace: ?*TSNamespaceScope = null, - - pub const NestedScopeMap = std.AutoArrayHashMap(u32, bun.BabyList(*Scope)); - - pub fn getMemberHash(name: []const u8) u64 { - return bun.StringHashMapContext.hash(.{}, name); - } - - pub fn getMemberWithHash(this: *const Scope, name: []const u8, hash_value: u64) ?Member { - const hashed = bun.StringHashMapContext.Prehashed{ - .value = hash_value, - .input = name, - }; - return this.members.getAdapted(name, hashed); - } - - pub fn getOrPutMemberWithHash( - this: *Scope, - allocator: std.mem.Allocator, - name: []const u8, - hash_value: u64, - ) !MemberHashMap.GetOrPutResult { - const hashed = bun.StringHashMapContext.Prehashed{ - .value = hash_value, - .input = name, - }; - return this.members.getOrPutContextAdapted(allocator, name, hashed, .{}); - } - - pub fn reset(this: *Scope) void { - this.children.clearRetainingCapacity(); - this.generated.clearRetainingCapacity(); - this.members.clearRetainingCapacity(); - this.parent = null; - this.id = 0; - this.label_ref = null; - this.label_stmt_is_loop = false; - this.contains_direct_eval = false; - this.strict_mode = .sloppy_mode; - this.kind = .block; - } - - // Do not make this a packed struct - // Two hours of debugging time lost to that. - // It causes a crash due to undefined memory - pub const Member = struct { - ref: Ref, - loc: logger.Loc, - - pub fn eql(a: Member, b: Member) bool { - return @call(bun.callmod_inline, Ref.eql, .{ a.ref, b.ref }) and a.loc.start == b.loc.start; - } - }; - - pub const SymbolMergeResult = enum { - forbidden, - replace_with_new, - overwrite_with_new, - keep_existing, - become_private_get_set_pair, - become_private_static_get_set_pair, - }; - - pub fn canMergeSymbols( - scope: *Scope, - existing: Symbol.Kind, - new: Symbol.Kind, - comptime is_typescript_enabled: bool, - ) SymbolMergeResult { - if (existing == .unbound) { - return .replace_with_new; - } - - if (comptime is_typescript_enabled) { - // In TypeScript, imports are allowed to silently collide with symbols within - // the module. Presumably this is because the imports may be type-only: - // - // import {Foo} from 'bar' - // class Foo {} - // - if (existing == .import) { - return .replace_with_new; - } - - // "enum Foo {} enum Foo {}" - // "namespace Foo { ... } enum Foo {}" - if (new == .ts_enum and (existing == .ts_enum or existing == .ts_namespace)) { - return .replace_with_new; - } - - // "namespace Foo { ... } namespace Foo { ... }" - // "function Foo() {} namespace Foo { ... }" - // "enum Foo {} namespace Foo { ... 
}" - if (new == .ts_namespace) { - switch (existing) { - .ts_namespace, - .ts_enum, - .hoisted_function, - .generator_or_async_function, - .class, - => return .keep_existing, - else => {}, - } - } - } - - // "var foo; var foo;" - // "var foo; function foo() {}" - // "function foo() {} var foo;" - // "function *foo() {} function *foo() {}" but not "{ function *foo() {} function *foo() {} }" - if (Symbol.isKindHoistedOrFunction(new) and - Symbol.isKindHoistedOrFunction(existing) and - (scope.kind == .entry or scope.kind == .function_body or scope.kind == .function_args or - (new == existing and Symbol.isKindHoisted(existing)))) - { - return .replace_with_new; - } - - // "get #foo() {} set #foo() {}" - // "set #foo() {} get #foo() {}" - if ((existing == .private_get and new == .private_set) or - (existing == .private_set and new == .private_get)) - { - return .become_private_get_set_pair; - } - if ((existing == .private_static_get and new == .private_static_set) or - (existing == .private_static_set and new == .private_static_get)) - { - return .become_private_static_get_set_pair; - } - - // "try {} catch (e) { var e }" - if (existing == .catch_identifier and new == .hoisted) { - return .replace_with_new; - } - - // "function() { var arguments }" - if (existing == .arguments and new == .hoisted) { - return .keep_existing; - } - - // "function() { let arguments }" - if (existing == .arguments and new != .hoisted) { - return .overwrite_with_new; - } - - return .forbidden; - } - - pub const Kind = enum(u8) { - block, - with, - label, - class_name, - class_body, - catch_binding, - - // The scopes below stop hoisted variables from extending into parent scopes - entry, // This is a module, TypeScript enum, or TypeScript namespace - function_args, - function_body, - class_static_init, - - pub fn jsonStringify(self: @This(), writer: anytype) !void { - return try writer.write(@tagName(self)); - } - }; - - pub fn recursiveSetStrictMode(s: *Scope, kind: StrictModeKind) void { - if (s.strict_mode == .sloppy_mode) { - s.strict_mode = kind; - for (s.children.slice()) |child| { - child.recursiveSetStrictMode(kind); - } - } - } - - pub inline fn kindStopsHoisting(s: *const Scope) bool { - return @intFromEnum(s.kind) >= @intFromEnum(Kind.entry); - } -}; - pub fn printmem(comptime format: string, args: anytype) void { defer Output.flush(); Output.initTest(); Output.print(format, args); } -pub const Macro = struct { - const JavaScript = bun.JSC; - const Resolver = @import("./resolver/resolver.zig").Resolver; - const isPackagePath = @import("./resolver/resolver.zig").isPackagePath; - const ResolveResult = @import("./resolver/resolver.zig").Result; - const DotEnv = @import("./env_loader.zig"); - const js = @import("./bun.js/javascript_core_c_api.zig"); - const Transpiler = bun.Transpiler; - const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint; - const MacroRemap = @import("./resolver/package_json.zig").MacroMap; - pub const MacroRemapEntry = @import("./resolver/package_json.zig").MacroImportReplacementMap; - - pub const namespace: string = "macro"; - pub const namespaceWithColon: string = namespace ++ ":"; - - pub fn isMacroPath(str: string) bool { - return strings.hasPrefixComptime(str, namespaceWithColon); - } - - pub const MacroContext = struct { - pub const MacroMap = std.AutoArrayHashMap(i32, Macro); - - resolver: *Resolver, - env: *DotEnv.Loader, - macros: MacroMap, - remap: MacroRemap, - javascript_object: JSC.JSValue = JSC.JSValue.zero, - - pub fn getRemap(this: MacroContext, path: string) 
?MacroRemapEntry { - if (this.remap.entries.len == 0) return null; - return this.remap.get(path); - } - - pub fn init(transpiler: *Transpiler) MacroContext { - return MacroContext{ - .macros = MacroMap.init(default_allocator), - .resolver = &transpiler.resolver, - .env = transpiler.env, - .remap = transpiler.options.macro_remap, - }; - } - - pub fn call( - this: *MacroContext, - import_record_path: string, - source_dir: string, - log: *logger.Log, - source: *const logger.Source, - import_range: logger.Range, - caller: Expr, - function_name: string, - ) anyerror!Expr { - Expr.Data.Store.disable_reset = true; - Stmt.Data.Store.disable_reset = true; - defer Expr.Data.Store.disable_reset = false; - defer Stmt.Data.Store.disable_reset = false; - // const is_package_path = isPackagePath(specifier); - const import_record_path_without_macro_prefix = if (isMacroPath(import_record_path)) - import_record_path[namespaceWithColon.len..] - else - import_record_path; - - bun.assert(!isMacroPath(import_record_path_without_macro_prefix)); - - const input_specifier = brk: { - if (JSC.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| { - break :brk replacement.path; - } - - const resolve_result = this.resolver.resolve(source_dir, import_record_path_without_macro_prefix, .stmt) catch |err| { - switch (err) { - error.ModuleNotFound => { - log.addResolveError( - source, - import_range, - log.msgs.allocator, - "Macro \"{s}\" not found", - .{import_record_path}, - .stmt, - err, - ) catch unreachable; - return error.MacroNotFound; - }, - else => { - log.addRangeErrorFmt( - source, - import_range, - log.msgs.allocator, - "{s} resolving macro \"{s}\"", - .{ @errorName(err), import_record_path }, - ) catch unreachable; - return err; - }, - } - }; - break :brk resolve_result.path_pair.primary.text; - }; - - var specifier_buf: [64]u8 = undefined; - var specifier_buf_len: u32 = 0; - const hash = MacroEntryPoint.generateID( - input_specifier, - function_name, - &specifier_buf, - &specifier_buf_len, - ); - - const macro_entry = this.macros.getOrPut(hash) catch unreachable; - if (!macro_entry.found_existing) { - macro_entry.value_ptr.* = Macro.init( - default_allocator, - this.resolver, - input_specifier, - log, - this.env, - function_name, - specifier_buf[0..specifier_buf_len], - hash, - ) catch |err| { - macro_entry.value_ptr.* = Macro{ .resolver = undefined, .disabled = true }; - return err; - }; - Output.flush(); - } - defer Output.flush(); - - const macro = macro_entry.value_ptr.*; - if (macro.disabled) { - return caller; - } - macro.vm.enableMacroMode(); - defer macro.vm.disableMacroMode(); - - const Wrapper = struct { - args: std.meta.ArgsTuple(@TypeOf(Macro.Runner.run)), - ret: Runner.MacroError!Expr, - - pub fn call(self: *@This()) void { - self.ret = @call(.auto, Macro.Runner.run, self.args); - } - }; - var wrapper = Wrapper{ - .args = .{ - macro, - log, - default_allocator, - function_name, - caller, - source, - hash, - this.javascript_object, - }, - .ret = undefined, - }; - - macro.vm.runWithAPILock(Wrapper, &wrapper, Wrapper.call); - return try wrapper.ret; - // this.macros.getOrPut(key: K) - } - }; - - pub const MacroResult = struct { - import_statements: []S.Import = &[_]S.Import{}, - replacement: Expr, - }; - - resolver: *Resolver, - vm: *JavaScript.VirtualMachine = undefined, - - resolved: ResolveResult = undefined, - disabled: bool = false, - - pub fn init( - _: std.mem.Allocator, - resolver: *Resolver, - input_specifier: []const u8, - log: *logger.Log, - env: *DotEnv.Loader, 
- function_name: string, - specifier: string, - hash: i32, - ) !Macro { - var vm: *JavaScript.VirtualMachine = if (JavaScript.VirtualMachine.isLoaded()) - JavaScript.VirtualMachine.get() - else brk: { - const old_transform_options = resolver.opts.transform_options; - defer resolver.opts.transform_options = old_transform_options; - - // JSC needs to be initialized if building from CLI - JSC.initialize(false); - - var _vm = try JavaScript.VirtualMachine.init(.{ - .allocator = default_allocator, - .args = resolver.opts.transform_options, - .log = log, - .is_main_thread = false, - .env_loader = env, - }); - - _vm.enableMacroMode(); - _vm.eventLoop().ensureWaker(); - - try _vm.transpiler.configureDefines(); - break :brk _vm; - }; - - vm.enableMacroMode(); - - const loaded_result = try vm.loadMacroEntryPoint(input_specifier, function_name, specifier, hash); - - switch (loaded_result.unwrap(vm.jsc, .leave_unhandled)) { - .rejected => |result| { - _ = vm.unhandledRejection(vm.global, result, loaded_result.asValue()); - vm.disableMacroMode(); - return error.MacroLoadError; - }, - else => {}, - } - - return Macro{ - .vm = vm, - .resolver = resolver, - }; - } - - pub const Runner = struct { - const VisitMap = std.AutoHashMapUnmanaged(JSC.JSValue, Expr); - - threadlocal var args_buf: [3]js.JSObjectRef = undefined; - threadlocal var exception_holder: JSC.ZigException.Holder = undefined; - pub const MacroError = error{ MacroFailed, OutOfMemory } || ToJSError || bun.JSError; - - pub const Run = struct { - caller: Expr, - function_name: string, - macro: *const Macro, - global: *JSC.JSGlobalObject, - allocator: std.mem.Allocator, - id: i32, - log: *logger.Log, - source: *const logger.Source, - visited: VisitMap = VisitMap{}, - is_top_level: bool = false, - - pub fn runAsync( - macro: Macro, - log: *logger.Log, - allocator: std.mem.Allocator, - function_name: string, - caller: Expr, - args: []JSC.JSValue, - source: *const logger.Source, - id: i32, - ) MacroError!Expr { - const macro_callback = macro.vm.macros.get(id) orelse return caller; - - const result = js.JSObjectCallAsFunctionReturnValueHoldingAPILock( - macro.vm.global, - macro_callback, - null, - args.len, - @as([*]js.JSObjectRef, @ptrCast(args.ptr)), - ); - - var runner = Run{ - .caller = caller, - .function_name = function_name, - .macro = ¯o, - .allocator = allocator, - .global = macro.vm.global, - .id = id, - .log = log, - .source = source, - .visited = VisitMap{}, - }; - - defer runner.visited.deinit(allocator); - - return try runner.run( - result, - ); - } - - pub fn run( - this: *Run, - value: JSC.JSValue, - ) MacroError!Expr { - return switch (JSC.ConsoleObject.Formatter.Tag.get(value, this.global).tag) { - .Error => this.coerce(value, .Error), - .Undefined => this.coerce(value, .Undefined), - .Null => this.coerce(value, .Null), - .Private => this.coerce(value, .Private), - .Boolean => this.coerce(value, .Boolean), - .Array => this.coerce(value, .Array), - .Object => this.coerce(value, .Object), - .toJSON, .JSON => this.coerce(value, .JSON), - .Integer => this.coerce(value, .Integer), - .Double => this.coerce(value, .Double), - .String => this.coerce(value, .String), - .Promise => this.coerce(value, .Promise), - else => brk: { - const name = value.getClassInfoName() orelse "unknown"; - - this.log.addErrorFmt( - this.source, - this.caller.loc, - this.allocator, - "cannot coerce {s} ({s}) to Bun's AST. 
Please return a simpler type", - .{ name, @tagName(value.jsType()) }, - ) catch unreachable; - break :brk error.MacroFailed; - }, - }; - } - - pub fn coerce( - this: *Run, - value: JSC.JSValue, - comptime tag: JSC.ConsoleObject.Formatter.Tag, - ) MacroError!Expr { - switch (comptime tag) { - .Error => { - _ = this.macro.vm.uncaughtException(this.global, value, false); - return this.caller; - }, - .Undefined => if (this.is_top_level) - return this.caller - else - return Expr.init(E.Undefined, E.Undefined{}, this.caller.loc), - .Null => return Expr.init(E.Null, E.Null{}, this.caller.loc), - .Private => { - this.is_top_level = false; - const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable; - if (_entry.found_existing) { - return _entry.value_ptr.*; - } - - var blob_: ?JSC.WebCore.Blob = null; - const mime_type: ?MimeType = null; - - if (value.jsType() == .DOMWrapper) { - if (value.as(JSC.WebCore.Response)) |resp| { - return this.run(try resp.getBlobWithoutCallFrame(this.global)); - } else if (value.as(JSC.WebCore.Request)) |resp| { - return this.run(try resp.getBlobWithoutCallFrame(this.global)); - } else if (value.as(JSC.WebCore.Blob)) |resp| { - blob_ = resp.*; - blob_.?.allocator = null; - } else if (value.as(bun.api.ResolveMessage) != null or value.as(bun.api.BuildMessage) != null) { - _ = this.macro.vm.uncaughtException(this.global, value, false); - return error.MacroFailed; - } - } - - if (blob_) |*blob| { - const out_expr = Expr.fromBlob( - blob, - this.allocator, - mime_type, - this.log, - this.caller.loc, - ) catch { - blob.deinit(); - return error.MacroFailed; - }; - if (out_expr.data == .e_string) { - blob.deinit(); - } - - return out_expr; - } - - return Expr.init(E.String, E.String.empty, this.caller.loc); - }, - - .Boolean => { - return Expr{ .data = .{ .e_boolean = .{ .value = value.toBoolean() } }, .loc = this.caller.loc }; - }, - JSC.ConsoleObject.Formatter.Tag.Array => { - this.is_top_level = false; - - const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable; - if (_entry.found_existing) { - switch (_entry.value_ptr.*.data) { - .e_object, .e_array => { - this.log.addErrorFmt(this.source, this.caller.loc, this.allocator, "converting circular structure to Bun AST is not implemented yet", .{}) catch unreachable; - return error.MacroFailed; - }, - else => {}, - } - return _entry.value_ptr.*; - } - - var iter = JSC.JSArrayIterator.init(value, this.global); - if (iter.len == 0) { - const result = Expr.init( - E.Array, - E.Array{ - .items = ExprNodeList.init(&[_]Expr{}), - .was_originally_macro = true, - }, - this.caller.loc, - ); - _entry.value_ptr.* = result; - return result; - } - var array = this.allocator.alloc(Expr, iter.len) catch unreachable; - var out = Expr.init( - E.Array, - E.Array{ - .items = ExprNodeList.init(array[0..0]), - .was_originally_macro = true, - }, - this.caller.loc, - ); - _entry.value_ptr.* = out; - - errdefer this.allocator.free(array); - var i: usize = 0; - while (iter.next()) |item| { - array[i] = try this.run(item); - if (array[i].isMissing()) - continue; - i += 1; - } - out.data.e_array.items = ExprNodeList.init(array); - _entry.value_ptr.* = out; - return out; - }, - // TODO: optimize this - JSC.ConsoleObject.Formatter.Tag.Object => { - this.is_top_level = false; - const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable; - if (_entry.found_existing) { - switch (_entry.value_ptr.*.data) { - .e_object, .e_array => { - this.log.addErrorFmt(this.source, this.caller.loc, this.allocator, 
"converting circular structure to Bun AST is not implemented yet", .{}) catch unreachable; - return error.MacroFailed; - }, - else => {}, - } - return _entry.value_ptr.*; - } - // SAFETY: tag ensures `value` is an object. - const obj = value.getObject() orelse unreachable; - var object_iter = try JSC.JSPropertyIterator(.{ - .skip_empty_name = false, - .include_value = true, - }).init(this.global, obj); - defer object_iter.deinit(); - var properties = this.allocator.alloc(G.Property, object_iter.len) catch unreachable; - errdefer this.allocator.free(properties); - var out = Expr.init( - E.Object, - E.Object{ - .properties = BabyList(G.Property).init(properties), - .was_originally_macro = true, - }, - this.caller.loc, - ); - _entry.value_ptr.* = out; - - while (try object_iter.next()) |prop| { - properties[object_iter.i] = G.Property{ - .key = Expr.init(E.String, E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable), this.caller.loc), - .value = try this.run(object_iter.value), - }; - } - out.data.e_object.properties = BabyList(G.Property).init(properties[0..object_iter.i]); - _entry.value_ptr.* = out; - return out; - }, - - .JSON => { - this.is_top_level = false; - // if (console_tag.cell == .JSDate) { - // // in the code for printing dates, it never exceeds this amount - // var iso_string_buf = this.allocator.alloc(u8, 36) catch unreachable; - // var str = JSC.ZigString.init(""); - // value.jsonStringify(this.global, 0, &str); - // var out_buf: []const u8 = std.fmt.bufPrint(iso_string_buf, "{}", .{str}) catch ""; - // if (out_buf.len > 2) { - // // trim the quotes - // out_buf = out_buf[1 .. out_buf.len - 1]; - // } - // return Expr.init(E.New, E.New{.target = Expr.init(E.Dot{.target = E}) }) - // } - }, - - .Integer => { - return Expr.init(E.Number, E.Number{ .value = @as(f64, @floatFromInt(value.toInt32())) }, this.caller.loc); - }, - .Double => { - return Expr.init(E.Number, E.Number{ .value = value.asNumber() }, this.caller.loc); - }, - .String => { - var bun_str = try value.toBunString(this.global); - defer bun_str.deref(); - - // encode into utf16 so the printer escapes the string correctly - var utf16_bytes = this.allocator.alloc(u16, bun_str.length()) catch unreachable; - const out_slice = utf16_bytes[0 .. (bun_str.encodeInto(std.mem.sliceAsBytes(utf16_bytes), .utf16le) catch 0) / 2]; - return Expr.init(E.String, E.String.init(out_slice), this.caller.loc); - }, - .Promise => { - const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable; - if (_entry.found_existing) { - return _entry.value_ptr.*; - } - - const promise = value.asAnyPromise() orelse @panic("Unexpected promise type"); - - this.macro.vm.waitForPromise(promise); - - const promise_result = promise.result(this.macro.vm.jsc); - const rejected = promise.status(this.macro.vm.jsc) == .rejected; - - if (promise_result.isUndefined() and this.is_top_level) { - this.is_top_level = false; - return this.caller; - } - - if (rejected or promise_result.isError() or promise_result.isAggregateError(this.global) or promise_result.isException(this.global.vm())) { - _ = this.macro.vm.unhandledRejection(this.global, promise_result, promise.asValue()); - return error.MacroFailed; - } - this.is_top_level = false; - const result = try this.run(promise_result); - - _entry.value_ptr.* = result; - return result; - }, - else => {}, - } - - this.log.addErrorFmt( - this.source, - this.caller.loc, - this.allocator, - "cannot coerce {s} to Bun's AST. 
Please return a simpler type", - .{@tagName(value.jsType())}, - ) catch unreachable; - return error.MacroFailed; - } - }; - - pub fn run( - macro: Macro, - log: *logger.Log, - allocator: std.mem.Allocator, - function_name: string, - caller: Expr, - source: *const logger.Source, - id: i32, - javascript_object: JSC.JSValue, - ) MacroError!Expr { - if (comptime Environment.isDebug) Output.prettyln("[macro] call {s}", .{function_name}); - - exception_holder = JSC.ZigException.Holder.init(); - var js_args: []JSC.JSValue = &.{}; - var js_processed_args_len: usize = 0; - defer { - for (js_args[0..js_processed_args_len -| @as(usize, @intFromBool(javascript_object != .zero))]) |arg| { - arg.unprotect(); - } - - allocator.free(js_args); - } - - const globalObject = JSC.VirtualMachine.get().global; - - switch (caller.data) { - .e_call => |call| { - const call_args: []Expr = call.args.slice(); - js_args = try allocator.alloc(JSC.JSValue, call_args.len + @as(usize, @intFromBool(javascript_object != .zero))); - js_processed_args_len = js_args.len; - - for (0.., call_args, js_args[0..call_args.len]) |i, in, *out| { - const value = in.toJS( - allocator, - globalObject, - ) catch |e| { - // Keeping a separate variable instead of modifying js_args.len - // due to allocator.free call in defer - js_processed_args_len = i; - return e; - }; - value.protect(); - out.* = value; - } - }, - .e_template => { - @panic("TODO: support template literals in macros"); - }, - else => { - @panic("Unexpected caller type"); - }, - } - - if (javascript_object != .zero) { - if (js_args.len == 0) { - js_args = try allocator.alloc(JSC.JSValue, 1); - } - - js_args[js_args.len - 1] = javascript_object; - } - - const CallFunction = @TypeOf(Run.runAsync); - const CallArgs = std.meta.ArgsTuple(CallFunction); - const CallData = struct { - threadlocal var call_args: CallArgs = undefined; - threadlocal var result: MacroError!Expr = undefined; - pub fn callWrapper(args: CallArgs) MacroError!Expr { - JSC.markBinding(@src()); - call_args = args; - Bun__startMacro(&call, JSC.VirtualMachine.get().global); - return result; - } - - pub fn call() callconv(.C) void { - const call_args_copy = call_args; - const local_result = @call(.auto, Run.runAsync, call_args_copy); - result = local_result; - } - }; - - // TODO: can change back to `return CallData.callWrapper(.{` - // when https://github.com/ziglang/zig/issues/16242 is fixed - return CallData.callWrapper(CallArgs{ - macro, - log, - allocator, - function_name, - caller, - js_args, - source, - id, - }); - } - - extern "c" fn Bun__startMacro(function: *const anyopaque, *anyopaque) void; - }; -}; - -pub const ASTMemoryAllocator = struct { - const SFA = std.heap.StackFallbackAllocator(@min(8192, std.heap.page_size_min)); - - stack_allocator: SFA = undefined, - bump_allocator: std.mem.Allocator = undefined, - allocator: std.mem.Allocator, - previous: ?*ASTMemoryAllocator = null, - - pub fn enter(this: *ASTMemoryAllocator, allocator: std.mem.Allocator) ASTMemoryAllocator.Scope { - this.allocator = allocator; - this.stack_allocator = SFA{ - .buffer = undefined, - .fallback_allocator = allocator, - .fixed_buffer_allocator = undefined, - }; - this.bump_allocator = this.stack_allocator.get(); - this.previous = null; - var ast_scope = ASTMemoryAllocator.Scope{ - .current = this, - .previous = Stmt.Data.Store.memory_allocator, - }; - ast_scope.enter(); - return ast_scope; - } - pub const Scope = struct { - current: ?*ASTMemoryAllocator = null, - previous: ?*ASTMemoryAllocator = null, - - pub fn enter(this: 
*@This()) void { - bun.debugAssert(Expr.Data.Store.memory_allocator == Stmt.Data.Store.memory_allocator); - - this.previous = Expr.Data.Store.memory_allocator; - - const current = this.current; - - Expr.Data.Store.memory_allocator = current; - Stmt.Data.Store.memory_allocator = current; - - if (current == null) { - Stmt.Data.Store.begin(); - Expr.Data.Store.begin(); - } - } - - pub fn exit(this: *const @This()) void { - Expr.Data.Store.memory_allocator = this.previous; - Stmt.Data.Store.memory_allocator = this.previous; - } - }; - - pub fn reset(this: *ASTMemoryAllocator) void { - this.stack_allocator = SFA{ - .buffer = undefined, - .fallback_allocator = this.allocator, - .fixed_buffer_allocator = undefined, - }; - this.bump_allocator = this.stack_allocator.get(); - } - - pub fn push(this: *ASTMemoryAllocator) void { - Stmt.Data.Store.memory_allocator = this; - Expr.Data.Store.memory_allocator = this; - } - - pub fn pop(this: *ASTMemoryAllocator) void { - const prev = this.previous; - bun.assert(prev != this); - Stmt.Data.Store.memory_allocator = prev; - Expr.Data.Store.memory_allocator = prev; - this.previous = null; - } - - pub fn append(this: ASTMemoryAllocator, comptime ValueType: type, value: anytype) *ValueType { - const ptr = this.bump_allocator.create(ValueType) catch unreachable; - ptr.* = value; - return ptr; - } - - /// Initialize ASTMemoryAllocator as `undefined`, and call this. - pub fn initWithoutStack(this: *ASTMemoryAllocator, arena: std.mem.Allocator) void { - this.stack_allocator = SFA{ - .buffer = undefined, - .fallback_allocator = arena, - .fixed_buffer_allocator = .init(&.{}), - }; - this.bump_allocator = this.stack_allocator.get(); - } -}; - -pub const UseDirective = enum(u2) { - // TODO: Remove this, and provide `UseDirective.Optional` instead - none, - /// "use client" - client, - /// "use server" - server, - - pub const Boundering = enum(u2) { - client = @intFromEnum(UseDirective.client), - server = @intFromEnum(UseDirective.server), - }; - - pub const Flags = struct { - has_any_client: bool = false, - }; - - pub fn isBoundary(this: UseDirective, other: UseDirective) bool { - if (this == other or other == .none) - return false; - - return true; - } - - pub fn boundering(this: UseDirective, other: UseDirective) ?Boundering { - if (this == other or other == .none) - return null; - return @enumFromInt(@intFromEnum(other)); - } - - pub fn parse(contents: []const u8) ?UseDirective { - const truncated = std.mem.trimLeft(u8, contents, " \t\n\r;"); - - if (truncated.len < "'use client';".len) - return .none; - - const directive_string = truncated[0.."'use client';".len].*; - - const first_quote = directive_string[0]; - const last_quote = directive_string[directive_string.len - 2]; - if (first_quote != last_quote or (first_quote != '"' and first_quote != '\'' and first_quote != '`')) - return .none; - - const unquoted = directive_string[1 .. directive_string.len - 2]; - - if (strings.eqlComptime(unquoted, "use client")) { - return .client; - } - - if (strings.eqlComptime(unquoted, "use server")) { - return .server; - } - - return null; - } -}; - -/// Represents a boundary between client and server code. Every boundary -/// gets bundled twice, once for the desired target, and once to generate -/// a module of "references". Specifically, the generated file takes the -/// canonical Ast as input to derive a wrapper. See `Framework.ServerComponents` -/// for more details about this generated file. 
-/// -/// This is sometimes abbreviated as SCB -pub const ServerComponentBoundary = struct { - use_directive: UseDirective, - - /// The index of the original file. - source_index: Index.Int, - - /// Index to the file imported on the opposite platform, which is - /// generated by the bundler. For client components, this is the - /// server's code. For server actions, this is the client's code. - reference_source_index: Index.Int, - - /// When `bake.Framework.ServerComponents.separate_ssr_graph` is enabled this - /// points to the separated module. When the SSR graph is not separate, this is - /// equal to `reference_source_index` - // - // TODO: Is this used for server actions. - ssr_source_index: Index.Int, - - /// The requirements for this data structure is to have reasonable lookup - /// speed, but also being able to pull a `[]const Index.Int` of all - /// boundaries for iteration. - pub const List = struct { - list: std.MultiArrayList(ServerComponentBoundary) = .{}, - /// Used to facilitate fast lookups into `items` by `.source_index` - map: Map = .{}, - - const Map = std.ArrayHashMapUnmanaged(void, void, struct {}, true); - - /// Can only be called on the bundler thread. - pub fn put( - m: *List, - allocator: std.mem.Allocator, - source_index: Index.Int, - use_directive: UseDirective, - reference_source_index: Index.Int, - ssr_source_index: Index.Int, - ) !void { - try m.list.append(allocator, .{ - .source_index = source_index, - .use_directive = use_directive, - .reference_source_index = reference_source_index, - .ssr_source_index = ssr_source_index, - }); - const gop = try m.map.getOrPutAdapted( - allocator, - source_index, - Adapter{ .list = m.list.slice() }, - ); - bun.assert(!gop.found_existing); - } - - /// Can only be called on the bundler thread. - pub fn getIndex(l: *const List, real_source_index: Index.Int) ?usize { - return l.map.getIndexAdapted( - real_source_index, - Adapter{ .list = l.list.slice() }, - ); - } - - /// Use this to improve speed of accessing fields at the cost of - /// storing more pointers. Invalidated when input is mutated. 
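
// A hedged usage sketch (the indexes and allocator are illustrative only):
// register one boundary with `put`, then use the cached `Slice` view for
// lookups, as the doc comment above describes.
fn exampleUsage(allocator: std.mem.Allocator) !void {
    var scbs: ServerComponentBoundary.List = .{};
    try scbs.put(allocator, 42, .client, 43, 44);
    const view = scbs.slice();
    // Maps the original file (42) to its generated reference module (43).
    bun.assert(view.getReferenceSourceIndex(42).? == 43);
}
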
- pub fn slice(l: List) Slice { - return .{ .list = l.list.slice(), .map = l.map }; - } - - pub const Slice = struct { - list: std.MultiArrayList(ServerComponentBoundary).Slice, - map: Map, - - pub fn getIndex(l: *const Slice, real_source_index: Index.Int) ?usize { - return l.map.getIndexAdapted( - real_source_index, - Adapter{ .list = l.list }, - ) orelse return null; - } - - pub fn getReferenceSourceIndex(l: *const Slice, real_source_index: Index.Int) ?u32 { - const i = l.map.getIndexAdapted( - real_source_index, - Adapter{ .list = l.list }, - ) orelse return null; - bun.unsafeAssert(l.list.capacity > 0); // optimize MultiArrayList.Slice.items - return l.list.items(.reference_source_index)[i]; - } - - pub fn bitSet(scbs: Slice, alloc: std.mem.Allocator, input_file_count: usize) !bun.bit_set.DynamicBitSetUnmanaged { - var scb_bitset = try bun.bit_set.DynamicBitSetUnmanaged.initEmpty(alloc, input_file_count); - for (scbs.list.items(.source_index)) |source_index| { - scb_bitset.set(source_index); - } - return scb_bitset; - } - }; - - pub const Adapter = struct { - list: std.MultiArrayList(ServerComponentBoundary).Slice, - - pub fn hash(_: Adapter, key: Index.Int) u32 { - return std.hash.uint32(key); - } - - pub fn eql(adapt: Adapter, a: Index.Int, _: void, b_index: usize) bool { - bun.unsafeAssert(adapt.list.capacity > 0); // optimize MultiArrayList.Slice.items - return a == adapt.list.items(.source_index)[b_index]; - } - }; - }; -}; - -extern fn JSC__jsToNumber(latin1_ptr: [*]const u8, len: usize) f64; - -fn stringToEquivalentNumberValue(str: []const u8) f64 { - // +"" -> 0 - if (str.len == 0) return 0; - if (!bun.strings.isAllASCII(str)) - return std.math.nan(f64); - return JSC__jsToNumber(str.ptr, str.len); -} - -// test "Binding.init" { -// var binding = Binding.alloc( -// std.heap.page_allocator, -// B.Identifier{ .ref = Ref{ .source_index = 0, .innerIndex() = 10 } }, -// logger.Loc{ .start = 1 }, -// ); -// std.testing.expect(binding.loc.start == 1); -// std.testing.expect(@as(Binding.Tag, binding.data) == Binding.Tag.b_identifier); - -// printmem("-------Binding: {d} bits\n", .{@bitSizeOf(Binding)}); -// printmem("B.Identifier: {d} bits\n", .{@bitSizeOf(B.Identifier)}); -// printmem("B.Array: {d} bits\n", .{@bitSizeOf(B.Array)}); -// printmem("B.Property: {d} bits\n", .{@bitSizeOf(B.Property)}); -// printmem("B.Object: {d} bits\n", .{@bitSizeOf(B.Object)}); -// printmem("B.Missing: {d} bits\n", .{@bitSizeOf(B.Missing)}); -// printmem("-------Binding: {d} bits\n", .{@bitSizeOf(Binding)}); -// } - -// test "Stmt.init" { -// var stmt = Stmt.alloc( -// std.heap.page_allocator, -// S.Continue{}, -// logger.Loc{ .start = 1 }, -// ); -// std.testing.expect(stmt.loc.start == 1); -// std.testing.expect(@as(Stmt.Tag, stmt.data) == Stmt.Tag.s_continue); - -// printmem("-----Stmt {d} bits\n", .{@bitSizeOf(Stmt)}); -// printmem("StmtNodeList: {d} bits\n", .{@bitSizeOf(StmtNodeList)}); -// printmem("StmtOrExpr: {d} bits\n", .{@bitSizeOf(StmtOrExpr)}); -// printmem("S.Block {d} bits\n", .{@bitSizeOf(S.Block)}); -// printmem("S.Comment {d} bits\n", .{@bitSizeOf(S.Comment)}); -// printmem("S.Directive {d} bits\n", .{@bitSizeOf(S.Directive)}); -// printmem("S.ExportClause {d} bits\n", .{@bitSizeOf(S.ExportClause)}); -// printmem("S.Empty {d} bits\n", .{@bitSizeOf(S.Empty)}); -// printmem("S.TypeScript {d} bits\n", .{@bitSizeOf(S.TypeScript)}); -// printmem("S.Debugger {d} bits\n", .{@bitSizeOf(S.Debugger)}); -// printmem("S.ExportFrom {d} bits\n", .{@bitSizeOf(S.ExportFrom)}); -// 
printmem("S.ExportDefault {d} bits\n", .{@bitSizeOf(S.ExportDefault)}); -// printmem("S.Enum {d} bits\n", .{@bitSizeOf(S.Enum)}); -// printmem("S.Namespace {d} bits\n", .{@bitSizeOf(S.Namespace)}); -// printmem("S.Function {d} bits\n", .{@bitSizeOf(S.Function)}); -// printmem("S.Class {d} bits\n", .{@bitSizeOf(S.Class)}); -// printmem("S.If {d} bits\n", .{@bitSizeOf(S.If)}); -// printmem("S.For {d} bits\n", .{@bitSizeOf(S.For)}); -// printmem("S.ForIn {d} bits\n", .{@bitSizeOf(S.ForIn)}); -// printmem("S.ForOf {d} bits\n", .{@bitSizeOf(S.ForOf)}); -// printmem("S.DoWhile {d} bits\n", .{@bitSizeOf(S.DoWhile)}); -// printmem("S.While {d} bits\n", .{@bitSizeOf(S.While)}); -// printmem("S.With {d} bits\n", .{@bitSizeOf(S.With)}); -// printmem("S.Try {d} bits\n", .{@bitSizeOf(S.Try)}); -// printmem("S.Switch {d} bits\n", .{@bitSizeOf(S.Switch)}); -// printmem("S.Import {d} bits\n", .{@bitSizeOf(S.Import)}); -// printmem("S.Return {d} bits\n", .{@bitSizeOf(S.Return)}); -// printmem("S.Throw {d} bits\n", .{@bitSizeOf(S.Throw)}); -// printmem("S.Local {d} bits\n", .{@bitSizeOf(S.Local)}); -// printmem("S.Break {d} bits\n", .{@bitSizeOf(S.Break)}); -// printmem("S.Continue {d} bits\n", .{@bitSizeOf(S.Continue)}); -// printmem("-----Stmt {d} bits\n", .{@bitSizeOf(Stmt)}); -// } - -// test "Expr.init" { -// var allocator = std.heap.page_allocator; -// const ident = Expr.init(E.Identifier, E.Identifier{}, logger.Loc{ .start = 100 }); -// var list = [_]Expr{ident}; -// var expr = Expr.init( -// E.Array, -// E.Array{ .items = list[0..] }, -// logger.Loc{ .start = 1 }, -// ); -// try std.testing.expect(expr.loc.start == 1); -// try std.testing.expect(@as(Expr.Tag, expr.data) == Expr.Tag.e_array); -// try std.testing.expect(expr.data.e_array.items[0].loc.start == 100); - -// printmem("--Ref {d} bits\n", .{@bitSizeOf(Ref)}); -// printmem("--LocRef {d} bits\n", .{@bitSizeOf(LocRef)}); -// printmem("--logger.Loc {d} bits\n", .{@bitSizeOf(logger.Loc)}); -// printmem("--logger.Range {d} bits\n", .{@bitSizeOf(logger.Range)}); -// printmem("----------Expr: {d} bits\n", .{@bitSizeOf(Expr)}); -// printmem("ExprNodeList: {d} bits\n", .{@bitSizeOf(ExprNodeList)}); -// printmem("E.Array: {d} bits\n", .{@bitSizeOf(E.Array)}); - -// printmem("E.Unary: {d} bits\n", .{@bitSizeOf(E.Unary)}); -// printmem("E.Binary: {d} bits\n", .{@bitSizeOf(E.Binary)}); -// printmem("E.Boolean: {d} bits\n", .{@bitSizeOf(E.Boolean)}); -// printmem("E.Super: {d} bits\n", .{@bitSizeOf(E.Super)}); -// printmem("E.Null: {d} bits\n", .{@bitSizeOf(E.Null)}); -// printmem("E.Undefined: {d} bits\n", .{@bitSizeOf(E.Undefined)}); -// printmem("E.New: {d} bits\n", .{@bitSizeOf(E.New)}); -// printmem("E.NewTarget: {d} bits\n", .{@bitSizeOf(E.NewTarget)}); -// printmem("E.Function: {d} bits\n", .{@bitSizeOf(E.Function)}); -// printmem("E.ImportMeta: {d} bits\n", .{@bitSizeOf(E.ImportMeta)}); -// printmem("E.Call: {d} bits\n", .{@bitSizeOf(E.Call)}); -// printmem("E.Dot: {d} bits\n", .{@bitSizeOf(E.Dot)}); -// printmem("E.Index: {d} bits\n", .{@bitSizeOf(E.Index)}); -// printmem("E.Arrow: {d} bits\n", .{@bitSizeOf(E.Arrow)}); -// printmem("E.Identifier: {d} bits\n", .{@bitSizeOf(E.Identifier)}); -// printmem("E.ImportIdentifier: {d} bits\n", .{@bitSizeOf(E.ImportIdentifier)}); -// printmem("E.PrivateIdentifier: {d} bits\n", .{@bitSizeOf(E.PrivateIdentifier)}); -// printmem("E.JSXElement: {d} bits\n", .{@bitSizeOf(E.JSXElement)}); -// printmem("E.Missing: {d} bits\n", .{@bitSizeOf(E.Missing)}); -// printmem("E.Number: {d} bits\n", 
.{@bitSizeOf(E.Number)}); -// printmem("E.BigInt: {d} bits\n", .{@bitSizeOf(E.BigInt)}); -// printmem("E.Object: {d} bits\n", .{@bitSizeOf(E.Object)}); -// printmem("E.Spread: {d} bits\n", .{@bitSizeOf(E.Spread)}); -// printmem("E.String: {d} bits\n", .{@bitSizeOf(E.String)}); -// printmem("E.TemplatePart: {d} bits\n", .{@bitSizeOf(E.TemplatePart)}); -// printmem("E.Template: {d} bits\n", .{@bitSizeOf(E.Template)}); -// printmem("E.RegExp: {d} bits\n", .{@bitSizeOf(E.RegExp)}); -// printmem("E.Await: {d} bits\n", .{@bitSizeOf(E.Await)}); -// printmem("E.Yield: {d} bits\n", .{@bitSizeOf(E.Yield)}); -// printmem("E.If: {d} bits\n", .{@bitSizeOf(E.If)}); -// printmem("E.RequireResolveString: {d} bits\n", .{@bitSizeOf(E.RequireResolveString)}); -// printmem("E.Import: {d} bits\n", .{@bitSizeOf(E.Import)}); -// printmem("----------Expr: {d} bits\n", .{@bitSizeOf(Expr)}); -// } - -// -- ESBuild bit sizes -// EArray | 256 -// EArrow | 512 -// EAwait | 192 -// EBinary | 448 -// ECall | 448 -// EDot | 384 -// EIdentifier | 96 -// EIf | 576 -// EImport | 448 -// EImportIdentifier | 96 -// EIndex | 448 -// EJSXElement | 448 -// ENew | 448 -// EnumValue | 384 -// EObject | 256 -// EPrivateIdentifier | 64 -// ERequire | 32 -// ERequireResolve | 32 -// EString | 256 -// ETemplate | 640 -// EUnary | 256 -// Expr | 192 -// ExprOrStmt | 128 -// EYield | 128 -// Finally | 256 -// Fn | 704 -// FnBody | 256 -// LocRef | 96 -// NamedExport | 96 -// NamedImport | 512 -// NameMinifier | 256 -// NamespaceAlias | 192 -// opTableEntry | 256 -// Part | 1088 -// Property | 640 -// PropertyBinding | 512 -// Ref | 64 -// SBlock | 192 -// SBreak | 64 -// SClass | 704 -// SComment | 128 -// SContinue | 64 -// Scope | 704 -// ScopeMember | 96 -// SDirective | 256 -// SDoWhile | 384 -// SEnum | 448 -// SExportClause | 256 -// SExportDefault | 256 -// SExportEquals | 192 -// SExportFrom | 320 -// SExportStar | 192 -// SExpr | 256 -// SFor | 384 -// SForIn | 576 -// SForOf | 640 -// SFunction | 768 -// SIf | 448 -// SImport | 320 -// SLabel | 320 -// SLazyExport | 192 -// SLocal | 256 -// SNamespace | 448 -// Span | 192 -// SReturn | 64 -// SSwitch | 448 -// SThrow | 192 -// Stmt | 192 -// STry | 384 -// -- ESBuild bit sizes - -const ToJSError = error{ +pub const ToJSError = error{ @"Cannot convert argument type to JS", @"Cannot convert identifier to JS. Try a statically-known value", MacroError, @@ -9129,8 +597,6 @@ const ToJSError = error{ JSError, }; -const writeAnyToHasher = bun.writeAnyToHasher; - /// Say you need to allocate a bunch of tiny arrays /// You could just do separate allocations for each, but that is slow /// With std.ArrayList, pointers invalidate on resize and that means it will crash. 
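
// A minimal sketch of that batching idea (illustrative, not the real
// NewBatcher API, which follows in the next hunk): allocate one backing
// slice up front, then hand out stable sub-slices with no further
// allocation or resizing.
fn SimpleBatcher(comptime T: type) type {
    return struct {
        remaining: []T,

        pub fn init(allocator: std.mem.Allocator, total_items: usize) !@This() {
            return .{ .remaining = try allocator.alloc(T, total_items) };
        }

        // Pointers into previously returned slices stay valid because the
        // backing memory is allocated exactly once and never moves.
        pub fn next(this: *@This(), n: usize) []T {
            const out = this.remaining[0..n];
            this.remaining = this.remaining[n..];
            return out;
        }
    };
}
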
@@ -9168,3 +634,48 @@ pub fn NewBatcher(comptime Type: type) type { } }; } + +// @sortImports + +pub const ASTMemoryAllocator = @import("ast/ASTMemoryAllocator.zig"); +pub const Ast = @import("ast/Ast.zig"); +pub const Binding = @import("ast/Binding.zig"); +pub const BindingNodeIndex = Binding; +pub const BundledAst = @import("ast/BundledAst.zig"); +pub const E = @import("ast/E.zig"); +pub const Expr = @import("ast/Expr.zig"); +pub const ExprNodeIndex = Expr; +pub const G = @import("ast/G.zig"); +pub const Macro = @import("ast/Macro.zig"); +pub const Op = @import("ast/Op.zig"); +pub const S = @import("ast/S.zig"); +pub const Scope = @import("ast/Scope.zig"); +pub const ServerComponentBoundary = @import("ast/ServerComponentBoundary.zig"); +pub const Stmt = @import("ast/Stmt.zig"); +pub const StmtNodeIndex = Stmt; +pub const Symbol = @import("ast/Symbol.zig"); +const std = @import("std"); +pub const B = @import("ast/B.zig").B; +pub const NewStore = @import("ast/NewStore.zig").NewStore; +const TypeScript = @import("./js_parser.zig").TypeScript; +pub const UseDirective = @import("ast/UseDirective.zig").UseDirective; + +pub const CharFreq = @import("ast/CharFreq.zig"); +const char_freq_count = CharFreq.char_freq_count; + +pub const TS = @import("ast/TS.zig"); +pub const TSNamespaceMember = TS.TSNamespaceMember; +pub const TSNamespaceMemberMap = TS.TSNamespaceMemberMap; +pub const TSNamespaceScope = TS.TSNamespaceScope; + +pub const Index = @import("ast/base.zig").Index; +pub const Ref = @import("ast/base.zig").Ref; +pub const RefHashCtx = @import("ast/base.zig").RefHashCtx; + +const bun = @import("bun"); +pub const BabyList = bun.BabyList; +const Environment = bun.Environment; +const Output = bun.Output; +const logger = bun.logger; +const string = bun.string; +const strings = bun.strings; diff --git a/src/js_parser.zig b/src/js_parser.zig index dad2a9eb5e..e55e9227f2 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -19382,22 +19382,36 @@ fn NewParser_( // > export default default_export; // > $RefreshReg(default_export, "App.tsx:default") const ref = if (data.value == .expr) emit_temp_var: { - const temp_id = p.generateTempRef("default_export"); - try p.current_scope.generated.push(p.allocator, temp_id); + const ref_to_use = brk: { + if (func.func.name) |*loc_ref| { + // Input: + // + // export default function Foo() {} + // + // Output: + // + // const Foo = _s(function Foo() {}) + // export default Foo; + if (loc_ref.ref) |ref| break :brk ref; + } + const temp_id = p.generateTempRef("default_export"); + try p.current_scope.generated.push(p.allocator, temp_id); + break :brk temp_id; + }; stmts.append(Stmt.alloc(S.Local, .{ .kind = .k_const, .decls = try G.Decl.List.fromSlice(p.allocator, &.{ .{ - .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = temp_id }, stmt.loc), + .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = ref_to_use }, stmt.loc), .value = data.value.expr, }, }), }, stmt.loc)) catch bun.outOfMemory(); - data.value = .{ .expr = .initIdentifier(temp_id, stmt.loc) }; + data.value = .{ .expr = .initIdentifier(ref_to_use, stmt.loc) }; - break :emit_temp_var temp_id; + break :emit_temp_var ref_to_use; } else data.default_name.ref.?; if (p.options.features.server_components.wrapsExports()) { diff --git a/src/logger.zig b/src/logger.zig index 0e69ad5fe7..8fdea0d6ca 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -64,8 +64,8 @@ pub const Kind = enum(u8) { pub const Loc = struct { start: i32 = -1, - pub inline fn toNullable(loc: *Loc) ?Loc { - return if 
(loc.start == -1) null else loc.*; + pub inline fn toNullable(loc: Loc) ?Loc { + return if (loc.start == -1) null else loc; } pub const toUsize = i; @@ -780,7 +780,7 @@ pub const Log = struct { const arr = try JSC.JSValue.createEmptyArray(global, msgs.len); for (msgs, 0..) |msg, i| { - arr.putIndex(global, @as(u32, @intCast(i)), try msg.toJS(global, allocator)); + try arr.putIndex(global, @as(u32, @intCast(i)), try msg.toJS(global, allocator)); } return arr; diff --git a/src/macho.zig b/src/macho.zig index 9ada9746e6..051a0628b8 100644 --- a/src/macho.zig +++ b/src/macho.zig @@ -169,7 +169,7 @@ pub const MachoFile = struct { // We need to shift [...data after __BUN] forward by size_diff bytes. const after_bun_slice = self.data.items[original_data_end + @as(usize, @intCast(size_diff)) ..]; const prev_after_bun_slice = prev_data_slice[original_segsize..]; - bun.move(after_bun_slice, prev_after_bun_slice); + bun.memmove(after_bun_slice, prev_after_bun_slice); // Now we copy the u32 size header std.mem.writeInt(u32, self.data.items[original_fileoff..][0..4], @intCast(data.len), .little); diff --git a/src/meta.zig b/src/meta.zig index 49d5b794dc..607124e246 100644 --- a/src/meta.zig +++ b/src/meta.zig @@ -330,3 +330,11 @@ pub fn Tagged(comptime U: type, comptime T: type) type { info.decls = &.{}; return @Type(.{ .@"union" = info }); } + +pub fn SliceChild(comptime T: type) type { + const tyinfo = @typeInfo(T); + if (tyinfo == .pointer and tyinfo.pointer.size == .slice) { + return tyinfo.pointer.child; + } + return T; +} diff --git a/src/multi_array_list.zig b/src/multi_array_list.zig index b544d3043d..a9a4276490 100644 --- a/src/multi_array_list.zig +++ b/src/multi_array_list.zig @@ -184,9 +184,9 @@ pub fn MultiArrayList(comptime T: type) type { }; /// Release all allocated memory. - pub fn deinit(self: *Self, gpa: Allocator) void { + pub fn deinit(self: *const Self, gpa: Allocator) void { gpa.free(self.allocatedBytes()); - self.* = undefined; + @constCast(self).* = undefined; } /// The caller owns the returned memory. Empties this MultiArrayList. diff --git a/src/napi/js_native_api.h b/src/napi/js_native_api.h index 3840310cdd..a96d99cce8 100644 --- a/src/napi/js_native_api.h +++ b/src/napi/js_native_api.h @@ -17,7 +17,7 @@ // functions available in a new version of N-API that is not yet ported in all // LTS versions, they can set NAPI_VERSION knowing that they have specifically // depended on that version. 
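
// For example (an illustrative addon-side snippet, not part of this header):
// because the default below sits inside an #ifndef guard, an addon that has
// only been validated against Node-API 8 can keep pinning that version even
// after the default moves to 10:
//
//   #define NAPI_VERSION 8
//   #include <js_native_api.h>
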
-#define NAPI_VERSION 8 +#define NAPI_VERSION 10 #endif #endif diff --git a/src/napi/napi.zig b/src/napi/napi.zig index 334b96c173..e6cf797d08 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -570,7 +570,7 @@ pub export fn napi_get_array_length(env_: napi_env, value_: napi_value, result_: return env.setLastError(.array_expected); } - result.* = @as(u32, @truncate(value.getLength(env.toJS()))); + result.* = @truncate(value.getLength(env.toJS()) catch return env.setLastError(.pending_exception)); return env.ok(); } pub export fn napi_strict_equals(env_: napi_env, lhs_: napi_value, rhs_: napi_value, result_: ?*bool) napi_status { @@ -582,8 +582,8 @@ pub export fn napi_strict_equals(env_: napi_env, lhs_: napi_value, rhs_: napi_va return env.invalidArg(); }; const lhs, const rhs = .{ lhs_.get(), rhs_.get() }; - // there is some nuance with NaN here i'm not sure about - result.* = lhs.isSameValue(rhs, env.toJS()); + // TODO: this needs to be strictEquals not isSameValue (NaN !== NaN and -0 === 0) + result.* = lhs.isSameValue(rhs, env.toJS()) catch return env.setLastError(.pending_exception); return env.ok(); } pub extern fn napi_call_function(env: napi_env, recv: napi_value, func: napi_value, argc: usize, argv: [*c]const napi_value, result: *napi_value) napi_status; @@ -808,7 +808,11 @@ pub export fn napi_get_arraybuffer_info(env_: napi_env, arraybuffer_: napi_value }; env.checkGC(); const arraybuffer = arraybuffer_.get(); - const array_buffer = arraybuffer.asArrayBuffer(env.toJS()) orelse return env.setLastError(.arraybuffer_expected); + const array_buffer = arraybuffer.asArrayBuffer(env.toJS()) orelse return env.setLastError(.invalid_arg); + if (array_buffer.typed_array_type != .ArrayBuffer) { + return env.setLastError(.invalid_arg); + } + const slice = array_buffer.slice(); if (data) |dat| dat.* = slice.ptr; @@ -997,30 +1001,8 @@ pub export fn napi_is_date(env_: napi_env, value_: napi_value, is_date_: ?*bool) } pub extern fn napi_get_date_value(env: napi_env, value: napi_value, result: *f64) napi_status; pub extern fn napi_add_finalizer(env: napi_env, js_object: napi_value, native_object: ?*anyopaque, finalize_cb: napi_finalize, finalize_hint: ?*anyopaque, result: napi_ref) napi_status; -pub export fn napi_create_bigint_int64(env_: napi_env, value: i64, result_: ?*napi_value) napi_status { - log("napi_create_bigint_int64", .{}); - const env = env_ orelse { - return envIsNull(); - }; - env.checkGC(); - const result = result_ orelse { - return env.invalidArg(); - }; - result.set(env, JSC.JSValue.fromInt64NoTruncate(env.toJS(), value)); - return env.ok(); -} -pub export fn napi_create_bigint_uint64(env_: napi_env, value: u64, result_: ?*napi_value) napi_status { - log("napi_create_bigint_uint64", .{}); - const env = env_ orelse { - return envIsNull(); - }; - env.checkGC(); - const result = result_ orelse { - return env.invalidArg(); - }; - result.set(env, JSC.JSValue.fromUInt64NoTruncate(env.toJS(), value)); - return env.ok(); -} +pub extern fn napi_create_bigint_int64(env: napi_env, value: i64, result_: ?*napi_value) napi_status; +pub extern fn napi_create_bigint_uint64(env: napi_env, value: u64, result_: ?*napi_value) napi_status; pub extern fn napi_create_bigint_words(env: napi_env, sign_bit: c_int, word_count: usize, words: [*c]const u64, result: *napi_value) napi_status; pub extern fn napi_get_value_bigint_int64(_: napi_env, value_: napi_value, result_: ?*i64, _: *bool) napi_status; pub extern fn napi_get_value_bigint_uint64(_: napi_env, value_: napi_value, result_: ?*u64, _: 
*bool) napi_status; @@ -1047,7 +1029,7 @@ pub const napi_async_work = struct { data: ?*anyopaque = null, status: std.atomic.Value(Status) = .init(.pending), scheduled: bool = false, - ref: Async.KeepAlive = .{}, + poll_ref: Async.KeepAlive = .{}, pub const Status = enum(u32) { pending = 0, @@ -1056,30 +1038,36 @@ pub const napi_async_work = struct { cancelled = 3, }; - pub fn new(env: *NapiEnv, execute: napi_async_execute_callback, complete: ?napi_async_complete_callback, data: ?*anyopaque) !*napi_async_work { - const work = try bun.default_allocator.create(napi_async_work); + pub fn new(env: *NapiEnv, execute: napi_async_execute_callback, complete: ?napi_async_complete_callback, data: ?*anyopaque) *napi_async_work { const global = env.toJS(); - work.* = .{ + + const work = bun.new(napi_async_work, .{ .global = global, .env = env, .execute = execute, .event_loop = global.bunVM().eventLoop(), .complete = complete, .data = data, - }; + }); return work; } pub fn destroy(this: *napi_async_work) void { - bun.default_allocator.destroy(this); + bun.destroy(this); + } + + pub fn schedule(this: *napi_async_work) void { + if (this.scheduled) return; + this.scheduled = true; + this.poll_ref.ref(this.global.bunVM()); + WorkPool.schedule(&this.task); } pub fn runFromThreadPool(task: *WorkPoolTask) void { var this: *napi_async_work = @fieldParentPtr("task", task); - this.run(); } - pub fn run(this: *napi_async_work) void { + fn run(this: *napi_async_work) void { if (this.status.cmpxchgStrong(.pending, .started, .seq_cst, .seq_cst)) |state| { if (state == .cancelled) { this.event_loop.enqueueTaskConcurrent(this.concurrent_task.from(this, .manual_deinit)); @@ -1092,34 +1080,22 @@ pub const napi_async_work = struct { this.event_loop.enqueueTaskConcurrent(this.concurrent_task.from(this, .manual_deinit)); } - pub fn schedule(this: *napi_async_work) void { - if (this.scheduled) return; - this.scheduled = true; - this.ref.ref(this.global.bunVM()); - WorkPool.schedule(&this.task); - } - pub fn cancel(this: *napi_async_work) bool { return this.status.cmpxchgStrong(.pending, .cancelled, .seq_cst, .seq_cst) == null; } - fn runFromJSWithError(this: *napi_async_work) bun.JSError!void { - - // likely `complete` will call `napi_delete_async_work`, so take a copy - // of `ref` beforehand - var ref = this.ref; - const env = this.env; - defer { - const global = env.toJS(); - ref.unref(global.bunVM()); - } + pub fn runFromJS(this: *napi_async_work, vm: *JSC.VirtualMachine, global: *JSC.JSGlobalObject) void { + // Note: the "this" value here may already be freed by the user in `complete` + var poll_ref = this.poll_ref; + defer poll_ref.unref(vm); // https://github.com/nodejs/node/blob/a2de5b9150da60c77144bb5333371eaca3fab936/src/node_api.cc#L1201 const complete = this.complete orelse { return; }; - const handle_scope = NapiHandleScope.open(this.env, false); + const env = this.env; + const handle_scope = NapiHandleScope.open(env, false); defer if (handle_scope) |scope| scope.close(env); const status: NapiStatus = if (this.status.load(.seq_cst) == .cancelled) @@ -1128,22 +1104,15 @@ pub const napi_async_work = struct { .ok; complete( - this.env, + env, @intFromEnum(status), this.data, ); - const global = env.toJS(); if (global.hasException()) { - return error.JSError; + global.reportActiveExceptionAsUnhandled(error.JSError); } } - - pub fn runFromJS(this: *napi_async_work) void { - this.runFromJSWithError() catch |e| { - this.global.reportActiveExceptionAsUnhandled(e); - }; - } }; pub const napi_threadsafe_function = 
*ThreadSafeFunction; pub const napi_threadsafe_function_release_mode = enum(c_uint) { @@ -1210,20 +1179,7 @@ pub export fn napi_fatal_error(location_ptr: ?[*:0]const u8, location_len: usize bun.Output.panic("napi: {s}", .{message}); } -pub export fn napi_create_buffer(env_: napi_env, length: usize, data: ?**anyopaque, result: *napi_value) napi_status { - log("napi_create_buffer: {d}", .{length}); - const env = env_ orelse { - return envIsNull(); - }; - var buffer = JSC.JSValue.createBufferFromLength(env.toJS(), length); - if (length > 0) { - if (data) |ptr| { - ptr.* = buffer.asArrayBuffer(env.toJS()).?.ptr; - } - } - result.set(env, buffer); - return env.ok(); -} +pub extern fn napi_create_buffer(env: napi_env, length: usize, data: ?**anyopaque, result: *napi_value) napi_status; pub extern fn napi_create_external_buffer(env: napi_env, length: usize, data: ?*anyopaque, finalize_cb: napi_finalize, finalize_hint: ?*anyopaque, result: *napi_value) napi_status; pub export fn napi_create_buffer_copy(env_: napi_env, length: usize, data: [*]u8, result_data: ?*?*anyopaque, result_: ?*napi_value) napi_status { log("napi_create_buffer_copy: {d}", .{length}); @@ -1293,9 +1249,7 @@ pub export fn napi_create_async_work( const execute = execute_ orelse { return env.invalidArg(); }; - result.* = napi_async_work.new(env, execute, complete, data) catch { - return env.genericFailure(); - }; + result.* = napi_async_work.new(env, execute, complete, data); return env.ok(); } pub export fn napi_delete_async_work(env_: napi_env, work_: ?*napi_async_work) napi_status { @@ -1306,7 +1260,7 @@ pub export fn napi_delete_async_work(env_: napi_env, work_: ?*napi_async_work) n const work = work_ orelse { return env.invalidArg(); }; - bun.assert(env.toJS() == work.global); + if (comptime bun.Environment.allow_assert) bun.assert(env.toJS() == work.global); work.destroy(); return env.ok(); } @@ -1318,7 +1272,7 @@ pub export fn napi_queue_async_work(env_: napi_env, work_: ?*napi_async_work) na const work = work_ orelse { return env.invalidArg(); }; - bun.assert(env.toJS() == work.global); + if (comptime bun.Environment.allow_assert) bun.assert(env.toJS() == work.global); work.schedule(); return env.ok(); } @@ -1330,7 +1284,7 @@ pub export fn napi_cancel_async_work(env_: napi_env, work_: ?*napi_async_work) n const work = work_ orelse { return env.invalidArg(); }; - bun.assert(env.toJS() == work.global); + if (comptime bun.Environment.allow_assert) bun.assert(env.toJS() == work.global); if (work.cancel()) { return env.ok(); } @@ -1592,7 +1546,7 @@ pub const ThreadSafeFunction = struct { break :brk .{ !this.isClosing(), t }; }; - this.call(task, !is_first); + this.call(task, !is_first) catch return false; if (queue_finalizer_after_call) { this.maybeQueueFinalizer(); @@ -1604,10 +1558,10 @@ pub const ThreadSafeFunction = struct { /// This function can be called multiple times in one tick of the event loop. /// See: https://github.com/nodejs/node/pull/38506 /// In that case, we need to drain microtasks. 
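
// To illustrate the scenario above (hedged, addon-side usage shown as a
// comment; both calls are real Node-API, the interleaving is the point):
//
//   napi_call_threadsafe_function(tsfn, data_a, napi_tsfn_nonblocking);
//   napi_call_threadsafe_function(tsfn, data_b, napi_tsfn_nonblocking);
//
// Both calls can be dispatched within a single event-loop tick, so any
// microtasks queued by the first JS callback must be drained before the
// second callback runs in order to match Node.js ordering.
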
- fn call(this: *ThreadSafeFunction, task: ?*anyopaque, is_first: bool) void { + fn call(this: *ThreadSafeFunction, task: ?*anyopaque, is_first: bool) bun.JSExecutionTerminated!void { const env = this.env; if (!is_first) { - this.event_loop.drainMicrotasks(); + try this.event_loop.drainMicrotasks(); } const globalObject = env.toJS(); @@ -1893,6 +1847,7 @@ const V8API = if (!bun.Environment.isWindows) struct { pub extern fn _ZNK2v86String19ContainsOnlyOneByteEv() *anyopaque; pub extern fn _ZN2v812api_internal18GlobalizeReferenceEPNS_8internal7IsolateEm() *anyopaque; pub extern fn _ZN2v812api_internal13DisposeGlobalEPm() *anyopaque; + pub extern fn _ZN2v812api_internal23GetFunctionTemplateDataEPNS_7IsolateENS_5LocalINS_4DataEEE() *anyopaque; pub extern fn _ZNK2v88Function7GetNameEv() *anyopaque; pub extern fn _ZNK2v85Value10IsFunctionEv() *anyopaque; pub extern fn _ZN2v812api_internal17FromJustIsNothingEv() *anyopaque; @@ -1964,6 +1919,7 @@ const V8API = if (!bun.Environment.isWindows) struct { pub extern fn @"?ContainsOnlyOneByte@String@v8@@QEBA_NXZ"() *anyopaque; pub extern fn @"?GlobalizeReference@api_internal@v8@@YAPEA_KPEAVIsolate@internal@2@_K@Z"() *anyopaque; pub extern fn @"?DisposeGlobal@api_internal@v8@@YAXPEA_K@Z"() *anyopaque; + pub extern fn @"?GetFunctionTemplateData@api_internal@v8@@YA?AV?$Local@VValue@v8@@@2@PEAVIsolate@2@V?$Local@VData@v8@@@2@@Z"() *anyopaque; pub extern fn @"?GetName@Function@v8@@QEBA?AV?$Local@VValue@v8@@@2@XZ"() *anyopaque; pub extern fn @"?IsFunction@Value@v8@@QEBA_NXZ"() *anyopaque; pub extern fn @"?FromJustIsNothing@api_internal@v8@@YAXXZ"() *anyopaque; diff --git a/src/node-fallbacks/build-fallbacks.ts b/src/node-fallbacks/build-fallbacks.ts index 72f5f2504e..bb5d23b6ee 100644 --- a/src/node-fallbacks/build-fallbacks.ts +++ b/src/node-fallbacks/build-fallbacks.ts @@ -32,7 +32,7 @@ for (let fileIndex = 0; fileIndex < allFiles.length; fileIndex++) { // Create the build command with all the specified options const buildCommand = - Bun.$`bun build --outdir=${outdir} ${name} --minify-syntax --minify-whitespace --format=${name.includes("stream") ? "cjs" : "esm"} --target=node ${{ raw: externalModules }}`.text(); + Bun.$`bun build --define=process.env.NODE_DEBUG:"false" --define=process.env.READABLE_STREAM="'enable'" --define=global:globalThis --outdir=${outdir} ${name} --minify-syntax --minify-whitespace --format=${name.includes("stream") ? 
"cjs" : "esm"} --target=node ${{ raw: externalModules }}`.text(); commands.push( buildCommand.then(async text => { diff --git a/src/node-fallbacks/crypto.js b/src/node-fallbacks/crypto.js index a5449f32ce..7955f1bea6 100644 --- a/src/node-fallbacks/crypto.js +++ b/src/node-fallbacks/crypto.js @@ -73,3 +73,4 @@ export function getCurves() { } export const webcrypto = crypto; +export default crypto; diff --git a/src/node-fallbacks/http.js b/src/node-fallbacks/http.js index 1ac358b4eb..39a71744a1 100644 --- a/src/node-fallbacks/http.js +++ b/src/node-fallbacks/http.js @@ -1,2 +1,3 @@ import http from "stream-http"; export var { request, get, ClientRequest, IncomingMessage, Agent, globalAgent, STATUS_CODES, METHODS } = http; +export default http; diff --git a/src/node-fallbacks/https.js b/src/node-fallbacks/https.js index 92460d5c82..f7c4617ba0 100644 --- a/src/node-fallbacks/https.js +++ b/src/node-fallbacks/https.js @@ -17,3 +17,5 @@ export var { validateHeaderName, validateHeaderValue, } = https; + +export default https; diff --git a/src/node-fallbacks/net.js b/src/node-fallbacks/net.js index 746e07415a..4e5c9867eb 100644 --- a/src/node-fallbacks/net.js +++ b/src/node-fallbacks/net.js @@ -52,3 +52,9 @@ export function isIP(s) { if (isIPv6(s)) return 6; return 0; } + +export default { + isIPv4, + isIPv6, + isIP, +}; diff --git a/src/node-fallbacks/tty.js b/src/node-fallbacks/tty.js index 985c225fad..05ded0b9ad 100644 --- a/src/node-fallbacks/tty.js +++ b/src/node-fallbacks/tty.js @@ -6,3 +6,4 @@ function ReadStream() { throw new Error("tty.ReadStream is not implemented for browsers"); } export { ReadStream, WriteStream, isatty }; +export default { ReadStream, WriteStream, isatty }; diff --git a/src/node-fallbacks/util.js b/src/node-fallbacks/util.js index f10c242bd1..79b4ce6ce3 100644 --- a/src/node-fallbacks/util.js +++ b/src/node-fallbacks/util.js @@ -123,8 +123,8 @@ export const debuglog = /* @__PURE__ */ ((debugs = {}, debugEnvRegex = {}, debug */ /* legacy: obj, showHidden, depth, colors*/ export const inspect = /* @__PURE__ */ (i => + // http://en.wikipedia.org/wiki/ANSI_escape_code#graphics ( - // http://en.wikipedia.org/wiki/ANSI_escape_code#graphics (i.colors = { "bold": [1, 22], "italic": [3, 23], @@ -947,3 +947,13 @@ export function callbackify(original) { export const TextEncoder = /* @__PURE__ */ globalThis.TextEncoder; export const TextDecoder = /* @__PURE__ */ globalThis.TextDecoder; +export default { + TextEncoder, + TextDecoder, + promisify, + log, + inherits, + _extend, + callbackifyOnRejected, + callbackify, +}; diff --git a/src/patch.zig b/src/patch.zig index 027bdd0258..df7c262fb1 100644 --- a/src/patch.zig +++ b/src/patch.zig @@ -1,10 +1,3 @@ -const Output = bun.Output; -const std = @import("std"); -const bun = @import("bun"); -const JSC = bun.JSC; -const Allocator = std.mem.Allocator; -const List = std.ArrayListUnmanaged; - const WHITESPACE: []const u8 = " \t\n\r"; // TODO: calculate this for different systems @@ -1132,7 +1125,7 @@ pub const TestingAPIs = struct { defer args.deinit(); if (args.patchfile.apply(bun.default_allocator, args.dirfd)) |err| { - return globalThis.throwValue(err.toJSC(globalThis)); + return globalThis.throwValue(err.toJS(globalThis)); } return .true; @@ -1179,7 +1172,7 @@ pub const TestingAPIs = struct { break :brk switch (bun.sys.open(path, bun.O.DIRECTORY | bun.O.RDONLY, 0)) { .err => |e| { - globalThis.throwValue(e.withPath(path).toJSC(globalThis)) catch {}; + globalThis.throwValue(e.withPath(path).toJS(globalThis)) catch {}; return 
.initErr(.js_undefined); }, .result => |fd| fd, @@ -1541,3 +1534,13 @@ fn shouldSkipLine(line: []const u8) bool { // line like: "--- a/numbers.txt" or "+++ b/numbers.txt" we should not skip (!(line.len >= 4 and (std.mem.eql(u8, line[0..4], "--- ") or std.mem.eql(u8, line[0..4], "+++ "))))); } + +// @sortImports + +const bun = @import("bun"); +const JSC = bun.JSC; +const Output = bun.Output; + +const std = @import("std"); +const List = std.ArrayListUnmanaged; +const Allocator = std.mem.Allocator; diff --git a/src/paths.zig b/src/paths.zig new file mode 100644 index 0000000000..dd9aea3a0f --- /dev/null +++ b/src/paths.zig @@ -0,0 +1,24 @@ +const std = @import("std"); +const bun = @import("bun"); +const Environment = bun.Environment; + +const paths = @import("./paths/Path.zig"); +pub const Path = paths.Path; +pub const AbsPath = paths.AbsPath; +pub const RelPath = paths.RelPath; + +pub const EnvPath = @import("./paths/EnvPath.zig").EnvPath; + +const pools = @import("./paths/path_buffer_pool.zig"); +pub const path_buffer_pool = pools.path_buffer_pool; +pub const w_path_buffer_pool = pools.w_path_buffer_pool; +pub const os_path_buffer_pool = pools.os_path_buffer_pool; + +pub const MAX_PATH_BYTES: usize = if (Environment.isWasm) 1024 else std.fs.max_path_bytes; +pub const PathBuffer = [MAX_PATH_BYTES]u8; +pub const PATH_MAX_WIDE = std.os.windows.PATH_MAX_WIDE; +pub const WPathBuffer = [PATH_MAX_WIDE]u16; +pub const OSPathChar = if (Environment.isWindows) u16 else u8; +pub const OSPathSliceZ = [:0]const OSPathChar; +pub const OSPathSlice = []const OSPathChar; +pub const OSPathBuffer = if (Environment.isWindows) WPathBuffer else PathBuffer; diff --git a/src/paths/EnvPath.zig b/src/paths/EnvPath.zig new file mode 100644 index 0000000000..63e86bef49 --- /dev/null +++ b/src/paths/EnvPath.zig @@ -0,0 +1,90 @@ +const std = @import("std"); +const bun = @import("bun"); +const AbsPath = bun.AbsPath; +const string = bun.string; +const strings = bun.strings; +const OOM = bun.OOM; + +pub const EnvPathOptions = struct { + // +}; + +fn trimPathDelimiters(input: string) string { + var trimmed = input; + while (trimmed.len > 0 and trimmed[0] == std.fs.path.delimiter) { + trimmed = trimmed[1..]; + } + while (trimmed.len > 0 and trimmed[trimmed.len - 1] == std.fs.path.delimiter) { + trimmed = trimmed[0 .. 
trimmed.len - 1]; + } + return trimmed; +} + +pub fn EnvPath(comptime opts: EnvPathOptions) type { + return struct { + allocator: std.mem.Allocator, + buf: std.ArrayListUnmanaged(u8) = .empty, + + pub fn init(allocator: std.mem.Allocator) @This() { + return .{ .allocator = allocator }; + } + + pub fn initCapacity(allocator: std.mem.Allocator, capacity: usize) OOM!@This() { + return .{ .allocator = allocator, .buf = try .initCapacity(allocator, capacity) }; + } + + pub fn deinit(this: *const @This()) void { + @constCast(this).buf.deinit(this.allocator); + } + + pub fn slice(this: *const @This()) string { + return this.buf.items; + } + + pub fn append(this: *@This(), input: anytype) OOM!void { + const trimmed: string = switch (@TypeOf(input)) { + []u8, []const u8 => strings.withoutTrailingSlash(trimPathDelimiters(input)), + + // assume already trimmed + else => input.slice(), + }; + + if (trimmed.len == 0) { + return; + } + + if (this.buf.items.len != 0) { + try this.buf.ensureUnusedCapacity(this.allocator, trimmed.len + 1); + this.buf.appendAssumeCapacity(std.fs.path.delimiter); + this.buf.appendSliceAssumeCapacity(trimmed); + } else { + try this.buf.appendSlice(this.allocator, trimmed); + } + } + + pub const PathComponentBuilder = struct { + env_path: *EnvPath(opts), + path_buf: AbsPath(.{ .sep = .auto }), + + pub fn append(this: *@This(), component: string) void { + this.path_buf.append(component); + } + + pub fn appendFmt(this: *@This(), comptime component_fmt: string, component_args: anytype) void { + this.path_buf.appendFmt(component_fmt, component_args); + } + + pub fn apply(this: *@This()) OOM!void { + try this.env_path.append(&this.path_buf); + this.path_buf.deinit(); + } + }; + + pub fn pathComponentBuilder(this: *@This()) PathComponentBuilder { + return .{ + .env_path = this, + .path_buf = .init(), + }; + } + }; +} diff --git a/src/paths/Path.zig b/src/paths/Path.zig new file mode 100644 index 0000000000..61339c44d4 --- /dev/null +++ b/src/paths/Path.zig @@ -0,0 +1,808 @@ +const std = @import("std"); +const bun = @import("bun"); +const Output = bun.Output; +const PathBuffer = bun.PathBuffer; +const WPathBuffer = bun.WPathBuffer; +const Environment = bun.Environment; +const FD = bun.FD; + +const Options = struct { + check_length: CheckLength = .assume_always_less_than_max_path, + sep: PathSeparators = .any, + kind: Kind = .any, + buf_type: BufType = .pool, + unit: Unit = .u8, + + const Unit = enum { + u8, + u16, + os, + }; + + const BufType = enum { + pool, + // stack, + // array_list, + }; + + const Kind = enum { + abs, + rel, + + // not recommended, but useful when you don't know + any, + }; + + const CheckLength = enum { + assume_always_less_than_max_path, + check_for_greater_than_max_path, + }; + + const PathSeparators = enum { + any, + auto, + posix, + windows, + + pub fn char(comptime sep: @This()) u8 { + return switch (sep) { + .any => @compileError("use the existing slash"), + .auto => std.fs.path.sep, + .posix => std.fs.path.sep_posix, + .windows => std.fs.path.sep_windows, + }; + } + }; + + pub fn pathUnit(comptime opts: @This()) type { + return switch (opts.unit) { + .u8 => u8, + .u16 => u16, + .os => if (Environment.isWindows) u16 else u8, + }; + } + + pub fn notPathUnit(comptime opts: @This()) type { + return switch (opts.unit) { + .u8 => u16, + .u16 => u8, + .os => if (Environment.isWindows) u8 else u16, + }; + } + + pub fn maxPathLength(comptime opts: @This()) usize { + switch (comptime opts.check_length) { + .assume_always_less_than_max_path => @compileError("max 
path length is not needed"), + .check_for_greater_than_max_path => { + return switch (comptime opts.unit) { + .u8 => bun.MAX_PATH_BYTES, + .u16 => bun.PATH_MAX_WIDE, + .os => if (Environment.isWindows) bun.PATH_MAX_WIDE else bun.MAX_PATH_BYTES, + }; + }, + } + } + + pub fn Buf(comptime opts: @This()) type { + return switch (opts.buf_type) { + .pool => struct { + pooled: switch (opts.unit) { + .u8 => *PathBuffer, + .u16 => *WPathBuffer, + .os => if (Environment.isWindows) *WPathBuffer else *PathBuffer, + }, + len: usize, + + pub fn setLength(this: *@This(), new_len: usize) void { + this.len = new_len; + } + + pub fn append(this: *@This(), characters: anytype, add_separator: bool) void { + if (add_separator) { + switch (comptime opts.sep) { + .any, .auto => this.pooled[this.len] = std.fs.path.sep, + .posix => this.pooled[this.len] = std.fs.path.sep_posix, + .windows => this.pooled[this.len] = std.fs.path.sep_windows, + } + this.len += 1; + } + + if (opts.inputChildType(@TypeOf(characters)) == opts.pathUnit()) { + switch (comptime opts.sep) { + .any => { + @memcpy(this.pooled[this.len..][0..characters.len], characters); + this.len += characters.len; + }, + .auto, .posix, .windows => { + for (characters) |c| { + switch (c) { + '/', '\\' => this.pooled[this.len] = opts.sep.char(), + else => this.pooled[this.len] = c, + } + this.len += 1; + } + }, + } + } else { + switch (opts.inputChildType(@TypeOf(characters))) { + u8 => { + const converted = bun.strings.convertUTF8toUTF16InBuffer(this.pooled[this.len..], characters); + if (comptime opts.sep != .any) { + for (this.pooled[this.len..][0..converted.len], 0..) |c, off| { + switch (c) { + '/', '\\' => this.pooled[this.len + off] = opts.sep.char(), + else => {}, + } + } + } + this.len += converted.len; + }, + u16 => { + const converted = bun.strings.convertUTF16toUTF8InBuffer(this.pooled[this.len..], characters) catch unreachable; + if (comptime opts.sep != .any) { + for (this.pooled[this.len..][0..converted.len], 0..) 
|c, off| { + switch (c) { + '/', '\\' => this.pooled[this.len + off] = opts.sep.char(), + else => {}, + } + } + } + this.len += converted.len; + }, + else => @compileError("unexpected character type"), + } + } + + // switch (@TypeOf(characters)) { + // []u8, []const u8, [:0]u8, [:0]const u8 => { + // if (opts.unit == .u8) { + // this.appendT() + // } + // } + // } + } + + // fn append(this: *@This(), characters: []const opts.pathUnit(), add_separator: bool) void { + // if (add_separator) {} + // switch (comptime opts.sep) { + // .any => { + // @memcpy(this.pooled[this.len..][0..characters.len], characters); + // this.len += characters.len; + // }, + // .auto, .posix, .windows => { + // for (characters) |c| { + // switch (c) { + // '/', '\\' => this.pooled[this.len] = opts.sep.char(), + // else => this.pooled[this.len] = c, + // } + // this.len += 1; + // } + // }, + // } + // } + + fn convertAppend(this: *@This(), characters: []const opts.notPathUnit()) void { + _ = this; + _ = characters; + // switch (comptime opts.sep) { + // .any => { + // switch (opts.notPathUnit()) { + // .u8 => { + // const converted = bun.strings.convertUTF8toUTF16InBuffer(this.pooled[this.len..], characters); + // }, + // } + // }, + // } + } + }, + // .stack => struct { + // buf: PathBuffer, + // len: u16, + // }, + // .array_list => struct { + // list: std.ArrayList(opts.pathUnit()), + // }, + + }; + } + + const Error = error{MaxPathExceeded}; + + pub fn ResultFn(comptime opts: @This()) fn (comptime T: type) type { + return struct { + pub fn Result(comptime T: type) type { + return switch (opts.check_length) { + .assume_always_less_than_max_path => T, + .check_for_greater_than_max_path => Error!T, + }; + } + }.Result; + } + + pub fn inputChildType(comptime opts: @This(), comptime InputType: type) type { + _ = opts; + return switch (@typeInfo(std.meta.Child(InputType))) { + // handle string literals + .array => |array| array.child, + else => std.meta.Child(InputType), + }; + } +}; + +pub fn AbsPath(comptime opts: Options) type { + var copy = opts; + copy.kind = .abs; + return Path(copy); +} + +pub fn RelPath(comptime opts: Options) type { + var copy = opts; + copy.kind = .rel; + return Path(copy); +} + +pub fn Path(comptime opts: Options) type { + const Result = opts.ResultFn(); + + // if (opts.unit == .u16 and !Environment.isWindows) { + // @compileError("utf16 not supported"); + // } + + // const log = Output.scoped(.Path, false); + + return struct { + _buf: opts.Buf(), + + pub fn init() @This() { + switch (comptime opts.buf_type) { + .pool => { + return .{ + ._buf = .{ + .pooled = switch (opts.unit) { + .u8 => bun.path_buffer_pool.get(), + .u16 => bun.w_path_buffer_pool.get(), + .os => if (comptime Environment.isWindows) + bun.w_path_buffer_pool.get() + else + bun.path_buffer_pool.get(), + }, + .len = 0, + }, + }; + }, + } + } + + pub fn deinit(this: *const @This()) void { + switch (comptime opts.buf_type) { + .pool => { + switch (opts.unit) { + .u8 => bun.path_buffer_pool.put(this._buf.pooled), + .u16 => bun.w_path_buffer_pool.put(this._buf.pooled), + .os => if (comptime Environment.isWindows) + bun.w_path_buffer_pool.put(this._buf.pooled) + else + bun.path_buffer_pool.put(this._buf.pooled), + } + }, + } + @constCast(this).* = undefined; + } + + pub fn move(this: *const @This()) @This() { + const moved = this.*; + @constCast(this).* = undefined; + return moved; + } + + pub fn initTopLevelDir() @This() { + bun.debugAssert(bun.fs.FileSystem.instance_loaded); + const top_level_dir = 
bun.fs.FileSystem.instance.top_level_dir; + + const trimmed = switch (comptime opts.kind) { + .abs => trimmed: { + bun.debugAssert(isInputAbsolute(top_level_dir)); + break :trimmed trimInput(.abs, top_level_dir); + }, + .rel => @compileError("cannot create a relative path from top_level_dir"), + .any => trimInput(.abs, top_level_dir), + }; + + var this = init(); + this._buf.append(trimmed, false); + return this; + } + + pub fn initFdPath(fd: FD) !@This() { + switch (comptime opts.kind) { + .abs => {}, + .rel => @compileError("cannot create a relative path from getFdPath"), + .any => {}, + } + + var this = init(); + switch (comptime opts.buf_type) { + .pool => { + const raw = try fd.getFdPath(this._buf.pooled); + const trimmed = trimInput(.abs, raw); + this._buf.len = trimmed.len; + }, + } + + return this; + } + + pub fn from(input: anytype) Result(@This()) { + switch (comptime @TypeOf(input)) { + []u8, []const u8, [:0]u8, [:0]const u8 => {}, + []u16, []const u16, [:0]u16, [:0]const u16 => {}, + else => @compileError("unsupported type: " ++ @typeName(@TypeOf(input))), + } + const trimmed = switch (comptime opts.kind) { + .abs => trimmed: { + bun.debugAssert(isInputAbsolute(input)); + break :trimmed trimInput(.abs, input); + }, + .rel => trimmed: { + bun.debugAssert(!isInputAbsolute(input)); + break :trimmed trimInput(.rel, input); + }, + .any => trimInput(if (isInputAbsolute(input)) .abs else .rel, input), + }; + + if (comptime opts.check_length == .check_for_greater_than_max_path) { + if (trimmed.len >= opts.maxPathLength()) { + return error.MaxPathExceeded; + } + } + + var this = init(); + this._buf.append(trimmed, false); + return this; + } + + pub fn isAbsolute(this: *const @This()) bool { + return switch (comptime opts.kind) { + .abs => @compileError("already known to be absolute"), + .rel => @compileError("already known to not be absolute"), + .any => isInputAbsolute(this.slice()), + }; + } + + pub fn basename(this: *const @This()) []const opts.pathUnit() { + return bun.strings.basename(opts.pathUnit(), this.slice()); + } + + pub fn basenameZ(this: *const @This()) [:0]const opts.pathUnit() { + const full = this.sliceZ(); + const base = bun.strings.basename(opts.pathUnit(), full); + return full[full.len - base.len ..][0..base.len :0]; + } + + pub fn dirname(this: *const @This()) ?[]const opts.pathUnit() { + return bun.Dirname.dirname(opts.pathUnit(), this.slice()); + } + + pub fn slice(this: *const @This()) []const opts.pathUnit() { + switch (comptime opts.buf_type) { + .pool => return this._buf.pooled[0..this._buf.len], + } + } + + pub fn sliceZ(this: *const @This()) [:0]const opts.pathUnit() { + switch (comptime opts.buf_type) { + .pool => { + this._buf.pooled[this._buf.len] = 0; + return this._buf.pooled[0..this._buf.len :0]; + }, + } + } + + // pub fn buf(this: *const @This()) []opts.pathUnit() { + // switch (comptime opts.buf_type) { + // .pool => { + // return this._buf.pooled; + // }, + // } + // } + + pub fn len(this: *const @This()) usize { + switch (comptime opts.buf_type) { + .pool => { + return this._buf.len; + }, + } + } + + pub fn clone(this: *const @This()) @This() { + switch (comptime opts.buf_type) { + .pool => { + var cloned = init(); + @memcpy(cloned._buf.pooled[0..this._buf.len], this._buf.pooled[0..this._buf.len]); + cloned._buf.len = this._buf.len; + return cloned; + }, + } + } + + pub fn clear(this: *@This()) void { + this._buf.setLength(0); + } + + pub fn rootLen(input: anytype) ?usize { + if (comptime Environment.isWindows) { + if (input.len > 2 and input[1] == 
':' and switch (input[2]) { + '/', '\\' => true, + else => false, + }) { + const letter = input[0]; + if (('a' <= letter and letter <= 'z') or ('A' <= letter and letter <= 'Z')) { + // C:\ + return 3; + } + } + + if (input.len > 5 and + switch (input[0]) { + '/', '\\' => true, + else => false, + } and + switch (input[1]) { + '/', '\\' => true, + else => false, + } and + switch (input[2]) { + '\\', '.' => false, + else => true, + }) + { + var i: usize = 3; + // \\network\share\ + // ^ + while (i < input.len and switch (input[i]) { + '/', '\\' => false, + else => true, + }) { + i += 1; + } + + i += 1; + // \\network\share\ + // ^ + const start = i; + while (i < input.len and switch (input[i]) { + '/', '\\' => false, + else => true, + }) { + i += 1; + } + + if (start != i and i < input.len and switch (input[i]) { + '/', '\\' => true, + else => false, + }) { + // \\network\share\ + // ^ + if (i + 1 < input.len) { + return i + 1; + } + return i; + } + } + + if (input.len > 0 and switch (input[0]) { + '/', '\\' => true, + else => false, + }) { + // \ + return 1; + } + + return null; + } + + if (input.len > 0 and input[0] == '/') { + // / + return 1; + } + + return null; + } + + const TrimInputKind = enum { + abs, + rel, + }; + + fn trimInput(kind: TrimInputKind, input: anytype) []const opts.inputChildType(@TypeOf(input)) { + var trimmed: []const opts.inputChildType(@TypeOf(input)) = input[0..]; + + if (comptime Environment.isWindows) { + switch (kind) { + .abs => { + const root_len = rootLen(input) orelse 0; + while (trimmed.len > root_len and switch (trimmed[trimmed.len - 1]) { + '/', '\\' => true, + else => false, + }) { + trimmed = trimmed[0 .. trimmed.len - 1]; + } + }, + .rel => { + if (trimmed.len > 1 and trimmed[0] == '.') { + const c = trimmed[1]; + if (c == '/' or c == '\\') { + trimmed = trimmed[2..]; + } + } + while (trimmed.len > 0 and switch (trimmed[0]) { + '/', '\\' => true, + else => false, + }) { + trimmed = trimmed[1..]; + } + while (trimmed.len > 0 and switch (trimmed[trimmed.len - 1]) { + '/', '\\' => true, + else => false, + }) { + trimmed = trimmed[0 .. trimmed.len - 1]; + } + }, + } + + return trimmed; + } + + switch (kind) { + .abs => { + const root_len = rootLen(input) orelse 0; + while (trimmed.len > root_len and trimmed[trimmed.len - 1] == '/') { + trimmed = trimmed[0 .. trimmed.len - 1]; + } + }, + .rel => { + if (trimmed.len > 1 and trimmed[0] == '.' and trimmed[1] == '/') { + trimmed = trimmed[2..]; + } + while (trimmed.len > 0 and trimmed[0] == '/') { + trimmed = trimmed[1..]; + } + + while (trimmed.len > 0 and trimmed[trimmed.len - 1] == '/') { + trimmed = trimmed[0 .. 
trimmed.len - 1]; + } + }, + } + + return trimmed; + } + + fn isInputAbsolute(input: anytype) bool { + if (input.len == 0) { + return false; + } + + if (input[0] == '/') { + return true; + } + + if (comptime Environment.isWindows) { + if (input[0] == '\\') { + return true; + } + + if (input.len < 3) { + return false; + } + + if (input[1] == ':' and switch (input[2]) { + '/', '\\' => true, + else => false, + }) { + return true; + } + } + + return false; + } + + pub fn append(this: *@This(), input: anytype) Result(void) { + const needs_sep = this.len() > 0 and switch (comptime opts.sep) { + .any => switch (this.slice()[this.len() - 1]) { + '/', '\\' => false, + else => true, + }, + else => this.slice()[this.len() - 1] != opts.sep.char(), + }; + + switch (comptime opts.kind) { + .abs => { + const has_root = this.len() > 0; + + if (comptime Environment.isDebug) { + if (has_root) { + bun.debugAssert(!isInputAbsolute(input)); + } else { + bun.debugAssert(isInputAbsolute(input)); + } + } + + const trimmed = trimInput(if (has_root) .rel else .abs, input); + + if (trimmed.len == 0) { + return; + } + + if (comptime opts.check_length == .check_for_greater_than_max_path) { + if (this.len() + trimmed.len + @intFromBool(needs_sep) >= opts.maxPathLength()) { + return error.MaxPathExceeded; + } + } + + this._buf.append(trimmed, needs_sep); + }, + .rel => { + bun.debugAssert(!isInputAbsolute(input)); + + const trimmed = trimInput(.rel, input); + + if (trimmed.len == 0) { + return; + } + + if (comptime opts.check_length == .check_for_greater_than_max_path) { + if (this.len() + trimmed.len + @intFromBool(needs_sep) >= opts.maxPathLength()) { + return error.MaxPathExceeded; + } + } + + this._buf.append(trimmed, needs_sep); + }, + .any => { + const input_is_absolute = isInputAbsolute(input); + + if (comptime Environment.isDebug) { + if (needs_sep) { + bun.debugAssert(!input_is_absolute); + } + } + + const trimmed = trimInput(if (this.len() > 0) + // anything appended to an existing path should be trimmed + // as a relative path + .rel + else if (isInputAbsolute(input)) + // path is empty, trim based on input + .abs + else + .rel, input); + + if (trimmed.len == 0) { + return; + } + + if (comptime opts.check_length == .check_for_greater_than_max_path) { + if (this.len() + trimmed.len + @intFromBool(needs_sep) >= opts.maxPathLength()) { + return error.MaxPathExceeded; + } + } + + this._buf.append(trimmed, needs_sep); + }, + } + } + + pub fn appendFmt(this: *@This(), comptime fmt: []const u8, args: anytype) Result(void) { + // TODO: there's probably a better way to do this. 
needed for trimming slashes + var temp: Path(.{ .buf_type = .pool }) = .init(); + defer temp.deinit(); + + const input = switch (comptime opts.buf_type) { + .pool => std.fmt.bufPrint(temp._buf.pooled, fmt, args) catch { + if (comptime opts.check_length == .check_for_greater_than_max_path) { + return error.MaxPathExceeded; + } + unreachable; + }, + }; + + return this.append(input); + } + + pub fn join(this: *@This(), parts: []const []const opts.pathUnit()) Result(void) { + switch (comptime opts.unit) { + .u8 => {}, + .u16 => @compileError("unsupported unit type"), + .os => if (Environment.isWindows) @compileError("unsupported unit type"), + } + + switch (comptime opts.kind) { + .abs => {}, + .rel => @compileError("cannot join with relative path"), + .any => { + bun.debugAssert(this.isAbsolute()); + }, + } + + const cloned = this.clone(); + defer cloned.deinit(); + + switch (comptime opts.buf_type) { + .pool => { + const joined = bun.path.joinAbsStringBuf( + cloned.slice(), + this._buf.pooled, + parts, + switch (opts.sep) { + .any, .auto => .auto, + .posix => .posix, + .windows => .windows, + }, + ); + + const trimmed = trimInput(.abs, joined); + this._buf.len = trimmed.len; + }, + } + } + + pub fn relative(this: *const @This(), to: anytype) RelPath(opts) { + switch (comptime opts.buf_type) { + .pool => { + var output: RelPath(opts) = .init(); + const rel = bun.path.relativeBufZ(output._buf.pooled, this.slice(), to.slice()); + const trimmed = trimInput(.rel, rel); + output._buf.len = trimmed.len; + return output; + }, + } + } + + pub fn undo(this: *@This(), n_components: usize) void { + const min_len = switch (comptime opts.kind) { + .abs => rootLen(this.slice()) orelse 0, + .rel => 0, + .any => min_len: { + if (this.isAbsolute()) { + break :min_len rootLen(this.slice()) orelse 0; + } + break :min_len 0; + }, + }; + + var i: usize = 0; + while (i < n_components) { + const slash = switch (comptime opts.sep) { + .any => std.mem.lastIndexOfAny(opts.pathUnit(), this.slice(), &.{ std.fs.path.sep_posix, std.fs.path.sep_windows }), + .auto => std.mem.lastIndexOfScalar(opts.pathUnit(), this.slice(), std.fs.path.sep), + .posix => std.mem.lastIndexOfScalar(opts.pathUnit(), this.slice(), std.fs.path.sep_posix), + .windows => std.mem.lastIndexOfScalar(opts.pathUnit(), this.slice(), std.fs.path.sep_windows), + } orelse { + this._buf.setLength(min_len); + return; + }; + + if (slash < min_len) { + this._buf.setLength(min_len); + return; + } + + this._buf.setLength(slash); + i += 1; + } + } + + const ResetScope = struct { + path: *Path(opts), + saved_len: usize, + + pub fn restore(this: *const ResetScope) void { + this.path._buf.setLength(this.saved_len); + } + }; + + pub fn save(this: *@This()) ResetScope { + return .{ .path = this, .saved_len = this.len() }; + } + }; +} diff --git a/src/paths/path_buffer_pool.zig b/src/paths/path_buffer_pool.zig new file mode 100644 index 0000000000..3489d920a0 --- /dev/null +++ b/src/paths/path_buffer_pool.zig @@ -0,0 +1,34 @@ +const bun = @import("bun"); +const Environment = bun.Environment; +const ObjectPool = bun.ObjectPool; +const PathBuffer = bun.PathBuffer; +const WPathBuffer = bun.WPathBuffer; + +// This pool exists because on Windows, each path buffer costs 64 KB. +// This makes the stack memory usage very unpredictable, which means we can't really know how much stack space we have left. +// This pool is a workaround to make the stack memory usage more predictable. +// We keep up to 4 path buffers alive per thread at a time. 
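+//
+// A minimal usage sketch (illustrative only, not part of this change): callers
+// borrow a buffer and return it to the pool when done, typically with `defer`.
+// `fd` here is an assumed `bun.FD` from the caller's context:
+//
+//     const buf = bun.path_buffer_pool.get();
+//     defer bun.path_buffer_pool.put(buf);
+//     const path = try fd.getFdPath(buf);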
+fn PathBufferPoolT(comptime T: type) type { + return struct { + const Pool = ObjectPool(T, null, true, 4); + + pub fn get() *T { + // use a threadlocal allocator so mimalloc deletes it on thread deinit. + return &Pool.get(bun.threadlocalAllocator()).data; + } + + pub fn put(buffer: *const T) void { + // there's no deinit function on T so @constCast is fine + var node: *Pool.Node = @alignCast(@fieldParentPtr("data", @constCast(buffer))); + node.release(); + } + + pub fn deleteAll() void { + Pool.deleteAll(); + } + }; +} + +pub const path_buffer_pool = PathBufferPoolT(PathBuffer); +pub const w_path_buffer_pool = PathBufferPoolT(WPathBuffer); +pub const os_path_buffer_pool = if (Environment.isWindows) w_path_buffer_pool else path_buffer_pool; diff --git a/src/ptr/CowSlice.zig b/src/ptr/CowSlice.zig index 33411b21ce..d971e8fbbb 100644 --- a/src/ptr/CowSlice.zig +++ b/src/ptr/CowSlice.zig @@ -221,8 +221,8 @@ pub fn CowSliceZ(T: type, comptime sentinel: ?T) type { } } - pub fn format(str: Self, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - return std.fmt.formatType(str.slice(), fmt, options, writer, 1); + pub fn format(str: Self, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + return try writer.writeAll(str.slice()); } /// Free this `Cow`'s allocation if it is owned. diff --git a/src/ptr/ref_count.zig b/src/ptr/ref_count.zig index 127dbfe1c9..932f94ca18 100644 --- a/src/ptr/ref_count.zig +++ b/src/ptr/ref_count.zig @@ -75,6 +75,7 @@ pub fn RefCount(T: type, field_name: []const u8, destructor_untyped: anytype, op const debug_name = options.debug_name orelse bun.meta.typeBaseName(@typeName(T)); pub const scope = bun.Output.Scoped(debug_name, true); + const DEBUG_STACK_TRACE = false; const Destructor = if (options.destructor_ctx) |ctx| fn (*T, ctx) void else fn (*T) void; const destructor: Destructor = destructor_untyped; @@ -106,10 +107,12 @@ pub fn RefCount(T: type, field_name: []const u8, destructor_untyped: anytype, op counter.active_counts, counter.active_counts + 1, }); - bun.crash_handler.dumpCurrentStackTrace(@returnAddress(), .{ - .frame_count = 2, - .skip_file_patterns = &.{"ptr/ref_count.zig"}, - }); + if (DEBUG_STACK_TRACE) { + bun.crash_handler.dumpCurrentStackTrace(@returnAddress(), .{ + .frame_count = 2, + .skip_file_patterns = &.{"ptr/ref_count.zig"}, + }); + } } counter.assertNonThreadSafeCountIsSingleThreaded(); counter.active_counts += 1; @@ -130,10 +133,12 @@ pub fn RefCount(T: type, field_name: []const u8, destructor_untyped: anytype, op counter.active_counts, counter.active_counts - 1, }); - bun.crash_handler.dumpCurrentStackTrace(@returnAddress(), .{ - .frame_count = 2, - .skip_file_patterns = &.{"ptr/ref_count.zig"}, - }); + if (DEBUG_STACK_TRACE) { + bun.crash_handler.dumpCurrentStackTrace(@returnAddress(), .{ + .frame_count = 2, + .skip_file_patterns = &.{"ptr/ref_count.zig"}, + }); + } } counter.assertNonThreadSafeCountIsSingleThreaded(); counter.active_counts -= 1; diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 733127d023..24d52e4a00 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -89,7 +89,7 @@ const bufs = struct { pub threadlocal var esm_absolute_package_path_joined: bun.PathBuffer = undefined; pub threadlocal var dir_entry_paths_to_resolve: [256]DirEntryResolveQueueItem = undefined; - pub threadlocal var open_dirs: [256]std.fs.Dir = undefined; + pub threadlocal var open_dirs: [256]FD = undefined; pub threadlocal var resolve_without_remapping: 
bun.PathBuffer = undefined; pub threadlocal var index: bun.PathBuffer = undefined; pub threadlocal var dir_info_uncached_filename: bun.PathBuffer = undefined; @@ -2216,7 +2216,7 @@ pub const Resolver = struct { var dir_entries_option: *Fs.FileSystem.RealFS.EntriesOption = undefined; var needs_iter = true; var in_place: ?*Fs.FileSystem.DirEntry = null; - const open_dir = bun.openDirForIteration(std.fs.cwd(), dir_path) catch |err| { + const open_dir = bun.openDirForIteration(FD.cwd(), dir_path).unwrap() catch |err| { // TODO: handle this error better r.log.addErrorFmt( null, @@ -2264,10 +2264,10 @@ pub const Resolver = struct { dir_entries_ptr.* = new_entry; if (r.store_fd) { - dir_entries_ptr.fd = .fromStdDir(open_dir); + dir_entries_ptr.fd = open_dir; } - bun.fs.debug("readdir({}, {s}) = {d}", .{ bun.FD.fromStdDir(open_dir), dir_path, dir_entries_ptr.data.count() }); + bun.fs.debug("readdir({}, {s}) = {d}", .{ open_dir, dir_path, dir_entries_ptr.data.count() }); dir_entries_option = rfs.entries.put(&cached_dir_entry_result, .{ .entries = dir_entries_ptr, @@ -2288,7 +2288,7 @@ pub const Resolver = struct { // to check for a parent package.json null, allocators.NotFound, - .fromStdDir(open_dir), + open_dir, package_id, ); return dir_info_ptr; @@ -2783,9 +2783,9 @@ pub const Resolver = struct { // When this function halts, any item not processed means it's not found. defer { if (open_dir_count > 0 and (!r.store_fd or r.fs.fs.needToCloseFiles())) { - const open_dirs: []std.fs.Dir = bufs(.open_dirs)[0..open_dir_count]; + const open_dirs = bufs(.open_dirs)[0..open_dir_count]; for (open_dirs) |open_dir| { - bun.FD.fromStdDir(open_dir).close(); + open_dir.close(); } } } @@ -2810,8 +2810,8 @@ pub const Resolver = struct { defer top_parent = queue_top.result; queue_slice.len -= 1; - const open_dir: std.fs.Dir = if (queue_top.fd.isValid()) - queue_top.fd.stdDir() + const open_dir: FD = if (queue_top.fd.isValid()) + queue_top.fd else open_dir: { // This saves us N copies of .toPosixPath // which was likely the perf gain from resolving directories relative to the parent directory, anyway. @@ -2820,19 +2820,20 @@ pub const Resolver = struct { defer path.ptr[queue_top.unsafe_path.len] = prev_char; const sentinel = path.ptr[0..queue_top.unsafe_path.len :0]; - const open_req = if (comptime Environment.isPosix) - std.fs.openDirAbsoluteZ( + const open_req = if (comptime Environment.isPosix) open_req: { + const dir_result = std.fs.openDirAbsoluteZ( sentinel, .{ .no_follow = !follow_symlinks, .iterate = true }, - ) - else if (comptime Environment.isWindows) open_req: { + ) catch |err| break :open_req err; + break :open_req FD.fromStdDir(dir_result); + } else if (comptime Environment.isWindows) open_req: { const dirfd_result = bun.sys.openDirAtWindowsA(bun.invalid_fd, sentinel, .{ .iterable = true, .no_follow = !follow_symlinks, .read_only = true, }); if (dirfd_result.unwrap()) |result| { - break :open_req result.stdDir(); + break :open_req result; } else |err| { break :open_req err; } @@ -2879,7 +2880,7 @@ pub const Resolver = struct { }; if (!queue_top.fd.isValid()) { - Fs.FileSystem.setMaxFd(open_dir.fd); + Fs.FileSystem.setMaxFd(open_dir.cast()); // these objects mostly just wrap the file descriptor, so it's fine to keep it. 
bufs(.open_dirs)[open_dir_count] = open_dir; open_dir_count += 1; @@ -2945,13 +2946,13 @@ pub const Resolver = struct { if (in_place) |existing| { existing.data.clearAndFree(allocator); } - new_entry.fd = if (r.store_fd) .fromStdDir(open_dir) else .invalid; + new_entry.fd = if (r.store_fd) open_dir else .invalid; var dir_entries_ptr = in_place orelse allocator.create(Fs.FileSystem.DirEntry) catch unreachable; dir_entries_ptr.* = new_entry; dir_entries_option = try rfs.entries.put(&cached_dir_entry_result, .{ .entries = dir_entries_ptr, }); - bun.fs.debug("readdir({}, {s}) = {d}", .{ bun.FD.fromStdDir(open_dir), dir_path, dir_entries_ptr.data.count() }); + bun.fs.debug("readdir({}, {s}) = {d}", .{ open_dir, dir_path, dir_entries_ptr.data.count() }); } // We must initialize it as empty so that the result index is correct. @@ -2966,7 +2967,7 @@ pub const Resolver = struct { cached_dir_entry_result.index, r.dir_cache.atIndex(top_parent.index), top_parent.index, - .fromStdDir(open_dir), + open_dir, null, ); diff --git a/src/router.zig b/src/router.zig index 5d2d8b37b4..b61930074c 100644 --- a/src/router.zig +++ b/src/router.zig @@ -20,7 +20,7 @@ const StoredFileDescriptorType = bun.StoredFileDescriptorType; const DirInfo = @import("./resolver/dir_info.zig"); const Fs = @import("./fs.zig"); const Options = @import("./options.zig"); -const URLPath = @import("./http/url_path.zig"); +const URLPath = @import("./http/URLPath.zig"); const PathnameScanner = @import("./url.zig").PathnameScanner; const CodepointIterator = @import("./string_immutable.zig").CodepointIterator; diff --git a/src/runtime.js b/src/runtime.js index 2848d4d3b3..ea410c7a32 100644 --- a/src/runtime.js +++ b/src/runtime.js @@ -160,7 +160,7 @@ export var __legacyDecorateClassTS = function (decorators, target, key, desc) { else for (var i = decorators.length - 1; i >= 0; i--) if ((d = decorators[i])) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; + return (c > 3 && r && Object.defineProperty(target, key, r), r); }; export var __legacyDecorateParamTS = (index, decorator) => (target, key) => decorator(target, key, index); diff --git a/src/s3/client.zig b/src/s3/client.zig index 7f6eb6c76f..65feaa76dd 100644 --- a/src/s3/client.zig +++ b/src/s3/client.zig @@ -261,21 +261,30 @@ pub fn writableStream( ) bun.JSError!JSC.JSValue { const Wrapper = struct { pub fn callback(result: S3UploadResult, sink: *JSC.WebCore.NetworkSink) void { - if (sink.endPromise.hasValue()) { + if (sink.endPromise.hasValue() or sink.flushPromise.hasValue()) { const event_loop = sink.globalThis.bunVM().eventLoop(); event_loop.enter(); defer event_loop.exit(); switch (result) { .success => { - sink.endPromise.resolve(sink.globalThis, JSC.jsNumber(0)); + if (sink.flushPromise.hasValue()) { + sink.flushPromise.resolve(sink.globalThis, JSC.jsNumber(0)); + } + if (sink.endPromise.hasValue()) { + sink.endPromise.resolve(sink.globalThis, JSC.jsNumber(0)); + } }, .failure => |err| { + const js_err = err.toJS(sink.globalThis, sink.path()); + if (sink.flushPromise.hasValue()) { + sink.flushPromise.reject(sink.globalThis, js_err); + } + if (sink.endPromise.hasValue()) { + sink.endPromise.reject(sink.globalThis, js_err); + } if (!sink.done) { sink.abort(); - return; } - - sink.endPromise.reject(sink.globalThis, err.toJS(sink.globalThis, sink.path())); }, } } @@ -285,7 +294,7 @@ pub fn writableStream( const proxy_url = (proxy orelse ""); this.ref(); // ref the credentials const task = bun.new(MultiPartUpload, .{ - .ref_count = .init(), + .ref_count = .initExactRefs(2), // +1 for the stream .credentials = this, .path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(), .proxy = if (proxy_url.len > 0) bun.default_allocator.dupe(u8, proxy_url) catch bun.outOfMemory() else "", @@ -301,16 +310,14 @@ pub fn writableStream( task.poll_ref.ref(task.vm); - task.ref(); // + 1 for the stream var response_stream = JSC.WebCore.NetworkSink.new(.{ - .task = .{ .s3_upload = task }, - .buffer = .{}, + .task = task, .globalThis = globalThis, - .encoded = false, - .endPromise = JSC.JSPromise.Strong.init(globalThis), + .highWaterMark = @truncate(options.partSize), }).toSink(); task.callback_context = @ptrCast(response_stream); + task.onWritable = @ptrCast(&JSC.WebCore.NetworkSink.onWritable); var signal = &response_stream.sink.signal; signal.* = JSC.WebCore.NetworkSink.JSSink.SinkSignal.init(.zero); @@ -322,96 +329,105 @@ pub fn writableStream( return response_stream.sink.toJS(globalThis); } -const S3UploadStreamWrapper = struct { +pub const S3UploadStreamWrapper = struct { const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{}); pub const ref = RefCount.ref; pub const deref = RefCount.deref; + pub const ResumableSink = @import("../bun.js/webcore/ResumableSink.zig").ResumableS3UploadSink; + const log = bun.Output.scoped(.S3UploadStream, false); ref_count: RefCount, - readable_stream_ref: JSC.WebCore.ReadableStream.Strong, - sink: *JSC.WebCore.NetworkSink, + + sink: ?*ResumableSink, task: *MultiPartUpload, + endPromise: JSC.JSPromise.Strong, callback: ?*const fn (S3UploadResult, *anyopaque) void, callback_context: *anyopaque, path: []const u8, // this is owned by the task not by the wrapper global: *JSC.JSGlobalObject, - pub fn resolve(result: S3UploadResult, self: *@This()) void { - const sink = self.sink; - defer self.deref(); - if (sink.endPromise.hasValue()) { - 
switch (result) {
-                .success => sink.endPromise.resolve(self.global, JSC.jsNumber(0)),
-                .failure => |err| {
-                    if (!sink.done) {
-                        sink.abort();
-                        return;
-                    }
-                    sink.endPromise.reject(self.global, err.toJS(self.global, self.path));
-                },
-            }
+    fn detachSink(self: *@This()) void {
+        log("detachSink {}", .{self.sink != null});
+        if (self.sink) |sink| {
+            self.sink = null;
+            sink.deref();
         }
+    }
+    pub fn onWritable(task: *MultiPartUpload, self: *@This(), _: u64) void {
+        log("onWritable {} {}", .{ self.sink != null, task.ended });
+        // end was called, we don't need to drain anymore
+        if (task.ended) return;
+        // we have more space in the queue, drain it
+        if (self.sink) |sink| {
+            sink.drain();
+        }
+    }
+
+    pub fn writeRequestData(this: *@This(), data: []const u8) bool {
+        log("writeRequestData {}", .{data.len});
+        return this.task.writeBytes(data, false) catch bun.outOfMemory();
+    }
+
+    pub fn writeEndRequest(this: *@This(), err: ?JSC.JSValue) void {
+        log("writeEndRequest {}", .{err != null});
+        this.detachSink();
+        defer this.deref();
+        if (err) |js_err| {
+            if (this.endPromise.hasValue() and !js_err.isEmptyOrUndefinedOrNull()) {
+                // if we have an explicit error, reject the promise
+                // if not, calling .fail will create an S3Error instance
+                // this matches the previous behavior
+                this.endPromise.reject(this.global, js_err);
+                this.endPromise = .empty;
+            }
+            if (!this.task.ended) {
+                this.task.fail(.{
+                    .code = "UnknownError",
+                    .message = "ReadableStream ended with an error",
+                });
+            }
+        } else {
+            _ = this.task.writeBytes("", true) catch bun.outOfMemory();
+        }
+    }
+
+    pub fn resolve(result: S3UploadResult, self: *@This()) void {
+        log("resolve {any}", .{result});
+        defer self.deref();
+        switch (result) {
+            .success => {
+                if (self.endPromise.hasValue()) {
+                    self.endPromise.resolve(self.global, JSC.jsNumber(0));
+                    self.endPromise = .empty;
+                }
+            },
+            .failure => |err| {
+                if (self.sink) |sink| {
+                    self.sink = null;
+                    // sink in progress, cancel it (will call writeEndRequest for cleanup and will reject the endPromise)
+                    sink.cancel(err.toJS(self.global, self.path));
+                    sink.deref();
+                } else if (self.endPromise.hasValue()) {
+                    self.endPromise.reject(self.global, err.toJS(self.global, self.path));
+                    self.endPromise = .empty;
+                }
+            },
+        }
+
         if (self.callback) |callback| {
             callback(result, self.callback_context);
         }
     }
 
     fn deinit(self: *@This()) void {
-        self.readable_stream_ref.deinit();
-        self.sink.finalize();
-        self.sink.deinit();
+        log("deinit {}", .{self.sink != null});
+        self.detachSink();
         self.task.deref();
+        self.endPromise.deinit();
         bun.destroy(self);
     }
 };
 
-pub fn onUploadStreamResolveRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue {
-    var args = callframe.arguments_old(2);
-    var this = args.ptr[args.len - 1].asPromisePtr(S3UploadStreamWrapper);
-    defer this.deref();
-
-    if (this.readable_stream_ref.get(globalThis)) |stream| {
-        stream.done(globalThis);
-    }
-    this.readable_stream_ref.deinit();
-    this.task.continueStream();
-
-    return .js_undefined;
-}
-
-pub fn onUploadStreamRejectRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue {
-    const args = callframe.arguments_old(2);
-    var this = args.ptr[args.len - 1].asPromisePtr(S3UploadStreamWrapper);
-    defer this.deref();
-
-    const err = args.ptr[0];
-    if (this.sink.endPromise.hasValue()) {
-        this.sink.endPromise.reject(globalThis, err);
-    }
-
-    if (this.readable_stream_ref.get(globalThis)) |stream| {
-        stream.cancel(globalThis);
-
this.readable_stream_ref.deinit(); - } - if (this.sink.task) |task| { - if (task == .s3_upload) { - task.s3_upload.fail(.{ - .code = "UnknownError", - .message = "ReadableStream ended with an error", - }); - } - } - this.task.continueStream(); - - return .js_undefined; -} -comptime { - const jsonResolveRequestStream = JSC.toJSHostFn(onUploadStreamResolveRequestStream); - @export(&jsonResolveRequestStream, .{ .name = "Bun__S3UploadStream__onResolveRequestStream" }); - const jsonRejectRequestStream = JSC.toJSHostFn(onUploadStreamRejectRequestStream); - @export(&jsonRejectRequestStream, .{ .name = "Bun__S3UploadStream__onRejectRequestStream" }); -} - /// consumes the readable stream and upload to s3 pub fn uploadStream( this: *S3Credentials, @@ -428,14 +444,13 @@ pub fn uploadStream( ) JSC.JSValue { this.ref(); // ref the credentials const proxy_url = (proxy orelse ""); - if (readable_stream.isDisturbed(globalThis)) { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, bun.String.static("ReadableStream is already disturbed").toErrorInstance(globalThis)); + return JSC.JSPromise.rejectedPromise(globalThis, bun.String.static("ReadableStream is already disturbed").toErrorInstance(globalThis)).toJS(); } switch (readable_stream.ptr) { .Invalid => { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, bun.String.static("ReadableStream is invalid").toErrorInstance(globalThis)); + return JSC.JSPromise.rejectedPromise(globalThis, bun.String.static("ReadableStream is invalid").toErrorInstance(globalThis)).toJS(); }, inline .File, .Bytes => |stream| { if (stream.pending.result == .err) { @@ -454,7 +469,7 @@ pub fn uploadStream( } const task = bun.new(MultiPartUpload, .{ - .ref_count = .init(), + .ref_count = .initExactRefs(2), // +1 for the stream ctx (only deinit after task and context ended) .credentials = this, .path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(), .proxy = if (proxy_url.len > 0) bun.default_allocator.dupe(u8, proxy_url) catch bun.outOfMemory() else "", @@ -471,125 +486,22 @@ pub fn uploadStream( task.poll_ref.ref(task.vm); - task.ref(); // + 1 for the stream sink - - var response_stream = JSC.WebCore.NetworkSink.new(.{ - .task = .{ .s3_upload = task }, - .buffer = .{}, - .globalThis = globalThis, - .encoded = false, - .endPromise = JSC.JSPromise.Strong.init(globalThis), - }).toSink(); - task.ref(); // + 1 for the stream wrapper - - const endPromise = response_stream.sink.endPromise.value(); const ctx = bun.new(S3UploadStreamWrapper, .{ - .ref_count = .init(), - .readable_stream_ref = JSC.WebCore.ReadableStream.Strong.init(readable_stream, globalThis), - .sink = &response_stream.sink, + .ref_count = .initExactRefs(2), // +1 for the stream sink (only deinit after both sink and task ended) + .sink = null, .callback = callback, .callback_context = callback_context, .path = task.path, .task = task, + .endPromise = JSC.JSPromise.Strong.init(globalThis), .global = globalThis, }); + // +1 because the ctx refs the sink + ctx.sink = S3UploadStreamWrapper.ResumableSink.initExactRefs(globalThis, readable_stream, ctx, 2); task.callback_context = @ptrCast(ctx); - // keep the task alive until we are done configuring the signal - task.ref(); - defer task.deref(); - - var signal = &response_stream.sink.signal; - - signal.* = JSC.WebCore.NetworkSink.JSSink.SinkSignal.init(.zero); - - // explicitly set it to a dead pointer - // we use this memory address to disable signals being sent - signal.clear(); - 
bun.assert(signal.isDead());
-
-    // We are already corked!
-    const assignment_result: JSC.JSValue = JSC.WebCore.NetworkSink.JSSink.assignToStream(
-        globalThis,
-        readable_stream.value,
-        response_stream,
-        @as(**anyopaque, @ptrCast(&signal.ptr)),
-    );
-
-    assignment_result.ensureStillAlive();
-
-    // assert that it was updated
-    bun.assert(!signal.isDead());
-
-    if (assignment_result.toError()) |err| {
-        if (response_stream.sink.endPromise.hasValue()) {
-            response_stream.sink.endPromise.reject(globalThis, err);
-        }
-
-        task.fail(.{
-            .code = "UnknownError",
-            .message = "ReadableStream ended with an error",
-        });
-        readable_stream.cancel(globalThis);
-        return endPromise;
-    }
-
-    if (!assignment_result.isEmptyOrUndefinedOrNull()) {
-        assignment_result.ensureStillAlive();
-        // it returns a Promise when it goes through ReadableStreamDefaultReader
-        if (assignment_result.asAnyPromise()) |promise| {
-            switch (promise.status(globalThis.vm())) {
-                .pending => {
-                    // if we eended and its not canceled the promise is the endPromise
-                    // because assignToStream can return the sink.end() promise
-                    // we set the endPromise in the NetworkSink so we need to resolve it
-                    if (response_stream.sink.ended and !response_stream.sink.cancel) {
-                        task.continueStream();
-
-                        readable_stream.done(globalThis);
-                        return endPromise;
-                    }
-                    ctx.ref();
-
-                    assignment_result.then(
-                        globalThis,
-                        task.callback_context,
-                        onUploadStreamResolveRequestStream,
-                        onUploadStreamRejectRequestStream,
-                    );
-                    // we need to wait the promise to resolve because can be an error/cancel here
-                    if (!task.ended)
-                        task.continueStream();
-                },
-                .fulfilled => {
-                    task.continueStream();
-
-                    readable_stream.done(globalThis);
-                },
-                .rejected => {
-                    if (response_stream.sink.endPromise.hasValue()) {
-                        response_stream.sink.endPromise.reject(globalThis, promise.result(globalThis.vm()));
-                    }
-
-                    task.fail(.{
-                        .code = "UnknownError",
-                        .message = "ReadableStream ended with an error",
-                    });
-                    readable_stream.cancel(globalThis);
-                },
-            }
-        } else {
-            if (response_stream.sink.endPromise.hasValue()) {
-                response_stream.sink.endPromise.reject(globalThis, assignment_result);
-            }
-
-            task.fail(.{
-                .code = "UnknownError",
-                .message = "ReadableStream ended with an error",
-            });
-            readable_stream.cancel(globalThis);
-        }
-    }
-    return endPromise;
+    task.onWritable = @ptrCast(&S3UploadStreamWrapper.onWritable);
+    task.continueStream();
+    return ctx.endPromise.value();
 }
 
 /// download a file from s3 chunk by chunk aka streaming (used on readableStream)
diff --git a/src/s3/credentials.zig b/src/s3/credentials.zig
index 3e965c286e..c72c6a9289 100644
--- a/src/s3/credentials.zig
+++ b/src/s3/credentials.zig
@@ -182,6 +182,17 @@ pub const S3Credentials = struct {
                 new_credentials.options.partSize = @intCast(pageSize);
             }
         }
+        if (try opts.getOptional(globalObject, "partSize", i64)) |partSize| {
+            if (partSize < MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE or partSize > MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE) {
+                return globalObject.throwRangeError(partSize, .{
+                    .min = @intCast(MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE),
+                    .max = @intCast(MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE),
+                    .field_name = "partSize",
+                });
+            } else {
+                new_credentials.options.partSize = @intCast(partSize);
+            }
+        }
 
         if (try opts.getOptional(globalObject, "queueSize", i32)) |queueSize| {
             if (queueSize < 1) {
diff --git a/src/s3/list_objects.zig b/src/s3/list_objects.zig
index fff2cad804..317e6d4c19 100644
--- a/src/s3/list_objects.zig
+++ b/src/s3/list_objects.zig
@@ -160,7 +160,7
@@ pub const S3ListObjectsV2Result = struct { objectInfo.put(globalObject, JSC.ZigString.static("owner"), jsOwner); } - jsContents.putIndex(globalObject, @intCast(i), objectInfo); + try jsContents.putIndex(globalObject, @intCast(i), objectInfo); } jsResult.put(globalObject, JSC.ZigString.static("contents"), jsContents); @@ -172,7 +172,7 @@ pub const S3ListObjectsV2Result = struct { for (common_prefixes.items, 0..) |prefix, i| { const jsPrefix = JSValue.createEmptyObject(globalObject, 1); jsPrefix.put(globalObject, JSC.ZigString.static("prefix"), bun.String.createUTF8ForJS(globalObject, prefix)); - jsCommonPrefixes.putIndex(globalObject, @intCast(i), jsPrefix); + try jsCommonPrefixes.putIndex(globalObject, @intCast(i), jsPrefix); } jsResult.put(globalObject, JSC.ZigString.static("commonPrefixes"), jsCommonPrefixes); diff --git a/src/s3/multipart.zig b/src/s3/multipart.zig index a90468e252..854f82c76d 100644 --- a/src/s3/multipart.zig +++ b/src/s3/multipart.zig @@ -121,8 +121,7 @@ pub const MultiPartUpload = struct { vm: *JSC.VirtualMachine, globalThis: *JSC.JSGlobalObject, - buffered: std.ArrayListUnmanaged(u8) = .{}, - offset: usize = 0, + buffered: bun.io.StreamBuffer = .{}, path: []const u8, proxy: []const u8, @@ -143,6 +142,7 @@ pub const MultiPartUpload = struct { } = .not_started, callback: *const fn (S3SimpleRequest.S3UploadResult, *anyopaque) void, + onWritable: ?*const fn (task: *MultiPartUpload, ctx: *anyopaque, flushed: u64) void = null, callback_context: *anyopaque, const Self = @This(); @@ -220,6 +220,7 @@ pub const MultiPartUpload = struct { }, .etag => |etag| { log("onPartResponse {} success", .{this.partNumber}); + const sent = this.data.len; this.freeAllocatedSlice(); // we will need to order this this.ctx.multipart_etags.append(bun.default_allocator, .{ @@ -231,7 +232,7 @@ pub const MultiPartUpload = struct { // mark as available this.ctx.available.set(this.index); // drain more - this.ctx.drainEnqueuedParts(); + this.ctx.drainEnqueuedParts(sent); }, } } @@ -309,7 +310,7 @@ pub const MultiPartUpload = struct { .path = this.path, .method = .PUT, .proxy_url = this.proxyUrl(), - .body = this.buffered.items, + .body = this.buffered.slice(), .content_type = this.content_type, .acl = this.acl, .storage_class = this.storage_class, @@ -323,6 +324,10 @@ pub const MultiPartUpload = struct { }, .success => { log("singleSendUploadResponse success", .{}); + + if (this.onWritable) |callback| { + callback(this, this.callback_context, this.buffered.size()); + } this.done(); }, } @@ -374,7 +379,7 @@ pub const MultiPartUpload = struct { } /// Drain the parts, this is responsible for starting the parts and processing the buffered data - fn drainEnqueuedParts(this: *@This()) void { + fn drainEnqueuedParts(this: *@This(), flushed: u64) void { if (this.state == .finished or this.state == .singlefile_started) { return; } @@ -390,13 +395,24 @@ pub const MultiPartUpload = struct { } } const partSize = this.partSizeInBytes(); - if (this.ended or this.buffered.items.len >= partSize) { + if (this.ended or this.buffered.size() >= partSize) { this.processMultiPart(partSize); } - if (this.ended and this.available.mask == std.bit_set.IntegerBitSet(MAX_QUEUE_SIZE).initFull().mask) { - // we are done and no more parts are running - this.done(); + // empty queue + if (this.isQueueEmpty()) { + if (this.onWritable) |callback| { + callback(this, this.callback_context, flushed); + } + if (this.ended) { + // we are done and no more parts are running + this.done(); + } + } else if (!this.hasBackpressure() and 
flushed > 0) {
+            // we have more space in the queue, we can drain more
+            if (this.onWritable) |callback| {
+                callback(this, this.callback_context, flushed);
+            }
+        }
     }
 
     /// Finalize the upload with a failure
@@ -444,10 +460,10 @@
             this.multipart_upload_list.append(bun.default_allocator, "") catch bun.outOfMemory();
             // will deref and end after commit
             this.commitMultiPartRequest();
-        } else {
+        } else if (this.state == .singlefile_started) {
+            this.state = .finished;
             // single file upload, no need to commit
             this.callback(.{ .success = {} }, this.callback_context);
-            this.state = .finished;
             this.deref();
         }
     }
@@ -482,7 +498,7 @@
             log("startMultiPartRequestResult {s} success id: {s}", .{ this.path, this.upload_id });
             this.state = .multipart_completed;
             // start draining the parts
-            this.drainEnqueuedParts();
+            this.drainEnqueuedParts(0);
         },
         // this is "unreachable" but we cover it in case AWS returns 404
         .not_found => this.fail(.{
@@ -504,12 +520,13 @@
                 this.commitMultiPartRequest();
                 return;
             }
+            this.state = .finished;
             this.callback(.{ .failure = err }, this.callback_context);
             this.deref();
         },
         .success => {
-            this.callback(.{ .success = {} }, this.callback_context);
             this.state = .finished;
+            this.callback(.{ .success = {} }, this.callback_context);
             this.deref();
         },
     }
@@ -588,25 +605,28 @@
     fn processMultiPart(this: *@This(), part_size: usize) void {
         log("processMultiPart {s} {d}", .{ this.path, part_size });
+        if (this.buffered.isEmpty() and this.isQueueEmpty() and this.ended) {
+            // no more data to send and we are done
+            this.done();
+            return;
+        }
         // need to split in multiple parts because of the size
-        var buffer = this.buffered.items[this.offset..];
-        defer if (this.offset >= this.buffered.items.len) {
-            this.buffered.clearRetainingCapacity();
-            this.offset = 0;
+        defer if (this.buffered.isEmpty()) {
+            this.buffered.reset();
         };
-        while (buffer.len > 0) {
-            const len = @min(part_size, buffer.len);
+        while (this.buffered.isNotEmpty()) {
+            const len = @min(part_size, this.buffered.size());
             if (len < part_size and !this.ended) {
                 log("processMultiPart {s} {d} slice too small", .{ this.path, len });
                 // slice is too small, we need to wait for more data
                 break;
             }
             // if it is one big chunk we can pass ownership and avoid a dupe
-            if (len == this.buffered.items.len) {
+            if (this.buffered.cursor == 0 and this.buffered.size() == len) {
                 // we need to know the allocated size to free the memory later
-                const allocated_size = this.buffered.capacity;
-                const slice = this.buffered.items;
+                const allocated_size = this.buffered.memoryCost();
+                const slice = this.buffered.slice();
                 // we don't care about the result because we are sending everything
                 if (this.enqueuePart(slice, allocated_size, false)) {
@@ -615,7 +635,6 @@
                     // queue is not full, we can clear the buffer; the part now owns the data
                     // if it's full we will retry later
                     this.buffered = .{};
-                    this.offset = 0;
                     return;
                 }
                 log("processMultiPart {s} {d} queue full", .{ this.path, slice.len });
@@ -623,13 +642,12 @@
                 return;
             }
 
-            const slice = buffer[0..len];
-            buffer = buffer[len..];
+            const slice = this.buffered.slice()[0..len];
             // allocated size is the slice len because we dupe the buffer
             if (this.enqueuePart(slice, slice.len, true)) {
                 log("processMultiPart {s} {d} slice enqueued", .{ this.path, slice.len });
                 // queue is not full, we can set the offset
-                this.offset += len;
+                this.buffered.wrote(len);
             } else {
                 log("processMultiPart {s} {d} queue full", .{ this.path, slice.len });
                 // queue is full, stop enqueueing and retry later
@@ -642,7 +660,7 @@
         return this.proxy;
     }
     fn processBuffered(this: *@This(), part_size: usize) void {
-        if (this.ended and this.buffered.items.len < this.partSizeInBytes() and this.state == .not_started) {
+        if (this.ended and this.buffered.size() < this.partSizeInBytes() and this.state == .not_started) {
             log("processBuffered {s} singlefile_started", .{this.path});
             this.state = .singlefile_started;
             // we can do only 1 request
@@ -650,7 +668,7 @@
             .path = this.path,
             .method = .PUT,
             .proxy_url = this.proxyUrl(),
-            .body = this.buffered.items,
+            .body = this.buffered.slice(),
             .content_type = this.content_type,
             .acl = this.acl,
             .storage_class = this.storage_class,
@@ -674,34 +692,74 @@
         }
     }
 
-    pub fn sendRequestData(this: *@This(), chunk: []const u8, is_last: bool) void {
-        if (this.ended) return;
+    pub fn hasBackpressure(this: *@This()) bool {
+        // if we don't have any space in the queue, we have backpressure
+        // since we are not allowed to send more data
+        const index = this.available.findFirstSet() orelse return true;
+        return index >= this.options.queueSize;
+    }
+
+    pub fn isQueueEmpty(this: *@This()) bool {
+        return this.available.mask == std.bit_set.IntegerBitSet(MAX_QUEUE_SIZE).initFull().mask;
+    }
+
+    pub const WriteEncoding = enum {
+        bytes,
+        latin1,
+        utf16,
+    };
+
+    fn write(this: *@This(), chunk: []const u8, is_last: bool, comptime encoding: WriteEncoding) bun.OOM!bool {
+        if (this.ended) return true; // no backpressure since we are done
+        // we may call done inside processBuffered so we ensure that we keep a ref until we are done
+        this.ref();
+        defer this.deref();
         if (this.state == .wait_stream_check and chunk.len == 0 and is_last) {
             // we do this because the stream will close if the file doesn't exist, and we don't want to send an empty part in that case
             this.ended = true;
-            if (this.buffered.items.len > 0) {
+            if (this.buffered.size() > 0) {
                 this.processBuffered(this.partSizeInBytes());
             }
-            return;
+            return !this.hasBackpressure();
         }
         if (is_last) {
             this.ended = true;
             if (chunk.len > 0) {
-                this.buffered.appendSlice(bun.default_allocator, chunk) catch bun.outOfMemory();
+                switch (encoding) {
+                    .bytes => try this.buffered.write(chunk),
+                    .latin1 => try this.buffered.writeLatin1(chunk, true),
+                    .utf16 => try this.buffered.writeUTF16(@alignCast(std.mem.bytesAsSlice(u16, chunk))),
+                }
             }
             this.processBuffered(this.partSizeInBytes());
         } else {
             // still have more data; if we received an empty chunk there is nothing to do here
-            if (chunk.len == 0) return;
-            this.buffered.appendSlice(bun.default_allocator, chunk) catch bun.outOfMemory();
+            if (chunk.len == 0) return !this.hasBackpressure();
+            switch (encoding) {
+                .bytes => try this.buffered.write(chunk),
+                .latin1 => try this.buffered.writeLatin1(chunk, true),
+                .utf16 => try this.buffered.writeUTF16(@alignCast(std.mem.bytesAsSlice(u16, chunk))),
+            }
             const partSize = this.partSizeInBytes();
-            if (this.buffered.items.len >= partSize) {
+            if (this.buffered.size() >= partSize) {
                 // we have enough data, send the part
                 this.processBuffered(partSize);
-                return;
             }
             // wait for more
         }
+        return !this.hasBackpressure();
+    }
+
+    pub fn writeLatin1(this: *@This(), chunk: []const u8, is_last: bool) bun.OOM!bool {
+        return try this.write(chunk, is_last, .latin1);
+    }
+
+    pub fn writeUTF16(this: *@This(), chunk:
[]const u8, is_last: bool) bun.OOM!bool { + return try this.write(chunk, is_last, .utf16); + } + + pub fn writeBytes(this: *@This(), chunk: []const u8, is_last: bool) bun.OOM!bool { + return try this.write(chunk, is_last, .bytes); } }; diff --git a/src/semver/SemverString.zig b/src/semver/SemverString.zig index 9f278ac087..ee30c31e1a 100644 --- a/src/semver/SemverString.zig +++ b/src/semver/SemverString.zig @@ -135,7 +135,7 @@ pub const String = extern struct { str: *const String, buf: string, - pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { const str = formatter.str; try writer.writeAll(str.slice(formatter.buf)); } @@ -159,11 +159,33 @@ pub const String = extern struct { quote: bool = true, }; - pub fn format(formatter: JsonFormatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + pub fn format(formatter: JsonFormatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { try writer.print("{}", .{bun.fmt.formatJSONStringUTF8(formatter.str.slice(formatter.buf), .{ .quote = formatter.opts.quote })}); } }; + pub inline fn fmtStorePath(self: *const String, buf: []const u8) StorePathFormatter { + return .{ + .buf = buf, + .str = self, + }; + } + + pub const StorePathFormatter = struct { + str: *const String, + buf: string, + + pub fn format(this: StorePathFormatter, comptime _: string, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { + for (this.str.slice(this.buf)) |c| { + switch (c) { + '/' => try writer.writeByte('+'), + '\\' => try writer.writeByte('+'), + else => try writer.writeByte(c), + } + } + } + }; + pub fn Sorter(comptime direction: enum { asc, desc }) type { return struct { lhs_buf: []const u8, diff --git a/src/shell/AllocScope.zig b/src/shell/AllocScope.zig new file mode 100644 index 0000000000..82d9ecf941 --- /dev/null +++ b/src/shell/AllocScope.zig @@ -0,0 +1,41 @@ +//! This is just a wrapper around `bun.AllocationScope` that ensures that it is +//! zero-cost in release builds. 
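+//!
+//! A minimal usage sketch (illustrative only; `input` is a hypothetical
+//! byte slice, not something defined in this file):
+//!
+//!     var scope = AllocScope.beginScope(bun.default_allocator);
+//!     defer scope.endScope();
+//!     // when alloc scopes are enabled, allocations made through
+//!     // scope.allocator() are tracked by the underlying AllocationScope
+//!     const copy = scope.allocator().dupe(u8, input) catch bun.outOfMemory();
+//!     // asserts (when enabled) that `copy` was allocated within this scope
+//!     scope.assertInScope(copy);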
+const AllocScope = @This(); + +__scope: if (bun.Environment.enableAllocScopes) bun.AllocationScope else void, + +pub fn beginScope(alloc: std.mem.Allocator) AllocScope { + if (comptime bun.Environment.enableAllocScopes) { + return .{ .__scope = bun.AllocationScope.init(alloc) }; + } + return .{ .__scope = {} }; +} + +pub fn endScope(this: *AllocScope) void { + if (comptime bun.Environment.enableAllocScopes) { + this.__scope.deinit(); + } +} + +pub fn leakSlice(this: *AllocScope, memory: anytype) void { + if (comptime bun.Environment.enableAllocScopes) { + _ = @typeInfo(@TypeOf(memory)).pointer; + bun.assert(!this.__scope.trackExternalFree(memory, null)); + } +} + +pub fn assertInScope(this: *AllocScope, memory: anytype) void { + if (comptime bun.Environment.enableAllocScopes) { + this.__scope.assertOwned(memory); + } +} + +pub inline fn allocator(this: *AllocScope) std.mem.Allocator { + if (comptime bun.Environment.enableAllocScopes) { + return this.__scope.allocator(); + } + return bun.default_allocator; +} + +const std = @import("std"); +const bun = @import("bun"); diff --git a/src/shell/Builtin.zig b/src/shell/Builtin.zig index 44cb842c15..0379b3f0fd 100644 --- a/src/shell/Builtin.zig +++ b/src/shell/Builtin.zig @@ -20,11 +20,19 @@ export_env: *EnvMap, cmd_local_env: *EnvMap, arena: *bun.ArenaAllocator, -/// The following are allocated with the above arena -args: *const std.ArrayList(?[*:0]const u8), -args_slice: ?[]const [:0]const u8 = null, cwd: bun.FileDescriptor, +/// TODO: It would be nice to make this mutable so that certain commands (e.g. +/// `export`) don't have to duplicate arguments. However, it is tricky because +/// modifications will invalidate any codepath which previously sliced the array +/// list (e.g. turned it into a `[]const [:0]const u8`) +args: *const std.ArrayList(?[*:0]const u8), +/// Cached slice of `args`. +/// +/// This caches the result of calling `bun.span(this.args.items[i])` since the +/// items in `this.args` are sentinel terminated and don't carry their length. 
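+///
+/// Illustrative sketch of what the cache avoids recomputing (hypothetical
+/// helper, for exposition only):
+///
+///     fn argAt(this: *Builtin, i: usize) [:0]const u8 {
+///         // bun.span scans to the 0 sentinel to recover the slice length
+///         return bun.span(this.args.items[i].?);
+///     }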
+args_slice: ?[]const [:0]const u8 = null, + impl: Impl, pub const Impl = union(Kind) { @@ -126,7 +134,6 @@ pub const BuiltinIO = struct { /// in the case of blob, we write to the file descriptor pub const Output = union(enum) { fd: struct { writer: *IOWriter, captured: ?*bun.ByteList = null }, - /// array list not owned by this type buf: std.ArrayList(u8), arraybuf: ArrayBuf, blob: *Blob, @@ -156,7 +163,13 @@ pub const BuiltinIO = struct { this.fd.writer.deref(); }, .blob => this.blob.deref(), - else => {}, + .arraybuf => this.arraybuf.buf.deinit(), + .buf => { + const alloc = this.buf.allocator; + this.buf.deinit(); + this.* = .{ .buf = std.ArrayList(u8).init(alloc) }; + }, + .ignore => {}, } } @@ -182,12 +195,16 @@ pub const BuiltinIO = struct { comptime fmt_: []const u8, args: anytype, _: OutputNeedsIOSafeGuard, - ) void { - this.fd.writer.enqueueFmtBltn(ptr, this.fd.captured, kind, fmt_, args); + ) Yield { + return this.fd.writer.enqueueFmtBltn(ptr, this.fd.captured, kind, fmt_, args); } - pub fn enqueue(this: *@This(), ptr: anytype, buf: []const u8, _: OutputNeedsIOSafeGuard) void { - this.fd.writer.enqueue(ptr, this.fd.captured, buf); + pub fn enqueue(this: *@This(), ptr: anytype, buf: []const u8, _: OutputNeedsIOSafeGuard) Yield { + return this.fd.writer.enqueue(ptr, this.fd.captured, buf); + } + + pub fn enqueueFmt(this: *@This(), ptr: anytype, comptime fmt: []const u8, args: anytype, _: OutputNeedsIOSafeGuard) Yield { + return this.fd.writer.enqueueFmt(ptr, this.fd.captured, fmt, args); } }; @@ -216,7 +233,13 @@ pub const BuiltinIO = struct { this.fd.deref(); }, .blob => this.blob.deref(), - else => {}, + .buf => { + const alloc = this.buf.allocator; + this.buf.deinit(); + this.* = .{ .buf = std.ArrayList(u8).init(alloc) }; + }, + .arraybuf => this.arraybuf.buf.deinit(), + .ignore => {}, } } @@ -300,6 +323,7 @@ fn callImplWithType(this: *Builtin, comptime BuiltinImpl: type, comptime Ret: ty } pub inline fn allocator(this: *Builtin) Allocator { + // FIXME: This should be `this.parentCmd().base.allocator()` return this.parentCmd().base.interpreter.allocator; } @@ -314,19 +338,19 @@ pub fn init( cmd_local_env: *EnvMap, cwd: bun.FileDescriptor, io: *IO, -) CoroutineResult { +) ?Yield { const stdin: BuiltinIO.Input = switch (io.stdin) { .fd => |fd| .{ .fd = fd.refSelf() }, .ignore => .ignore, }; const stdout: BuiltinIO.Output = switch (io.stdout) { .fd => |val| .{ .fd = .{ .writer = val.writer.refSelf(), .captured = val.captured } }, - .pipe => .{ .buf = std.ArrayList(u8).init(bun.default_allocator) }, + .pipe => .{ .buf = std.ArrayList(u8).init(cmd.base.allocator()) }, .ignore => .ignore, }; const stderr: BuiltinIO.Output = switch (io.stderr) { .fd => |val| .{ .fd = .{ .writer = val.writer.refSelf(), .captured = val.captured } }, - .pipe => .{ .buf = std.ArrayList(u8).init(bun.default_allocator) }, + .pipe => .{ .buf = std.ArrayList(u8).init(cmd.base.allocator()) }, .ignore => .ignore, }; @@ -361,45 +385,102 @@ pub fn init( }, }; }, + .ls => { + cmd.exec.bltn.impl = .{ + .ls = Ls{ + .alloc_scope = shell.AllocScope.beginScope(bun.default_allocator), + }, + }; + }, inline else => |tag| { cmd.exec.bltn.impl = @unionInit(Impl, @tagName(tag), .{}); }, } + return initRedirections(cmd, kind, node, interpreter); +} + +fn initRedirections( + cmd: *Cmd, + kind: Kind, + node: *const ast.Cmd, + interpreter: *Interpreter, +) ?Yield { if (node.redirect_file) |file| { switch (file) { .atom => { if (cmd.redirection_file.items.len == 0) { - cmd.writeFailingError("bun: ambiguous redirect: at `{s}`\n", 
.{@tagName(kind)}); - return .yield; + return cmd.writeFailingError("bun: ambiguous redirect: at `{s}`\n", .{@tagName(kind)}); } - // Regular files are not pollable on linux - const is_pollable: bool = if (bun.Environment.isLinux) false else true; + // Regular files are not pollable on linux and macos + const is_pollable: bool = if (bun.Environment.isPosix) false else true; const path = cmd.redirection_file.items[0..cmd.redirection_file.items.len -| 1 :0]; log("EXPANDED REDIRECT: {s}\n", .{cmd.redirection_file.items[0..]}); const perm = 0o666; - const is_nonblocking = false; - const flags = node.redirect.toFlags(); - const redirfd = switch (ShellSyscall.openat(cmd.base.shell.cwd_fd, path, flags, perm)) { - .err => |e| { - cmd.writeFailingError("bun: {s}: {s}", .{ e.toShellSystemError().message, path }); - return .yield; - }, - .result => |f| f, + + var pollable = false; + var is_socket = false; + var is_nonblocking = false; + + const redirfd = redirfd: { + if (node.redirect.stdin) { + break :redirfd switch (ShellSyscall.openat(cmd.base.shell.cwd_fd, path, node.redirect.toFlags(), perm)) { + .err => |e| { + return cmd.writeFailingError("bun: {s}: {s}", .{ e.toShellSystemError().message, path }); + }, + .result => |f| f, + }; + } + + const result = bun.io.openForWritingImpl( + cmd.base.shell.cwd_fd, + path, + node.redirect.toFlags(), + perm, + &pollable, + &is_socket, + false, + &is_nonblocking, + void, + {}, + struct { + fn onForceSyncOrIsaTTY(_: void) void {} + }.onForceSyncOrIsaTTY, + shell.interpret.isPollableFromMode, + ShellSyscall.openat, + ); + + break :redirfd switch (result) { + .err => |e| { + return cmd.writeFailingError("bun: {s}: {s}", .{ e.toShellSystemError().message, path }); + }, + .result => |f| { + if (bun.Environment.isWindows) { + switch (f.makeLibUVOwnedForSyscall(.open, .close_on_fail)) { + .err => |e| { + return cmd.writeFailingError("bun: {s}: {s}", .{ e.toShellSystemError().message, path }); + }, + .result => |f2| break :redirfd f2, + } + } + break :redirfd f; + }, + }; }; + if (node.redirect.stdin) { cmd.exec.bltn.stdin.deref(); cmd.exec.bltn.stdin = .{ .fd = IOReader.init(redirfd, cmd.base.eventLoop()) }; } if (node.redirect.stdout) { cmd.exec.bltn.stdout.deref(); - cmd.exec.bltn.stdout = .{ .fd = .{ .writer = IOWriter.init(redirfd, .{ .pollable = is_pollable, .nonblocking = is_nonblocking }, cmd.base.eventLoop()) } }; + cmd.exec.bltn.stdout = .{ .fd = .{ .writer = IOWriter.init(redirfd, .{ .pollable = is_pollable, .nonblocking = is_nonblocking, .is_socket = is_socket }, cmd.base.eventLoop()) } }; } if (node.redirect.stderr) { cmd.exec.bltn.stderr.deref(); - cmd.exec.bltn.stderr = .{ .fd = .{ .writer = IOWriter.init(redirfd, .{ .pollable = is_pollable, .nonblocking = is_nonblocking }, cmd.base.eventLoop()) } }; + cmd.exec.bltn.stderr = .{ .fd = .{ .writer = IOWriter.init(redirfd, .{ .pollable = is_pollable, .nonblocking = is_nonblocking, .is_socket = is_socket }, cmd.base.eventLoop()) } }; } }, .jsbuf => |val| { @@ -428,7 +509,7 @@ pub fn init( if ((node.redirect.stdout or node.redirect.stderr) and !(body.* == .Blob and !body.Blob.needsToReadFile())) { // TODO: Locked->stream -> file -> blob conversion via .toBlobIfPossible() except we want to avoid modifying the Response/Request if unnecessary. cmd.base.interpreter.event_loop.js.global.throw("Cannot redirect stdout/stderr to an immutable blob. 
Expected a file", .{}) catch {}; - return .yield; + return .failed; } var original_blob = body.use(); @@ -457,7 +538,7 @@ pub fn init( if ((node.redirect.stdout or node.redirect.stderr) and !blob.needsToReadFile()) { // TODO: Locked->stream -> file -> blob conversion via .toBlobIfPossible() except we want to avoid modifying the Response/Request if unnecessary. cmd.base.interpreter.event_loop.js.global.throw("Cannot redirect stdout/stderr to an immutable blob. Expected a file", .{}) catch {}; - return .yield; + return .failed; } const theblob: *BuiltinIO.Blob = bun.new(BuiltinIO.Blob, .{ @@ -478,7 +559,7 @@ pub fn init( } else { const jsval = cmd.base.interpreter.jsobjs[val.idx]; cmd.base.interpreter.event_loop.js.global.throw("Unknown JS value used in shell: {}", .{jsval.fmtString(globalObject)}) catch {}; - return .yield; + return .failed; } }, } @@ -494,7 +575,7 @@ pub fn init( } } - return .cont; + return null; } pub inline fn eventLoop(this: *const Builtin) JSC.EventLoopHandle { @@ -505,6 +586,7 @@ pub inline fn throw(this: *const Builtin, err: *const bun.shell.ShellErr) void { this.parentCmd().base.throw(err) catch {}; } +/// The `Cmd` state node associated with this builtin pub inline fn parentCmd(this: *const Builtin) *const Cmd { const union_ptr: *const Cmd.Exec = @fieldParentPtr("bltn", this); return @fieldParentPtr("exec", union_ptr); @@ -515,7 +597,7 @@ pub inline fn parentCmdMut(this: *Builtin) *Cmd { return @fieldParentPtr("exec", union_ptr); } -pub fn done(this: *Builtin, exit_code: anytype) void { +pub fn done(this: *Builtin, exit_code: anytype) Yield { const code: ExitCode = switch (@TypeOf(exit_code)) { bun.sys.E => @intFromEnum(exit_code), u1, u8, u16 => exit_code, @@ -537,16 +619,11 @@ pub fn done(this: *Builtin, exit_code: anytype) void { cmd.base.shell.buffered_stderr().append(bun.default_allocator, this.stderr.buf.items[0..]) catch bun.outOfMemory(); } - cmd.parent.childDone(cmd, this.exit_code.?); + return cmd.parent.childDone(cmd, this.exit_code.?); } -pub fn start(this: *Builtin) Maybe(void) { - switch (this.callImpl(Maybe(void), "start", .{})) { - .err => |e| return Maybe(void).initErr(e), - .result => {}, - } - - return Maybe(void).success; +pub fn start(this: *Builtin) Yield { + return this.callImpl(Yield, "start", .{}); } pub fn deinit(this: *Builtin) void { @@ -685,6 +762,7 @@ pub const Mv = @import("./builtin/mv.zig"); const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const Interpreter = shell.interpret.Interpreter; @@ -704,4 +782,3 @@ const ShellSyscall = shell.interpret.ShellSyscall; const Allocator = std.mem.Allocator; const ast = shell.AST; const IO = shell.Interpreter.IO; -const CoroutineResult = shell.interpret.CoroutineResult; diff --git a/src/shell/EnvMap.zig b/src/shell/EnvMap.zig index eeeba042d3..1651ac16bc 100644 --- a/src/shell/EnvMap.zig +++ b/src/shell/EnvMap.zig @@ -35,6 +35,8 @@ pub fn deinit(this: *EnvMap) void { this.map.deinit(); } +/// NOTE: This will `.ref()` value, so you should `defer value.deref()` it +/// before handing it to this function!!! 
pub fn insert(this: *EnvMap, key: EnvStr, val: EnvStr) void { const result = this.map.getOrPut(key) catch bun.outOfMemory(); if (!result.found_existing) { diff --git a/src/shell/EnvStr.zig b/src/shell/EnvStr.zig index 7464e7f114..78d83060b8 100644 --- a/src/shell/EnvStr.zig +++ b/src/shell/EnvStr.zig @@ -41,6 +41,19 @@ pub const EnvStr = packed struct(u128) { return @bitCast(num[0..6].*); } + /// Same thing as `initRefCounted` except it duplicates the passed string + pub fn dupeRefCounted(old_str: []const u8) EnvStr { + if (old_str.len == 0) + return .{ .tag = .empty, .ptr = 0, .len = 0 }; + + const str = bun.default_allocator.dupe(u8, old_str) catch bun.outOfMemory(); + return .{ + .ptr = toPtr(RefCountedStr.init(str)), + .len = str.len, + .tag = .refcounted, + }; + } + pub fn initRefCounted(str: []const u8) EnvStr { if (str.len == 0) return .{ .tag = .empty, .ptr = 0, .len = 0 }; diff --git a/src/shell/IO.zig b/src/shell/IO.zig index e5119d7515..07598eeba9 100644 --- a/src/shell/IO.zig +++ b/src/shell/IO.zig @@ -5,6 +5,10 @@ stdin: InKind, stdout: OutKind, stderr: OutKind, +pub fn format(this: IO, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + try writer.print("stdin: {}\nstdout: {}\nstderr: {}", .{ this.stdin, this.stdout, this.stderr }); +} + pub fn deinit(this: *IO) void { this.stdin.close(); this.stdout.close(); @@ -33,6 +37,13 @@ pub const InKind = union(enum) { fd: *Interpreter.IOReader, ignore, + pub fn format(this: InKind, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + switch (this) { + .fd => try writer.print("fd: {}", .{this.fd.fd}), + .ignore => try writer.print("ignore", .{}), + } + } + pub fn ref(this: InKind) InKind { switch (this) { .fd => this.fd.ref(), @@ -74,11 +85,20 @@ pub const OutKind = union(enum) { /// in the Interpreter struct fd: struct { writer: *Interpreter.IOWriter, captured: ?*bun.ByteList = null }, /// Buffers the output (handled in Cmd.BufferedIoClosed.close()) + /// + /// This is set when the shell is called with `.quiet()` pipe, /// Discards output ignore, - // fn dupeForSubshell(this: *ShellState, + // fn dupeForSubshell(this: *ShellExecEnv, + pub fn format(this: OutKind, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + switch (this) { + .fd => try writer.print("fd: {}", .{this.fd.writer.fd}), + .pipe => try writer.print("pipe", .{}), + .ignore => try writer.print("ignore", .{}), + } + } pub fn ref(this: @This()) @This() { switch (this) { diff --git a/src/shell/IOReader.zig b/src/shell/IOReader.zig index 4ee9662a24..5fb7d2b885 100644 --- a/src/shell/IOReader.zig +++ b/src/shell/IOReader.zig @@ -67,21 +67,23 @@ pub fn init(fd: bun.FileDescriptor, evtloop: JSC.EventLoopHandle) *IOReader { } /// Idempotent function to start reading -pub fn start(this: *IOReader) void { +pub fn start(this: *IOReader) Yield { if (bun.Environment.isPosix) { if (this.reader.handle == .closed or !this.reader.handle.poll.isRegistered()) { if (this.reader.start(this.fd, true).asErr()) |e| { this.onReaderError(e); } } - return; + return .suspended; } - if (this.is_reading) return; + if (this.is_reading) return .suspended; this.is_reading = true; if (this.reader.startWithCurrentPipe().asErr()) |e| { this.onReaderError(e); + return .failed; } + return .suspended; } /// Only does things on windows @@ -128,13 +130,12 @@ pub fn onReadChunk(ptr: *anyopaque, chunk: []const u8, has_more: bun.io.ReadStat var i: usize = 0; while (i < this.readers.len()) { var r = this.readers.get(i); - switch
(r.onReadChunk(chunk)) { - .cont => { - i += 1; - }, - .stop_listening => { - this.readers.swapRemove(i); - }, + var remove = false; + r.onReadChunk(chunk, &remove).run(); + if (remove) { + this.readers.swapRemove(i); + } else { + i += 1; } } @@ -164,7 +165,7 @@ pub fn onReaderError(this: *IOReader, err: bun.sys.Error) void { r.onReaderDone(if (this.err) |*e| brk: { e.ref(); break :brk e.*; - } else null); + } else null).run(); } } @@ -175,7 +176,7 @@ pub fn onReaderDone(this: *IOReader) void { r.onReaderDone(if (this.err) |*err| brk: { err.ref(); break :brk err.*; - } else null); + } else null).run(); } } @@ -223,12 +224,12 @@ pub const IOReaderChildPtr = struct { } /// Return true if the child should be deleted - pub fn onReadChunk(this: IOReaderChildPtr, chunk: []const u8) ReadChunkAction { - return this.ptr.call("onIOReaderChunk", .{chunk}, ReadChunkAction); + pub fn onReadChunk(this: IOReaderChildPtr, chunk: []const u8, remove: *bool) Yield { + return this.ptr.call("onIOReaderChunk", .{ chunk, remove }, Yield); } - pub fn onReaderDone(this: IOReaderChildPtr, err: ?JSC.SystemError) void { - return this.ptr.call("onIOReaderDone", .{err}, void); + pub fn onReaderDone(this: IOReaderChildPtr, err: ?JSC.SystemError) Yield { + return this.ptr.call("onIOReaderDone", .{err}, Yield); } }; @@ -262,11 +263,11 @@ pub const AsyncDeinitReader = struct { }; const SmolList = bun.shell.SmolList; -const ReadChunkAction = bun.shell.interpret.ReadChunkAction; const std = @import("std"); const bun = @import("bun"); const shell = bun.shell; +const Yield = shell.Yield; const Interpreter = bun.shell.Interpreter; const log = bun.shell.interpret.log; diff --git a/src/shell/IOWriter.zig b/src/shell/IOWriter.zig index a7eb64fd1d..bdb28bb6f6 100644 --- a/src/shell/IOWriter.zig +++ b/src/shell/IOWriter.zig @@ -25,15 +25,16 @@ buf: std.ArrayListUnmanaged(u8) = .{}, /// quick hack to get windows working /// ideally this should be removed winbuf: if (bun.Environment.isWindows) std.ArrayListUnmanaged(u8) else u0 = if (bun.Environment.isWindows) .empty else 0, -__idx: usize = 0, +writer_idx: usize = 0, total_bytes_written: usize = 0, err: ?JSC.SystemError = null, evtloop: JSC.EventLoopHandle, concurrent_task: JSC.EventLoopTask, -is_writing: if (bun.Environment.isWindows) bool else u0 = if (bun.Environment.isWindows) false else 0, +concurrent_task2: JSC.EventLoopTask, +is_writing: bool = false, async_deinit: AsyncDeinitWriter = .{}, started: bool = false, -flags: InitFlags = .{}, +flags: Flags = .{}, const debug = bun.Output.scoped(.IOWriter, true); @@ -44,10 +45,15 @@ pub const ChildPtr = IOWriterChildPtr; /// but if this never happens, we shrink `buf` when it exceeds this threshold const SHRINK_THRESHOLD = 1024 * 128; +const CallstackChild = struct { + child: ChildPtr, + completed: bool = false, +}; + pub const auto_poll = false; pub const WriterImpl = bun.io.BufferedWriter(IOWriter, struct { - pub const onWrite = IOWriter.onWrite; + pub const onWrite = IOWriter.onWritePollable; pub const onError = IOWriter.onError; pub const onClose = IOWriter.onClose; pub const getBuffer = IOWriter.getBuffer; @@ -63,19 +69,21 @@ pub fn refSelf(this: *IOWriter) *IOWriter { return this; } -pub const InitFlags = packed struct(u8) { +pub const Flags = packed struct(u8) { pollable: bool = false, nonblocking: bool = false, is_socket: bool = false, - __unused: u5 = 0, + broken_pipe: bool = false, + __unused: u4 = 0, }; -pub fn init(fd: bun.FileDescriptor, flags: InitFlags, evtloop: JSC.EventLoopHandle) *IOWriter { +pub fn init(fd: 
bun.FileDescriptor, flags: Flags, evtloop: JSC.EventLoopHandle) *IOWriter { const this = bun.new(IOWriter, .{ .ref_count = .init(), .fd = fd, .evtloop = evtloop, .concurrent_task = JSC.EventLoopTask.fromEventLoop(evtloop), + .concurrent_task2 = JSC.EventLoopTask.fromEventLoop(evtloop), }); this.writer.parent = this; @@ -156,38 +164,49 @@ pub fn eventLoop(this: *IOWriter) JSC.EventLoopHandle { } /// Idempotent write call -pub fn write(this: *IOWriter) void { +fn write(this: *IOWriter) enum { + suspended, + failed, + is_actually_file, +} { + if (bun.Environment.isPosix) + bun.assert(this.flags.pollable); + if (!this.started) { log("IOWriter(0x{x}, fd={}) starting", .{ @intFromPtr(this), this.fd }); if (this.__start().asErr()) |e| { this.onError(e); - return; + return .failed; } this.started = true; if (comptime bun.Environment.isPosix) { - if (this.writer.handle == .fd) {} else return; - } else return; + // if `handle == .fd` it means it's a file which does not + // support polling for writeability and we should just + // write to it + if (this.writer.handle == .fd) { + bun.assert(!this.flags.pollable); + return .is_actually_file; + } + return .suspended; + } + return .suspended; } + if (bun.Environment.isWindows) { log("IOWriter(0x{x}, fd={}) write() is_writing={any}", .{ @intFromPtr(this), this.fd, this.is_writing }); - if (this.is_writing) return; + if (this.is_writing) return .suspended; this.is_writing = true; if (this.writer.startWithCurrentPipe().asErr()) |e| { this.onError(e); - return; + return .failed; } - return; + return .suspended; } - if (this.writer.handle == .poll) { - if (!this.writer.handle.poll.isWatching()) { - log("IOWriter(0x{x}, fd={}) calling this.writer.write()", .{ @intFromPtr(this), this.fd }); - this.writer.write(); - } else log("IOWriter(0x{x}, fd={}) poll already watching", .{ @intFromPtr(this), this.fd }); - } else { - log("IOWriter(0x{x}, fd={}) no poll, calling write", .{ @intFromPtr(this), this.fd }); - this.writer.write(); - } + bun.assert(this.writer.handle == .poll); + if (this.writer.handle.poll.isWatching()) return .suspended; + this.writer.start(this.fd, this.flags.pollable).assert(); + return .suspended; } /// Cancel the chunks enqueued by the given writer by @@ -198,7 +217,7 @@ pub fn cancelChunks(this: *IOWriter, ptr_: anytype) void { else => ChildPtr.init(ptr_), }; if (this.writers.len() == 0) return; - const idx = this.__idx; + const idx = this.writer_idx; const slice: []Writer = this.writers.sliceMutable(); if (idx >= slice.len) return; for (slice[idx..]) |*w| { @@ -214,6 +233,10 @@ const Writer = struct { written: usize = 0, bytelist: ?*bun.ByteList = null, + pub fn wroteEverything(this: *const Writer) bool { + return this.written >= this.len; + } + pub fn rawPtr(this: Writer) ?*anyopaque { return this.ptr.ptr.ptr(); } @@ -223,7 +246,7 @@ const Writer = struct { } pub fn setDead(this: *Writer) void { - this.ptr.ptr = ChildPtr.ChildPtrRaw.Null; + this.ptr.ptr = ChildPtrRaw.Null; } }; @@ -233,9 +256,9 @@ pub const Writers = SmolList(Writer, 2); /// amount they would have written so the buf is skipped as well pub fn skipDead(this: *IOWriter) void { const slice = this.writers.slice(); - for (slice[this.__idx..]) |*w| { + for (slice[this.writer_idx..]) |*w| { if (w.isDead()) { - this.__idx += 1; + this.writer_idx += 1; this.total_bytes_written += w.len - w.written; continue; } @@ -244,13 +267,66 @@ pub fn skipDead(this: *IOWriter) void { return; } -pub fn onWrite(this: *IOWriter, amount: usize, status: bun.io.WriteStatus) void { +pub fn 
doFileWrite(this: *IOWriter) Yield { + assert(bun.Environment.isPosix); + assert(!this.flags.pollable); + assert(this.writer_idx < this.writers.len()); + + defer this.setWriting(false); + this.skipDead(); + + const child = this.writers.get(this.writer_idx); + assert(!child.isDead()); + + const buf = this.getBuffer(); + assert(buf.len > 0); + + var done = false; + const writeResult = drainBufferedData(this, buf, std.math.maxInt(u32), false); + const amt = switch (writeResult) { + .done => |amt| amt: { + done = true; + break :amt amt; + }, + // .wrote can be returned if an error was encountered but we wrote + // some data before it happened. In that case, onError will also be + // called so we should just return. + .wrote => |amt| amt: { + if (this.err != null) return .done; + break :amt amt; + }, + // This is returned when we hit EAGAIN which should not be the case + // when writing to files unless we opened the file with non-blocking + // mode + .pending => bun.unreachablePanic("drainBufferedData returning .pending in IOWriter.doFileWrite should not happen", .{}), + .err => |e| { + this.onError(e); + return .done; + }, + }; + if (child.bytelist) |bl| { + const written_slice = this.buf.items[this.total_bytes_written .. this.total_bytes_written + amt]; + bl.append(bun.default_allocator, written_slice) catch bun.outOfMemory(); + } + child.written += amt; + if (!child.wroteEverything()) { + bun.assert(writeResult == .done); + // This should never happen if we are here. The only case where we get + // partial writes is when an error is encountered + bun.unreachablePanic("IOWriter.doFileWrite: child.wroteEverything() is false. This is unexpected behavior and indicates a bug in Bun. Please file a GitHub issue.", .{}); + } + return this.bump(child); +} + +pub fn onWritePollable(this: *IOWriter, amount: usize, status: bun.io.WriteStatus) void { + if (bun.Environment.isPosix) bun.assert(this.flags.pollable); + this.setWriting(false); debug("IOWriter(0x{x}, fd={}) onWrite({d}, {})", .{ @intFromPtr(this), this.fd, amount, status }); - if (this.__idx >= this.writers.len()) return; - const child = this.writers.get(this.__idx); + if (this.writer_idx >= this.writers.len()) return; + const child = this.writers.get(this.writer_idx); if (child.isDead()) { - this.bump(child); + this.bump(child).run(); } else { if (child.bytelist) |bl| { const written_slice = this.buf.items[this.total_bytes_written .. this.total_bytes_written + amount]; @@ -259,35 +335,79 @@ pub fn onWrite(this: *IOWriter, amount: usize, status: bun.io.WriteStatus) void this.total_bytes_written += amount; child.written += amount; if (status == .end_of_file) { - const not_fully_written = !this.isLastIdx(this.__idx) or child.written < child.len; - if (bun.Environment.allow_assert and not_fully_written) { - bun.Output.debugWarn("IOWriter(0x{x}, fd={}) received done without fully writing data, check that onError is thrown", .{ @intFromPtr(this), this.fd }); - } + const not_fully_written = if (this.isLastIdx(this.writer_idx)) child.written < child.len else true; + // We wrote everything + if (!not_fully_written) return; + + // We did not write everything. + // This seems to happen in a pipeline where the command which + // _reads_ the output of the previous command closes before the + // previous command. + // + // Example: `ls . | echo hi` + // + // 1. We call `socketpair()` and give `ls .` a socket to _write_ to and `echo hi` a socket to _read_ from + // 2. `ls .` executes first, but has to do some async work and so is suspended + // 3.
`echo hi` then executes and finishes first (since it does less work) and closes its socket + // 4. `ls .` does its thing and then tries to write to its socket + // 5. Because `echo hi` closed its socket, when `ls .` does `send(...)` it will return EPIPE + // 6. Inside our PipeWriter abstraction this gets returned as bun.io.WriteStatus.end_of_file + // + // So what should we do? In a normal shell, `ls .` would receive the SIGPIPE signal and exit. + // We don't support signals right now. In fact we don't even have a way to kill the shell. + // + // So for a quick hack we're just going to have all writes return an error. + bun.assert(this.flags.is_socket); + bun.Output.debugWarn("IOWriter(0x{x}, fd={}) received done without fully writing data", .{ @intFromPtr(this), this.fd }); + this.flags.broken_pipe = true; + this.brokenPipeForWriters(); return; } if (child.written >= child.len) { - this.bump(child); + this.bump(child).run(); } } - const wrote_everything: bool = this.total_bytes_written >= this.buf.items.len; + const wrote_everything: bool = this.wroteEverything(); - log("IOWriter(0x{x}, fd={}) wrote_everything={}, idx={d} writers={d} next_len={d}", .{ @intFromPtr(this), this.fd, wrote_everything, this.__idx, this.writers.len(), if (this.writers.len() >= 1) this.writers.get(0).len else 0 }); - if (!wrote_everything and this.__idx < this.writers.len()) { + log("IOWriter(0x{x}, fd={}) wrote_everything={}, idx={d} writers={d} next_len={d}", .{ @intFromPtr(this), this.fd, wrote_everything, this.writer_idx, this.writers.len(), if (this.writers.len() >= 1) this.writers.get(0).len else 0 }); + if (!wrote_everything and this.writer_idx < this.writers.len()) { debug("IOWriter(0x{x}, fd={}) poll again", .{ @intFromPtr(this), this.fd }); if (comptime bun.Environment.isWindows) { this.setWriting(true); this.writer.write(); } else { - if (this.writer.handle == .poll) - this.writer.registerPoll() - else - this.writer.write(); + bun.assert(this.writer.handle == .poll); + this.writer.registerPoll(); } } } +pub fn brokenPipeForWriters(this: *IOWriter) void { + bun.assert(this.flags.broken_pipe); + var offset: usize = 0; + for (this.writers.sliceMutable()) |*w| { + if (w.isDead()) { + offset += w.len; + continue; + } + log("IOWriter(0x{x}, fd={}) brokenPipeForWriters {s}(0x{x})", .{ @intFromPtr(this), this.fd, @tagName(w.ptr.ptr.tag()), @intFromPtr(w.ptr.ptr.ptr()) }); + const err: JSC.SystemError = bun.sys.Error.fromCode(.PIPE, .write).toSystemError(); + w.ptr.onIOWriterChunk(0, err).run(); + offset += w.len; + } + + this.total_bytes_written = 0; + this.writers.clearRetainingCapacity(); + this.buf.clearRetainingCapacity(); + this.writer_idx = 0; +} + +pub fn wroteEverything(this: *IOWriter) bool { + return this.total_bytes_written >= this.buf.items.len; +} + pub fn onClose(this: *IOWriter) void { this.setWriting(false); } @@ -313,11 +433,19 @@ pub fn onError(this: *IOWriter, err__: bun.sys.Error) void { continue :writer_loop; } - w.ptr.onWriteChunk(0, this.err); seen.append(@intFromPtr(ptr)) catch bun.outOfMemory(); + // TODO: This probably shouldn't call .run() + w.ptr.onIOWriterChunk(0, this.err).run(); } + + this.total_bytes_written = 0; + this.writer_idx = 0; + this.buf.clearRetainingCapacity(); + this.writers.clearRetainingCapacity(); } +/// Returns the buffer of data that needs to be written +/// for the *current* writer. 
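+///
+/// Roughly, the returned slice is derived from the current writer's remaining
+/// length and the global write cursor (this mirrors the tail of
+/// `getBufferImpl` below):
+///
+///     const remaining = writer.len - writer.written;
+///     return this.buf.items[this.total_bytes_written .. this.total_bytes_written + remaining];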
pub fn getBuffer(this: *IOWriter) []const u8 { const result = this.getBufferImpl(); if (comptime bun.Environment.isWindows) { @@ -331,20 +459,20 @@ pub fn getBuffer(this: *IOWriter) []const u8 { fn getBufferImpl(this: *IOWriter) []const u8 { const writer = brk: { - if (this.__idx >= this.writers.len()) { + if (this.writer_idx >= this.writers.len()) { log("IOWriter(0x{x}, fd={}) getBufferImpl all writes done", .{ @intFromPtr(this), this.fd }); return ""; } - log("IOWriter(0x{x}, fd={}) getBufferImpl idx={d} writer_len={d}", .{ @intFromPtr(this), this.fd, this.__idx, this.writers.len() }); - var writer = this.writers.get(this.__idx); + log("IOWriter(0x{x}, fd={}) getBufferImpl idx={d} writer_len={d}", .{ @intFromPtr(this), this.fd, this.writer_idx, this.writers.len() }); + var writer = this.writers.get(this.writer_idx); if (!writer.isDead()) break :brk writer; log("IOWriter(0x{x}, fd={}) skipping dead", .{ @intFromPtr(this), this.fd }); this.skipDead(); - if (this.__idx >= this.writers.len()) { + if (this.writer_idx >= this.writers.len()) { log("IOWriter(0x{x}, fd={}) getBufferImpl all writes done", .{ @intFromPtr(this), this.fd }); return ""; } - writer = this.writers.get(this.__idx); + writer = this.writers.get(this.writer_idx); break :brk writer; }; log("IOWriter(0x{x}, fd={}) getBufferImpl writer_len={} writer_written={}", .{ @intFromPtr(this), this.fd, writer.len, writer.written }); @@ -355,39 +483,32 @@ fn getBufferImpl(this: *IOWriter) []const u8 { return this.buf.items[this.total_bytes_written .. this.total_bytes_written + remaining]; } -pub fn bump(this: *IOWriter, current_writer: *Writer) void { +pub fn bump(this: *IOWriter, current_writer: *Writer) Yield { log("IOWriter(0x{x}, fd={}) bump(0x{x} {s})", .{ @intFromPtr(this), this.fd, @intFromPtr(current_writer), @tagName(current_writer.ptr.ptr.tag()) }); const is_dead = current_writer.isDead(); const written = current_writer.written; const child_ptr = current_writer.ptr; - defer { - if (!is_dead) child_ptr.onWriteChunk(written, null); - } - if (is_dead) { this.skipDead(); } else { if (bun.Environment.allow_assert) { if (!is_dead) assert(current_writer.written == current_writer.len); } - this.__idx += 1; + this.writer_idx += 1; } - if (this.__idx >= this.writers.len()) { + if (this.writer_idx >= this.writers.len()) { log("IOWriter(0x{x}, fd={}) all writers complete: truncating", .{ @intFromPtr(this), this.fd }); this.buf.clearRetainingCapacity(); - this.__idx = 0; + this.writer_idx = 0; this.writers.clearRetainingCapacity(); this.total_bytes_written = 0; - return; - } - - if (this.total_bytes_written >= SHRINK_THRESHOLD) { + } else if (this.total_bytes_written >= SHRINK_THRESHOLD) { const slice = this.buf.items[this.total_bytes_written..]; const remaining_len = slice.len; - log("IOWriter(0x{x}, fd={}) exceeded shrink threshold: truncating (new_len={d}, writer_starting_idx={d})", .{ @intFromPtr(this), this.fd, remaining_len, this.__idx }); + log("IOWriter(0x{x}, fd={}) exceeded shrink threshold: truncating (new_len={d}, writer_starting_idx={d})", .{ @intFromPtr(this), this.fd, remaining_len, this.writer_idx }); if (slice.len == 0) { this.buf.clearRetainingCapacity(); this.total_bytes_written = 0; @@ -396,23 +517,66 @@ pub fn bump(this: *IOWriter, current_writer: *Writer) void { this.buf.items.len = remaining_len; this.total_bytes_written = 0; } - this.writers.truncate(this.__idx); - this.__idx = 0; + this.writers.truncate(this.writer_idx); + this.writer_idx = 0; if (bun.Environment.allow_assert) { if (this.writers.len() > 0) { - const 
first = this.writers.getConst(this.__idx); + const first = this.writers.getConst(this.writer_idx); assert(this.buf.items.len >= first.len); } } } + + // If the writer was not dead then call its `onIOWriterChunk` callback + if (!is_dead) { + return child_ptr.onIOWriterChunk(written, null); + } + + return .done; } -pub fn enqueue(this: *IOWriter, ptr: anytype, bytelist: ?*bun.ByteList, buf: []const u8) void { +fn enqueueFile(this: *IOWriter) Yield { + if (this.is_writing) { + return .suspended; + } + this.setWriting(true); + + return this.doFileWrite(); +} + +/// `writer` is the new writer to queue +/// +/// You MUST have already added the data to `this.buf`!! +pub fn enqueueInternal(this: *IOWriter) Yield { + bun.assert(!this.flags.broken_pipe); + if (!this.flags.pollable and bun.Environment.isPosix) return this.enqueueFile(); + switch (this.write()) { + .suspended => return .suspended, + .is_actually_file => { + bun.assert(bun.Environment.isPosix); + return this.enqueueFile(); + }, + // FIXME + .failed => return .failed, + } +} + +pub fn handleBrokenPipe(this: *IOWriter, ptr: ChildPtr) ?Yield { + if (this.flags.broken_pipe) { + const err: JSC.SystemError = bun.sys.Error.fromCode(.PIPE, .write).toSystemError(); + log("IOWriter(0x{x}, fd={}) broken pipe {s}(0x{x})", .{ @intFromPtr(this), this.fd, @tagName(ptr.ptr.tag()), @intFromPtr(ptr.ptr.ptr()) }); + return .{ .on_io_writer_chunk = .{ .child = ptr.asAnyOpaque(), .written = 0, .err = err } }; + } + return null; +} + +pub fn enqueue(this: *IOWriter, ptr: anytype, bytelist: ?*bun.ByteList, buf: []const u8) Yield { const childptr = if (@TypeOf(ptr) == ChildPtr) ptr else ChildPtr.init(ptr); + if (this.handleBrokenPipe(childptr)) |yield| return yield; + if (buf.len == 0) { log("IOWriter(0x{x}, fd={}) enqueue EMPTY", .{ @intFromPtr(this), this.fd }); - childptr.onWriteChunk(0, null); - return; + return .{ .on_io_writer_chunk = .{ .child = childptr.asAnyOpaque(), .written = 0, .err = null } }; } const writer: Writer = .{ .ptr = childptr, @@ -422,7 +586,7 @@ pub fn enqueue(this: *IOWriter, ptr: anytype, bytelist: ?*bun.ByteList, buf: []c log("IOWriter(0x{x}, fd={}) enqueue(0x{x} {s}, buf_len={d}, buf={s}, writer_len={d})", .{ @intFromPtr(this), this.fd, @intFromPtr(writer.rawPtr()), @tagName(writer.ptr.ptr.tag()), buf.len, buf[0..@min(128, buf.len)], this.writers.len() + 1 }); this.buf.appendSlice(bun.default_allocator, buf) catch bun.outOfMemory(); this.writers.append(writer); - this.write(); + return this.enqueueInternal(); } pub fn enqueueFmtBltn( @@ -432,10 +596,10 @@ pub fn enqueueFmtBltn( comptime kind: ?Interpreter.Builtin.Kind, comptime fmt_: []const u8, args: anytype, -) void { +) Yield { const cmd_str = comptime if (kind) |k| @tagName(k) ++ ": " else ""; const fmt__ = cmd_str ++ fmt_; - this.enqueueFmt(ptr, bytelist, fmt__, args); + return this.enqueueFmt(ptr, bytelist, fmt__, args); } pub fn enqueueFmt( @@ -444,19 +608,23 @@ pub fn enqueueFmt( bytelist: ?*bun.ByteList, comptime fmt: []const u8, args: anytype, -) void { +) Yield { var buf_writer = this.buf.writer(bun.default_allocator); const start = this.buf.items.len; buf_writer.print(fmt, args) catch bun.outOfMemory(); + + const childptr = if (@TypeOf(ptr) == ChildPtr) ptr else ChildPtr.init(ptr); + if (this.handleBrokenPipe(childptr)) |yield| return yield; + const end = this.buf.items.len; const writer: Writer = .{ - .ptr = if (@TypeOf(ptr) == ChildPtr) ptr else ChildPtr.init(ptr), + .ptr = childptr, .len = end - start, .bytelist = bytelist, }; log("IOWriter(0x{x}, fd={}) 
enqueue(0x{x} {s}, {s})", .{ @intFromPtr(this), this.fd, @intFromPtr(writer.rawPtr()), @tagName(writer.ptr.ptr.tag()), this.buf.items[start..end] }); this.writers.append(writer); - this.write(); + return this.enqueueInternal(); } fn asyncDeinit(this: *@This()) void { @@ -490,55 +658,177 @@ pub inline fn setWriting(this: *IOWriter, writing: bool) void { } } +// this is unused +pub fn runFromMainThread(_: *IOWriter) void {} + +// this is unused +pub fn runFromMainThreadMini(_: *IOWriter, _: *void) void {} + /// Anything which uses `*IOWriter` to write to a file descriptor needs to /// register itself here so we know how to call its callback on completion. pub const IOWriterChildPtr = struct { ptr: ChildPtrRaw, - pub const ChildPtrRaw = bun.TaggedPointerUnion(.{ - Interpreter.Cmd, - Interpreter.Pipeline, - Interpreter.CondExpr, - Interpreter.Subshell, - Interpreter.Builtin.Cd, - Interpreter.Builtin.Echo, - Interpreter.Builtin.Export, - Interpreter.Builtin.Ls, - Interpreter.Builtin.Ls.ShellLsOutputTask, - Interpreter.Builtin.Mv, - Interpreter.Builtin.Pwd, - Interpreter.Builtin.Rm, - Interpreter.Builtin.Which, - Interpreter.Builtin.Mkdir, - Interpreter.Builtin.Mkdir.ShellMkdirOutputTask, - Interpreter.Builtin.Touch, - Interpreter.Builtin.Touch.ShellTouchOutputTask, - Interpreter.Builtin.Cat, - Interpreter.Builtin.Exit, - Interpreter.Builtin.True, - Interpreter.Builtin.False, - Interpreter.Builtin.Yes, - Interpreter.Builtin.Seq, - Interpreter.Builtin.Dirname, - Interpreter.Builtin.Basename, - Interpreter.Builtin.Cp, - Interpreter.Builtin.Cp.ShellCpOutputTask, - shell.subproc.PipeReader.CapturedWriter, - }); - pub fn init(p: anytype) IOWriterChildPtr { return .{ .ptr = ChildPtrRaw.init(p), }; } + pub fn asAnyOpaque(this: IOWriterChildPtr) *anyopaque { + return this.ptr.ptr(); + } + + pub fn fromAnyOpaque(p: *anyopaque) IOWriterChildPtr { + return .{ .ptr = ChildPtrRaw.from(p) }; + } + /// Called when the IOWriter writes a complete chunk of data the child enqueued - pub fn onWriteChunk(this: IOWriterChildPtr, amount: usize, err: ?JSC.SystemError) void { - return this.ptr.call("onIOWriterChunk", .{ amount, err }, void); + pub fn onIOWriterChunk(this: IOWriterChildPtr, amount: usize, err: ?JSC.SystemError) Yield { + return this.ptr.call("onIOWriterChunk", .{ amount, err }, Yield); + } +}; + +pub const ChildPtrRaw = bun.TaggedPointerUnion(.{ + Interpreter.Cmd, + Interpreter.Pipeline, + Interpreter.CondExpr, + Interpreter.Subshell, + Interpreter.Builtin.Cd, + Interpreter.Builtin.Echo, + Interpreter.Builtin.Export, + Interpreter.Builtin.Ls, + Interpreter.Builtin.Ls.ShellLsOutputTask, + Interpreter.Builtin.Mv, + Interpreter.Builtin.Pwd, + Interpreter.Builtin.Rm, + Interpreter.Builtin.Which, + Interpreter.Builtin.Mkdir, + Interpreter.Builtin.Mkdir.ShellMkdirOutputTask, + Interpreter.Builtin.Touch, + Interpreter.Builtin.Touch.ShellTouchOutputTask, + Interpreter.Builtin.Cat, + Interpreter.Builtin.Exit, + Interpreter.Builtin.True, + Interpreter.Builtin.False, + Interpreter.Builtin.Yes, + Interpreter.Builtin.Seq, + Interpreter.Builtin.Dirname, + Interpreter.Builtin.Basename, + Interpreter.Builtin.Cp, + Interpreter.Builtin.Cp.ShellCpOutputTask, + shell.subproc.PipeReader.CapturedWriter, +}); + +/// TODO: This function and `drainBufferedData` are copy pastes from +/// `PipeWriter.zig`, it would be nice to not have to do that +fn tryWriteWithWriteFn(fd: bun.FileDescriptor, buf: []const u8, comptime write_fn: *const fn (bun.FileDescriptor, []const u8) JSC.Maybe(usize)) bun.io.WriteResult { + var offset: usize = 
0; + + while (offset < buf.len) { + switch (write_fn(fd, buf[offset..])) { + .err => |err| { + if (err.isRetry()) { + return .{ .pending = offset }; + } + + if (err.getErrno() == .PIPE) { + return .{ .done = offset }; + } + + return .{ .err = err }; + }, + + .result => |wrote| { + offset += wrote; + if (wrote == 0) { + return .{ .done = offset }; + } + }, + } + } + + return .{ .wrote = offset }; +} + +pub fn drainBufferedData(parent: *IOWriter, buf: []const u8, max_write_size: usize, received_hup: bool) bun.io.WriteResult { + _ = received_hup; + + const trimmed = if (max_write_size < buf.len and max_write_size > 0) buf[0..max_write_size] else buf; + + var drained: usize = 0; + + while (drained < trimmed.len) { + const attempt = tryWriteWithWriteFn(parent.fd, buf, bun.sys.write); + switch (attempt) { + .pending => |pending| { + drained += pending; + return .{ .pending = drained }; + }, + .wrote => |amt| { + drained += amt; + }, + .err => |err| { + if (drained > 0) { + onError(parent, err); + return .{ .wrote = drained }; + } else { + return .{ .err = err }; + } + }, + .done => |amt| { + drained += amt; + return .{ .done = drained }; + }, + } + } + + return .{ .wrote = drained }; +} + +/// TODO: Investigate what we need to do to remove this since we did most of the leg +/// work in removing recursion in the shell. That is what caused the need for +/// making deinitialization asynchronous in the first place. +/// +/// There are two areas which need to change: +/// +/// 1. `IOWriter.onWritePollable` calls `this.bump(child).run()` which could +/// deinitialize the child which will deref and potentially deinitialize the +/// `IOWriter`. Simple solution is to ref and defer deref the `IOWriter` +/// +/// 2. `PipeWriter` seems to try to use this struct after IOWriter +/// deinitializes. We might not be able to get around this. +pub const AsyncDeinitWriter = struct { + ran: bool = false, + + pub fn enqueue(this: *@This()) void { + if (this.ran) return; + this.ran = true; + + var iowriter = this.writer(); + + if (iowriter.evtloop == .js) { + iowriter.evtloop.js.enqueueTaskConcurrent(iowriter.concurrent_task.js.from(this, .manual_deinit)); + } else { + iowriter.evtloop.mini.enqueueTaskConcurrent(iowriter.concurrent_task.mini.from(this, "runFromMainThreadMini")); + } + } + + pub fn writer(this: *@This()) *IOWriter { + return @alignCast(@fieldParentPtr("async_deinit", this)); + } + + pub fn runFromMainThread(this: *@This()) void { + this.writer().deinitOnMainThread(); + } + + pub fn runFromMainThreadMini(this: *@This(), _: *void) void { + this.runFromMainThread(); } }; const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const Interpreter = shell.Interpreter; const JSC = bun.JSC; @@ -547,4 +837,3 @@ const assert = bun.assert; const log = bun.Output.scoped(.IOWriter, true); const SmolList = shell.SmolList; const Maybe = JSC.Maybe; -const AsyncDeinitWriter = shell.Interpreter.AsyncDeinitWriter; diff --git a/src/shell/ParsedShellScript.zig b/src/shell/ParsedShellScript.zig index f48092dc6c..1dfce3de9d 100644 --- a/src/shell/ParsedShellScript.zig +++ b/src/shell/ParsedShellScript.zig @@ -110,7 +110,7 @@ pub fn createParsedShellScript(globalThis: *JSC.JSGlobalObject, callframe: *JSC.
} const string_args = arguments[0]; const template_args_js = arguments[1]; - var template_args = template_args_js.arrayIterator(globalThis); + var template_args = try template_args_js.arrayIterator(globalThis); var stack_alloc = std.heap.stackFallback(@sizeOf(bun.String) * 4, shargs.arena_allocator()); var jsstrings = try std.ArrayList(bun.String).initCapacity(stack_alloc.get(), 4); diff --git a/src/shell/Yield.zig b/src/shell/Yield.zig new file mode 100644 index 0000000000..ad79aab824 --- /dev/null +++ b/src/shell/Yield.zig @@ -0,0 +1,164 @@ +/// There are constraints on Bun's shell interpreter which are unique to shells in +/// general: +/// 1. We try to keep everything in the Bun process as much as possible for +/// performance reasons and also to leverage Bun's existing IO/FS code +/// 2. We try to use non-blocking IO as much as possible so the shell +/// does not block the main JS thread +/// 3. Zig does not have coroutines (yet) +/// +/// These cause two problems: +/// 1. Unbounded recursion, if we keep calling .next() on state machine structs +/// then the call stack could get really deep, we need some mechanism to allow +/// execution to continue without blowing up the call stack +/// +/// 2. Correctly handling suspension points. These occur when IO would block so +/// we must, for example, wait for epoll/kqueue. The easiest solution is to have +/// functions return some value indicating that they suspended execution of the +/// interpreter. +/// +/// This `Yield` struct solves these problems. It represents a "continuation" of +/// the shell interpreter. Shell interpreter functions must return this value. +/// At the top-level of execution, `Yield.run(...)` serves as a "trampoline" to +/// drive execution without blowing up the callstack. +/// +/// Note that the "top-level of execution" could be in `Interpreter.run` or when +/// shell execution resumes after suspension in a task callback (for example in +/// IOWriter.onWritePoll). +pub const Yield = union(enum) { + script: *Script, + stmt: *Stmt, + pipeline: *Pipeline, + cmd: *Cmd, + assigns: *Assigns, + expansion: *Expansion, + @"if": *If, + subshell: *Subshell, + cond_expr: *CondExpr, + + /// This can occur if data is written using IOWriter and it immediately + /// completes (e.g. the buf to write was empty or the fd was immediately + /// writeable). + /// + /// When that happens, we return this variant to ensure that the + /// `.onIOWriterChunk` is called at the top of the callstack. + /// + /// TODO: this struct is massive, also I think we can remove this since + /// it is only used in 2 places. we might need to implement signals + /// first tho. + on_io_writer_chunk: struct { + err: ?JSC.SystemError, + written: usize, + /// This type is actually `IOWriterChildPtr`, but because + /// of an annoying cyclic Zig compile error we're doing this + /// quick fix of making it `*anyopaque`. + child: *anyopaque, + }, + + suspended, + /// Failed and threw a JS error + failed, + done, + + /// Used in debug builds to ensure the shell is not creating a callstack + /// that is too deep. + threadlocal var _dbg_catch_exec_within_exec: if (Environment.isDebug) usize else u0 = 0; + + /// Ideally this should be 1, but since we actually call the `resolve` of the Promise in + /// Interpreter.finish it could actually result in another shell script running. 
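+ ///
+ /// An illustrative (hypothetical) way to reach depth 2 from JS: the resolve
+ /// callback of one script's promise starts a second script, e.g.
+ ///
+ ///     await Bun.$`echo one`.then(() => Bun.$`echo two`);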
+ const MAX_DEPTH = 2; + + pub fn isDone(this: *const Yield) bool { + return this.* == .done; + } + + pub fn run(this: Yield) void { + if (comptime Environment.isDebug) log("Yield({s}) _dbg_catch_exec_within_exec = {d} + 1 = {d}", .{ @tagName(this), _dbg_catch_exec_within_exec, _dbg_catch_exec_within_exec + 1 }); + bun.debugAssert(_dbg_catch_exec_within_exec <= MAX_DEPTH); + if (comptime Environment.isDebug) _dbg_catch_exec_within_exec += 1; + defer { + if (comptime Environment.isDebug) log("Yield({s}) _dbg_catch_exec_within_exec = {d} - 1 = {d}", .{ @tagName(this), _dbg_catch_exec_within_exec, _dbg_catch_exec_within_exec - 1 }); + if (comptime Environment.isDebug) _dbg_catch_exec_within_exec -= 1; + } + + // A pipeline creates multiple "threads" of execution: + // + // ```bash + // cmd1 | cmd2 | cmd3 + // ``` + // + // We need to start cmd1, go back to the pipeline, start cmd2, and so + // on. + // + // This means we need to store a reference to the pipeline. And + // there can be nested pipelines, so we need a stack. + var sfb = std.heap.stackFallback(@sizeOf(*Pipeline) * 4, bun.default_allocator); + const alloc = sfb.get(); + var pipeline_stack = std.ArrayList(*Pipeline).initCapacity(alloc, 4) catch bun.outOfMemory(); + defer pipeline_stack.deinit(); + + // Note that we're using labelled switch statements but _not_ + // re-assigning `this`, so the `this` variable is stale after the first + // execution. Don't touch it. + state: switch (this) { + .pipeline => |x| { + pipeline_stack.append(x) catch bun.outOfMemory(); + continue :state x.next(); + }, + .cmd => |x| continue :state x.next(), + .script => |x| continue :state x.next(), + .stmt => |x| continue :state x.next(), + .assigns => |x| continue :state x.next(), + .expansion => |x| continue :state x.next(), + .@"if" => |x| continue :state x.next(), + .subshell => |x| continue :state x.next(), + .cond_expr => |x| continue :state x.next(), + .on_io_writer_chunk => |x| { + const child = IOWriterChildPtr.fromAnyOpaque(x.child); + continue :state child.onIOWriterChunk(x.written, x.err); + }, + .failed, .suspended, .done => { + if (drainPipelines(&pipeline_stack)) |yield| { + continue :state yield; + } + return; + }, + } + } + + pub fn drainPipelines(pipeline_stack: *std.ArrayList(*Pipeline)) ?Yield { + if (pipeline_stack.items.len == 0) return null; + var i: i64 = @as(i64, @intCast(pipeline_stack.items.len)) - 1; + while (i >= 0 and i < pipeline_stack.items.len) : (i -= 1) { + const pipeline = pipeline_stack.items[@intCast(i)]; + if (pipeline.state == .starting_cmds) return pipeline.next(); + _ = pipeline_stack.pop(); + if (pipeline.state == .done) { + return pipeline.next(); + } + } + return null; + } +}; + +const std = @import("std"); +const bun = @import("bun"); +const Environment = bun.Environment; +const shell = bun.shell; + +const Interpreter = bun.shell.Interpreter; +const IO = bun.shell.Interpreter.IO; +const log = bun.shell.interpret.log; +const IOWriter = bun.shell.Interpreter.IOWriter; +const IOWriterChildPtr = IOWriter.IOWriterChildPtr; + +const Assigns = bun.shell.Interpreter.Assigns; +const Script = bun.shell.Interpreter.Script; +const Subshell = bun.shell.Interpreter.Subshell; +const Cmd = bun.shell.Interpreter.Cmd; +const If = bun.shell.Interpreter.If; +const CondExpr = bun.shell.Interpreter.CondExpr; +const Expansion = bun.shell.Interpreter.Expansion; +const Stmt = bun.shell.Interpreter.Stmt; +const Pipeline = bun.shell.Interpreter.Pipeline; + +const JSC = bun.JSC; diff --git a/src/shell/builtin/basename.zig
b/src/shell/builtin/basename.zig index 1e04d32fee..e296f51bf5 100644 --- a/src/shell/builtin/basename.zig +++ b/src/shell/builtin/basename.zig @@ -1,7 +1,7 @@ -state: enum { idle, waiting_io, err, done } = .idle, +state: enum { idle, err, done } = .idle, buf: std.ArrayListUnmanaged(u8) = .{}, -pub fn start(this: *@This()) Maybe(void) { +pub fn start(this: *@This()) Yield { const args = this.bltn().argsSlice(); var iter = bun.SliceIterator([*:0]const u8).init(args); @@ -9,17 +9,15 @@ pub fn start(this: *@This()) Maybe(void) { while (iter.next()) |item| { const arg = bun.sliceTo(item, 0); - _ = this.print(bun.path.basename(arg)); - _ = this.print("\n"); + this.print(bun.path.basename(arg)); + this.print("\n"); } this.state = .done; if (this.bltn().stdout.needsIO()) |safeguard| { - this.bltn().stdout.enqueue(this, this.buf.items, safeguard); - } else { - this.bltn().done(0); + return this.bltn().stdout.enqueue(this, this.buf.items, safeguard); } - return Maybe(void).success; + return this.bltn().done(0); } pub fn deinit(this: *@This()) void { @@ -27,38 +25,33 @@ pub fn deinit(this: *@This()) void { //basename } -fn fail(this: *@This(), msg: []const u8) Maybe(void) { +fn fail(this: *@This(), msg: []const u8) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .err; - this.bltn().stderr.enqueue(this, msg, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, msg, safeguard); } _ = this.bltn().writeNoIO(.stderr, msg); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } -fn print(this: *@This(), msg: []const u8) Maybe(void) { +fn print(this: *@This(), msg: []const u8) void { if (this.bltn().stdout.needsIO() != null) { this.buf.appendSlice(bun.default_allocator, msg) catch bun.outOfMemory(); - return Maybe(void).success; + return; } - const res = this.bltn().writeNoIO(.stdout, msg); - if (res == .err) return Maybe(void).initErr(res.err); - return Maybe(void).success; + _ = this.bltn().writeNoIO(.stdout, msg); } -pub fn onIOWriterChunk(this: *@This(), _: usize, maybe_e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *@This(), _: usize, maybe_e: ?JSC.SystemError) Yield { if (maybe_e) |e| { defer e.deref(); this.state = .err; - this.bltn().done(1); - return; + return this.bltn().done(1); } switch (this.state) { - .done => this.bltn().done(0), - .err => this.bltn().done(1), - else => {}, + .done => return this.bltn().done(0), + .err => return this.bltn().done(1), + .idle => bun.shell.unreachableState("Basename.onIOWriterChunk", "idle"), } } @@ -68,9 +61,9 @@ pub inline fn bltn(this: *@This()) *Builtin { } const bun = @import("bun"); +const Yield = bun.shell.Yield; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const std = @import("std"); diff --git a/src/shell/builtin/cat.zig b/src/shell/builtin/cat.zig index b9ee5e1667..340b44a674 100644 --- a/src/shell/builtin/cat.zig +++ b/src/shell/builtin/cat.zig @@ -24,20 +24,18 @@ state: union(enum) { done, } = .idle, -pub fn writeFailingError(this: *Cat, buf: []const u8, exit_code: ExitCode) Maybe(void) { +pub fn writeFailingError(this: *Cat, buf: []const u8, exit_code: ExitCode) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .waiting_write_err; - this.bltn().stderr.enqueue(this, buf, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, buf, safeguard); } _ = 
this.bltn().writeNoIO(.stderr, buf); - this.bltn().done(exit_code); - return Maybe(void).success; + return this.bltn().done(exit_code); } -pub fn start(this: *Cat) Maybe(void) { +pub fn start(this: *Cat) Yield { const filepath_args = switch (this.opts.parse(this.bltn().argsSlice())) { .ok => |filepath_args| filepath_args, .err => |e| { @@ -47,8 +45,7 @@ pub fn start(this: *Cat) Maybe(void) { .unsupported => |unsupported| this.bltn().fmtErrorArena(.cat, "unsupported option, please open a GitHub issue -- {s}\n", .{unsupported}), }; - _ = this.writeFailingError(buf, 1); - return Maybe(void).success; + return this.writeFailingError(buf, 1); }, }; @@ -66,12 +63,10 @@ pub fn start(this: *Cat) Maybe(void) { }; } - _ = this.next(); - - return Maybe(void).success; + return this.next(); } -pub fn next(this: *Cat) void { +pub fn next(this: *Cat) Yield { switch (this.state) { .idle => @panic("Invalid state"), .exec_stdin => { @@ -79,17 +74,13 @@ pub fn next(this: *Cat) void { this.state.exec_stdin.in_done = true; const buf = this.bltn().readStdinNoIO(); if (this.bltn().stdout.needsIO()) |safeguard| { - this.bltn().stdout.enqueue(this, buf, safeguard); - } else { - _ = this.bltn().writeNoIO(.stdout, buf); - this.bltn().done(0); - return; + return this.bltn().stdout.enqueue(this, buf, safeguard); } - return; + _ = this.bltn().writeNoIO(.stdout, buf); + return this.bltn().done(0); } this.bltn().stdin.fd.addReader(this); - this.bltn().stdin.fd.start(); - return; + return this.bltn().stdin.fd.start(); }, .exec_filepath_args => { var exec = &this.state.exec_filepath_args; @@ -107,9 +98,8 @@ pub fn next(this: *Cat) void { .result => |fd| fd, .err => |e| { const buf = this.bltn().taskErrorToString(.cat, e); - _ = this.writeFailingError(buf, 1); - exec.deinit(); - return; + defer exec.deinit(); + return this.writeFailingError(buf, 1); }, }; @@ -118,14 +108,14 @@ pub fn next(this: *Cat) void { exec.chunks_queued = 0; exec.reader = reader; exec.reader.?.addReader(this); - exec.reader.?.start(); + return exec.reader.?.start(); }, - .waiting_write_err => return, - .done => this.bltn().done(0), + .waiting_write_err => return .failed, + .done => return this.bltn().done(0), } } -pub fn onIOWriterChunk(this: *Cat, _: usize, err: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Cat, _: usize, err: ?JSC.SystemError) Yield { debug("onIOWriterChunk(0x{x}, {s}, had_err={any})", .{ @intFromPtr(this), @tagName(this.state), err != null }); const errno: ExitCode = if (err) |e| brk: { defer e.deref(); @@ -144,7 +134,7 @@ pub fn onIOWriterChunk(this: *Cat, _: usize, err: ?JSC.SystemError) void { } this.state.exec_stdin.in_done = true; } - this.bltn().done(e.getErrno()); + return this.bltn().done(e.getErrno()); }, .exec_filepath_args => { var exec = &this.state.exec_filepath_args; @@ -152,22 +142,21 @@ pub fn onIOWriterChunk(this: *Cat, _: usize, err: ?JSC.SystemError) void { r.removeReader(this); } exec.deinit(); - this.bltn().done(e.getErrno()); + return this.bltn().done(e.getErrno()); }, - .waiting_write_err => this.bltn().done(e.getErrno()), + .waiting_write_err => return this.bltn().done(e.getErrno()), else => @panic("Invalid state"), } - return; } switch (this.state) { .exec_stdin => { this.state.exec_stdin.chunks_done += 1; if (this.state.exec_stdin.in_done and (this.state.exec_stdin.chunks_done >= this.state.exec_stdin.chunks_queued)) { - this.bltn().done(0); - return; + return this.bltn().done(0); } // Need to wait for more chunks to be written + return .suspended; }, .exec_filepath_args => { 
this.state.exec_filepath_args.chunks_done += 1; @@ -175,42 +164,42 @@ pub fn onIOWriterChunk(this: *Cat, _: usize, err: ?JSC.SystemError) void { this.state.exec_filepath_args.out_done = true; } if (this.state.exec_filepath_args.in_done and this.state.exec_filepath_args.out_done) { - this.next(); - return; + return this.next(); } // Wait for reader to be done - return; + return .suspended; }, - .waiting_write_err => this.bltn().done(1), + .waiting_write_err => return this.bltn().done(1), else => @panic("Invalid state"), } } -pub fn onIOReaderChunk(this: *Cat, chunk: []const u8) ReadChunkAction { +pub fn onIOReaderChunk(this: *Cat, chunk: []const u8, remove: *bool) Yield { debug("onIOReaderChunk(0x{x}, {s}, chunk_len={d})", .{ @intFromPtr(this), @tagName(this.state), chunk.len }); + remove.* = false; switch (this.state) { .exec_stdin => { if (this.bltn().stdout.needsIO()) |safeguard| { this.state.exec_stdin.chunks_queued += 1; - this.bltn().stdout.enqueue(this, chunk, safeguard); - return .cont; + return this.bltn().stdout.enqueue(this, chunk, safeguard); } _ = this.bltn().writeNoIO(.stdout, chunk); + return .done; }, .exec_filepath_args => { if (this.bltn().stdout.needsIO()) |safeguard| { this.state.exec_filepath_args.chunks_queued += 1; - this.bltn().stdout.enqueue(this, chunk, safeguard); - return .cont; + return this.bltn().stdout.enqueue(this, chunk, safeguard); } _ = this.bltn().writeNoIO(.stdout, chunk); + return .done; }, else => @panic("Invalid state"), } - return .cont; + return .done; } -pub fn onIOReaderDone(this: *Cat, err: ?JSC.SystemError) void { +pub fn onIOReaderDone(this: *Cat, err: ?JSC.SystemError) Yield { const errno: ExitCode = if (err) |e| brk: { defer e.deref(); break :brk @as(ExitCode, @intCast(@intFromEnum(e.getErrno()))); @@ -223,14 +212,13 @@ pub fn onIOReaderDone(this: *Cat, err: ?JSC.SystemError) void { this.state.exec_stdin.in_done = true; if (errno != 0) { if ((this.state.exec_stdin.chunks_done >= this.state.exec_stdin.chunks_queued) or this.bltn().stdout.needsIO() == null) { - this.bltn().done(errno); - return; + return this.bltn().done(errno); } this.bltn().stdout.fd.writer.cancelChunks(this); - return; + return .suspended; } if ((this.state.exec_stdin.chunks_done >= this.state.exec_stdin.chunks_queued) or this.bltn().stdout.needsIO() == null) { - this.bltn().done(0); + return this.bltn().done(0); } }, .exec_filepath_args => { @@ -238,18 +226,19 @@ pub fn onIOReaderDone(this: *Cat, err: ?JSC.SystemError) void { if (errno != 0) { if (this.state.exec_filepath_args.out_done or this.bltn().stdout.needsIO() == null) { this.state.exec_filepath_args.deinit(); - this.bltn().done(errno); - return; + return this.bltn().done(errno); } this.bltn().stdout.fd.writer.cancelChunks(this); - return; + return .suspended; } if (this.state.exec_filepath_args.out_done or (this.state.exec_filepath_args.chunks_done >= this.state.exec_filepath_args.chunks_queued) or this.bltn().stdout.needsIO() == null) { - this.next(); + return this.next(); } }, .done, .waiting_write_err, .idle => {}, } + + return .suspended; } pub fn deinit(_: *Cat) void {} @@ -342,6 +331,7 @@ const Opts = struct { const debug = bun.Output.scoped(.ShellCat, true); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; @@ -352,9 +342,7 @@ const ParseFlagResult = interpreter.ParseFlagResult; const ExitCode = shell.ExitCode; const IOReader = shell.IOReader; const Cat = @This(); -const 
ReadChunkAction = interpreter.ReadChunkAction; const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const std = @import("std"); const FlagParser = interpreter.FlagParser; diff --git a/src/shell/builtin/cd.zig b/src/shell/builtin/cd.zig index 532477b42b..37980bfd08 100644 --- a/src/shell/builtin/cd.zig +++ b/src/shell/builtin/cd.zig @@ -9,24 +9,21 @@ state: union(enum) { err: Syscall.Error, } = .idle, -fn writeStderrNonBlocking(this: *Cd, comptime fmt: []const u8, args: anytype) void { +fn writeStderrNonBlocking(this: *Cd, comptime fmt: []const u8, args: anytype) Yield { this.state = .waiting_write_stderr; if (this.bltn().stderr.needsIO()) |safeguard| { - this.bltn().stderr.enqueueFmtBltn(this, .cd, fmt, args, safeguard); - } else { - const buf = this.bltn().fmtErrorArena(.cd, fmt, args); - _ = this.bltn().writeNoIO(.stderr, buf); - this.state = .done; - this.bltn().done(1); + return this.bltn().stderr.enqueueFmtBltn(this, .cd, fmt, args, safeguard); } + const buf = this.bltn().fmtErrorArena(.cd, fmt, args); + _ = this.bltn().writeNoIO(.stderr, buf); + this.state = .done; + return this.bltn().done(1); } -pub fn start(this: *Cd) Maybe(void) { +pub fn start(this: *Cd) Yield { const args = this.bltn().argsSlice(); if (args.len > 1) { - this.writeStderrNonBlocking("too many arguments\n", .{}); - // yield execution - return Maybe(void).success; + return this.writeStderrNonBlocking("too many arguments\n", .{}); } if (args.len == 1) { @@ -36,7 +33,10 @@ pub fn start(this: *Cd) Maybe(void) { switch (this.bltn().parentCmd().base.shell.changePrevCwd(this.bltn().parentCmd().base.interpreter)) { .result => {}, .err => |err| { - return this.handleChangeCwdErr(err, this.bltn().parentCmd().base.shell.prevCwdZ()); + return this.handleChangeCwdErr( + err, + this.bltn().parentCmd().base.shell.prevCwdZ(), + ); }, } }, @@ -57,11 +57,10 @@ pub fn start(this: *Cd) Maybe(void) { } } - this.bltn().done(0); - return Maybe(void).success; + return this.bltn().done(0); } -fn handleChangeCwdErr(this: *Cd, err: Syscall.Error, new_cwd_: []const u8) Maybe(void) { +fn handleChangeCwdErr(this: *Cd, err: Syscall.Error, new_cwd_: []const u8) Yield { const errno: usize = @intCast(err.errno); switch (errno) { @@ -70,44 +69,37 @@ fn handleChangeCwdErr(this: *Cd, err: Syscall.Error, new_cwd_: []const u8) Maybe const buf = this.bltn().fmtErrorArena(.cd, "not a directory: {s}\n", .{new_cwd_}); _ = this.bltn().writeNoIO(.stderr, buf); this.state = .done; - this.bltn().done(1); - // yield execution - return Maybe(void).success; + return this.bltn().done(1); } - this.writeStderrNonBlocking("not a directory: {s}\n", .{new_cwd_}); - return Maybe(void).success; + return this.writeStderrNonBlocking("not a directory: {s}\n", .{new_cwd_}); }, @as(usize, @intFromEnum(Syscall.E.NOENT)) => { if (this.bltn().stderr.needsIO() == null) { const buf = this.bltn().fmtErrorArena(.cd, "not a directory: {s}\n", .{new_cwd_}); _ = this.bltn().writeNoIO(.stderr, buf); this.state = .done; - this.bltn().done(1); - // yield execution - return Maybe(void).success; + return this.bltn().done(1); } - this.writeStderrNonBlocking("not a directory: {s}\n", .{new_cwd_}); - return Maybe(void).success; + return this.writeStderrNonBlocking("not a directory: {s}\n", .{new_cwd_}); }, - else => return Maybe(void).success, + else => return .failed, } } -pub fn onIOWriterChunk(this: *Cd, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Cd, _: usize, e: ?JSC.SystemError) Yield { if (comptime bun.Environment.allow_assert) { assert(this.state == 
.waiting_write_stderr); } if (e != null) { defer e.?.deref(); - this.bltn().done(e.?.getErrno()); - return; + return this.bltn().done(e.?.getErrno()); } this.state = .done; - this.bltn().done(1); + return this.bltn().done(1); } pub inline fn bltn(this: *Cd) *Builtin { @@ -123,13 +115,13 @@ pub fn deinit(this: *Cd) void { // -- const log = bun.Output.scoped(.Cd, true); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; const Cd = @This(); const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const std = @import("std"); const Syscall = bun.sys; diff --git a/src/shell/builtin/cp.zig b/src/shell/builtin/cp.zig index a4f6a06333..638e3485be 100644 --- a/src/shell/builtin/cp.zig +++ b/src/shell/builtin/cp.zig @@ -64,7 +64,7 @@ const EbusyState = struct { } }; -pub fn start(this: *Cp) Maybe(void) { +pub fn start(this: *Cp) Yield { const maybe_filepath_args = switch (this.opts.parse(this.bltn().argsSlice())) { .ok => |args| args, .err => |e| { @@ -74,14 +74,12 @@ pub fn start(this: *Cp) Maybe(void) { .unsupported => |unsupported| this.bltn().fmtErrorArena(.cp, "unsupported option, please open a GitHub issue -- {s}\n", .{unsupported}), }; - _ = this.writeFailingError(buf, 1); - return Maybe(void).success; + return this.writeFailingError(buf, 1); }, }; if (maybe_filepath_args == null or maybe_filepath_args.?.len <= 1) { - _ = this.writeFailingError(Builtin.Kind.cp.usageString(), 1); - return Maybe(void).success; + return this.writeFailingError(Builtin.Kind.cp.usageString(), 1); } const args = maybe_filepath_args orelse unreachable; @@ -93,12 +91,10 @@ pub fn start(this: *Cp) Maybe(void) { .paths_to_copy = paths_to_copy, } }; - this.next(); - - return Maybe(void).success; + return this.next(); } -pub fn ignoreEbusyErrorIfPossible(this: *Cp) void { +pub fn ignoreEbusyErrorIfPossible(this: *Cp) Yield { if (!bun.Environment.isWindows) @compileError("dont call this plz"); if (this.state.ebusy.idx < this.state.ebusy.state.tasks.items.len) { @@ -115,18 +111,17 @@ pub fn ignoreEbusyErrorIfPossible(this: *Cp) void { continue :outer_loop; } this.state.ebusy.idx += i + 1; - this.printShellCpTask(task); - return; + return this.printShellCpTask(task); } } this.state.ebusy.state.deinit(); const exit_code = this.state.ebusy.main_exit_code; this.state = .done; - this.bltn().done(exit_code); + return this.bltn().done(exit_code); } -pub fn next(this: *Cp) void { +pub fn next(this: *Cp) Yield { while (this.state != .done) { switch (this.state) { .idle => @panic("Invalid state for \"Cp\": idle, this indicates a bug in Bun. 
Please file a GitHub issue"), @@ -146,10 +141,9 @@ pub fn next(this: *Cp) void { exec.ebusy.deinit(); } this.state = .done; - this.bltn().done(exit_code); - return; + return this.bltn().done(exit_code); } - return; + return .suspended; } exec.started = true; @@ -163,46 +157,44 @@ pub fn next(this: *Cp) void { const cp_task = ShellCpTask.create(this, this.bltn().eventLoop(), this.opts, 1 + exec.paths_to_copy.len, path, exec.target_path, cwd_path); cp_task.schedule(); } - return; + return .suspended; }, .ebusy => { if (comptime bun.Environment.isWindows) { - this.ignoreEbusyErrorIfPossible(); - return; - } else @panic("Should only be called on Windows"); + return this.ignoreEbusyErrorIfPossible(); + } + @panic("Should only be called on Windows"); }, - .waiting_write_err => return, + .waiting_write_err => return .failed, .done => unreachable, } } - this.bltn().done(0); + return this.bltn().done(0); } pub fn deinit(cp: *Cp) void { assert(cp.state == .done or cp.state == .waiting_write_err); } -pub fn writeFailingError(this: *Cp, buf: []const u8, exit_code: ExitCode) Maybe(void) { +pub fn writeFailingError(this: *Cp, buf: []const u8, exit_code: ExitCode) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .waiting_write_err; - this.bltn().stderr.enqueue(this, buf, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, buf, safeguard); } _ = this.bltn().writeNoIO(.stderr, buf); - this.bltn().done(exit_code); - return Maybe(void).success; + return this.bltn().done(exit_code); } -pub fn onIOWriterChunk(this: *Cp, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Cp, _: usize, e: ?JSC.SystemError) Yield { if (e) |err| err.deref(); if (this.state == .waiting_write_err) { return this.bltn().done(1); } this.state.exec.output_done += 1; - this.next(); + return this.next(); } pub inline fn bltn(this: *@This()) *Builtin { @@ -226,7 +218,7 @@ pub fn onShellCpTaskDone(this: *Cp, task: *ShellCpTask) void { { log("{} got ebusy {d} {d}", .{ this, this.state.exec.ebusy.tasks.items.len, this.state.exec.paths_to_copy.len }); this.state.exec.ebusy.tasks.append(bun.default_allocator, task) catch bun.outOfMemory(); - this.next(); + this.next().run(); return; } } else { @@ -239,10 +231,10 @@ pub fn onShellCpTaskDone(this: *Cp, task: *ShellCpTask) void { } } - this.printShellCpTask(task); + this.printShellCpTask(task).run(); } -pub fn printShellCpTask(this: *Cp, task: *ShellCpTask) void { +pub fn printShellCpTask(this: *Cp, task: *ShellCpTask) Yield { // Deinitialize this task as we are starting a new one defer task.deinit(); @@ -256,10 +248,9 @@ pub fn printShellCpTask(this: *Cp, task: *ShellCpTask) void { if (bun.take(&task.err)) |err| { this.state.exec.err = err; const error_string = this.bltn().taskErrorToString(.cp, this.state.exec.err.?); - output_task.start(error_string); - return; + return output_task.start(error_string); } - output_task.start(null); + return output_task.start(null); } pub const ShellCpOutputTask = OutputTask(Cp, .{ @@ -271,36 +262,34 @@ pub const ShellCpOutputTask = OutputTask(Cp, .{ }); const ShellCpOutputTaskVTable = struct { - pub fn writeErr(this: *Cp, childptr: anytype, errbuf: []const u8) CoroutineResult { + pub fn writeErr(this: *Cp, childptr: anytype, errbuf: []const u8) ?Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state.exec.output_waiting += 1; - this.bltn().stderr.enqueue(childptr, errbuf, safeguard); - return .yield; + return this.bltn().stderr.enqueue(childptr, errbuf, safeguard); } _ = 
this.bltn().writeNoIO(.stderr, errbuf); - return .cont; + return null; } pub fn onWriteErr(this: *Cp) void { this.state.exec.output_done += 1; } - pub fn writeOut(this: *Cp, childptr: anytype, output: *OutputSrc) CoroutineResult { + pub fn writeOut(this: *Cp, childptr: anytype, output: *OutputSrc) ?Yield { if (this.bltn().stdout.needsIO()) |safeguard| { this.state.exec.output_waiting += 1; - this.bltn().stdout.enqueue(childptr, output.slice(), safeguard); - return .yield; + return this.bltn().stdout.enqueue(childptr, output.slice(), safeguard); } _ = this.bltn().writeNoIO(.stdout, output.slice()); - return .cont; + return null; } pub fn onWriteOut(this: *Cp) void { this.state.exec.output_done += 1; } - pub fn onDone(this: *Cp) void { - this.next(); + pub fn onDone(this: *Cp) Yield { + return this.next(); } }; @@ -743,6 +732,7 @@ const ArrayList = std.ArrayList; const Syscall = bun.sys; const bun = @import("bun"); const shell = bun.shell; +const Yield = shell.Yield; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; @@ -751,7 +741,6 @@ const ParseError = interpreter.ParseError; const ParseFlagResult = interpreter.ParseFlagResult; const ExitCode = shell.ExitCode; const Cp = @This(); -const CoroutineResult = interpreter.CoroutineResult; const OutputTask = interpreter.OutputTask; const assert = bun.assert; diff --git a/src/shell/builtin/dirname.zig b/src/shell/builtin/dirname.zig index 64ad7faf10..169a8e88e8 100644 --- a/src/shell/builtin/dirname.zig +++ b/src/shell/builtin/dirname.zig @@ -1,7 +1,7 @@ -state: enum { idle, waiting_io, err, done } = .idle, +state: enum { idle, err, done } = .idle, buf: std.ArrayListUnmanaged(u8) = .{}, -pub fn start(this: *@This()) Maybe(void) { +pub fn start(this: *@This()) Yield { const args = this.bltn().argsSlice(); var iter = bun.SliceIterator([*:0]const u8).init(args); @@ -15,11 +15,9 @@ pub fn start(this: *@This()) Maybe(void) { this.state = .done; if (this.bltn().stdout.needsIO()) |safeguard| { - this.bltn().stdout.enqueue(this, this.buf.items, safeguard); - } else { - this.bltn().done(0); + return this.bltn().stdout.enqueue(this, this.buf.items, safeguard); } - return Maybe(void).success; + return this.bltn().done(0); } pub fn deinit(this: *@This()) void { @@ -27,15 +25,13 @@ pub fn deinit(this: *@This()) void { //dirname } -fn fail(this: *@This(), msg: []const u8) Maybe(void) { +fn fail(this: *@This(), msg: []const u8) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .err; - this.bltn().stderr.enqueue(this, msg, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, msg, safeguard); } _ = this.bltn().writeNoIO(.stderr, msg); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } fn print(this: *@This(), msg: []const u8) Maybe(void) { @@ -48,17 +44,16 @@ fn print(this: *@This(), msg: []const u8) Maybe(void) { return Maybe(void).success; } -pub fn onIOWriterChunk(this: *@This(), _: usize, maybe_e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *@This(), _: usize, maybe_e: ?JSC.SystemError) Yield { if (maybe_e) |e| { defer e.deref(); this.state = .err; - this.bltn().done(1); - return; + return this.bltn().done(1); } switch (this.state) { - .done => this.bltn().done(0), - .err => this.bltn().done(1), - else => {}, + .done => return this.bltn().done(0), + .err => return this.bltn().done(1), + .idle => bun.shell.unreachableState("Dirname.onIOWriterChunk", "idle"), } } @@ -69,6 +64,7 @@ pub 
inline fn bltn(this: *@This()) *Builtin { // -- const bun = @import("bun"); +const Yield = bun.shell.Yield; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; diff --git a/src/shell/builtin/echo.zig b/src/shell/builtin/echo.zig index 321d4b0140..ea6b0d8830 100644 --- a/src/shell/builtin/echo.zig +++ b/src/shell/builtin/echo.zig @@ -4,14 +4,19 @@ output: std.ArrayList(u8), state: union(enum) { idle, waiting, + waiting_write_err, done, } = .idle, -pub fn start(this: *Echo) Maybe(void) { - const args = this.bltn().argsSlice(); +pub fn start(this: *Echo) Yield { + var args = this.bltn().argsSlice(); + const no_newline = args.len >= 1 and std.mem.eql(u8, bun.sliceTo(args[0], 0), "-n"); - var has_leading_newline: bool = false; + args = args[if (no_newline) 1 else 0..]; const args_len = args.len; + var has_leading_newline: bool = false; + + // TODO: Should flush buffer after it gets to a certain size for (args, 0..) |arg, i| { const thearg = std.mem.span(arg); if (i < args_len - 1) { @@ -25,32 +30,30 @@ } } - if (!has_leading_newline) this.output.append('\n') catch bun.outOfMemory(); + if (!has_leading_newline and !no_newline) this.output.append('\n') catch bun.outOfMemory(); if (this.bltn().stdout.needsIO()) |safeguard| { this.state = .waiting; - this.bltn().stdout.enqueue(this, this.output.items[0..], safeguard); - return Maybe(void).success; + return this.bltn().stdout.enqueue(this, this.output.items[0..], safeguard); } _ = this.bltn().writeNoIO(.stdout, this.output.items[0..]); this.state = .done; - this.bltn().done(0); - return Maybe(void).success; + return this.bltn().done(0); } -pub fn onIOWriterChunk(this: *Echo, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Echo, _: usize, e: ?JSC.SystemError) Yield { if (comptime bun.Environment.allow_assert) { - assert(this.state == .waiting); + assert(this.state == .waiting or this.state == .waiting_write_err); } if (e != null) { defer e.?.deref(); - this.bltn().done(e.?.getErrno()); - return; + return this.bltn().done(e.?.getErrno()); } + // Check the state before overwriting it with `.done`; otherwise the + // `waiting_write_err` path could never report exit code 1. + const exit_code: ExitCode = if (this.state == .waiting_write_err) 1 else 0; this.state = .done; - this.bltn().done(0); + return this.bltn().done(exit_code); } pub fn deinit(this: *Echo) void { @@ -63,15 +66,15 @@ pub inline fn bltn(this: *Echo) *Builtin { return @fieldParentPtr("impl", impl); } -// -- const log = bun.Output.scoped(.echo, true); const bun = @import("bun"); +const ExitCode = bun.shell.ExitCode; +const Yield = bun.shell.Yield; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; const Echo = @This(); const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const std = @import("std"); const assert = bun.assert; diff --git a/src/shell/builtin/exit.zig b/src/shell/builtin/exit.zig index c9e98830aa..96d47d563b 100644 --- a/src/shell/builtin/exit.zig +++ b/src/shell/builtin/exit.zig @@ -5,12 +5,11 @@ state: enum { done, } = .idle, -pub fn start(this: *Exit) Maybe(void) { +pub fn start(this: *Exit) Yield { const args = this.bltn().argsSlice(); switch (args.len) { 0 => { - this.bltn().done(0); - return Maybe(void).success; + return this.bltn().done(0); }, 1 => { const first_arg = args[0][0..std.mem.len(args[0]) :0]; @@ -18,8 +17,7 @@ error.Overflow => @intCast((std.fmt.parseInt(usize, first_arg, 10) catch return this.fail("exit: numeric argument
required\n")) % 256), error.InvalidCharacter => return this.fail("exit: numeric argument required\n"), }; - this.bltn().done(exit_code); - return Maybe(void).success; + return this.bltn().done(exit_code); }, else => { return this.fail("exit: too many arguments\n"); @@ -27,46 +25,41 @@ pub fn start(this: *Exit) Maybe(void) { } } -fn fail(this: *Exit, msg: []const u8) Maybe(void) { +fn fail(this: *Exit, msg: []const u8) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .waiting_io; - this.bltn().stderr.enqueue(this, msg, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, msg, safeguard); } _ = this.bltn().writeNoIO(.stderr, msg); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } -pub fn next(this: *Exit) void { +pub fn next(this: *Exit) Yield { switch (this.state) { - .idle => @panic("Unexpected \"idle\" state in Exit. This indicates a bug in Bun. Please file a GitHub issue."), + .idle => shell.unreachableState("Exit.next", "idle"), .waiting_io => { - return; + return .suspended; }, .err => { - this.bltn().done(1); - return; + return this.bltn().done(1); }, .done => { - this.bltn().done(1); - return; + return this.bltn().done(1); }, } } -pub fn onIOWriterChunk(this: *Exit, _: usize, maybe_e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Exit, _: usize, maybe_e: ?JSC.SystemError) Yield { if (comptime bun.Environment.allow_assert) { assert(this.state == .waiting_io); } if (maybe_e) |e| { defer e.deref(); this.state = .err; - this.next(); - return; + return this.next(); } this.state = .done; - this.next(); + return this.next(); } pub fn deinit(this: *Exit) void { @@ -81,13 +74,13 @@ pub inline fn bltn(this: *Exit) *Builtin { // -- const bun = @import("bun"); const shell = bun.shell; +const Yield = shell.Yield; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; const ExitCode = shell.ExitCode; const Exit = @This(); const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const std = @import("std"); const assert = bun.assert; diff --git a/src/shell/builtin/export.zig b/src/shell/builtin/export.zig index 2ad6bcb464..0deb8da7a0 100644 --- a/src/shell/builtin/export.zig +++ b/src/shell/builtin/export.zig @@ -9,21 +9,19 @@ const Entry = struct { } }; -pub fn writeOutput(this: *Export, comptime io_kind: @Type(.enum_literal), comptime fmt: []const u8, args: anytype) Maybe(void) { +pub fn writeOutput(this: *Export, comptime io_kind: @Type(.enum_literal), comptime fmt: []const u8, args: anytype) Yield { if (this.bltn().stdout.needsIO()) |safeguard| { var output: *BuiltinIO.Output = &@field(this.bltn(), @tagName(io_kind)); this.printing = true; - output.enqueueFmtBltn(this, .@"export", fmt, args, safeguard); - return Maybe(void).success; + return output.enqueueFmtBltn(this, .@"export", fmt, args, safeguard); } const buf = this.bltn().fmtErrorArena(.@"export", fmt, args); _ = this.bltn().writeNoIO(io_kind, buf); - this.bltn().done(0); - return Maybe(void).success; + return this.bltn().done(0); } -pub fn onIOWriterChunk(this: *Export, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Export, _: usize, e: ?JSC.SystemError) Yield { if (comptime bun.Environment.allow_assert) { assert(this.printing); } @@ -33,10 +31,10 @@ pub fn onIOWriterChunk(this: *Export, _: usize, e: ?JSC.SystemError) void { break :brk @intFromEnum(e.?.getErrno()); } else 0; - this.bltn().done(exit_code); + return this.bltn().done(exit_code); } -pub 
fn start(this: *Export) Maybe(void) { +pub fn start(this: *Export) Yield { const args = this.bltn().argsSlice(); // Calling `export` with no arguments prints all exported variables lexicographically ordered @@ -72,16 +70,15 @@ pub fn start(this: *Export) Maybe(void) { if (this.bltn().stdout.needsIO()) |safeguard| { this.printing = true; - this.bltn().stdout.enqueue(this, buf, safeguard); - - return Maybe(void).success; + return this.bltn().stdout.enqueue(this, buf, safeguard); } _ = this.bltn().writeNoIO(.stdout, buf); - this.bltn().done(0); - return Maybe(void).success; + return this.bltn().done(0); } + // TODO: It would be nice to not have to duplicate the arguments here. Can + // we make `Builtin.args` mutable so that we can take it out of the argv? for (args) |arg_raw| { const arg_sentinel = arg_raw[0..std.mem.len(arg_raw) :0]; const arg = arg_sentinel[0..arg_sentinel.len]; @@ -92,17 +89,25 @@ const buf = this.bltn().fmtErrorArena(.@"export", "`{s}`: not a valid identifier", .{arg}); return this.writeOutput(.stderr, "{s}\n", .{buf}); } - this.bltn().parentCmd().base.shell.assignVar(this.bltn().parentCmd().base.interpreter, EnvStr.initSlice(arg), EnvStr.initSlice(""), .exported); + + const label_env_str = EnvStr.dupeRefCounted(arg); + defer label_env_str.deref(); + this.bltn().parentCmd().base.shell.assignVar(this.bltn().parentCmd().base.interpreter, label_env_str, EnvStr.initSlice(""), .exported); continue; }; const label = arg[0..eqsign_idx]; const value = arg_sentinel[eqsign_idx + 1 .. :0]; - this.bltn().parentCmd().base.shell.assignVar(this.bltn().parentCmd().base.interpreter, EnvStr.initSlice(label), EnvStr.initSlice(value), .exported); + + const label_env_str = EnvStr.dupeRefCounted(label); + const value_env_str = EnvStr.dupeRefCounted(value); + defer label_env_str.deref(); + defer value_env_str.deref(); + + this.bltn().parentCmd().base.shell.assignVar(this.bltn().parentCmd().base.interpreter, label_env_str, value_env_str, .exported); } - this.bltn().done(0); - return Maybe(void).success; + return this.bltn().done(0); } pub fn deinit(this: *Export) void { @@ -118,6 +123,7 @@ pub inline fn bltn(this: *Export) *Builtin { // -- const debug = bun.Output.scoped(.ShellExport, true); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; @@ -125,7 +131,6 @@ const Builtin = Interpreter.Builtin; const ExitCode = shell.ExitCode; const Export = @This(); const JSC = bun.JSC; -const Maybe = JSC.Maybe; const std = @import("std"); const log = debug; const EnvStr = interpreter.EnvStr; diff --git a/src/shell/builtin/false.zig b/src/shell/builtin/false.zig index b838cd4ca0..a7d3111f2d 100644 --- a/src/shell/builtin/false.zig +++ b/src/shell/builtin/false.zig @@ -1,16 +1,15 @@ -pub fn start(this: *@This()) Maybe(void) { - this.bltn().done(1); - return Maybe(void).success; -} - -pub fn onIOWriterChunk(_: *@This(), _: usize, _: ?JSC.SystemError) void { - // no IO is done +pub fn start(this: *@This()) Yield { + return this.bltn().done(1); } pub fn deinit(this: *@This()) void { _ = this; } +pub fn onIOWriterChunk(_: *@This(), _: usize, _: ?JSC.SystemError) Yield { + return .done; +} + pub inline fn bltn(this: *@This()) *Builtin { const impl: *Builtin.Impl = @alignCast(@fieldParentPtr("false", this)); return @fieldParentPtr("impl", impl); @@ -18,9 +17,9 @@ pub inline fn bltn(this: *@This()) *Builtin { // -- const bun = @import("bun"); +const
Yield = bun.shell.Yield; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; diff --git a/src/shell/builtin/ls.zig b/src/shell/builtin/ls.zig index 0cd8549612..06f75972c3 100644 --- a/src/shell/builtin/ls.zig +++ b/src/shell/builtin/ls.zig @@ -13,25 +13,24 @@ state: union(enum) { done, } = .idle, -pub fn start(this: *Ls) Maybe(void) { - this.next(); - return Maybe(void).success; +alloc_scope: shell.AllocScope, + +pub fn start(this: *Ls) Yield { + return this.next(); } -pub fn writeFailingError(this: *Ls, buf: []const u8, exit_code: ExitCode) Maybe(void) { +pub fn writeFailingError(this: *Ls, buf: []const u8, exit_code: ExitCode) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .waiting_write_err; - this.bltn().stderr.enqueue(this, buf, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, buf, safeguard); } _ = this.bltn().writeNoIO(.stderr, buf); - this.bltn().done(exit_code); - return Maybe(void).success; + return this.bltn().done(exit_code); } -fn next(this: *Ls) void { +fn next(this: *Ls) Yield { while (!(this.state == .done)) { switch (this.state) { .idle => { @@ -44,8 +43,7 @@ fn next(this: *Ls) void { .show_usage => Builtin.Kind.ls.usageString(), }; - _ = this.writeFailingError(buf, 1); - return; + return this.writeFailingError(buf, 1); }, }; @@ -59,18 +57,35 @@ fn next(this: *Ls) void { const cwd = this.bltn().cwd; if (paths) |p| { + const print_directory = p.len > 1; for (p) |path_raw| { - const path = path_raw[0..std.mem.len(path_raw) :0]; - var task = ShellLsTask.create(this, this.opts, &this.state.exec.task_count, cwd, path, this.bltn().eventLoop()); + const path = this.alloc_scope.allocator().dupeZ(u8, path_raw[0..std.mem.len(path_raw) :0]) catch bun.outOfMemory(); + var task = ShellLsTask.create( + this, + this.opts, + &this.state.exec.task_count, + cwd, + path, + true, + this.bltn().eventLoop(), + ); + task.print_directory = print_directory; task.schedule(); } } else { - var task = ShellLsTask.create(this, this.opts, &this.state.exec.task_count, cwd, ".", this.bltn().eventLoop()); + var task = ShellLsTask.create( + this, + this.opts, + &this.state.exec.task_count, + cwd, + ".", + false, + this.bltn().eventLoop(), + ); task.schedule(); } }, .exec => { - // It's done log("Ls(0x{x}, state=exec) Check: tasks_done={d} task_count={d} output_done={d} output_waiting={d}", .{ @intFromPtr(this), this.state.exec.tasks_done, @@ -78,56 +93,73 @@ fn next(this: *Ls) void { this.state.exec.output_done, this.state.exec.output_waiting, }); + // It's done if (this.state.exec.tasks_done >= this.state.exec.task_count.load(.monotonic) and this.state.exec.output_done >= this.state.exec.output_waiting) { const exit_code: ExitCode = if (this.state.exec.err != null) 1 else 0; + if (this.state.exec.err) |*err| err.deinitWithAllocator(this.alloc_scope.allocator()); this.state = .done; - this.bltn().done(exit_code); - return; + return this.bltn().done(exit_code); } - return; + return .suspended; }, .waiting_write_err => { - return; + return .failed; }, .done => unreachable, } } - this.bltn().done(0); - return; + return this.bltn().done(0); } -pub fn deinit(_: *Ls) void {} +pub fn deinit(this: *Ls) void { + this.alloc_scope.endScope(); +} -pub fn onIOWriterChunk(this: *Ls, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Ls, _: usize, e: ?JSC.SystemError) Yield { if (e) |err| err.deref(); if (this.state == 
.waiting_write_err) { return this.bltn().done(1); } this.state.exec.output_done += 1; - this.next(); + return this.next(); } pub fn onShellLsTaskDone(this: *Ls, task: *ShellLsTask) void { - defer task.deinit(true); this.state.exec.tasks_done += 1; var output = task.takeOutput(); // TODO: Reuse the *ShellLsTask allocation const output_task: *ShellLsOutputTask = bun.new(ShellLsOutputTask, .{ .parent = this, - .output = .{ .arrlist = output.moveToUnmanaged() }, + .output = .{ + .arrlist = brk: { + // TODO: This is a quick fix, we should refactor shell.OutputTask to + // also track allocations properly. + this.alloc_scope.leakSlice(output.items); + break :brk output.moveToUnmanaged(); + }, + }, .state = .waiting_write_err, }); - if (task.err) |*err| { - this.state.exec.err = err.*; + if (task.err) |*err_ptr| { + const error_string = error_string: { + if (this.state.exec.err == null) { + this.state.exec.err = err_ptr.*; + break :error_string this.bltn().taskErrorToString(.ls, this.state.exec.err.?); + } + var err = err_ptr.*; + defer err.deinitWithAllocator(this.alloc_scope.allocator()); + break :error_string this.bltn().taskErrorToString(.ls, err); + }; task.err = null; - const error_string = this.bltn().taskErrorToString(.ls, this.state.exec.err.?); - output_task.start(error_string); + task.deinit(); + output_task.start(error_string).run(); return; } - output_task.start(null); + task.deinit(); + output_task.start(null).run(); } pub const ShellLsOutputTask = OutputTask(Ls, .{ @@ -139,36 +171,40 @@ pub const ShellLsOutputTask = OutputTask(Ls, .{ }); const ShellLsOutputTaskVTable = struct { - pub fn writeErr(this: *Ls, childptr: anytype, errbuf: []const u8) CoroutineResult { + pub fn writeErr(this: *Ls, childptr: anytype, errbuf: []const u8) ?Yield { + log("ShellLsOutputTaskVTable.writeErr(0x{x}, {s})", .{ @intFromPtr(this), errbuf }); if (this.bltn().stderr.needsIO()) |safeguard| { this.state.exec.output_waiting += 1; - this.bltn().stderr.enqueue(childptr, errbuf, safeguard); - return .yield; + return this.bltn().stderr.enqueue(childptr, errbuf, safeguard); } _ = this.bltn().writeNoIO(.stderr, errbuf); - return .cont; + return null; } pub fn onWriteErr(this: *Ls) void { + log("ShellLsOutputTaskVTable.onWriteErr(0x{x})", .{@intFromPtr(this)}); this.state.exec.output_done += 1; } - pub fn writeOut(this: *Ls, childptr: anytype, output: *OutputSrc) CoroutineResult { + pub fn writeOut(this: *Ls, childptr: anytype, output: *OutputSrc) ?Yield { + log("ShellLsOutputTaskVTable.writeOut(0x{x}, {s})", .{ @intFromPtr(this), output.slice() }); if (this.bltn().stdout.needsIO()) |safeguard| { this.state.exec.output_waiting += 1; - this.bltn().stdout.enqueue(childptr, output.slice(), safeguard); - return .yield; + return this.bltn().stdout.enqueue(childptr, output.slice(), safeguard); } + log("ShellLsOutputTaskVTable.writeOut(0x{x}, {s}) no IO", .{ @intFromPtr(this), output.slice() }); _ = this.bltn().writeNoIO(.stdout, output.slice()); - return .cont; + return null; } pub fn onWriteOut(this: *Ls) void { + log("ShellLsOutputTaskVTable.onWriteOut(0x{x})", .{@intFromPtr(this)}); this.state.exec.output_done += 1; } - pub fn onDone(this: *Ls) void { - this.next(); + pub fn onDone(this: *Ls) Yield { + log("ShellLsOutputTaskVTable.onDone(0x{x})", .{@intFromPtr(this)}); + return this.next(); } }; @@ -177,13 +213,12 @@ pub const ShellLsTask = struct { ls: *Ls, opts: Opts, - is_root: bool = true, + print_directory: bool = false, + owned_string: bool, task_count: *std.atomic.Value(usize), cwd: bun.FileDescriptor, - /// 
Should be allocated with bun.default_allocator path: [:0]const u8 = &[0:0]u8{}, - /// Should use bun.default_allocator output: std.ArrayList(u8), is_absolute: bool = false, err: ?Syscall.Error = null, @@ -199,25 +234,39 @@ pub const ShellLsTask = struct { JSC.WorkPool.schedule(&this.task); } - pub fn create(ls: *Ls, opts: Opts, task_count: *std.atomic.Value(usize), cwd: bun.FileDescriptor, path: [:0]const u8, event_loop: JSC.EventLoopHandle) *@This() { - const task = bun.default_allocator.create(@This()) catch bun.outOfMemory(); + pub fn create( + ls: *Ls, + opts: Opts, + task_count: *std.atomic.Value(usize), + cwd: bun.FileDescriptor, + path: [:0]const u8, + owned_string: bool, + event_loop: JSC.EventLoopHandle, + ) *@This() { + // We're going to free `task.path` so ensure it is allocated in this + // scope and NOT a string literal or other string we don't own. + if (owned_string) ls.alloc_scope.assertInScope(path); + + const task = ls.alloc_scope.allocator().create(@This()) catch bun.outOfMemory(); task.* = @This(){ .ls = ls, .opts = opts, .cwd = cwd, - .path = bun.default_allocator.dupeZ(u8, path[0..path.len]) catch bun.outOfMemory(), - .output = std.ArrayList(u8).init(bun.default_allocator), .concurrent_task = JSC.EventLoopTask.fromEventLoop(event_loop), .event_loop = event_loop, .task_count = task_count, + .path = path, + .output = std.ArrayList(u8).init(ls.alloc_scope.allocator()), + .owned_string = owned_string, }; + return task; } pub fn enqueue(this: *@This(), path: [:0]const u8) void { debug("enqueue: {s}", .{path}); const new_path = this.join( - bun.default_allocator, + this.ls.alloc_scope.allocator(), &[_][]const u8{ this.path[0..this.path.len], path[0..path.len], @@ -225,9 +274,9 @@ pub const ShellLsTask = struct { this.is_absolute, ); - var subtask = @This().create(this.ls, this.opts, this.task_count, this.cwd, new_path, this.event_loop); + var subtask = @This().create(this.ls, this.opts, this.task_count, this.cwd, new_path, true, this.event_loop); _ = this.task_count.fetchAdd(1, .monotonic); - subtask.is_root = false; + subtask.print_directory = true; subtask.schedule(); } @@ -269,14 +318,19 @@ pub const ShellLsTask = struct { } if (!this.opts.list_directories) { - if (!this.is_root) { + if (this.print_directory) { const writer = this.output.writer(); std.fmt.format(writer, "{s}:\n", .{this.path}) catch bun.outOfMemory(); } - var iterator = DirIterator.iterate(fd.stdDir(), .u8); + var iterator = DirIterator.iterate(fd, .u8); var entry = iterator.next(); + // If `-a` is used, "." and ".." should show up as results. However, + // our `DirIterator` abstraction skips them, so let's just add them + // now. 
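// Review note: for reference, the flag semantics the surrounding hunks
// implement (a summary of `shouldSkipEntry` plus the dot-entry insertion
// just below, matching POSIX ls; not new behavior beyond this diff):
//
//   ls      hides every entry whose name begins with "."
//   ls -a   shows everything, including the "." and ".." added just below
//   ls -A   shows dot-prefixed entries but still hides "." and ".."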
+ this.addDotEntriesIfNeeded(); + while (switch (entry) { .err => |e| { this.err = this.errorWithPath(e, this.path); @@ -300,9 +354,15 @@ pub const ShellLsTask = struct { fn shouldSkipEntry(this: *@This(), name: [:0]const u8) bool { if (this.opts.show_all) return false; + + // Show all directory entries whose names begin with a dot (`.`), EXCEPT + // `.` and `..` if (this.opts.show_almost_all) { - if (bun.strings.eqlComptime(name[0..1], ".") or bun.strings.eqlComptime(name[0..2], "..")) return true; + if (bun.strings.eqlComptime(name, ".") or bun.strings.eqlComptime(name, "..")) return true; + } else { + if (bun.strings.startsWith(name, ".")) return true; } + return false; } @@ -316,9 +376,16 @@ pub const ShellLsTask = struct { this.output.append('\n') catch bun.outOfMemory(); } + fn addDotEntriesIfNeeded(this: *@This()) void { + // `.addEntry()` already checks whether we can add "." and ".." to + // the result + this.addEntry("."); + this.addEntry(".."); + } + fn errorWithPath(this: *@This(), err: Syscall.Error, path: [:0]const u8) Syscall.Error { - _ = this; - return err.withPath(bun.default_allocator.dupeZ(u8, path[0..path.len]) catch bun.outOfMemory()); + debug("Ls(0x{x}).errorWithPath({s})", .{ @intFromPtr(this), path }); + return err.withPath(this.ls.alloc_scope.allocator().dupeZ(u8, path[0..path.len]) catch bun.outOfMemory()); } pub fn workPoolCallback(task: *JSC.WorkPoolTask) void { @@ -338,7 +405,7 @@ pub const ShellLsTask = struct { pub fn takeOutput(this: *@This()) std.ArrayList(u8) { const ret = this.output; - this.output = std.ArrayList(u8).init(bun.default_allocator); + this.output = std.ArrayList(u8).init(this.ls.alloc_scope.allocator()); return ret; } @@ -351,11 +418,12 @@ pub const ShellLsTask = struct { this.runFromMainThread(); } - pub fn deinit(this: *@This(), comptime free_this: bool) void { - debug("deinit {s}", .{if (free_this) "free_this=true" else "free_this=false"}); - bun.default_allocator.free(this.path); + pub fn deinit(this: *@This()) void { + debug("deinit {s}", .{"free"}); + if (this.owned_string) this.ls.alloc_scope.allocator().free(this.path); + if (this.err) |*err| err.deinitWithAllocator(this.ls.alloc_scope.allocator()); this.output.deinit(); - if (comptime free_this) bun.default_allocator.destroy(this); + this.ls.alloc_scope.allocator().destroy(this); } }; @@ -365,8 +433,9 @@ const Opts = struct { show_all: bool = false, /// `-A`, `--almost-all` - /// Do not list implied . and ..
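// Review note: two coordinated changes meet here. The exact-match comparisons
// in `shouldSkipEntry` above (`eqlComptime(name, ".")` rather than slicing the
// first one or two bytes) pair with the default flip below, from the removed
// `true` to the new `false`. Previously `show_almost_all` was on by default
// and its prefix checks hid every dot-prefixed entry, so passing `-A` changed
// nothing; now `-A` must be requested explicitly and suppresses only "." and
// "..".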
- show_almost_all: bool = true, + /// Include directory entries whose names begin with a dot (‘.’) except for + /// `.` and `..` + show_almost_all: bool = false, /// `--author` /// With -l, print the author of each file @@ -630,7 +699,7 @@ pub fn parseFlags(this: *Ls) Result(?[]const [*:0]const u8, Opts.ParseError) { } } - return .{ .err = .show_usage }; + return .{ .ok = null }; } pub fn parseFlag(this: *Ls, flag: []const u8) union(enum) { continue_parsing, done, illegal_option: []const u8 } { @@ -780,6 +849,7 @@ pub inline fn bltn(this: *Ls) *Builtin { const Ls = @This(); const log = bun.Output.scoped(.ls, true); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; @@ -788,7 +858,6 @@ const Result = Interpreter.Builtin.Result; const ParseError = interpreter.ParseError; const ExitCode = shell.ExitCode; const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const std = @import("std"); const Syscall = bun.sys; const ShellSyscall = interpreter.ShellSyscall; @@ -796,4 +865,3 @@ const Allocator = std.mem.Allocator; const DirIterator = bun.DirIterator; const OutputTask = interpreter.OutputTask; const OutputSrc = interpreter.OutputSrc; -const CoroutineResult = interpreter.CoroutineResult; diff --git a/src/shell/builtin/mkdir.zig b/src/shell/builtin/mkdir.zig index 58921f0143..31e5dbe3db 100644 --- a/src/shell/builtin/mkdir.zig +++ b/src/shell/builtin/mkdir.zig @@ -14,7 +14,7 @@ state: union(enum) { done, } = .idle, -pub fn onIOWriterChunk(this: *Mkdir, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Mkdir, _: usize, e: ?JSC.SystemError) Yield { if (e) |err| err.deref(); switch (this.state) { @@ -25,13 +25,13 @@ pub fn onIOWriterChunk(this: *Mkdir, _: usize, e: ?JSC.SystemError) void { .idle, .done => @panic("Invalid state"), } - this.next(); + return this.next(); } -pub fn writeFailingError(this: *Mkdir, buf: []const u8, exit_code: ExitCode) Maybe(void) { + +pub fn writeFailingError(this: *Mkdir, buf: []const u8, exit_code: ExitCode) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .waiting_write_err; - this.bltn().stderr.enqueue(this, buf, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, buf, safeguard); } _ = this.bltn().writeNoIO(.stderr, buf); @@ -39,11 +39,10 @@ pub fn writeFailingError(this: *Mkdir, buf: []const u8, exit_code: ExitCode) May // return .{ .err = e }; // } - this.bltn().done(exit_code); - return Maybe(void).success; + return this.bltn().done(exit_code); } -pub fn start(this: *Mkdir) Maybe(void) { +pub fn start(this: *Mkdir) Yield { const filepath_args = switch (this.opts.parse(this.bltn().argsSlice())) { .ok => |filepath_args| filepath_args, .err => |e| { @@ -53,12 +52,10 @@ pub fn start(this: *Mkdir) Maybe(void) { .unsupported => |unsupported| this.bltn().fmtErrorArena(.mkdir, "unsupported option, please open a GitHub issue -- {s}\n", .{unsupported}), }; - _ = this.writeFailingError(buf, 1); - return Maybe(void).success; + return this.writeFailingError(buf, 1); }, } orelse { - _ = this.writeFailingError(Builtin.Kind.mkdir.usageString(), 1); - return Maybe(void).success; + return this.writeFailingError(Builtin.Kind.mkdir.usageString(), 1); }; this.state = .{ @@ -67,12 +64,10 @@ pub fn start(this: *Mkdir) Maybe(void) { }, }; - _ = this.next(); - - return Maybe(void).success; + return this.next(); } -pub fn next(this: *Mkdir) void { +pub fn next(this: *Mkdir) Yield { switch 
(this.state) { .idle => @panic("Invalid state"), .exec => { @@ -82,10 +77,9 @@ pub fn next(this: *Mkdir) void { const exit_code: ExitCode = if (this.state.exec.err != null) 1 else 0; if (this.state.exec.err) |e| e.deref(); this.state = .done; - this.bltn().done(exit_code); - return; + return this.bltn().done(exit_code); } - return; + return .suspended; } exec.started = true; @@ -96,9 +90,10 @@ pub fn next(this: *Mkdir) void { var task = ShellMkdirTask.create(this, this.opts, dir_to_mk, this.bltn().parentCmd().base.shell.cwdZ()); task.schedule(); } + return .suspended; }, - .waiting_write_err => return, - .done => this.bltn().done(0), + .waiting_write_err => return .failed, + .done => return this.bltn().done(0), } } @@ -116,10 +111,10 @@ pub fn onShellMkdirTaskDone(this: *Mkdir, task: *ShellMkdirTask) void { if (err) |e| { const error_string = this.bltn().taskErrorToString(.mkdir, e); this.state.exec.err = e; - output_task.start(error_string); + output_task.start(error_string).run(); return; } - output_task.start(null); + output_task.start(null).run(); } pub const ShellMkdirOutputTask = OutputTask(Mkdir, .{ @@ -131,38 +126,36 @@ pub const ShellMkdirOutputTask = OutputTask(Mkdir, .{ }); const ShellMkdirOutputTaskVTable = struct { - pub fn writeErr(this: *Mkdir, childptr: anytype, errbuf: []const u8) CoroutineResult { + pub fn writeErr(this: *Mkdir, childptr: anytype, errbuf: []const u8) ?Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state.exec.output_waiting += 1; - this.bltn().stderr.enqueue(childptr, errbuf, safeguard); - return .yield; + return this.bltn().stderr.enqueue(childptr, errbuf, safeguard); } _ = this.bltn().writeNoIO(.stderr, errbuf); - return .cont; + return null; } pub fn onWriteErr(this: *Mkdir) void { this.state.exec.output_done += 1; } - pub fn writeOut(this: *Mkdir, childptr: anytype, output: *OutputSrc) CoroutineResult { + pub fn writeOut(this: *Mkdir, childptr: anytype, output: *OutputSrc) ?Yield { if (this.bltn().stdout.needsIO()) |safeguard| { this.state.exec.output_waiting += 1; const slice = output.slice(); log("THE SLICE: {d} {s}", .{ slice.len, slice }); - this.bltn().stdout.enqueue(childptr, slice, safeguard); - return .yield; + return this.bltn().stdout.enqueue(childptr, slice, safeguard); } _ = this.bltn().writeNoIO(.stdout, output.slice()); - return .cont; + return null; } pub fn onWriteOut(this: *Mkdir) void { this.state.exec.output_done += 1; } - pub fn onDone(this: *Mkdir) void { - this.next(); + pub fn onDone(this: *Mkdir) Yield { + return this.next(); } }; @@ -379,6 +372,7 @@ pub inline fn bltn(this: *Mkdir) *Builtin { // -- const debug = bun.Output.scoped(.ShellMkdir, true); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; @@ -388,14 +382,12 @@ const ParseError = interpreter.ParseError; const ParseFlagResult = interpreter.ParseFlagResult; const ExitCode = shell.ExitCode; const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const std = @import("std"); const FlagParser = interpreter.FlagParser; const Mkdir = @This(); const log = debug; const OutputTask = interpreter.OutputTask; -const CoroutineResult = interpreter.CoroutineResult; const OutputSrc = interpreter.OutputSrc; const WorkPool = bun.JSC.WorkPool; const ResolvePath = bun.path; diff --git a/src/shell/builtin/mv.zig b/src/shell/builtin/mv.zig index 3e6c2c21c2..358f7097c1 100644 --- a/src/shell/builtin/mv.zig +++ b/src/shell/builtin/mv.zig @@ -166,24 +166,22 
@@ pub const ShellMvBatchedTask = struct { } }; -pub fn start(this: *Mv) Maybe(void) { +pub fn start(this: *Mv) Yield { return this.next(); } -pub fn writeFailingError(this: *Mv, buf: []const u8, exit_code: ExitCode) Maybe(void) { +pub fn writeFailingError(this: *Mv, buf: []const u8, exit_code: ExitCode) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .{ .waiting_write_err = .{ .exit_code = exit_code } }; - this.bltn().stderr.enqueue(this, buf, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, buf, safeguard); } _ = this.bltn().writeNoIO(.stderr, buf); - this.bltn().done(exit_code); - return Maybe(void).success; + return this.bltn().done(exit_code); } -pub fn next(this: *Mv) Maybe(void) { +pub fn next(this: *Mv) Yield { while (!(this.state == .done or this.state == .err)) { switch (this.state) { .idle => { @@ -210,10 +208,10 @@ pub fn next(this: *Mv) Maybe(void) { }, }; this.state.check_target.task.task.schedule(); - return Maybe(void).success; + return .suspended; }, .check_target => { - if (this.state.check_target.state == .running) return Maybe(void).success; + if (this.state.check_target.state == .running) return .suspended; const check_target = &this.state.check_target; if (comptime bun.Environment.allow_assert) { @@ -296,36 +294,32 @@ pub fn next(this: *Mv) Maybe(void) { t.task.schedule(); } - return Maybe(void).success; + return .suspended; }, // Shouldn't happen .executing => {}, .waiting_write_err => { - return Maybe(void).success; + return .failed; }, .done, .err => unreachable, } } switch (this.state) { - .done => this.bltn().done(0), - else => this.bltn().done(1), + .done => return this.bltn().done(0), + else => return this.bltn().done(1), } - - return Maybe(void).success; } -pub fn onIOWriterChunk(this: *Mv, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Mv, _: usize, e: ?JSC.SystemError) Yield { defer if (e) |err| err.deref(); switch (this.state) { .waiting_write_err => { if (e != null) { this.state = .err; - _ = this.next(); - return; + return this.next(); } - this.bltn().done(this.state.waiting_write_err.exit_code); - return; + return this.bltn().done(this.state.waiting_write_err.exit_code); }, else => @panic("Invalid state"), } @@ -340,8 +334,7 @@ pub fn checkTargetTaskDone(this: *Mv, task: *ShellMvCheckTargetTask) void { } this.state.check_target.state = .done; - _ = this.next(); - return; + this.next().run(); } pub fn batchedMoveTaskDone(this: *Mv, task: *ShellMvBatchedTask) void { @@ -371,8 +364,7 @@ pub fn batchedMoveTaskDone(this: *Mv, task: *ShellMvBatchedTask) void { } this.state = .done; - _ = this.next(); - return; + this.next().run(); } } @@ -500,6 +492,7 @@ const assert = bun.assert; const std = @import("std"); const bun = @import("bun"); const shell = bun.shell; +const Yield = shell.Yield; const ExitCode = shell.ExitCode; const JSC = bun.JSC; const Maybe = bun.sys.Maybe; diff --git a/src/shell/builtin/pwd.zig b/src/shell/builtin/pwd.zig index 4df76b51ef..734cda8623 100644 --- a/src/shell/builtin/pwd.zig +++ b/src/shell/builtin/pwd.zig @@ -7,53 +7,49 @@ state: union(enum) { done, } = .idle, -pub fn start(this: *Pwd) Maybe(void) { +pub fn start(this: *Pwd) Yield { const args = this.bltn().argsSlice(); if (args.len > 0) { const msg = "pwd: too many arguments\n"; if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .{ .waiting_io = .{ .kind = .stderr } }; - this.bltn().stderr.enqueue(this, msg, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, msg, 
safeguard); } _ = this.bltn().writeNoIO(.stderr, msg); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } const cwd_str = this.bltn().parentCmd().base.shell.cwd(); if (this.bltn().stdout.needsIO()) |safeguard| { this.state = .{ .waiting_io = .{ .kind = .stdout } }; - this.bltn().stdout.enqueueFmtBltn(this, null, "{s}\n", .{cwd_str}, safeguard); - return Maybe(void).success; + return this.bltn().stdout.enqueueFmtBltn(this, null, "{s}\n", .{cwd_str}, safeguard); } const buf = this.bltn().fmtErrorArena(null, "{s}\n", .{cwd_str}); _ = this.bltn().writeNoIO(.stdout, buf); this.state = .done; - this.bltn().done(0); - return Maybe(void).success; + return this.bltn().done(0); } -pub fn next(this: *Pwd) void { +pub fn next(this: *Pwd) Yield { while (!(this.state == .err or this.state == .done)) { switch (this.state) { - .waiting_io => return, + .waiting_io => return .suspended, .idle => @panic("Unexpected \"idle\" state in Pwd. This indicates a bug in Bun. Please file a GitHub issue."), .done, .err => unreachable, } } switch (this.state) { - .done => this.bltn().done(0), - .err => this.bltn().done(1), - else => {}, + .done => return this.bltn().done(0), + .err => return this.bltn().done(1), + else => unreachable, } } -pub fn onIOWriterChunk(this: *Pwd, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Pwd, _: usize, e: ?JSC.SystemError) Yield { if (comptime bun.Environment.allow_assert) { assert(this.state == .waiting_io); } @@ -61,8 +57,7 @@ pub fn onIOWriterChunk(this: *Pwd, _: usize, e: ?JSC.SystemError) void { if (e != null) { defer e.?.deref(); this.state = .err; - this.next(); - return; + return this.next(); } this.state = switch (this.state.waiting_io.kind) { @@ -70,7 +65,7 @@ pub fn onIOWriterChunk(this: *Pwd, _: usize, e: ?JSC.SystemError) void { .stderr => .err, }; - this.next(); + return this.next(); } pub fn deinit(this: *Pwd) void { @@ -85,11 +80,11 @@ pub inline fn bltn(this: *Pwd) *Builtin { // -- const bun = @import("bun"); const shell = bun.shell; +const Yield = shell.Yield; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; const Pwd = @This(); const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const assert = bun.assert; diff --git a/src/shell/builtin/rm.zig b/src/shell/builtin/rm.zig index 5db57a6b44..8eca2e16da 100644 --- a/src/shell/builtin/rm.zig +++ b/src/shell/builtin/rm.zig @@ -45,6 +45,7 @@ state: union(enum) { } }, done: struct { exit_code: ExitCode }, + waiting_write_err, err: ExitCode, } = .idle, @@ -100,13 +101,14 @@ pub const Opts = struct { }; }; -pub fn start(this: *Rm) Maybe(void) { +pub fn start(this: *Rm) Yield { return this.next(); } -pub noinline fn next(this: *Rm) Maybe(void) { +pub noinline fn next(this: *Rm) Yield { while (this.state != .done and this.state != .err) { switch (this.state) { + .waiting_write_err => return .suspended, .idle => { this.state = .{ .parse_opts = .{ @@ -127,14 +129,12 @@ pub noinline fn next(this: *Rm) Maybe(void) { const error_string = Builtin.Kind.usageString(.rm); if (this.bltn().stderr.needsIO()) |safeguard| { parse_opts.state = .wait_write_err; - this.bltn().stderr.enqueue(this, error_string, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, error_string, safeguard); } _ = this.bltn().writeNoIO(.stderr, error_string); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } const idx = parse_opts.idx; @@ -156,14 +156,11 @@ pub 
noinline fn next(this: *Rm) Maybe(void) { const buf = "rm: \"-i\" is not supported yet"; if (this.bltn().stderr.needsIO()) |safeguard| { parse_opts.state = .wait_write_err; - this.bltn().stderr.enqueue(this, buf, safeguard); - continue; + return this.bltn().stderr.enqueue(this, buf, safeguard); } _ = this.bltn().writeNoIO(.stderr, buf); - - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } const filepath_args_start = idx; @@ -174,7 +171,12 @@ pub noinline fn next(this: *Rm) Maybe(void) { var buf: bun.PathBuffer = undefined; const cwd = switch (Syscall.getcwd(&buf)) { .err => |err| { - return .{ .err = err }; + const errbuf = this.bltn().fmtErrorArena( + .rm, + "{s}: {s}", + .{ "getcwd", err.msg() orelse "failed to get cwd" }, + ); + return this.writeFailingError(errbuf, 1); }, .result => |cwd| cwd, }; @@ -192,16 +194,14 @@ pub noinline fn next(this: *Rm) Maybe(void) { if (is_root) { if (this.bltn().stderr.needsIO()) |safeguard| { parse_opts.state = .wait_write_err; - this.bltn().stderr.enqueueFmtBltn(this, .rm, "\"{s}\" may not be removed\n", .{resolved_path}, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueueFmtBltn(this, .rm, "\"{s}\" may not be removed\n", .{resolved_path}, safeguard); } const error_string = this.bltn().fmtErrorArena(.rm, "\"{s}\" may not be removed\n", .{resolved_path}); _ = this.bltn().writeNoIO(.stderr, error_string); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } } } @@ -224,28 +224,24 @@ pub noinline fn next(this: *Rm) Maybe(void) { const error_string = "rm: illegal option -- -\n"; if (this.bltn().stderr.needsIO()) |safeguard| { parse_opts.state = .wait_write_err; - this.bltn().stderr.enqueue(this, error_string, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, error_string, safeguard); } _ = this.bltn().writeNoIO(.stderr, error_string); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); }, .illegal_option_with_flag => { const flag = arg; if (this.bltn().stderr.needsIO()) |safeguard| { parse_opts.state = .wait_write_err; - this.bltn().stderr.enqueueFmtBltn(this, .rm, "illegal option -- {s}\n", .{flag[1..]}, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueueFmtBltn(this, .rm, "illegal option -- {s}\n", .{flag[1..]}, safeguard); } const error_string = this.bltn().fmtErrorArena(.rm, "illegal option -- {s}\n", .{flag[1..]}); _ = this.bltn().writeNoIO(.stderr, error_string); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); }, } }, @@ -284,26 +280,25 @@ pub noinline fn next(this: *Rm) Maybe(void) { } // do nothing - return Maybe(void).success; + return .suspended; }, .done, .err => unreachable, } } switch (this.state) { - .done => this.bltn().done(0), - .err => this.bltn().done(this.state.err), - else => {}, + .done => return this.bltn().done(0), + .err => return this.bltn().done(this.state.err), + else => unreachable, } - - return Maybe(void).success; } -pub fn onIOWriterChunk(this: *Rm, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Rm, _: usize, e: ?JSC.SystemError) Yield { log("Rm(0x{x}).onIOWriterChunk()", .{@intFromPtr(this)}); if (comptime bun.Environment.allow_assert) { assert((this.state == .parse_opts and this.state.parse_opts.state == .wait_write_err) or - (this.state == .exec and this.state.exec.state == .waiting and this.state.exec.output_count.load(.seq_cst) > 0)); + (this.state == .exec and this.state.exec.state 
== .waiting and this.state.exec.output_count.load(.seq_cst) > 0) or + this.state == .waiting_write_err); } if (this.state == .exec and this.state.exec.state == .waiting) { @@ -311,21 +306,18 @@ pub fn onIOWriterChunk(this: *Rm, _: usize, e: ?JSC.SystemError) void { this.state.exec.incrementOutputCount(.output_done); if (this.state.exec.state.tasksDone() >= this.state.exec.total_tasks and this.state.exec.getOutputCount(.output_done) >= this.state.exec.getOutputCount(.output_count)) { const code: ExitCode = if (this.state.exec.err != null) 1 else 0; - this.bltn().done(code); - return; + return this.bltn().done(code); } - return; + return .suspended; } if (e != null) { defer e.?.deref(); this.state = .{ .err = @intFromEnum(e.?.getErrno()) }; - this.bltn().done(e.?.getErrno()); - return; + return this.bltn().done(e.?.getErrno()); } - this.bltn().done(1); - return; + return this.bltn().done(1); } pub fn deinit(this: *Rm) void { @@ -421,7 +413,7 @@ pub fn onShellRmTaskDone(this: *Rm, task: *ShellRmTask) void { if (this.bltn().stderr.needsIO()) |safeguard| { log("Rm(0x{x}) task=0x{x} ERROR={s}", .{ @intFromPtr(this), @intFromPtr(task), error_string }); exec.incrementOutputCount(.output_count); - this.bltn().stderr.enqueue(this, error_string, safeguard); + this.bltn().stderr.enqueue(this, error_string, safeguard).run(); return; } else { _ = this.bltn().writeNoIO(.stderr, error_string); @@ -437,25 +429,22 @@ pub fn onShellRmTaskDone(this: *Rm, task: *ShellRmTask) void { exec.getOutputCount(.output_done) >= exec.getOutputCount(.output_count)) { this.state = .{ .done = .{ .exit_code = if (exec.err) |theerr| theerr.errno else 0 } }; - _ = this.next(); - return; + this.next().run(); } } -fn writeVerbose(this: *Rm, verbose: *ShellRmTask.DirTask) void { +fn writeVerbose(this: *Rm, verbose: *ShellRmTask.DirTask) Yield { if (this.bltn().stdout.needsIO()) |safeguard| { const buf = verbose.takeDeletedEntries(); defer buf.deinit(); - this.bltn().stdout.enqueue(this, buf.items, safeguard); - } else { - _ = this.bltn().writeNoIO(.stdout, verbose.deleted_entries.items); - _ = this.state.exec.incrementOutputCount(.output_done); - if (this.state.exec.state.tasksDone() >= this.state.exec.total_tasks and this.state.exec.getOutputCount(.output_done) >= this.state.exec.getOutputCount(.output_count)) { - this.bltn().done(if (this.state.exec.err != null) @as(ExitCode, 1) else @as(ExitCode, 0)); - return; - } - return; + return this.bltn().stdout.enqueue(this, buf.items, safeguard); } + _ = this.bltn().writeNoIO(.stdout, verbose.deleted_entries.items); + _ = this.state.exec.incrementOutputCount(.output_done); + if (this.state.exec.state.tasksDone() >= this.state.exec.total_tasks and this.state.exec.getOutputCount(.output_done) >= this.state.exec.getOutputCount(.output_count)) { + return this.bltn().done(if (this.state.exec.err != null) @as(ExitCode, 1) else @as(ExitCode, 0)); + } + return .done; } pub const ShellRmTask = struct { @@ -530,7 +519,7 @@ pub const ShellRmTask = struct { pub fn runFromMainThread(this: *DirTask) void { debug("DirTask(0x{x}, path={s}) runFromMainThread", .{ @intFromPtr(this), this.path }); - this.task_manager.rm.writeVerbose(this); + this.task_manager.rm.writeVerbose(this).run(); } pub fn runFromMainThreadMini(this: *DirTask, _: *void) void { @@ -871,7 +860,7 @@ pub const ShellRmTask = struct { return Maybe(void).success; } - var iterator = DirIterator.iterate(fd.stdDir(), .u8); + var iterator = DirIterator.iterate(fd, .u8); var entry = iterator.next(); var remove_child_vtable = 
RemoveFileVTable{ @@ -1194,10 +1183,21 @@ inline fn fastMod(val: anytype, comptime rhs: comptime_int) @TypeOf(val) { return val & (rhs - 1); } -// -- +pub fn writeFailingError(this: *Rm, buf: []const u8, exit_code: ExitCode) Yield { + if (this.bltn().stderr.needsIO()) |safeguard| { + this.state = .waiting_write_err; + return this.bltn().stderr.enqueue(this, buf, safeguard); + } + + _ = this.bltn().writeNoIO(.stderr, buf); + + return this.bltn().done(exit_code); +} + const log = bun.Output.scoped(.Rm, true); const bun = @import("bun"); const shell = bun.shell; +const Yield = shell.Yield; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; diff --git a/src/shell/builtin/seq.zig b/src/shell/builtin/seq.zig index ac8e9cf605..8e55a3d530 100644 --- a/src/shell/builtin/seq.zig +++ b/src/shell/builtin/seq.zig @@ -1,4 +1,4 @@ -state: enum { idle, waiting_io, err, done } = .idle, +state: enum { idle, err, done } = .idle, buf: std.ArrayListUnmanaged(u8) = .{}, _start: f32 = 1, _end: f32 = 1, @@ -7,7 +7,7 @@ separator: []const u8 = "\n", terminator: []const u8 = "", fixed_width: bool = false, -pub fn start(this: *@This()) Maybe(void) { +pub fn start(this: *@This()) Yield { const args = this.bltn().argsSlice(); var iter = bun.SliceIterator([*:0]const u8).init(args); @@ -71,18 +71,16 @@ pub fn start(this: *@This()) Maybe(void) { return this.do(); } -fn fail(this: *@This(), msg: []const u8) Maybe(void) { +fn fail(this: *@This(), msg: []const u8) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .err; - this.bltn().stderr.enqueue(this, msg, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, msg, safeguard); } _ = this.bltn().writeNoIO(.stderr, msg); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } -fn do(this: *@This()) Maybe(void) { +fn do(this: *@This()) Yield { var current = this._start; var arena = std.heap.ArenaAllocator.init(bun.default_allocator); defer arena.deinit(); @@ -97,34 +95,30 @@ fn do(this: *@This()) Maybe(void) { this.state = .done; if (this.bltn().stdout.needsIO()) |safeguard| { - this.bltn().stdout.enqueue(this, this.buf.items, safeguard); - } else { - this.bltn().done(0); + return this.bltn().stdout.enqueue(this, this.buf.items, safeguard); } - return Maybe(void).success; + return this.bltn().done(0); } -fn print(this: *@This(), msg: []const u8) Maybe(void) { +fn print(this: *@This(), msg: []const u8) void { if (this.bltn().stdout.needsIO() != null) { this.buf.appendSlice(bun.default_allocator, msg) catch bun.outOfMemory(); - return Maybe(void).success; + return; } - const res = this.bltn().writeNoIO(.stdout, msg); - if (res == .err) return Maybe(void).initErr(res.err); - return Maybe(void).success; + _ = this.bltn().writeNoIO(.stdout, msg); + return; } -pub fn onIOWriterChunk(this: *@This(), _: usize, maybe_e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *@This(), _: usize, maybe_e: ?JSC.SystemError) Yield { if (maybe_e) |e| { defer e.deref(); this.state = .err; - this.bltn().done(1); - return; + return this.bltn().done(1); } switch (this.state) { - .done => this.bltn().done(0), - .err => this.bltn().done(1), - else => {}, + .done => return this.bltn().done(0), + .err => return this.bltn().done(1), + .idle => bun.shell.unreachableState("Seq.onIOWriterChunk", "idle"), } } @@ -140,9 +134,9 @@ pub inline fn bltn(this: *@This()) *Builtin { // -- const bun = @import("bun"); +const Yield = bun.shell.Yield; const 
interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const std = @import("std"); diff --git a/src/shell/builtin/touch.zig b/src/shell/builtin/touch.zig index 5029b3f6e7..77e17cc137 100644 --- a/src/shell/builtin/touch.zig +++ b/src/shell/builtin/touch.zig @@ -25,7 +25,7 @@ pub fn deinit(this: *Touch) void { log("{} deinit", .{this}); } -pub fn start(this: *Touch) Maybe(void) { +pub fn start(this: *Touch) Yield { const filepath_args = switch (this.opts.parse(this.bltn().argsSlice())) { .ok => |filepath_args| filepath_args, .err => |e| { @@ -35,12 +35,10 @@ pub fn start(this: *Touch) Maybe(void) { .unsupported => |unsupported| this.bltn().fmtErrorArena(.touch, "unsupported option, please open a GitHub issue -- {s}\n", .{unsupported}), }; - _ = this.writeFailingError(buf, 1); - return Maybe(void).success; + return this.writeFailingError(buf, 1); }, } orelse { - _ = this.writeFailingError(Builtin.Kind.touch.usageString(), 1); - return Maybe(void).success; + return this.writeFailingError(Builtin.Kind.touch.usageString(), 1); }; this.state = .{ @@ -49,12 +47,10 @@ pub fn start(this: *Touch) Maybe(void) { }, }; - _ = this.next(); - - return Maybe(void).success; + return this.next(); } -pub fn next(this: *Touch) void { +pub fn next(this: *Touch) Yield { switch (this.state) { .idle => @panic("Invalid state"), .exec => { @@ -63,10 +59,9 @@ pub fn next(this: *Touch) void { if (this.state.exec.tasks_done >= this.state.exec.tasks_count and this.state.exec.output_done >= this.state.exec.output_waiting) { const exit_code: ExitCode = if (this.state.exec.err != null) 1 else 0; this.state = .done; - this.bltn().done(exit_code); - return; + return this.bltn().done(exit_code); } - return; + return .suspended; } exec.started = true; @@ -77,33 +72,32 @@ pub fn next(this: *Touch) void { var task = ShellTouchTask.create(this, this.opts, dir_to_mk, this.bltn().parentCmd().base.shell.cwdZ()); task.schedule(); } + return .suspended; }, - .waiting_write_err => return, - .done => this.bltn().done(0), + .waiting_write_err => return .failed, + .done => return this.bltn().done(0), } } -pub fn onIOWriterChunk(this: *Touch, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Touch, _: usize, e: ?JSC.SystemError) Yield { if (this.state == .waiting_write_err) { return this.bltn().done(1); } if (e) |err| err.deref(); - this.next(); + return this.next(); } -pub fn writeFailingError(this: *Touch, buf: []const u8, exit_code: ExitCode) Maybe(void) { +pub fn writeFailingError(this: *Touch, buf: []const u8, exit_code: ExitCode) Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state = .waiting_write_err; - this.bltn().stderr.enqueue(this, buf, safeguard); - return Maybe(void).success; + return this.bltn().stderr.enqueue(this, buf, safeguard); } _ = this.bltn().writeNoIO(.stderr, buf); - this.bltn().done(exit_code); - return Maybe(void).success; + return this.bltn().done(exit_code); } pub fn onShellTouchTaskDone(this: *Touch, task: *ShellTouchTask) void { @@ -121,11 +115,11 @@ pub fn onShellTouchTaskDone(this: *Touch, task: *ShellTouchTask) void { }); const error_string = this.bltn().taskErrorToString(.touch, e); this.state.exec.err = e; - output_task.start(error_string); + output_task.start(error_string).run(); return; } - this.next(); + this.next().run(); } pub const ShellTouchOutputTask = OutputTask(Touch, .{ @@ -137,38 +131,36 @@ pub const ShellTouchOutputTask = 
OutputTask(Touch, .{ }); const ShellTouchOutputTaskVTable = struct { - pub fn writeErr(this: *Touch, childptr: anytype, errbuf: []const u8) CoroutineResult { + pub fn writeErr(this: *Touch, childptr: anytype, errbuf: []const u8) ?Yield { if (this.bltn().stderr.needsIO()) |safeguard| { this.state.exec.output_waiting += 1; - this.bltn().stderr.enqueue(childptr, errbuf, safeguard); - return .yield; + return this.bltn().stderr.enqueue(childptr, errbuf, safeguard); } _ = this.bltn().writeNoIO(.stderr, errbuf); - return .cont; + return null; } pub fn onWriteErr(this: *Touch) void { this.state.exec.output_done += 1; } - pub fn writeOut(this: *Touch, childptr: anytype, output: *OutputSrc) CoroutineResult { + pub fn writeOut(this: *Touch, childptr: anytype, output: *OutputSrc) ?Yield { if (this.bltn().stdout.needsIO()) |safeguard| { this.state.exec.output_waiting += 1; const slice = output.slice(); log("THE SLICE: {d} {s}", .{ slice.len, slice }); - this.bltn().stdout.enqueue(childptr, slice, safeguard); - return .yield; + return this.bltn().stdout.enqueue(childptr, slice, safeguard); } _ = this.bltn().writeNoIO(.stdout, output.slice()); - return .cont; + return null; } pub fn onWriteOut(this: *Touch) void { this.state.exec.output_done += 1; } - pub fn onDone(this: *Touch) void { - this.next(); + pub fn onDone(this: *Touch) Yield { + return this.next(); } }; @@ -398,10 +390,10 @@ const Touch = @This(); const log = debug; const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const ExitCode = shell.ExitCode; const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const WorkPool = bun.JSC.WorkPool; const ResolvePath = bun.path; const Syscall = bun.sys; @@ -414,5 +406,4 @@ const ParseFlagResult = interpreter.ParseFlagResult; const FlagParser = interpreter.FlagParser; const unsupportedFlag = interpreter.unsupportedFlag; const OutputTask = interpreter.OutputTask; -const CoroutineResult = interpreter.CoroutineResult; const OutputSrc = interpreter.OutputSrc; diff --git a/src/shell/builtin/true.zig b/src/shell/builtin/true.zig index b647d50e42..c01bffa576 100644 --- a/src/shell/builtin/true.zig +++ b/src/shell/builtin/true.zig @@ -1,16 +1,15 @@ -pub fn start(this: *@This()) Maybe(void) { - this.bltn().done(0); - return Maybe(void).success; -} - -pub fn onIOWriterChunk(_: *@This(), _: usize, _: ?JSC.SystemError) void { - // no IO is done +pub fn start(this: *@This()) Yield { + return this.bltn().done(0); } pub fn deinit(this: *@This()) void { _ = this; } +pub fn onIOWriterChunk(_: *@This(), _: usize, _: ?JSC.SystemError) Yield { + return .done; +} + pub inline fn bltn(this: *@This()) *Builtin { const impl: *Builtin.Impl = @alignCast(@fieldParentPtr("true", this)); return @fieldParentPtr("impl", impl); @@ -18,9 +17,9 @@ pub inline fn bltn(this: *@This()) *Builtin { // -- const bun = @import("bun"); +const Yield = bun.shell.Yield; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; const Builtin = Interpreter.Builtin; const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; diff --git a/src/shell/builtin/which.zig b/src/shell/builtin/which.zig index a4f99e6bbd..50b55ee488 100644 --- a/src/shell/builtin/which.zig +++ b/src/shell/builtin/which.zig @@ -18,22 +18,20 @@ state: union(enum) { err: JSC.SystemError, } = .idle, -pub fn start(this: *Which) Maybe(void) { +pub fn start(this: *Which) Yield { const args = this.bltn().argsSlice(); if (args.len == 0) { if (this.bltn().stdout.needsIO()) |safeguard| { this.state = 
.one_arg; - this.bltn().stdout.enqueue(this, "\n", safeguard); - return Maybe(void).success; + return this.bltn().stdout.enqueue(this, "\n", safeguard); } _ = this.bltn().writeNoIO(.stdout, "\n"); - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } if (this.bltn().stdout.needsIO() == null) { - const path_buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(path_buf); + const path_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(path_buf); const PATH = this.bltn().parentCmd().base.shell.export_env.get(EnvStr.initSlice("PATH")) orelse EnvStr.initSlice(""); var had_not_found = false; for (args) |arg_raw| { @@ -47,8 +45,7 @@ pub fn start(this: *Which) Maybe(void) { _ = this.bltn().writeNoIO(.stdout, resolved); } - this.bltn().done(@intFromBool(had_not_found)); - return Maybe(void).success; + return this.bltn().done(@intFromBool(had_not_found)); } this.state = .{ @@ -58,63 +55,56 @@ pub fn start(this: *Which) Maybe(void) { .state = .none, }, }; - this.next(); - return Maybe(void).success; + return this.next(); } -pub fn next(this: *Which) void { +pub fn next(this: *Which) Yield { var multiargs = &this.state.multi_args; if (multiargs.arg_idx >= multiargs.args_slice.len) { // Done - this.bltn().done(@intFromBool(multiargs.had_not_found)); - return; + return this.bltn().done(@intFromBool(multiargs.had_not_found)); } const arg_raw = multiargs.args_slice[multiargs.arg_idx]; const arg = arg_raw[0..std.mem.len(arg_raw)]; - const path_buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(path_buf); + const path_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(path_buf); const PATH = this.bltn().parentCmd().base.shell.export_env.get(EnvStr.initSlice("PATH")) orelse EnvStr.initSlice(""); const resolved = which(path_buf, PATH.slice(), this.bltn().parentCmd().base.shell.cwdZ(), arg) orelse { multiargs.had_not_found = true; if (this.bltn().stdout.needsIO()) |safeguard| { multiargs.state = .waiting_write; - this.bltn().stdout.enqueueFmtBltn(this, null, "{s} not found\n", .{arg}, safeguard); - // yield execution - return; + return this.bltn().stdout.enqueueFmtBltn(this, null, "{s} not found\n", .{arg}, safeguard); } const buf = this.bltn().fmtErrorArena(null, "{s} not found\n", .{arg}); _ = this.bltn().writeNoIO(.stdout, buf); - this.argComplete(); - return; + return this.argComplete(); }; if (this.bltn().stdout.needsIO()) |safeguard| { multiargs.state = .waiting_write; - this.bltn().stdout.enqueueFmtBltn(this, null, "{s}\n", .{resolved}, safeguard); - return; + return this.bltn().stdout.enqueueFmtBltn(this, null, "{s}\n", .{resolved}, safeguard); } const buf = this.bltn().fmtErrorArena(null, "{s}\n", .{resolved}); _ = this.bltn().writeNoIO(.stdout, buf); - this.argComplete(); - return; + return this.argComplete(); } -fn argComplete(this: *Which) void { +fn argComplete(this: *Which) Yield { if (comptime bun.Environment.allow_assert) { assert(this.state == .multi_args and this.state.multi_args.state == .waiting_write); } this.state.multi_args.arg_idx += 1; this.state.multi_args.state = .none; - this.next(); + return this.next(); } -pub fn onIOWriterChunk(this: *Which, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Which, _: usize, e: ?JSC.SystemError) Yield { if (comptime bun.Environment.allow_assert) { assert(this.state == .one_arg or (this.state == .multi_args and this.state.multi_args.state == .waiting_write)); @@ -122,17 +112,15 @@ pub fn onIOWriterChunk(this: *Which, _: usize, e: ?JSC.SystemError) 
void { if (e != null) { this.state = .{ .err = e.? }; - this.bltn().done(e.?.getErrno()); - return; + return this.bltn().done(e.?.getErrno()); } if (this.state == .one_arg) { // Calling which with on arguments returns exit code 1 - this.bltn().done(1); - return; + return this.bltn().done(1); } - this.argComplete(); + return this.argComplete(); } pub fn deinit(this: *Which) void { @@ -151,9 +139,9 @@ const Which = @This(); const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const JSC = bun.JSC; -const Maybe = bun.sys.Maybe; const assert = bun.assert; const interpreter = @import("../interpreter.zig"); diff --git a/src/shell/builtin/yes.zig b/src/shell/builtin/yes.zig index 6ddcedc7cd..bb6ed6445d 100644 --- a/src/shell/builtin/yes.zig +++ b/src/shell/builtin/yes.zig @@ -2,7 +2,7 @@ state: enum { idle, waiting_io, err, done } = .idle, expletive: []const u8 = "y", task: YesTask = undefined, -pub fn start(this: *@This()) Maybe(void) { +pub fn start(this: *@This()) Yield { const args = this.bltn().argsSlice(); if (args.len > 0) { @@ -16,35 +16,31 @@ pub fn start(this: *@This()) Maybe(void) { .concurrent_task = JSC.EventLoopTask.fromEventLoop(evtloop), }; this.state = .waiting_io; - this.bltn().stdout.enqueue(this, this.expletive, safeguard); - this.bltn().stdout.enqueue(this, "\n", safeguard); this.task.enqueue(); - return Maybe(void).success; + return this.bltn().stdout.enqueueFmt(this, "{s}\n", .{this.expletive}, safeguard); } var res: Maybe(usize) = undefined; while (true) { res = this.bltn().writeNoIO(.stdout, this.expletive); if (res == .err) { - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } res = this.bltn().writeNoIO(.stdout, "\n"); if (res == .err) { - this.bltn().done(1); - return Maybe(void).success; + return this.bltn().done(1); } } @compileError(unreachable); } -pub fn onIOWriterChunk(this: *@This(), _: usize, maybe_e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *@This(), _: usize, maybe_e: ?JSC.SystemError) Yield { if (maybe_e) |e| { defer e.deref(); this.state = .err; - this.bltn().done(1); - return; + return this.bltn().done(1); } + return .suspended; } pub inline fn bltn(this: *@This()) *Builtin { @@ -72,8 +68,7 @@ pub const YesTask = struct { const yes: *Yes = @fieldParentPtr("task", this); // Manually make safeguard since this task should not be created if output does not need IO - yes.bltn().stdout.enqueue(yes, yes.expletive, .output_needs_io); - yes.bltn().stdout.enqueue(yes, "\n", .output_needs_io); + yes.bltn().stdout.enqueueFmt(yes, "{s}\n", .{yes.expletive}, .output_needs_io).run(); this.enqueue(); } @@ -85,6 +80,7 @@ pub const YesTask = struct { // -- const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const interpreter = @import("../interpreter.zig"); const Interpreter = interpreter.Interpreter; diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index 067299760b..a62c7e12b5 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -1,21 +1,62 @@ //! The interpreter for the shell language //! -//! Normally, the implementation would be a very simple tree-walk of the AST, -//! but it needs to be non-blocking, and Zig does not have coroutines yet, so -//! this implementation is half tree-walk half one big state machine. The state -//! machine part manually keeps track of execution state (which coroutines would -//! do for us), but makes the code very confusing because control flow is less obvious. +//! 
There are several constraints on the Bun shell language that make this +//! interpreter implementation unique: //! -//! Things to note: -//! - If you want to do something analogous to yielding execution, you must -//! `return` from the function. For example in the code we start an async -//! BufferedWriter and "yield" execution by calling =.start()= on the writer and -//! then `return`ing form the function -//! - Sometimes a state machine will immediately finish and deinit itself so -//! that might cause some unintuitive things to happen. For example if you -//! `defer` some code, then try to yield execution to some state machine struct, -//! and it immediately finishes, it will deinit itself and the defer code might -//! use undefined memory. +//! 1. We try to keep everything in the Bun process as much as possible for +//! performance reasons and also to leverage Bun's existing IO/FS code +//! 2. We try to use non-blocking IO operations as much as possible so the +//! shell does not block the main JS thread +//! 3. Zig does not have coroutines (yet) +//! +//! The idea is that this is a tree-walking interpreter. Except it's not. +//! +//! Why not? Because 99% of operations in the shell are IO, and we need to do +//! non-blocking IO because Bun is a JS runtime. +//! +//! So what do we do? Instead of iteratively walking the AST like in a traditional +//! tree-walking interpreter, we're also going to build up a tree of state-machines +//! (an AST node becomes a state-machine node), so we can suspend and resume +//! execution without blocking the main thread. +//! +//! We'll also need to do things in continuation-passing style, see `Yield.zig` for +//! more on that. +//! +//! Once all these pieces come together, this ends up being a: +//! "state-machine based [tree-walking], [trampoline]-driven [continuation-passing style] interpreter" +//! +//! [tree-walking]: https://en.wikipedia.org/wiki/Interpreter_(computing)#Abstract_syntax_tree_interpreters +//! [trampoline]: https://en.wikipedia.org/wiki/Trampoline_(computing) +//! [continuation-passing style]: https://en.wikipedia.org/wiki/Continuation-passing_style +//! +//! # Memory management +//! +//! Almost all allocations go through the `AllocationScope` allocator. This +//! tracks memory allocations and frees in debug builds (or builds with asan +//! enabled) and helps us catch memory leaks. +//! +//! The underlying parent allocator that every `AllocationScope` uses in the +//! shell is `bun.default_allocator`. This means in builds of Bun which do not +//! have `AllocationScope` enabled, every allocation just goes straight through +//! to `bun.default_allocator`. +//! +//! Usually every state machine node ends up creating a new allocation scope, +//! so an `AllocationScope` is stored in the base header struct (see `Base.zig`) +//! that all state-machine nodes include in their layout. +//! +//! You will often see `Base.initWithNewAllocScope` to create a new state machine node +//! and allocation scope. +//! +//! Sometimes it is necessary to "leak" an allocation from its scope. For +//! example, argument expansion happens in an allocation scope inside +//! `Expansion.zig`. +//! +//! But the string that is expanded may end up becoming the key/value of an +//! environment variable, for which we internally use the reference-counted +//! `EnvStr`. When we turn it into an `EnvStr`, the reference counting scheme is +//! responsible for managing the memory so we can call
`allocScope.leakSlice(str)` to tell it not to track the allocation anymore +//! and let `EnvStr` handle it. const std = @import("std"); const builtin = @import("builtin"); const string = []const u8; @@ -39,10 +80,10 @@ const windows = bun.windows; const uv = windows.libuv; const Maybe = JSC.Maybe; const WTFStringImplStruct = @import("../string.zig").WTFStringImplStruct; +const Yield = shell.Yield; pub const Pipe = [2]bun.FileDescriptor; const shell = bun.shell; -const ShellError = shell.ShellError; const ast = shell.AST; pub const SmolList = shell.SmolList; @@ -87,6 +128,11 @@ const assert = bun.assert; /// safely assume the stdout/stderr they are working with require IO. pub const OutputNeedsIOSafeGuard = enum(u0) { output_needs_io }; +/// Similar to `OutputNeedsIOSafeGuard` but to ensure a function is +/// called at the "top" of the call-stack relative to the interpreter's +/// execution. +pub const CallstackGuard = enum(u0) { __i_know_what_i_am_doing }; + pub const ExitCode = u16; pub const StateKind = enum(u8) { @@ -231,7 +277,7 @@ pub const Interpreter = struct { /// This should be allocated using the arena jsobjs: []JSValue, - root_shell: ShellState, + root_shell: ShellExecEnv, root_io: IO, has_pending_activity: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), @@ -252,6 +298,8 @@ pub const Interpreter = struct { exit_code: ?ExitCode = 0, this_jsvalue: JSValue = .zero, + __alloc_scope: if (bun.Environment.enableAllocScopes) bun.AllocationScope else void, + // Here are all the state nodes: pub const State = @import("./states/Base.zig"); pub const Script = @import("./states/Script.zig"); @@ -272,12 +320,12 @@ pub const Interpreter = struct { /// During execution, the shell has an "environment" or "context". This /// contains important details like environment variables, cwd, etc. Every - /// state node is given a `*ShellState` which is stored in its header (see + /// state node is given a `*ShellExecEnv` which is stored in its header (see /// `states/Base.zig`). /// /// Certain state nodes like subshells, pipelines, and cmd substitutions - /// will duplicate their `*ShellState` so that they can make modifications - /// without affecting their parent `ShellState`. This is done in the + /// will duplicate their `*ShellExecEnv` so that they can make modifications + /// without affecting their parent `ShellExecEnv`. This is done in the /// `.dupeForSubshell` function. /// /// For example: @@ -291,8 +339,10 @@ pub const Interpreter = struct { /// Note that stdin/stdout/stderr is also considered to be part of the /// environment/context, but we keep that in a separate struct called `IO`. We do /// this because stdin/stdout/stderr changes a lot and we don't want to copy - /// this `ShellState` struct too much. - pub const ShellState = struct { + /// this `ShellExecEnv` struct too much. 
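A minimal sketch of the trampoline-driven, continuation-passing control flow the doc comment above describes, assuming toy `Yield` and `Node` types (Bun's real `Yield.zig` carries many more variants than this):

```zig
const std = @import("std");

// Toy stand-in for the real Yield union: a step either finishes,
// suspends (e.g. while waiting on IO), or hands the trampoline a
// continuation to bounce to next.
const Yield = union(enum) {
    done,
    suspended,
    next_node: *Node,

    // The trampoline: loop instead of recursing, so deeply nested
    // scripts cannot overflow the native stack.
    fn run(self: Yield) void {
        var current = self;
        while (current == .next_node) {
            current = current.next_node.step();
        }
    }
};

const Node = struct {
    remaining: u32,

    fn step(self: *Node) Yield {
        if (self.remaining == 0) return .done;
        self.remaining -= 1;
        // Return a continuation instead of calling step() recursively.
        return .{ .next_node = self };
    }
};

pub fn main() void {
    var node = Node{ .remaining = 1_000_000 };
    (Yield{ .next_node = &node }).run();
    std.debug.print("one million steps, constant stack depth\n", .{});
}
```

Returning a continuation and looping inside `run` is what keeps the native stack flat: a deeply nested script becomes many trips around the `while` loop rather than many recursive calls.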
+ /// + /// More info here: https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_12 + pub const ShellExecEnv = struct { kind: Kind = .normal, /// This is the buffered stdout/stderr that captures the entire @@ -323,6 +373,8 @@ pub const Interpreter = struct { async_pids: SmolList(pid_t, 4) = SmolList(pid_t, 4).zeroes, + __alloc_scope: if (bun.Environment.enableAllocScopes) *bun.AllocationScope else void, + const pid_t = if (bun.Environment.isPosix) std.posix.pid_t else uv.uv_pid_t; const Bufio = union(enum) { owned: bun.ByteList, borrowed: *bun.ByteList }; @@ -334,51 +386,56 @@ pub const Interpreter = struct { pipeline, }; - pub fn buffered_stdout(this: *ShellState) *bun.ByteList { + pub fn allocator(this: *ShellExecEnv) std.mem.Allocator { + if (comptime bun.Environment.enableAllocScopes) return this.__alloc_scope.allocator(); + return bun.default_allocator; + } + + pub fn buffered_stdout(this: *ShellExecEnv) *bun.ByteList { return switch (this._buffered_stdout) { .owned => &this._buffered_stdout.owned, .borrowed => this._buffered_stdout.borrowed, }; } - pub fn buffered_stderr(this: *ShellState) *bun.ByteList { + pub fn buffered_stderr(this: *ShellExecEnv) *bun.ByteList { return switch (this._buffered_stderr) { .owned => &this._buffered_stderr.owned, .borrowed => this._buffered_stderr.borrowed, }; } - pub inline fn cwdZ(this: *ShellState) [:0]const u8 { + pub inline fn cwdZ(this: *ShellExecEnv) [:0]const u8 { if (this.__cwd.items.len == 0) return ""; return this.__cwd.items[0..this.__cwd.items.len -| 1 :0]; } - pub inline fn prevCwdZ(this: *ShellState) [:0]const u8 { + pub inline fn prevCwdZ(this: *ShellExecEnv) [:0]const u8 { if (this.__prev_cwd.items.len == 0) return ""; return this.__prev_cwd.items[0..this.__prev_cwd.items.len -| 1 :0]; } - pub inline fn prevCwd(this: *ShellState) []const u8 { + pub inline fn prevCwd(this: *ShellExecEnv) []const u8 { const prevcwdz = this.prevCwdZ(); return prevcwdz[0..prevcwdz.len]; } - pub inline fn cwd(this: *ShellState) []const u8 { + pub inline fn cwd(this: *ShellExecEnv) []const u8 { const cwdz = this.cwdZ(); return cwdz[0..cwdz.len]; } - pub fn deinit(this: *ShellState) void { + pub fn deinit(this: *ShellExecEnv) void { this.deinitImpl(true, true); } /// Doesn't deref `this.io` /// /// If called by interpreter we have to: - /// 1. not free this *ShellState, because its on a field on the interpreter + /// 1. not free this *ShellExecEnv, because its on a field on the interpreter /// 2. 
don't free buffered_stdout and buffered_stderr, because that is used for output - fn deinitImpl(this: *ShellState, comptime destroy_this: bool, comptime free_buffered_io: bool) void { - log("[ShellState] deinit {x}", .{@intFromPtr(this)}); + fn deinitImpl(this: *ShellExecEnv, comptime destroy_this: bool, comptime free_buffered_io: bool) void { + log("[ShellExecEnv] deinit {x}", .{@intFromPtr(this)}); if (comptime free_buffered_io) { if (this._buffered_stdout == .owned) { @@ -396,11 +453,17 @@ pub const Interpreter = struct { this.__prev_cwd.deinit(); closefd(this.cwd_fd); - if (comptime destroy_this) bun.default_allocator.destroy(this); + if (comptime destroy_this) this.allocator().destroy(this); } - pub fn dupeForSubshell(this: *ShellState, allocator: Allocator, io: IO, kind: Kind) Maybe(*ShellState) { - const duped = allocator.create(ShellState) catch bun.outOfMemory(); + pub fn dupeForSubshell( + this: *ShellExecEnv, + alloc_scope: if (bun.Environment.enableAllocScopes) *bun.AllocationScope else void, + alloc: Allocator, + io: IO, + kind: Kind, + ) Maybe(*ShellExecEnv) { + const duped = alloc.create(ShellExecEnv) catch bun.outOfMemory(); const dupedfd = switch (Syscall.dup(this.cwd_fd)) { .err => |err| return .{ .err = err }, @@ -436,19 +499,21 @@ pub const Interpreter = struct { ._buffered_stdout = stdout, ._buffered_stderr = stderr, .shell_env = this.shell_env.clone(), - .cmd_local_env = EnvMap.init(allocator), + .cmd_local_env = EnvMap.init(alloc), .export_env = this.export_env.clone(), .__prev_cwd = this.__prev_cwd.clone() catch bun.outOfMemory(), .__cwd = this.__cwd.clone() catch bun.outOfMemory(), // TODO probably need to use os.dup here .cwd_fd = dupedfd, + .__alloc_scope = alloc_scope, }; return .{ .result = duped }; } - pub fn assignVar(this: *ShellState, interp: *ThisInterpreter, label: EnvStr, value: EnvStr, assign_ctx: AssignCtx) void { + /// NOTE: This will `.ref()` `value`, so you should `defer value.deref()` before handing it to this function.
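To make the `NOTE` on `assignVar` concrete, here is a small sketch of that `.ref()` / `defer deref()` calling convention, using a toy `RcStr` as a stand-in for `EnvStr` (all names here are illustrative, not Bun's API):

```zig
const std = @import("std");

const RcStr = struct {
    refs: u32 = 1,
    data: []const u8,

    fn ref(self: *RcStr) void {
        self.refs += 1;
    }

    fn deref(self: *RcStr) void {
        self.refs -= 1;
        if (self.refs == 0) std.debug.print("last ref to \"{s}\" dropped\n", .{self.data});
    }
};

var env_value: ?*RcStr = null;

// Mirrors the assignVar contract: the callee takes its own reference,
// so the caller's original reference is balanced by the caller's deref.
fn assignVar(value: *RcStr) void {
    value.ref();
    env_value = value;
}

pub fn main() void {
    var value = RcStr{ .data = "hello" };
    defer value.deref(); // the caller's original reference
    assignVar(&value);
    defer env_value.?.deref(); // the environment's reference
}
```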
+ pub fn assignVar(this: *ShellExecEnv, interp: *ThisInterpreter, label: EnvStr, value: EnvStr, assign_ctx: AssignCtx) void { _ = interp; // autofix switch (assign_ctx) { .cmd => this.cmd_local_env.insert(label, value), @@ -457,15 +522,15 @@ pub const Interpreter = struct { } } - pub fn changePrevCwd(self: *ShellState, interp: *ThisInterpreter) Maybe(void) { + pub fn changePrevCwd(self: *ShellExecEnv, interp: *ThisInterpreter) Maybe(void) { return self.changeCwd(interp, self.prevCwdZ()); } - pub fn changeCwd(this: *ShellState, interp: *ThisInterpreter, new_cwd_: anytype) Maybe(void) { + pub fn changeCwd(this: *ShellExecEnv, interp: *ThisInterpreter, new_cwd_: anytype) Maybe(void) { return this.changeCwdImpl(interp, new_cwd_, false); } - pub fn changeCwdImpl(this: *ShellState, _: *ThisInterpreter, new_cwd_: anytype, comptime in_init: bool) Maybe(void) { + pub fn changeCwdImpl(this: *ShellExecEnv, _: *ThisInterpreter, new_cwd_: anytype, comptime in_init: bool) Maybe(void) { if (comptime @TypeOf(new_cwd_) != [:0]const u8 and @TypeOf(new_cwd_) != []const u8) { @compileError("Bad type for new_cwd " ++ @typeName(@TypeOf(new_cwd_))); } @@ -539,7 +604,7 @@ pub const Interpreter = struct { return Maybe(void).success; } - pub fn getHomedir(self: *ShellState) EnvStr { + pub fn getHomedir(self: *ShellExecEnv) EnvStr { const env_var: ?EnvStr = brk: { const static_str = if (comptime bun.Environment.isWindows) EnvStr.initSlice("USERPROFILE") else EnvStr.initSlice("HOME"); break :brk self.shell_env.get(static_str) orelse self.export_env.get(static_str); @@ -548,24 +613,34 @@ pub const Interpreter = struct { } pub fn writeFailingErrorFmt( - this: *ShellState, + this: *ShellExecEnv, ctx: anytype, enqueueCb: fn (c: @TypeOf(ctx)) void, comptime fmt: []const u8, args: anytype, - ) void { + ) Yield { const io: *IO.OutKind = &@field(ctx.io, "stderr"); switch (io.*) { .fd => |x| { enqueueCb(ctx); - x.writer.enqueueFmt(ctx, x.captured, fmt, args); + return x.writer.enqueueFmt(ctx, x.captured, fmt, args); }, .pipe => { const bufio: *bun.ByteList = this.buffered_stderr(); bufio.appendFmt(bun.default_allocator, fmt, args) catch bun.outOfMemory(); - ctx.parent.childDone(ctx, 1); + return ctx.parent.childDone(ctx, 1); + }, + // FIXME: This is not correct? This would just make the entire shell hang I think? + .ignore => { + const childptr = IOWriterChildPtr.init(ctx); + // TODO: is this necessary + const count = std.fmt.count(fmt, args); + return .{ .on_io_writer_chunk = .{ + .child = childptr.asAnyOpaque(), + .err = null, + .written = count, + } }; }, - .ignore => {}, } } }; @@ -581,9 +656,9 @@ pub const Interpreter = struct { syscall: Syscall.Error, other: ShellErrorKind, - fn toJSC(this: ShellErrorCtx, globalThis: *JSGlobalObject) JSValue { + fn toJS(this: ShellErrorCtx, globalThis: *JSGlobalObject) JSValue { return switch (this) { - .syscall => |err| err.toJSC(globalThis), + .syscall => |err| err.toJS(globalThis), .other => |err| bun.JSC.ZigString.fromBytes(@errorName(err)).toJS(globalThis), }; } @@ -745,8 +820,8 @@ pub const Interpreter = struct { }; // Avoid the large stack allocation on Windows. 
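The hunk just below also swaps a one-off `bun.PathBuffer` heap allocation for `bun.path_buffer_pool.get()` / `.put()`. A sketch of that pooling pattern, with a hypothetical pool type whose internals are assumed for illustration rather than taken from Bun:

```zig
const std = @import("std");

// Assumed buffer size for the sketch; Bun sizes this to the platform's
// maximum path length.
const PathBuffer = [4096]u8;

// Hypothetical pool with the same get()/put() surface the diff uses;
// reusing buffers avoids a fresh multi-KiB allocation per call site.
const PathBufferPool = struct {
    allocator: std.mem.Allocator,
    free: std.ArrayListUnmanaged(*PathBuffer) = .{},

    fn get(self: *PathBufferPool) *PathBuffer {
        return self.free.pop() orelse
            (self.allocator.create(PathBuffer) catch @panic("out of memory"));
    }

    fn put(self: *PathBufferPool, buf: *PathBuffer) void {
        self.free.append(self.allocator, buf) catch self.allocator.destroy(buf);
    }
};

pub fn main() !void {
    var pool = PathBufferPool{ .allocator = std.heap.page_allocator };
    const buf = pool.get();
    defer pool.put(buf); // same get/defer-put shape as the call sites in the diff
    const cwd = try std.process.getCwd(buf);
    std.debug.print("cwd: {s}\n", .{cwd});
}
```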
- const pathbuf = bun.default_allocator.create(bun.PathBuffer) catch bun.outOfMemory(); - defer bun.default_allocator.destroy(pathbuf); + const pathbuf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(pathbuf); const cwd: [:0]const u8 = switch (Syscall.getcwdZ(pathbuf)) { .result => |cwd| cwd, .err => |err| { @@ -785,7 +860,7 @@ pub const Interpreter = struct { .allocator = allocator, .jsobjs = jsobjs, - .root_shell = ShellState{ + .root_shell = ShellExecEnv{ .shell_env = EnvMap.init(allocator), .cmd_local_env = EnvMap.init(allocator), .export_env = export_env, @@ -793,6 +868,8 @@ pub const Interpreter = struct { .__cwd = cwd_arr, .__prev_cwd = cwd_arr.clone() catch bun.outOfMemory(), .cwd_fd = cwd_fd, + + .__alloc_scope = undefined, }, .root_io = .{ @@ -808,6 +885,7 @@ pub const Interpreter = struct { }, .vm_args_utf8 = std.ArrayList(JSC.ZigString.Slice).init(bun.default_allocator), + .__alloc_scope = if (bun.Environment.enableAllocScopes) bun.AllocationScope.init(allocator) else {}, .globalThis = undefined, }; @@ -815,6 +893,8 @@ pub const Interpreter = struct { if (interpreter.root_shell.changeCwdImpl(interpreter, c, true).asErr()) |e| return .{ .err = .{ .sys = e.toShellSystemError() } }; } + interpreter.root_shell.__alloc_scope = if (bun.Environment.enableAllocScopes) &interpreter.__alloc_scope else {}; + return .{ .result = interpreter }; } @@ -1016,18 +1096,20 @@ pub const Interpreter = struct { } pub fn run(this: *ThisInterpreter) !Maybe(void) { + log("Interpreter(0x{x}) run", .{@intFromPtr(this)}); if (this.setupIOBeforeRun().asErr()) |e| { return .{ .err = e }; } var root = Script.init(this, &this.root_shell, &this.args.script_ast, Script.ParentPtr.init(this), this.root_io.copy()); this.started.store(true, .seq_cst); - root.start(); + root.start().run(); return Maybe(void).success; } pub fn runFromJS(this: *ThisInterpreter, globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + log("Interpreter(0x{x}) runFromJS", .{@intFromPtr(this)}); _ = callframe; // autofix if (this.setupIOBeforeRun().asErr()) |e| { @@ -1039,7 +1121,7 @@ pub const Interpreter = struct { var root = Script.init(this, &this.root_shell, &this.args.script_ast, Script.ParentPtr.init(this), this.root_io.copy()); this.started.store(true, .seq_cst); - root.start(); + root.start().run(); if (globalThis.hasException()) return error.JSError; return .js_undefined; @@ -1060,34 +1142,34 @@ pub const Interpreter = struct { @"async".actuallyDeinit(); this.async_commands_executing -= 1; if (this.async_commands_executing == 0 and this.exit_code != null) { - this.finish(this.exit_code.?); + this.finish(this.exit_code.?).run(); } } - pub fn childDone(this: *ThisInterpreter, child: InterpreterChildPtr, exit_code: ExitCode) void { + pub fn childDone(this: *ThisInterpreter, child: InterpreterChildPtr, exit_code: ExitCode) Yield { if (child.ptr.is(Script)) { const script = child.as(Script); script.deinitFromInterpreter(); this.exit_code = exit_code; - if (this.async_commands_executing == 0) this.finish(exit_code); - return; + if (this.async_commands_executing == 0) return this.finish(exit_code); + return .suspended; } @panic("Bad child"); } - pub fn finish(this: *ThisInterpreter, exit_code: ExitCode) void { - log("finish {d}", .{exit_code}); + pub fn finish(this: *ThisInterpreter, exit_code: ExitCode) Yield { + log("Interpreter(0x{x}) finish {d}", .{ @intFromPtr(this), exit_code }); defer decrPendingActivityFlag(&this.has_pending_activity); if (this.event_loop == .js) { defer 
this.deinitAfterJSRun(); this.exit_code = exit_code; - if (this.this_jsvalue != .zero) { - const this_jsvalue = this.this_jsvalue; + const this_jsvalue = this.this_jsvalue; + if (this_jsvalue != .zero) { if (JSC.Codegen.JSShellInterpreter.resolveGetCached(this_jsvalue)) |resolve| { - this.this_jsvalue = .zero; - const globalThis = this.globalThis; const loop = this.event_loop.js; + const globalThis = this.globalThis; + this.this_jsvalue = .zero; this.keep_alive.disable(); loop.enter(); _ = resolve.call(globalThis, .js_undefined, &.{ @@ -1104,38 +1186,12 @@ pub const Interpreter = struct { this.flags.done = true; this.exit_code = exit_code; } - } - fn errored(this: *ThisInterpreter, the_error: ShellError) void { - _ = the_error; // autofix - defer decrPendingActivityFlag(&this.has_pending_activity); - - if (this.event_loop == .js) { - const this_jsvalue = this.this_jsvalue; - if (this_jsvalue != .zero) { - if (JSC.Codegen.JSShellInterpreter.rejectGetCached(this_jsvalue)) |reject| { - const loop = this.event_loop.js; - const globalThis = this.globalThis; - this.this_jsvalue = .zero; - this.keep_alive.disable(); - - loop.enter(); - _ = reject.call(globalThis, &[_]JSValue{ - JSValue.jsNumberFromChar(1), - this.getBufferedStdout(globalThis), - this.getBufferedStderr(globalThis), - }) catch |err| globalThis.reportActiveExceptionAsUnhandled(err); - JSC.Codegen.JSShellInterpreter.resolveSetCached(this_jsvalue, globalThis, .js_undefined); - JSC.Codegen.JSShellInterpreter.rejectSetCached(this_jsvalue, globalThis, .js_undefined); - - loop.exit(); - } - } - } + return .done; } fn deinitAfterJSRun(this: *ThisInterpreter) void { - log("deinit interpreter", .{}); + log("Interpreter(0x{x}) deinitAfterJSRun", .{@intFromPtr(this)}); for (this.jsobjs) |jsobj| { jsobj.unprotect(); } @@ -1185,7 +1241,7 @@ pub const Interpreter = struct { defer slice.deinit(); switch (this.root_shell.changeCwd(this, slice.slice())) { .err => |e| { - return globalThis.throwValue(e.toJSC(globalThis)); + return globalThis.throwValue(e.toJS(globalThis)); }, .result => {}, } @@ -1228,42 +1284,26 @@ pub const Interpreter = struct { return .js_undefined; } - pub fn isRunning( - this: *ThisInterpreter, - _: *JSGlobalObject, - _: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { + pub fn isRunning(this: *ThisInterpreter, _: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { return JSC.JSValue.jsBoolean(this.hasPendingActivity()); } - pub fn getStarted( - this: *ThisInterpreter, - globalThis: *JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { + pub fn getStarted(this: *ThisInterpreter, globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { _ = globalThis; // autofix _ = callframe; // autofix return JSC.JSValue.jsBoolean(this.started.load(.seq_cst)); } - pub fn getBufferedStdout( - this: *ThisInterpreter, - globalThis: *JSGlobalObject, - ) JSC.JSValue { + pub fn getBufferedStdout(this: *ThisInterpreter, globalThis: *JSGlobalObject) JSC.JSValue { return ioToJSValue(globalThis, this.root_shell.buffered_stdout()); } - pub fn getBufferedStderr( - this: *ThisInterpreter, - globalThis: *JSGlobalObject, - ) JSC.JSValue { + pub fn getBufferedStderr(this: *ThisInterpreter, globalThis: *JSGlobalObject) JSC.JSValue { return ioToJSValue(globalThis, this.root_shell.buffered_stderr()); } - pub fn finalize( - this: *ThisInterpreter, - ) void { + pub fn finalize(this: *ThisInterpreter) void { log("Interpreter(0x{x}) finalize", .{@intFromPtr(this)}); this.deinitFromFinalizer(); } @@ -1303,6 
+1343,8 @@ pub const Interpreter = struct { pub const Builtin = @import("./Builtin.zig"); + /// TODO: Investigate whether or not this can be removed now that we have + /// removed recursion pub const AsyncDeinitReader = IOReader.AsyncDeinitReader; pub const IO = @import("./IO.zig"); @@ -1310,34 +1352,7 @@ pub const Interpreter = struct { pub const IOReaderChildPtr = IOReader.ChildPtr; pub const IOWriter = @import("./IOWriter.zig"); - pub const AsyncDeinitWriter = struct { - ran: bool = false, - - pub fn enqueue(this: *@This()) void { - if (this.ran) return; - this.ran = true; - - var iowriter = this.writer(); - - if (iowriter.evtloop == .js) { - iowriter.evtloop.js.enqueueTaskConcurrent(iowriter.concurrent_task.js.from(this, .manual_deinit)); - } else { - iowriter.evtloop.mini.enqueueTaskConcurrent(iowriter.concurrent_task.mini.from(this, "runFromMainThreadMini")); - } - } - - pub fn writer(this: *@This()) *IOWriter { - return @alignCast(@fieldParentPtr("async_deinit", this)); - } - - pub fn runFromMainThread(this: *@This()) void { - this.writer().deinitOnMainThread(); - } - - pub fn runFromMainThreadMini(this: *@This(), _: *void) void { - this.runFromMainThread(); - } - }; + pub const AsyncDeinitWriter = IOWriter.AsyncDeinitWriter; }; /// Construct a tagged union of the state nodes provided in `TypesValue`. @@ -1365,16 +1380,65 @@ pub fn StatePtrUnion(comptime TypesValue: anytype) type { return Type.ChildPtr; } - /// Starts the state node. - pub fn start(this: @This()) void { + pub fn scopedAllocator(this: @This()) if (bun.Environment.enableAllocScopes) *bun.AllocationScope else void { + if (comptime !bun.Environment.enableAllocScopes) return; + const tags = comptime std.meta.fields(Ptr.Tag); inline for (tags) |tag| { if (this.tagInt() == tag.value) { const Ty = comptime Ptr.typeFromTag(tag.value); Ptr.assert_type(Ty); var casted = this.as(Ty); - casted.start(); - return; + if (comptime Ty == Interpreter) { + return &casted.__alloc_scope; + } + return casted.base.__alloc_scope.scopedAllocator(); + } + } + unknownTag(this.tagInt()); + } + + pub fn allocator(this: @This()) std.mem.Allocator { + const tags = comptime std.meta.fields(Ptr.Tag); + inline for (tags) |tag| { + if (this.tagInt() == tag.value) { + const Ty = comptime Ptr.typeFromTag(tag.value); + Ptr.assert_type(Ty); + var casted = this.as(Ty); + if (comptime Ty == Interpreter) { + if (bun.Environment.enableAllocScopes) return casted.__alloc_scope.allocator(); + return bun.default_allocator; + } + return casted.base.allocator(); + } + } + unknownTag(this.tagInt()); + } + + pub fn create(this: @This(), comptime Ty: type) *Ty { + if (comptime bun.Environment.enableAllocScopes) { + return this.allocator().create(Ty) catch bun.outOfMemory(); + } + return bun.default_allocator.create(Ty) catch bun.outOfMemory(); + } + + pub fn destroy(this: @This(), ptr: anytype) void { + if (comptime bun.Environment.enableAllocScopes) { + this.allocator().destroy(ptr); + } else { + bun.default_allocator.destroy(ptr); + } + } + + /// Starts the state node. 
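The `StatePtrUnion` methods here, and `start`/`childDone` just below, all lean on the same trick: an `inline for` over the member types unrolls at compile time into a chain of tag checks, giving dynamic dispatch over a tagged opaque pointer without a vtable. A reduced sketch of the technique with toy node types (not the real `Ptr` machinery):

```zig
const std = @import("std");

const Cd = struct {
    depth: u8 = 0,
    fn start(self: *Cd) u8 {
        _ = self;
        return 0;
    }
};

const Pwd = struct {
    depth: u8 = 0,
    fn start(self: *Pwd) u8 {
        _ = self;
        return 1;
    }
};

// Reduced analogue of StatePtrUnion: a runtime tag plus an opaque
// pointer, with dispatch unrolled at compile time via `inline for`.
fn StatePtr(comptime Types: anytype) type {
    return struct {
        tag: usize,
        ptr: *anyopaque,

        fn init(p: anytype) @This() {
            const idx = comptime blk: {
                for (Types, 0..) |T, i| {
                    if (@TypeOf(p) == *T) break :blk i;
                }
                @compileError("type is not a member of this union");
            };
            return .{ .tag = idx, .ptr = p };
        }

        fn start(self: @This()) u8 {
            // Unrolls into one tag check per member type; each arm
            // casts the opaque pointer back to its concrete type.
            inline for (Types, 0..) |T, i| {
                if (self.tag == i) {
                    const casted: *T = @ptrCast(@alignCast(self.ptr));
                    return casted.start();
                }
            }
            unreachable;
        }
    };
}

pub fn main() void {
    var pwd = Pwd{};
    const node = StatePtr(.{ Cd, Pwd }).init(&pwd);
    std.debug.print("exit code: {d}\n", .{node.start()});
}
```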
+ pub fn start(this: @This()) Yield { + const tags = comptime std.meta.fields(Ptr.Tag); + inline for (tags) |tag| { + if (this.tagInt() == tag.value) { + const Ty = comptime Ptr.typeFromTag(tag.value); + Ptr.assert_type(Ty); + var casted = this.as(Ty); + return casted.start(); } } unknownTag(this.tagInt()); @@ -1398,7 +1462,7 @@ pub fn StatePtrUnion(comptime TypesValue: anytype) type { /// Signals to the state node that one of its children completed with the /// given exit code - pub fn childDone(this: @This(), child: anytype, exit_code: ExitCode) void { + pub fn childDone(this: @This(), child: anytype, exit_code: ExitCode) Yield { const tags = comptime std.meta.fields(Ptr.Tag); inline for (tags) |tag| { if (this.tagInt() == tag.value) { @@ -1409,15 +1473,14 @@ pub fn StatePtrUnion(comptime TypesValue: anytype) type { break :brk ChildPtr.init(child); }; var casted = this.as(Ty); - casted.childDone(child_ptr, exit_code); - return; + return casted.childDone(child_ptr, exit_code); } } unknownTag(this.tagInt()); } - pub fn unknownTag(tag: Ptr.TagInt) void { - if (bun.Environment.allow_assert) std.debug.panic("Bad tag: {d}\n", .{tag}); + pub fn unknownTag(tag: Ptr.TagInt) noreturn { + return bun.Output.panic("Unknown tag for shell state node: {d}\n", .{tag}); } pub fn tagInt(this: @This()) Ptr.TagInt { @@ -1638,14 +1701,15 @@ pub const ShellSyscall = struct { pub fn statat(dir: bun.FileDescriptor, path_: [:0]const u8) Maybe(bun.Stat) { if (bun.Environment.isWindows) { - var buf: bun.PathBuffer = undefined; - const path = switch (getPath(dir, path_, &buf)) { + const buf: *bun.PathBuffer = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); + const path = switch (getPath(dir, path_, buf)) { .err => |e| return .{ .err = e }, .result => |p| p, }; return switch (Syscall.stat(path)) { - .err => |e| .{ .err = e.clone(bun.default_allocator) catch bun.outOfMemory() }, + .err => |e| .{ .err = e.clone(bun.default_allocator) }, .result => |s| .{ .result = s }, }; } @@ -1653,12 +1717,15 @@ pub const ShellSyscall = struct { return Syscall.fstatat(dir, path_); } + /// Same thing as bun.sys.openat on posix + /// On windows it will convert paths for us pub fn openat(dir: bun.FileDescriptor, path: [:0]const u8, flags: i32, perm: bun.Mode) Maybe(bun.FileDescriptor) { if (bun.Environment.isWindows) { if (flags & bun.O.DIRECTORY != 0) { if (ResolvePath.Platform.posix.isAbsolute(path[0..path.len])) { - var buf: bun.PathBuffer = undefined; - const p = switch (getPath(dir, path, &buf)) { + const buf: *bun.PathBuffer = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); + const p = switch (getPath(dir, path, buf)) { .result => |p| p, .err => |e| return .{ .err = e }, }; @@ -1673,8 +1740,9 @@ pub const ShellSyscall = struct { }; } - var buf: bun.PathBuffer = undefined; - const p = switch (getPath(dir, path, &buf)) { + const buf: *bun.PathBuffer = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); + const p = switch (getPath(dir, path, buf)) { .result => |p| p, .err => |e| return .{ .err = e }, }; @@ -1717,11 +1785,11 @@ pub const ShellSyscall = struct { pub fn OutputTask( comptime Parent: type, comptime vtable: struct { - writeErr: *const fn (*Parent, childptr: anytype, []const u8) CoroutineResult, + writeErr: *const fn (*Parent, childptr: anytype, []const u8) ?Yield, onWriteErr: *const fn (*Parent) void, - writeOut: *const fn (*Parent, childptr: anytype, *OutputSrc) CoroutineResult, + writeOut: *const fn (*Parent, childptr: anytype, *OutputSrc) ?Yield, onWriteOut: *const fn 
(*Parent) void, - onDone: *const fn (*Parent) void, + onDone: *const fn (*Parent) Yield, }, ) type { return struct { @@ -1733,59 +1801,49 @@ pub fn OutputTask( done, }, - pub fn deinit(this: *@This()) void { + pub fn deinit(this: *@This()) Yield { if (comptime bun.Environment.allow_assert) assert(this.state == .done); - vtable.onDone(this.parent); - this.output.deinit(); - bun.destroy(this); + log("OutputTask({s}, 0x{x}) deinit", .{ @typeName(Parent), @intFromPtr(this) }); + defer bun.destroy(this); + defer this.output.deinit(); + return vtable.onDone(this.parent); } - pub fn start(this: *@This(), errbuf: ?[]const u8) void { + pub fn start(this: *@This(), errbuf: ?[]const u8) Yield { + log("OutputTask({s}, 0x{x}) start errbuf={s}", .{ @typeName(Parent), @intFromPtr(this), if (errbuf) |err| err[0..@min(128, err.len)] else "null" }); this.state = .waiting_write_err; if (errbuf) |err| { - switch (vtable.writeErr(this.parent, this, err)) { - .cont => { - this.next(); - }, - .yield => return, - } - return; + if (vtable.writeErr(this.parent, this, err)) |yield| return yield; + return this.next(); } this.state = .waiting_write_out; - switch (vtable.writeOut(this.parent, this, &this.output)) { - .cont => { - vtable.onWriteOut(this.parent); - this.state = .done; - this.deinit(); - }, - .yield => return, - } + if (vtable.writeOut(this.parent, this, &this.output)) |yield| return yield; + vtable.onWriteOut(this.parent); + this.state = .done; + return this.deinit(); } - pub fn next(this: *@This()) void { + pub fn next(this: *@This()) Yield { switch (this.state) { .waiting_write_err => { vtable.onWriteErr(this.parent); this.state = .waiting_write_out; - switch (vtable.writeOut(this.parent, this, &this.output)) { - .cont => { - vtable.onWriteOut(this.parent); - this.state = .done; - this.deinit(); - }, - .yield => return, - } + if (vtable.writeOut(this.parent, this, &this.output)) |yield| return yield; + vtable.onWriteOut(this.parent); + this.state = .done; + return this.deinit(); }, .waiting_write_out => { vtable.onWriteOut(this.parent); this.state = .done; - this.deinit(); + return this.deinit(); }, .done => @panic("Invalid state"), } } - pub fn onIOWriterChunk(this: *@This(), _: usize, err: ?JSC.SystemError) void { + pub fn onIOWriterChunk(this: *@This(), _: usize, err: ?JSC.SystemError) Yield { + log("OutputTask({s}, 0x{x}) onIOWriterChunk", .{ @typeName(Parent), @intFromPtr(this) }); if (err) |e| { e.deref(); } @@ -1794,19 +1852,15 @@ pub fn OutputTask( .waiting_write_err => { vtable.onWriteErr(this.parent); this.state = .waiting_write_out; - switch (vtable.writeOut(this.parent, this, &this.output)) { - .cont => { - vtable.onWriteOut(this.parent); - this.state = .done; - this.deinit(); - }, - .yield => return, - } + if (vtable.writeOut(this.parent, this, &this.output)) |yield| return yield; + vtable.onWriteOut(this.parent); + this.state = .done; + return this.deinit(); }, .waiting_write_out => { vtable.onWriteOut(this.parent); this.state = .done; - this.deinit(); + return this.deinit(); }, .done => @panic("Invalid state"), } @@ -1904,7 +1958,23 @@ pub fn isPollable(fd: bun.FileDescriptor, mode: bun.Mode) bool { return switch (bun.Environment.os) { .windows, .wasm => false, .linux => posix.S.ISFIFO(mode) or posix.S.ISSOCK(mode) or posix.isatty(fd.native()), - // macos allows regular files to be pollable: ISREG(mode) == true - .mac => posix.S.ISFIFO(mode) or posix.S.ISSOCK(mode) or posix.S.ISREG(mode) or posix.isatty(fd.native()), + // macos DOES allow regular files to be pollable, but we don't want 
that because + // our IOWriter code has a separate and better codepath for writing to files. + .mac => if (posix.S.ISREG(mode)) false else posix.S.ISFIFO(mode) or posix.S.ISSOCK(mode) or posix.isatty(fd.native()), }; } + +pub fn isPollableFromMode(mode: bun.Mode) bool { + return switch (bun.Environment.os) { + .windows, .wasm => false, + .linux => posix.S.ISFIFO(mode) or posix.S.ISSOCK(mode), + // macos DOES allow regular files to be pollable, but we don't want that because + // our IOWriter code has a separate and better codepath for writing to files. + .mac => if (posix.S.ISREG(mode)) false else posix.S.ISFIFO(mode) or posix.S.ISSOCK(mode), + }; +} + +pub fn unreachableState(context: []const u8, state: []const u8) noreturn { + @branchHint(.cold); + return bun.Output.panic("Bun shell has reached an unreachable state \"{s}\" in the {s} context. This indicates a bug, please open a GitHub issue.", .{ state, context }); +} diff --git a/src/shell/shell.zig b/src/shell/shell.zig index fcc8cfece0..01eaa54bc0 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -14,6 +14,8 @@ const isAllAscii = @import("../string_immutable.zig").isAllASCII; pub const interpret = @import("./interpreter.zig"); pub const subproc = @import("./subproc.zig"); +pub const AllocScope = @import("./AllocScope.zig"); + pub const EnvMap = interpret.EnvMap; pub const EnvStr = interpret.EnvStr; pub const Interpreter = interpret.Interpreter; @@ -25,6 +27,9 @@ pub const IOReader = Interpreter.IOReader; // pub const IOWriter = interpret.IOWriter; // pub const SubprocessMini = subproc.ShellSubprocessMini; +pub const Yield = @import("./Yield.zig").Yield; +pub const unreachableState = interpret.unreachableState; + const GlobWalker = Glob.GlobWalker_(null, true); // const GlobWalker = Glob.BunGlobWalker; @@ -194,7 +199,7 @@ pub const GlobalJS = struct { } pub inline fn throwError(this: @This(), err: bun.sys.Error) void { - this.globalThis.throwValue(err.toJSC(this.globalThis)); + this.globalThis.throwValue(err.toJS(this.globalThis)); } pub inline fn handleError(this: @This(), err: anytype, comptime fmt: []const u8) ShellErr { @@ -3715,10 +3720,10 @@ pub fn shellCmdFromJS( var builder = ShellSrcBuilder.init(globalThis, out_script, jsstrings); var jsobjref_buf: [128]u8 = [_]u8{0} ** 128; - var string_iter = string_args.arrayIterator(globalThis); + var string_iter = try string_args.arrayIterator(globalThis); var i: u32 = 0; const last = string_iter.len -| 1; - while (string_iter.next()) |js_value| { + while (try string_iter.next()) |js_value| { defer i += 1; if (!try builder.appendJSValueStr(js_value, false)) { return globalThis.throw("Shell script string contains invalid UTF-16", .{}); @@ -3726,7 +3731,7 @@ pub fn shellCmdFromJS( // const str = js_value.getZigString(globalThis); // try script.appendSlice(str.full()); if (i < last) { - const template_value = template_args.next() orelse { + const template_value = try template_args.next() orelse { return globalThis.throw("Shell script is missing JSValue arg", .{}); }; try handleTemplateValue(globalThis, template_value, out_jsobjs, out_script, jsstrings, jsobjref_buf[0..]); @@ -3806,10 +3811,10 @@ pub fn handleTemplateValue( } if (template_value.jsType().isArray()) { - var array = template_value.arrayIterator(globalThis); + var array = try template_value.arrayIterator(globalThis); const last = array.len -| 1; var i: u32 = 0; - while (array.next()) |arr| : (i += 1) { + while (try array.next()) |arr| : (i += 1) { try handleTemplateValue(globalThis, arr, out_jsobjs, out_script, 
jsstrings, jsobjref_buf); if (i < last) { const str = bun.String.static(" "); @@ -3822,7 +3827,7 @@ pub fn handleTemplateValue( } if (template_value.isObject()) { - if (template_value.getOwnTruthy(globalThis, "raw")) |maybe_str| { + if (try template_value.getOwnTruthy(globalThis, "raw")) |maybe_str| { const bunstr = try maybe_str.toBunString(globalThis); defer bunstr.deref(); if (!try builder.appendBunStr(bunstr, false)) { @@ -4149,6 +4154,19 @@ pub fn SmolList(comptime T: type, comptime INLINED_MAX: comptime_int) type { } } + pub fn pop(this: *@This()) T { + switch (this.*) { + .heap => { + return this.heap.pop().?; + }, + .inlined => { + const val = this.inlined.items[this.inlined.len - 1]; + this.inlined.len -= 1; + return val; + }, + } + } + pub fn swapRemove(this: *@This(), idx: usize) void { switch (this.*) { .heap => { @@ -4308,7 +4326,7 @@ pub const TestingAPIs = struct { const template_args_js = arguments.nextEat() orelse { return globalThis.throw("shell: expected 2 arguments, got 0", .{}); }; - var template_args = template_args_js.arrayIterator(globalThis); + var template_args = try template_args_js.arrayIterator(globalThis); var stack_alloc = std.heap.stackFallback(@sizeOf(bun.String) * 4, arena.allocator()); var jsstrings = try std.ArrayList(bun.String).initCapacity(stack_alloc.get(), 4); defer { @@ -4376,7 +4394,7 @@ pub const TestingAPIs = struct { const template_args_js = arguments.nextEat() orelse { return globalThis.throw("shell: expected 2 arguments, got 0", .{}); }; - var template_args = template_args_js.arrayIterator(globalThis); + var template_args = try template_args_js.arrayIterator(globalThis); var stack_alloc = std.heap.stackFallback(@sizeOf(bun.String) * 4, arena.allocator()); var jsstrings = try std.ArrayList(bun.String).initCapacity(stack_alloc.get(), 4); defer { diff --git a/src/shell/states/Assigns.zig b/src/shell/states/Assigns.zig index f4ae4176d2..ff2bacaa01 100644 --- a/src/shell/states/Assigns.zig +++ b/src/shell/states/Assigns.zig @@ -16,6 +16,7 @@ state: union(enum) { done, }, ctx: AssignCtx, +owned: bool = true, io: IO, pub const ParentPtr = StatePtrUnion(.{ @@ -34,43 +35,68 @@ pub inline fn deinit(this: *Assigns) void { this.state.expanding.current_expansion_result.deinit(); } this.io.deinit(); + this.base.endScope(); + if (this.owned) this.parent.destroy(this); } -pub inline fn start(this: *Assigns) void { - return this.next(); +pub fn start(this: *Assigns) Yield { + return .{ .assigns = this }; } pub fn init( - this: *Assigns, interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: []const ast.Assign, ctx: AssignCtx, parent: ParentPtr, io: IO, -) void { +) *Assigns { + const this = parent.create(Assigns); + log("Assigns(0x{x}) init", .{@intFromPtr(this)}); this.* = .{ - .base = .{ .kind = .assign, .interpreter = interpreter, .shell = shell_state }, + .base = State.initWithNewAllocScope(.assign, interpreter, shell_state), .node = node, .parent = parent, .state = .idle, .ctx = ctx, .io = io, }; + return this; } -pub fn next(this: *Assigns) void { +pub fn initBorrowed( + this: *Assigns, + interpreter: *Interpreter, + shell_state: *ShellExecEnv, + node: []const ast.Assign, + ctx: AssignCtx, + parent: ParentPtr, + io: IO, +) void { + this.* = .{ + .base = State.initWithNewAllocScope(.assign, interpreter, shell_state), + .node = node, + .parent = parent, + .state = .idle, + .ctx = ctx, + .owned = false, + .io = io, + }; +} + +pub fn next(this: *Assigns) Yield { while (!(this.state == .done)) { switch (this.state) { .idle 
=> { this.state = .{ .expanding = .{ - .current_expansion_result = std.ArrayList([:0]const u8).init(bun.default_allocator), + .current_expansion_result = std.ArrayList([:0]const u8).init(this.base.allocator()), .expansion = undefined, } }; continue; }, .expanding => { if (this.state.expanding.idx >= this.node.len) { + this.state.expanding.current_expansion_result.clearAndFree(); this.state = .done; continue; } @@ -86,53 +112,64 @@ pub fn next(this: *Assigns) void { }, this.io.copy(), ); - this.state.expanding.expansion.start(); - return; + return this.state.expanding.expansion.start(); }, .done => unreachable, .err => return this.parent.childDone(this, 1), } } - this.parent.childDone(this, 0); + return this.parent.childDone(this, 0); } -pub fn childDone(this: *Assigns, child: ChildPtr, exit_code: ExitCode) void { +pub fn childDone(this: *Assigns, child: ChildPtr, exit_code: ExitCode) Yield { if (child.ptr.is(Expansion)) { + bun.assert(this.state == .expanding); const expansion = child.ptr.as(Expansion); if (exit_code != 0) { + this.state.expanding.current_expansion_result.clearAndFree(); this.state = .{ .err = expansion.state.err, }; expansion.deinit(); - return; + return .failed; } var expanding = &this.state.expanding; const label = this.node[expanding.idx].label; + // Did it expand to a single word? if (expanding.current_expansion_result.items.len == 1) { const value = expanding.current_expansion_result.items[0]; + // We're going to let `EnvStr` manage the allocation for `value` + // from here on out + this.base.leakSlice(value); + expanding.current_expansion_result.clearAndFree(); + const ref = EnvStr.initRefCounted(value); defer ref.deref(); + this.base.shell.assignVar(this.base.interpreter, EnvStr.initSlice(label), ref, this.ctx); - expanding.current_expansion_result = std.ArrayList([:0]const u8).init(bun.default_allocator); } else { + // Multiple words, need to concatenate them together. First + // calculate size of the total buffer. const size = brk: { var total: usize = 0; const last = expanding.current_expansion_result.items.len -| 1; for (expanding.current_expansion_result.items, 0..) |slice, i| { total += slice.len; if (i != last) { - // for space + // Let's not forget to count the space in between the + // words! total += 1; } } break :brk total; }; - const value = brk: { - var merged = bun.default_allocator.allocSentinel(u8, size, 0) catch bun.outOfMemory(); + const value: []const u8 = brk: { + if (size == 0) break :brk ""; + var merged = this.base.allocator().alloc(u8, size) catch bun.outOfMemory(); var i: usize = 0; const last = expanding.current_expansion_result.items.len -| 1; for (expanding.current_expansion_result.items, 0..) |slice, j| { @@ -145,20 +182,24 @@ pub fn childDone(this: *Assigns, child: ChildPtr, exit_code: ExitCode) void { } break :brk merged; }; + + // We're going to let `EnvStr` manage the allocation for `value` + // from here on out + this.base.leakSlice(value); + const value_ref = EnvStr.initRefCounted(value); defer value_ref.deref(); this.base.shell.assignVar(this.base.interpreter, EnvStr.initSlice(label), value_ref, this.ctx); for (expanding.current_expansion_result.items) |slice| { - bun.default_allocator.free(slice); + this.base.allocator().free(slice); } expanding.current_expansion_result.clearRetainingCapacity(); } expanding.idx += 1; expansion.deinit(); - this.next(); - return; + return .{ .assigns = this }; } @panic("Invalid child to Assigns expression, this indicates a bug in Bun. 
Please file a report on Github."); @@ -172,12 +213,14 @@ pub const AssignCtx = enum { const std = @import("std"); const bun = @import("bun"); +const log = bun.shell.interpret.log; +const Yield = bun.shell.Yield; const Interpreter = bun.shell.Interpreter; const StatePtrUnion = bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const EnvStr = bun.shell.interpret.EnvStr; diff --git a/src/shell/states/Async.zig b/src/shell/states/Async.zig index 3149841b95..5657c3deca 100644 --- a/src/shell/states/Async.zig +++ b/src/shell/states/Async.zig @@ -32,39 +32,41 @@ pub fn format(this: *const Async, comptime _: []const u8, _: std.fmt.FormatOptio pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: *const ast.Expr, parent: ParentPtr, io: IO, ) *Async { interpreter.async_commands_executing += 1; - return bun.new(Async, .{ - .base = .{ .kind = .@"async", .interpreter = interpreter, .shell = shell_state }, + const async_cmd = parent.create(Async); + async_cmd.* = .{ + .base = State.initWithNewAllocScope(.@"async", interpreter, shell_state), .node = node, .parent = parent, .io = io, .event_loop = interpreter.event_loop, .concurrent_task = JSC.EventLoopTask.fromEventLoop(interpreter.event_loop), - }); + }; + return async_cmd; } -pub fn start(this: *Async) void { +pub fn start(this: *Async) Yield { log("{} start", .{this}); this.enqueueSelf(); - this.parent.childDone(this, 0); + return this.parent.childDone(this, 0); } -pub fn next(this: *Async) void { +pub fn next(this: *Async) Yield { log("{} next {s}", .{ this, @tagName(this.state) }); switch (this.state) { .idle => { this.state = .{ .exec = .{} }; this.enqueueSelf(); + return .suspended; }, .exec => { if (this.state.exec.child) |child| { - child.start(); - return; + return child.start(); } const child = brk: { @@ -104,9 +106,11 @@ pub fn next(this: *Async) void { }; this.state.exec.child = child; this.enqueueSelf(); + return .suspended; }, .done => { this.base.interpreter.asyncCmdDone(this); + return .done; }, } } @@ -119,11 +123,12 @@ pub fn enqueueSelf(this: *Async) void { } } -pub fn childDone(this: *Async, child_ptr: ChildPtr, exit_code: ExitCode) void { +pub fn childDone(this: *Async, child_ptr: ChildPtr, exit_code: ExitCode) Yield { log("{} childDone", .{this}); child_ptr.deinit(); this.state = .{ .done = exit_code }; this.enqueueSelf(); + return .suspended; } /// This function is purposefully empty as a hack to ensure Async runs in the background while appearing to @@ -143,7 +148,7 @@ pub fn actuallyDeinit(this: *Async) void { } pub fn runFromMainThread(this: *Async) void { - this.next(); + this.next().run(); } pub fn runFromMainThreadMini(this: *Async, _: *void) void { @@ -152,13 +157,14 @@ pub fn runFromMainThreadMini(this: *Async, _: *void) void { const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const Interpreter = bun.shell.Interpreter; const StatePtrUnion = bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const log = bun.shell.interpret.log; diff --git a/src/shell/states/Base.zig 
b/src/shell/states/Base.zig
index 94a63f7550..088d0719b5 100644
--- a/src/shell/states/Base.zig
+++ b/src/shell/states/Base.zig
@@ -1,17 +1,98 @@
-//! This is a header struct that all state nodes include
-//! in their layout.
+//! This is the base header struct that all state nodes include in their layout.
 //!
 //! TODO: Is this still needed?
 const Base = @This();
 
 kind: StateKind,
 interpreter: *Interpreter,
-shell: *ShellState,
+/// This field is borrowed or owned in specific cases. This affects whether or
+/// not this state node should be responsible for deinitializing this
+/// `*ShellExecEnv`.
+///
+/// Q: When is the `shell: *ShellExecEnv` field owned?
+/// A: When we must create a new shell execution environment. This is
+/// essentially all locations where `shell.dupeForSubshell(...)` is called:
+///
+/// 1. A `Script` owns its shell execution environment
+/// 2. Each command in a pipeline is executed in its own shell execution
+///    environment.
+/// 3. Subshells
+/// 4. Command substitutions
+///
+/// When `shell: *ShellExecEnv` is owned it must be deinitialized. That is why you
+/// only see `this.base.shell.deinit()` in `Script`, `Subshell`, and the
+/// children of a `Pipeline`.
+shell: *ShellExecEnv,
+__alloc_scope: if (bun.Environment.enableAllocScopes) AllocScope else void,
+
+const AllocScope = union(enum) {
+    owned: bun.AllocationScope,
+    borrowed: *bun.AllocationScope,
+
+    pub fn deinit(this: *AllocScope) void {
+        if (comptime bun.Environment.enableAllocScopes) {
+            if (this.* == .owned) this.owned.deinit();
+        }
+    }
+
+    pub fn allocator(this: *AllocScope) std.mem.Allocator {
+        return switch (this.*) {
+            .borrowed => |scope| scope.allocator(),
+            .owned => |*scope| scope.allocator(),
+        };
+    }
+
+    pub fn scopedAllocator(this: *AllocScope) *bun.AllocationScope {
+        return switch (this.*) {
+            .borrowed => |scope| scope,
+            .owned => |*scope| scope,
+        };
+    }
+
+    pub fn leakSlice(this: *AllocScope, memory: anytype) void {
+        if (comptime bun.Environment.enableAllocScopes) {
+            _ = @typeInfo(@TypeOf(memory)).pointer;
+            bun.assert(!this.scopedAllocator().trackExternalFree(memory, null));
+        }
+    }
+};
+
+/// Creates a _new_ allocation scope for this state node.
+pub fn initWithNewAllocScope(kind: StateKind, interpreter: *Interpreter, shell: *ShellExecEnv) Base {
+    return .{
+        .kind = kind,
+        .interpreter = interpreter,
+        .shell = shell,
+        .__alloc_scope = if (comptime bun.Environment.enableAllocScopes) .{ .owned = bun.AllocationScope.init(bun.default_allocator) } else {},
+    };
+}
+
+/// This will use the allocation scope provided by `scope`.
+pub fn initBorrowedAllocScope(kind: StateKind, interpreter: *Interpreter, shell: *ShellExecEnv, scope: if (bun.Environment.enableAllocScopes) *bun.AllocationScope else void) Base {
+    return .{
+        .kind = kind,
+        .interpreter = interpreter,
+        .shell = shell,
+        .__alloc_scope = if (comptime bun.Environment.enableAllocScopes) .{ .borrowed = scope } else {},
+    };
+}
+
+/// This ends the allocation scope associated with this state node.
+///
+/// If the allocation scope is borrowed from the parent, this does nothing.
+///
+/// This also does nothing in release builds.
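+///
+/// A rough sketch of the teardown order this implies for an owning state
+/// node (illustrative only; `Node` is a stand-in, compare `Binary.deinit`
+/// and `Cmd.deinit` elsewhere in this diff). The scope ends before the
+/// node's own memory is freed, and the free goes through the parent because
+/// the node was allocated from the parent's scope via `parent.create(...)`:
+///
+///     pub fn deinit(this: *Node) void {
+///         this.io.deinit(); // free scope-tracked resources first
+///         this.base.endScope(); // then end this node's owned scope
+///         this.parent.destroy(this); // finally free via the parent's scope
+///     }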
+pub fn endScope(this: *Base) void { + if (comptime bun.Environment.enableAllocScopes) { + this.__alloc_scope.deinit(); + } +} pub inline fn eventLoop(this: *const Base) JSC.EventLoopHandle { return this.interpreter.event_loop; } +/// FIXME: We should get rid of this pub fn throw(this: *const Base, err: *const bun.shell.ShellErr) void { throwShellErr(err, this.eventLoop()) catch {}; //TODO: } @@ -20,10 +101,35 @@ pub fn rootIO(this: *const Base) *const IO { return this.interpreter.rootIO(); } +pub fn allocator(this: *Base) std.mem.Allocator { + if (comptime bun.Environment.enableAllocScopes) { + return this.__alloc_scope.allocator(); + } + return bun.default_allocator; +} + +pub fn allocScope(this: *Base) if (bun.Environment.enableAllocScopes) *bun.AllocationScope else void { + if (comptime bun.Environment.enableAllocScopes) { + return switch (this.__alloc_scope) { + .borrowed => |scope| scope, + .owned => |*scope| scope, + }; + } + return {}; +} + +/// Stop tracking `memory` +pub fn leakSlice(this: *Base, memory: anytype) void { + if (comptime bun.Environment.enableAllocScopes) { + this.__alloc_scope.leakSlice(memory); + } +} + +const std = @import("std"); const bun = @import("bun"); const Interpreter = bun.shell.Interpreter; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const StateKind = bun.shell.interpret.StateKind; const throwShellErr = bun.shell.interpret.throwShellErr; const IO = bun.shell.Interpreter.IO; diff --git a/src/shell/states/Binary.zig b/src/shell/states/Binary.zig index f8a8c9b570..49c4f558e8 100644 --- a/src/shell/states/Binary.zig +++ b/src/shell/states/Binary.zig @@ -28,14 +28,14 @@ pub const ParentPtr = StatePtrUnion(.{ pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: *const ast.Binary, parent: ParentPtr, io: IO, ) *Binary { - var binary = interpreter.allocator.create(Binary) catch bun.outOfMemory(); + var binary = parent.create(Binary); binary.node = node; - binary.base = .{ .kind = .binary, .interpreter = interpreter, .shell = shell_state }; + binary.base = State.initWithNewAllocScope(.binary, interpreter, shell_state); binary.parent = parent; binary.io = io; binary.left = null; @@ -44,7 +44,7 @@ pub fn init( return binary; } -pub fn start(this: *Binary) void { +pub fn start(this: *Binary) Yield { log("binary start {x} ({s})", .{ @intFromPtr(this), @tagName(this.node.op) }); if (comptime bun.Environment.allow_assert) { assert(this.left == null); @@ -57,9 +57,8 @@ pub fn start(this: *Binary) void { this.currently_executing = this.makeChild(false); this.left = 0; } - if (this.currently_executing) |exec| { - exec.start(); - } + bun.assert(this.currently_executing != null); + return this.currently_executing.?.start(); } fn makeChild(this: *Binary, left: bool) ?ChildPtr { @@ -78,15 +77,19 @@ fn makeChild(this: *Binary, left: bool) ?ChildPtr { return ChildPtr.init(pipeline); }, .assign => |assigns| { - var assign_machine = this.base.interpreter.allocator.create(Assigns) catch bun.outOfMemory(); - assign_machine.init(this.base.interpreter, this.base.shell, assigns, .shell, Assigns.ParentPtr.init(this), this.io.copy()); - return ChildPtr.init(assign_machine); + const assign = Assigns.init(this.base.interpreter, this.base.shell, assigns, .shell, Assigns.ParentPtr.init(this), this.io.copy()); + return ChildPtr.init(assign); }, .subshell => { - switch (this.base.shell.dupeForSubshell(this.base.interpreter.allocator, this.io, .subshell)) { - .result => |shell_state| { - const 
script = Subshell.init(this.base.interpreter, shell_state, node.subshell, Subshell.ParentPtr.init(this), this.io.copy()); - return ChildPtr.init(script); + switch (Subshell.initDupeShellState( + this.base.interpreter, + this.base.shell, + node.subshell, + Subshell.ParentPtr.init(this), + this.io.copy(), + )) { + .result => |subshell| { + return ChildPtr.init(subshell); }, .err => |e| { this.base.throw(&bun.shell.ShellErr.newSys(e)); @@ -109,7 +112,7 @@ fn makeChild(this: *Binary, left: bool) ?ChildPtr { } } -pub fn childDone(this: *Binary, child: ChildPtr, exit_code: ExitCode) void { +pub fn childDone(this: *Binary, child: ChildPtr, exit_code: ExitCode) Yield { if (comptime bun.Environment.allow_assert) { assert(this.left == null or this.right == null); assert(this.currently_executing != null); @@ -122,23 +125,20 @@ pub fn childDone(this: *Binary, child: ChildPtr, exit_code: ExitCode) void { if (this.left == null) { this.left = exit_code; if ((this.node.op == .And and exit_code != 0) or (this.node.op == .Or and exit_code == 0)) { - this.parent.childDone(this, exit_code); - return; + return this.parent.childDone(this, exit_code); } this.currently_executing = this.makeChild(false); if (this.currently_executing == null) { this.right = 0; - this.parent.childDone(this, 0); - return; - } else { - this.currently_executing.?.start(); + return this.parent.childDone(this, 0); } - return; + + return this.currently_executing.?.start(); } this.right = exit_code; - this.parent.childDone(this, exit_code); + return this.parent.childDone(this, exit_code); } pub fn deinit(this: *Binary) void { @@ -146,16 +146,18 @@ pub fn deinit(this: *Binary) void { child.deinit(); } this.io.deinit(); - this.base.interpreter.allocator.destroy(this); + this.base.endScope(); + this.parent.allocator().destroy(this); } const bun = @import("bun"); +const Yield = bun.shell.Yield; const Interpreter = bun.shell.Interpreter; const StatePtrUnion = bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const log = bun.shell.interpret.log; diff --git a/src/shell/states/Cmd.zig b/src/shell/states/Cmd.zig index 19ebd5778c..71cfa06a08 100644 --- a/src/shell/states/Cmd.zig +++ b/src/shell/states/Cmd.zig @@ -25,7 +25,6 @@ parent: ParentPtr, spawn_arena: bun.ArenaAllocator, spawn_arena_freed: bool = false, -/// This allocated by the above arena args: std.ArrayList(?[*:0]const u8), /// If the cmd redirects to a file we have to expand that string. 
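A note on the pattern running through this diff: state-node methods such as `start`, `next`, and `childDone` now return a `Yield` value instead of calling back into one another, and event-loop entry points drive that value with `.run()` (see `this.next().run()` in `onExit` further down in this file). The sketch below illustrates the trampoline idea with a simplified, hypothetical `Yield`; the real `bun.shell.Yield` union is defined elsewhere in this PR and has many more variants (`.assigns`, `.expansion`, `.@"if"`, etc.):

const Yield = union(enum) {
    /// Control was handed to an async task or child process; stop driving.
    suspended,
    done,
    failed,
    /// Re-enter this command's state machine on the next loop iteration.
    cmd: *Cmd,

    pub fn run(initial: Yield) void {
        var y = initial;
        // A loop instead of mutual recursion, so deeply nested shell
        // programs cannot overflow the native stack.
        while (true) {
            switch (y) {
                .suspended, .done, .failed => return,
                .cmd => |cmd| y = cmd.next(),
            }
        }
    }
};

Returning control to one driving loop bounds native stack depth, which the previous mutually recursive `void` calls could not guarantee.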
@@ -87,7 +86,7 @@ pub const ShellAsyncSubprocessDone = struct { pub fn runFromMainThread(this: *ShellAsyncSubprocessDone) void { log("{} runFromMainThread", .{this}); defer this.deinit(); - this.cmd.parent.childDone(this.cmd, this.cmd.exit_code orelse 0); + this.cmd.parent.childDone(this.cmd, this.cmd.exit_code orelse 0).run(); } pub fn deinit(this: *ShellAsyncSubprocessDone) void { @@ -219,29 +218,29 @@ pub fn isSubproc(this: *Cmd) bool { /// If starting a command results in an error (failed to find executable in path for example) /// then it should write to the stderr of the entire shell script process -pub fn writeFailingError(this: *Cmd, comptime fmt: []const u8, args: anytype) void { +pub fn writeFailingError(this: *Cmd, comptime fmt: []const u8, args: anytype) Yield { const handler = struct { fn enqueueCb(ctx: *Cmd) void { ctx.state = .waiting_write_err; } }; - this.base.shell.writeFailingErrorFmt(this, handler.enqueueCb, fmt, args); + return this.base.shell.writeFailingErrorFmt(this, handler.enqueueCb, fmt, args); } pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: *const ast.Cmd, parent: ParentPtr, io: IO, ) *Cmd { - var cmd = interpreter.allocator.create(Cmd) catch bun.outOfMemory(); + var cmd = parent.create(Cmd); cmd.* = .{ - .base = .{ .kind = .cmd, .interpreter = interpreter, .shell = shell_state }, + .base = State.initWithNewAllocScope(.cmd, interpreter, shell_state), .node = node, .parent = parent, - .spawn_arena = bun.ArenaAllocator.init(interpreter.allocator), + .spawn_arena = undefined, .args = undefined, .redirection_file = undefined, @@ -249,24 +248,23 @@ pub fn init( .io = io, .state = .idle, }; - cmd.args = std.ArrayList(?[*:0]const u8).initCapacity(cmd.spawn_arena.allocator(), node.name_and_args.len) catch bun.outOfMemory(); - + cmd.spawn_arena = bun.ArenaAllocator.init(cmd.base.allocator()); + cmd.args = std.ArrayList(?[*:0]const u8).initCapacity(cmd.base.allocator(), node.name_and_args.len) catch bun.outOfMemory(); cmd.redirection_file = std.ArrayList(u8).init(cmd.spawn_arena.allocator()); return cmd; } -pub fn next(this: *Cmd) void { +pub fn next(this: *Cmd) Yield { while (this.state != .done) { switch (this.state) { .idle => { this.state = .{ .expanding_assigns = undefined }; - Assigns.init(&this.state.expanding_assigns, this.base.interpreter, this.base.shell, this.node.assigns, .cmd, Assigns.ParentPtr.init(this), this.io.copy()); - this.state.expanding_assigns.start(); - return; // yield execution + Assigns.initBorrowed(&this.state.expanding_assigns, this.base.interpreter, this.base.shell, this.node.assigns, .cmd, Assigns.ParentPtr.init(this), this.io.copy()); + return this.state.expanding_assigns.start(); }, .expanding_assigns => { - return; // yield execution + return .suspended; }, .expanding_redirect => { if (this.state.expanding_redirect.idx >= 1) { @@ -305,14 +303,11 @@ pub fn next(this: *Cmd) void { this.io.copy(), ); - this.state.expanding_redirect.expansion.start(); - return; + return this.state.expanding_redirect.expansion.start(); }, .expanding_args => { if (this.state.expanding_args.idx >= this.node.name_and_args.len) { - this.transitionToExecStateAndYield(); - // yield execution to subproc - return; + return this.transitionToExecStateAndYield(); } this.args.ensureUnusedCapacity(1) catch bun.outOfMemory(); @@ -330,51 +325,45 @@ pub fn next(this: *Cmd) void { this.state.expanding_args.idx += 1; - this.state.expanding_args.expansion.start(); - // yield execution to expansion - return; + return 
this.state.expanding_args.expansion.start(); }, .waiting_write_err => { - return; + bun.shell.unreachableState("Cmd.next", "waiting_write_err"); }, .exec => { - // yield execution to subproc/builtin - return; + bun.shell.unreachableState("Cmd.next", "exec"); }, .done => unreachable, } } if (this.state == .done) { - this.parent.childDone(this, this.exit_code.?); - return; + return this.parent.childDone(this, this.exit_code.?); } - this.parent.childDone(this, 1); - return; + return this.parent.childDone(this, 1); } -fn transitionToExecStateAndYield(this: *Cmd) void { +fn transitionToExecStateAndYield(this: *Cmd) Yield { this.state = .exec; - this.initSubproc(); + return this.initSubproc(); } -pub fn start(this: *Cmd) void { +pub fn start(this: *Cmd) Yield { log("cmd start {x}", .{@intFromPtr(this)}); - return this.next(); + return .{ .cmd = this }; } -pub fn onIOWriterChunk(this: *Cmd, _: usize, e: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Cmd, _: usize, e: ?JSC.SystemError) Yield { if (e) |err| { this.base.throw(&bun.shell.ShellErr.newSys(err)); - return; + return .failed; } assert(this.state == .waiting_write_err); - this.parent.childDone(this, 1); - return; + return this.parent.childDone(this, 1); } -pub fn childDone(this: *Cmd, child: ChildPtr, exit_code: ExitCode) void { +pub fn childDone(this: *Cmd, child: ChildPtr, exit_code: ExitCode) Yield { if (child.ptr.is(Assigns)) { if (exit_code != 0) { const err = this.state.expanding_assigns.state.err; @@ -382,8 +371,7 @@ pub fn childDone(this: *Cmd, child: ChildPtr, exit_code: ExitCode) void { defer err.deinit(bun.default_allocator); this.state.expanding_assigns.deinit(); - this.writeFailingError("{}\n", .{err}); - return; + return this.writeFailingError("{}\n", .{err}); } this.state.expanding_assigns.deinit(); @@ -392,8 +380,7 @@ pub fn childDone(this: *Cmd, child: ChildPtr, exit_code: ExitCode) void { .expansion = undefined, }, }; - this.next(); - return; + return .{ .cmd = this }; } if (child.ptr.is(Expansion)) { @@ -404,9 +391,8 @@ pub fn childDone(this: *Cmd, child: ChildPtr, exit_code: ExitCode) void { .expanding_args => this.state.expanding_args.expansion.state.err, else => @panic("Invalid state"), }; - defer err.deinit(bun.default_allocator); - this.writeFailingError("{}\n", .{err}); - return; + defer err.deinit(this.base.allocator()); + return this.writeFailingError("{}\n", .{err}); } // Handling this case from the shell spec: // "If there is no command name, but the command contained a @@ -423,18 +409,17 @@ pub fn childDone(this: *Cmd, child: ChildPtr, exit_code: ExitCode) void { { this.exit_code = e.out_exit_code; } - this.next(); - return; + return .{ .cmd = this }; } @panic("Expected Cmd child to be Assigns or Expansion. This indicates a bug in Bun. Please file a GitHub issue. 
"); } -fn initSubproc(this: *Cmd) void { +fn initSubproc(this: *Cmd) Yield { log("cmd init subproc ({x}, cwd={s})", .{ @intFromPtr(this), this.base.shell.cwd() }); var arena = &this.spawn_arena; - var arena_allocator = arena.allocator(); + // var arena_allocator = arena.allocator(); var spawn_args = Subprocess.SpawnArgs.default(arena, this.base.interpreter.event_loop, false); spawn_args.argv = std.ArrayListUnmanaged(?[*:0]const u8){}; @@ -444,10 +429,15 @@ fn initSubproc(this: *Cmd) void { const args = args: { this.args.append(null) catch bun.outOfMemory(); - if (bun.Environment.allow_assert) { + log("Cmd(0x{x}, {s}) IO: {}", .{ @intFromPtr(this), if (this.args.items.len > 0) this.args.items[0] orelse "" else "", this.io }); + if (bun.Environment.isDebug) { for (this.args.items) |maybe_arg| { if (maybe_arg) |arg| { - log("ARG: {s}\n", .{arg}); + if (bun.sliceTo(arg, 0).len > 80) { + log("ARG: {s}...\n", .{arg[0..80]}); + } else { + log("ARG: {s}\n", .{arg}); + } } } } @@ -466,22 +456,15 @@ fn initSubproc(this: *Cmd) void { // BUT, if the expansion contained a single command // substitution (third example above), then we need to // return the exit code of that command substitution. - this.parent.childDone(this, this.exit_code orelse 0); - return; + return this.parent.childDone(this, this.exit_code orelse 0); }; const first_arg_len = std.mem.len(first_arg); - var first_arg_real = first_arg[0..first_arg_len]; - - if (bun.Environment.isDebug) { - if (bun.strings.eqlComptime(first_arg_real, "bun")) { - first_arg_real = "bun-debug"; - } - } + const first_arg_real = first_arg[0..first_arg_len]; if (Builtin.Kind.fromStr(first_arg[0..first_arg_len])) |b| { const cwd = this.base.shell.cwd_fd; - const coro_result = Builtin.init( + const maybe_yield = Builtin.init( this, this.base.interpreter, b, @@ -493,7 +476,7 @@ fn initSubproc(this: *Cmd) void { cwd, &this.io, ); - if (coro_result == .yield) return; + if (maybe_yield) |yield| return yield; if (comptime bun.Environment.allow_assert) { assert(this.exec == .bltn); @@ -501,27 +484,20 @@ fn initSubproc(this: *Cmd) void { log("Builtin name: {s}", .{@tagName(this.exec)}); - switch (this.exec.bltn.start()) { - .result => {}, - .err => |e| { - this.writeFailingError("bun: {s}: {s}", .{ @tagName(this.exec.bltn.kind), e.toShellSystemError().message }); - return; - }, - } - return; + return this.exec.bltn.start(); } - const path_buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(path_buf); + const path_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(path_buf); const resolved = which(path_buf, spawn_args.PATH, spawn_args.cwd, first_arg_real) orelse blk: { if (bun.strings.eqlComptime(first_arg_real, "bun") or bun.strings.eqlComptime(first_arg_real, "bun-debug")) blk2: { break :blk bun.selfExePath() catch break :blk2; } - this.writeFailingError("bun: command not found: {s}\n", .{first_arg}); - return; + return this.writeFailingError("bun: command not found: {s}\n", .{first_arg}); }; - const duped = arena_allocator.dupeZ(u8, bun.span(resolved)) catch bun.outOfMemory(); + this.base.allocator().free(first_arg_real); + const duped = this.base.allocator().dupeZ(u8, bun.span(resolved)) catch bun.outOfMemory(); this.args.items[0] = duped; break :args this.args; @@ -540,6 +516,42 @@ fn initSubproc(this: *Cmd) void { defer shellio.deref(); this.io.to_subproc_stdio(&spawn_args.stdio, &shellio); + if (this.initRedirections(&spawn_args)) |yield| return yield; + + const buffered_closed = BufferedIoClosed.fromStdio(&spawn_args.stdio); + log("cmd 
({x}) set buffered closed => {any}", .{ @intFromPtr(this), buffered_closed }); + + this.exec = .{ .subproc = .{ + .child = undefined, + .buffered_closed = buffered_closed, + } }; + var did_exit_immediately = false; + const subproc = switch (Subprocess.spawnAsync(this.base.eventLoop(), &shellio, spawn_args, &this.exec.subproc.child, &did_exit_immediately)) { + .result => this.exec.subproc.child, + .err => |*e| { + this.exec = .none; + return this.writeFailingError("{}\n", .{e}); + }, + }; + subproc.ref(); + this.spawn_arena_freed = true; + arena.deinit(); + + if (did_exit_immediately) { + if (subproc.process.hasExited()) { + // process has already exited, we called wait4(), but we did not call onProcessExit() + subproc.process.onExit(subproc.process.status, &std.mem.zeroes(bun.spawn.Rusage)); + } else { + // process has already exited, but we haven't called wait4() yet + // https://cs.github.com/libuv/libuv/blob/b00d1bd225b602570baee82a6152eaa823a84fa6/src/unix/process.c#L1007 + subproc.process.wait(false); + } + } + + return .suspended; +} + +fn initRedirections(this: *Cmd, spawn_args: *Subprocess.SpawnArgs) ?Yield { if (this.node.redirect_file) |redirect| { const in_cmd_subst = false; @@ -561,11 +573,11 @@ fn initSubproc(this: *Cmd) void { } else if (this.base.interpreter.jsobjs[val.idx].as(JSC.WebCore.Blob)) |blob__| { const blob = blob__.dupe(); if (this.node.redirect.stdin) { - spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stdin_no) catch return; + spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stdin_no) catch return .failed; } else if (this.node.redirect.stdout) { - spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stdout_no) catch return; + spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stdout_no) catch return .failed; } else if (this.node.redirect.stderr) { - spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stderr_no) catch return; + spawn_args.stdio[stdin_no].extractBlob(global, .{ .Blob = blob }, stderr_no) catch return .failed; } } else if (JSC.WebCore.ReadableStream.fromJS(this.base.interpreter.jsobjs[val.idx], global)) |rstream| { _ = rstream; @@ -573,24 +585,23 @@ fn initSubproc(this: *Cmd) void { } else if (this.base.interpreter.jsobjs[val.idx].as(JSC.WebCore.Response)) |req| { req.getBodyValue().toBlobIfPossible(); if (this.node.redirect.stdin) { - spawn_args.stdio[stdin_no].extractBlob(global, req.getBodyValue().useAsAnyBlob(), stdin_no) catch return; + spawn_args.stdio[stdin_no].extractBlob(global, req.getBodyValue().useAsAnyBlob(), stdin_no) catch return .failed; } if (this.node.redirect.stdout) { - spawn_args.stdio[stdout_no].extractBlob(global, req.getBodyValue().useAsAnyBlob(), stdout_no) catch return; + spawn_args.stdio[stdout_no].extractBlob(global, req.getBodyValue().useAsAnyBlob(), stdout_no) catch return .failed; } if (this.node.redirect.stderr) { - spawn_args.stdio[stderr_no].extractBlob(global, req.getBodyValue().useAsAnyBlob(), stderr_no) catch return; + spawn_args.stdio[stderr_no].extractBlob(global, req.getBodyValue().useAsAnyBlob(), stderr_no) catch return .failed; } } else { const jsval = this.base.interpreter.jsobjs[val.idx]; global.throw("Unknown JS value used in shell: {}", .{jsval.fmtString(global)}) catch {}; // TODO: propagate - return; + return .failed; } }, .atom => { if (this.redirection_file.items.len == 0) { - this.writeFailingError("bun: ambiguous redirect: at `{s}`\n", .{spawn_args.argv.items[0] orelse ""}); - return; + return this.writeFailingError("bun: 
ambiguous redirect: at `{s}`\n", .{spawn_args.argv.items[0] orelse ""}); } const path = this.redirection_file.items[0..this.redirection_file.items.len -| 1 :0]; log("Expanded Redirect: {s}\n", .{this.redirection_file.items[0..]}); @@ -616,36 +627,7 @@ fn initSubproc(this: *Cmd) void { } } - const buffered_closed = BufferedIoClosed.fromStdio(&spawn_args.stdio); - log("cmd ({x}) set buffered closed => {any}", .{ @intFromPtr(this), buffered_closed }); - - this.exec = .{ .subproc = .{ - .child = undefined, - .buffered_closed = buffered_closed, - } }; - var did_exit_immediately = false; - const subproc = switch (Subprocess.spawnAsync(this.base.eventLoop(), &shellio, spawn_args, &this.exec.subproc.child, &did_exit_immediately)) { - .result => this.exec.subproc.child, - .err => |*e| { - this.exec = .none; - this.writeFailingError("{}\n", .{e}); - return; - }, - }; - subproc.ref(); - this.spawn_arena_freed = true; - arena.deinit(); - - if (did_exit_immediately) { - if (subproc.process.hasExited()) { - // process has already exited, we called wait4(), but we did not call onProcessExit() - subproc.process.onExit(subproc.process.status, &std.mem.zeroes(bun.spawn.Rusage)); - } else { - // process has already exited, but we haven't called wait4() yet - // https://cs.github.com/libuv/libuv/blob/b00d1bd225b602570baee82a6152eaa823a84fa6/src/unix/process.c#L1007 - subproc.process.wait(false); - } - } + return null; } fn setStdioFromRedirect(stdio: *[3]shell.subproc.Stdio, flags: ast.RedirectFlags, val: shell.subproc.Stdio) void { @@ -708,8 +690,7 @@ pub fn onExit(this: *Cmd, exit_code: ExitCode) void { log("cmd exit code={d} has_finished={any} ({x})", .{ exit_code, has_finished, @intFromPtr(this) }); if (has_finished) { this.state = .done; - this.next(); - return; + this.next().run(); } } @@ -739,20 +720,30 @@ pub fn deinit(this: *Cmd) void { this.exec = .none; } + { + for (this.args.items) |maybe_arg| { + if (maybe_arg) |arg| { + this.base.allocator().free(bun.sliceTo(arg, 0)); + } + } + this.args.deinit(); + } + if (!this.spawn_arena_freed) { log("Spawn arena free", .{}); this.spawn_arena.deinit(); } this.io.deref(); - this.base.interpreter.allocator.destroy(this); + this.base.endScope(); + this.parent.destroy(this); } pub fn bufferedInputClose(this: *Cmd) void { this.exec.subproc.buffered_closed.close(this, .stdin); } -pub fn bufferedOutputClose(this: *Cmd, kind: Subprocess.OutKind, err: ?JSC.SystemError) void { +pub fn bufferedOutputClose(this: *Cmd, kind: Subprocess.OutKind, err: ?JSC.SystemError) Yield { switch (kind) { .stdout => this.bufferedOutputCloseStdout(err), .stderr => this.bufferedOutputCloseStderr(err), @@ -764,10 +755,12 @@ pub fn bufferedOutputClose(this: *Cmd, kind: Subprocess.OutKind, err: ?JSC.Syste .concurrent_task = JSC.EventLoopTask.fromEventLoop(this.base.eventLoop()), }); async_subprocess_done.enqueue(); + return .suspended; } else { - this.parent.childDone(this, this.exit_code orelse 0); + return this.parent.childDone(this, this.exit_code orelse 0); } } + return .suspended; } pub fn bufferedOutputCloseStdout(this: *Cmd, err: ?JSC.SystemError) void { @@ -805,6 +798,7 @@ pub fn bufferedOutputCloseStderr(this: *Cmd, err: ?JSC.SystemError) void { const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const Allocator = std.mem.Allocator; @@ -813,7 +807,7 @@ const Interpreter = bun.shell.Interpreter; const StatePtrUnion = bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; -const 
ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const log = bun.shell.interpret.log; diff --git a/src/shell/states/CondExpr.zig b/src/shell/states/CondExpr.zig index 0553594cc0..33206cb722 100644 --- a/src/shell/states/CondExpr.zig +++ b/src/shell/states/CondExpr.zig @@ -62,30 +62,33 @@ pub const ChildPtr = StatePtrUnion(.{ pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: *const ast.CondExpr, parent: ParentPtr, io: IO, ) *CondExpr { - return bun.new(CondExpr, .{ - .base = .{ .kind = .condexpr, .interpreter = interpreter, .shell = shell_state }, + const condexpr = parent.create(CondExpr); + condexpr.* = .{ + .base = State.initWithNewAllocScope(.condexpr, interpreter, shell_state), .node = node, .parent = parent, .io = io, - .args = std.ArrayList([:0]const u8).init(bun.default_allocator), - }); + .args = undefined, + }; + condexpr.args = std.ArrayList([:0]const u8).init(condexpr.base.allocator()); + return condexpr; } pub fn format(this: *const CondExpr, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { try writer.print("CondExpr(0x{x}, op={s})", .{ @intFromPtr(this), @tagName(this.node.op) }); } -pub fn start(this: *CondExpr) void { +pub fn start(this: *CondExpr) Yield { log("{} start", .{this}); - this.next(); + return .{ .cond_expr = this }; } -fn next(this: *CondExpr) void { +pub fn next(this: *CondExpr) Yield { while (this.state != .done) { switch (this.state) { .idle => { @@ -94,8 +97,7 @@ fn next(this: *CondExpr) void { }, .expanding_args => { if (this.state.expanding_args.idx >= this.node.args.len()) { - this.commandImplStart(); - return; + return this.commandImplStart(); } this.args.ensureUnusedCapacity(1) catch bun.outOfMemory(); @@ -111,39 +113,33 @@ fn next(this: *CondExpr) void { this.io.copy(), ); this.state.expanding_args.idx += 1; - this.state.expanding_args.expansion.start(); - return; + return this.state.expanding_args.expansion.start(); }, - .waiting_stat => return, + .waiting_stat => return .suspended, .stat_complete => { switch (this.node.op) { .@"-f" => { - this.parent.childDone(this, if (this.state.stat_complete.stat == .result) 0 else 1); - return; + return this.parent.childDone(this, if (this.state.stat_complete.stat == .result) 0 else 1); }, .@"-d" => { const st: bun.Stat = switch (this.state.stat_complete.stat) { .result => |st| st, .err => { // It seems that bash always gives exit code 1 - this.parent.childDone(this, 1); - return; + return this.parent.childDone(this, 1); }, }; - this.parent.childDone(this, if (bun.S.ISDIR(@intCast(st.mode))) 0 else 1); - return; + return this.parent.childDone(this, if (bun.S.ISDIR(@intCast(st.mode))) 0 else 1); }, .@"-c" => { const st: bun.Stat = switch (this.state.stat_complete.stat) { .result => |st| st, .err => { // It seems that bash always gives exit code 1 - this.parent.childDone(this, 1); - return; + return this.parent.childDone(this, 1); }, }; - this.parent.childDone(this, if (bun.S.ISCHR(@intCast(st.mode))) 0 else 1); - return; + return this.parent.childDone(this, if (bun.S.ISCHR(@intCast(st.mode))) 0 else 1); }, .@"-z", .@"-n", .@"==", .@"!=" => @panic("This conditional expression op does not need `stat()`. This indicates a bug in Bun. 
Please file a GitHub issue."), else => { @@ -158,32 +154,32 @@ fn next(this: *CondExpr) void { }, } }, - .waiting_write_err => return, + .waiting_write_err => return .suspended, .done => assert(false), } } - this.parent.childDone(this, 0); + return this.parent.childDone(this, 0); } -fn commandImplStart(this: *CondExpr) void { +fn commandImplStart(this: *CondExpr) Yield { switch (this.node.op) { .@"-c", .@"-d", .@"-f", => { this.state = .waiting_stat; - this.doStat(); + return this.doStat(); }, - .@"-z" => this.parent.childDone(this, if (this.args.items.len == 0 or this.args.items[0].len == 0) 0 else 1), - .@"-n" => this.parent.childDone(this, if (this.args.items.len > 0 and this.args.items[0].len != 0) 0 else 1), + .@"-z" => return this.parent.childDone(this, if (this.args.items.len == 0 or this.args.items[0].len == 0) 0 else 1), + .@"-n" => return this.parent.childDone(this, if (this.args.items.len > 0 and this.args.items[0].len != 0) 0 else 1), .@"==" => { const is_eq = this.args.items.len == 0 or (this.args.items.len >= 2 and bun.strings.eql(this.args.items[0], this.args.items[1])); - this.parent.childDone(this, if (is_eq) 0 else 1); + return this.parent.childDone(this, if (is_eq) 0 else 1); }, .@"!=" => { const is_neq = this.args.items.len >= 2 and !bun.strings.eql(this.args.items[0], this.args.items[1]); - this.parent.childDone(this, if (is_neq) 0 else 1); + return this.parent.childDone(this, if (is_neq) 0 else 1); }, // else => @panic("Invalid node op: " ++ @tagName(this.node.op) ++ ", this indicates a bug in Bun. Please file a GithHub issue."), else => { @@ -200,7 +196,7 @@ fn commandImplStart(this: *CondExpr) void { } } -fn doStat(this: *CondExpr) void { +fn doStat(this: *CondExpr) Yield { const stat_task = bun.new(ShellCondExprStatTask, .{ .task = .{ .event_loop = this.base.eventLoop(), @@ -211,25 +207,29 @@ fn doStat(this: *CondExpr) void { .cwdfd = this.base.shell.cwd_fd, }); stat_task.task.schedule(); + return .suspended; } pub fn deinit(this: *CondExpr) void { this.io.deinit(); - bun.destroy(this); + for (this.args.items) |item| { + this.base.allocator().free(item); + } + this.args.deinit(); + this.base.endScope(); + this.parent.destroy(this); } -pub fn childDone(this: *CondExpr, child: ChildPtr, exit_code: ExitCode) void { +pub fn childDone(this: *CondExpr, child: ChildPtr, exit_code: ExitCode) Yield { if (child.ptr.is(Expansion)) { if (exit_code != 0) { const err = this.state.expanding_args.expansion.state.err; defer err.deinit(bun.default_allocator); this.state.expanding_args.expansion.deinit(); - this.writeFailingError("{}\n", .{err}); - return; + return this.writeFailingError("{}\n", .{err}); } child.deinit(); - this.next(); - return; + return this.next(); } @panic("Invalid child to cond expression, this indicates a bug in Bun. 
Please file a report on Github.");
@@ -241,41 +241,42 @@
 pub fn onStatTaskComplete(this: *CondExpr, result: Maybe(bun.Stat)) void {
     this.state = .{
         .stat_complete = .{ .stat = result },
     };
-    this.next();
+    this.next().run();
 }
 
-pub fn writeFailingError(this: *CondExpr, comptime fmt: []const u8, args: anytype) void {
+pub fn writeFailingError(this: *CondExpr, comptime fmt: []const u8, args: anytype) Yield {
     const handler = struct {
         fn enqueueCb(ctx: *CondExpr) void {
             ctx.state = .waiting_write_err;
         }
     };
-    this.base.shell.writeFailingErrorFmt(this, handler.enqueueCb, fmt, args);
+    return this.base.shell.writeFailingErrorFmt(this, handler.enqueueCb, fmt, args);
 }
 
-pub fn onIOWriterChunk(this: *CondExpr, _: usize, err: ?JSC.SystemError) void {
+pub fn onIOWriterChunk(this: *CondExpr, _: usize, err: ?JSC.SystemError) Yield {
     if (err != null) {
         defer err.?.deref();
         const exit_code: ExitCode = @intFromEnum(err.?.getErrno());
-        this.parent.childDone(this, exit_code);
-        return;
+        return this.parent.childDone(this, exit_code);
     }
 
     if (this.state == .waiting_write_err) {
-        this.parent.childDone(this, 1);
-        return;
+        return this.parent.childDone(this, 1);
     }
+
+    bun.shell.unreachableState("CondExpr.onIOWriterChunk", @tagName(this.state));
 }
 
 const std = @import("std");
 const bun = @import("bun");
+const Yield = bun.shell.Yield;
 const shell = bun.shell;
 const Interpreter = bun.shell.Interpreter;
 const StatePtrUnion = bun.shell.interpret.StatePtrUnion;
 const ast = bun.shell.AST;
 const ExitCode = bun.shell.ExitCode;
-const ShellState = Interpreter.ShellState;
+const ShellExecEnv = Interpreter.ShellExecEnv;
 const State = bun.shell.Interpreter.State;
 const IO = bun.shell.Interpreter.IO;
 const log = bun.shell.interpret.log;
diff --git a/src/shell/states/Expansion.zig b/src/shell/states/Expansion.zig
index c7b44fc658..177dce86bf 100644
--- a/src/shell/states/Expansion.zig
+++ b/src/shell/states/Expansion.zig
@@ -57,7 +57,16 @@ pub const Result = union(enum) {
         done: bool = false,
     },
 
-    pub fn pushResultSlice(this: *Result, buf: [:0]const u8) void {
+    const PushAction = enum {
+        /// We just copied the buf into the result; the caller can just do
+        /// `.clearRetainingCapacity()`.
+        copied,
+        /// We took ownership of the buf and placed its pointer in the result;
+        /// the caller should remove any references to the underlying data.
+        moved,
+    };
+
+    pub fn pushResultSliceOwned(this: *Result, buf: [:0]const u8) PushAction {
         if (comptime bun.Environment.allow_assert) {
             assert(buf[buf.len] == 0);
         }
@@ -65,19 +74,22 @@
         switch (this.*) {
             .array_of_slice => {
                 this.array_of_slice.append(buf) catch bun.outOfMemory();
+                return .moved;
             },
             .array_of_ptr => {
                 this.array_of_ptr.append(@as([*:0]const u8, @ptrCast(buf.ptr))) catch bun.outOfMemory();
+                return .moved;
             },
             .single => {
-                if (this.single.done) return;
+                if (this.single.done) return .copied;
                 this.single.list.appendSlice(buf[0 .. buf.len + 1]) catch bun.outOfMemory();
                 this.single.done = true;
+                return .copied;
             },
         }
     }
 
-    pub fn pushResult(this: *Result, buf: *std.ArrayList(u8)) void {
+    pub fn pushResult(this: *Result, buf: *std.ArrayList(u8)) PushAction {
         if (comptime bun.Environment.allow_assert) {
             assert(buf.items[buf.items.len - 1] == 0);
         }
@@ -85,13 +97,16 @@
         switch (this.*) {
             .array_of_slice => {
                 this.array_of_slice.append(buf.items[0 ..
buf.items.len - 1 :0]) catch bun.outOfMemory(); + return .moved; }, .array_of_ptr => { this.array_of_ptr.append(@as([*:0]const u8, @ptrCast(buf.items.ptr))) catch bun.outOfMemory(); + return .moved; }, .single => { - if (this.single.done) return; + if (this.single.done) return .copied; this.single.list.appendSlice(buf.items[0..]) catch bun.outOfMemory(); + return .copied; }, } } @@ -101,9 +116,13 @@ pub fn format(this: *const Expansion, comptime _: []const u8, _: std.fmt.FormatO try writer.print("Expansion(0x{x})", .{@intFromPtr(this)}); } +pub fn allocator(this: *Expansion) std.mem.Allocator { + return this.base.allocator(); +} + pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, expansion: *Expansion, node: *const ast.Atom, parent: ParentPtr, @@ -113,11 +132,7 @@ pub fn init( log("Expansion(0x{x}) init", .{@intFromPtr(expansion)}); expansion.* = .{ .node = node, - .base = .{ - .kind = .expansion, - .interpreter = interpreter, - .shell = shell_state, - }, + .base = State.initBorrowedAllocScope(.expansion, interpreter, shell_state, parent.scopedAllocator()), .parent = parent, .word_idx = 0, @@ -125,29 +140,30 @@ pub fn init( .child_state = .idle, .out = out_result, .out_idx = 0, - .current_out = std.ArrayList(u8).init(interpreter.allocator), + .current_out = undefined, .io = io, }; - // var expansion = interpreter.allocator.create(Expansion) catch bun.outOfMemory(); + expansion.current_out = std.ArrayList(u8).init(expansion.base.allocator()); } pub fn deinit(expansion: *Expansion) void { log("Expansion(0x{x}) deinit", .{@intFromPtr(expansion)}); expansion.current_out.deinit(); expansion.io.deinit(); + expansion.base.endScope(); } -pub fn start(this: *Expansion) void { +pub fn start(this: *Expansion) Yield { if (comptime bun.Environment.allow_assert) { assert(this.child_state == .idle); assert(this.word_idx == 0); } this.state = .normal; - this.next(); + return .{ .expansion = this }; } -pub fn next(this: *Expansion) void { +pub fn next(this: *Expansion) Yield { while (!(this.state == .done or this.state == .err)) { switch (this.state) { .normal => { @@ -160,9 +176,7 @@ pub fn next(this: *Expansion) void { } while (this.word_idx < this.node.atomsLen()) { - const is_cmd_subst = this.expandVarAndCmdSubst(this.word_idx); - // yield execution - if (is_cmd_subst) return; + if (this.expandVarAndCmdSubst(this.word_idx)) |yield| return yield; } if (this.word_idx >= this.node.atomsLen()) { @@ -202,10 +216,10 @@ pub fn next(this: *Expansion) void { // Shouldn't fall through to here assert(this.word_idx >= this.node.atomsLen()); - return; + return .suspended; }, .braces => { - var arena = Arena.init(this.base.interpreter.allocator); + var arena = Arena.init(this.base.allocator()); defer arena.deinit(); const arena_allocator = arena.allocator(); const brace_str = this.current_out.items[0..]; @@ -213,18 +227,16 @@ pub fn next(this: *Expansion) void { var lexer_output = Braces.Lexer.tokenize(arena_allocator, brace_str) catch |e| OOM(e); const expansion_count = Braces.calculateExpandedAmount(lexer_output.tokens.items[0..]) catch |e| OOM(e); - var expanded_strings = brk: { - const stack_max = comptime 16; - comptime { - assert(@sizeOf([]std.ArrayList(u8)) * stack_max <= 256); - } - var maybe_stack_alloc = std.heap.stackFallback(@sizeOf([]std.ArrayList(u8)) * stack_max, this.base.interpreter.allocator); - const expanded_strings = maybe_stack_alloc.get().alloc(std.ArrayList(u8), expansion_count) catch bun.outOfMemory(); - break :brk expanded_strings; - }; + 
const stack_max = comptime 16; + comptime { + assert(@sizeOf([]std.ArrayList(u8)) * stack_max <= 256); + } + var maybe_stack_alloc = std.heap.stackFallback(@sizeOf([]std.ArrayList(u8)) * stack_max, arena_allocator); + const stack_alloc = maybe_stack_alloc.get(); + const expanded_strings = stack_alloc.alloc(std.ArrayList(u8), expansion_count) catch bun.outOfMemory(); for (0..expansion_count) |i| { - expanded_strings[i] = std.ArrayList(u8).init(this.base.interpreter.allocator); + expanded_strings[i] = std.ArrayList(u8).init(this.base.allocator()); } Braces.expand( @@ -239,7 +251,14 @@ pub fn next(this: *Expansion) void { // Add sentinel values for (0..expansion_count) |i| { expanded_strings[i].append(0) catch bun.outOfMemory(); - this.pushResult(&expanded_strings[i]); + switch (this.out.pushResult(&expanded_strings[i])) { + .copied => { + expanded_strings[i].deinit(); + }, + .moved => { + expanded_strings[i].clearRetainingCapacity(); + }, + } } if (this.node.has_glob_expansion()) { @@ -249,28 +268,26 @@ pub fn next(this: *Expansion) void { } }, .glob => { - this.transitionToGlobState(); - // yield - return; + return this.transitionToGlobState(); }, .done, .err => unreachable, } } if (this.state == .done) { - this.parent.childDone(this, 0); - return; + return this.parent.childDone(this, 0); } // Parent will inspect the `this.state.err` if (this.state == .err) { - this.parent.childDone(this, 1); - return; + return this.parent.childDone(this, 1); } + + unreachable; } -fn transitionToGlobState(this: *Expansion) void { - var arena = Arena.init(this.base.interpreter.allocator); +fn transitionToGlobState(this: *Expansion) Yield { + var arena = Arena.init(this.base.allocator()); this.child_state = .{ .glob = .{ .walker = .{} } }; const pattern = this.current_out.items[0..]; @@ -290,16 +307,16 @@ fn transitionToGlobState(this: *Expansion) void { .result => {}, .err => |e| { this.state = .{ .err = bun.shell.ShellErr.newSys(e) }; - this.next(); - return; + return .{ .expansion = this }; }, } - var task = ShellGlobTask.createOnMainThread(this.base.interpreter.allocator, &this.child_state.glob.walker, this); + var task = ShellGlobTask.createOnMainThread(&this.child_state.glob.walker, this); task.schedule(); + return .suspended; } -pub fn expandVarAndCmdSubst(this: *Expansion, start_word_idx: u32) bool { +pub fn expandVarAndCmdSubst(this: *Expansion, start_word_idx: u32) ?Yield { switch (this.node.*) { .simple => |*simp| { const is_cmd_subst = this.expandSimpleNoIO(simp, &this.current_out, true); @@ -309,11 +326,11 @@ pub fn expandVarAndCmdSubst(this: *Expansion, start_word_idx: u32) bool { .stdout = .pipe, .stderr = this.base.rootIO().stderr.ref(), }; - const shell_state = switch (this.base.shell.dupeForSubshell(this.base.interpreter.allocator, io, .cmd_subst)) { + const shell_state = switch (this.base.shell.dupeForSubshell(this.base.allocScope(), this.base.allocator(), io, .cmd_subst)) { .result => |s| s, .err => |e| { this.base.throw(&bun.shell.ShellErr.newSys(e)); - return false; + return .failed; }, }; var script = Script.init(this.base.interpreter, shell_state, &this.node.simple.cmd_subst.script, Script.ParentPtr.init(this), io); @@ -323,8 +340,7 @@ pub fn expandVarAndCmdSubst(this: *Expansion, start_word_idx: u32) bool { .quoted = simp.cmd_subst.quoted, }, }; - script.start(); - return true; + return script.start(); } else { this.word_idx += 1; } @@ -342,11 +358,11 @@ pub fn expandVarAndCmdSubst(this: *Expansion, start_word_idx: u32) bool { .stdout = .pipe, .stderr = 
this.base.rootIO().stderr.ref(), }; - const shell_state = switch (this.base.shell.dupeForSubshell(this.base.interpreter.allocator, io, .cmd_subst)) { + const shell_state = switch (this.base.shell.dupeForSubshell(this.base.allocScope(), this.base.allocator(), io, .cmd_subst)) { .result => |s| s, .err => |e| { this.base.throw(&bun.shell.ShellErr.newSys(e)); - return false; + return .failed; }, }; var script = Script.init(this.base.interpreter, shell_state, &simple_atom.cmd_subst.script, Script.ParentPtr.init(this), io); @@ -356,8 +372,7 @@ pub fn expandVarAndCmdSubst(this: *Expansion, start_word_idx: u32) bool { .quoted = simple_atom.cmd_subst.quoted, }, }; - script.start(); - return true; + return script.start(); } else { this.word_idx += 1; this.child_state = .idle; @@ -366,7 +381,7 @@ pub fn expandVarAndCmdSubst(this: *Expansion, start_word_idx: u32) bool { }, } - return false; + return null; } /// Remove a set of values from the beginning and end of a slice. @@ -453,7 +468,7 @@ fn convertNewlinesToSpacesSlow(i: usize, stdout: []u8) void { } } -pub fn childDone(this: *Expansion, child: ChildPtr, exit_code: ExitCode) void { +pub fn childDone(this: *Expansion, child: ChildPtr, exit_code: ExitCode) Yield { if (comptime bun.Environment.allow_assert) { assert(this.state != .done and this.state != .err); assert(this.child_state != .idle); @@ -491,14 +506,13 @@ pub fn childDone(this: *Expansion, child: ChildPtr, exit_code: ExitCode) void { this.word_idx += 1; this.child_state = .idle; child.deinit(); - this.next(); - return; + return .{ .expansion = this }; } @panic("Invalid child to Expansion, this indicates a bug in Bun. Please file a report on Github."); } -fn onGlobWalkDone(this: *Expansion, task: *ShellGlobTask) void { +fn onGlobWalkDone(this: *Expansion, task: *ShellGlobTask) Yield { log("{} onGlobWalkDone", .{this}); if (comptime bun.Environment.allow_assert) { assert(this.child_state == .glob); @@ -511,7 +525,7 @@ fn onGlobWalkDone(this: *Expansion, task: *ShellGlobTask) void { }, .unknown => |errtag| { this.base.throw(&.{ - .custom = bun.default_allocator.dupe(u8, @errorName(errtag)) catch bun.outOfMemory(), + .custom = this.base.allocator().dupe(u8, @errorName(errtag)) catch bun.outOfMemory(), }); }, } @@ -524,11 +538,10 @@ fn onGlobWalkDone(this: *Expansion, task: *ShellGlobTask) void { this.child_state.glob.walker.deinit(true); this.child_state = .idle; this.state = .done; - this.next(); - return; + return .{ .expansion = this }; } - const msg = std.fmt.allocPrint(bun.default_allocator, "no matches found: {s}", .{this.child_state.glob.walker.pattern}) catch bun.outOfMemory(); + const msg = std.fmt.allocPrint(this.base.allocator(), "no matches found: {s}", .{this.child_state.glob.walker.pattern}) catch bun.outOfMemory(); this.state = .{ .err = bun.shell.ShellErr{ .custom = msg, @@ -536,21 +549,25 @@ fn onGlobWalkDone(this: *Expansion, task: *ShellGlobTask) void { }; this.child_state.glob.walker.deinit(true); this.child_state = .idle; - this.next(); - return; + return .{ .expansion = this }; } for (task.result.items) |sentinel_str| { // The string is allocated in the glob walker arena and will be freed, so needs to be duped here - const duped = this.base.interpreter.allocator.dupeZ(u8, sentinel_str[0..sentinel_str.len]) catch bun.outOfMemory(); - this.pushResultSlice(duped); + const duped = this.base.allocator().dupeZ(u8, sentinel_str[0..sentinel_str.len]) catch bun.outOfMemory(); + switch (this.out.pushResultSliceOwned(duped)) { + .copied => { + this.base.allocator().free(duped); + }, 
+ .moved => {}, + } } this.word_idx += 1; this.child_state.glob.walker.deinit(true); this.child_state = .idle; this.state = .done; - this.next(); + return .{ .expansion = this }; } /// If the atom is actually a command substitution then does nothing and returns true @@ -602,19 +619,17 @@ pub fn appendSlice(this: *Expansion, buf: *std.ArrayList(u8), slice: []const u8) buf.appendSlice(slice) catch bun.outOfMemory(); } -pub fn pushResultSlice(this: *Expansion, buf: [:0]const u8) void { - this.out.pushResultSlice(buf); -} - pub fn pushCurrentOut(this: *Expansion) void { if (this.current_out.items.len == 0) return; if (this.current_out.items[this.current_out.items.len - 1] != 0) this.current_out.append(0) catch bun.outOfMemory(); - this.pushResult(&this.current_out); - this.current_out = std.ArrayList(u8).init(this.base.interpreter.allocator); -} - -pub fn pushResult(this: *Expansion, buf: *std.ArrayList(u8)) void { - this.out.pushResult(buf); + switch (this.out.pushResult(&this.current_out)) { + .copied => { + this.current_out.clearRetainingCapacity(); + }, + .moved => { + this.current_out = std.ArrayList(u8).init(this.base.allocator()); + }, + } } fn expandVar(this: *const Expansion, label: []const u8) []const u8 { @@ -728,12 +743,12 @@ pub const ShellGlobTask = struct { walker: *GlobWalker, result: std.ArrayList([:0]const u8), - allocator: Allocator, event_loop: JSC.EventLoopHandle, concurrent_task: JSC.EventLoopTask, // This is a poll because we want it to enter the uSockets loop ref: bun.Async.KeepAlive = .{}, err: ?Err = null, + alloc_scope: bun.AllocationScope, const This = @This(); @@ -741,24 +756,25 @@ pub const ShellGlobTask = struct { syscall: Syscall.Error, unknown: anyerror, - pub fn toJSC(this: Err, globalThis: *JSGlobalObject) JSValue { + pub fn toJS(this: Err, globalThis: *JSGlobalObject) JSValue { return switch (this) { - .syscall => |err| err.toJSC(globalThis), + .syscall => |err| err.toJS(globalThis), .unknown => |err| JSC.ZigString.fromBytes(@errorName(err)).toJS(globalThis), }; } }; - pub fn createOnMainThread(allocator: Allocator, walker: *GlobWalker, expansion: *Expansion) *This { + pub fn createOnMainThread(walker: *GlobWalker, expansion: *Expansion) *This { debug("createOnMainThread", .{}); - var this = allocator.create(This) catch bun.outOfMemory(); + var alloc_scope = bun.AllocationScope.init(bun.default_allocator); + var this = alloc_scope.allocator().create(This) catch bun.outOfMemory(); this.* = .{ + .alloc_scope = alloc_scope, .event_loop = expansion.base.eventLoop(), .concurrent_task = JSC.EventLoopTask.fromEventLoop(expansion.base.eventLoop()), .walker = walker, - .allocator = allocator, .expansion = expansion, - .result = std.ArrayList([:0]const u8).init(allocator), + .result = std.ArrayList([:0]const u8).init(this.alloc_scope.allocator()), }; this.ref.ref(this.event_loop); @@ -800,7 +816,7 @@ pub const ShellGlobTask = struct { pub fn runFromMainThread(this: *This) void { debug("runFromJS", .{}); - this.expansion.onGlobWalkDone(this); + this.expansion.onGlobWalkDone(this).run(); this.ref.unref(this.event_loop); } @@ -825,12 +841,15 @@ pub const ShellGlobTask = struct { pub fn deinit(this: *This) void { debug("deinit", .{}); this.result.deinit(); - this.allocator.destroy(this); + var alloc_scope = this.alloc_scope; + alloc_scope.allocator().destroy(this); + alloc_scope.deinit(); } }; const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const Allocator = std.mem.Allocator; @@ -839,7 +858,7 @@ const StatePtrUnion = 
bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; const GlobWalker = bun.shell.interpret.GlobWalker; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const log = bun.shell.interpret.log; diff --git a/src/shell/states/If.zig b/src/shell/states/If.zig index 42cd157d52..3195282563 100644 --- a/src/shell/states/If.zig +++ b/src/shell/states/If.zig @@ -40,24 +40,26 @@ pub fn format(this: *const If, comptime _: []const u8, _: std.fmt.FormatOptions, pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: *const ast.If, parent: ParentPtr, io: IO, ) *If { - return bun.new(If, .{ - .base = .{ .kind = .cmd, .interpreter = interpreter, .shell = shell_state }, + const if_stmt = parent.create(If); + if_stmt.* = .{ + .base = State.initWithNewAllocScope(.if_clause, interpreter, shell_state), .node = node, .parent = parent, .io = io, - }); + }; + return if_stmt; } -pub fn start(this: *If) void { - this.next(); +pub fn start(this: *If) Yield { + return .{ .@"if" = this }; } -fn next(this: *If) void { +pub fn next(this: *If) Yield { while (this.state != .done) { switch (this.state) { .idle => { @@ -79,8 +81,7 @@ fn next(this: *If) void { } switch (this.node.else_parts.len()) { 0 => { - this.parent.childDone(this, 0); - return; + return this.parent.childDone(this, 0); }, 1 => { this.state.exec.state = .@"else"; @@ -98,8 +99,7 @@ fn next(this: *If) void { }, // done .then => { - this.parent.childDone(this, this.state.exec.last_exit_code); - return; + return this.parent.childDone(this, this.state.exec.last_exit_code); }, // if successful, execute the elif's then branch // otherwise, move to the next elif, or to the final else if it exists @@ -114,8 +114,7 @@ fn next(this: *If) void { this.state.exec.state.elif.idx += 2; if (this.state.exec.state.elif.idx >= this.node.else_parts.len()) { - this.parent.childDone(this, 0); - return; + return this.parent.childDone(this, 0); } if (this.state.exec.state.elif.idx == this.node.else_parts.len() -| 1) { @@ -130,8 +129,7 @@ fn next(this: *If) void { continue; }, .@"else" => { - this.parent.childDone(this, this.state.exec.last_exit_code); - return; + return this.parent.childDone(this, this.state.exec.last_exit_code); }, } } @@ -140,24 +138,24 @@ fn next(this: *If) void { this.state.exec.stmt_idx += 1; const stmt = this.state.exec.stmts.getConst(idx); var newstmt = Stmt.init(this.base.interpreter, this.base.shell, stmt, this, this.io.copy()); - newstmt.start(); - return; + return newstmt.start(); }, - .waiting_write_err => return, // yield execution + .waiting_write_err => return .suspended, // yield execution .done => @panic("This code should not be reachable"), } } - this.parent.childDone(this, 0); + return this.parent.childDone(this, 0); } pub fn deinit(this: *If) void { log("{} deinit", .{this}); this.io.deref(); - bun.destroy(this); + this.base.endScope(); + this.parent.destroy(this); } -pub fn childDone(this: *If, child: ChildPtr, exit_code: ExitCode) void { +pub fn childDone(this: *If, child: ChildPtr, exit_code: ExitCode) Yield { defer child.deinit(); if (this.state != .exec) { @@ -168,8 +166,8 @@ pub fn childDone(this: *If, child: ChildPtr, exit_code: ExitCode) void { exec.last_exit_code = exit_code; switch (exec.state) { - .cond => this.next(), - .then => this.next(), + .cond => return .{ .@"if" = this }, + .then => return .{ .@"if" = this }, .elif
=> { // if (exit_code == 0) { // exec.stmts = this.node.else_parts.getConst(exec.state.elif.idx + 1); @@ -178,22 +176,22 @@ pub fn childDone(this: *If, child: ChildPtr, exit_code: ExitCode) void { // this.next(); // return; // } - this.next(); - return; + return .{ .@"if" = this }; }, - .@"else" => this.next(), + .@"else" => return .{ .@"if" = this }, } } const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const Interpreter = bun.shell.Interpreter; const StatePtrUnion = bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const log = bun.shell.interpret.log; diff --git a/src/shell/states/Pipeline.zig b/src/shell/states/Pipeline.zig index 45f9a3f1e4..2ee5e7fa9a 100644 --- a/src/shell/states/Pipeline.zig +++ b/src/shell/states/Pipeline.zig @@ -4,17 +4,24 @@ base: State, node: *const ast.Pipeline, /// Based on precedence rules, a pipeline can only be a child of a stmt or /// binary +/// +/// *WARNING*: Do not directly call `this.parent.childDone`, it should +/// be handled in `Pipeline.next()` parent: ParentPtr, exited_count: u32, cmds: ?[]CmdOrResult, pipes: ?[]Pipe, io: IO, state: union(enum) { - idle, - executing, + starting_cmds: struct { + idx: u32, + }, + pending, waiting_write_err, - done, -} = .idle, + done: struct { + exit_code: ExitCode = 0, + }, +} = .{ .starting_cmds = .{ .idx = 0 } }, pub const ParentPtr = StatePtrUnion(.{ Stmt, @@ -44,14 +51,14 @@ const CmdOrResult = union(enum) { pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: *const ast.Pipeline, parent: ParentPtr, io: IO, ) *Pipeline { - const pipeline = interpreter.allocator.create(Pipeline) catch bun.outOfMemory(); + const pipeline = parent.create(Pipeline); pipeline.* = .{ - .base = .{ .kind = .pipeline, .interpreter = interpreter, .shell = shell_state }, + .base = State.initWithNewAllocScope(.pipeline, interpreter, shell_state), .node = node, .parent = parent, .exited_count = 0, @@ -67,16 +74,16 @@ fn getIO(this: *Pipeline) IO { return this.io; } -fn writeFailingError(this: *Pipeline, comptime fmt: []const u8, args: anytype) void { +fn writeFailingError(this: *Pipeline, comptime fmt: []const u8, args: anytype) Yield { const handler = struct { fn enqueueCb(ctx: *Pipeline) void { ctx.state = .waiting_write_err; } }; - this.base.shell.writeFailingErrorFmt(this, handler.enqueueCb, fmt, args); + return this.base.shell.writeFailingErrorFmt(this, handler.enqueueCb, fmt, args); } -fn setupCommands(this: *Pipeline) bun.shell.interpret.CoroutineResult { +fn setupCommands(this: *Pipeline) ?Yield { const cmd_count = brk: { var i: u32 = 0; for (this.node.items) |*item| { @@ -88,9 +95,9 @@ fn setupCommands(this: *Pipeline) bun.shell.interpret.CoroutineResult { break :brk i; }; - this.cmds = if (cmd_count >= 1) this.base.interpreter.allocator.alloc(CmdOrResult, this.node.items.len) catch bun.outOfMemory() else null; - if (this.cmds == null) return .cont; - var pipes = this.base.interpreter.allocator.alloc(Pipe, if (cmd_count > 1) cmd_count - 1 else 1) catch bun.outOfMemory(); + this.cmds = if (cmd_count >= 1) this.base.allocator().alloc(CmdOrResult, this.node.items.len) catch bun.outOfMemory() else null; + if (this.cmds == null) return null; + var pipes = this.base.allocator().alloc(Pipe, if (cmd_count > 1)
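
Note: the `*WARNING*` doc comment above and the `return .{ .@"if" = this }` / `.{ .starting_cmds = ... }` changes throughout these state files are all the same refactor: state machines no longer call back into `next()`/`childDone()` recursively, they return a `Yield` value that a driver loop keeps running. A self-contained toy of that trampoline shape; `ToyYield` and `ToyPipeline` are illustrative names only, not the patch's types:

```zig
const ToyYield = union(enum) {
    pipeline: *ToyPipeline,
    suspended, // parked on the event loop; stop driving
    done,

    /// Drive the machine iteratively; the native stack stays flat no
    /// matter how many steps the interpreter takes.
    pub fn run(initial: ToyYield) void {
        var y = initial;
        while (true) switch (y) {
            .pipeline => |p| y = p.next(),
            .suspended, .done => return,
        };
    }
};

const ToyPipeline = struct {
    remaining: u32,

    fn next(this: *ToyPipeline) ToyYield {
        if (this.remaining == 0) return .done;
        this.remaining -= 1;
        // Yield ourselves back to the driver instead of recursing.
        return .{ .pipeline = this };
    }
};

test "trampoline terminates without recursion" {
    var p = ToyPipeline{ .remaining = 3 };
    ToyYield.run(.{ .pipeline = &p });
}
```
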
cmd_count - 1 else 1) catch bun.outOfMemory(); if (cmd_count > 1) { var pipes_set: u32 = 0; @@ -100,8 +107,7 @@ fn setupCommands(this: *Pipeline) bun.shell.interpret.CoroutineResult { closefd(pipe[1]); } const system_err = err.toShellSystemError(); - this.writeFailingError("bun: {s}\n", .{system_err.message}); - return .yield; + return this.writeFailingError("bun: {s}\n", .{system_err.message}); } } @@ -116,12 +122,11 @@ fn setupCommands(this: *Pipeline) bun.shell.interpret.CoroutineResult { cmd_io.stdin = stdin; cmd_io.stdout = stdout; _ = cmd_io.stderr.ref(); - const subshell_state = switch (this.base.shell.dupeForSubshell(this.base.interpreter.allocator, cmd_io, .pipeline)) { + const subshell_state = switch (this.base.shell.dupeForSubshell(this.base.allocScope(), this.base.allocator(), cmd_io, .pipeline)) { .result => |s| s, .err => |err| { const system_err = err.toShellSystemError(); - this.writeFailingError("bun: {s}\n", .{system_err.message}); - return .yield; + return this.writeFailingError("bun: {s}\n", .{system_err.message}); }, }; this.cmds.?[i] = .{ @@ -142,55 +147,66 @@ fn setupCommands(this: *Pipeline) bun.shell.interpret.CoroutineResult { this.pipes = pipes; - return .cont; + return null; } -pub fn start(this: *Pipeline) void { - if (this.setupCommands() == .yield) return; - - if (this.state == .waiting_write_err or this.state == .done) return; - const cmds = this.cmds orelse { - this.state = .done; - this.parent.childDone(this, 0); - return; - }; - - if (comptime bun.Environment.allow_assert) { - assert(this.exited_count == 0); +pub fn start(this: *Pipeline) Yield { + if (this.setupCommands()) |yield| return yield; + if (this.state == .waiting_write_err or this.state == .done) return .suspended; + if (this.cmds == null) { + this.state = .{ .done = .{} }; + return .done; } + + assert(this.exited_count == 0); + log("pipeline start {x} (count={d})", .{ @intFromPtr(this), this.node.items.len }); + if (this.node.items.len == 0) { - this.state = .done; - this.parent.childDone(this, 0); - return; + this.state = .{ .done = .{} }; + return .done; } - for (cmds) |*cmd_or_result| { - assert(cmd_or_result.* == .cmd); - log("Pipeline start cmd", .{}); - var cmd = cmd_or_result.cmd; - cmd.call("start", .{}, void); + return .{ .pipeline = this }; +} + +pub fn next(this: *Pipeline) Yield { + switch (this.state) { + .starting_cmds => { + const cmds = this.cmds.?; + const idx = this.state.starting_cmds.idx; + if (idx >= cmds.len) { + this.state = .pending; + return .suspended; + } + log("Pipeline(0x{x}) starting cmd {d}/{d}", .{ @intFromPtr(this), idx + 1, cmds.len }); + this.state.starting_cmds.idx += 1; + const cmd_or_result = cmds[idx]; + assert(cmd_or_result == .cmd); + return cmd_or_result.cmd.call("start", .{}, Yield); + }, + .pending => shell.unreachableState("Pipeline.next", "pending"), + .waiting_write_err => shell.unreachableState("Pipeline.next", "waiting_write_err"), + .done => return this.parent.childDone(this, this.state.done.exit_code), } } -pub fn onIOWriterChunk(this: *Pipeline, _: usize, err: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Pipeline, _: usize, err: ?JSC.SystemError) Yield { if (comptime bun.Environment.allow_assert) { assert(this.state == .waiting_write_err); } if (err) |e| { this.base.throw(&shell.ShellErr.newSys(e)); - return; + return .failed; } - this.state = .done; - this.parent.childDone(this, 0); + this.state = .{ .done = .{} }; + return .done; } -pub fn childDone(this: *Pipeline, child: ChildPtr, exit_code: ExitCode) void { - if (comptime 
bun.Environment.allow_assert) { - assert(this.cmds.?.len > 0); - } +pub fn childDone(this: *Pipeline, child: ChildPtr, exit_code: ExitCode) Yield { + assert(this.cmds.?.len > 0); const idx = brk: { const ptr_value: u64 = @bitCast(child.ptr.repr); @@ -204,7 +220,7 @@ pub fn childDone(this: *Pipeline, child: ChildPtr, exit_code: ExitCode) void { @panic("Invalid pipeline state"); }; - log("pipeline child done {x} ({d}) i={d}", .{ @intFromPtr(this), exit_code, idx }); + log("Pipeline(0x{x}) child done ({d}) i={d}", .{ @intFromPtr(this), exit_code, idx }); // We duped the subshell for commands in the pipeline so we need to // deinitialize it. if (child.ptr.is(Cmd)) { @@ -226,18 +242,22 @@ pub fn childDone(this: *Pipeline, child: ChildPtr, exit_code: ExitCode) void { this.cmds.?[idx] = .{ .result = exit_code }; this.exited_count += 1; + log("Pipeline(0x{x}) check exited_count={d} cmds.len={d}", .{ @intFromPtr(this), this.exited_count, this.cmds.?.len }); if (this.exited_count >= this.cmds.?.len) { var last_exit_code: ExitCode = 0; - for (this.cmds.?) |cmd_or_result| { + var i: i64 = @as(i64, @intCast(this.cmds.?.len)) - 1; + while (i > 0) : (i -= 1) { + const cmd_or_result = this.cmds.?[@intCast(i)]; if (cmd_or_result == .result) { last_exit_code = cmd_or_result.result; break; } } - this.state = .done; - this.parent.childDone(this, last_exit_code); - return; + this.state = .{ .done = .{ .exit_code = last_exit_code } }; + return this.next(); } + + return .suspended; } pub fn deinit(this: *Pipeline) void { @@ -249,13 +269,14 @@ pub fn deinit(this: *Pipeline) void { } } if (this.pipes) |pipes| { - this.base.interpreter.allocator.free(pipes); + this.base.allocator().free(pipes); } if (this.cmds) |cmds| { - this.base.interpreter.allocator.free(cmds); + this.base.allocator().free(cmds); } this.io.deref(); - this.base.interpreter.allocator.destroy(this); + this.base.endScope(); + this.parent.destroy(this); } fn initializePipes(pipes: []Pipe, set_count: *u32) Maybe(void) { @@ -268,7 +289,8 @@ fn initializePipes(pipes: []Pipe, set_count: *u32) Maybe(void) { pipe[0] = .fromUV(fds[0]); pipe[1] = .fromUV(fds[1]); } else { - switch (bun.sys.socketpair( + switch (bun.sys.socketpairForShell( + // switch (bun.sys.socketpair( std.posix.AF.UNIX, std.posix.SOCK.STREAM, 0, @@ -304,13 +326,14 @@ fn readPipe(pipes: []Pipe, proc_idx: usize, io: *IO, evtloop: JSC.EventLoopHandl const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const Interpreter = bun.shell.Interpreter; const StatePtrUnion = bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const log = bun.shell.interpret.log; diff --git a/src/shell/states/Script.zig b/src/shell/states/Script.zig index 0c2a365a05..99fc122a6f 100644 --- a/src/shell/states/Script.zig +++ b/src/shell/states/Script.zig @@ -35,14 +35,14 @@ pub fn format(this: *const Script, comptime _: []const u8, _: std.fmt.FormatOpti pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: *const ast.Script, parent_ptr: ParentPtr, io: IO, ) *Script { - const script = interpreter.allocator.create(Script) catch bun.outOfMemory(); + const script = parent_ptr.create(Script); script.* = .{ - .base = .{ .kind = .script, .interpreter = interpreter, .shell = shell_state }, + .base = 
State.initWithNewAllocScope(.script, interpreter, shell_state), .node = node, .parent = parent_ptr, .io = io, @@ -55,43 +55,40 @@ fn getIO(this: *Script) IO { return this.io; } -pub fn start(this: *Script) void { +pub fn start(this: *Script) Yield { if (this.node.stmts.len == 0) return this.finish(0); - this.next(); + return .{ .script = this }; } -fn next(this: *Script) void { +pub fn next(this: *Script) Yield { switch (this.state) { .normal => { - if (this.state.normal.idx >= this.node.stmts.len) return; + if (this.state.normal.idx >= this.node.stmts.len) return .suspended; const stmt_node = &this.node.stmts[this.state.normal.idx]; this.state.normal.idx += 1; var io = this.getIO(); var stmt = Stmt.init(this.base.interpreter, this.base.shell, stmt_node, this, io.ref().*); - stmt.start(); - return; + return stmt.start(); }, } } -fn finish(this: *Script, exit_code: ExitCode) void { +fn finish(this: *Script, exit_code: ExitCode) Yield { if (this.parent.ptr.is(Interpreter)) { log("Interpreter script finish", .{}); - this.base.interpreter.childDone(InterpreterChildPtr.init(this), exit_code); - return; + return this.base.interpreter.childDone(InterpreterChildPtr.init(this), exit_code); } - this.parent.childDone(this, exit_code); + return this.parent.childDone(this, exit_code); } -pub fn childDone(this: *Script, child: ChildPtr, exit_code: ExitCode) void { +pub fn childDone(this: *Script, child: ChildPtr, exit_code: ExitCode) Yield { child.deinit(); if (this.state.normal.idx >= this.node.stmts.len) { - this.finish(exit_code); - return; + return this.finish(exit_code); } - this.next(); + return this.next(); } pub fn deinit(this: *Script) void { @@ -104,7 +101,8 @@ pub fn deinit(this: *Script) void { this.base.shell.deinit(); } - bun.default_allocator.destroy(this); + this.base.endScope(); + this.parent.destroy(this); } pub fn deinitFromInterpreter(this: *Script) void { @@ -117,13 +115,14 @@ pub fn deinitFromInterpreter(this: *Script) void { const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const Interpreter = bun.shell.Interpreter; const InterpreterChildPtr = Interpreter.InterpreterChildPtr; const StatePtrUnion = bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const log = bun.shell.interpret.log; diff --git a/src/shell/states/Stmt.zig b/src/shell/states/Stmt.zig index 5c646e9831..6524381521 100644 --- a/src/shell/states/Stmt.zig +++ b/src/shell/states/Stmt.zig @@ -26,18 +26,19 @@ pub const ChildPtr = StatePtrUnion(.{ pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: *const ast.Stmt, parent: anytype, io: IO, ) *Stmt { - var script = interpreter.allocator.create(Stmt) catch bun.outOfMemory(); - script.base = .{ .kind = .stmt, .interpreter = interpreter, .shell = shell_state }; - script.node = node; - script.parent = switch (@TypeOf(parent)) { + const parent_ptr = switch (@TypeOf(parent)) { ParentPtr => parent, else => ParentPtr.init(parent), }; + var script = parent_ptr.create(Stmt); + script.base = State.initWithNewAllocScope(.stmt, interpreter, shell_state); + script.node = node; + script.parent = parent_ptr; script.idx = 0; script.last_exit_code = null; script.currently_executing = null; @@ -46,16 +47,16 @@ pub fn init( return script; } -pub fn start(this: *Stmt) void { +pub fn 
start(this: *Stmt) Yield { if (bun.Environment.allow_assert) { assert(this.idx == 0); assert(this.last_exit_code == null); assert(this.currently_executing == null); } - this.next(); + return .{ .stmt = this }; } -pub fn next(this: *Stmt) void { +pub fn next(this: *Stmt) Yield { if (this.idx >= this.node.exprs.len) return this.parent.childDone(this, this.last_exit_code orelse 0); @@ -64,50 +65,54 @@ pub fn next(this: *Stmt) void { .binary => { const binary = Binary.init(this.base.interpreter, this.base.shell, child.binary, Binary.ParentPtr.init(this), this.io.copy()); this.currently_executing = ChildPtr.init(binary); - binary.start(); + return binary.start(); }, .cmd => { const cmd = Cmd.init(this.base.interpreter, this.base.shell, child.cmd, Cmd.ParentPtr.init(this), this.io.copy()); this.currently_executing = ChildPtr.init(cmd); - cmd.start(); + return cmd.start(); }, .pipeline => { const pipeline = Pipeline.init(this.base.interpreter, this.base.shell, child.pipeline, Pipeline.ParentPtr.init(this), this.io.copy()); this.currently_executing = ChildPtr.init(pipeline); - pipeline.start(); + return pipeline.start(); }, .assign => |assigns| { - var assign_machine = this.base.interpreter.allocator.create(Assigns) catch bun.outOfMemory(); - assign_machine.init(this.base.interpreter, this.base.shell, assigns, .shell, Assigns.ParentPtr.init(this), this.io.copy()); - assign_machine.start(); + const assign_machine = Assigns.init(this.base.interpreter, this.base.shell, assigns, .shell, Assigns.ParentPtr.init(this), this.io.copy()); + return assign_machine.start(); }, .subshell => { - switch (this.base.shell.dupeForSubshell(this.base.interpreter.allocator, this.io, .subshell)) { - .result => |shell_state| { - var script = Subshell.init(this.base.interpreter, shell_state, child.subshell, Subshell.ParentPtr.init(this), this.io.copy()); - script.start(); - }, + var script = switch (Subshell.initDupeShellState( + this.base.interpreter, + this.base.shell, + child.subshell, + Subshell.ParentPtr.init(this), + this.io.copy(), + )) { + .result => |s| s, .err => |e| { this.base.throw(&bun.shell.ShellErr.newSys(e)); + return .failed; }, - } + }; + return script.start(); }, .@"if" => { const if_clause = If.init(this.base.interpreter, this.base.shell, child.@"if", If.ParentPtr.init(this), this.io.copy()); - if_clause.start(); + return if_clause.start(); }, .condexpr => { const condexpr = CondExpr.init(this.base.interpreter, this.base.shell, child.condexpr, CondExpr.ParentPtr.init(this), this.io.copy()); - condexpr.start(); + return condexpr.start(); }, .@"async" => { const @"async" = Async.init(this.base.interpreter, this.base.shell, child.@"async", Async.ParentPtr.init(this), this.io.copy()); - @"async".start(); + return @"async".start(); }, } } -pub fn childDone(this: *Stmt, child: ChildPtr, exit_code: ExitCode) void { +pub fn childDone(this: *Stmt, child: ChildPtr, exit_code: ExitCode) Yield { const data = child.ptr.repr.data; log("child done Stmt {x} child({s})={x} exit={d}", .{ @intFromPtr(this), child.tagName(), @as(usize, @intCast(child.ptr.repr._ptr)), exit_code }); this.last_exit_code = exit_code; @@ -116,7 +121,7 @@ pub fn childDone(this: *Stmt, child: ChildPtr, exit_code: ExitCode) void { log("{d} {d}", .{ data, data2 }); child.deinit(); this.currently_executing = null; - this.next(); + return this.next(); } pub fn deinit(this: *Stmt) void { @@ -125,16 +130,18 @@ pub fn deinit(this: *Stmt) void { if (this.currently_executing) |child| { child.deinit(); } - this.base.interpreter.allocator.destroy(this); + 
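
Note: the `initWithNewAllocScope` / `endScope()` / `parent.create` / `parent.destroy` pairs appearing across these files suggest each interpreter node now owns an allocation scope that is checked when the node is torn down. A toy sketch of that idea; the real `bun.AllocationScope` does more bookkeeping, and `ToyScope` below is hypothetical:

```zig
const std = @import("std");

/// Toy allocation scope: counts live allocations so `end()` can assert
/// that the node freed everything it allocated before being destroyed.
const ToyScope = struct {
    gpa: std.mem.Allocator,
    live: usize = 0,

    fn alloc(self: *ToyScope, n: usize) ![]u8 {
        const buf = try self.gpa.alloc(u8, n);
        self.live += 1;
        return buf;
    }

    fn free(self: *ToyScope, buf: []u8) void {
        self.gpa.free(buf);
        self.live -= 1;
    }

    fn end(self: *ToyScope) void {
        // Mirrors `this.base.endScope()` in `deinit`: leaks become loud.
        std.debug.assert(self.live == 0);
    }
};
```
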
this.base.endScope(); + this.parent.destroy(this); } const bun = @import("bun"); +const Yield = bun.shell.Yield; const Interpreter = bun.shell.Interpreter; const StatePtrUnion = bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const log = bun.shell.interpret.log; diff --git a/src/shell/states/Subshell.zig b/src/shell/states/Subshell.zig index 1d875d3c44..bdbb30df9d 100644 --- a/src/shell/states/Subshell.zig +++ b/src/shell/states/Subshell.zig @@ -25,7 +25,6 @@ pub const ParentPtr = StatePtrUnion(.{ pub const ChildPtr = StatePtrUnion(.{ Script, - Subshell, Expansion, }); @@ -35,39 +34,67 @@ pub fn format(this: *const Subshell, comptime _: []const u8, _: std.fmt.FormatOp pub fn init( interpreter: *Interpreter, - shell_state: *ShellState, + shell_state: *ShellExecEnv, node: *const ast.Subshell, parent: ParentPtr, io: IO, ) *Subshell { - return bun.new(Subshell, .{ - .base = .{ .kind = .condexpr, .interpreter = interpreter, .shell = shell_state }, + const subshell = parent.create(Subshell); + subshell.* = .{ + .base = State.initWithNewAllocScope(.subshell, interpreter, shell_state), .node = node, .parent = parent, .io = io, - .redirection_file = std.ArrayList(u8).init(bun.default_allocator), - }); + .redirection_file = undefined, + }; + subshell.redirection_file = std.ArrayList(u8).init(subshell.base.allocator()); + return subshell; } -pub fn start(this: *Subshell) void { +pub fn initDupeShellState( + interpreter: *Interpreter, + shell_state: *ShellExecEnv, + node: *const ast.Subshell, + parent: ParentPtr, + io: IO, +) bun.JSC.Maybe(*Subshell) { + const subshell = parent.create(Subshell); + subshell.* = .{ + .base = State.initWithNewAllocScope(.subshell, interpreter, shell_state), + .node = node, + .parent = parent, + .io = io, + .redirection_file = undefined, + }; + subshell.base.shell = switch (shell_state.dupeForSubshell(subshell.base.allocScope(), subshell.base.allocator(), io, .subshell)) { + .result => |s| s, + .err => |e| { + parent.destroy(subshell); + return .{ .err = e }; + }, + }; + subshell.redirection_file = std.ArrayList(u8).init(subshell.base.allocator()); + return .{ .result = subshell }; +} + +pub fn start(this: *Subshell) Yield { log("{} start", .{this}); const script = Script.init(this.base.interpreter, this.base.shell, &this.node.script, Script.ParentPtr.init(this), this.io.copy()); - script.start(); + return script.start(); } -pub fn next(this: *Subshell) void { +pub fn next(this: *Subshell) Yield { while (this.state != .done) { switch (this.state) { .idle => { this.state = .{ .expanding_redirect = .{ .expansion = undefined }, }; - this.next(); + return .{ .subshell = this }; }, .expanding_redirect => { if (this.state.expanding_redirect.idx >= 1) { - this.transitionToExec(); - return; + return this.transitionToExec(); } this.state.expanding_redirect.idx += 1; @@ -75,8 +102,7 @@ pub fn next(this: *Subshell) void { // `expanding_args` state const node_to_expand = brk: { if (this.node.redirect != null and this.node.redirect.? 
== .atom) break :brk &this.node.redirect.?.atom; - this.transitionToExec(); - return; + return this.transitionToExec(); }; Expansion.init( @@ -93,45 +119,45 @@ pub fn next(this: *Subshell) void { this.io.copy(), ); - this.state.expanding_redirect.expansion.start(); - return; + return this.state.expanding_redirect.expansion.start(); }, - .wait_write_err, .exec => return, + .wait_write_err, .exec => return .suspended, .done => @panic("This should not be possible."), } } - this.parent.childDone(this, 0); + return this.parent.childDone(this, 0); } -pub fn transitionToExec(this: *Subshell) void { +pub fn transitionToExec(this: *Subshell) Yield { log("{} transitionToExec", .{this}); const script = Script.init(this.base.interpreter, this.base.shell, &this.node.script, Script.ParentPtr.init(this), this.io.copy()); this.state = .exec; - script.start(); + return script.start(); } -pub fn childDone(this: *Subshell, child_ptr: ChildPtr, exit_code: ExitCode) void { - defer child_ptr.deinit(); +pub fn childDone(this: *Subshell, child_ptr: ChildPtr, exit_code: ExitCode) Yield { this.exit_code = exit_code; if (child_ptr.ptr.is(Expansion) and exit_code != 0) { if (exit_code != 0) { const err = this.state.expanding_redirect.expansion.state.err; defer err.deinit(bun.default_allocator); this.state.expanding_redirect.expansion.deinit(); - this.writeFailingError("{}\n", .{err}); - return; + return this.writeFailingError("{}\n", .{err}); } - this.next(); + child_ptr.deinit(); + return .{ .subshell = this }; } if (child_ptr.ptr.is(Script)) { - this.parent.childDone(this, exit_code); - return; + child_ptr.deinit(); + return this.parent.childDone(this, exit_code); } + + bun.shell.unreachableState("Subshell.childDone", "expected Script or Expansion"); } -pub fn onIOWriterChunk(this: *Subshell, _: usize, err: ?JSC.SystemError) void { +pub fn onIOWriterChunk(this: *Subshell, _: usize, err: ?JSC.SystemError) Yield { if (comptime bun.Environment.allow_assert) { assert(this.state == .wait_write_err); } @@ -141,34 +167,36 @@ pub fn onIOWriterChunk(this: *Subshell, _: usize, err: ?JSC.SystemError) void { } this.state = .done; - this.parent.childDone(this, this.exit_code); + return this.parent.childDone(this, this.exit_code); } pub fn deinit(this: *Subshell) void { this.base.shell.deinit(); this.io.deref(); this.redirection_file.deinit(); - bun.destroy(this); + this.base.endScope(); + this.parent.destroy(this); } -pub fn writeFailingError(this: *Subshell, comptime fmt: []const u8, args: anytype) void { +pub fn writeFailingError(this: *Subshell, comptime fmt: []const u8, args: anytype) Yield { const handler = struct { fn enqueueCb(ctx: *Subshell) void { ctx.state = .wait_write_err; } }; - this.base.shell.writeFailingErrorFmt(this, handler.enqueueCb, fmt, args); + return this.base.shell.writeFailingErrorFmt(this, handler.enqueueCb, fmt, args); } const std = @import("std"); const bun = @import("bun"); +const Yield = bun.shell.Yield; const shell = bun.shell; const Interpreter = bun.shell.Interpreter; const StatePtrUnion = bun.shell.interpret.StatePtrUnion; const ast = bun.shell.AST; const ExitCode = bun.shell.ExitCode; -const ShellState = Interpreter.ShellState; +const ShellExecEnv = Interpreter.ShellExecEnv; const State = bun.shell.Interpreter.State; const IO = bun.shell.Interpreter.IO; const log = bun.shell.interpret.log; diff --git a/src/shell/subproc.zig b/src/shell/subproc.zig index ed36bc17cf..31d0bd575d 100644 --- a/src/shell/subproc.zig +++ b/src/shell/subproc.zig @@ -1,5 +1,6 @@ const default_allocator = 
bun.default_allocator; const bun = @import("bun"); +const Yield = bun.shell.Yield; const Environment = bun.Environment; const strings = bun.strings; const Output = bun.Output; @@ -148,7 +149,7 @@ pub const ShellSubprocess = struct { if (Environment.isWindows) { switch (stdio) { - .pipe => { + .pipe, .readable_stream => { if (result == .buffer) { const pipe = JSC.WebCore.FileSink.createWithPipe(event_loop, result.buffer); @@ -235,6 +236,10 @@ pub const ShellSubprocess = struct { .ipc, .capture => { return Writable{ .ignore = {} }; }, + .readable_stream => { + // The shell never uses this + @panic("Unimplemented stdin readable_stream"); + }, } } @@ -246,7 +251,7 @@ pub const ShellSubprocess = struct { .pipe => |pipe| { this.* = .{ .ignore = {} }; if (subprocess.process.hasExited() and !subprocess.flags.has_stdin_destructor_called) { - pipe.onAttachedProcessExit(); + pipe.onAttachedProcessExit(&subprocess.process.status); return pipe.toJS(globalThis); } else { subprocess.flags.has_stdin_destructor_called = false; @@ -383,6 +388,7 @@ pub const ShellSubprocess = struct { return readable; }, .capture => Readable{ .pipe = PipeReader.create(event_loop, process, result, shellio, out_type) }, + .readable_stream => Readable{ .ignore = {} }, // Shell doesn't use readable_stream }; } @@ -404,6 +410,7 @@ pub const ShellSubprocess = struct { return readable; }, .capture => Readable{ .pipe = PipeReader.create(event_loop, process, result, shellio, out_type) }, + .readable_stream => Readable{ .ignore = {} }, // Shell doesn't use readable_stream }; } @@ -796,6 +803,8 @@ pub const ShellSubprocess = struct { } } + const no_sigpipe = if (shellio.stdout) |iowriter| !iowriter.flags.is_socket else true; + var spawn_options = bun.spawn.SpawnOptions{ .cwd = spawn_args.cwd, .stdin = switch (spawn_args.stdio[0].asSpawnOption(0)) { @@ -828,6 +837,9 @@ pub const ShellSubprocess = struct { .loop = event_loop, }, }; + if (bun.Environment.isPosix) { + spawn_options.no_sigpipe = no_sigpipe; + } spawn_args.argv.append(allocator, null) catch { return .{ .err = .{ .custom = bun.default_allocator.dupe(u8, "out of memory") catch bun.outOfMemory() } }; @@ -1003,7 +1015,10 @@ pub const PipeReader = struct { .bytelist => { this.bytelist.deinitWithAllocator(bun.default_allocator); }, - .array_buffer => {}, + .array_buffer => { + // FIXME: SHOULD THIS BE HERE? 
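
Note: the `.run()` calls added throughout this file mark trampoline entry points: an event-loop callback has no caller to hand a `Yield` back to, so whatever the state machine returns must be driven to completion on the spot. A sketch of that boundary pattern; `ToyReader` and `trySignalDone` are illustrative names, not the patch's API:

```zig
const ToyYield = union(enum) {
    suspended,
    done,

    pub fn run(y: ToyYield) void {
        switch (y) {
            // Nothing left to drive in this toy; the real driver loops here.
            .suspended, .done => {},
        }
    }
};

const ToyReader = struct {
    fn trySignalDone(_: *ToyReader) ToyYield {
        return .done;
    }
};

/// An event-loop callback cannot return a Yield, so it drives it immediately.
fn onIOFinished(reader: *ToyReader) void {
    reader.trySignalDone().run();
}
```
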
+ this.array_buffer.buf.deinit(); + }, } } }; @@ -1018,7 +1033,7 @@ pub const PipeReader = struct { if (this.dead or this.err != null) return; log("CapturedWriter(0x{x}, {s}) doWrite len={d} parent_amount={d}", .{ @intFromPtr(this), @tagName(this.parent().out_type), chunk.len, this.parent().buffered_output.len() }); - this.writer.enqueue(this, null, chunk); + this.writer.enqueue(this, null, chunk).run(); } pub fn getBuffer(this: *CapturedWriter) []const u8 { @@ -1047,16 +1062,17 @@ pub const PipeReader = struct { return this.written + just_written >= this.parent().buffered_output.len(); } - pub fn onIOWriterChunk(this: *CapturedWriter, amount: usize, err: ?JSC.SystemError) void { + pub fn onIOWriterChunk(this: *CapturedWriter, amount: usize, err: ?JSC.SystemError) Yield { log("CapturedWriter({x}, {s}) onWrite({d}, has_err={any}) total_written={d} total_to_write={d}", .{ @intFromPtr(this), @tagName(this.parent().out_type), amount, err != null, this.written + amount, this.parent().buffered_output.len() }); this.written += amount; if (err) |e| { log("CapturedWriter(0x{x}, {s}) onWrite errno={d} errmsg={} errfd={} syscall={}", .{ @intFromPtr(this), @tagName(this.parent().out_type), e.errno, e.message, e.fd, e.syscall }); this.err = e; - this.parent().trySignalDoneToCmd(); + return this.parent().trySignalDoneToCmd(); } else if (this.written >= this.parent().buffered_output.len() and !(this.parent().state == .pending)) { - this.parent().trySignalDoneToCmd(); + return this.parent().trySignalDoneToCmd(); } + return .suspended; } pub fn onError(this: *CapturedWriter, err: bun.sys.Error) void { @@ -1093,7 +1109,7 @@ pub const PipeReader = struct { } pub fn onCapturedWriterDone(this: *PipeReader) void { - this.trySignalDoneToCmd(); + this.trySignalDoneToCmd().run(); } pub fn create(event_loop: JSC.EventLoopHandle, process: *ShellSubprocess, result: StdioResult, capture: ?*sh.IOWriter, out_type: bun.shell.Subprocess.OutKind) *PipeReader { @@ -1186,7 +1202,7 @@ // we need to ref because the process might be done and deref inside signalDoneToCmd and we want to keep it alive to check this.process this.ref(); defer this.deref(); - this.trySignalDoneToCmd(); + this.trySignalDoneToCmd().run(); if (this.process) |process| { // this.process = null; @@ -1197,8 +1213,8 @@ pub fn trySignalDoneToCmd( this: *PipeReader, - ) void { - if (!this.isDone()) return; + ) Yield { + if (!this.isDone()) return .suspended; log("signalDoneToCmd ({x}: {s}) isDone={any}", .{ @intFromPtr(this), @tagName(this.out_type), this.isDone() }); if (bun.Environment.allow_assert) assert(this.process != null); if (this.process) |proc| { @@ -1216,9 +1232,10 @@ } break :brk null; }; - cmd.bufferedOutputClose(this.out_type, e); + return cmd.bufferedOutputClose(this.out_type, e); } } + return .suspended; } pub fn kind(reader: *const PipeReader, process: *const ShellSubprocess) StdioKind { @@ -1309,7 +1326,7 @@ pub const PipeReader = struct { // we need to ref because the process might be done and deref inside signalDoneToCmd and we want to keep it alive to check this.process this.ref(); defer this.deref(); - this.trySignalDoneToCmd(); + this.trySignalDoneToCmd().run(); if (this.process) |process| { // this.process = null; process.onCloseIO(this.kind(process)); diff --git a/src/sourcemap/CodeCoverage.zig b/src/sourcemap/CodeCoverage.zig index 167f7d2ad7..d920ba42ac 100644 --- a/src/sourcemap/CodeCoverage.zig +++ b/src/sourcemap/CodeCoverage.zig @@
-1,7 +1,6 @@ const bun = @import("bun"); const std = @import("std"); const LineOffsetTable = bun.sourcemap.LineOffsetTable; -const SourceMap = bun.sourcemap; const Bitset = bun.bit_set.DynamicBitSetUnmanaged; const LinesHits = @import("../baby_list.zig").BabyList(u32); const Output = bun.Output; @@ -561,7 +560,7 @@ pub const ByteRangeMapping = struct { } const column_position = byte_offset -| line_start_byte_offset; - if (SourceMap.Mapping.find(parsed_mapping.mappings, @intCast(new_line_index), @intCast(column_position))) |point| { + if (parsed_mapping.mappings.find(@intCast(new_line_index), @intCast(column_position))) |*point| { if (point.original.lines < 0) continue; const line: u32 = @as(u32, @intCast(point.original.lines)); @@ -605,7 +604,7 @@ pub const ByteRangeMapping = struct { const column_position = byte_offset -| line_start_byte_offset; - if (SourceMap.Mapping.find(parsed_mapping.mappings, @intCast(new_line_index), @intCast(column_position))) |point| { + if (parsed_mapping.mappings.find(@intCast(new_line_index), @intCast(column_position))) |point| { if (point.original.lines < 0) continue; const line: u32 = @as(u32, @intCast(point.original.lines)); diff --git a/src/sourcemap/JSSourceMap.zig b/src/sourcemap/JSSourceMap.zig new file mode 100644 index 0000000000..99afae53e4 --- /dev/null +++ b/src/sourcemap/JSSourceMap.zig @@ -0,0 +1,306 @@ +/// This implements the JavaScript SourceMap class from Node.js. +/// +const JSSourceMap = @This(); + +sourcemap: *bun.sourcemap.ParsedSourceMap, +sources: []bun.String = &.{}, +names: []bun.String = &.{}, + +fn findSourceMap( + globalObject: *JSGlobalObject, + callFrame: *CallFrame, +) bun.JSError!JSValue { + const source_url_value = callFrame.argument(0); + if (!source_url_value.isString()) { + return .js_undefined; + } + + var source_url_string = try bun.String.fromJS(source_url_value, globalObject); + defer source_url_string.deref(); + + var source_url_slice = source_url_string.toUTF8(bun.default_allocator); + defer source_url_slice.deinit(); + + var source_url = source_url_slice.slice(); + if (bun.strings.hasPrefix(source_url, "node:") or bun.strings.hasPrefix(source_url, "bun:") or bun.strings.hasPrefix(source_url, "data:")) { + return .js_undefined; + } + + if (bun.strings.indexOf(source_url, "://")) |source_url_index| { + if (bun.strings.eqlComptime(source_url[0..source_url_index], "file")) { + const path = bun.JSC.URL.pathFromFileURL(source_url_string); + + if (path.tag == .Dead) { + return globalObject.ERR(.INVALID_URL, "Invalid URL: {s}", .{source_url}).throw(); + } + + // Replace the file:// URL with the absolute path. 
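Note: the four statements that follow release the original URL string and its UTF-8 slice before re-pointing both locals at the converted path, so the `vm.source_mappings.get(...)` lookup below keys off the filesystem path rather than the `file://` URL, and the original allocations are not leaked.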
+ source_url_string.deref(); + source_url_slice.deinit(); + source_url_string = path; + source_url_slice = path.toUTF8(bun.default_allocator); + source_url = source_url_slice.slice(); + } + } + + const vm = globalObject.bunVM(); + const source_map = vm.source_mappings.get(source_url) orelse return .js_undefined; + const fake_sources_array = bun.default_allocator.alloc(bun.String, 1) catch return globalObject.throwOutOfMemory(); + fake_sources_array[0] = source_url_string.dupeRef(); + + const this = bun.new(JSSourceMap, .{ + .sourcemap = source_map, + .sources = fake_sources_array, + .names = &.{}, + }); + + return this.toJS(globalObject); +} + +pub fn constructor( + globalObject: *JSGlobalObject, + callFrame: *CallFrame, + thisValue: JSValue, +) bun.JSError!*JSSourceMap { + const payload_arg = callFrame.argument(0); + const options_arg = callFrame.argument(1); + + try globalObject.validateObject("payload", payload_arg, .{}); + + var line_lengths: JSValue = .zero; + if (options_arg.isObject()) { + // Node doesn't check it further than this. + if (try options_arg.getIfPropertyExists(globalObject, "lineLengths")) |lengths| { + if (lengths.jsType().isArray()) { + line_lengths = lengths; + } + } + } + + // Parse the payload to create a proper sourcemap + var arena = bun.ArenaAllocator.init(bun.default_allocator); + defer arena.deinit(); + const arena_allocator = arena.allocator(); + + // Extract mappings string from payload + const mappings_value = try payload_arg.getStringish(globalObject, "mappings") orelse { + return globalObject.throwInvalidArguments("payload 'mappings' must be a string", .{}); + }; + defer mappings_value.deref(); + + const mappings_str = mappings_value.toUTF8(arena_allocator); + defer mappings_str.deinit(); + + var names = std.ArrayList(bun.String).init(bun.default_allocator); + errdefer { + for (names.items) |*str| { + str.deref(); + } + names.deinit(); + } + + var sources = std.ArrayList(bun.String).init(bun.default_allocator); + errdefer { + for (sources.items) |*str| { + str.deref(); + } + sources.deinit(); + } + + if (try payload_arg.getArray(globalObject, "sources")) |sources_value| { + var iter = try sources_value.arrayIterator(globalObject); + while (try iter.next()) |source| { + const source_str = try source.toBunString(globalObject); + try sources.append(source_str); + } + } + + if (try payload_arg.getArray(globalObject, "names")) |names_value| { + var iter = try names_value.arrayIterator(globalObject); + while (try iter.next()) |name| { + const name_str = try name.toBunString(globalObject); + try names.append(name_str); + } + } + + // Parse the VLQ mappings + const parse_result = bun.sourcemap.Mapping.parse( + bun.default_allocator, + mappings_str.slice(), + null, // estimated_mapping_count + @intCast(sources.items.len), // sources_count + std.math.maxInt(i32), + .{ .allow_names = true, .sort = true }, + ); + + const mapping_list = switch (parse_result) { + .success => |parsed| parsed, + .fail => |fail| { + if (fail.loc.toNullable()) |loc| { + return globalObject.throwValue(globalObject.createSyntaxErrorInstance("{s} at {d}", .{ fail.msg, loc.start })); + } + return globalObject.throwValue(globalObject.createSyntaxErrorInstance("{s}", .{fail.msg})); + }, + }; + + const source_map = bun.new(JSSourceMap, .{ + .sourcemap = bun.new(bun.sourcemap.ParsedSourceMap, mapping_list), + .sources = sources.items, + .names = names.items, + }); + + if (payload_arg != .zero) { + js.payloadSetCached(thisValue, globalObject, payload_arg); + } + if (line_lengths != .zero) { + 
js.lineLengthsSetCached(thisValue, globalObject, line_lengths); + } + + return source_map; +} + +pub fn memoryCost(this: *const JSSourceMap) usize { + return @sizeOf(JSSourceMap) + this.sources.len * @sizeOf(bun.String) + this.sourcemap.memoryCost(); +} + +pub fn estimatedSize(this: *JSSourceMap) usize { + return this.memoryCost(); +} + +// The cached value should handle this. +pub fn getPayload(_: *JSSourceMap, _: *JSGlobalObject) JSValue { + return .js_undefined; +} + +// The cached value should handle this. +pub fn getLineLengths(_: *JSSourceMap, _: *JSGlobalObject) JSValue { + return .js_undefined; +} + +fn getLineColumn(globalObject: *JSGlobalObject, callFrame: *CallFrame) bun.JSError![2]i32 { + const line_number_value = callFrame.argument(0); + const column_number_value = callFrame.argument(1); + + return .{ + // Node.js does no validations. + try line_number_value.coerce(i32, globalObject), + try column_number_value.coerce(i32, globalObject), + }; +} + +fn mappingNameToJS(this: *const JSSourceMap, globalObject: *JSGlobalObject, mapping: *const bun.sourcemap.Mapping) bun.JSError!JSValue { + const name_index = mapping.nameIndex(); + if (name_index >= 0) { + if (this.sourcemap.mappings.getName(name_index)) |name| { + return bun.String.createUTF8ForJS(globalObject, name); + } else { + const index: usize = @intCast(name_index); + if (index < this.names.len) { + return this.names[index].toJS(globalObject); + } + } + } + return .js_undefined; +} + +fn sourceNameToJS(this: *const JSSourceMap, globalObject: *JSGlobalObject, mapping: *const bun.sourcemap.Mapping) bun.JSError!JSValue { + const source_index = mapping.sourceIndex(); + if (source_index >= 0 and source_index < @as(i32, @intCast(this.sources.len))) { + return this.sources[@intCast(source_index)].toJS(globalObject); + } + + return .js_undefined; +} + +extern fn Bun__createNodeModuleSourceMapOriginObject( + globalObject: *JSGlobalObject, + name: JSValue, + line: JSValue, + column: JSValue, + source: JSValue, +) JSValue; + +extern fn Bun__createNodeModuleSourceMapEntryObject( + globalObject: *JSGlobalObject, + generatedLine: JSValue, + generatedColumn: JSValue, + originalLine: JSValue, + originalColumn: JSValue, + source: JSValue, + name: JSValue, +) JSValue; + +pub fn findOrigin(this: *JSSourceMap, globalObject: *JSGlobalObject, callFrame: *CallFrame) bun.JSError!JSValue { + const line_number, const column_number = try getLineColumn(globalObject, callFrame); + + const mapping = this.sourcemap.mappings.find(line_number, column_number) orelse return JSC.JSValue.createEmptyObject(globalObject, 0); + const name = try mappingNameToJS(this, globalObject, &mapping); + const source = try sourceNameToJS(this, globalObject, &mapping); + return Bun__createNodeModuleSourceMapOriginObject( + globalObject, + name, + JSC.JSValue.jsNumber(mapping.originalLine()), + JSC.JSValue.jsNumber(mapping.originalColumn()), + source, + ); +} + +pub fn findEntry(this: *JSSourceMap, globalObject: *JSGlobalObject, callFrame: *CallFrame) bun.JSError!JSValue { + const line_number, const column_number = try getLineColumn(globalObject, callFrame); + + const mapping = this.sourcemap.mappings.find(line_number, column_number) orelse return JSC.JSValue.createEmptyObject(globalObject, 0); + + const name = try mappingNameToJS(this, globalObject, &mapping); + const source = try sourceNameToJS(this, globalObject, &mapping); + return Bun__createNodeModuleSourceMapEntryObject( + globalObject, + JSC.JSValue.jsNumber(mapping.generatedLine()), + 
JSC.JSValue.jsNumber(mapping.generatedColumn()), + JSC.JSValue.jsNumber(mapping.originalLine()), + JSC.JSValue.jsNumber(mapping.originalColumn()), + source, + name, + ); +} + +pub fn deinit(this: *JSSourceMap) void { + for (this.sources) |*str| { + str.deref(); + } + bun.default_allocator.free(this.sources); + + for (this.names) |*name| { + name.deref(); + } + + bun.default_allocator.free(this.names); + + this.sourcemap.deref(); + bun.destroy(this); +} + +pub fn finalize(this: *JSSourceMap) void { + this.deinit(); +} + +comptime { + const jsFunctionFindSourceMap = JSC.toJSHostFn(findSourceMap); + @export(&jsFunctionFindSourceMap, .{ .name = "Bun__JSSourceMap__find" }); +} + +// @sortImports + +const std = @import("std"); + +const bun = @import("bun"); +const string = bun.string; + +const JSC = bun.JSC; +const CallFrame = JSC.CallFrame; +const JSGlobalObject = JSC.JSGlobalObject; +const JSValue = JSC.JSValue; + +pub const js = JSC.Codegen.JSSourceMap; +pub const fromJS = js.fromJS; +pub const fromJSDirect = js.fromJSDirect; +pub const toJS = js.toJS; diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index b15d5fe3d7..b580dfe61d 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -10,7 +10,7 @@ const JSPrinter = bun.js_printer; const URL = bun.URL; const FileSystem = bun.fs.FileSystem; -const SourceMap = @This(); +pub const SourceMap = @This(); const debug = bun.Output.scoped(.SourceMap, false); /// Coordinates in source maps are stored using relative offsets for size @@ -42,7 +42,11 @@ pub const ParseUrlResultHint = union(enum) { /// In order to fetch source contents, you need to know the /// index, but you cant know the index until the mappings /// are loaded. So pass in line+col. - all: struct { line: i32, column: i32 }, + all: struct { + line: i32, + column: i32, + include_names: bool = false, + }, }; pub const ParseUrl = struct { @@ -179,19 +183,46 @@ pub fn parseJSON( }; const map = if (hint != .source_only) map: { - const map_data = switch (Mapping.parse( + var map_data = switch (Mapping.parse( alloc, mappings_str.data.e_string.slice(arena), null, std.math.maxInt(i32), std.math.maxInt(i32), + .{ .allow_names = hint == .all and hint.all.include_names, .sort = true }, )) { .success => |x| x, .fail => |fail| return fail.err, }; + if (hint == .all and hint.all.include_names and map_data.mappings.impl == .with_names) { + if (json.get("names")) |names| { + if (names.data == .e_array) { + var names_list = try std.ArrayListUnmanaged(bun.Semver.String).initCapacity(alloc, names.data.e_array.items.len); + errdefer names_list.deinit(alloc); + + var names_buffer = std.ArrayListUnmanaged(u8){}; + errdefer names_buffer.deinit(alloc); + + for (names.data.e_array.items.slice()) |*item| { + if (item.data != .e_string) { + return error.InvalidSourceMap; + } + + const str = try item.data.e_string.string(arena); + + names_list.appendAssumeCapacity(try bun.Semver.String.initAppendIfNeeded(alloc, &names_buffer, str)); + } + + map_data.mappings.names = names_list.items; + map_data.mappings.names_buffer = .fromList(names_buffer); + } + } + } + const ptr = bun.new(ParsedSourceMap, map_data); ptr.external_source_names = source_paths_slice.?; + break :map ptr; } else null; errdefer if (map) |m| m.deref(); @@ -199,7 +230,7 @@ pub fn parseJSON( const mapping, const source_index = switch (hint) { .source_only => |index| .{ null, index }, .all => |loc| brk: { - const mapping = Mapping.find(map.?.mappings, loc.line, loc.column) orelse + const mapping = 
map.?.mappings.find(loc.line, loc.column) orelse break :brk .{ null, null }; break :brk .{ mapping, std.math.cast(u32, mapping.source_index) }; }, @@ -234,8 +265,206 @@ pub const Mapping = struct { generated: LineColumnOffset, original: LineColumnOffset, source_index: i32, + name_index: i32 = -1, - pub const List = bun.MultiArrayList(Mapping); + /// Optimization: if we don't care about the "names" column, then don't store the names. + pub const MappingWithoutName = struct { + generated: LineColumnOffset, + original: LineColumnOffset, + source_index: i32, + + pub fn toNamed(this: *const MappingWithoutName) Mapping { + return .{ + .generated = this.generated, + .original = this.original, + .source_index = this.source_index, + .name_index = -1, + }; + } + }; + + pub const List = struct { + impl: Value = .{ .without_names = .{} }, + names: []const bun.Semver.String = &[_]bun.Semver.String{}, + names_buffer: bun.ByteList = .{}, + + pub const Value = union(enum) { + without_names: bun.MultiArrayList(MappingWithoutName), + with_names: bun.MultiArrayList(Mapping), + + pub fn memoryCost(this: *const Value) usize { + return switch (this.*) { + .without_names => |*list| list.memoryCost(), + .with_names => |*list| list.memoryCost(), + }; + } + + pub fn ensureTotalCapacity(this: *Value, allocator: std.mem.Allocator, count: usize) !void { + switch (this.*) { + inline else => |*list| try list.ensureTotalCapacity(allocator, count), + } + } + }; + + fn ensureWithNames(this: *List, allocator: std.mem.Allocator) !void { + if (this.impl == .with_names) return; + + var without_names = this.impl.without_names; + var with_names = bun.MultiArrayList(Mapping){}; + try with_names.ensureTotalCapacity(allocator, without_names.len); + defer without_names.deinit(allocator); + + with_names.len = without_names.len; + var old_slices = without_names.slice(); + var new_slices = with_names.slice(); + + @memcpy(new_slices.items(.generated), old_slices.items(.generated)); + @memcpy(new_slices.items(.original), old_slices.items(.original)); + @memcpy(new_slices.items(.source_index), old_slices.items(.source_index)); + @memset(new_slices.items(.name_index), -1); + + this.impl = .{ .with_names = with_names }; + } + + fn findIndexFromGenerated(line_column_offsets: []const LineColumnOffset, line: i32, column: i32) ?usize { + var count = line_column_offsets.len; + var index: usize = 0; + while (count > 0) { + const step = count / 2; + const i: usize = index + step; + const mapping = line_column_offsets[i]; + if (mapping.lines < line or (mapping.lines == line and mapping.columns <= column)) { + index = i + 1; + count -|= step + 1; + } else { + count = step; + } + } + + if (index > 0) { + if (line_column_offsets[index - 1].lines == line) { + return index - 1; + } + } + + return null; + } + + pub fn findIndex(this: *const List, line: i32, column: i32) ?usize { + switch (this.impl) { + inline else => |*list| { + if (findIndexFromGenerated(list.items(.generated), line, column)) |i| { + return i; + } + }, + } + + return null; + } + + const SortContext = struct { + generated: []const LineColumnOffset, + pub fn lessThan(ctx: SortContext, a_index: usize, b_index: usize) bool { + const a = ctx.generated[a_index]; + const b = ctx.generated[b_index]; + + return a.lines < b.lines or (a.lines == b.lines and a.columns <= b.columns); + } + }; + + pub fn sort(this: *List) void { + switch (this.impl) { + .without_names => |*list| list.sort(SortContext{ .generated = list.items(.generated) }), + .with_names => |*list| list.sort(SortContext{ .generated 
= list.items(.generated) }), + } + } + + pub fn append(this: *List, allocator: std.mem.Allocator, mapping: *const Mapping) !void { + switch (this.impl) { + .without_names => |*list| { + try list.append(allocator, .{ + .generated = mapping.generated, + .original = mapping.original, + .source_index = mapping.source_index, + }); + }, + .with_names => |*list| { + try list.append(allocator, mapping.*); + }, + } + } + + pub fn find(this: *const List, line: i32, column: i32) ?Mapping { + switch (this.impl) { + inline else => |*list, tag| { + if (findIndexFromGenerated(list.items(.generated), line, column)) |i| { + if (tag == .without_names) { + return list.get(i).toNamed(); + } else { + return list.get(i); + } + } + }, + } + + return null; + } + pub fn generated(self: *const List) []const LineColumnOffset { + return switch (self.impl) { + inline else => |*list| list.items(.generated), + }; + } + + pub fn original(self: *const List) []const LineColumnOffset { + return switch (self.impl) { + inline else => |*list| list.items(.original), + }; + } + + pub fn sourceIndex(self: *const List) []const i32 { + return switch (self.impl) { + inline else => |*list| list.items(.source_index), + }; + } + + pub fn nameIndex(self: *const List) []const i32 { + return switch (self.impl) { + inline else => |*list| list.items(.name_index), + }; + } + + pub fn deinit(self: *List, allocator: std.mem.Allocator) void { + switch (self.impl) { + inline else => |*list| list.deinit(allocator), + } + + self.names_buffer.deinitWithAllocator(allocator); + allocator.free(self.names); + } + + pub fn getName(this: *List, index: i32) ?[]const u8 { + if (index < 0) return null; + const i: usize = @intCast(index); + + if (i >= this.names.len) return null; + + if (this.impl == .with_names) { + const str: *const bun.Semver.String = &this.names[i]; + return str.slice(this.names_buffer.slice()); + } + + return null; + } + + pub fn memoryCost(this: *const List) usize { + return this.impl.memoryCost() + this.names_buffer.memoryCost() + + (this.names.len * @sizeOf(bun.Semver.String)); + } + + pub fn ensureTotalCapacity(this: *List, allocator: std.mem.Allocator, count: usize) !void { + try this.impl.ensureTotalCapacity(allocator, count); + } + }; pub const Lookup = struct { mapping: Mapping, @@ -244,6 +473,8 @@ pub const Mapping = struct { /// use `getSourceCode` to access this as a Slice prefetched_source_code: ?[]const u8, + name: ?[]const u8 = null, + /// This creates a bun.String if the source remap *changes* the source url, /// which is only possible if the executed file differs from the source file: /// @@ -336,58 +567,28 @@ pub const Mapping = struct { } }; - pub inline fn generatedLine(mapping: Mapping) i32 { + pub inline fn generatedLine(mapping: *const Mapping) i32 { return mapping.generated.lines; } - pub inline fn generatedColumn(mapping: Mapping) i32 { + pub inline fn generatedColumn(mapping: *const Mapping) i32 { return mapping.generated.columns; } - pub inline fn sourceIndex(mapping: Mapping) i32 { + pub inline fn sourceIndex(mapping: *const Mapping) i32 { return mapping.source_index; } - pub inline fn originalLine(mapping: Mapping) i32 { + pub inline fn originalLine(mapping: *const Mapping) i32 { return mapping.original.lines; } - pub inline fn originalColumn(mapping: Mapping) i32 { + pub inline fn originalColumn(mapping: *const Mapping) i32 { return mapping.original.columns; } - pub fn find(mappings: Mapping.List, line: i32, column: i32) ?Mapping { - if (findIndex(mappings, line, column)) |i| { - return mappings.get(i); - } 
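
Note: `findIndexFromGenerated` above (and the old free-standing `findIndex` being deleted here) is an upper-bound binary search over mappings sorted by generated (line, column): it finds the insertion point, then accepts the predecessor only if it sits on the requested generated line. A standalone sketch of the same search over a plain struct, with hypothetical names, not part of the patch:

```zig
const std = @import("std");

const Pos = struct { line: i32, col: i32 };

/// Last position <= (line, col), accepted only if it shares the line.
fn findIndex(positions: []const Pos, line: i32, col: i32) ?usize {
    var count = positions.len;
    var index: usize = 0;
    while (count > 0) {
        const step = count / 2;
        const i = index + step;
        const p = positions[i];
        if (p.line < line or (p.line == line and p.col <= col)) {
            index = i + 1; // target is at or after i; search the right half
            count -|= step + 1;
        } else {
            count = step; // search the left half
        }
    }
    // `index` is one past the last candidate; its predecessor is the match,
    // but only when it is on the requested generated line.
    if (index > 0 and positions[index - 1].line == line) return index - 1;
    return null;
}

test "predecessor on the same line wins" {
    const ps = [_]Pos{ .{ .line = 0, .col = 0 }, .{ .line = 0, .col = 10 }, .{ .line = 2, .col = 0 } };
    try std.testing.expectEqual(@as(?usize, 1), findIndex(&ps, 0, 15));
    try std.testing.expectEqual(@as(?usize, null), findIndex(&ps, 1, 0));
}
```
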
- - return null; - } - - pub fn findIndex(mappings: Mapping.List, line: i32, column: i32) ?usize { - const generated = mappings.items(.generated); - - var count = generated.len; - var index: usize = 0; - while (count > 0) { - const step = count / 2; - const i: usize = index + step; - const mapping = generated[i]; - if (mapping.lines < line or (mapping.lines == line and mapping.columns <= column)) { - index = i + 1; - count -|= step + 1; - } else { - count = step; - } - } - - if (index > 0) { - if (generated[index - 1].lines == line) { - return index - 1; - } - } - - return null; + pub inline fn nameIndex(mapping: *const Mapping) i32 { + return mapping.name_index; } pub fn parse( @@ -396,19 +597,35 @@ pub const Mapping = struct { estimated_mapping_count: ?usize, sources_count: i32, input_line_count: usize, + options: struct { + allow_names: bool = false, + sort: bool = false, + }, ) ParseResult { debug("parse mappings ({d} bytes)", .{bytes.len}); var mapping = Mapping.List{}; + errdefer mapping.deinit(allocator); + if (estimated_mapping_count) |count| { - mapping.ensureTotalCapacity(allocator, count) catch unreachable; + mapping.ensureTotalCapacity(allocator, count) catch { + return .{ + .fail = .{ + .msg = "Out of memory", + .err = error.OutOfMemory, + .loc = .{}, + }, + }; + }; } var generated = LineColumnOffset{ .lines = 0, .columns = 0 }; var original = LineColumnOffset{ .lines = 0, .columns = 0 }; + var name_index: i32 = 0; var source_index: i32 = 0; var needs_sort = false; var remain = bytes; + var has_names = false; while (remain.len > 0) { if (remain[0] == ';') { generated.columns = 0; @@ -558,28 +775,70 @@ pub const Mapping = struct { if (remain.len > 0) { switch (remain[0]) { ',' => { + // 4th column, but there's more on this line. remain = remain[1..]; }, + // 4th column, and there's no more on this line. ';' => {}, + + // 5th column: the name else => |c| { - return .{ - .fail = .{ - .msg = "Invalid character after mapping", - .err = error.InvalidSourceMap, - .value = @as(i32, @intCast(c)), - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; + // Read the name index + const name_index_delta = decodeVLQ(remain, 0); + if (name_index_delta.start == 0) { + return .{ + .fail = .{ + .msg = "Invalid name index delta", + .err = error.InvalidNameIndexDelta, + .value = @intCast(c), + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + remain = remain[name_index_delta.start..]; + + if (options.allow_names) { + name_index += name_index_delta.value; + if (!has_names) { + mapping.ensureWithNames(allocator) catch { + return .{ + .fail = .{ + .msg = "Out of memory", + .err = error.OutOfMemory, + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + }; + } + has_names = true; + } + + if (remain.len > 0) { + switch (remain[0]) { + // There's more on this line. + ',' => { + remain = remain[1..]; + }, + // That's the end of the line. + ';' => {}, + else => {}, + } + } }, } } - mapping.append(allocator, .{ + mapping.append(allocator, &.{ .generated = generated, .original = original, .source_index = source_index, + .name_index = name_index, }) catch bun.outOfMemory(); } + if (needs_sort and options.sort) { + mapping.sort(); + } + return .{ .success = .{ .ref_count = .init(), .mappings = mapping, @@ -622,6 +881,7 @@ pub const ParsedSourceMap = struct { input_line_count: usize = 0, mappings: Mapping.List = .{}, + /// If this is empty, this implies that the source code is a single file /// transpiled on-demand.
@@ -639,18 +899,51 @@
 
     is_standalone_module_graph: bool = false,
 
-    const SourceContentPtr = packed struct(u64) {
-        load_hint: SourceMapLoadHint,
-        data: u62,
+    const SourceProviderKind = enum(u1) { zig, bake };
+    const AnySourceProvider = union(enum) {
+        zig: *SourceProviderMap,
+        bake: *BakeSourceProvider,
 
-        pub const none: SourceContentPtr = .{ .load_hint = .none, .data = 0 };
-
-        fn fromProvider(p: *SourceProviderMap) SourceContentPtr {
-            return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)) };
+        pub fn ptr(this: AnySourceProvider) *anyopaque {
+            return switch (this) {
+                .zig => @ptrCast(this.zig),
+                .bake => @ptrCast(this.bake),
+            };
         }
 
-        pub fn provider(sc: SourceContentPtr) ?*SourceProviderMap {
-            return @ptrFromInt(sc.data);
+        pub fn getSourceMap(
+            this: AnySourceProvider,
+            source_filename: []const u8,
+            load_hint: SourceMapLoadHint,
+            result: ParseUrlResultHint,
+        ) ?SourceMap.ParseUrl {
+            return switch (this) {
+                .zig => this.zig.getSourceMap(source_filename, load_hint, result),
+                .bake => this.bake.getSourceMap(source_filename, load_hint, result),
+            };
+        }
+    };
+
+    const SourceContentPtr = packed struct(u64) {
+        load_hint: SourceMapLoadHint,
+        kind: SourceProviderKind,
+        data: u61,
+
+        pub const none: SourceContentPtr = .{ .load_hint = .none, .kind = .zig, .data = 0 };
+
+        fn fromProvider(p: *SourceProviderMap) SourceContentPtr {
+            return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .zig };
+        }
+
+        fn fromBakeProvider(p: *BakeSourceProvider) SourceContentPtr {
+            return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .bake };
+        }
+
+        pub fn provider(sc: SourceContentPtr) ?AnySourceProvider {
+            switch (sc.kind) {
+                .zig => return .{ .zig = @ptrFromInt(sc.data) },
+                .bake => return .{ .bake = @ptrFromInt(sc.data) },
+            }
         }
     };
 
@@ -677,16 +970,20 @@
         return @ptrFromInt(this.underlying_provider.data);
     }
 
-    pub fn writeVLQs(map: ParsedSourceMap, writer: anytype) !void {
+    pub fn memoryCost(this: *const ParsedSourceMap) usize {
+        return @sizeOf(ParsedSourceMap) + this.mappings.memoryCost() + this.external_source_names.len * @sizeOf([]const u8);
+    }
+
+    pub fn writeVLQs(map: *const ParsedSourceMap, writer: anytype) !void {
         var last_col: i32 = 0;
         var last_src: i32 = 0;
         var last_ol: i32 = 0;
         var last_oc: i32 = 0;
         var current_line: i32 = 0;
         for (
-            map.mappings.items(.generated),
-            map.mappings.items(.original),
-            map.mappings.items(.source_index),
+            map.mappings.generated(),
+            map.mappings.original(),
+            map.mappings.sourceIndex(),
             0..,
         ) |gen, orig, source_index, i| {
             if (current_line != gen.lines) {
@@ -735,26 +1032,137 @@ pub const SourceMapLoadHint = enum(u2) {
     is_external_map,
 };
 
+fn findSourceMappingURL(comptime T: type, source: []const T, alloc: std.mem.Allocator) ?bun.JSC.ZigString.Slice {
+    const needle = comptime bun.strings.literal(T, "\n//# sourceMappingURL=");
+    const found = bun.strings.indexOfT(T, source, needle) orelse return null;
+    const end = std.mem.indexOfScalarPos(T, source, found + needle.len, '\n') orelse source.len;
+    const url = std.mem.trimRight(T, source[found + needle.len .. end], &.{ ' ', '\r' });
+    return switch (T) {
+        u8 => bun.JSC.ZigString.Slice.fromUTF8NeverFree(url),
+        u16 => bun.JSC.ZigString.Slice.init(
+            alloc,
+            bun.strings.toUTF8Alloc(alloc, url) catch bun.outOfMemory(),
+        ),
+        else => @compileError("Not Supported"),
+    };
+}
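
// --- editor's note (not part of the patch) -----------------------------------
// The essence of `findSourceMappingURL` above, with the latin1/UTF-16 generics
// and ZigString wrappers stripped away: locate the "\n//# sourceMappingURL="
// marker, take the remainder of that line, and trim trailing spaces and
// carriage returns.
const std = @import("std");

fn sourceMappingURL(source: []const u8) ?[]const u8 {
    const needle = "\n//# sourceMappingURL=";
    const found = std.mem.indexOf(u8, source, needle) orelse return null;
    const start = found + needle.len;
    const end = std.mem.indexOfScalarPos(u8, source, start, '\n') orelse source.len;
    return std.mem.trimRight(u8, source[start..end], " \r");
}

test "extracts an inline data: URL from a sourceMappingURL comment" {
    const js = "console.log(1);\n//# sourceMappingURL=data:application/json;base64,e30= \r\n";
    try std.testing.expectEqualStrings(
        "data:application/json;base64,e30=",
        sourceMappingURL(js).?,
    );
}
// ------------------------------------------------------------------------------
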
+
+/// The last two arguments to this specify loading hints
+pub fn getSourceMapImpl(
+    comptime SourceProviderKind: type,
+    provider: *SourceProviderKind,
+    source_filename: []const u8,
+    load_hint: SourceMapLoadHint,
+    result: ParseUrlResultHint,
+) ?SourceMap.ParseUrl {
+    // This was previously 65536, but a buffer that large risks a stack
+    // overflow, and with the many layers of indirection and wrappers around
+    // this function, it is difficult to reason about how deep the call stack
+    // already is by the time it runs. 1024 is a safer number.
+    //
+    // TODO: Experiment in debug builds with calculating how much stack space
+    // is left and using that to adjust the size.
+    const STACK_SPACE_TO_USE = 1024;
+    var sfb = std.heap.stackFallback(STACK_SPACE_TO_USE, bun.default_allocator);
+    var arena = bun.ArenaAllocator.init(sfb.get());
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    const new_load_hint: SourceMapLoadHint, const parsed = parsed: {
+        var inline_err: ?anyerror = null;
+
+        // try to get an inline source map
+        if (load_hint != .is_external_map) try_inline: {
+            const source = SourceProviderKind.getSourceSlice(provider);
+            defer source.deref();
+            bun.assert(source.tag == .ZigString);
+
+            const found_url = (if (source.is8Bit())
+                findSourceMappingURL(u8, source.latin1(), allocator)
+            else
+                findSourceMappingURL(u16, source.utf16(), allocator)) orelse
+                break :try_inline;
+            defer found_url.deinit();
+
+            break :parsed .{
+                .is_inline_map,
+                parseUrl(
+                    bun.default_allocator,
+                    allocator,
+                    found_url.slice(),
+                    result,
+                ) catch |err| {
+                    inline_err = err;
+                    break :try_inline;
+                },
+            };
+        }
+
+        // try to load a .map file
+        if (load_hint != .is_inline_map) try_external: {
+            var load_path_buf: *bun.PathBuffer = bun.path_buffer_pool.get();
+            defer bun.path_buffer_pool.put(load_path_buf);
+            if (source_filename.len + 4 > load_path_buf.len)
+                break :try_external;
+            @memcpy(load_path_buf[0..source_filename.len], source_filename);
+            @memcpy(load_path_buf[source_filename.len..][0..4], ".map");
+
+            const load_path = load_path_buf[0 .. source_filename.len + 4];
+            const data = switch (bun.sys.File.readFrom(std.fs.cwd(), load_path, allocator)) {
+                .err => break :try_external,
+                .result => |data| data,
+            };
+
+            break :parsed .{
+                .is_external_map,
+                parseJSON(
+                    bun.default_allocator,
+                    allocator,
+                    data,
+                    result,
+                ) catch |err| {
+                    // Print a warning even if this came from non-visible code like
+                    // calling `error.stack`. This message is only printed if
+                    // the sourcemap has been found but is invalid, such as being
+                    // invalid JSON text or having corrupt mappings.
+                    bun.Output.warn("Could not decode sourcemap in '{s}': {s}", .{
+                        source_filename,
+                        @errorName(err),
+                    });
+                    // Disable the "try using --sourcemap=external" hint
+                    bun.JSC.SavedSourceMap.MissingSourceMapNoteInfo.seen_invalid = true;
+                    return null;
+                },
+            };
+        }
+
+        if (inline_err) |err| {
+            bun.Output.warn("Could not decode sourcemap in '{s}': {s}", .{
+                source_filename,
+                @errorName(err),
+            });
+            // Disable the "try using --sourcemap=external" hint
+            bun.JSC.SavedSourceMap.MissingSourceMapNoteInfo.seen_invalid = true;
+            return null;
+        }
+
+        return null;
+    };
+    if (parsed.map) |ptr| {
+        ptr.underlying_provider = SourceProviderKind.toSourceContentPtr(provider);
+        ptr.underlying_provider.load_hint = new_load_hint;
+    }
+    return parsed;
+}
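
// --- editor's note (not part of the patch) -----------------------------------
// The allocator pattern `getSourceMapImpl` uses above: a small fixed stack
// buffer that transparently spills to the heap, wrapped in an arena so that
// every temporary allocated while probing for a source map is released by a
// single `deinit`. The 1024 mirrors STACK_SPACE_TO_USE; std.heap.ArenaAllocator
// stands in for bun.ArenaAllocator to keep the sketch self-contained.
const std = @import("std");

test "stack-fallback arena releases everything at once" {
    var sfb = std.heap.stackFallback(1024, std.testing.allocator);
    var arena = std.heap.ArenaAllocator.init(sfb.get());
    defer arena.deinit(); // frees any heap spill in one shot
    const allocator = arena.allocator();

    // Small scratch work is served from the 1024-byte stack buffer.
    const small = try allocator.alloc(u8, 64);
    @memset(small, 0);

    // A request larger than the stack buffer silently falls back to the
    // backing allocator; callers never need to know which path was taken.
    const big = try allocator.alloc(u8, 4096);
    @memset(big, 0);
}
// ------------------------------------------------------------------------------
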
+
 /// This is a pointer to a ZigSourceProvider that may or may not have a `//# sourceMappingURL` comment
 /// when we want to lookup this data, we will then resolve it to a ParsedSourceMap if it does.
 ///
 /// This is used for files that were pre-bundled with `bun build --target=bun --sourcemap`
 pub const SourceProviderMap = opaque {
     extern fn ZigSourceProvider__getSourceSlice(*SourceProviderMap) bun.String;
-
-    fn findSourceMappingURL(comptime T: type, source: []const T, alloc: std.mem.Allocator) ?bun.JSC.ZigString.Slice {
-        const needle = comptime bun.strings.literal(T, "\n//# sourceMappingURL=");
-        const found = bun.strings.indexOfT(T, source, needle) orelse return null;
-        const end = std.mem.indexOfScalarPos(T, source, found + needle.len, '\n') orelse source.len;
-        const url = std.mem.trimRight(T, source[found + needle.len .. end], &.{ ' ', '\r' });
-        return switch (T) {
-            u8 => bun.JSC.ZigString.Slice.fromUTF8NeverFree(url),
-            u16 => bun.JSC.ZigString.Slice.init(
-                alloc,
-                bun.strings.toUTF8Alloc(alloc, url) catch bun.outOfMemory(),
-            ),
-            else => @compileError("Not Supported"),
-        };
+    pub const getSourceSlice = ZigSourceProvider__getSourceSlice;
+    pub fn toSourceContentPtr(this: *SourceProviderMap) ParsedSourceMap.SourceContentPtr {
+        return ParsedSourceMap.SourceContentPtr.fromProvider(this);
     }
 
     /// The last two arguments to this specify loading hints
@@ -764,94 +1172,37 @@ pub const SourceProviderMap = opaque {
         load_hint: SourceMapLoadHint,
         result: ParseUrlResultHint,
     ) ?SourceMap.ParseUrl {
-        var sfb = std.heap.stackFallback(65536, bun.default_allocator);
-        var arena = bun.ArenaAllocator.init(sfb.get());
-        defer arena.deinit();
-        const allocator = arena.allocator();
+        return getSourceMapImpl(
+            SourceProviderMap,
+            provider,
+            source_filename,
+            load_hint,
+            result,
+        );
+    }
+};
 
-        const new_load_hint: SourceMapLoadHint, const parsed = parsed: {
-            var inline_err: ?anyerror = null;
+pub const BakeSourceProvider = opaque {
+    extern fn BakeSourceProvider__getSourceSlice(*BakeSourceProvider) bun.String;
+    pub const getSourceSlice = BakeSourceProvider__getSourceSlice;
+    pub fn toSourceContentPtr(this: *BakeSourceProvider) ParsedSourceMap.SourceContentPtr {
+        return ParsedSourceMap.SourceContentPtr.fromBakeProvider(this);
+    }
 
-            // try to get an inline source map
-            if (load_hint != .is_external_map) try_inline: {
-                const source = ZigSourceProvider__getSourceSlice(provider);
-                defer source.deref();
-                bun.assert(source.tag == .ZigString);
-
-                const found_url = (if (source.is8Bit())
-                    findSourceMappingURL(u8, source.latin1(), allocator)
-                else
-                    findSourceMappingURL(u16, source.utf16(), allocator)) orelse
-                    break :try_inline;
-                defer found_url.deinit();
-
-                break :parsed .{
-                    .is_inline_map,
-                    parseUrl(
-                        bun.default_allocator,
-                        allocator,
-                        found_url.slice(),
-                        result,
-                    ) catch |err| {
-                        inline_err = err;
-                        break :try_inline;
-                    },
-                };
-            }
-
-            // try to load a .map file
-            if (load_hint != .is_inline_map) try_external: {
-                var load_path_buf: bun.PathBuffer = undefined;
-                if (source_filename.len + 4 > load_path_buf.len)
-                    break :try_external;
-                @memcpy(load_path_buf[0..source_filename.len], source_filename);
-                @memcpy(load_path_buf[source_filename.len..][0..4], ".map");
-
-                const load_path = load_path_buf[0 .. source_filename.len + 4];
-                const data = switch (bun.sys.File.readFrom(std.fs.cwd(), load_path, allocator)) {
-                    .err => break :try_external,
-                    .result => |data| data,
-                };
-
-                break :parsed .{
-                    .is_external_map,
-                    parseJSON(
-                        bun.default_allocator,
-                        allocator,
-                        data,
-                        result,
-                    ) catch |err| {
-                        // Print warning even if this came from non-visible code like
-                        // calling `error.stack`. This message is only printed if
-                        // the sourcemap has been found but is invalid, such as being
-                        // invalid JSON text or corrupt mappings.
-                        bun.Output.warn("Could not decode sourcemap in '{s}': {s}", .{
-                            source_filename,
-                            @errorName(err),
-                        }); // Disable the "try using --sourcemap=external" hint
-                        bun.JSC.SavedSourceMap.MissingSourceMapNoteInfo.seen_invalid = true;
-                        return null;
-                    },
-                };
-            }
-
-            if (inline_err) |err| {
-                bun.Output.warn("Could not decode sourcemap in '{s}': {s}", .{
-                    source_filename,
-                    @errorName(err),
-                });
-                // Disable the "try using --sourcemap=external" hint
-                bun.JSC.SavedSourceMap.MissingSourceMapNoteInfo.seen_invalid = true;
-                return null;
-            }
-
-            return null;
-        };
-        if (parsed.map) |ptr| {
-            ptr.underlying_provider = ParsedSourceMap.SourceContentPtr.fromProvider(provider);
-            ptr.underlying_provider.load_hint = new_load_hint;
-        }
-        return parsed;
+    /// The last two arguments to this specify loading hints
+    pub fn getSourceMap(
+        provider: *BakeSourceProvider,
+        source_filename: []const u8,
+        load_hint: SourceMap.SourceMapLoadHint,
+        result: SourceMap.ParseUrlResultHint,
+    ) ?SourceMap.ParseUrl {
+        return getSourceMapImpl(
+            BakeSourceProvider,
+            provider,
+            source_filename,
+            load_hint,
+            result,
+        );
     }
 };
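
// --- editor's note (not part of the patch) -----------------------------------
// The shape of the refactor above: `getSourceMapImpl` takes the provider type
// as a comptime parameter, so `SourceProviderMap` and `BakeSourceProvider` can
// share one body as long as both expose the same decls (`getSourceSlice`,
// `toSourceContentPtr`). A minimal model of that comptime duck typing; the
// types and function here are illustrative, not Bun's:
const std = @import("std");

const ZigProvider = struct {
    source: []const u8,
    pub fn getSourceSlice(self: *const ZigProvider) []const u8 {
        return self.source;
    }
};

const BakeProvider = struct {
    source: []const u8,
    pub fn getSourceSlice(self: *const BakeProvider) []const u8 {
        return self.source;
    }
};

// One implementation, instantiated per provider type at compile time,
// mirroring `getSourceMapImpl(comptime SourceProviderKind: type, ...)`.
fn sourceLen(comptime Provider: type, provider: *const Provider) usize {
    return Provider.getSourceSlice(provider).len;
}

test "two providers share one generic implementation" {
    const zig = ZigProvider{ .source = "a" };
    const bake = BakeProvider{ .source = "ab" };
    try std.testing.expectEqual(@as(usize, 1), sourceLen(ZigProvider, &zig));
    try std.testing.expectEqual(@as(usize, 2), sourceLen(BakeProvider, &bake));
}
// ------------------------------------------------------------------------------
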
@@ -969,7 +1320,7 @@ pub fn find(
     line: i32,
     column: i32,
 ) ?Mapping {
-    return Mapping.find(this.mapping, line, column);
+    return this.mapping.find(line, column);
 }
 
 pub const SourceMapShifts = struct {
@@ -1584,6 +1935,7 @@ const assert = bun.assert;
 
 pub const coverage = @import("./CodeCoverage.zig");
 pub const VLQ = @import("./VLQ.zig");
 pub const LineOffsetTable = @import("./LineOffsetTable.zig");
+pub const JSSourceMap = @import("./JSSourceMap.zig");
 
 const decodeVLQAssumeValid = VLQ.decodeAssumeValid;
 const decodeVLQ = VLQ.decode;
diff --git a/src/sql/postgres.zig b/src/sql/postgres.zig
index df0664368a..728eeba420 100644
--- a/src/sql/postgres.zig
+++ b/src/sql/postgres.zig
@@ -1,3273 +1,3 @@
-const bun = @import("bun");
-const JSC = bun.JSC;
-const String = bun.String;
-const uws = bun.uws;
-const std = @import("std");
-pub const debug = bun.Output.scoped(.Postgres, false);
-pub const int4 = u32;
-pub const PostgresInt32 = int4;
-pub const int8 = i64;
-pub const PostgresInt64 = int8;
-pub const short = u16;
-pub const PostgresShort = u16;
-const Crypto = JSC.API.Bun.Crypto;
-const JSValue = JSC.JSValue;
-const BoringSSL = bun.BoringSSL;
-pub const AnyPostgresError = error{
-    ConnectionClosed,
-    ExpectedRequest,
-    ExpectedStatement,
-    InvalidBackendKeyData,
-    InvalidBinaryData,
-    InvalidByteSequence,
-    InvalidByteSequenceForEncoding,
-    InvalidCharacter,
-    InvalidMessage,
-    InvalidMessageLength,
-    InvalidQueryBinding,
- InvalidServerKey, - InvalidServerSignature, - JSError, - MultidimensionalArrayNotSupportedYet, - NullsInArrayNotSupportedYet, - OutOfMemory, - Overflow, - PBKDFD2, - SASL_SIGNATURE_MISMATCH, - SASL_SIGNATURE_INVALID_BASE64, - ShortRead, - TLSNotAvailable, - TLSUpgradeFailed, - UnexpectedMessage, - UNKNOWN_AUTHENTICATION_METHOD, - UNSUPPORTED_AUTHENTICATION_METHOD, - UnsupportedByteaFormat, - UnsupportedIntegerSize, - UnsupportedArrayFormat, - UnsupportedNumericFormat, - UnknownFormatCode, -}; - -pub fn postgresErrorToJS(globalObject: *JSC.JSGlobalObject, message: ?[]const u8, err: AnyPostgresError) JSValue { - const error_code: JSC.Error = switch (err) { - error.ConnectionClosed => .POSTGRES_CONNECTION_CLOSED, - error.ExpectedRequest => .POSTGRES_EXPECTED_REQUEST, - error.ExpectedStatement => .POSTGRES_EXPECTED_STATEMENT, - error.InvalidBackendKeyData => .POSTGRES_INVALID_BACKEND_KEY_DATA, - error.InvalidBinaryData => .POSTGRES_INVALID_BINARY_DATA, - error.InvalidByteSequence => .POSTGRES_INVALID_BYTE_SEQUENCE, - error.InvalidByteSequenceForEncoding => .POSTGRES_INVALID_BYTE_SEQUENCE_FOR_ENCODING, - error.InvalidCharacter => .POSTGRES_INVALID_CHARACTER, - error.InvalidMessage => .POSTGRES_INVALID_MESSAGE, - error.InvalidMessageLength => .POSTGRES_INVALID_MESSAGE_LENGTH, - error.InvalidQueryBinding => .POSTGRES_INVALID_QUERY_BINDING, - error.InvalidServerKey => .POSTGRES_INVALID_SERVER_KEY, - error.InvalidServerSignature => .POSTGRES_INVALID_SERVER_SIGNATURE, - error.MultidimensionalArrayNotSupportedYet => .POSTGRES_MULTIDIMENSIONAL_ARRAY_NOT_SUPPORTED_YET, - error.NullsInArrayNotSupportedYet => .POSTGRES_NULLS_IN_ARRAY_NOT_SUPPORTED_YET, - error.Overflow => .POSTGRES_OVERFLOW, - error.PBKDFD2 => .POSTGRES_AUTHENTICATION_FAILED_PBKDF2, - error.SASL_SIGNATURE_MISMATCH => .POSTGRES_SASL_SIGNATURE_MISMATCH, - error.SASL_SIGNATURE_INVALID_BASE64 => .POSTGRES_SASL_SIGNATURE_INVALID_BASE64, - error.TLSNotAvailable => .POSTGRES_TLS_NOT_AVAILABLE, - error.TLSUpgradeFailed => .POSTGRES_TLS_UPGRADE_FAILED, - error.UnexpectedMessage => .POSTGRES_UNEXPECTED_MESSAGE, - error.UNKNOWN_AUTHENTICATION_METHOD => .POSTGRES_UNKNOWN_AUTHENTICATION_METHOD, - error.UNSUPPORTED_AUTHENTICATION_METHOD => .POSTGRES_UNSUPPORTED_AUTHENTICATION_METHOD, - error.UnsupportedByteaFormat => .POSTGRES_UNSUPPORTED_BYTEA_FORMAT, - error.UnsupportedArrayFormat => .POSTGRES_UNSUPPORTED_ARRAY_FORMAT, - error.UnsupportedIntegerSize => .POSTGRES_UNSUPPORTED_INTEGER_SIZE, - error.UnsupportedNumericFormat => .POSTGRES_UNSUPPORTED_NUMERIC_FORMAT, - error.UnknownFormatCode => .POSTGRES_UNKNOWN_FORMAT_CODE, - error.JSError => { - return globalObject.takeException(error.JSError); - }, - error.OutOfMemory => { - // TODO: add binding for creating an out of memory error? 
- return globalObject.takeException(globalObject.throwOutOfMemory()); - }, - error.ShortRead => { - bun.unreachablePanic("Assertion failed: ShortRead should be handled by the caller in postgres", .{}); - }, - }; - if (message) |msg| { - return error_code.fmt(globalObject, "{s}", .{msg}); - } - return error_code.fmt(globalObject, "Failed to bind query: {s}", .{@errorName(err)}); -} - -pub const SSLMode = enum(u8) { - disable = 0, - prefer = 1, - require = 2, - verify_ca = 3, - verify_full = 4, -}; - -pub const Data = union(enum) { - owned: bun.ByteList, - temporary: []const u8, - empty: void, - - pub const Empty: Data = .{ .empty = {} }; - - pub fn toOwned(this: @This()) !bun.ByteList { - return switch (this) { - .owned => this.owned, - .temporary => bun.ByteList.init(try bun.default_allocator.dupe(u8, this.temporary)), - .empty => bun.ByteList.init(&.{}), - }; - } - - pub fn deinit(this: *@This()) void { - switch (this.*) { - .owned => this.owned.deinitWithAllocator(bun.default_allocator), - .temporary => {}, - .empty => {}, - } - } - - /// Zero bytes before deinit - /// Generally, for security reasons. - pub fn zdeinit(this: *@This()) void { - switch (this.*) { - .owned => { - - // Zero bytes before deinit - @memset(this.owned.slice(), 0); - - this.owned.deinitWithAllocator(bun.default_allocator); - }, - .temporary => {}, - .empty => {}, - } - } - - pub fn slice(this: @This()) []const u8 { - return switch (this) { - .owned => this.owned.slice(), - .temporary => this.temporary, - .empty => "", - }; - } - - pub fn substring(this: @This(), start_index: usize, end_index: usize) Data { - return switch (this) { - .owned => .{ .temporary = this.owned.slice()[start_index..end_index] }, - .temporary => .{ .temporary = this.temporary[start_index..end_index] }, - .empty => .{ .empty = {} }, - }; - } - - pub fn sliceZ(this: @This()) [:0]const u8 { - return switch (this) { - .owned => this.owned.slice()[0..this.owned.len :0], - .temporary => this.temporary[0..this.temporary.len :0], - .empty => "", - }; - } -}; -pub const protocol = @import("./postgres/postgres_protocol.zig"); -pub const types = @import("./postgres/postgres_types.zig"); - -const Socket = uws.AnySocket; -const PreparedStatementsMap = std.HashMapUnmanaged(u64, *PostgresSQLStatement, bun.IdentityContext(u64), 80); - -const SocketMonitor = struct { - const DebugSocketMonitorWriter = struct { - var file: std.fs.File = undefined; - var enabled = false; - var check = std.once(load); - pub fn write(data: []const u8) void { - file.writeAll(data) catch {}; - } - - fn load() void { - if (bun.getenvZAnyCase("BUN_POSTGRES_SOCKET_MONITOR")) |monitor| { - enabled = true; - file = std.fs.cwd().createFile(monitor, .{ .truncate = true }) catch { - enabled = false; - return; - }; - debug("writing to {s}", .{monitor}); - } - } - }; - - const DebugSocketMonitorReader = struct { - var file: std.fs.File = undefined; - var enabled = false; - var check = std.once(load); - - fn load() void { - if (bun.getenvZAnyCase("BUN_POSTGRES_SOCKET_MONITOR_READER")) |monitor| { - enabled = true; - file = std.fs.cwd().createFile(monitor, .{ .truncate = true }) catch { - enabled = false; - return; - }; - debug("duplicating reads to {s}", .{monitor}); - } - } - - pub fn write(data: []const u8) void { - file.writeAll(data) catch {}; - } - }; - - pub fn write(data: []const u8) void { - if (comptime bun.Environment.isDebug) { - DebugSocketMonitorWriter.check.call(); - if (DebugSocketMonitorWriter.enabled) { - DebugSocketMonitorWriter.write(data); - } - } - } - - pub fn 
read(data: []const u8) void { - if (comptime bun.Environment.isDebug) { - DebugSocketMonitorReader.check.call(); - if (DebugSocketMonitorReader.enabled) { - DebugSocketMonitorReader.write(data); - } - } - } -}; - -pub const PostgresSQLContext = struct { - tcp: ?*uws.SocketContext = null, - - onQueryResolveFn: JSC.Strong.Optional = .empty, - onQueryRejectFn: JSC.Strong.Optional = .empty, - - pub fn init(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - var ctx = &globalObject.bunVM().rareData().postgresql_context; - ctx.onQueryResolveFn.set(globalObject, callframe.argument(0)); - ctx.onQueryRejectFn.set(globalObject, callframe.argument(1)); - - return .js_undefined; - } - - comptime { - const js_init = JSC.toJSHostFn(init); - @export(&js_init, .{ .name = "PostgresSQLContext__init" }); - } -}; -pub const PostgresSQLQueryResultMode = enum(u2) { - objects = 0, - values = 1, - raw = 2, -}; - -const JSRef = JSC.JSRef; - -pub const PostgresSQLQuery = struct { - statement: ?*PostgresSQLStatement = null, - query: bun.String = bun.String.empty, - cursor_name: bun.String = bun.String.empty, - - thisValue: JSRef = JSRef.empty(), - - status: Status = Status.pending, - - ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(1), - - flags: packed struct(u8) { - is_done: bool = false, - binary: bool = false, - bigint: bool = false, - simple: bool = false, - result_mode: PostgresSQLQueryResultMode = .objects, - _padding: u2 = 0, - } = .{}, - - pub const js = JSC.Codegen.JSPostgresSQLQuery; - pub const toJS = js.toJS; - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - pub fn getTarget(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, clean_target: bool) JSC.JSValue { - const thisValue = this.thisValue.get(); - if (thisValue == .zero) { - return .zero; - } - const target = js.targetGetCached(thisValue) orelse return .zero; - if (clean_target) { - js.targetSetCached(thisValue, globalObject, .zero); - } - return target; - } - - pub const Status = enum(u8) { - /// The query was just enqueued, statement status can be checked for more details - pending, - /// The query is being bound to the statement - binding, - /// The query is running - running, - /// The query is waiting for a partial response - partial_response, - /// The query was successful - success, - /// The query failed - fail, - - pub fn isRunning(this: Status) bool { - return @intFromEnum(this) > @intFromEnum(Status.pending) and @intFromEnum(this) < @intFromEnum(Status.success); - } - }; - - pub fn hasPendingActivity(this: *@This()) bool { - return this.ref_count.load(.monotonic) > 1; - } - - pub fn deinit(this: *@This()) void { - this.thisValue.deinit(); - if (this.statement) |statement| { - statement.deref(); - } - this.query.deref(); - this.cursor_name.deref(); - bun.default_allocator.destroy(this); - } - - pub fn finalize(this: *@This()) void { - debug("PostgresSQLQuery finalize", .{}); - if (this.thisValue == .weak) { - // clean up if is a weak reference, if is a strong reference we need to wait until the query is done - // if we are a strong reference, here is probably a bug because GC'd should not happen - this.thisValue.weak = .zero; - } - this.deref(); - } - - pub fn deref(this: *@This()) void { - const ref_count = this.ref_count.fetchSub(1, .monotonic); - - if (ref_count == 1) { - this.deinit(); - } - } - - pub fn ref(this: *@This()) void { - bun.assert(this.ref_count.fetchAdd(1, .monotonic) > 0); - } - - pub fn onWriteFail( - this: *@This(), - err: 
AnyPostgresError, - globalObject: *JSC.JSGlobalObject, - queries_array: JSValue, - ) void { - this.status = .fail; - const thisValue = this.thisValue.get(); - defer this.thisValue.deinit(); - const targetValue = this.getTarget(globalObject, true); - if (thisValue == .zero or targetValue == .zero) { - return; - } - - const vm = JSC.VirtualMachine.get(); - const function = vm.rareData().postgresql_context.onQueryRejectFn.get().?; - const event_loop = vm.eventLoop(); - event_loop.runCallback(function, globalObject, thisValue, &.{ - targetValue, - postgresErrorToJS(globalObject, null, err), - queries_array, - }); - } - pub fn onJSError(this: *@This(), err: JSC.JSValue, globalObject: *JSC.JSGlobalObject) void { - this.status = .fail; - this.ref(); - defer this.deref(); - - const thisValue = this.thisValue.get(); - defer this.thisValue.deinit(); - const targetValue = this.getTarget(globalObject, true); - if (thisValue == .zero or targetValue == .zero) { - return; - } - - var vm = JSC.VirtualMachine.get(); - const function = vm.rareData().postgresql_context.onQueryRejectFn.get().?; - const event_loop = vm.eventLoop(); - event_loop.runCallback(function, globalObject, thisValue, &.{ - targetValue, - err, - }); - } - pub fn onError(this: *@This(), err: PostgresSQLStatement.Error, globalObject: *JSC.JSGlobalObject) void { - this.onJSError(err.toJS(globalObject), globalObject); - } - - const CommandTag = union(enum) { - // For an INSERT command, the tag is INSERT oid rows, where rows is the - // number of rows inserted. oid used to be the object ID of the inserted - // row if rows was 1 and the target table had OIDs, but OIDs system - // columns are not supported anymore; therefore oid is always 0. - INSERT: u64, - // For a DELETE command, the tag is DELETE rows where rows is the number - // of rows deleted. - DELETE: u64, - // For an UPDATE command, the tag is UPDATE rows where rows is the - // number of rows updated. - UPDATE: u64, - // For a MERGE command, the tag is MERGE rows where rows is the number - // of rows inserted, updated, or deleted. - MERGE: u64, - // For a SELECT or CREATE TABLE AS command, the tag is SELECT rows where - // rows is the number of rows retrieved. - SELECT: u64, - // For a MOVE command, the tag is MOVE rows where rows is the number of - // rows the cursor's position has been changed by. - MOVE: u64, - // For a FETCH command, the tag is FETCH rows where rows is the number - // of rows that have been retrieved from the cursor. - FETCH: u64, - // For a COPY command, the tag is COPY rows where rows is the number of - // rows copied. (Note: the row count appears only in PostgreSQL 8.2 and - // later.) 
- COPY: u64, - - other: []const u8, - - pub fn toJSTag(this: CommandTag, globalObject: *JSC.JSGlobalObject) JSValue { - return switch (this) { - .INSERT => JSValue.jsNumber(1), - .DELETE => JSValue.jsNumber(2), - .UPDATE => JSValue.jsNumber(3), - .MERGE => JSValue.jsNumber(4), - .SELECT => JSValue.jsNumber(5), - .MOVE => JSValue.jsNumber(6), - .FETCH => JSValue.jsNumber(7), - .COPY => JSValue.jsNumber(8), - .other => |tag| JSC.ZigString.init(tag).toJS(globalObject), - }; - } - - pub fn toJSNumber(this: CommandTag) JSValue { - return switch (this) { - .other => JSValue.jsNumber(0), - inline else => |val| JSValue.jsNumber(val), - }; - } - - const KnownCommand = enum { - INSERT, - DELETE, - UPDATE, - MERGE, - SELECT, - MOVE, - FETCH, - COPY, - - pub const Map = bun.ComptimeEnumMap(KnownCommand); - }; - - pub fn init(tag: []const u8) CommandTag { - const first_space_index = bun.strings.indexOfChar(tag, ' ') orelse return .{ .other = tag }; - const cmd = KnownCommand.Map.get(tag[0..first_space_index]) orelse return .{ - .other = tag, - }; - - const number = brk: { - switch (cmd) { - .INSERT => { - var remaining = tag[@min(first_space_index + 1, tag.len)..]; - const second_space = bun.strings.indexOfChar(remaining, ' ') orelse return .{ .other = tag }; - remaining = remaining[@min(second_space + 1, remaining.len)..]; - break :brk std.fmt.parseInt(u64, remaining, 0) catch |err| { - debug("CommandTag failed to parse number: {s}", .{@errorName(err)}); - return .{ .other = tag }; - }; - }, - else => { - const after_tag = tag[@min(first_space_index + 1, tag.len)..]; - break :brk std.fmt.parseInt(u64, after_tag, 0) catch |err| { - debug("CommandTag failed to parse number: {s}", .{@errorName(err)}); - return .{ .other = tag }; - }; - }, - } - }; - - switch (cmd) { - inline else => |t| return @unionInit(CommandTag, @tagName(t), number), - } - } - }; - - pub fn allowGC(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) void { - if (thisValue == .zero) { - return; - } - - defer thisValue.ensureStillAlive(); - js.bindingSetCached(thisValue, globalObject, .zero); - js.pendingValueSetCached(thisValue, globalObject, .zero); - js.targetSetCached(thisValue, globalObject, .zero); - } - - fn consumePendingValue(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) ?JSValue { - const pending_value = js.pendingValueGetCached(thisValue) orelse return null; - js.pendingValueSetCached(thisValue, globalObject, .zero); - return pending_value; - } - - pub fn onResult(this: *@This(), command_tag_str: []const u8, globalObject: *JSC.JSGlobalObject, connection: JSC.JSValue, is_last: bool) void { - this.ref(); - defer this.deref(); - - const thisValue = this.thisValue.get(); - const targetValue = this.getTarget(globalObject, is_last); - if (is_last) { - this.status = .success; - } else { - this.status = .partial_response; - } - defer if (is_last) { - allowGC(thisValue, globalObject); - this.thisValue.deinit(); - }; - if (thisValue == .zero or targetValue == .zero) { - return; - } - - const vm = JSC.VirtualMachine.get(); - const function = vm.rareData().postgresql_context.onQueryResolveFn.get().?; - const event_loop = vm.eventLoop(); - const tag = CommandTag.init(command_tag_str); - - event_loop.runCallback(function, globalObject, thisValue, &.{ - targetValue, - consumePendingValue(thisValue, globalObject) orelse .js_undefined, - tag.toJSTag(globalObject), - tag.toJSNumber(), - if (connection == .zero) .js_undefined else PostgresSQLConnection.js.queriesGetCached(connection) orelse .js_undefined, - 
JSValue.jsBoolean(is_last), - }); - } - - pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*PostgresSQLQuery { - _ = callframe; - return globalThis.throw("PostgresSQLQuery cannot be constructed directly", .{}); - } - - pub fn estimatedSize(this: *PostgresSQLQuery) usize { - _ = this; - return @sizeOf(PostgresSQLQuery); - } - - pub fn call(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const arguments = callframe.arguments_old(6).slice(); - var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); - defer args.deinit(); - const query = args.nextEat() orelse { - return globalThis.throw("query must be a string", .{}); - }; - const values = args.nextEat() orelse { - return globalThis.throw("values must be an array", .{}); - }; - - if (!query.isString()) { - return globalThis.throw("query must be a string", .{}); - } - - if (values.jsType() != .Array) { - return globalThis.throw("values must be an array", .{}); - } - - const pending_value: JSValue = args.nextEat() orelse .js_undefined; - const columns: JSValue = args.nextEat() orelse .js_undefined; - const js_bigint: JSValue = args.nextEat() orelse .false; - const js_simple: JSValue = args.nextEat() orelse .false; - - const bigint = js_bigint.isBoolean() and js_bigint.asBoolean(); - const simple = js_simple.isBoolean() and js_simple.asBoolean(); - if (simple) { - if (values.getLength(globalThis) > 0) { - return globalThis.throwInvalidArguments("simple query cannot have parameters", .{}); - } - if (query.getLength(globalThis) >= std.math.maxInt(i32)) { - return globalThis.throwInvalidArguments("query is too long", .{}); - } - } - if (!pending_value.jsType().isArrayLike()) { - return globalThis.throwInvalidArgumentType("query", "pendingValue", "Array"); - } - - var ptr = try bun.default_allocator.create(PostgresSQLQuery); - - const this_value = ptr.toJS(globalThis); - this_value.ensureStillAlive(); - - ptr.* = .{ - .query = try query.toBunString(globalThis), - .thisValue = JSRef.initWeak(this_value), - .flags = .{ - .bigint = bigint, - .simple = simple, - }, - }; - - js.bindingSetCached(this_value, globalThis, values); - js.pendingValueSetCached(this_value, globalThis, pending_value); - if (!columns.isUndefined()) { - js.columnsSetCached(this_value, globalThis, columns); - } - - return this_value; - } - - pub fn push(this: *PostgresSQLQuery, globalThis: *JSC.JSGlobalObject, value: JSValue) void { - var pending_value = this.pending_value.get() orelse return; - pending_value.push(globalThis, value); - } - - pub fn doDone(this: *@This(), globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - _ = globalObject; - this.flags.is_done = true; - return .js_undefined; - } - pub fn setPendingValue(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const result = callframe.argument(0); - js.pendingValueSetCached(this.thisValue.get(), globalObject, result); - return .js_undefined; - } - pub fn setMode(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const js_mode = callframe.argument(0); - if (js_mode.isEmptyOrUndefinedOrNull() or !js_mode.isNumber()) { - return globalObject.throwInvalidArgumentType("setMode", "mode", "Number"); - } - - const mode = js_mode.coerce(i32, globalObject); - this.flags.result_mode = std.meta.intToEnum(PostgresSQLQueryResultMode, mode) catch { - return 
globalObject.throwInvalidArgumentTypeValue("mode", "Number", js_mode); - }; - return .js_undefined; - } - - pub fn doRun(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - var arguments_ = callframe.arguments_old(2); - const arguments = arguments_.slice(); - const connection: *PostgresSQLConnection = arguments[0].as(PostgresSQLConnection) orelse { - return globalObject.throw("connection must be a PostgresSQLConnection", .{}); - }; - - connection.poll_ref.ref(globalObject.bunVM()); - var query = arguments[1]; - - if (!query.isObject()) { - return globalObject.throwInvalidArgumentType("run", "query", "Query"); - } - - const this_value = callframe.this(); - const binding_value = js.bindingGetCached(this_value) orelse .zero; - var query_str = this.query.toUTF8(bun.default_allocator); - defer query_str.deinit(); - var writer = connection.writer(); - - if (this.flags.simple) { - debug("executeQuery", .{}); - - const can_execute = !connection.hasQueryRunning(); - if (can_execute) { - PostgresRequest.executeQuery(query_str.slice(), PostgresSQLConnection.Writer, writer) catch |err| { - if (!globalObject.hasException()) - return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to execute query", err)); - return error.JSError; - }; - connection.flags.is_ready_for_query = false; - this.status = .running; - } else { - this.status = .pending; - } - const stmt = bun.default_allocator.create(PostgresSQLStatement) catch { - return globalObject.throwOutOfMemory(); - }; - // Query is simple and it's the only owner of the statement - stmt.* = .{ - .signature = Signature.empty(), - .ref_count = 1, - .status = .parsing, - }; - this.statement = stmt; - // We need a strong reference to the query so that it doesn't get GC'd - connection.requests.writeItem(this) catch return globalObject.throwOutOfMemory(); - this.ref(); - this.thisValue.upgrade(globalObject); - - js.targetSetCached(this_value, globalObject, query); - if (this.status == .running) { - connection.flushDataAndResetTimeout(); - } else { - connection.resetConnectionTimeout(); - } - return .js_undefined; - } - - const columns_value: JSValue = js.columnsGetCached(this_value) orelse .js_undefined; - - var signature = Signature.generate(globalObject, query_str.slice(), binding_value, columns_value, connection.prepared_statement_id, connection.flags.use_unnamed_prepared_statements) catch |err| { - if (!globalObject.hasException()) - return globalObject.throwError(err, "failed to generate signature"); - return error.JSError; - }; - - const has_params = signature.fields.len > 0; - var did_write = false; - enqueue: { - var connection_entry_value: ?**PostgresSQLStatement = null; - if (!connection.flags.use_unnamed_prepared_statements) { - const entry = connection.statements.getOrPut(bun.default_allocator, bun.hash(signature.name)) catch |err| { - signature.deinit(); - return globalObject.throwError(err, "failed to allocate statement"); - }; - connection_entry_value = entry.value_ptr; - if (entry.found_existing) { - this.statement = connection_entry_value.?.*; - this.statement.?.ref(); - signature.deinit(); - - switch (this.statement.?.status) { - .failed => { - // If the statement failed, we need to throw the error - return globalObject.throwValue(this.statement.?.error_response.?.toJS(globalObject)); - }, - .prepared => { - if (!connection.hasQueryRunning()) { - this.flags.binary = this.statement.?.fields.len > 0; - debug("bindAndExecute", .{}); - - // bindAndExecute will bind + execute, 
it will change to running after binding is complete - PostgresRequest.bindAndExecute(globalObject, this.statement.?, binding_value, columns_value, PostgresSQLConnection.Writer, writer) catch |err| { - if (!globalObject.hasException()) - return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to bind and execute query", err)); - return error.JSError; - }; - connection.flags.is_ready_for_query = false; - this.status = .binding; - - did_write = true; - } - }, - .parsing, .pending => {}, - } - - break :enqueue; - } - } - const can_execute = !connection.hasQueryRunning(); - - if (can_execute) { - // If it does not have params, we can write and execute immediately in one go - if (!has_params) { - debug("prepareAndQueryWithSignature", .{}); - // prepareAndQueryWithSignature will write + bind + execute, it will change to running after binding is complete - PostgresRequest.prepareAndQueryWithSignature(globalObject, query_str.slice(), binding_value, PostgresSQLConnection.Writer, writer, &signature) catch |err| { - signature.deinit(); - if (!globalObject.hasException()) - return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to prepare and query", err)); - return error.JSError; - }; - connection.flags.is_ready_for_query = false; - this.status = .binding; - did_write = true; - } else { - debug("writeQuery", .{}); - - PostgresRequest.writeQuery(query_str.slice(), signature.prepared_statement_name, signature.fields, PostgresSQLConnection.Writer, writer) catch |err| { - signature.deinit(); - if (!globalObject.hasException()) - return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to write query", err)); - return error.JSError; - }; - writer.write(&protocol.Sync) catch |err| { - signature.deinit(); - if (!globalObject.hasException()) - return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to flush", err)); - return error.JSError; - }; - connection.flags.is_ready_for_query = false; - did_write = true; - } - } - { - const stmt = bun.default_allocator.create(PostgresSQLStatement) catch { - return globalObject.throwOutOfMemory(); - }; - // we only have connection_entry_value if we are using named prepared statements - if (connection_entry_value) |entry_value| { - connection.prepared_statement_id += 1; - stmt.* = .{ .signature = signature, .ref_count = 2, .status = if (can_execute) .parsing else .pending }; - this.statement = stmt; - - entry_value.* = stmt; - } else { - stmt.* = .{ .signature = signature, .ref_count = 1, .status = if (can_execute) .parsing else .pending }; - this.statement = stmt; - } - } - } - // We need a strong reference to the query so that it doesn't get GC'd - connection.requests.writeItem(this) catch return globalObject.throwOutOfMemory(); - this.ref(); - this.thisValue.upgrade(globalObject); - - js.targetSetCached(this_value, globalObject, query); - if (did_write) { - connection.flushDataAndResetTimeout(); - } else { - connection.resetConnectionTimeout(); - } - return .js_undefined; - } - - pub fn doCancel(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - _ = callframe; - _ = globalObject; - _ = this; - - return .js_undefined; - } - - comptime { - const jscall = JSC.toJSHostFn(call); - @export(&jscall, .{ .name = "PostgresSQLQuery__createInstance" }); - } -}; - -pub const PostgresRequest = struct { - pub fn writeBind( - name: []const u8, - cursor_name: bun.String, - globalObject: *JSC.JSGlobalObject, - values_array: JSValue, - columns_value: JSValue, - 
parameter_fields: []const int4, - result_fields: []const protocol.FieldDescription, - comptime Context: type, - writer: protocol.NewWriter(Context), - ) !void { - try writer.write("B"); - const length = try writer.length(); - - try writer.String(cursor_name); - try writer.string(name); - - const len: u32 = @truncate(parameter_fields.len); - - // The number of parameter format codes that follow (denoted C - // below). This can be zero to indicate that there are no - // parameters or that the parameters all use the default format - // (text); or one, in which case the specified format code is - // applied to all parameters; or it can equal the actual number - // of parameters. - try writer.short(len); - - var iter = QueryBindingIterator.init(values_array, columns_value, globalObject); - for (0..len) |i| { - const parameter_field = parameter_fields[i]; - const is_custom_type = std.math.maxInt(short) < parameter_field; - const tag: types.Tag = if (is_custom_type) .text else @enumFromInt(@as(short, @intCast(parameter_field))); - - const force_text = is_custom_type or (tag.isBinaryFormatSupported() and brk: { - iter.to(@truncate(i)); - if (iter.next()) |value| { - break :brk value.isString(); - } - if (iter.anyFailed()) { - return error.InvalidQueryBinding; - } - break :brk false; - }); - - if (force_text) { - // If they pass a value as a string, let's avoid attempting to - // convert it to the binary representation. This minimizes the room - // for mistakes on our end, such as stripping the timezone - // differently than what Postgres does when given a timestamp with - // timezone. - try writer.short(0); - continue; - } - - try writer.short( - tag.formatCode(), - ); - } - - // The number of parameter values that follow (possibly zero). This - // must match the number of parameters needed by the query. - try writer.short(len); - - debug("Bind: {} ({d} args)", .{ bun.fmt.quote(name), len }); - iter.to(0); - var i: usize = 0; - while (iter.next()) |value| : (i += 1) { - const tag: types.Tag = brk: { - if (i >= len) { - // parameter in array but not in parameter_fields - // this is probably a bug a bug in bun lets return .text here so the server will send a error 08P01 - // with will describe better the error saying exactly how many parameters are missing and are expected - // Example: - // SQL error: PostgresError: bind message supplies 0 parameters, but prepared statement "PSELECT * FROM test_table WHERE id=$1 .in$0" requires 1 - // errno: "08P01", - // code: "ERR_POSTGRES_SERVER_ERROR" - break :brk .text; - } - const parameter_field = parameter_fields[i]; - const is_custom_type = std.math.maxInt(short) < parameter_field; - break :brk if (is_custom_type) .text else @enumFromInt(@as(short, @intCast(parameter_field))); - }; - if (value.isEmptyOrUndefinedOrNull()) { - debug(" -> NULL", .{}); - // As a special case, -1 indicates a - // NULL parameter value. No value bytes follow in the NULL case. - try writer.int4(@bitCast(@as(i32, -1))); - continue; - } - if (comptime bun.Environment.enable_logs) { - debug(" -> {s}", .{tag.tagName() orelse "(unknown)"}); - } - - switch ( - // If they pass a value as a string, let's avoid attempting to - // convert it to the binary representation. This minimizes the room - // for mistakes on our end, such as stripping the timezone - // differently than what Postgres does when given a timestamp with - // timezone. 
- if (tag.isBinaryFormatSupported() and value.isString()) .text else tag) { - .jsonb, .json => { - var str = bun.String.empty; - defer str.deref(); - value.jsonStringify(globalObject, 0, &str); - const slice = str.toUTF8WithoutRef(bun.default_allocator); - defer slice.deinit(); - const l = try writer.length(); - try writer.write(slice.slice()); - try l.writeExcludingSelf(); - }, - .bool => { - const l = try writer.length(); - try writer.write(&[1]u8{@intFromBool(value.toBoolean())}); - try l.writeExcludingSelf(); - }, - .timestamp, .timestamptz => { - const l = try writer.length(); - try writer.int8(types.date.fromJS(globalObject, value)); - try l.writeExcludingSelf(); - }, - .bytea => { - var bytes: []const u8 = ""; - if (value.asArrayBuffer(globalObject)) |buf| { - bytes = buf.byteSlice(); - } - const l = try writer.length(); - debug(" {d} bytes", .{bytes.len}); - - try writer.write(bytes); - try l.writeExcludingSelf(); - }, - .int4 => { - const l = try writer.length(); - try writer.int4(@bitCast(value.coerceToInt32(globalObject))); - try l.writeExcludingSelf(); - }, - .int4_array => { - const l = try writer.length(); - try writer.int4(@bitCast(value.coerceToInt32(globalObject))); - try l.writeExcludingSelf(); - }, - .float8 => { - const l = try writer.length(); - try writer.f64(@bitCast(try value.toNumber(globalObject))); - try l.writeExcludingSelf(); - }, - - else => { - const str = try String.fromJS(value, globalObject); - if (str.tag == .Dead) return error.OutOfMemory; - defer str.deref(); - const slice = str.toUTF8WithoutRef(bun.default_allocator); - defer slice.deinit(); - const l = try writer.length(); - try writer.write(slice.slice()); - try l.writeExcludingSelf(); - }, - } - } - - var any_non_text_fields: bool = false; - for (result_fields) |field| { - if (field.typeTag().isBinaryFormatSupported()) { - any_non_text_fields = true; - break; - } - } - - if (any_non_text_fields) { - try writer.short(result_fields.len); - for (result_fields) |field| { - try writer.short( - field.typeTag().formatCode(), - ); - } - } else { - try writer.short(0); - } - - try length.write(); - } - - pub fn writeQuery( - query: []const u8, - name: []const u8, - params: []const int4, - comptime Context: type, - writer: protocol.NewWriter(Context), - ) AnyPostgresError!void { - { - var q = protocol.Parse{ - .name = name, - .params = params, - .query = query, - }; - try q.writeInternal(Context, writer); - debug("Parse: {}", .{bun.fmt.quote(query)}); - } - - { - var d = protocol.Describe{ - .p = .{ - .prepared_statement = name, - }, - }; - try d.writeInternal(Context, writer); - debug("Describe: {}", .{bun.fmt.quote(name)}); - } - } - - pub fn prepareAndQueryWithSignature( - globalObject: *JSC.JSGlobalObject, - query: []const u8, - array_value: JSValue, - comptime Context: type, - writer: protocol.NewWriter(Context), - signature: *Signature, - ) AnyPostgresError!void { - try writeQuery(query, signature.prepared_statement_name, signature.fields, Context, writer); - try writeBind(signature.prepared_statement_name, bun.String.empty, globalObject, array_value, .zero, &.{}, &.{}, Context, writer); - var exec = protocol.Execute{ - .p = .{ - .prepared_statement = signature.prepared_statement_name, - }, - }; - try exec.writeInternal(Context, writer); - - try writer.write(&protocol.Flush); - try writer.write(&protocol.Sync); - } - - pub fn bindAndExecute( - globalObject: *JSC.JSGlobalObject, - statement: *PostgresSQLStatement, - array_value: JSValue, - columns_value: JSValue, - comptime Context: type, - writer: 
protocol.NewWriter(Context), - ) !void { - try writeBind(statement.signature.prepared_statement_name, bun.String.empty, globalObject, array_value, columns_value, statement.parameters, statement.fields, Context, writer); - var exec = protocol.Execute{ - .p = .{ - .prepared_statement = statement.signature.prepared_statement_name, - }, - }; - try exec.writeInternal(Context, writer); - - try writer.write(&protocol.Flush); - try writer.write(&protocol.Sync); - } - - pub fn executeQuery( - query: []const u8, - comptime Context: type, - writer: protocol.NewWriter(Context), - ) !void { - try protocol.writeQuery(query, Context, writer); - try writer.write(&protocol.Flush); - try writer.write(&protocol.Sync); - } - - pub fn onData( - connection: *PostgresSQLConnection, - comptime Context: type, - reader: protocol.NewReader(Context), - ) !void { - while (true) { - reader.markMessageStart(); - const c = try reader.int(u8); - debug("read: {c}", .{c}); - switch (c) { - 'D' => try connection.on(.DataRow, Context, reader), - 'd' => try connection.on(.CopyData, Context, reader), - 'S' => { - if (connection.tls_status == .message_sent) { - bun.debugAssert(connection.tls_status.message_sent == 8); - connection.tls_status = .ssl_ok; - connection.setupTLS(); - return; - } - - try connection.on(.ParameterStatus, Context, reader); - }, - 'Z' => try connection.on(.ReadyForQuery, Context, reader), - 'C' => try connection.on(.CommandComplete, Context, reader), - '2' => try connection.on(.BindComplete, Context, reader), - '1' => try connection.on(.ParseComplete, Context, reader), - 't' => try connection.on(.ParameterDescription, Context, reader), - 'T' => try connection.on(.RowDescription, Context, reader), - 'R' => try connection.on(.Authentication, Context, reader), - 'n' => try connection.on(.NoData, Context, reader), - 'K' => try connection.on(.BackendKeyData, Context, reader), - 'E' => try connection.on(.ErrorResponse, Context, reader), - 's' => try connection.on(.PortalSuspended, Context, reader), - '3' => try connection.on(.CloseComplete, Context, reader), - 'G' => try connection.on(.CopyInResponse, Context, reader), - 'N' => { - if (connection.tls_status == .message_sent) { - connection.tls_status = .ssl_not_available; - debug("Server does not support SSL", .{}); - if (connection.ssl_mode == .require) { - connection.fail("Server does not support SSL", error.TLSNotAvailable); - return; - } - continue; - } - - try connection.on(.NoticeResponse, Context, reader); - }, - 'I' => try connection.on(.EmptyQueryResponse, Context, reader), - 'H' => try connection.on(.CopyOutResponse, Context, reader), - 'c' => try connection.on(.CopyDone, Context, reader), - 'W' => try connection.on(.CopyBothResponse, Context, reader), - - else => { - debug("Unknown message: {c}", .{c}); - const to_skip = try reader.length() -| 1; - debug("to_skip: {d}", .{to_skip}); - try reader.skip(@intCast(@max(to_skip, 0))); - }, - } - } - } - - pub const Queue = std.fifo.LinearFifo(*PostgresSQLQuery, .Dynamic); -}; - -pub const PostgresSQLConnection = struct { - socket: Socket, - status: Status = Status.connecting, - ref_count: u32 = 1, - - write_buffer: bun.OffsetByteList = .{}, - read_buffer: bun.OffsetByteList = .{}, - last_message_start: u32 = 0, - requests: PostgresRequest.Queue, - - poll_ref: bun.Async.KeepAlive = .{}, - globalObject: *JSC.JSGlobalObject, - - statements: PreparedStatementsMap, - prepared_statement_id: u64 = 0, - pending_activity_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), - js_value: JSValue = 
.js_undefined, - - backend_parameters: bun.StringMap = bun.StringMap.init(bun.default_allocator, true), - backend_key_data: protocol.BackendKeyData = .{}, - - database: []const u8 = "", - user: []const u8 = "", - password: []const u8 = "", - path: []const u8 = "", - options: []const u8 = "", - options_buf: []const u8 = "", - - authentication_state: AuthenticationState = .{ .pending = {} }, - - tls_ctx: ?*uws.SocketContext = null, - tls_config: JSC.API.ServerConfig.SSLConfig = .{}, - tls_status: TLSStatus = .none, - ssl_mode: SSLMode = .disable, - - idle_timeout_interval_ms: u32 = 0, - connection_timeout_ms: u32 = 0, - - flags: ConnectionFlags = .{}, - - /// Before being connected, this is a connection timeout timer. - /// After being connected, this is an idle timeout timer. - timer: bun.api.Timer.EventLoopTimer = .{ - .tag = .PostgresSQLConnectionTimeout, - .next = .{ - .sec = 0, - .nsec = 0, - }, - }, - - /// This timer controls the maximum lifetime of a connection. - /// It starts when the connection successfully starts (i.e. after handshake is complete). - /// It stops when the connection is closed. - max_lifetime_interval_ms: u32 = 0, - max_lifetime_timer: bun.api.Timer.EventLoopTimer = .{ - .tag = .PostgresSQLConnectionMaxLifetime, - .next = .{ - .sec = 0, - .nsec = 0, - }, - }, - - pub const ConnectionFlags = packed struct { - is_ready_for_query: bool = false, - is_processing_data: bool = false, - use_unnamed_prepared_statements: bool = false, - }; - - pub const TLSStatus = union(enum) { - none, - pending, - - /// Number of bytes sent of the 8-byte SSL request message. - /// Since we may send a partial message, we need to know how many bytes were sent. - message_sent: u8, - - ssl_not_available, - ssl_ok, - }; - - pub const AuthenticationState = union(enum) { - pending: void, - none: void, - ok: void, - SASL: SASL, - md5: void, - - pub fn zero(this: *AuthenticationState) void { - switch (this.*) { - .SASL => |*sasl| { - sasl.deinit(); - }, - else => {}, - } - this.* = .{ .none = {} }; - } - }; - - pub const SASL = struct { - const nonce_byte_len = 18; - const nonce_base64_len = bun.base64.encodeLenFromSize(nonce_byte_len); - - const server_signature_byte_len = 32; - const server_signature_base64_len = bun.base64.encodeLenFromSize(server_signature_byte_len); - - const salted_password_byte_len = 32; - - nonce_base64_bytes: [nonce_base64_len]u8 = .{0} ** nonce_base64_len, - nonce_len: u8 = 0, - - server_signature_base64_bytes: [server_signature_base64_len]u8 = .{0} ** server_signature_base64_len, - server_signature_len: u8 = 0, - - salted_password_bytes: [salted_password_byte_len]u8 = .{0} ** salted_password_byte_len, - salted_password_created: bool = false, - - status: SASLStatus = .init, - - pub const SASLStatus = enum { - init, - @"continue", - }; - - fn hmac(password: []const u8, data: []const u8) ?[32]u8 { - var buf = std.mem.zeroes([bun.BoringSSL.c.EVP_MAX_MD_SIZE]u8); - - // TODO: I don't think this is failable. 
- const result = bun.hmac.generate(password, data, .sha256, &buf) orelse return null; - - assert(result.len == 32); - return buf[0..32].*; - } - - pub fn computeSaltedPassword(this: *SASL, salt_bytes: []const u8, iteration_count: u32, connection: *PostgresSQLConnection) !void { - this.salted_password_created = true; - if (Crypto.EVP.pbkdf2(&this.salted_password_bytes, connection.password, salt_bytes, iteration_count, .sha256) == null) { - return error.PBKDFD2; - } - } - - pub fn saltedPassword(this: *const SASL) []const u8 { - assert(this.salted_password_created); - return this.salted_password_bytes[0..salted_password_byte_len]; - } - - pub fn serverSignature(this: *const SASL) []const u8 { - assert(this.server_signature_len > 0); - return this.server_signature_base64_bytes[0..this.server_signature_len]; - } - - pub fn computeServerSignature(this: *SASL, auth_string: []const u8) !void { - assert(this.server_signature_len == 0); - - const server_key = hmac(this.saltedPassword(), "Server Key") orelse return error.InvalidServerKey; - const server_signature_bytes = hmac(&server_key, auth_string) orelse return error.InvalidServerSignature; - this.server_signature_len = @intCast(bun.base64.encode(&this.server_signature_base64_bytes, &server_signature_bytes)); - } - - pub fn clientKey(this: *const SASL) [32]u8 { - return hmac(this.saltedPassword(), "Client Key").?; - } - - pub fn clientKeySignature(_: *const SASL, client_key: []const u8, auth_string: []const u8) [32]u8 { - var sha_digest = std.mem.zeroes(bun.sha.SHA256.Digest); - bun.sha.SHA256.hash(client_key, &sha_digest, JSC.VirtualMachine.get().rareData().boringEngine()); - return hmac(&sha_digest, auth_string).?; - } - - pub fn nonce(this: *SASL) []const u8 { - if (this.nonce_len == 0) { - var bytes: [nonce_byte_len]u8 = .{0} ** nonce_byte_len; - bun.csprng(&bytes); - this.nonce_len = @intCast(bun.base64.encode(&this.nonce_base64_bytes, &bytes)); - } - return this.nonce_base64_bytes[0..this.nonce_len]; - } - - pub fn deinit(this: *SASL) void { - this.nonce_len = 0; - this.salted_password_created = false; - this.server_signature_len = 0; - this.status = .init; - } - }; - - pub const Status = enum { - disconnected, - connecting, - // Prevent sending the startup message multiple times. - // Particularly relevant for TLS connections. 
- sent_startup_message, - connected, - failed, - }; - - pub const js = JSC.Codegen.JSPostgresSQLConnection; - pub const toJS = js.toJS; - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - fn getTimeoutInterval(this: *const PostgresSQLConnection) u32 { - return switch (this.status) { - .connected => this.idle_timeout_interval_ms, - .failed => 0, - else => this.connection_timeout_ms, - }; - } - pub fn disableConnectionTimeout(this: *PostgresSQLConnection) void { - if (this.timer.state == .ACTIVE) { - this.globalObject.bunVM().timer.remove(&this.timer); - } - this.timer.state = .CANCELLED; - } - pub fn resetConnectionTimeout(this: *PostgresSQLConnection) void { - // if we are processing data, don't reset the timeout, wait for the data to be processed - if (this.flags.is_processing_data) return; - const interval = this.getTimeoutInterval(); - if (this.timer.state == .ACTIVE) { - this.globalObject.bunVM().timer.remove(&this.timer); - } - if (interval == 0) { - return; - } - - this.timer.next = bun.timespec.msFromNow(@intCast(interval)); - this.globalObject.bunVM().timer.insert(&this.timer); - } - - pub fn getQueries(_: *PostgresSQLConnection, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { - if (js.queriesGetCached(thisValue)) |value| { - return value; - } - - const array = try JSC.JSValue.createEmptyArray(globalObject, 0); - js.queriesSetCached(thisValue, globalObject, array); - - return array; - } - - pub fn getOnConnect(_: *PostgresSQLConnection, thisValue: JSC.JSValue, _: *JSC.JSGlobalObject) JSC.JSValue { - if (js.onconnectGetCached(thisValue)) |value| { - return value; - } - - return .js_undefined; - } - - pub fn setOnConnect(_: *PostgresSQLConnection, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { - js.onconnectSetCached(thisValue, globalObject, value); - } - - pub fn getOnClose(_: *PostgresSQLConnection, thisValue: JSC.JSValue, _: *JSC.JSGlobalObject) JSC.JSValue { - if (js.oncloseGetCached(thisValue)) |value| { - return value; - } - - return .js_undefined; - } - - pub fn setOnClose(_: *PostgresSQLConnection, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { - js.oncloseSetCached(thisValue, globalObject, value); - } - - pub fn setupTLS(this: *PostgresSQLConnection) void { - debug("setupTLS", .{}); - const new_socket = this.socket.SocketTCP.socket.connected.upgrade(this.tls_ctx.?, this.tls_config.server_name) orelse { - this.fail("Failed to upgrade to TLS", error.TLSUpgradeFailed); - return; - }; - this.socket = .{ - .SocketTLS = .{ - .socket = .{ - .connected = new_socket, - }, - }, - }; - - this.start(); - } - fn setupMaxLifetimeTimerIfNecessary(this: *PostgresSQLConnection) void { - if (this.max_lifetime_interval_ms == 0) return; - if (this.max_lifetime_timer.state == .ACTIVE) return; - - this.max_lifetime_timer.next = bun.timespec.msFromNow(@intCast(this.max_lifetime_interval_ms)); - this.globalObject.bunVM().timer.insert(&this.max_lifetime_timer); - } - - pub fn onConnectionTimeout(this: *PostgresSQLConnection) bun.api.Timer.EventLoopTimer.Arm { - debug("onConnectionTimeout", .{}); - - this.timer.state = .FIRED; - if (this.flags.is_processing_data) { - return .disarm; - } - - if (this.getTimeoutInterval() == 0) { - this.resetConnectionTimeout(); - return .disarm; - } - - switch (this.status) { - .connected => { - this.failFmt(.POSTGRES_IDLE_TIMEOUT, "Idle timeout reached after {}", .{bun.fmt.fmtDurationOneDecimal(@as(u64, 
this.idle_timeout_interval_ms) *| std.time.ns_per_ms)}); - }, - .sent_startup_message => { - this.failFmt(.POSTGRES_CONNECTION_TIMEOUT, "Connection timed out after {} (sent startup message, but never received response)", .{bun.fmt.fmtDurationOneDecimal(@as(u64, this.connection_timeout_ms) *| std.time.ns_per_ms)}); - }, - else => { - this.failFmt(.POSTGRES_CONNECTION_TIMEOUT, "Connection timeout after {}", .{bun.fmt.fmtDurationOneDecimal(@as(u64, this.connection_timeout_ms) *| std.time.ns_per_ms)}); - }, - } - return .disarm; - } - - pub fn onMaxLifetimeTimeout(this: *PostgresSQLConnection) bun.api.Timer.EventLoopTimer.Arm { - debug("onMaxLifetimeTimeout", .{}); - this.max_lifetime_timer.state = .FIRED; - if (this.status == .failed) return .disarm; - this.failFmt(.POSTGRES_LIFETIME_TIMEOUT, "Max lifetime timeout reached after {}", .{bun.fmt.fmtDurationOneDecimal(@as(u64, this.max_lifetime_interval_ms) *| std.time.ns_per_ms)}); - return .disarm; - } - - fn start(this: *PostgresSQLConnection) void { - this.setupMaxLifetimeTimerIfNecessary(); - this.resetConnectionTimeout(); - this.sendStartupMessage(); - - const event_loop = this.globalObject.bunVM().eventLoop(); - event_loop.enter(); - defer event_loop.exit(); - this.flushData(); - } - - pub fn hasPendingActivity(this: *PostgresSQLConnection) bool { - return this.pending_activity_count.load(.acquire) > 0; - } - - fn updateHasPendingActivity(this: *PostgresSQLConnection) void { - const a: u32 = if (this.requests.readableLength() > 0) 1 else 0; - const b: u32 = if (this.status != .disconnected) 1 else 0; - this.pending_activity_count.store(a + b, .release); - } - - pub fn setStatus(this: *PostgresSQLConnection, status: Status) void { - if (this.status == status) return; - defer this.updateHasPendingActivity(); - - this.status = status; - this.resetConnectionTimeout(); - - switch (status) { - .connected => { - const on_connect = this.consumeOnConnectCallback(this.globalObject) orelse return; - const js_value = this.js_value; - js_value.ensureStillAlive(); - this.globalObject.queueMicrotask(on_connect, &[_]JSValue{ JSValue.jsNull(), js_value }); - this.poll_ref.unref(this.globalObject.bunVM()); - }, - else => {}, - } - } - - pub fn finalize(this: *PostgresSQLConnection) void { - debug("PostgresSQLConnection finalize", .{}); - this.stopTimers(); - this.js_value = .zero; - this.deref(); - } - - pub fn flushDataAndResetTimeout(this: *PostgresSQLConnection) void { - this.resetConnectionTimeout(); - this.flushData(); - } - - pub fn flushData(this: *PostgresSQLConnection) void { - const chunk = this.write_buffer.remaining(); - if (chunk.len == 0) return; - const wrote = this.socket.write(chunk, false); - if (wrote > 0) { - SocketMonitor.write(chunk[0..@intCast(wrote)]); - this.write_buffer.consume(@intCast(wrote)); - } - } - - pub fn failWithJSValue(this: *PostgresSQLConnection, value: JSValue) void { - defer this.updateHasPendingActivity(); - this.stopTimers(); - if (this.status == .failed) return; - - this.status = .failed; - - this.ref(); - defer this.deref(); - // we defer the refAndClose so the on_close will be called first before we reject the pending requests - defer this.refAndClose(value); - const on_close = this.consumeOnCloseCallback(this.globalObject) orelse return; - - const loop = this.globalObject.bunVM().eventLoop(); - loop.enter(); - defer loop.exit(); - _ = on_close.call( - this.globalObject, - this.js_value, - &[_]JSValue{ - value, - this.getQueriesArray(), - }, - ) catch |e| this.globalObject.reportActiveExceptionAsUnhandled(e); -
} - - pub fn failFmt(this: *PostgresSQLConnection, comptime error_code: JSC.Error, comptime fmt: [:0]const u8, args: anytype) void { - this.failWithJSValue(error_code.fmt(this.globalObject, fmt, args)); - } - - pub fn fail(this: *PostgresSQLConnection, message: []const u8, err: AnyPostgresError) void { - debug("failed: {s}: {s}", .{ message, @errorName(err) }); - - const globalObject = this.globalObject; - - this.failWithJSValue(postgresErrorToJS(globalObject, message, err)); - } - - pub fn onClose(this: *PostgresSQLConnection) void { - var vm = this.globalObject.bunVM(); - const loop = vm.eventLoop(); - loop.enter(); - defer loop.exit(); - this.poll_ref.unref(this.globalObject.bunVM()); - - this.fail("Connection closed", error.ConnectionClosed); - } - - fn sendStartupMessage(this: *PostgresSQLConnection) void { - if (this.status != .connecting) return; - debug("sendStartupMessage", .{}); - this.status = .sent_startup_message; - var msg = protocol.StartupMessage{ - .user = Data{ .temporary = this.user }, - .database = Data{ .temporary = this.database }, - .options = Data{ .temporary = this.options }, - }; - msg.writeInternal(Writer, this.writer()) catch |err| { - this.fail("Failed to write startup message", err); - }; - } - - fn startTLS(this: *PostgresSQLConnection, socket: uws.AnySocket) void { - debug("startTLS", .{}); - const offset = switch (this.tls_status) { - .message_sent => |count| count, - else => 0, - }; - const ssl_request = [_]u8{ - 0x00, 0x00, 0x00, 0x08, // Length - 0x04, 0xD2, 0x16, 0x2F, // SSLRequest code (80877103) - }; - - const written = socket.write(ssl_request[offset..], false); - if (written > 0) { - this.tls_status = .{ - .message_sent = offset + @as(u8, @intCast(written)), - }; - } else { - this.tls_status = .{ - .message_sent = offset, - }; - } - } - - pub fn onOpen(this: *PostgresSQLConnection, socket: uws.AnySocket) void { - this.socket = socket; - - this.poll_ref.ref(this.globalObject.bunVM()); - this.updateHasPendingActivity(); - - if (this.tls_status == .message_sent or this.tls_status == .pending) { - this.startTLS(socket); - return; - } - - this.start(); - } - - pub fn onHandshake(this: *PostgresSQLConnection, success: i32, ssl_error: uws.us_bun_verify_error_t) void { - debug("onHandshake: {d} {d}", .{ success, ssl_error.error_no }); - const handshake_success = success == 1; - if (handshake_success) { - if (this.tls_config.reject_unauthorized != 0) { - // only reject the connection if reject_unauthorized == true - switch (this.ssl_mode) { - // https://github.com/porsager/postgres/blob/6ec85a432b17661ccacbdf7f765c651e88969d36/src/connection.js#L272-L279 - - .verify_ca, .verify_full => { - if (ssl_error.error_no != 0) { - this.failWithJSValue(ssl_error.toJS(this.globalObject)); - return; - } - - const ssl_ptr: *BoringSSL.c.SSL = @ptrCast(this.socket.getNativeHandle()); - if (BoringSSL.c.SSL_get_servername(ssl_ptr, 0)) |servername| { - const hostname = servername[0..bun.len(servername)]; - if (!BoringSSL.checkServerIdentity(ssl_ptr, hostname)) { - this.failWithJSValue(ssl_error.toJS(this.globalObject)); - } - } - }, - else => { - return; - }, - } - } - } else { - // If we got here, the server rejected us and error_no holds the cause. - // reject_unauthorized does not matter here because the server has already disconnected us. - this.failWithJSValue(ssl_error.toJS(this.globalObject)); - } - } - - pub fn onTimeout(this: *PostgresSQLConnection) void { - _ = this; - debug("onTimeout", .{}); - } - - pub fn onDrain(this:
*PostgresSQLConnection) void { - - // Don't send any other messages while we're waiting for TLS. - if (this.tls_status == .message_sent) { - if (this.tls_status.message_sent < 8) { - this.startTLS(this.socket); - } - - return; - } - - const event_loop = this.globalObject.bunVM().eventLoop(); - event_loop.enter(); - defer event_loop.exit(); - this.flushData(); - } - - pub fn onData(this: *PostgresSQLConnection, data: []const u8) void { - this.ref(); - this.flags.is_processing_data = true; - const vm = this.globalObject.bunVM(); - - this.disableConnectionTimeout(); - defer { - if (this.status == .connected and !this.hasQueryRunning() and this.write_buffer.remaining().len == 0) { - // Don't keep the process alive when there's nothing to do. - this.poll_ref.unref(vm); - } else if (this.status == .connected) { - // Keep the process alive if there's something to do. - this.poll_ref.ref(vm); - } - this.flags.is_processing_data = false; - - // reset the connection timeout after we're done processing the data - this.resetConnectionTimeout(); - this.deref(); - } - - const event_loop = vm.eventLoop(); - event_loop.enter(); - defer event_loop.exit(); - SocketMonitor.read(data); - // reset the head to the last message so remaining() reflects the right number of bytes - this.read_buffer.head = this.last_message_start; - - if (this.read_buffer.remaining().len == 0) { - var consumed: usize = 0; - var offset: usize = 0; - const reader = protocol.StackReader.init(data, &consumed, &offset); - PostgresRequest.onData(this, protocol.StackReader, reader) catch |err| { - if (err == error.ShortRead) { - if (comptime bun.Environment.allow_assert) { - debug("read_buffer: empty and received short read: last_message_start: {d}, head: {d}, len: {d}", .{ - offset, - consumed, - data.len, - }); - } - - this.read_buffer.head = 0; - this.last_message_start = 0; - this.read_buffer.byte_list.len = 0; - this.read_buffer.write(bun.default_allocator, data[offset..]) catch @panic("failed to write to read buffer"); - } else { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - - this.fail("Failed to read data", err); - } - }; - // no need to reset anything, it's already empty - return; - } - // read buffer is not empty, so we need to write the data to the buffer and then read it - this.read_buffer.write(bun.default_allocator, data) catch @panic("failed to write to read buffer"); - PostgresRequest.onData(this, Reader, this.bufferedReader()) catch |err| { - if (err != error.ShortRead) { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - this.fail("Failed to read data", err); - return; - } - - if (comptime bun.Environment.allow_assert) { - debug("read_buffer: not empty and received short read: last_message_start: {d}, head: {d}, len: {d}", .{ - this.last_message_start, - this.read_buffer.head, - this.read_buffer.byte_list.len, - }); - } - return; - }; - - debug("clean read_buffer", .{}); - // success, we read everything!
let's reset the last message start and the head - this.last_message_start = 0; - this.read_buffer.head = 0; - } - - pub fn constructor(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*PostgresSQLConnection { - _ = callframe; - return globalObject.throw("PostgresSQLConnection cannot be constructed directly", .{}); - } - - comptime { - const jscall = JSC.toJSHostFn(call); - @export(&jscall, .{ .name = "PostgresSQLConnection__createInstance" }); - } - - pub fn call(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - var vm = globalObject.bunVM(); - const arguments = callframe.arguments_old(15).slice(); - const hostname_str = try arguments[0].toBunString(globalObject); - defer hostname_str.deref(); - const port = arguments[1].coerce(i32, globalObject); - - const username_str = try arguments[2].toBunString(globalObject); - defer username_str.deref(); - const password_str = try arguments[3].toBunString(globalObject); - defer password_str.deref(); - const database_str = try arguments[4].toBunString(globalObject); - defer database_str.deref(); - const ssl_mode: SSLMode = switch (arguments[5].toInt32()) { - 0 => .disable, - 1 => .prefer, - 2 => .require, - 3 => .verify_ca, - 4 => .verify_full, - else => .disable, - }; - - const tls_object = arguments[6]; - - var tls_config: JSC.API.ServerConfig.SSLConfig = .{}; - var tls_ctx: ?*uws.SocketContext = null; - if (ssl_mode != .disable) { - tls_config = if (tls_object.isBoolean() and tls_object.toBoolean()) - .{} - else if (tls_object.isObject()) - (JSC.API.ServerConfig.SSLConfig.fromJS(vm, globalObject, tls_object) catch return .zero) orelse .{} - else { - return globalObject.throwInvalidArguments("tls must be a boolean or an object", .{}); - }; - - if (globalObject.hasException()) { - tls_config.deinit(); - return .zero; - } - - // we always request the cert so we can verify it and also we manually abort the connection if the hostname doesn't match - const original_reject_unauthorized = tls_config.reject_unauthorized; - tls_config.reject_unauthorized = 0; - tls_config.request_cert = 1; - // We create it right here so we can throw errors early. 
- const context_options = tls_config.asUSockets(); - var err: uws.create_bun_socket_error_t = .none; - tls_ctx = uws.SocketContext.createSSLContext(vm.uwsLoop(), @sizeOf(*PostgresSQLConnection), context_options, &err) orelse { - // surface the concrete error when we have one; otherwise throw a generic message - if (err != .none) { - return globalObject.throwValue(err.toJS(globalObject)); - } else { - return globalObject.throw("failed to create TLS context", .{}); - } - }; - // restore the original reject_unauthorized - tls_config.reject_unauthorized = original_reject_unauthorized; - if (err != .none) { - tls_config.deinit(); - if (tls_ctx) |ctx| { - ctx.deinit(true); - } - return globalObject.throwValue(err.toJS(globalObject)); - } - - uws.NewSocketHandler(true).configure(tls_ctx.?, true, *PostgresSQLConnection, SocketHandler(true)); - } - - var username: []const u8 = ""; - var password: []const u8 = ""; - var database: []const u8 = ""; - var options: []const u8 = ""; - var path: []const u8 = ""; - - const options_str = try arguments[7].toBunString(globalObject); - defer options_str.deref(); - - const path_str = try arguments[8].toBunString(globalObject); - defer path_str.deref(); - - const options_buf: []u8 = brk: { - var b = bun.StringBuilder{}; - b.cap += username_str.utf8ByteLength() + 1 + password_str.utf8ByteLength() + 1 + database_str.utf8ByteLength() + 1 + options_str.utf8ByteLength() + 1 + path_str.utf8ByteLength() + 1; - - b.allocate(bun.default_allocator) catch {}; - var u = username_str.toUTF8WithoutRef(bun.default_allocator); - defer u.deinit(); - username = b.append(u.slice()); - - var p = password_str.toUTF8WithoutRef(bun.default_allocator); - defer p.deinit(); - password = b.append(p.slice()); - - var d = database_str.toUTF8WithoutRef(bun.default_allocator); - defer d.deinit(); - database = b.append(d.slice()); - - var o = options_str.toUTF8WithoutRef(bun.default_allocator); - defer o.deinit(); - options = b.append(o.slice()); - - var _path = path_str.toUTF8WithoutRef(bun.default_allocator); - defer _path.deinit(); - path = b.append(_path.slice()); - - break :brk b.allocatedSlice(); - }; - - const on_connect = arguments[9]; - const on_close = arguments[10]; - const idle_timeout = arguments[11].toInt32(); - const connection_timeout = arguments[12].toInt32(); - const max_lifetime = arguments[13].toInt32(); - const use_unnamed_prepared_statements = arguments[14].asBoolean(); - - const ptr: *PostgresSQLConnection = try bun.default_allocator.create(PostgresSQLConnection); - - ptr.* = PostgresSQLConnection{ - .globalObject = globalObject, - - .database = database, - .user = username, - .password = password, - .path = path, - .options = options, - .options_buf = options_buf, - .socket = .{ .SocketTCP = .{ .socket = .{ .detached = {} } } }, - .requests = PostgresRequest.Queue.init(bun.default_allocator), - .statements = PreparedStatementsMap{}, - .tls_config = tls_config, - .tls_ctx = tls_ctx, - .ssl_mode = ssl_mode, - .tls_status = if (ssl_mode != .disable) .pending else .none, - .idle_timeout_interval_ms = @intCast(idle_timeout), - .connection_timeout_ms = @intCast(connection_timeout), - .max_lifetime_interval_ms = @intCast(max_lifetime), - .flags = .{ - .use_unnamed_prepared_statements = use_unnamed_prepared_statements, - }, - }; - - ptr.updateHasPendingActivity(); - ptr.poll_ref.ref(vm); - const js_value = ptr.toJS(globalObject); - js_value.ensureStillAlive(); - ptr.js_value = js_value; - - js.onconnectSetCached(js_value, globalObject, on_connect); - js.oncloseSetCached(js_value, globalObject, on_close); - bun.analytics.Features.postgres_connections += 1; - -
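// options_buf above packs username/password/database/options/path into one
// allocation and hands out slices into it, so a single free (freeSensitive in
// deinit) releases them all. A std-only sketch of the same pattern (example
// code, not Bun's StringBuilder API):
test "pack strings into a single buffer" {
    const std = @import("std");
    const allocator = std.testing.allocator;

    const parts = [_][]const u8{ "alice", "hunter2", "mydb" };
    var total: usize = 0;
    for (parts) |part| total += part.len;

    // one allocation for every string
    const buf = try allocator.alloc(u8, total);
    defer allocator.free(buf);

    // record a slice into the shared buffer for each appended string
    var slices: [parts.len][]const u8 = undefined;
    var offset: usize = 0;
    for (parts, 0..) |part, i| {
        @memcpy(buf[offset..][0..part.len], part);
        slices[i] = buf[offset..][0..part.len];
        offset += part.len;
    }

    try std.testing.expectEqualStrings("hunter2", slices[1]);
}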
{ - const hostname = hostname_str.toUTF8(bun.default_allocator); - defer hostname.deinit(); - - const ctx = vm.rareData().postgresql_context.tcp orelse brk: { - const ctx_ = uws.SocketContext.createNoSSLContext(vm.uwsLoop(), @sizeOf(*PostgresSQLConnection)).?; - uws.NewSocketHandler(false).configure(ctx_, true, *PostgresSQLConnection, SocketHandler(false)); - vm.rareData().postgresql_context.tcp = ctx_; - break :brk ctx_; - }; - - if (path.len > 0) { - ptr.socket = .{ - .SocketTCP = uws.SocketTCP.connectUnixAnon(path, ctx, ptr, false) catch |err| { - tls_config.deinit(); - if (tls_ctx) |tls| { - tls.deinit(true); - } - ptr.deinit(); - return globalObject.throwError(err, "failed to connect to postgresql"); - }, - }; - } else { - ptr.socket = .{ - .SocketTCP = uws.SocketTCP.connectAnon(hostname.slice(), port, ctx, ptr, false) catch |err| { - tls_config.deinit(); - if (tls_ctx) |tls| { - tls.deinit(true); - } - ptr.deinit(); - return globalObject.throwError(err, "failed to connect to postgresql"); - }, - }; - } - ptr.resetConnectionTimeout(); - } - - return js_value; - } - - fn SocketHandler(comptime ssl: bool) type { - return struct { - const SocketType = uws.NewSocketHandler(ssl); - fn _socket(s: SocketType) Socket { - if (comptime ssl) { - return Socket{ .SocketTLS = s }; - } - - return Socket{ .SocketTCP = s }; - } - pub fn onOpen(this: *PostgresSQLConnection, socket: SocketType) void { - this.onOpen(_socket(socket)); - } - - fn onHandshake_(this: *PostgresSQLConnection, _: anytype, success: i32, ssl_error: uws.us_bun_verify_error_t) void { - this.onHandshake(success, ssl_error); - } - - pub const onHandshake = if (ssl) onHandshake_ else null; - - pub fn onClose(this: *PostgresSQLConnection, socket: SocketType, _: i32, _: ?*anyopaque) void { - _ = socket; - this.onClose(); - } - - pub fn onEnd(this: *PostgresSQLConnection, socket: SocketType) void { - _ = socket; - this.onClose(); - } - - pub fn onConnectError(this: *PostgresSQLConnection, socket: SocketType, _: i32) void { - _ = socket; - this.onClose(); - } - - pub fn onTimeout(this: *PostgresSQLConnection, socket: SocketType) void { - _ = socket; - this.onTimeout(); - } - - pub fn onData(this: *PostgresSQLConnection, socket: SocketType, data: []const u8) void { - _ = socket; - this.onData(data); - } - - pub fn onWritable(this: *PostgresSQLConnection, socket: SocketType) void { - _ = socket; - this.onDrain(); - } - }; - } - - pub fn ref(this: *@This()) void { - bun.assert(this.ref_count > 0); - this.ref_count += 1; - } - - pub fn doRef(this: *@This(), _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - this.poll_ref.ref(this.globalObject.bunVM()); - this.updateHasPendingActivity(); - return .js_undefined; - } - - pub fn doUnref(this: *@This(), _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - this.poll_ref.unref(this.globalObject.bunVM()); - this.updateHasPendingActivity(); - return .js_undefined; - } - pub fn doFlush(this: *PostgresSQLConnection, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { - this.flushData(); - return .js_undefined; - } - - pub fn deref(this: *@This()) void { - const ref_count = this.ref_count; - this.ref_count -= 1; - - if (ref_count == 1) { - this.disconnect(); - this.deinit(); - } - } - - pub fn doClose(this: *@This(), globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { - _ = globalObject; - this.disconnect(); - this.write_buffer.deinit(bun.default_allocator); - - return .js_undefined; - } - - pub fn stopTimers(this: 
*PostgresSQLConnection) void { - if (this.timer.state == .ACTIVE) { - this.globalObject.bunVM().timer.remove(&this.timer); - } - if (this.max_lifetime_timer.state == .ACTIVE) { - this.globalObject.bunVM().timer.remove(&this.max_lifetime_timer); - } - } - - pub fn deinit(this: *@This()) void { - this.stopTimers(); - var iter = this.statements.valueIterator(); - while (iter.next()) |stmt_ptr| { - var stmt = stmt_ptr.*; - stmt.deref(); - } - this.statements.deinit(bun.default_allocator); - this.write_buffer.deinit(bun.default_allocator); - this.read_buffer.deinit(bun.default_allocator); - this.backend_parameters.deinit(); - - bun.freeSensitive(bun.default_allocator, this.options_buf); - - this.tls_config.deinit(); - bun.default_allocator.destroy(this); - } - - fn refAndClose(this: *@This(), js_reason: ?JSC.JSValue) void { - // refAndClose is always called when we want to disconnect or when the connection is closed - - if (!this.socket.isClosed()) { - // the event loop needs to be alive to close the socket - this.poll_ref.ref(this.globalObject.bunVM()); - // will unref on socket close - this.socket.close(); - } - - // clean up pending requests - while (this.current()) |request| { - switch (request.status) { - // pending: we fail the request, and the statement is marked with error.ConnectionClosed too - .pending => { - const stmt = request.statement orelse continue; - stmt.error_response = .{ .postgres_error = AnyPostgresError.ConnectionClosed }; - stmt.status = .failed; - if (js_reason) |reason| { - request.onJSError(reason, this.globalObject); - } else { - request.onError(.{ .postgres_error = AnyPostgresError.ConnectionClosed }, this.globalObject); - } - }, - // in the middle of running - .binding, - .running, - .partial_response, - => { - if (js_reason) |reason| { - request.onJSError(reason, this.globalObject); - } else { - request.onError(.{ .postgres_error = AnyPostgresError.ConnectionClosed }, this.globalObject); - } - }, - // just ignore success and fail cases - .success, .fail => {}, - } - request.deref(); - this.requests.discard(1); - } - } - - pub fn disconnect(this: *@This()) void { - this.stopTimers(); - - if (this.status == .connected) { - this.status = .disconnected; - this.refAndClose(null); - } - } - - fn current(this: *PostgresSQLConnection) ?*PostgresSQLQuery { - if (this.requests.readableLength() == 0) { - return null; - } - - return this.requests.peekItem(0); - } - - fn hasQueryRunning(this: *PostgresSQLConnection) bool { - return !this.flags.is_ready_for_query or this.current() != null; - } - - pub const Writer = struct { - connection: *PostgresSQLConnection, - - pub fn write(this: Writer, data: []const u8) AnyPostgresError!void { - var buffer = &this.connection.write_buffer; - try buffer.write(bun.default_allocator, data); - } - - pub fn pwrite(this: Writer, data: []const u8, index: usize) AnyPostgresError!void { - @memcpy(this.connection.write_buffer.byte_list.slice()[index..][0..data.len], data); - } - - pub fn offset(this: Writer) usize { - return this.connection.write_buffer.len(); - } - }; - - pub fn writer(this: *PostgresSQLConnection) protocol.NewWriter(Writer) { - return .{ - .wrapped = .{ - .connection = this, - }, - }; - } - - pub const Reader = struct { - connection: *PostgresSQLConnection, - - pub fn markMessageStart(this: Reader) void { - this.connection.last_message_start = this.connection.read_buffer.head; - } - - pub const ensureLength = ensureCapacity; - - pub fn peek(this: Reader) []const u8 { - return this.connection.read_buffer.remaining(); - } - pub fn skip(this: Reader,
count: usize) void { - this.connection.read_buffer.head = @min(this.connection.read_buffer.head + @as(u32, @truncate(count)), this.connection.read_buffer.byte_list.len); - } - pub fn ensureCapacity(this: Reader, count: usize) bool { - return @as(usize, this.connection.read_buffer.head) + count <= @as(usize, this.connection.read_buffer.byte_list.len); - } - pub fn read(this: Reader, count: usize) AnyPostgresError!Data { - var remaining = this.connection.read_buffer.remaining(); - if (@as(usize, remaining.len) < count) { - return error.ShortRead; - } - - this.skip(count); - return Data{ - .temporary = remaining[0..count], - }; - } - pub fn readZ(this: Reader) AnyPostgresError!Data { - const remain = this.connection.read_buffer.remaining(); - - if (bun.strings.indexOfChar(remain, 0)) |zero| { - this.skip(zero + 1); - return Data{ - .temporary = remain[0..zero], - }; - } - - return error.ShortRead; - } - }; - - pub fn bufferedReader(this: *PostgresSQLConnection) protocol.NewReader(Reader) { - return .{ - .wrapped = .{ .connection = this }, - }; - } - - fn advance(this: *PostgresSQLConnection) !void { - while (this.requests.readableLength() > 0) { - var req: *PostgresSQLQuery = this.requests.peekItem(0); - switch (req.status) { - .pending => { - if (req.flags.simple) { - debug("executeQuery", .{}); - var query_str = req.query.toUTF8(bun.default_allocator); - defer query_str.deinit(); - PostgresRequest.executeQuery(query_str.slice(), PostgresSQLConnection.Writer, this.writer()) catch |err| { - req.onWriteFail(err, this.globalObject, this.getQueriesArray()); - req.deref(); - this.requests.discard(1); - - continue; - }; - this.flags.is_ready_for_query = false; - req.status = .running; - return; - } else { - const stmt = req.statement orelse return error.ExpectedStatement; - - switch (stmt.status) { - .failed => { - bun.assert(stmt.error_response != null); - req.onError(stmt.error_response.?, this.globalObject); - req.deref(); - this.requests.discard(1); - - continue; - }, - .prepared => { - const thisValue = req.thisValue.get(); - bun.assert(thisValue != .zero); - const binding_value = PostgresSQLQuery.js.bindingGetCached(thisValue) orelse .zero; - const columns_value = PostgresSQLQuery.js.columnsGetCached(thisValue) orelse .zero; - req.flags.binary = stmt.fields.len > 0; - - PostgresRequest.bindAndExecute(this.globalObject, stmt, binding_value, columns_value, PostgresSQLConnection.Writer, this.writer()) catch |err| { - req.onWriteFail(err, this.globalObject, this.getQueriesArray()); - req.deref(); - this.requests.discard(1); - - continue; - }; - this.flags.is_ready_for_query = false; - req.status = .binding; - return; - }, - .pending => { - // statement is pending, lets write/parse it - var query_str = req.query.toUTF8(bun.default_allocator); - defer query_str.deinit(); - const has_params = stmt.signature.fields.len > 0; - // If it does not have params, we can write and execute immediately in one go - if (!has_params) { - const thisValue = req.thisValue.get(); - bun.assert(thisValue != .zero); - // prepareAndQueryWithSignature will write + bind + execute, it will change to running after binding is complete - const binding_value = PostgresSQLQuery.js.bindingGetCached(thisValue) orelse .zero; - PostgresRequest.prepareAndQueryWithSignature(this.globalObject, query_str.slice(), binding_value, PostgresSQLConnection.Writer, this.writer(), &stmt.signature) catch |err| { - stmt.status = .failed; - stmt.error_response = .{ .postgres_error = err }; - req.onWriteFail(err, this.globalObject, 
this.getQueriesArray()); - req.deref(); - this.requests.discard(1); - - continue; - }; - this.flags.is_ready_for_query = false; - req.status = .binding; - stmt.status = .parsing; - - return; - } - const connection_writer = this.writer(); - // write query and wait for it to be prepared - PostgresRequest.writeQuery(query_str.slice(), stmt.signature.prepared_statement_name, stmt.signature.fields, PostgresSQLConnection.Writer, connection_writer) catch |err| { - stmt.error_response = .{ .postgres_error = err }; - stmt.status = .failed; - - req.onWriteFail(err, this.globalObject, this.getQueriesArray()); - req.deref(); - this.requests.discard(1); - - continue; - }; - connection_writer.write(&protocol.Sync) catch |err| { - stmt.error_response = .{ .postgres_error = err }; - stmt.status = .failed; - - req.onWriteFail(err, this.globalObject, this.getQueriesArray()); - req.deref(); - this.requests.discard(1); - - continue; - }; - this.flags.is_ready_for_query = false; - stmt.status = .parsing; - return; - }, - .parsing => { - // we are still parsing; let's wait for it to be prepared or failed - return; - }, - } - } - }, - - .running, .binding, .partial_response => { - // if we are binding, it will switch to running immediately - // if we are running, we need to wait for it to be success or fail - return; - }, - .success, .fail => { - req.deref(); - this.requests.discard(1); - continue; - }, - } - } - } - - pub fn getQueriesArray(this: *const PostgresSQLConnection) JSValue { - return js.queriesGetCached(this.js_value) orelse .zero; - } - - pub const DataCell = @import("./DataCell.zig").DataCell; - - pub fn on(this: *PostgresSQLConnection, comptime MessageType: @Type(.enum_literal), comptime Context: type, reader: protocol.NewReader(Context)) AnyPostgresError!void { - debug("on({s})", .{@tagName(MessageType)}); - - switch (comptime MessageType) { - .DataRow => { - const request = this.current() orelse return error.ExpectedRequest; - var statement = request.statement orelse return error.ExpectedStatement; - var structure: JSValue = .js_undefined; - var cached_structure: ?PostgresCachedStructure = null; - // explicitly switch without else so that, if new modes are added, we don't forget to check for duplicate fields - switch (request.flags.result_mode) { - .objects => { - cached_structure = statement.structure(this.js_value, this.globalObject); - structure = cached_structure.?.jsValue() orelse .js_undefined; - }, - .raw, .values => { - // no need to check for duplicate fields or structure - }, - } - - var putter = DataCell.Putter{ - .list = &.{}, - .fields = statement.fields, - .binary = request.flags.binary, - .bigint = request.flags.bigint, - .globalObject = this.globalObject, - }; - - var stack_buf: [70]DataCell = undefined; - var cells: []DataCell = stack_buf[0..@min(statement.fields.len, JSC.JSObject.maxInlineCapacity())]; - var free_cells = false; - defer { - for (cells[0..putter.count]) |*cell| { - cell.deinit(); - } - if (free_cells) bun.default_allocator.free(cells); - } - - if (statement.fields.len >= JSC.JSObject.maxInlineCapacity()) { - cells = try bun.default_allocator.alloc(DataCell, statement.fields.len); - free_cells = true; - } - // make sure all cells are reset; if the reader short-reads, the remaining fields will just be null, which is better than undefined behavior - @memset(cells, DataCell{ .tag = .null, .value = .{ .null = 0 } }); - putter.list = cells; - - if (request.flags.result_mode == .raw) { - try protocol.DataRow.decode( - &putter, - Context, - reader, - DataCell.Putter.putRaw, - ); - } else {
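// Non-raw modes (.objects, .values) decode each cell into a JSValue via
// DataCell.Putter.put; the .raw branch above keeps the wire bytes via putRaw.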
try protocol.DataRow.decode( - &putter, - Context, - reader, - DataCell.Putter.put, - ); - } - const thisValue = request.thisValue.get(); - bun.assert(thisValue != .zero); - const pending_value = PostgresSQLQuery.js.pendingValueGetCached(thisValue) orelse .zero; - pending_value.ensureStillAlive(); - const result = putter.toJS(this.globalObject, pending_value, structure, statement.fields_flags, request.flags.result_mode, cached_structure); - - if (pending_value == .zero) { - PostgresSQLQuery.js.pendingValueSetCached(thisValue, this.globalObject, result); - } - }, - .CopyData => { - var copy_data: protocol.CopyData = undefined; - try copy_data.decodeInternal(Context, reader); - copy_data.data.deinit(); - }, - .ParameterStatus => { - var parameter_status: protocol.ParameterStatus = undefined; - try parameter_status.decodeInternal(Context, reader); - defer { - parameter_status.deinit(); - } - try this.backend_parameters.insert(parameter_status.name.slice(), parameter_status.value.slice()); - }, - .ReadyForQuery => { - var ready_for_query: protocol.ReadyForQuery = undefined; - try ready_for_query.decodeInternal(Context, reader); - - this.setStatus(.connected); - this.flags.is_ready_for_query = true; - this.socket.setTimeout(300); - defer this.updateRef(); - - if (this.current()) |request| { - if (request.status == .partial_response) { - // if is a partial response, just signal that the query is now complete - request.onResult("", this.globalObject, this.js_value, true); - } - } - try this.advance(); - - this.flushData(); - }, - .CommandComplete => { - var request = this.current() orelse return error.ExpectedRequest; - - var cmd: protocol.CommandComplete = undefined; - try cmd.decodeInternal(Context, reader); - defer { - cmd.deinit(); - } - debug("-> {s}", .{cmd.command_tag.slice()}); - defer this.updateRef(); - - if (request.flags.simple) { - // simple queries can have multiple commands - request.onResult(cmd.command_tag.slice(), this.globalObject, this.js_value, false); - } else { - request.onResult(cmd.command_tag.slice(), this.globalObject, this.js_value, true); - } - }, - .BindComplete => { - try reader.eatMessage(protocol.BindComplete); - var request = this.current() orelse return error.ExpectedRequest; - if (request.status == .binding) { - request.status = .running; - } - }, - .ParseComplete => { - try reader.eatMessage(protocol.ParseComplete); - const request = this.current() orelse return error.ExpectedRequest; - if (request.statement) |statement| { - // if we have params wait for parameter description - if (statement.status == .parsing and statement.signature.fields.len == 0) { - statement.status = .prepared; - } - } - }, - .ParameterDescription => { - var description: protocol.ParameterDescription = undefined; - try description.decodeInternal(Context, reader); - const request = this.current() orelse return error.ExpectedRequest; - var statement = request.statement orelse return error.ExpectedStatement; - statement.parameters = description.parameters; - if (statement.status == .parsing) { - statement.status = .prepared; - } - }, - .RowDescription => { - var description: protocol.RowDescription = undefined; - try description.decodeInternal(Context, reader); - errdefer description.deinit(); - const request = this.current() orelse return error.ExpectedRequest; - var statement = request.statement orelse return error.ExpectedStatement; - statement.fields = description.fields; - }, - .Authentication => { - var auth: protocol.Authentication = undefined; - try auth.decodeInternal(Context, 
reader); - defer auth.deinit(); - - switch (auth) { - .SASL => { - if (this.authentication_state != .SASL) { - this.authentication_state = .{ .SASL = .{} }; - } - - var mechanism_buf: [128]u8 = undefined; - const mechanism = std.fmt.bufPrintZ(&mechanism_buf, "n,,n=*,r={s}", .{this.authentication_state.SASL.nonce()}) catch unreachable; - var response = protocol.SASLInitialResponse{ - .mechanism = .{ - .temporary = "SCRAM-SHA-256", - }, - .data = .{ - .temporary = mechanism, - }, - }; - - try response.writeInternal(PostgresSQLConnection.Writer, this.writer()); - debug("SASL", .{}); - this.flushData(); - }, - .SASLContinue => |*cont| { - if (this.authentication_state != .SASL) { - debug("Unexpected SASLContinue for authentication state: {s}", .{@tagName(std.meta.activeTag(this.authentication_state))}); - return error.UnexpectedMessage; - } - var sasl = &this.authentication_state.SASL; - - if (sasl.status != .init) { - debug("Unexpected SASLContinue for SASL state: {s}", .{@tagName(sasl.status)}); - return error.UnexpectedMessage; - } - debug("SASLContinue", .{}); - - const iteration_count = try cont.iterationCount(); - - const server_salt_decoded_base64 = bun.base64.decodeAlloc(bun.z_allocator, cont.s) catch |err| { - return switch (err) { - error.DecodingFailed => error.SASL_SIGNATURE_INVALID_BASE64, - else => |e| e, - }; - }; - defer bun.z_allocator.free(server_salt_decoded_base64); - try sasl.computeSaltedPassword(server_salt_decoded_base64, iteration_count, this); - - const auth_string = try std.fmt.allocPrint( - bun.z_allocator, - "n=*,r={s},r={s},s={s},i={s},c=biws,r={s}", - .{ - sasl.nonce(), - cont.r, - cont.s, - cont.i, - cont.r, - }, - ); - defer bun.z_allocator.free(auth_string); - try sasl.computeServerSignature(auth_string); - - const client_key = sasl.clientKey(); - const client_key_signature = sasl.clientKeySignature(&client_key, auth_string); - var client_key_xor_buffer: [32]u8 = undefined; - for (&client_key_xor_buffer, client_key, client_key_signature) |*out, a, b| { - out.* = a ^ b; - } - - var client_key_xor_base64_buf = std.mem.zeroes([bun.base64.encodeLenFromSize(32)]u8); - const xor_base64_len = bun.base64.encode(&client_key_xor_base64_buf, &client_key_xor_buffer); - - const payload = try std.fmt.allocPrint( - bun.z_allocator, - "c=biws,r={s},p={s}", - .{ cont.r, client_key_xor_base64_buf[0..xor_base64_len] }, - ); - defer bun.z_allocator.free(payload); - - var response = protocol.SASLResponse{ - .data = .{ - .temporary = payload, - }, - }; - - try response.writeInternal(PostgresSQLConnection.Writer, this.writer()); - sasl.status = .@"continue"; - this.flushData(); - }, - .SASLFinal => |final| { - if (this.authentication_state != .SASL) { - debug("Unexpected SASLFinal for authentication state: {s}", .{@tagName(std.meta.activeTag(this.authentication_state))}); - return error.UnexpectedMessage; - } - var sasl = &this.authentication_state.SASL; - - if (sasl.status != .@"continue") { - debug("Unexpected SASLFinal for SASL state: {s}", .{@tagName(sasl.status)}); - return error.UnexpectedMessage; - } - - if (sasl.server_signature_len == 0) { - debug("SASLFinal - Server signature is empty", .{}); - return error.UnexpectedMessage; - } - - const server_signature = sasl.serverSignature(); - - // This will usually start with "v=" - const comparison_signature = final.data.slice(); - - if (comparison_signature.len < 2 or !bun.strings.eqlLong(server_signature, comparison_signature[2..], true)) { - debug("SASLFinal - SASL Server signature
mismatch\nExpected: {s}\nActual: {s}", .{ server_signature, comparison_signature[2..] }); - this.fail("The server did not return the correct signature", error.SASL_SIGNATURE_MISMATCH); - } else { - debug("SASLFinal - SASL Server signature match", .{}); - this.authentication_state.zero(); - } - }, - .Ok => { - debug("Authentication OK", .{}); - this.authentication_state.zero(); - this.authentication_state = .{ .ok = {} }; - }, - - .Unknown => { - this.fail("Unknown authentication method", error.UNKNOWN_AUTHENTICATION_METHOD); - }, - - .ClearTextPassword => { - debug("ClearTextPassword", .{}); - var response = protocol.PasswordMessage{ - .password = .{ - .temporary = this.password, - }, - }; - - try response.writeInternal(PostgresSQLConnection.Writer, this.writer()); - this.flushData(); - }, - - .MD5Password => |md5| { - debug("MD5Password", .{}); - // Format is: "md5" ++ hex(md5(hex(md5(password ++ username)) ++ salt)) - var first_hash_buf: bun.sha.MD5.Digest = undefined; - var first_hash_str: [32]u8 = undefined; - var final_hash_buf: bun.sha.MD5.Digest = undefined; - var final_hash_str: [32]u8 = undefined; - var final_password_buf: [36]u8 = undefined; - - // First hash: md5(password + username) - var first_hasher = bun.sha.MD5.init(); - first_hasher.update(this.password); - first_hasher.update(this.user); - first_hasher.final(&first_hash_buf); - const first_hash_str_output = std.fmt.bufPrint(&first_hash_str, "{x}", .{std.fmt.fmtSliceHexLower(&first_hash_buf)}) catch unreachable; - - // Second hash: md5(hex(first_hash) + salt) - var final_hasher = bun.sha.MD5.init(); - final_hasher.update(first_hash_str_output); - final_hasher.update(&md5.salt); - final_hasher.final(&final_hash_buf); - const final_hash_str_output = std.fmt.bufPrint(&final_hash_str, "{x}", .{std.fmt.fmtSliceHexLower(&final_hash_buf)}) catch unreachable; - - // Format final password as "md5" + final_hash - const final_password = std.fmt.bufPrintZ(&final_password_buf, "md5{s}", .{final_hash_str_output}) catch unreachable; - - var response = protocol.PasswordMessage{ - .password = .{ - .temporary = final_password, - }, - }; - - this.authentication_state = .{ .md5 = {} }; - try response.writeInternal(PostgresSQLConnection.Writer, this.writer()); - this.flushData(); - }, - - else => { - debug("TODO auth: {s}", .{@tagName(std.meta.activeTag(auth))}); - this.fail("TODO: support authentication method", error.UNSUPPORTED_AUTHENTICATION_METHOD); - }, - } - }, - .NoData => { - try reader.eatMessage(protocol.NoData); - var request = this.current() orelse return error.ExpectedRequest; - if (request.status == .binding) { - request.status = .running; - } - }, - .BackendKeyData => { - try this.backend_key_data.decodeInternal(Context, reader); - }, - .ErrorResponse => { - var err: protocol.ErrorResponse = undefined; - try err.decodeInternal(Context, reader); - - if (this.status == .connecting or this.status == .sent_startup_message) { - defer { - err.deinit(); - } - - this.failWithJSValue(err.toJS(this.globalObject)); - - // it shouldn't enqueue any requests while connecting - bun.assert(this.requests.count == 0); - return; - } - - var request = this.current() orelse { - debug("ErrorResponse: {}", .{err}); - return error.ExpectedRequest; - }; - var is_error_owned = true; - defer { - if (is_error_owned) { - err.deinit(); - } - } - if (request.statement) |stmt| { - if (stmt.status == PostgresSQLStatement.Status.parsing) { - stmt.status = PostgresSQLStatement.Status.failed; - stmt.error_response = .{ .protocol = err }; - is_error_owned = false; - if
(this.statements.remove(bun.hash(stmt.signature.name))) { - stmt.deref(); - } - } - } - this.updateRef(); - - request.onError(.{ .protocol = err }, this.globalObject); - }, - .PortalSuspended => { - // try reader.eatMessage(&protocol.PortalSuspended); - // var request = this.current() orelse return error.ExpectedRequest; - // _ = request; - debug("TODO PortalSuspended", .{}); - }, - .CloseComplete => { - try reader.eatMessage(protocol.CloseComplete); - var request = this.current() orelse return error.ExpectedRequest; - defer this.updateRef(); - if (request.flags.simple) { - request.onResult("CLOSECOMPLETE", this.globalObject, this.js_value, false); - } else { - request.onResult("CLOSECOMPLETE", this.globalObject, this.js_value, true); - } - }, - .CopyInResponse => { - debug("TODO CopyInResponse", .{}); - }, - .NoticeResponse => { - debug("UNSUPPORTED NoticeResponse", .{}); - var resp: protocol.NoticeResponse = undefined; - - try resp.decodeInternal(Context, reader); - resp.deinit(); - }, - .EmptyQueryResponse => { - try reader.eatMessage(protocol.EmptyQueryResponse); - var request = this.current() orelse return error.ExpectedRequest; - defer this.updateRef(); - if (request.flags.simple) { - request.onResult("", this.globalObject, this.js_value, false); - } else { - request.onResult("", this.globalObject, this.js_value, true); - } - }, - .CopyOutResponse => { - debug("TODO CopyOutResponse", .{}); - }, - .CopyDone => { - debug("TODO CopyDone", .{}); - }, - .CopyBothResponse => { - debug("TODO CopyBothResponse", .{}); - }, - else => @compileError("Unknown message type: " ++ @tagName(MessageType)), - } - } - - pub fn updateRef(this: *PostgresSQLConnection) void { - this.updateHasPendingActivity(); - if (this.pending_activity_count.raw > 0) { - this.poll_ref.ref(this.globalObject.bunVM()); - } else { - this.poll_ref.unref(this.globalObject.bunVM()); - } - } - - pub fn getConnected(this: *PostgresSQLConnection, _: *JSC.JSGlobalObject) JSValue { - return JSValue.jsBoolean(this.status == Status.connected); - } - - pub fn consumeOnConnectCallback(this: *const PostgresSQLConnection, globalObject: *JSC.JSGlobalObject) ?JSC.JSValue { - debug("consumeOnConnectCallback", .{}); - const on_connect = js.onconnectGetCached(this.js_value) orelse return null; - debug("consumeOnConnectCallback exists", .{}); - - js.onconnectSetCached(this.js_value, globalObject, .zero); - return on_connect; - } - - pub fn consumeOnCloseCallback(this: *const PostgresSQLConnection, globalObject: *JSC.JSGlobalObject) ?JSC.JSValue { - debug("consumeOnCloseCallback", .{}); - const on_close = js.oncloseGetCached(this.js_value) orelse return null; - debug("consumeOnCloseCallback exists", .{}); - js.oncloseSetCached(this.js_value, globalObject, .zero); - return on_close; - } -}; - -pub const PostgresCachedStructure = struct { - structure: JSC.Strong.Optional = .empty, - // only populated if more than JSC.JSC__JSObject__maxInlineCapacity fields otherwise the structure will contain all fields inlined - fields: ?[]JSC.JSObject.ExternColumnIdentifier = null, - - pub fn has(this: *@This()) bool { - return this.structure.has() or this.fields != null; - } - - pub fn jsValue(this: *const @This()) ?JSC.JSValue { - return this.structure.get(); - } - - pub fn set(this: *@This(), globalObject: *JSC.JSGlobalObject, value: ?JSC.JSValue, fields: ?[]JSC.JSObject.ExternColumnIdentifier) void { - if (value) |v| { - this.structure.set(globalObject, v); - } - this.fields = fields; - } - - pub fn deinit(this: *@This()) void { - this.structure.deinit(); - 
if (this.fields) |fields| { - this.fields = null; - for (fields) |*name| { - name.deinit(); - } - bun.default_allocator.free(fields); - } - } -}; -pub const PostgresSQLStatement = struct { - cached_structure: PostgresCachedStructure = .{}, - ref_count: u32 = 1, - fields: []protocol.FieldDescription = &[_]protocol.FieldDescription{}, - parameters: []const int4 = &[_]int4{}, - signature: Signature, - status: Status = Status.pending, - error_response: ?Error = null, - needs_duplicate_check: bool = true, - fields_flags: PostgresSQLConnection.DataCell.Flags = .{}, - - pub const Error = union(enum) { - protocol: protocol.ErrorResponse, - postgres_error: AnyPostgresError, - - pub fn deinit(this: *@This()) void { - switch (this.*) { - .protocol => |*err| err.deinit(), - .postgres_error => {}, - } - } - - pub fn toJS(this: *const @This(), globalObject: *JSC.JSGlobalObject) JSValue { - return switch (this.*) { - .protocol => |err| err.toJS(globalObject), - .postgres_error => |err| postgresErrorToJS(globalObject, null, err), - }; - } - }; - pub const Status = enum { - pending, - parsing, - prepared, - failed, - - pub fn isRunning(this: @This()) bool { - return this == .parsing; - } - }; - pub fn ref(this: *@This()) void { - bun.assert(this.ref_count > 0); - this.ref_count += 1; - } - - pub fn deref(this: *@This()) void { - const ref_count = this.ref_count; - this.ref_count -= 1; - - if (ref_count == 1) { - this.deinit(); - } - } - - pub fn checkForDuplicateFields(this: *PostgresSQLStatement) void { - if (!this.needs_duplicate_check) return; - this.needs_duplicate_check = false; - - var seen_numbers = std.ArrayList(u32).init(bun.default_allocator); - defer seen_numbers.deinit(); - var seen_fields = bun.StringHashMap(void).init(bun.default_allocator); - seen_fields.ensureUnusedCapacity(@intCast(this.fields.len)) catch bun.outOfMemory(); - defer seen_fields.deinit(); - - // iterate backwards - var remaining = this.fields.len; - var flags: PostgresSQLConnection.DataCell.Flags = .{}; - while (remaining > 0) { - remaining -= 1; - const field: *protocol.FieldDescription = &this.fields[remaining]; - switch (field.name_or_index) { - .name => |*name| { - const seen = seen_fields.getOrPut(name.slice()) catch unreachable; - if (seen.found_existing) { - field.name_or_index = .duplicate; - flags.has_duplicate_columns = true; - } - - flags.has_named_columns = true; - }, - .index => |index| { - if (std.mem.indexOfScalar(u32, seen_numbers.items, index) != null) { - field.name_or_index = .duplicate; - flags.has_duplicate_columns = true; - } else { - seen_numbers.append(index) catch bun.outOfMemory(); - } - - flags.has_indexed_columns = true; - }, - .duplicate => { - flags.has_duplicate_columns = true; - }, - } - } - - this.fields_flags = flags; - } - - pub fn deinit(this: *PostgresSQLStatement) void { - debug("PostgresSQLStatement deinit", .{}); - - bun.assert(this.ref_count == 0); - - for (this.fields) |*field| { - field.deinit(); - } - bun.default_allocator.free(this.fields); - bun.default_allocator.free(this.parameters); - this.cached_structure.deinit(); - if (this.error_response) |err| { - this.error_response = null; - var _error = err; - _error.deinit(); - } - this.signature.deinit(); - bun.default_allocator.destroy(this); - } - - pub fn structure(this: *PostgresSQLStatement, owner: JSValue, globalObject: *JSC.JSGlobalObject) PostgresCachedStructure { - if (this.cached_structure.has()) { - return this.cached_structure; - } - this.checkForDuplicateFields(); - - // lets avoid most allocations - var stack_ids: 
[70]JSC.JSObject.ExternColumnIdentifier = undefined; - // let's deduplicate the fields early - var nonDuplicatedCount = this.fields.len; - for (this.fields) |*field| { - if (field.name_or_index == .duplicate) { - nonDuplicatedCount -= 1; - } - } - const ids = if (nonDuplicatedCount <= JSC.JSObject.maxInlineCapacity()) stack_ids[0..nonDuplicatedCount] else bun.default_allocator.alloc(JSC.JSObject.ExternColumnIdentifier, nonDuplicatedCount) catch bun.outOfMemory(); - - var i: usize = 0; - for (this.fields) |*field| { - if (field.name_or_index == .duplicate) continue; - - var id: *JSC.JSObject.ExternColumnIdentifier = &ids[i]; - switch (field.name_or_index) { - .name => |name| { - id.value.name = String.createAtomIfPossible(name.slice()); - }, - .index => |index| { - id.value.index = index; - }, - .duplicate => unreachable, - } - id.tag = switch (field.name_or_index) { - .name => 2, - .index => 1, - .duplicate => 0, - }; - i += 1; - } - - if (nonDuplicatedCount > JSC.JSObject.maxInlineCapacity()) { - this.cached_structure.set(globalObject, null, ids); - } else { - this.cached_structure.set(globalObject, JSC.JSObject.createStructure( - globalObject, - owner, - @truncate(ids.len), - ids.ptr, - ), null); - } - - return this.cached_structure; - } -}; - -const QueryBindingIterator = union(enum) { - array: JSC.JSArrayIterator, - objects: ObjectIterator, - - pub fn init(array: JSValue, columns: JSValue, globalObject: *JSC.JSGlobalObject) QueryBindingIterator { - if (columns.isEmptyOrUndefinedOrNull()) { - return .{ .array = JSC.JSArrayIterator.init(array, globalObject) }; - } - - return .{ - .objects = .{ - .array = array, - .columns = columns, - .globalObject = globalObject, - .columns_count = columns.getLength(globalObject), - .array_length = array.getLength(globalObject), - }, - }; - } - - pub const ObjectIterator = struct { - array: JSValue, - columns: JSValue = .zero, - globalObject: *JSC.JSGlobalObject, - cell_i: usize = 0, - row_i: usize = 0, - current_row: JSC.JSValue = .zero, - columns_count: usize = 0, - array_length: usize = 0, - any_failed: bool = false, - - pub fn next(this: *ObjectIterator) ?JSC.JSValue { - if (this.row_i >= this.array_length) { - return null; - } - - const cell_i = this.cell_i; - this.cell_i += 1; - const row_i = this.row_i; - - const globalObject = this.globalObject; - - if (this.current_row == .zero) { - this.current_row = JSC.JSObject.getIndex(this.array, globalObject, @intCast(row_i)); - if (this.current_row.isEmptyOrUndefinedOrNull()) { - if (!globalObject.hasException()) - return globalObject.throw("Expected a row to be returned at index {d}", .{row_i}) catch null; - this.any_failed = true; - return null; - } - } - - defer { - if (this.cell_i >= this.columns_count) { - this.cell_i = 0; - this.current_row = .zero; - this.row_i += 1; - } - } - - const property = JSC.JSObject.getIndex(this.columns, globalObject, @intCast(cell_i)); - if (property == .zero or property.isUndefined()) { - if (!globalObject.hasException()) - return globalObject.throw("Expected a column at index {d} in row {d}", .{ cell_i, row_i }) catch null; - this.any_failed = true; - return null; - } - - const value = this.current_row.getOwnByValue(globalObject, property); - if (value == .zero or (value != null and value.?.isUndefined())) { - if (!globalObject.hasException()) - return globalObject.throw("Expected a value at index {d} in row {d}", .{ cell_i, row_i }) catch null; - this.any_failed = true; - return null; - } - return value; - } - }; - - pub fn next(this: *QueryBindingIterator)
?JSC.JSValue { - return switch (this.*) { - .array => |*iter| iter.next(), - .objects => |*iter| iter.next(), - }; - } - - pub fn anyFailed(this: *const QueryBindingIterator) bool { - return switch (this.*) { - .array => false, - .objects => |*iter| iter.any_failed, - }; - } - - pub fn to(this: *QueryBindingIterator, index: u32) void { - switch (this.*) { - .array => |*iter| iter.i = index, - .objects => |*iter| { - iter.cell_i = index % iter.columns_count; - iter.row_i = index / iter.columns_count; - iter.current_row = .zero; - }, - } - } - - pub fn reset(this: *QueryBindingIterator) void { - switch (this.*) { - .array => |*iter| { - iter.i = 0; - }, - .objects => |*iter| { - iter.cell_i = 0; - iter.row_i = 0; - iter.current_row = .zero; - }, - } - } -}; - -const Signature = struct { - fields: []const int4, - name: []const u8, - query: []const u8, - prepared_statement_name: []const u8, - - pub fn empty() Signature { - return Signature{ - .fields = &[_]int4{}, - .name = &[_]u8{}, - .query = &[_]u8{}, - .prepared_statement_name = &[_]u8{}, - }; - } - - const log = bun.Output.scoped(.PostgresSignature, false); - pub fn deinit(this: *Signature) void { - if (this.prepared_statement_name.len > 0) { - bun.default_allocator.free(this.prepared_statement_name); - } - if (this.name.len > 0) { - bun.default_allocator.free(this.name); - } - if (this.fields.len > 0) { - bun.default_allocator.free(this.fields); - } - if (this.query.len > 0) { - bun.default_allocator.free(this.query); - } - } - - pub fn hash(this: *const Signature) u64 { - var hasher = std.hash.Wyhash.init(0); - hasher.update(this.name); - hasher.update(std.mem.sliceAsBytes(this.fields)); - return hasher.final(); - } - - pub fn generate(globalObject: *JSC.JSGlobalObject, query: []const u8, array_value: JSValue, columns: JSValue, prepared_statement_id: u64, unnamed: bool) !Signature { - var fields = std.ArrayList(int4).init(bun.default_allocator); - var name = try std.ArrayList(u8).initCapacity(bun.default_allocator, query.len); - - name.appendSliceAssumeCapacity(query); - - errdefer { - fields.deinit(); - name.deinit(); - } - - var iter = QueryBindingIterator.init(array_value, columns, globalObject); - - while (iter.next()) |value| { - if (value.isEmptyOrUndefinedOrNull()) { - // Allow postgres to decide the type - try fields.append(0); - try name.appendSlice(".null"); - continue; - } - - const tag = try types.Tag.fromJS(globalObject, value); - - switch (tag) { - .int8 => try name.appendSlice(".int8"), - .int4 => try name.appendSlice(".int4"), - // .int4_array => try name.appendSlice(".int4_array"), - .int2 => try name.appendSlice(".int2"), - .float8 => try name.appendSlice(".float8"), - .float4 => try name.appendSlice(".float4"), - .numeric => try name.appendSlice(".numeric"), - .json, .jsonb => try name.appendSlice(".json"), - .bool => try name.appendSlice(".bool"), - .timestamp => try name.appendSlice(".timestamp"), - .timestamptz => try name.appendSlice(".timestamptz"), - .bytea => try name.appendSlice(".bytea"), - else => try name.appendSlice(".string"), - } - - switch (tag) { - .bool, .int4, .int8, .float8, .int2, .numeric, .float4, .bytea => { - // We decide the type - try fields.append(@intFromEnum(tag)); - }, - else => { - // Allow postgres to decide the type - try fields.append(0); - }, - } - } - - if (iter.anyFailed()) { - return error.InvalidQueryBinding; - } - // max u64 length is 20, max prepared_statement_name length is 63 - const prepared_statement_name = if (unnamed) "" else try std.fmt.allocPrint(bun.default_allocator, 
"P{s}${d}", .{ name.items[0..@min(40, name.items.len)], prepared_statement_id }); - - return Signature{ - .prepared_statement_name = prepared_statement_name, - .name = name.items, - .fields = fields.items, - .query = try bun.default_allocator.dupe(u8, query), - }; - } -}; - pub fn createBinding(globalObject: *JSC.JSGlobalObject) JSValue { const binding = JSValue.createEmptyObjectWithNullPrototype(globalObject); binding.put(globalObject, ZigString.static("PostgresSQLConnection"), PostgresSQLConnection.js.getConstructor(globalObject)); @@ -3287,6 +17,15 @@ pub fn createBinding(globalObject: *JSC.JSGlobalObject) JSValue { return binding; } -const ZigString = JSC.ZigString; +// @sortImports -const assert = bun.assert; +pub const PostgresSQLConnection = @import("./postgres/PostgresSQLConnection.zig"); +pub const PostgresSQLContext = @import("./postgres/PostgresSQLContext.zig"); +pub const PostgresSQLQuery = @import("./postgres/PostgresSQLQuery.zig"); +const bun = @import("bun"); +pub const protocol = @import("./postgres/PostgresProtocol.zig"); +pub const types = @import("./postgres/PostgresTypes.zig"); + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; +const ZigString = JSC.ZigString; diff --git a/src/sql/postgres/AnyPostgresError.zig b/src/sql/postgres/AnyPostgresError.zig new file mode 100644 index 0000000000..04167ec521 --- /dev/null +++ b/src/sql/postgres/AnyPostgresError.zig @@ -0,0 +1,89 @@ +pub const AnyPostgresError = error{ + ConnectionClosed, + ExpectedRequest, + ExpectedStatement, + InvalidBackendKeyData, + InvalidBinaryData, + InvalidByteSequence, + InvalidByteSequenceForEncoding, + InvalidCharacter, + InvalidMessage, + InvalidMessageLength, + InvalidQueryBinding, + InvalidServerKey, + InvalidServerSignature, + JSError, + MultidimensionalArrayNotSupportedYet, + NullsInArrayNotSupportedYet, + OutOfMemory, + Overflow, + PBKDFD2, + SASL_SIGNATURE_MISMATCH, + SASL_SIGNATURE_INVALID_BASE64, + ShortRead, + TLSNotAvailable, + TLSUpgradeFailed, + UnexpectedMessage, + UNKNOWN_AUTHENTICATION_METHOD, + UNSUPPORTED_AUTHENTICATION_METHOD, + UnsupportedByteaFormat, + UnsupportedIntegerSize, + UnsupportedArrayFormat, + UnsupportedNumericFormat, + UnknownFormatCode, +}; + +pub fn postgresErrorToJS(globalObject: *JSC.JSGlobalObject, message: ?[]const u8, err: AnyPostgresError) JSValue { + const error_code: JSC.Error = switch (err) { + error.ConnectionClosed => .POSTGRES_CONNECTION_CLOSED, + error.ExpectedRequest => .POSTGRES_EXPECTED_REQUEST, + error.ExpectedStatement => .POSTGRES_EXPECTED_STATEMENT, + error.InvalidBackendKeyData => .POSTGRES_INVALID_BACKEND_KEY_DATA, + error.InvalidBinaryData => .POSTGRES_INVALID_BINARY_DATA, + error.InvalidByteSequence => .POSTGRES_INVALID_BYTE_SEQUENCE, + error.InvalidByteSequenceForEncoding => .POSTGRES_INVALID_BYTE_SEQUENCE_FOR_ENCODING, + error.InvalidCharacter => .POSTGRES_INVALID_CHARACTER, + error.InvalidMessage => .POSTGRES_INVALID_MESSAGE, + error.InvalidMessageLength => .POSTGRES_INVALID_MESSAGE_LENGTH, + error.InvalidQueryBinding => .POSTGRES_INVALID_QUERY_BINDING, + error.InvalidServerKey => .POSTGRES_INVALID_SERVER_KEY, + error.InvalidServerSignature => .POSTGRES_INVALID_SERVER_SIGNATURE, + error.MultidimensionalArrayNotSupportedYet => .POSTGRES_MULTIDIMENSIONAL_ARRAY_NOT_SUPPORTED_YET, + error.NullsInArrayNotSupportedYet => .POSTGRES_NULLS_IN_ARRAY_NOT_SUPPORTED_YET, + error.Overflow => .POSTGRES_OVERFLOW, + error.PBKDFD2 => .POSTGRES_AUTHENTICATION_FAILED_PBKDF2, + error.SASL_SIGNATURE_MISMATCH => .POSTGRES_SASL_SIGNATURE_MISMATCH, + 
error.SASL_SIGNATURE_INVALID_BASE64 => .POSTGRES_SASL_SIGNATURE_INVALID_BASE64, + error.TLSNotAvailable => .POSTGRES_TLS_NOT_AVAILABLE, + error.TLSUpgradeFailed => .POSTGRES_TLS_UPGRADE_FAILED, + error.UnexpectedMessage => .POSTGRES_UNEXPECTED_MESSAGE, + error.UNKNOWN_AUTHENTICATION_METHOD => .POSTGRES_UNKNOWN_AUTHENTICATION_METHOD, + error.UNSUPPORTED_AUTHENTICATION_METHOD => .POSTGRES_UNSUPPORTED_AUTHENTICATION_METHOD, + error.UnsupportedByteaFormat => .POSTGRES_UNSUPPORTED_BYTEA_FORMAT, + error.UnsupportedArrayFormat => .POSTGRES_UNSUPPORTED_ARRAY_FORMAT, + error.UnsupportedIntegerSize => .POSTGRES_UNSUPPORTED_INTEGER_SIZE, + error.UnsupportedNumericFormat => .POSTGRES_UNSUPPORTED_NUMERIC_FORMAT, + error.UnknownFormatCode => .POSTGRES_UNKNOWN_FORMAT_CODE, + error.JSError => { + return globalObject.takeException(error.JSError); + }, + error.OutOfMemory => { + // TODO: add binding for creating an out of memory error? + return globalObject.takeException(globalObject.throwOutOfMemory()); + }, + error.ShortRead => { + bun.unreachablePanic("Assertion failed: ShortRead should be handled by the caller in postgres", .{}); + }, + }; + if (message) |msg| { + return error_code.fmt(globalObject, "{s}", .{msg}); + } + return error_code.fmt(globalObject, "Failed to bind query: {s}", .{@errorName(err)}); +} + +// @sortImports + +const bun = @import("bun"); + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/AuthenticationState.zig b/src/sql/postgres/AuthenticationState.zig new file mode 100644 index 0000000000..97d19c0893 --- /dev/null +++ b/src/sql/postgres/AuthenticationState.zig @@ -0,0 +1,21 @@ +pub const AuthenticationState = union(enum) { + pending: void, + none: void, + ok: void, + SASL: SASL, + md5: void, + + pub fn zero(this: *AuthenticationState) void { + switch (this.*) { + .SASL => |*sasl| { + sasl.deinit(); + }, + else => {}, + } + this.* = .{ .none = {} }; + } +}; + +// @sortImports + +const SASL = @import("./SASL.zig"); diff --git a/src/sql/postgres/CommandTag.zig b/src/sql/postgres/CommandTag.zig new file mode 100644 index 0000000000..5c89426eb3 --- /dev/null +++ b/src/sql/postgres/CommandTag.zig @@ -0,0 +1,107 @@ +pub const CommandTag = union(enum) { + // For an INSERT command, the tag is INSERT oid rows, where rows is the + // number of rows inserted. oid used to be the object ID of the inserted + // row if rows was 1 and the target table had OIDs, but OIDs system + // columns are not supported anymore; therefore oid is always 0. + INSERT: u64, + // For a DELETE command, the tag is DELETE rows where rows is the number + // of rows deleted. + DELETE: u64, + // For an UPDATE command, the tag is UPDATE rows where rows is the + // number of rows updated. + UPDATE: u64, + // For a MERGE command, the tag is MERGE rows where rows is the number + // of rows inserted, updated, or deleted. + MERGE: u64, + // For a SELECT or CREATE TABLE AS command, the tag is SELECT rows where + // rows is the number of rows retrieved. + SELECT: u64, + // For a MOVE command, the tag is MOVE rows where rows is the number of + // rows the cursor's position has been changed by. + MOVE: u64, + // For a FETCH command, the tag is FETCH rows where rows is the number + // of rows that have been retrieved from the cursor. + FETCH: u64, + // For a COPY command, the tag is COPY rows where rows is the number of + // rows copied. (Note: the row count appears only in PostgreSQL 8.2 and + // later.) 
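+    // e.g. init("COPY 42") below parses this tag into .{ .COPY = 42 }.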
+ COPY: u64, + + other: []const u8, + + pub fn toJSTag(this: CommandTag, globalObject: *JSC.JSGlobalObject) JSValue { + return switch (this) { + .INSERT => JSValue.jsNumber(1), + .DELETE => JSValue.jsNumber(2), + .UPDATE => JSValue.jsNumber(3), + .MERGE => JSValue.jsNumber(4), + .SELECT => JSValue.jsNumber(5), + .MOVE => JSValue.jsNumber(6), + .FETCH => JSValue.jsNumber(7), + .COPY => JSValue.jsNumber(8), + .other => |tag| JSC.ZigString.init(tag).toJS(globalObject), + }; + } + + pub fn toJSNumber(this: CommandTag) JSValue { + return switch (this) { + .other => JSValue.jsNumber(0), + inline else => |val| JSValue.jsNumber(val), + }; + } + + const KnownCommand = enum { + INSERT, + DELETE, + UPDATE, + MERGE, + SELECT, + MOVE, + FETCH, + COPY, + + pub const Map = bun.ComptimeEnumMap(KnownCommand); + }; + + pub fn init(tag: []const u8) CommandTag { + const first_space_index = bun.strings.indexOfChar(tag, ' ') orelse return .{ .other = tag }; + const cmd = KnownCommand.Map.get(tag[0..first_space_index]) orelse return .{ + .other = tag, + }; + + const number = brk: { + switch (cmd) { + .INSERT => { + var remaining = tag[@min(first_space_index + 1, tag.len)..]; + const second_space = bun.strings.indexOfChar(remaining, ' ') orelse return .{ .other = tag }; + remaining = remaining[@min(second_space + 1, remaining.len)..]; + break :brk std.fmt.parseInt(u64, remaining, 0) catch |err| { + debug("CommandTag failed to parse number: {s}", .{@errorName(err)}); + return .{ .other = tag }; + }; + }, + else => { + const after_tag = tag[@min(first_space_index + 1, tag.len)..]; + break :brk std.fmt.parseInt(u64, after_tag, 0) catch |err| { + debug("CommandTag failed to parse number: {s}", .{@errorName(err)}); + return .{ .other = tag }; + }; + }, + } + }; + + switch (cmd) { + inline else => |t| return @unionInit(CommandTag, @tagName(t), number), + } + } +}; + +const debug = bun.Output.scoped(.Postgres, false); + +// @sortImports + +const bun = @import("bun"); +const std = @import("std"); + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/ConnectionFlags.zig b/src/sql/postgres/ConnectionFlags.zig new file mode 100644 index 0000000000..49ad9d6f90 --- /dev/null +++ b/src/sql/postgres/ConnectionFlags.zig @@ -0,0 +1,7 @@ +pub const ConnectionFlags = packed struct { + is_ready_for_query: bool = false, + is_processing_data: bool = false, + use_unnamed_prepared_statements: bool = false, +}; + +// @sortImports diff --git a/src/sql/postgres/Data.zig b/src/sql/postgres/Data.zig new file mode 100644 index 0000000000..557d00fe49 --- /dev/null +++ b/src/sql/postgres/Data.zig @@ -0,0 +1,67 @@ +pub const Data = union(enum) { + owned: bun.ByteList, + temporary: []const u8, + empty: void, + + pub const Empty: Data = .{ .empty = {} }; + + pub fn toOwned(this: @This()) !bun.ByteList { + return switch (this) { + .owned => this.owned, + .temporary => bun.ByteList.init(try bun.default_allocator.dupe(u8, this.temporary)), + .empty => bun.ByteList.init(&.{}), + }; + } + + pub fn deinit(this: *@This()) void { + switch (this.*) { + .owned => this.owned.deinitWithAllocator(bun.default_allocator), + .temporary => {}, + .empty => {}, + } + } + + /// Zero bytes before deinit + /// Generally, for security reasons. 
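+    /// Note: only the .owned buffer is zeroed; .temporary slices are borrowed and are left untouched.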
+ pub fn zdeinit(this: *@This()) void { + switch (this.*) { + .owned => { + + // Zero bytes before deinit + @memset(this.owned.slice(), 0); + + this.owned.deinitWithAllocator(bun.default_allocator); + }, + .temporary => {}, + .empty => {}, + } + } + + pub fn slice(this: @This()) []const u8 { + return switch (this) { + .owned => this.owned.slice(), + .temporary => this.temporary, + .empty => "", + }; + } + + pub fn substring(this: @This(), start_index: usize, end_index: usize) Data { + return switch (this) { + .owned => .{ .temporary = this.owned.slice()[start_index..end_index] }, + .temporary => .{ .temporary = this.temporary[start_index..end_index] }, + .empty => .{ .empty = {} }, + }; + } + + pub fn sliceZ(this: @This()) [:0]const u8 { + return switch (this) { + .owned => this.owned.slice()[0..this.owned.len :0], + .temporary => this.temporary[0..this.temporary.len :0], + .empty => "", + }; + } +}; + +// @sortImports + +const bun = @import("bun"); diff --git a/src/sql/DataCell.zig b/src/sql/postgres/DataCell.zig similarity index 99% rename from src/sql/DataCell.zig rename to src/sql/postgres/DataCell.zig index 3d841657f4..f2f8dd5f00 100644 --- a/src/sql/DataCell.zig +++ b/src/sql/postgres/DataCell.zig @@ -1085,19 +1085,23 @@ pub const DataCell = extern struct { }; }; +const debug = bun.Output.scoped(.Postgres, false); + +// @sortImports + +const PostgresCachedStructure = @import("./PostgresCachedStructure.zig"); +const protocol = @import("./PostgresProtocol.zig"); +const std = @import("std"); +const Data = @import("./Data.zig").Data; +const PostgresSQLQueryResultMode = @import("./PostgresSQLQueryResultMode.zig").PostgresSQLQueryResultMode; + +const types = @import("./PostgresTypes.zig"); +const AnyPostgresError = types.AnyPostgresError; +const int4 = types.int4; +const short = types.short; + const bun = @import("bun"); +const String = bun.String; const JSC = bun.JSC; -const std = @import("std"); const JSValue = JSC.JSValue; -const postgres = @import("./postgres.zig"); -const Data = postgres.Data; -const types = postgres.types; -const String = bun.String; -const int4 = postgres.int4; -const AnyPostgresError = postgres.AnyPostgresError; -const protocol = postgres.protocol; -const PostgresSQLQueryResultMode = postgres.PostgresSQLQueryResultMode; -const PostgresCachedStructure = postgres.PostgresCachedStructure; -const debug = postgres.debug; -const short = postgres.short; diff --git a/src/sql/postgres/DebugSocketMonitorReader.zig b/src/sql/postgres/DebugSocketMonitorReader.zig new file mode 100644 index 0000000000..19a95c58cd --- /dev/null +++ b/src/sql/postgres/DebugSocketMonitorReader.zig @@ -0,0 +1,25 @@ +var file: std.fs.File = undefined; +pub var enabled = false; +pub var check = std.once(load); + +pub fn load() void { + if (bun.getenvZAnyCase("BUN_POSTGRES_SOCKET_MONITOR_READER")) |monitor| { + enabled = true; + file = std.fs.cwd().createFile(monitor, .{ .truncate = true }) catch { + enabled = false; + return; + }; + debug("duplicating reads to {s}", .{monitor}); + } +} + +pub fn write(data: []const u8) void { + file.writeAll(data) catch {}; +} + +const debug = bun.Output.scoped(.Postgres, false); + +// @sortImports + +const bun = @import("bun"); +const std = @import("std"); diff --git a/src/sql/postgres/DebugSocketMonitorWriter.zig b/src/sql/postgres/DebugSocketMonitorWriter.zig new file mode 100644 index 0000000000..5dd43cdf79 --- /dev/null +++ b/src/sql/postgres/DebugSocketMonitorWriter.zig @@ -0,0 +1,25 @@ +var file: std.fs.File = undefined; +pub var enabled = false; +pub var 
check = std.once(load); + +pub fn write(data: []const u8) void { + file.writeAll(data) catch {}; +} + +pub fn load() void { + if (bun.getenvZAnyCase("BUN_POSTGRES_SOCKET_MONITOR")) |monitor| { + enabled = true; + file = std.fs.cwd().createFile(monitor, .{ .truncate = true }) catch { + enabled = false; + return; + }; + debug("writing to {s}", .{monitor}); + } +} + +const debug = bun.Output.scoped(.Postgres, false); + +// @sortImports + +const bun = @import("bun"); +const std = @import("std"); diff --git a/src/sql/postgres/ObjectIterator.zig b/src/sql/postgres/ObjectIterator.zig new file mode 100644 index 0000000000..4c8c6be7e9 --- /dev/null +++ b/src/sql/postgres/ObjectIterator.zig @@ -0,0 +1,64 @@ +array: JSValue, +columns: JSValue = .zero, +globalObject: *JSC.JSGlobalObject, +cell_i: usize = 0, +row_i: usize = 0, +current_row: JSC.JSValue = .zero, +columns_count: usize = 0, +array_length: usize = 0, +any_failed: bool = false, + +pub fn next(this: *ObjectIterator) ?JSC.JSValue { + if (this.row_i >= this.array_length) { + return null; + } + + const cell_i = this.cell_i; + this.cell_i += 1; + const row_i = this.row_i; + + const globalObject = this.globalObject; + + if (this.current_row == .zero) { + this.current_row = JSC.JSObject.getIndex(this.array, globalObject, @intCast(row_i)) catch { + this.any_failed = true; + return null; + }; + if (this.current_row.isEmptyOrUndefinedOrNull()) { + return globalObject.throw("Expected a row to be returned at index {d}", .{row_i}) catch null; + } + } + + defer { + if (this.cell_i >= this.columns_count) { + this.cell_i = 0; + this.current_row = .zero; + this.row_i += 1; + } + } + + const property = JSC.JSObject.getIndex(this.columns, globalObject, @intCast(cell_i)) catch { + this.any_failed = true; + return null; + }; + if (property.isUndefined()) { + return globalObject.throw("Expected a column at index {d} in row {d}", .{ cell_i, row_i }) catch null; + } + + const value = this.current_row.getOwnByValue(globalObject, property); + if (value == .zero or (value != null and value.?.isUndefined())) { + if (!globalObject.hasException()) + return globalObject.throw("Expected a value at index {d} in row {d}", .{ cell_i, row_i }) catch null; + this.any_failed = true; + return null; + } + return value; +} + +// @sortImports + +const ObjectIterator = @This(); +const bun = @import("bun"); + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/PostgresCachedStructure.zig b/src/sql/postgres/PostgresCachedStructure.zig new file mode 100644 index 0000000000..367e4c19c8 --- /dev/null +++ b/src/sql/postgres/PostgresCachedStructure.zig @@ -0,0 +1,34 @@ +structure: JSC.Strong.Optional = .empty, +// only populated if there are more than JSC.JSC__JSObject__maxInlineCapacity fields; otherwise the structure will contain all fields inlined +fields: ?[]JSC.JSObject.ExternColumnIdentifier = null, + +pub fn has(this: *@This()) bool { + return this.structure.has() or this.fields != null; +} + +pub fn jsValue(this: *const @This()) ?JSC.JSValue { + return this.structure.get(); +} + +pub fn set(this: *@This(), globalObject: *JSC.JSGlobalObject, value: ?JSC.JSValue, fields: ?[]JSC.JSObject.ExternColumnIdentifier) void { + if (value) |v| { + this.structure.set(globalObject, v); + } + this.fields = fields; +} + +pub fn deinit(this: *@This()) void { + this.structure.deinit(); + if (this.fields) |fields| { + this.fields = null; + for (fields) |*name| { + name.deinit(); + } + bun.default_allocator.free(fields); + } +} + +// @sortImports + +const bun = @import("bun");
+const JSC = bun.JSC; diff --git a/src/sql/postgres/PostgresProtocol.zig b/src/sql/postgres/PostgresProtocol.zig new file mode 100644 index 0000000000..8f4ac063aa --- /dev/null +++ b/src/sql/postgres/PostgresProtocol.zig @@ -0,0 +1,63 @@ +pub const CloseComplete = [_]u8{'3'} ++ toBytes(Int32(4)); +pub const EmptyQueryResponse = [_]u8{'I'} ++ toBytes(Int32(4)); +pub const Terminate = [_]u8{'X'} ++ toBytes(Int32(4)); + +pub const BindComplete = [_]u8{'2'} ++ toBytes(Int32(4)); + +pub const ParseComplete = [_]u8{'1'} ++ toBytes(Int32(4)); + +pub const CopyDone = [_]u8{'c'} ++ toBytes(Int32(4)); +pub const Sync = [_]u8{'S'} ++ toBytes(Int32(4)); +pub const Flush = [_]u8{'H'} ++ toBytes(Int32(4)); +pub const SSLRequest = toBytes(Int32(8)) ++ toBytes(Int32(80877103)); +pub const NoData = [_]u8{'n'} ++ toBytes(Int32(4)); + +pub fn writeQuery(query: []const u8, comptime Context: type, writer: NewWriter(Context)) !void { + const count: u32 = @sizeOf((u32)) + @as(u32, @intCast(query.len)) + 1; + const header = [_]u8{ + 'Q', + } ++ toBytes(Int32(count)); + try writer.write(&header); + try writer.string(query); +} + +// @sortImports + +pub const ArrayList = @import("./protocol/ArrayList.zig"); +pub const BackendKeyData = @import("./protocol/BackendKeyData.zig"); +pub const CommandComplete = @import("./protocol/CommandComplete.zig"); +pub const CopyData = @import("./protocol/CopyData.zig"); +pub const CopyFail = @import("./protocol/CopyFail.zig"); +pub const DataRow = @import("./protocol/DataRow.zig"); +pub const Describe = @import("./protocol/Describe.zig"); +pub const ErrorResponse = @import("./protocol/ErrorResponse.zig"); +pub const Execute = @import("./protocol/Execute.zig"); +pub const FieldDescription = @import("./protocol/FieldDescription.zig"); +pub const NegotiateProtocolVersion = @import("./protocol/NegotiateProtocolVersion.zig"); +pub const NoticeResponse = @import("./protocol/NoticeResponse.zig"); +pub const NotificationResponse = @import("./protocol/NotificationResponse.zig"); +pub const ParameterDescription = @import("./protocol/ParameterDescription.zig"); +pub const ParameterStatus = @import("./protocol/ParameterStatus.zig"); +pub const Parse = @import("./protocol/Parse.zig"); +pub const PasswordMessage = @import("./protocol/PasswordMessage.zig"); +pub const ReadyForQuery = @import("./protocol/ReadyForQuery.zig"); +pub const RowDescription = @import("./protocol/RowDescription.zig"); +pub const SASLInitialResponse = @import("./protocol/SASLInitialResponse.zig"); +pub const SASLResponse = @import("./protocol/SASLResponse.zig"); +pub const StackReader = @import("./protocol/StackReader.zig"); +pub const StartupMessage = @import("./protocol/StartupMessage.zig"); +const std = @import("std"); +const types = @import("./PostgresTypes.zig"); +pub const Authentication = @import("./protocol/Authentication.zig").Authentication; +pub const ColumnIdentifier = @import("./protocol/ColumnIdentifier.zig").ColumnIdentifier; +pub const DecoderWrap = @import("./protocol/DecoderWrap.zig").DecoderWrap; +pub const FieldMessage = @import("./protocol/FieldMessage.zig").FieldMessage; +pub const FieldType = @import("./protocol/FieldType.zig").FieldType; +pub const NewReader = @import("./protocol/NewReader.zig").NewReader; +pub const NewWriter = @import("./protocol/NewWriter.zig").NewWriter; +pub const PortalOrPreparedStatement = @import("./protocol/PortalOrPreparedStatement.zig").PortalOrPreparedStatement; +pub const WriteWrap = @import("./protocol/WriteWrap.zig").WriteWrap; +const toBytes = std.mem.toBytes; + 
+const int_types = @import("./types/int_types.zig"); +const Int32 = int_types.Int32; diff --git a/src/sql/postgres/PostgresRequest.zig b/src/sql/postgres/PostgresRequest.zig new file mode 100644 index 0000000000..c8769f4047 --- /dev/null +++ b/src/sql/postgres/PostgresRequest.zig @@ -0,0 +1,348 @@ +pub fn writeBind( + name: []const u8, + cursor_name: bun.String, + globalObject: *JSC.JSGlobalObject, + values_array: JSValue, + columns_value: JSValue, + parameter_fields: []const int4, + result_fields: []const protocol.FieldDescription, + comptime Context: type, + writer: protocol.NewWriter(Context), +) !void { + try writer.write("B"); + const length = try writer.length(); + + try writer.String(cursor_name); + try writer.string(name); + + const len: u32 = @truncate(parameter_fields.len); + + // The number of parameter format codes that follow (denoted C + // below). This can be zero to indicate that there are no + // parameters or that the parameters all use the default format + // (text); or one, in which case the specified format code is + // applied to all parameters; or it can equal the actual number + // of parameters. + try writer.short(len); + + var iter = try QueryBindingIterator.init(values_array, columns_value, globalObject); + for (0..len) |i| { + const parameter_field = parameter_fields[i]; + const is_custom_type = std.math.maxInt(short) < parameter_field; + const tag: types.Tag = if (is_custom_type) .text else @enumFromInt(@as(short, @intCast(parameter_field))); + + const force_text = is_custom_type or (tag.isBinaryFormatSupported() and brk: { + iter.to(@truncate(i)); + if (try iter.next()) |value| { + break :brk value.isString(); + } + if (iter.anyFailed()) { + return error.InvalidQueryBinding; + } + break :brk false; + }); + + if (force_text) { + // If they pass a value as a string, let's avoid attempting to + // convert it to the binary representation. This minimizes the room + // for mistakes on our end, such as stripping the timezone + // differently than what Postgres does when given a timestamp with + // timezone. + try writer.short(0); + continue; + } + + try writer.short( + tag.formatCode(), + ); + } + + // The number of parameter values that follow (possibly zero). This + // must match the number of parameters needed by the query. + try writer.short(len); + + debug("Bind: {} ({d} args)", .{ bun.fmt.quote(name), len }); + iter.to(0); + var i: usize = 0; + while (try iter.next()) |value| : (i += 1) { + const tag: types.Tag = brk: { + if (i >= len) { + // parameter is in the array but not in parameter_fields + // this is probably a bug in bun; let's return .text here so the server will send error 08P01, + // which will describe the error better, saying exactly how many parameters are missing and expected + // Example: + // SQL error: PostgresError: bind message supplies 0 parameters, but prepared statement "PSELECT * FROM test_table WHERE id=$1 .in$0" requires 1 + // errno: "08P01", + // code: "ERR_POSTGRES_SERVER_ERROR" + break :brk .text; + } + const parameter_field = parameter_fields[i]; + const is_custom_type = std.math.maxInt(short) < parameter_field; + break :brk if (is_custom_type) .text else @enumFromInt(@as(short, @intCast(parameter_field))); + }; + if (value.isEmptyOrUndefinedOrNull()) { + debug(" -> NULL", .{}); + // As a special case, -1 indicates a + // NULL parameter value. No value bytes follow in the NULL case.
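+            // (the @bitCast reinterprets the i32 -1 so it can be passed to the int4 writer below)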
+ try writer.int4(@bitCast(@as(i32, -1))); + continue; + } + if (comptime bun.Environment.enable_logs) { + debug(" -> {s}", .{tag.tagName() orelse "(unknown)"}); + } + + switch ( + // If they pass a value as a string, let's avoid attempting to + // convert it to the binary representation. This minimizes the room + // for mistakes on our end, such as stripping the timezone + // differently than what Postgres does when given a timestamp with + // timezone. + if (tag.isBinaryFormatSupported() and value.isString()) .text else tag) { + .jsonb, .json => { + var str = bun.String.empty; + defer str.deref(); + try value.jsonStringify(globalObject, 0, &str); + const slice = str.toUTF8WithoutRef(bun.default_allocator); + defer slice.deinit(); + const l = try writer.length(); + try writer.write(slice.slice()); + try l.writeExcludingSelf(); + }, + .bool => { + const l = try writer.length(); + try writer.write(&[1]u8{@intFromBool(value.toBoolean())}); + try l.writeExcludingSelf(); + }, + .timestamp, .timestamptz => { + const l = try writer.length(); + try writer.int8(types.date.fromJS(globalObject, value)); + try l.writeExcludingSelf(); + }, + .bytea => { + var bytes: []const u8 = ""; + if (value.asArrayBuffer(globalObject)) |buf| { + bytes = buf.byteSlice(); + } + const l = try writer.length(); + debug(" {d} bytes", .{bytes.len}); + + try writer.write(bytes); + try l.writeExcludingSelf(); + }, + .int4 => { + const l = try writer.length(); + try writer.int4(@bitCast(try value.coerceToInt32(globalObject))); + try l.writeExcludingSelf(); + }, + .int4_array => { + const l = try writer.length(); + try writer.int4(@bitCast(try value.coerceToInt32(globalObject))); + try l.writeExcludingSelf(); + }, + .float8 => { + const l = try writer.length(); + try writer.f64(@bitCast(try value.toNumber(globalObject))); + try l.writeExcludingSelf(); + }, + + else => { + const str = try String.fromJS(value, globalObject); + if (str.tag == .Dead) return error.OutOfMemory; + defer str.deref(); + const slice = str.toUTF8WithoutRef(bun.default_allocator); + defer slice.deinit(); + const l = try writer.length(); + try writer.write(slice.slice()); + try l.writeExcludingSelf(); + }, + } + } + + var any_non_text_fields: bool = false; + for (result_fields) |field| { + if (field.typeTag().isBinaryFormatSupported()) { + any_non_text_fields = true; + break; + } + } + + if (any_non_text_fields) { + try writer.short(result_fields.len); + for (result_fields) |field| { + try writer.short( + field.typeTag().formatCode(), + ); + } + } else { + try writer.short(0); + } + + try length.write(); +} + +pub fn writeQuery( + query: []const u8, + name: []const u8, + params: []const int4, + comptime Context: type, + writer: protocol.NewWriter(Context), +) AnyPostgresError!void { + { + var q = protocol.Parse{ + .name = name, + .params = params, + .query = query, + }; + try q.writeInternal(Context, writer); + debug("Parse: {}", .{bun.fmt.quote(query)}); + } + + { + var d = protocol.Describe{ + .p = .{ + .prepared_statement = name, + }, + }; + try d.writeInternal(Context, writer); + debug("Describe: {}", .{bun.fmt.quote(name)}); + } +} + +pub fn prepareAndQueryWithSignature( + globalObject: *JSC.JSGlobalObject, + query: []const u8, + array_value: JSValue, + comptime Context: type, + writer: protocol.NewWriter(Context), + signature: *Signature, +) AnyPostgresError!void { + try writeQuery(query, signature.prepared_statement_name, signature.fields, Context, writer); + try writeBind(signature.prepared_statement_name, bun.String.empty, globalObject, 
array_value, .zero, &.{}, &.{}, Context, writer); + var exec = protocol.Execute{ + .p = .{ + .prepared_statement = signature.prepared_statement_name, + }, + }; + try exec.writeInternal(Context, writer); + + try writer.write(&protocol.Flush); + try writer.write(&protocol.Sync); +} + +pub fn bindAndExecute( + globalObject: *JSC.JSGlobalObject, + statement: *PostgresSQLStatement, + array_value: JSValue, + columns_value: JSValue, + comptime Context: type, + writer: protocol.NewWriter(Context), +) !void { + try writeBind(statement.signature.prepared_statement_name, bun.String.empty, globalObject, array_value, columns_value, statement.parameters, statement.fields, Context, writer); + var exec = protocol.Execute{ + .p = .{ + .prepared_statement = statement.signature.prepared_statement_name, + }, + }; + try exec.writeInternal(Context, writer); + + try writer.write(&protocol.Flush); + try writer.write(&protocol.Sync); +} + +pub fn executeQuery( + query: []const u8, + comptime Context: type, + writer: protocol.NewWriter(Context), +) !void { + try protocol.writeQuery(query, Context, writer); + try writer.write(&protocol.Flush); + try writer.write(&protocol.Sync); +} + +pub fn onData( + connection: *PostgresSQLConnection, + comptime Context: type, + reader: protocol.NewReader(Context), +) !void { + while (true) { + reader.markMessageStart(); + const c = try reader.int(u8); + debug("read: {c}", .{c}); + switch (c) { + 'D' => try connection.on(.DataRow, Context, reader), + 'd' => try connection.on(.CopyData, Context, reader), + 'S' => { + if (connection.tls_status == .message_sent) { + bun.debugAssert(connection.tls_status.message_sent == 8); + connection.tls_status = .ssl_ok; + connection.setupTLS(); + return; + } + + try connection.on(.ParameterStatus, Context, reader); + }, + 'Z' => try connection.on(.ReadyForQuery, Context, reader), + 'C' => try connection.on(.CommandComplete, Context, reader), + '2' => try connection.on(.BindComplete, Context, reader), + '1' => try connection.on(.ParseComplete, Context, reader), + 't' => try connection.on(.ParameterDescription, Context, reader), + 'T' => try connection.on(.RowDescription, Context, reader), + 'R' => try connection.on(.Authentication, Context, reader), + 'n' => try connection.on(.NoData, Context, reader), + 'K' => try connection.on(.BackendKeyData, Context, reader), + 'E' => try connection.on(.ErrorResponse, Context, reader), + 's' => try connection.on(.PortalSuspended, Context, reader), + '3' => try connection.on(.CloseComplete, Context, reader), + 'G' => try connection.on(.CopyInResponse, Context, reader), + 'N' => { + if (connection.tls_status == .message_sent) { + connection.tls_status = .ssl_not_available; + debug("Server does not support SSL", .{}); + if (connection.ssl_mode == .require) { + connection.fail("Server does not support SSL", error.TLSNotAvailable); + return; + } + continue; + } + + try connection.on(.NoticeResponse, Context, reader); + }, + 'I' => try connection.on(.EmptyQueryResponse, Context, reader), + 'H' => try connection.on(.CopyOutResponse, Context, reader), + 'c' => try connection.on(.CopyDone, Context, reader), + 'W' => try connection.on(.CopyBothResponse, Context, reader), + + else => { + debug("Unknown message: {c}", .{c}); + const to_skip = try reader.length() -| 1; + debug("to_skip: {d}", .{to_skip}); + try reader.skip(@intCast(@max(to_skip, 0))); + }, + } + } +} + +pub const Queue = std.fifo.LinearFifo(*PostgresSQLQuery, .Dynamic); + +const debug = bun.Output.scoped(.Postgres, false); + +// @sortImports + +const 
PostgresSQLConnection = @import("./PostgresSQLConnection.zig"); +const PostgresSQLQuery = @import("./PostgresSQLQuery.zig"); +const PostgresSQLStatement = @import("./PostgresSQLStatement.zig"); +const Signature = @import("./Signature.zig"); +const protocol = @import("./PostgresProtocol.zig"); +const std = @import("std"); +const QueryBindingIterator = @import("./QueryBindingIterator.zig").QueryBindingIterator; + +const types = @import("./PostgresTypes.zig"); +const AnyPostgresError = @import("./PostgresTypes.zig").AnyPostgresError; +const int4 = types.int4; +const short = types.short; + +const bun = @import("bun"); +const String = bun.String; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/PostgresSQLConnection.zig b/src/sql/postgres/PostgresSQLConnection.zig new file mode 100644 index 0000000000..4ee0e3c8f2 --- /dev/null +++ b/src/sql/postgres/PostgresSQLConnection.zig @@ -0,0 +1,1574 @@ +socket: Socket, +status: Status = Status.connecting, +ref_count: u32 = 1, + +write_buffer: bun.OffsetByteList = .{}, +read_buffer: bun.OffsetByteList = .{}, +last_message_start: u32 = 0, +requests: PostgresRequest.Queue, + +poll_ref: bun.Async.KeepAlive = .{}, +globalObject: *JSC.JSGlobalObject, + +statements: PreparedStatementsMap, +prepared_statement_id: u64 = 0, +pending_activity_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), +js_value: JSValue = .js_undefined, + +backend_parameters: bun.StringMap = bun.StringMap.init(bun.default_allocator, true), +backend_key_data: protocol.BackendKeyData = .{}, + +database: []const u8 = "", +user: []const u8 = "", +password: []const u8 = "", +path: []const u8 = "", +options: []const u8 = "", +options_buf: []const u8 = "", + +authentication_state: AuthenticationState = .{ .pending = {} }, + +tls_ctx: ?*uws.SocketContext = null, +tls_config: JSC.API.ServerConfig.SSLConfig = .{}, +tls_status: TLSStatus = .none, +ssl_mode: SSLMode = .disable, + +idle_timeout_interval_ms: u32 = 0, +connection_timeout_ms: u32 = 0, + +flags: ConnectionFlags = .{}, + +/// Before being connected, this is a connection timeout timer. +/// After being connected, this is an idle timeout timer. +timer: bun.api.Timer.EventLoopTimer = .{ + .tag = .PostgresSQLConnectionTimeout, + .next = .{ + .sec = 0, + .nsec = 0, + }, +}, + +/// This timer controls the maximum lifetime of a connection. +/// It starts when the connection successfully starts (i.e. after handshake is complete). +/// It stops when the connection is closed. 
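+/// A value of 0 disables the max lifetime timer entirely.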
+max_lifetime_interval_ms: u32 = 0, +max_lifetime_timer: bun.api.Timer.EventLoopTimer = .{ + .tag = .PostgresSQLConnectionMaxLifetime, + .next = .{ + .sec = 0, + .nsec = 0, + }, +}, + +fn getTimeoutInterval(this: *const PostgresSQLConnection) u32 { + return switch (this.status) { + .connected => this.idle_timeout_interval_ms, + .failed => 0, + else => this.connection_timeout_ms, + }; +} +pub fn disableConnectionTimeout(this: *PostgresSQLConnection) void { + if (this.timer.state == .ACTIVE) { + this.globalObject.bunVM().timer.remove(&this.timer); + } + this.timer.state = .CANCELLED; +} +pub fn resetConnectionTimeout(this: *PostgresSQLConnection) void { + // if we are processing data, don't reset the timeout, wait for the data to be processed + if (this.flags.is_processing_data) return; + const interval = this.getTimeoutInterval(); + if (this.timer.state == .ACTIVE) { + this.globalObject.bunVM().timer.remove(&this.timer); + } + if (interval == 0) { + return; + } + + this.timer.next = bun.timespec.msFromNow(@intCast(interval)); + this.globalObject.bunVM().timer.insert(&this.timer); +} + +pub fn getQueries(_: *PostgresSQLConnection, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { + if (js.queriesGetCached(thisValue)) |value| { + return value; + } + + const array = try JSC.JSValue.createEmptyArray(globalObject, 0); + js.queriesSetCached(thisValue, globalObject, array); + + return array; +} + +pub fn getOnConnect(_: *PostgresSQLConnection, thisValue: JSC.JSValue, _: *JSC.JSGlobalObject) JSC.JSValue { + if (js.onconnectGetCached(thisValue)) |value| { + return value; + } + + return .js_undefined; +} + +pub fn setOnConnect(_: *PostgresSQLConnection, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { + js.onconnectSetCached(thisValue, globalObject, value); +} + +pub fn getOnClose(_: *PostgresSQLConnection, thisValue: JSC.JSValue, _: *JSC.JSGlobalObject) JSC.JSValue { + if (js.oncloseGetCached(thisValue)) |value| { + return value; + } + + return .js_undefined; +} + +pub fn setOnClose(_: *PostgresSQLConnection, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { + js.oncloseSetCached(thisValue, globalObject, value); +} + +pub fn setupTLS(this: *PostgresSQLConnection) void { + debug("setupTLS", .{}); + const new_socket = this.socket.SocketTCP.socket.connected.upgrade(this.tls_ctx.?, this.tls_config.server_name) orelse { + this.fail("Failed to upgrade to TLS", error.TLSUpgradeFailed); + return; + }; + this.socket = .{ + .SocketTLS = .{ + .socket = .{ + .connected = new_socket, + }, + }, + }; + + this.start(); +} +fn setupMaxLifetimeTimerIfNecessary(this: *PostgresSQLConnection) void { + if (this.max_lifetime_interval_ms == 0) return; + if (this.max_lifetime_timer.state == .ACTIVE) return; + + this.max_lifetime_timer.next = bun.timespec.msFromNow(@intCast(this.max_lifetime_interval_ms)); + this.globalObject.bunVM().timer.insert(&this.max_lifetime_timer); +} + +pub fn onConnectionTimeout(this: *PostgresSQLConnection) bun.api.Timer.EventLoopTimer.Arm { + debug("onConnectionTimeout", .{}); + + this.timer.state = .FIRED; + if (this.flags.is_processing_data) { + return .disarm; + } + + if (this.getTimeoutInterval() == 0) { + this.resetConnectionTimeout(); + return .disarm; + } + + switch (this.status) { + .connected => { + this.failFmt(.POSTGRES_IDLE_TIMEOUT, "Idle timeout reached after {}", .{bun.fmt.fmtDurationOneDecimal(@as(u64, this.idle_timeout_interval_ms) *| std.time.ns_per_ms)}); + }, + .sent_startup_message => { + this.failFmt(.POSTGRES_CONNECTION_TIMEOUT, "Connection timed out after {} (sent startup message, but never received response)", .{bun.fmt.fmtDurationOneDecimal(@as(u64, this.connection_timeout_ms) *| std.time.ns_per_ms)}); + }, + else => { + this.failFmt(.POSTGRES_CONNECTION_TIMEOUT, "Connection timeout after {}", .{bun.fmt.fmtDurationOneDecimal(@as(u64, this.connection_timeout_ms) *| std.time.ns_per_ms)}); + }, + } + return .disarm; +} + +pub fn onMaxLifetimeTimeout(this: *PostgresSQLConnection) bun.api.Timer.EventLoopTimer.Arm { + debug("onMaxLifetimeTimeout", .{}); + this.max_lifetime_timer.state = .FIRED; + if (this.status == .failed) return .disarm; + this.failFmt(.POSTGRES_LIFETIME_TIMEOUT, "Max lifetime timeout reached after {}", .{bun.fmt.fmtDurationOneDecimal(@as(u64, this.max_lifetime_interval_ms) *| std.time.ns_per_ms)}); + return .disarm; +} + +fn start(this: *PostgresSQLConnection) void { + this.setupMaxLifetimeTimerIfNecessary(); + this.resetConnectionTimeout(); + this.sendStartupMessage(); + + const event_loop = this.globalObject.bunVM().eventLoop(); + event_loop.enter(); + defer event_loop.exit(); + this.flushData(); +} + +pub fn hasPendingActivity(this: *PostgresSQLConnection) bool { + return this.pending_activity_count.load(.acquire) > 0; +} + +fn updateHasPendingActivity(this: *PostgresSQLConnection) void { + const a: u32 = if (this.requests.readableLength() > 0) 1 else 0; + const b: u32 = if (this.status != .disconnected) 1 else 0; + this.pending_activity_count.store(a + b, .release); +} + +pub fn setStatus(this: *PostgresSQLConnection, status: Status) void { + if (this.status == status) return; + defer this.updateHasPendingActivity(); + + this.status = status; + this.resetConnectionTimeout(); + + switch (status) { + .connected => { + const on_connect = this.consumeOnConnectCallback(this.globalObject) orelse return; + const js_value = this.js_value; + js_value.ensureStillAlive(); + this.globalObject.queueMicrotask(on_connect, &[_]JSValue{ JSValue.jsNull(), js_value }); + this.poll_ref.unref(this.globalObject.bunVM()); + }, + else => {}, + } +} + +pub fn finalize(this: *PostgresSQLConnection) void { + debug("PostgresSQLConnection finalize", .{}); + this.stopTimers(); + this.js_value = .zero; + this.deref(); +} + +pub fn flushDataAndResetTimeout(this: *PostgresSQLConnection) void { + this.resetConnectionTimeout(); + this.flushData(); +} + +pub fn flushData(this: *PostgresSQLConnection) void { + const chunk = this.write_buffer.remaining(); + if (chunk.len == 0) return; + const wrote = this.socket.write(chunk); + if (wrote > 0) { + SocketMonitor.write(chunk[0..@intCast(wrote)]); + this.write_buffer.consume(@intCast(wrote)); + } +} + +pub fn failWithJSValue(this: *PostgresSQLConnection, value: JSValue) void { + defer this.updateHasPendingActivity(); + this.stopTimers(); + if (this.status == .failed) return; + + this.status = .failed; + + this.ref(); + defer this.deref(); + // we defer the refAndClose so the on_close is called before we reject the pending requests + defer this.refAndClose(value); + const on_close = this.consumeOnCloseCallback(this.globalObject) orelse return; + + const loop = this.globalObject.bunVM().eventLoop(); + loop.enter(); + defer loop.exit(); + _ = on_close.call( + this.globalObject, + this.js_value, + &[_]JSValue{ + value, + this.getQueriesArray(), + }, + ) catch |e| this.globalObject.reportActiveExceptionAsUnhandled(e); +} + +pub fn failFmt(this: *PostgresSQLConnection, comptime error_code: JSC.Error, comptime fmt:
[:0]const u8, args: anytype) void { + this.failWithJSValue(error_code.fmt(this.globalObject, fmt, args)); +} + +pub fn fail(this: *PostgresSQLConnection, message: []const u8, err: AnyPostgresError) void { + debug("failed: {s}: {s}", .{ message, @errorName(err) }); + + const globalObject = this.globalObject; + + this.failWithJSValue(postgresErrorToJS(globalObject, message, err)); +} + +pub fn onClose(this: *PostgresSQLConnection) void { + var vm = this.globalObject.bunVM(); + const loop = vm.eventLoop(); + loop.enter(); + defer loop.exit(); + this.poll_ref.unref(this.globalObject.bunVM()); + + this.fail("Connection closed", error.ConnectionClosed); +} + +fn sendStartupMessage(this: *PostgresSQLConnection) void { + if (this.status != .connecting) return; + debug("sendStartupMessage", .{}); + this.status = .sent_startup_message; + var msg = protocol.StartupMessage{ + .user = Data{ .temporary = this.user }, + .database = Data{ .temporary = this.database }, + .options = Data{ .temporary = this.options }, + }; + msg.writeInternal(Writer, this.writer()) catch |err| { + this.fail("Failed to write startup message", err); + }; +} + +fn startTLS(this: *PostgresSQLConnection, socket: uws.AnySocket) void { + debug("startTLS", .{}); + const offset = switch (this.tls_status) { + .message_sent => |count| count, + else => 0, + }; + const ssl_request = [_]u8{ + 0x00, 0x00, 0x00, 0x08, // Length + 0x04, 0xD2, 0x16, 0x2F, // SSL request code + }; + + const written = socket.write(ssl_request[offset..]); + if (written > 0) { + this.tls_status = .{ + .message_sent = offset + @as(u8, @intCast(written)), + }; + } else { + this.tls_status = .{ + .message_sent = offset, + }; + } +} + +pub fn onOpen(this: *PostgresSQLConnection, socket: uws.AnySocket) void { + this.socket = socket; + + this.poll_ref.ref(this.globalObject.bunVM()); + this.updateHasPendingActivity(); + + if (this.tls_status == .message_sent or this.tls_status == .pending) { + this.startTLS(socket); + return; + } + + this.start(); +} + +pub fn onHandshake(this: *PostgresSQLConnection, success: i32, ssl_error: uws.us_bun_verify_error_t) void { + debug("onHandshake: {d} {d}", .{ success, ssl_error.error_no }); + const handshake_success = if (success == 1) true else false; + if (handshake_success) { + if (this.tls_config.reject_unauthorized != 0) { + // only reject the connection if reject_unauthorized == true + switch (this.ssl_mode) { + // https://github.com/porsager/postgres/blob/6ec85a432b17661ccacbdf7f765c651e88969d36/src/connection.js#L272-L279 + + .verify_ca, .verify_full => { + if (ssl_error.error_no != 0) { + this.failWithJSValue(ssl_error.toJS(this.globalObject)); + return; + } + + const ssl_ptr: *BoringSSL.c.SSL = @ptrCast(this.socket.getNativeHandle()); + if (BoringSSL.c.SSL_get_servername(ssl_ptr, 0)) |servername| { + const hostname = servername[0..bun.len(servername)]; + if (!BoringSSL.checkServerIdentity(ssl_ptr, hostname)) { + this.failWithJSValue(ssl_error.toJS(this.globalObject)); + } + } + }, + else => { + return; + }, + } + } + } else { + // if we get here, it is because the server rejected us, and error_no is the cause + // reject_unauthorized does not matter here, because the server already disconnected us + this.failWithJSValue(ssl_error.toJS(this.globalObject)); + } +} + +pub fn onTimeout(this: *PostgresSQLConnection) void { + _ = this; + debug("onTimeout", .{}); +} + +pub fn onDrain(this: *PostgresSQLConnection) void { + + // Don't send any other messages while we're waiting for TLS.
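+    // If the 8-byte SSLRequest was only partially written, finish writing it first.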
+ if (this.tls_status == .message_sent) { + if (this.tls_status.message_sent < 8) { + this.startTLS(this.socket); + } + + return; + } + + const event_loop = this.globalObject.bunVM().eventLoop(); + event_loop.enter(); + defer event_loop.exit(); + this.flushData(); +} + +pub fn onData(this: *PostgresSQLConnection, data: []const u8) void { + this.ref(); + this.flags.is_processing_data = true; + const vm = this.globalObject.bunVM(); + + this.disableConnectionTimeout(); + defer { + if (this.status == .connected and !this.hasQueryRunning() and this.write_buffer.remaining().len == 0) { + // Don't keep the process alive when there's nothing to do. + this.poll_ref.unref(vm); + } else if (this.status == .connected) { + // Keep the process alive if there's something to do. + this.poll_ref.ref(vm); + } + this.flags.is_processing_data = false; + + // reset the connection timeout after we're done processing the data + this.resetConnectionTimeout(); + this.deref(); + } + + const event_loop = vm.eventLoop(); + event_loop.enter(); + defer event_loop.exit(); + SocketMonitor.read(data); + // reset the head to the last message so remaining reflects the right amount of bytes + this.read_buffer.head = this.last_message_start; + + if (this.read_buffer.remaining().len == 0) { + var consumed: usize = 0; + var offset: usize = 0; + const reader = protocol.StackReader.init(data, &consumed, &offset); + PostgresRequest.onData(this, protocol.StackReader, reader) catch |err| { + if (err == error.ShortRead) { + if (comptime bun.Environment.allow_assert) { + debug("read_buffer: empty and received short read: last_message_start: {d}, head: {d}, len: {d}", .{ + offset, + consumed, + data.len, + }); + } + + this.read_buffer.head = 0; + this.last_message_start = 0; + this.read_buffer.byte_list.len = 0; + this.read_buffer.write(bun.default_allocator, data[offset..]) catch @panic("failed to write to read buffer"); + } else { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + + this.fail("Failed to read data", err); + } + }; + // no need to reset anything, its already empty + return; + } + // read buffer is not empty, so we need to write the data to the buffer and then read it + this.read_buffer.write(bun.default_allocator, data) catch @panic("failed to write to read buffer"); + PostgresRequest.onData(this, Reader, this.bufferedReader()) catch |err| { + if (err != error.ShortRead) { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + this.fail("Failed to read data", err); + return; + } + + if (comptime bun.Environment.allow_assert) { + debug("read_buffer: not empty and received short read: last_message_start: {d}, head: {d}, len: {d}", .{ + this.last_message_start, + this.read_buffer.head, + this.read_buffer.byte_list.len, + }); + } + return; + }; + + debug("clean read_buffer", .{}); + // success, we read everything! 
let's reset the last message start and the head + this.last_message_start = 0; + this.read_buffer.head = 0; +} + +pub fn constructor(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*PostgresSQLConnection { + _ = callframe; + return globalObject.throw("PostgresSQLConnection cannot be constructed directly", .{}); +} + +comptime { + const jscall = JSC.toJSHostFn(call); + @export(&jscall, .{ .name = "PostgresSQLConnection__createInstance" }); +} + +pub fn call(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + var vm = globalObject.bunVM(); + const arguments = callframe.arguments_old(15).slice(); + const hostname_str = try arguments[0].toBunString(globalObject); + defer hostname_str.deref(); + const port = try arguments[1].coerce(i32, globalObject); + + const username_str = try arguments[2].toBunString(globalObject); + defer username_str.deref(); + const password_str = try arguments[3].toBunString(globalObject); + defer password_str.deref(); + const database_str = try arguments[4].toBunString(globalObject); + defer database_str.deref(); + const ssl_mode: SSLMode = switch (arguments[5].toInt32()) { + 0 => .disable, + 1 => .prefer, + 2 => .require, + 3 => .verify_ca, + 4 => .verify_full, + else => .disable, + }; + + const tls_object = arguments[6]; + + var tls_config: JSC.API.ServerConfig.SSLConfig = .{}; + var tls_ctx: ?*uws.SocketContext = null; + if (ssl_mode != .disable) { + tls_config = if (tls_object.isBoolean() and tls_object.toBoolean()) + .{} + else if (tls_object.isObject()) + (JSC.API.ServerConfig.SSLConfig.fromJS(vm, globalObject, tls_object) catch return .zero) orelse .{} + else { + return globalObject.throwInvalidArguments("tls must be a boolean or an object", .{}); + }; + + if (globalObject.hasException()) { + tls_config.deinit(); + return .zero; + } + + // we always request the cert so we can verify it and also we manually abort the connection if the hostname doesn't match + const original_reject_unauthorized = tls_config.reject_unauthorized; + tls_config.reject_unauthorized = 0; + tls_config.request_cert = 1; + // We create it right here so we can throw errors early. 
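+        // (creating the TLS context eagerly means invalid TLS options throw here, rather than later during connect)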
+        const context_options = tls_config.asUSockets(); + var err: uws.create_bun_socket_error_t = .none; + tls_ctx = uws.SocketContext.createSSLContext(vm.uwsLoop(), @sizeOf(*PostgresSQLConnection), context_options, &err) orelse { + if (err != .none) { + return globalObject.throwValue(err.toJS(globalObject)); + } else { + return globalObject.throw("failed to create TLS context", .{}); + } + }; + // restore the original reject_unauthorized + tls_config.reject_unauthorized = original_reject_unauthorized; + if (err != .none) { + tls_config.deinit(); + if (tls_ctx) |ctx| { + ctx.deinit(true); + } + return globalObject.throwValue(err.toJS(globalObject)); + } + + uws.NewSocketHandler(true).configure(tls_ctx.?, true, *PostgresSQLConnection, SocketHandler(true)); + } + + var username: []const u8 = ""; + var password: []const u8 = ""; + var database: []const u8 = ""; + var options: []const u8 = ""; + var path: []const u8 = ""; + + const options_str = try arguments[7].toBunString(globalObject); + defer options_str.deref(); + + const path_str = try arguments[8].toBunString(globalObject); + defer path_str.deref(); + + const options_buf: []u8 = brk: { + var b = bun.StringBuilder{}; + b.cap += username_str.utf8ByteLength() + 1 + password_str.utf8ByteLength() + 1 + database_str.utf8ByteLength() + 1 + options_str.utf8ByteLength() + 1 + path_str.utf8ByteLength() + 1; + + b.allocate(bun.default_allocator) catch {}; + var u = username_str.toUTF8WithoutRef(bun.default_allocator); + defer u.deinit(); + username = b.append(u.slice()); + + var p = password_str.toUTF8WithoutRef(bun.default_allocator); + defer p.deinit(); + password = b.append(p.slice()); + + var d = database_str.toUTF8WithoutRef(bun.default_allocator); + defer d.deinit(); + database = b.append(d.slice()); + + var o = options_str.toUTF8WithoutRef(bun.default_allocator); + defer o.deinit(); + options = b.append(o.slice()); + + var _path = path_str.toUTF8WithoutRef(bun.default_allocator); + defer _path.deinit(); + path = b.append(_path.slice()); + + break :brk b.allocatedSlice(); + }; + + const on_connect = arguments[9]; + const on_close = arguments[10]; + const idle_timeout = arguments[11].toInt32(); + const connection_timeout = arguments[12].toInt32(); + const max_lifetime = arguments[13].toInt32(); + const use_unnamed_prepared_statements = arguments[14].asBoolean(); + + const ptr: *PostgresSQLConnection = try bun.default_allocator.create(PostgresSQLConnection); + + ptr.* = PostgresSQLConnection{ + .globalObject = globalObject, + + .database = database, + .user = username, + .password = password, + .path = path, + .options = options, + .options_buf = options_buf, + .socket = .{ .SocketTCP = .{ .socket = .{ .detached = {} } } }, + .requests = PostgresRequest.Queue.init(bun.default_allocator), + .statements = PreparedStatementsMap{}, + .tls_config = tls_config, + .tls_ctx = tls_ctx, + .ssl_mode = ssl_mode, + .tls_status = if (ssl_mode != .disable) .pending else .none, + .idle_timeout_interval_ms = @intCast(idle_timeout), + .connection_timeout_ms = @intCast(connection_timeout), + .max_lifetime_interval_ms = @intCast(max_lifetime), + .flags = .{ + .use_unnamed_prepared_statements = use_unnamed_prepared_statements, + }, + }; + + ptr.updateHasPendingActivity(); + ptr.poll_ref.ref(vm); + const js_value = ptr.toJS(globalObject); + js_value.ensureStillAlive(); + ptr.js_value = js_value; + + js.onconnectSetCached(js_value, globalObject, on_connect); + js.oncloseSetCached(js_value, globalObject, on_close); + bun.analytics.Features.postgres_connections += 1; +
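+    // Connect over a unix domain socket when a path is provided, otherwise over TCP.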
{ + const hostname = hostname_str.toUTF8(bun.default_allocator); + defer hostname.deinit(); + + const ctx = vm.rareData().postgresql_context.tcp orelse brk: { + const ctx_ = uws.SocketContext.createNoSSLContext(vm.uwsLoop(), @sizeOf(*PostgresSQLConnection)).?; + uws.NewSocketHandler(false).configure(ctx_, true, *PostgresSQLConnection, SocketHandler(false)); + vm.rareData().postgresql_context.tcp = ctx_; + break :brk ctx_; + }; + + if (path.len > 0) { + ptr.socket = .{ + .SocketTCP = uws.SocketTCP.connectUnixAnon(path, ctx, ptr, false) catch |err| { + tls_config.deinit(); + if (tls_ctx) |tls| { + tls.deinit(true); + } + ptr.deinit(); + return globalObject.throwError(err, "failed to connect to postgresql"); + }, + }; + } else { + ptr.socket = .{ + .SocketTCP = uws.SocketTCP.connectAnon(hostname.slice(), port, ctx, ptr, false) catch |err| { + tls_config.deinit(); + if (tls_ctx) |tls| { + tls.deinit(true); + } + ptr.deinit(); + return globalObject.throwError(err, "failed to connect to postgresql"); + }, + }; + } + ptr.resetConnectionTimeout(); + } + + return js_value; +} + +fn SocketHandler(comptime ssl: bool) type { + return struct { + const SocketType = uws.NewSocketHandler(ssl); + fn _socket(s: SocketType) Socket { + if (comptime ssl) { + return Socket{ .SocketTLS = s }; + } + + return Socket{ .SocketTCP = s }; + } + pub fn onOpen(this: *PostgresSQLConnection, socket: SocketType) void { + this.onOpen(_socket(socket)); + } + + fn onHandshake_(this: *PostgresSQLConnection, _: anytype, success: i32, ssl_error: uws.us_bun_verify_error_t) void { + this.onHandshake(success, ssl_error); + } + + pub const onHandshake = if (ssl) onHandshake_ else null; + + pub fn onClose(this: *PostgresSQLConnection, socket: SocketType, _: i32, _: ?*anyopaque) void { + _ = socket; + this.onClose(); + } + + pub fn onEnd(this: *PostgresSQLConnection, socket: SocketType) void { + _ = socket; + this.onClose(); + } + + pub fn onConnectError(this: *PostgresSQLConnection, socket: SocketType, _: i32) void { + _ = socket; + this.onClose(); + } + + pub fn onTimeout(this: *PostgresSQLConnection, socket: SocketType) void { + _ = socket; + this.onTimeout(); + } + + pub fn onData(this: *PostgresSQLConnection, socket: SocketType, data: []const u8) void { + _ = socket; + this.onData(data); + } + + pub fn onWritable(this: *PostgresSQLConnection, socket: SocketType) void { + _ = socket; + this.onDrain(); + } + }; +} + +pub fn ref(this: *@This()) void { + bun.assert(this.ref_count > 0); + this.ref_count += 1; +} + +pub fn doRef(this: *@This(), _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { + this.poll_ref.ref(this.globalObject.bunVM()); + this.updateHasPendingActivity(); + return .js_undefined; +} + +pub fn doUnref(this: *@This(), _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { + this.poll_ref.unref(this.globalObject.bunVM()); + this.updateHasPendingActivity(); + return .js_undefined; +} +pub fn doFlush(this: *PostgresSQLConnection, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { + this.flushData(); + return .js_undefined; +} + +pub fn deref(this: *@This()) void { + const ref_count = this.ref_count; + this.ref_count -= 1; + + if (ref_count == 1) { + this.disconnect(); + this.deinit(); + } +} + +pub fn doClose(this: *@This(), globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { + _ = globalObject; + this.disconnect(); + this.write_buffer.deinit(bun.default_allocator); + + return .js_undefined; +} + +pub fn stopTimers(this: *PostgresSQLConnection) void { + if 
(this.timer.state == .ACTIVE) { + this.globalObject.bunVM().timer.remove(&this.timer); + } + if (this.max_lifetime_timer.state == .ACTIVE) { + this.globalObject.bunVM().timer.remove(&this.max_lifetime_timer); + } +} + +pub fn deinit(this: *@This()) void { + this.stopTimers(); + var iter = this.statements.valueIterator(); + while (iter.next()) |stmt_ptr| { + var stmt = stmt_ptr.*; + stmt.deref(); + } + this.statements.deinit(bun.default_allocator); + this.write_buffer.deinit(bun.default_allocator); + this.read_buffer.deinit(bun.default_allocator); + this.backend_parameters.deinit(); + + bun.freeSensitive(bun.default_allocator, this.options_buf); + + this.tls_config.deinit(); + bun.default_allocator.destroy(this); +} + +fn refAndClose(this: *@This(), js_reason: ?JSC.JSValue) void { + // refAndClose is always called when we want to disconnect or when we are closed + + if (!this.socket.isClosed()) { + // the event loop needs to be alive to close the socket + this.poll_ref.ref(this.globalObject.bunVM()); + // will unref on socket close + this.socket.close(); + } + + // cleanup requests + while (this.current()) |request| { + switch (request.status) { + // pending: we will fail the request and the stmt will be marked with error.ConnectionClosed too + .pending => { + const stmt = request.statement orelse continue; + stmt.error_response = .{ .postgres_error = AnyPostgresError.ConnectionClosed }; + stmt.status = .failed; + if (js_reason) |reason| { + request.onJSError(reason, this.globalObject); + } else { + request.onError(.{ .postgres_error = AnyPostgresError.ConnectionClosed }, this.globalObject); + } + }, + // in the middle of running + .binding, + .running, + .partial_response, + => { + if (js_reason) |reason| { + request.onJSError(reason, this.globalObject); + } else { + request.onError(.{ .postgres_error = AnyPostgresError.ConnectionClosed }, this.globalObject); + } + }, + // just ignore success and fail cases + .success, .fail => {}, + } + request.deref(); + this.requests.discard(1); + } +} + +pub fn disconnect(this: *@This()) void { + this.stopTimers(); + + if (this.status == .connected) { + this.status = .disconnected; + this.refAndClose(null); + } +} + +fn current(this: *PostgresSQLConnection) ?*PostgresSQLQuery { + if (this.requests.readableLength() == 0) { + return null; + } + + return this.requests.peekItem(0); +} + +pub fn hasQueryRunning(this: *PostgresSQLConnection) bool { + return !this.flags.is_ready_for_query or this.current() != null; +} + +pub const Writer = struct { + connection: *PostgresSQLConnection, + + pub fn write(this: Writer, data: []const u8) AnyPostgresError!void { + var buffer = &this.connection.write_buffer; + try buffer.write(bun.default_allocator, data); + } + + pub fn pwrite(this: Writer, data: []const u8, index: usize) AnyPostgresError!void { + @memcpy(this.connection.write_buffer.byte_list.slice()[index..][0..data.len], data); + } + + pub fn offset(this: Writer) usize { + return this.connection.write_buffer.len(); + } +}; + +pub fn writer(this: *PostgresSQLConnection) protocol.NewWriter(Writer) { + return .{ + .wrapped = .{ + .connection = this, + }, + }; +} + +pub const Reader = struct { + connection: *PostgresSQLConnection, + + pub fn markMessageStart(this: Reader) void { + this.connection.last_message_start = this.connection.read_buffer.head; + } + + pub const ensureLength = ensureCapacity; + + pub fn peek(this: Reader) []const u8 { + return this.connection.read_buffer.remaining(); + } + pub fn skip(this: Reader, count: usize) void {
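+        // Clamp the new head so it never advances past the end of the buffer.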
this.connection.read_buffer.head = @min(this.connection.read_buffer.head + @as(u32, @truncate(count)), this.connection.read_buffer.byte_list.len); + } + pub fn ensureCapacity(this: Reader, count: usize) bool { + return @as(usize, this.connection.read_buffer.head) + count <= @as(usize, this.connection.read_buffer.byte_list.len); + } + pub fn read(this: Reader, count: usize) AnyPostgresError!Data { + var remaining = this.connection.read_buffer.remaining(); + if (@as(usize, remaining.len) < count) { + return error.ShortRead; + } + + this.skip(count); + return Data{ + .temporary = remaining[0..count], + }; + } + pub fn readZ(this: Reader) AnyPostgresError!Data { + const remain = this.connection.read_buffer.remaining(); + + if (bun.strings.indexOfChar(remain, 0)) |zero| { + this.skip(zero + 1); + return Data{ + .temporary = remain[0..zero], + }; + } + + return error.ShortRead; + } +}; + +pub fn bufferedReader(this: *PostgresSQLConnection) protocol.NewReader(Reader) { + return .{ + .wrapped = .{ .connection = this }, + }; +} + +fn advance(this: *PostgresSQLConnection) !void { + while (this.requests.readableLength() > 0) { + var req: *PostgresSQLQuery = this.requests.peekItem(0); + switch (req.status) { + .pending => { + if (req.flags.simple) { + debug("executeQuery", .{}); + var query_str = req.query.toUTF8(bun.default_allocator); + defer query_str.deinit(); + PostgresRequest.executeQuery(query_str.slice(), PostgresSQLConnection.Writer, this.writer()) catch |err| { + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + req.deref(); + this.requests.discard(1); + + continue; + }; + this.flags.is_ready_for_query = false; + req.status = .running; + return; + } else { + const stmt = req.statement orelse return error.ExpectedStatement; + + switch (stmt.status) { + .failed => { + bun.assert(stmt.error_response != null); + req.onError(stmt.error_response.?, this.globalObject); + req.deref(); + this.requests.discard(1); + + continue; + }, + .prepared => { + const thisValue = req.thisValue.get(); + bun.assert(thisValue != .zero); + const binding_value = PostgresSQLQuery.js.bindingGetCached(thisValue) orelse .zero; + const columns_value = PostgresSQLQuery.js.columnsGetCached(thisValue) orelse .zero; + req.flags.binary = stmt.fields.len > 0; + + PostgresRequest.bindAndExecute(this.globalObject, stmt, binding_value, columns_value, PostgresSQLConnection.Writer, this.writer()) catch |err| { + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + req.deref(); + this.requests.discard(1); + + continue; + }; + this.flags.is_ready_for_query = false; + req.status = .binding; + return; + }, + .pending => { + // the statement is pending; let's write/parse it + var query_str = req.query.toUTF8(bun.default_allocator); + defer query_str.deinit(); + const has_params = stmt.signature.fields.len > 0; + // If it does not have params, we can write and execute immediately in one go + if (!has_params) { + const thisValue = req.thisValue.get(); + bun.assert(thisValue != .zero); + // prepareAndQueryWithSignature will write + bind + execute; it will change to running after binding is complete + const binding_value = PostgresSQLQuery.js.bindingGetCached(thisValue) orelse .zero; + PostgresRequest.prepareAndQueryWithSignature(this.globalObject, query_str.slice(), binding_value, PostgresSQLConnection.Writer, this.writer(), &stmt.signature) catch |err| { + stmt.status = .failed; + stmt.error_response = .{ .postgres_error = err }; + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + req.deref(); +
this.requests.discard(1); + + continue; + }; + this.flags.is_ready_for_query = false; + req.status = .binding; + stmt.status = .parsing; + + return; + } + const connection_writer = this.writer(); + // write query and wait for it to be prepared + PostgresRequest.writeQuery(query_str.slice(), stmt.signature.prepared_statement_name, stmt.signature.fields, PostgresSQLConnection.Writer, connection_writer) catch |err| { + stmt.error_response = .{ .postgres_error = err }; + stmt.status = .failed; + + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + req.deref(); + this.requests.discard(1); + + continue; + }; + connection_writer.write(&protocol.Sync) catch |err| { + stmt.error_response = .{ .postgres_error = err }; + stmt.status = .failed; + + req.onWriteFail(err, this.globalObject, this.getQueriesArray()); + req.deref(); + this.requests.discard(1); + + continue; + }; + this.flags.is_ready_for_query = false; + stmt.status = .parsing; + return; + }, + .parsing => { + // we are still parsing, let's wait for it to be prepared or to fail + return; + }, + } + } + }, + + .running, .binding, .partial_response => { + // if we are binding it will switch to running immediately + // if we are running, we need to wait for it to succeed or fail + return; + }, + .success, .fail => { + req.deref(); + this.requests.discard(1); + continue; + }, + } + } +} + +pub fn getQueriesArray(this: *const PostgresSQLConnection) JSValue { + return js.queriesGetCached(this.js_value) orelse .zero; +} + +pub fn on(this: *PostgresSQLConnection, comptime MessageType: @Type(.enum_literal), comptime Context: type, reader: protocol.NewReader(Context)) AnyPostgresError!void { + debug("on({s})", .{@tagName(MessageType)}); + + switch (comptime MessageType) { + .DataRow => { + const request = this.current() orelse return error.ExpectedRequest; + var statement = request.statement orelse return error.ExpectedStatement; + var structure: JSValue = .js_undefined; + var cached_structure: ?PostgresCachedStructure = null; + // explicitly use a switch without else so that if new modes are added, we don't forget to check for duplicate fields + switch (request.flags.result_mode) { + .objects => { + cached_structure = statement.structure(this.js_value, this.globalObject); + structure = cached_structure.?.jsValue() orelse .js_undefined; + }, + .raw, .values => { + // no need to check for duplicate fields or structure + }, + } + + var putter = DataCell.Putter{ + .list = &.{}, + .fields = statement.fields, + .binary = request.flags.binary, + .bigint = request.flags.bigint, + .globalObject = this.globalObject, + }; + + var stack_buf: [70]DataCell = undefined; + var cells: []DataCell = stack_buf[0..@min(statement.fields.len, JSC.JSObject.maxInlineCapacity())]; + var free_cells = false; + defer { + for (cells[0..putter.count]) |*cell| { + cell.deinit(); + } + if (free_cells) bun.default_allocator.free(cells); + } + + if (statement.fields.len >= JSC.JSObject.maxInlineCapacity()) { + cells = try bun.default_allocator.alloc(DataCell, statement.fields.len); + free_cells = true; + } + // make sure all cells are reset; if the reader breaks short, the fields will just be null, which is better than undefined behavior + @memset(cells, DataCell{ .tag = .null, .value = .{ .null = 0 } }); + putter.list = cells; + + if (request.flags.result_mode == .raw) { + try protocol.DataRow.decode( + &putter, + Context, + reader, + DataCell.Putter.putRaw, + ); + } else { + try protocol.DataRow.decode( + &putter, + Context, + reader, + DataCell.Putter.put, + ); + } + const
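+ // decoded cells are converted to JS below and accumulated on the query's cached
+ // pendingValue array; the cache slot is only set when it was previously empty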
thisValue = request.thisValue.get(); + bun.assert(thisValue != .zero); + const pending_value = PostgresSQLQuery.js.pendingValueGetCached(thisValue) orelse .zero; + pending_value.ensureStillAlive(); + const result = putter.toJS(this.globalObject, pending_value, structure, statement.fields_flags, request.flags.result_mode, cached_structure); + + if (pending_value == .zero) { + PostgresSQLQuery.js.pendingValueSetCached(thisValue, this.globalObject, result); + } + }, + .CopyData => { + var copy_data: protocol.CopyData = undefined; + try copy_data.decodeInternal(Context, reader); + copy_data.data.deinit(); + }, + .ParameterStatus => { + var parameter_status: protocol.ParameterStatus = undefined; + try parameter_status.decodeInternal(Context, reader); + defer { + parameter_status.deinit(); + } + try this.backend_parameters.insert(parameter_status.name.slice(), parameter_status.value.slice()); + }, + .ReadyForQuery => { + var ready_for_query: protocol.ReadyForQuery = undefined; + try ready_for_query.decodeInternal(Context, reader); + + this.setStatus(.connected); + this.flags.is_ready_for_query = true; + this.socket.setTimeout(300); + defer this.updateRef(); + + if (this.current()) |request| { + if (request.status == .partial_response) { + // if it is a partial response, just signal that the query is now complete + request.onResult("", this.globalObject, this.js_value, true); + } + } + try this.advance(); + + this.flushData(); + }, + .CommandComplete => { + var request = this.current() orelse return error.ExpectedRequest; + + var cmd: protocol.CommandComplete = undefined; + try cmd.decodeInternal(Context, reader); + defer { + cmd.deinit(); + } + debug("-> {s}", .{cmd.command_tag.slice()}); + defer this.updateRef(); + + if (request.flags.simple) { + // simple queries can have multiple commands + request.onResult(cmd.command_tag.slice(), this.globalObject, this.js_value, false); + } else { + request.onResult(cmd.command_tag.slice(), this.globalObject, this.js_value, true); + } + }, + .BindComplete => { + try reader.eatMessage(protocol.BindComplete); + var request = this.current() orelse return error.ExpectedRequest; + if (request.status == .binding) { + request.status = .running; + } + }, + .ParseComplete => { + try reader.eatMessage(protocol.ParseComplete); + const request = this.current() orelse return error.ExpectedRequest; + if (request.statement) |statement| { + // if we have params, wait for the parameter description + if (statement.status == .parsing and statement.signature.fields.len == 0) { + statement.status = .prepared; + } + } + }, + .ParameterDescription => { + var description: protocol.ParameterDescription = undefined; + try description.decodeInternal(Context, reader); + const request = this.current() orelse return error.ExpectedRequest; + var statement = request.statement orelse return error.ExpectedStatement; + statement.parameters = description.parameters; + if (statement.status == .parsing) { + statement.status = .prepared; + } + }, + .RowDescription => { + var description: protocol.RowDescription = undefined; + try description.decodeInternal(Context, reader); + errdefer description.deinit(); + const request = this.current() orelse return error.ExpectedRequest; + var statement = request.statement orelse return error.ExpectedStatement; + statement.fields = description.fields; + }, + .Authentication => { + var auth: protocol.Authentication = undefined; + try auth.decodeInternal(Context, reader); + defer auth.deinit(); + + switch (auth) { + .SASL => { + if (this.authentication_state !=
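+ // SCRAM-SHA-256 exchange, summarized for orientation (per RFC 5802; the code
+ // below is the source of truth):
+ //   client-first:  "n,,n=*,r=<client-nonce>"
+ //   server-first:  "r=<combined-nonce>,s=<salt-base64>,i=<iterations>"  (SASLContinue)
+ //   client-final:  "c=biws,r=<combined-nonce>,p=<ClientProof>"
+ //   ClientProof = ClientKey XOR HMAC-SHA-256(SHA-256(ClientKey), AuthMessage)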
.SASL) { + this.authentication_state = .{ .SASL = .{} }; + } + + var mechanism_buf: [128]u8 = undefined; + const mechanism = std.fmt.bufPrintZ(&mechanism_buf, "n,,n=*,r={s}", .{this.authentication_state.SASL.nonce()}) catch unreachable; + var response = protocol.SASLInitialResponse{ + .mechanism = .{ + .temporary = "SCRAM-SHA-256", + }, + .data = .{ + .temporary = mechanism, + }, + }; + + try response.writeInternal(PostgresSQLConnection.Writer, this.writer()); + debug("SASL", .{}); + this.flushData(); + }, + .SASLContinue => |*cont| { + if (this.authentication_state != .SASL) { + debug("Unexpected SASLContinue for authentication state: {s}", .{@tagName(std.meta.activeTag(this.authentication_state))}); + return error.UnexpectedMessage; + } + var sasl = &this.authentication_state.SASL; + + if (sasl.status != .init) { + debug("Unexpected SASLContinue for SASL state: {s}", .{@tagName(sasl.status)}); + return error.UnexpectedMessage; + } + debug("SASLContinue", .{}); + + const iteration_count = try cont.iterationCount(); + + const server_salt_decoded_base64 = bun.base64.decodeAlloc(bun.z_allocator, cont.s) catch |err| { + return switch (err) { + error.DecodingFailed => error.SASL_SIGNATURE_INVALID_BASE64, + else => |e| e, + }; + }; + defer bun.z_allocator.free(server_salt_decoded_base64); + try sasl.computeSaltedPassword(server_salt_decoded_base64, iteration_count, this); + + const auth_string = try std.fmt.allocPrint( + bun.z_allocator, + "n=*,r={s},r={s},s={s},i={s},c=biws,r={s}", + .{ + sasl.nonce(), + cont.r, + cont.s, + cont.i, + cont.r, + }, + ); + defer bun.z_allocator.free(auth_string); + try sasl.computeServerSignature(auth_string); + + const client_key = sasl.clientKey(); + const client_key_signature = sasl.clientKeySignature(&client_key, auth_string); + var client_key_xor_buffer: [32]u8 = undefined; + for (&client_key_xor_buffer, client_key, client_key_signature) |*out, a, b| { + out.* = a ^ b; + } + + var client_key_xor_base64_buf = std.mem.zeroes([bun.base64.encodeLenFromSize(32)]u8); + const xor_base64_len = bun.base64.encode(&client_key_xor_base64_buf, &client_key_xor_buffer); + + const payload = try std.fmt.allocPrint( + bun.z_allocator, + "c=biws,r={s},p={s}", + .{ cont.r, client_key_xor_base64_buf[0..xor_base64_len] }, + ); + defer bun.z_allocator.free(payload); + + var response = protocol.SASLResponse{ + .data = .{ + .temporary = payload, + }, + }; + + try response.writeInternal(PostgresSQLConnection.Writer, this.writer()); + sasl.status = .@"continue"; + this.flushData(); + }, + .SASLFinal => |final| { + if (this.authentication_state != .SASL) { + debug("SASLFinal - Unexpected SASLFinal for authentication state: {s}", .{@tagName(std.meta.activeTag(this.authentication_state))}); + return error.UnexpectedMessage; + } + var sasl = &this.authentication_state.SASL; + + if (sasl.status != .@"continue") { + debug("SASLFinal - Unexpected SASLFinal for SASL state: {s}", .{@tagName(sasl.status)}); + return error.UnexpectedMessage; + } + + if (sasl.server_signature_len == 0) { + debug("SASLFinal - Server signature is empty", .{}); + return error.UnexpectedMessage; + } + + const server_signature = sasl.serverSignature(); + + // This will usually start with "v=" + const comparison_signature = final.data.slice(); + + if (comparison_signature.len < 2 or !bun.strings.eqlLong(server_signature, comparison_signature[2..], true)) { + debug("SASLFinal - SASL Server signature mismatch\nExpected: {s}\nActual: {s}", .{ server_signature, comparison_signature[2..]
}); + this.fail("The server did not return the correct signature", error.SASL_SIGNATURE_MISMATCH); + } else { + debug("SASLFinal - SASL Server signature match", .{}); + this.authentication_state.zero(); + } + }, + .Ok => { + debug("Authentication OK", .{}); + this.authentication_state.zero(); + this.authentication_state = .{ .ok = {} }; + }, + + .Unknown => { + this.fail("Unknown authentication method", error.UNKNOWN_AUTHENTICATION_METHOD); + }, + + .ClearTextPassword => { + debug("ClearTextPassword", .{}); + var response = protocol.PasswordMessage{ + .password = .{ + .temporary = this.password, + }, + }; + + try response.writeInternal(PostgresSQLConnection.Writer, this.writer()); + this.flushData(); + }, + + .MD5Password => |md5| { + debug("MD5Password", .{}); + // Format is: md5 + md5(md5(password + username) + salt) + var first_hash_buf: bun.sha.MD5.Digest = undefined; + var first_hash_str: [32]u8 = undefined; + var final_hash_buf: bun.sha.MD5.Digest = undefined; + var final_hash_str: [32]u8 = undefined; + var final_password_buf: [36]u8 = undefined; + + // First hash: md5(password + username) + var first_hasher = bun.sha.MD5.init(); + first_hasher.update(this.password); + first_hasher.update(this.user); + first_hasher.final(&first_hash_buf); + const first_hash_str_output = std.fmt.bufPrint(&first_hash_str, "{x}", .{std.fmt.fmtSliceHexLower(&first_hash_buf)}) catch unreachable; + + // Second hash: md5(first_hash + salt) + var final_hasher = bun.sha.MD5.init(); + final_hasher.update(first_hash_str_output); + final_hasher.update(&md5.salt); + final_hasher.final(&final_hash_buf); + const final_hash_str_output = std.fmt.bufPrint(&final_hash_str, "{x}", .{std.fmt.fmtSliceHexLower(&final_hash_buf)}) catch unreachable; + + // Format final password as "md5" + final_hash + const final_password = std.fmt.bufPrintZ(&final_password_buf, "md5{s}", .{final_hash_str_output}) catch unreachable; + + var response = protocol.PasswordMessage{ + .password = .{ + .temporary = final_password, + }, + }; + + this.authentication_state = .{ .md5 = {} }; + try response.writeInternal(PostgresSQLConnection.Writer, this.writer()); + this.flushData(); + }, + + else => { + debug("TODO auth: {s}", .{@tagName(std.meta.activeTag(auth))}); + this.fail("TODO: support authentication method: {s}", error.UNSUPPORTED_AUTHENTICATION_METHOD); + }, + } + }, + .NoData => { + try reader.eatMessage(protocol.NoData); + var request = this.current() orelse return error.ExpectedRequest; + if (request.status == .binding) { + request.status = .running; + } + }, + .BackendKeyData => { + try this.backend_key_data.decodeInternal(Context, reader); + }, + .ErrorResponse => { + var err: protocol.ErrorResponse = undefined; + try err.decodeInternal(Context, reader); + + if (this.status == .connecting or this.status == .sent_startup_message) { + defer { + err.deinit(); + } + + this.failWithJSValue(err.toJS(this.globalObject)); + + // it shouldn't enqueue any requests while connecting + bun.assert(this.requests.count == 0); + return; + } + + var request = this.current() orelse { + debug("ErrorResponse: {}", .{err}); + return error.ExpectedRequest; + }; + var is_error_owned = true; + defer { + if (is_error_owned) { + err.deinit(); + } + } + if (request.statement) |stmt| { + if (stmt.status == PostgresSQLStatement.Status.parsing) { + stmt.status = PostgresSQLStatement.Status.failed; + stmt.error_response = .{ .protocol = err }; + is_error_owned = false; + if (this.statements.remove(bun.hash(stmt.signature.name))) { + stmt.deref(); + } + } + } + 
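+ // note: when the statement failed mid-parse, ownership of `err` was moved into
+ // stmt.error_response above (is_error_owned = false) and the statement was evicted
+ // from the cache so a later retry can re-prepare it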
this.updateRef(); + + request.onError(.{ .protocol = err }, this.globalObject); + }, + .PortalSuspended => { + // try reader.eatMessage(&protocol.PortalSuspended); + // var request = this.current() orelse return error.ExpectedRequest; + // _ = request; + debug("TODO PortalSuspended", .{}); + }, + .CloseComplete => { + try reader.eatMessage(protocol.CloseComplete); + var request = this.current() orelse return error.ExpectedRequest; + defer this.updateRef(); + if (request.flags.simple) { + request.onResult("CLOSECOMPLETE", this.globalObject, this.js_value, false); + } else { + request.onResult("CLOSECOMPLETE", this.globalObject, this.js_value, true); + } + }, + .CopyInResponse => { + debug("TODO CopyInResponse", .{}); + }, + .NoticeResponse => { + debug("UNSUPPORTED NoticeResponse", .{}); + var resp: protocol.NoticeResponse = undefined; + + try resp.decodeInternal(Context, reader); + resp.deinit(); + }, + .EmptyQueryResponse => { + try reader.eatMessage(protocol.EmptyQueryResponse); + var request = this.current() orelse return error.ExpectedRequest; + defer this.updateRef(); + if (request.flags.simple) { + request.onResult("", this.globalObject, this.js_value, false); + } else { + request.onResult("", this.globalObject, this.js_value, true); + } + }, + .CopyOutResponse => { + debug("TODO CopyOutResponse", .{}); + }, + .CopyDone => { + debug("TODO CopyDone", .{}); + }, + .CopyBothResponse => { + debug("TODO CopyBothResponse", .{}); + }, + else => @compileError("Unknown message type: " ++ @tagName(MessageType)), + } +} + +pub fn updateRef(this: *PostgresSQLConnection) void { + this.updateHasPendingActivity(); + if (this.pending_activity_count.raw > 0) { + this.poll_ref.ref(this.globalObject.bunVM()); + } else { + this.poll_ref.unref(this.globalObject.bunVM()); + } +} + +pub fn getConnected(this: *PostgresSQLConnection, _: *JSC.JSGlobalObject) JSValue { + return JSValue.jsBoolean(this.status == Status.connected); +} + +pub fn consumeOnConnectCallback(this: *const PostgresSQLConnection, globalObject: *JSC.JSGlobalObject) ?JSC.JSValue { + debug("consumeOnConnectCallback", .{}); + const on_connect = js.onconnectGetCached(this.js_value) orelse return null; + debug("consumeOnConnectCallback exists", .{}); + + js.onconnectSetCached(this.js_value, globalObject, .zero); + return on_connect; +} + +pub fn consumeOnCloseCallback(this: *const PostgresSQLConnection, globalObject: *JSC.JSGlobalObject) ?JSC.JSValue { + debug("consumeOnCloseCallback", .{}); + const on_close = js.oncloseGetCached(this.js_value) orelse return null; + debug("consumeOnCloseCallback exists", .{}); + js.oncloseSetCached(this.js_value, globalObject, .zero); + return on_close; +} + +const PreparedStatementsMap = std.HashMapUnmanaged(u64, *PostgresSQLStatement, bun.IdentityContext(u64), 80); + +const debug = bun.Output.scoped(.Postgres, false); + +// @sortImports + +const PostgresCachedStructure = @import("./PostgresCachedStructure.zig"); +const PostgresRequest = @import("./PostgresRequest.zig"); +const PostgresSQLConnection = @This(); +const PostgresSQLQuery = @import("./PostgresSQLQuery.zig"); +const PostgresSQLStatement = @import("./PostgresSQLStatement.zig"); +const SocketMonitor = @import("./SocketMonitor.zig"); +const protocol = @import("./PostgresProtocol.zig"); +const std = @import("std"); +const AuthenticationState = @import("./AuthenticationState.zig").AuthenticationState; +const ConnectionFlags = @import("./ConnectionFlags.zig").ConnectionFlags; +const Data = @import("./Data.zig").Data; +const DataCell = 
@import("./DataCell.zig").DataCell; +const SSLMode = @import("./SSLMode.zig").SSLMode; +const Status = @import("./Status.zig").Status; +const TLSStatus = @import("./TLSStatus.zig").TLSStatus; + +const AnyPostgresError = @import("./AnyPostgresError.zig").AnyPostgresError; +const postgresErrorToJS = @import("./AnyPostgresError.zig").postgresErrorToJS; + +const bun = @import("bun"); +const BoringSSL = bun.BoringSSL; +const assert = bun.assert; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; + +pub const js = JSC.Codegen.JSPostgresSQLConnection; +pub const fromJS = js.fromJS; +pub const fromJSDirect = js.fromJSDirect; +pub const toJS = js.toJS; + +const uws = bun.uws; +const Socket = uws.AnySocket; diff --git a/src/sql/postgres/PostgresSQLContext.zig b/src/sql/postgres/PostgresSQLContext.zig new file mode 100644 index 0000000000..35ecc7f46e --- /dev/null +++ b/src/sql/postgres/PostgresSQLContext.zig @@ -0,0 +1,23 @@ +tcp: ?*uws.SocketContext = null, + +onQueryResolveFn: JSC.Strong.Optional = .empty, +onQueryRejectFn: JSC.Strong.Optional = .empty, + +pub fn init(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + var ctx = &globalObject.bunVM().rareData().postgresql_context; + ctx.onQueryResolveFn.set(globalObject, callframe.argument(0)); + ctx.onQueryRejectFn.set(globalObject, callframe.argument(1)); + + return .js_undefined; +} + +comptime { + const js_init = JSC.toJSHostFn(init); + @export(&js_init, .{ .name = "PostgresSQLContext__init" }); +} + +// @sortImports + +const bun = @import("bun"); +const JSC = bun.JSC; +const uws = bun.uws; diff --git a/src/sql/postgres/PostgresSQLQuery.zig b/src/sql/postgres/PostgresSQLQuery.zig new file mode 100644 index 0000000000..3aaa4d3920 --- /dev/null +++ b/src/sql/postgres/PostgresSQLQuery.zig @@ -0,0 +1,499 @@ +statement: ?*PostgresSQLStatement = null, +query: bun.String = bun.String.empty, +cursor_name: bun.String = bun.String.empty, + +thisValue: JSRef = JSRef.empty(), + +status: Status = Status.pending, + +ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(1), + +flags: packed struct(u8) { + is_done: bool = false, + binary: bool = false, + bigint: bool = false, + simple: bool = false, + result_mode: PostgresSQLQueryResultMode = .objects, + _padding: u2 = 0, +} = .{}, + +pub fn getTarget(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, clean_target: bool) JSC.JSValue { + const thisValue = this.thisValue.get(); + if (thisValue == .zero) { + return .zero; + } + const target = js.targetGetCached(thisValue) orelse return .zero; + if (clean_target) { + js.targetSetCached(thisValue, globalObject, .zero); + } + return target; +} + +pub const Status = enum(u8) { + /// The query was just enqueued, statement status can be checked for more details + pending, + /// The query is being bound to the statement + binding, + /// The query is running + running, + /// The query is waiting for a partial response + partial_response, + /// The query was successful + success, + /// The query failed + fail, + + pub fn isRunning(this: Status) bool { + return @intFromEnum(this) > @intFromEnum(Status.pending) and @intFromEnum(this) < @intFromEnum(Status.success); + } +}; + +pub fn hasPendingActivity(this: *@This()) bool { + return this.ref_count.load(.monotonic) > 1; +} + +pub fn deinit(this: *@This()) void { + this.thisValue.deinit(); + if (this.statement) |statement| { + statement.deref(); + } + this.query.deref(); + this.cursor_name.deref(); + bun.default_allocator.destroy(this); +} + +pub fn finalize(this: 
*@This()) void { + debug("PostgresSQLQuery finalize", .{}); + if (this.thisValue == .weak) { + // clean up if it is a weak reference; if it is a strong reference we need to wait until the query is done + // if we are a strong reference, this is probably a bug because GC should not happen here + this.thisValue.weak = .zero; + } + this.deref(); +} + +pub fn deref(this: *@This()) void { + const ref_count = this.ref_count.fetchSub(1, .monotonic); + + if (ref_count == 1) { + this.deinit(); + } +} + +pub fn ref(this: *@This()) void { + bun.assert(this.ref_count.fetchAdd(1, .monotonic) > 0); +} + +pub fn onWriteFail( + this: *@This(), + err: AnyPostgresError, + globalObject: *JSC.JSGlobalObject, + queries_array: JSValue, +) void { + this.status = .fail; + const thisValue = this.thisValue.get(); + defer this.thisValue.deinit(); + const targetValue = this.getTarget(globalObject, true); + if (thisValue == .zero or targetValue == .zero) { + return; + } + + const vm = JSC.VirtualMachine.get(); + const function = vm.rareData().postgresql_context.onQueryRejectFn.get().?; + const event_loop = vm.eventLoop(); + event_loop.runCallback(function, globalObject, thisValue, &.{ + targetValue, + postgresErrorToJS(globalObject, null, err), + queries_array, + }); +} +pub fn onJSError(this: *@This(), err: JSC.JSValue, globalObject: *JSC.JSGlobalObject) void { + this.status = .fail; + this.ref(); + defer this.deref(); + + const thisValue = this.thisValue.get(); + defer this.thisValue.deinit(); + const targetValue = this.getTarget(globalObject, true); + if (thisValue == .zero or targetValue == .zero) { + return; + } + + var vm = JSC.VirtualMachine.get(); + const function = vm.rareData().postgresql_context.onQueryRejectFn.get().?; + const event_loop = vm.eventLoop(); + event_loop.runCallback(function, globalObject, thisValue, &.{ + targetValue, + err, + }); +} +pub fn onError(this: *@This(), err: PostgresSQLStatement.Error, globalObject: *JSC.JSGlobalObject) void { + this.onJSError(err.toJS(globalObject), globalObject); +} + +pub fn allowGC(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) void { + if (thisValue == .zero) { + return; + } + + defer thisValue.ensureStillAlive(); + js.bindingSetCached(thisValue, globalObject, .zero); + js.pendingValueSetCached(thisValue, globalObject, .zero); + js.targetSetCached(thisValue, globalObject, .zero); +} + +fn consumePendingValue(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) ?JSValue { + const pending_value = js.pendingValueGetCached(thisValue) orelse return null; + js.pendingValueSetCached(thisValue, globalObject, .zero); + return pending_value; +} + +pub fn onResult(this: *@This(), command_tag_str: []const u8, globalObject: *JSC.JSGlobalObject, connection: JSC.JSValue, is_last: bool) void { + this.ref(); + defer this.deref(); + + const thisValue = this.thisValue.get(); + const targetValue = this.getTarget(globalObject, is_last); + if (is_last) { + this.status = .success; + } else { + this.status = .partial_response; + } + defer if (is_last) { + allowGC(thisValue, globalObject); + this.thisValue.deinit(); + }; + if (thisValue == .zero or targetValue == .zero) { + return; + } + + const vm = JSC.VirtualMachine.get(); + const function = vm.rareData().postgresql_context.onQueryResolveFn.get().?; + const event_loop = vm.eventLoop(); + const tag = CommandTag.init(command_tag_str); + + event_loop.runCallback(function, globalObject, thisValue, &.{ + targetValue, + consumePendingValue(thisValue, globalObject) orelse .js_undefined, + tag.toJSTag(globalObject), +
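+ // remaining resolve-callback arguments (order inferred from this call site):
+ // the numeric command tag, the connection's cached queries array, and is_last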
tag.toJSNumber(), + if (connection == .zero) .js_undefined else PostgresSQLConnection.js.queriesGetCached(connection) orelse .js_undefined, + JSValue.jsBoolean(is_last), + }); +} + +pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*PostgresSQLQuery { + _ = callframe; + return globalThis.throw("PostgresSQLQuery cannot be constructed directly", .{}); +} + +pub fn estimatedSize(this: *PostgresSQLQuery) usize { + _ = this; + return @sizeOf(PostgresSQLQuery); +} + +pub fn call(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const arguments = callframe.arguments_old(6).slice(); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const query = args.nextEat() orelse { + return globalThis.throw("query must be a string", .{}); + }; + const values = args.nextEat() orelse { + return globalThis.throw("values must be an array", .{}); + }; + + if (!query.isString()) { + return globalThis.throw("query must be a string", .{}); + } + + if (values.jsType() != .Array) { + return globalThis.throw("values must be an array", .{}); + } + + const pending_value: JSValue = args.nextEat() orelse .js_undefined; + const columns: JSValue = args.nextEat() orelse .js_undefined; + const js_bigint: JSValue = args.nextEat() orelse .false; + const js_simple: JSValue = args.nextEat() orelse .false; + + const bigint = js_bigint.isBoolean() and js_bigint.asBoolean(); + const simple = js_simple.isBoolean() and js_simple.asBoolean(); + if (simple) { + if (try values.getLength(globalThis) > 0) { + return globalThis.throwInvalidArguments("simple query cannot have parameters", .{}); + } + if (try query.getLength(globalThis) >= std.math.maxInt(i32)) { + return globalThis.throwInvalidArguments("query is too long", .{}); + } + } + if (!pending_value.jsType().isArrayLike()) { + return globalThis.throwInvalidArgumentType("query", "pendingValue", "Array"); + } + + var ptr = try bun.default_allocator.create(PostgresSQLQuery); + + const this_value = ptr.toJS(globalThis); + this_value.ensureStillAlive(); + + ptr.* = .{ + .query = try query.toBunString(globalThis), + .thisValue = JSRef.initWeak(this_value), + .flags = .{ + .bigint = bigint, + .simple = simple, + }, + }; + + js.bindingSetCached(this_value, globalThis, values); + js.pendingValueSetCached(this_value, globalThis, pending_value); + if (!columns.isUndefined()) { + js.columnsSetCached(this_value, globalThis, columns); + } + + return this_value; +} + +pub fn push(this: *PostgresSQLQuery, globalThis: *JSC.JSGlobalObject, value: JSValue) void { + var pending_value = this.pending_value.get() orelse return; + pending_value.push(globalThis, value); +} + +pub fn doDone(this: *@This(), globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { + _ = globalObject; + this.flags.is_done = true; + return .js_undefined; +} +pub fn setPendingValue(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const result = callframe.argument(0); + js.pendingValueSetCached(this.thisValue.get(), globalObject, result); + return .js_undefined; +} +pub fn setMode(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const js_mode = callframe.argument(0); + if (js_mode.isEmptyOrUndefinedOrNull() or !js_mode.isNumber()) { + return globalObject.throwInvalidArgumentType("setMode", "mode", "Number"); + } + + const mode = try js_mode.coerce(i32, 
globalObject); + this.flags.result_mode = std.meta.intToEnum(PostgresSQLQueryResultMode, mode) catch { + return globalObject.throwInvalidArgumentTypeValue("mode", "Number", js_mode); + }; + return .js_undefined; +} + +pub fn doRun(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + var arguments_ = callframe.arguments_old(2); + const arguments = arguments_.slice(); + const connection: *PostgresSQLConnection = arguments[0].as(PostgresSQLConnection) orelse { + return globalObject.throw("connection must be a PostgresSQLConnection", .{}); + }; + + connection.poll_ref.ref(globalObject.bunVM()); + var query = arguments[1]; + + if (!query.isObject()) { + return globalObject.throwInvalidArgumentType("run", "query", "Query"); + } + + const this_value = callframe.this(); + const binding_value = js.bindingGetCached(this_value) orelse .zero; + var query_str = this.query.toUTF8(bun.default_allocator); + defer query_str.deinit(); + var writer = connection.writer(); + + if (this.flags.simple) { + debug("executeQuery", .{}); + + const can_execute = !connection.hasQueryRunning(); + if (can_execute) { + PostgresRequest.executeQuery(query_str.slice(), PostgresSQLConnection.Writer, writer) catch |err| { + if (!globalObject.hasException()) + return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to execute query", err)); + return error.JSError; + }; + connection.flags.is_ready_for_query = false; + this.status = .running; + } else { + this.status = .pending; + } + const stmt = bun.default_allocator.create(PostgresSQLStatement) catch { + return globalObject.throwOutOfMemory(); + }; + // Query is simple and it's the only owner of the statement + stmt.* = .{ + .signature = Signature.empty(), + .ref_count = 1, + .status = .parsing, + }; + this.statement = stmt; + // We need a strong reference to the query so that it doesn't get GC'd + connection.requests.writeItem(this) catch return globalObject.throwOutOfMemory(); + this.ref(); + this.thisValue.upgrade(globalObject); + + js.targetSetCached(this_value, globalObject, query); + if (this.status == .running) { + connection.flushDataAndResetTimeout(); + } else { + connection.resetConnectionTimeout(); + } + return .js_undefined; + } + + const columns_value: JSValue = js.columnsGetCached(this_value) orelse .js_undefined; + + var signature = Signature.generate(globalObject, query_str.slice(), binding_value, columns_value, connection.prepared_statement_id, connection.flags.use_unnamed_prepared_statements) catch |err| { + if (!globalObject.hasException()) + return globalObject.throwError(err, "failed to generate signature"); + return error.JSError; + }; + + const has_params = signature.fields.len > 0; + var did_write = false; + enqueue: { + var connection_entry_value: ?**PostgresSQLStatement = null; + if (!connection.flags.use_unnamed_prepared_statements) { + const entry = connection.statements.getOrPut(bun.default_allocator, bun.hash(signature.name)) catch |err| { + signature.deinit(); + return globalObject.throwError(err, "failed to allocate statement"); + }; + connection_entry_value = entry.value_ptr; + if (entry.found_existing) { + this.statement = connection_entry_value.?.*; + this.statement.?.ref(); + signature.deinit(); + + switch (this.statement.?.status) { + .failed => { + // If the statement failed, we need to throw the error + return globalObject.throwValue(this.statement.?.error_response.?.toJS(globalObject)); + }, + .prepared => { + if (!connection.hasQueryRunning()) { + this.flags.binary 
= this.statement.?.fields.len > 0; + debug("bindAndExecute", .{}); + + // bindAndExecute will bind + execute, it will change to running after binding is complete + PostgresRequest.bindAndExecute(globalObject, this.statement.?, binding_value, columns_value, PostgresSQLConnection.Writer, writer) catch |err| { + if (!globalObject.hasException()) + return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to bind and execute query", err)); + return error.JSError; + }; + connection.flags.is_ready_for_query = false; + this.status = .binding; + + did_write = true; + } + }, + .parsing, .pending => {}, + } + + break :enqueue; + } + } + const can_execute = !connection.hasQueryRunning(); + + if (can_execute) { + // If it does not have params, we can write and execute immediately in one go + if (!has_params) { + debug("prepareAndQueryWithSignature", .{}); + // prepareAndQueryWithSignature will write + bind + execute, it will change to running after binding is complete + PostgresRequest.prepareAndQueryWithSignature(globalObject, query_str.slice(), binding_value, PostgresSQLConnection.Writer, writer, &signature) catch |err| { + signature.deinit(); + if (!globalObject.hasException()) + return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to prepare and query", err)); + return error.JSError; + }; + connection.flags.is_ready_for_query = false; + this.status = .binding; + did_write = true; + } else { + debug("writeQuery", .{}); + + PostgresRequest.writeQuery(query_str.slice(), signature.prepared_statement_name, signature.fields, PostgresSQLConnection.Writer, writer) catch |err| { + signature.deinit(); + if (!globalObject.hasException()) + return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to write query", err)); + return error.JSError; + }; + writer.write(&protocol.Sync) catch |err| { + signature.deinit(); + if (!globalObject.hasException()) + return globalObject.throwValue(postgresErrorToJS(globalObject, "failed to flush", err)); + return error.JSError; + }; + connection.flags.is_ready_for_query = false; + did_write = true; + } + } + { + const stmt = bun.default_allocator.create(PostgresSQLStatement) catch { + return globalObject.throwOutOfMemory(); + }; + // we only have connection_entry_value if we are using named prepared statements + if (connection_entry_value) |entry_value| { + connection.prepared_statement_id += 1; + stmt.* = .{ .signature = signature, .ref_count = 2, .status = if (can_execute) .parsing else .pending }; + this.statement = stmt; + + entry_value.* = stmt; + } else { + stmt.* = .{ .signature = signature, .ref_count = 1, .status = if (can_execute) .parsing else .pending }; + this.statement = stmt; + } + } + } + // We need a strong reference to the query so that it doesn't get GC'd + connection.requests.writeItem(this) catch return globalObject.throwOutOfMemory(); + this.ref(); + this.thisValue.upgrade(globalObject); + + js.targetSetCached(this_value, globalObject, query); + if (did_write) { + connection.flushDataAndResetTimeout(); + } else { + connection.resetConnectionTimeout(); + } + return .js_undefined; +} + +pub fn doCancel(this: *PostgresSQLQuery, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + _ = callframe; + _ = globalObject; + _ = this; + + return .js_undefined; +} + +comptime { + const jscall = JSC.toJSHostFn(call); + @export(&jscall, .{ .name = "PostgresSQLQuery__createInstance" }); +} + +const debug = bun.Output.scoped(.Postgres, false); + +// @sortImports + +const PostgresRequest = 
@import("./PostgresRequest.zig"); +const PostgresSQLConnection = @import("./PostgresSQLConnection.zig"); +const PostgresSQLQuery = @This(); +const PostgresSQLStatement = @import("./PostgresSQLStatement.zig"); +const Signature = @import("./Signature.zig"); +const bun = @import("bun"); +const protocol = @import("./PostgresProtocol.zig"); +const std = @import("std"); +const CommandTag = @import("./CommandTag.zig").CommandTag; +const PostgresSQLQueryResultMode = @import("./PostgresSQLQueryResultMode.zig").PostgresSQLQueryResultMode; + +const AnyPostgresError = @import("./AnyPostgresError.zig").AnyPostgresError; +const postgresErrorToJS = @import("./AnyPostgresError.zig").postgresErrorToJS; + +const JSC = bun.JSC; +const JSGlobalObject = JSC.JSGlobalObject; +const JSRef = JSC.JSRef; +const JSValue = JSC.JSValue; + +pub const js = JSC.Codegen.JSPostgresSQLQuery; +pub const fromJS = js.fromJS; +pub const fromJSDirect = js.fromJSDirect; +pub const toJS = js.toJS; diff --git a/src/sql/postgres/PostgresSQLQueryResultMode.zig b/src/sql/postgres/PostgresSQLQueryResultMode.zig new file mode 100644 index 0000000000..d8f7c9c444 --- /dev/null +++ b/src/sql/postgres/PostgresSQLQueryResultMode.zig @@ -0,0 +1,7 @@ +pub const PostgresSQLQueryResultMode = enum(u2) { + objects = 0, + values = 1, + raw = 2, +}; + +// @sortImports diff --git a/src/sql/postgres/PostgresSQLStatement.zig b/src/sql/postgres/PostgresSQLStatement.zig new file mode 100644 index 0000000000..cc832e6909 --- /dev/null +++ b/src/sql/postgres/PostgresSQLStatement.zig @@ -0,0 +1,192 @@ +cached_structure: PostgresCachedStructure = .{}, +ref_count: u32 = 1, +fields: []protocol.FieldDescription = &[_]protocol.FieldDescription{}, +parameters: []const int4 = &[_]int4{}, +signature: Signature, +status: Status = Status.pending, +error_response: ?Error = null, +needs_duplicate_check: bool = true, +fields_flags: DataCell.Flags = .{}, + +pub const Error = union(enum) { + protocol: protocol.ErrorResponse, + postgres_error: AnyPostgresError, + + pub fn deinit(this: *@This()) void { + switch (this.*) { + .protocol => |*err| err.deinit(), + .postgres_error => {}, + } + } + + pub fn toJS(this: *const @This(), globalObject: *JSC.JSGlobalObject) JSValue { + return switch (this.*) { + .protocol => |err| err.toJS(globalObject), + .postgres_error => |err| postgresErrorToJS(globalObject, null, err), + }; + } +}; +pub const Status = enum { + pending, + parsing, + prepared, + failed, + + pub fn isRunning(this: @This()) bool { + return this == .parsing; + } +}; +pub fn ref(this: *@This()) void { + bun.assert(this.ref_count > 0); + this.ref_count += 1; +} + +pub fn deref(this: *@This()) void { + const ref_count = this.ref_count; + this.ref_count -= 1; + + if (ref_count == 1) { + this.deinit(); + } +} + +pub fn checkForDuplicateFields(this: *PostgresSQLStatement) void { + if (!this.needs_duplicate_check) return; + this.needs_duplicate_check = false; + + var seen_numbers = std.ArrayList(u32).init(bun.default_allocator); + defer seen_numbers.deinit(); + var seen_fields = bun.StringHashMap(void).init(bun.default_allocator); + seen_fields.ensureUnusedCapacity(@intCast(this.fields.len)) catch bun.outOfMemory(); + defer seen_fields.deinit(); + + // iterate backwards + var remaining = this.fields.len; + var flags: DataCell.Flags = .{}; + while (remaining > 0) { + remaining -= 1; + const field: *protocol.FieldDescription = &this.fields[remaining]; + switch (field.name_or_index) { + .name => |*name| { + const seen = seen_fields.getOrPut(name.slice()) catch unreachable; + if 
(seen.found_existing) { + field.name_or_index = .duplicate; + flags.has_duplicate_columns = true; + } + + flags.has_named_columns = true; + }, + .index => |index| { + if (std.mem.indexOfScalar(u32, seen_numbers.items, index) != null) { + field.name_or_index = .duplicate; + flags.has_duplicate_columns = true; + } else { + seen_numbers.append(index) catch bun.outOfMemory(); + } + + flags.has_indexed_columns = true; + }, + .duplicate => { + flags.has_duplicate_columns = true; + }, + } + } + + this.fields_flags = flags; +} + +pub fn deinit(this: *PostgresSQLStatement) void { + debug("PostgresSQLStatement deinit", .{}); + + bun.assert(this.ref_count == 0); + + for (this.fields) |*field| { + field.deinit(); + } + bun.default_allocator.free(this.fields); + bun.default_allocator.free(this.parameters); + this.cached_structure.deinit(); + if (this.error_response) |err| { + this.error_response = null; + var _error = err; + _error.deinit(); + } + this.signature.deinit(); + bun.default_allocator.destroy(this); +} + +pub fn structure(this: *PostgresSQLStatement, owner: JSValue, globalObject: *JSC.JSGlobalObject) PostgresCachedStructure { + if (this.cached_structure.has()) { + return this.cached_structure; + } + this.checkForDuplicateFields(); + + // let's avoid most allocations + var stack_ids: [70]JSC.JSObject.ExternColumnIdentifier = undefined; + // let's deduplicate the fields early + var nonDuplicatedCount = this.fields.len; + for (this.fields) |*field| { + if (field.name_or_index == .duplicate) { + nonDuplicatedCount -= 1; + } + } + const ids = if (nonDuplicatedCount <= JSC.JSObject.maxInlineCapacity()) stack_ids[0..nonDuplicatedCount] else bun.default_allocator.alloc(JSC.JSObject.ExternColumnIdentifier, nonDuplicatedCount) catch bun.outOfMemory(); + + var i: usize = 0; + for (this.fields) |*field| { + if (field.name_or_index == .duplicate) continue; + + var id: *JSC.JSObject.ExternColumnIdentifier = &ids[i]; + switch (field.name_or_index) { + .name => |name| { + id.value.name = String.createAtomIfPossible(name.slice()); + }, + .index => |index| { + id.value.index = index; + }, + .duplicate => unreachable, + } + id.tag = switch (field.name_or_index) { + .name => 2, + .index => 1, + .duplicate => 0, + }; + i += 1; + } + + if (nonDuplicatedCount > JSC.JSObject.maxInlineCapacity()) { + this.cached_structure.set(globalObject, null, ids); + } else { + this.cached_structure.set(globalObject, JSC.JSObject.createStructure( + globalObject, + owner, + @truncate(ids.len), + ids.ptr, + ), null); + } + + return this.cached_structure; +} + +const debug = bun.Output.scoped(.Postgres, false); + +//
@sortImports + +pub const bytea = @import("./types/bytea.zig"); +pub const date = @import("./types/date.zig"); +pub const json = @import("./types/json.zig"); +pub const numeric = @import("./types/numeric.zig"); +pub const string = @import("./types/PostgresString.zig"); +pub const AnyPostgresError = @import("./AnyPostgresError.zig").AnyPostgresError; +pub const Tag = @import("./types/Tag.zig").Tag; + +const int_types = @import("./types/int_types.zig"); +pub const Int32 = int_types.Int32; +pub const PostgresInt32 = int_types.int4; +pub const PostgresInt64 = int_types.int8; +pub const PostgresShort = int_types.short; +pub const int4 = int_types.int4; +pub const int8 = int_types.int8; +pub const short = int_types.short; diff --git a/src/sql/postgres/QueryBindingIterator.zig b/src/sql/postgres/QueryBindingIterator.zig new file mode 100644 index 0000000000..af2e3e78fb --- /dev/null +++ b/src/sql/postgres/QueryBindingIterator.zig @@ -0,0 +1,66 @@ +pub const QueryBindingIterator = union(enum) { + array: JSC.JSArrayIterator, + objects: ObjectIterator, + + pub fn init(array: JSValue, columns: JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!QueryBindingIterator { + if (columns.isEmptyOrUndefinedOrNull()) { + return .{ .array = try JSC.JSArrayIterator.init(array, globalObject) }; + } + + return .{ + .objects = .{ + .array = array, + .columns = columns, + .globalObject = globalObject, + .columns_count = try columns.getLength(globalObject), + .array_length = try array.getLength(globalObject), + }, + }; + } + + pub fn next(this: *QueryBindingIterator) bun.JSError!?JSC.JSValue { + return switch (this.*) { + .array => |*iter| iter.next(), + .objects => |*iter| iter.next(), + }; + } + + pub fn anyFailed(this: *const QueryBindingIterator) bool { + return switch (this.*) { + .array => false, + .objects => |*iter| iter.any_failed, + }; + } + + pub fn to(this: *QueryBindingIterator, index: u32) void { + switch (this.*) { + .array => |*iter| iter.i = index, + .objects => |*iter| { + iter.cell_i = index % iter.columns_count; + iter.row_i = index / iter.columns_count; + iter.current_row = .zero; + }, + } + } + + pub fn reset(this: *QueryBindingIterator) void { + switch (this.*) { + .array => |*iter| { + iter.i = 0; + }, + .objects => |*iter| { + iter.cell_i = 0; + iter.row_i = 0; + iter.current_row = .zero; + }, + } + } +}; + +// @sortImports + +const ObjectIterator = @import("./ObjectIterator.zig"); +const bun = @import("bun"); + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/SASL.zig b/src/sql/postgres/SASL.zig new file mode 100644 index 0000000000..6fb482f40a --- /dev/null +++ b/src/sql/postgres/SASL.zig @@ -0,0 +1,95 @@ +const nonce_byte_len = 18; +const nonce_base64_len = bun.base64.encodeLenFromSize(nonce_byte_len); + +const server_signature_byte_len = 32; +const server_signature_base64_len = bun.base64.encodeLenFromSize(server_signature_byte_len); + +const salted_password_byte_len = 32; + +nonce_base64_bytes: [nonce_base64_len]u8 = .{0} ** nonce_base64_len, +nonce_len: u8 = 0, + +server_signature_base64_bytes: [server_signature_base64_len]u8 = .{0} ** server_signature_base64_len, +server_signature_len: u8 = 0, + +salted_password_bytes: [salted_password_byte_len]u8 = .{0} ** salted_password_byte_len, +salted_password_created: bool = false, + +status: SASLStatus = .init, + +pub const SASLStatus = enum { + init, + @"continue", +}; + +fn hmac(password: []const u8, data: []const u8) ?[32]u8 { + var buf = std.mem.zeroes([bun.BoringSSL.c.EVP_MAX_MD_SIZE]u8); + + // TODO: 
I don't think this is failable. + const result = bun.hmac.generate(password, data, .sha256, &buf) orelse return null; + + assert(result.len == 32); + return buf[0..32].*; +} + +pub fn computeSaltedPassword(this: *SASL, salt_bytes: []const u8, iteration_count: u32, connection: *PostgresSQLConnection) !void { + this.salted_password_created = true; + if (Crypto.EVP.pbkdf2(&this.salted_password_bytes, connection.password, salt_bytes, iteration_count, .sha256) == null) { + return error.PBKDFD2; + } +} + +pub fn saltedPassword(this: *const SASL) []const u8 { + assert(this.salted_password_created); + return this.salted_password_bytes[0..salted_password_byte_len]; +} + +pub fn serverSignature(this: *const SASL) []const u8 { + assert(this.server_signature_len > 0); + return this.server_signature_base64_bytes[0..this.server_signature_len]; +} + +pub fn computeServerSignature(this: *SASL, auth_string: []const u8) !void { + assert(this.server_signature_len == 0); + + const server_key = hmac(this.saltedPassword(), "Server Key") orelse return error.InvalidServerKey; + const server_signature_bytes = hmac(&server_key, auth_string) orelse return error.InvalidServerSignature; + this.server_signature_len = @intCast(bun.base64.encode(&this.server_signature_base64_bytes, &server_signature_bytes)); +} + +pub fn clientKey(this: *const SASL) [32]u8 { + return hmac(this.saltedPassword(), "Client Key").?; +} + +pub fn clientKeySignature(_: *const SASL, client_key: []const u8, auth_string: []const u8) [32]u8 { + var sha_digest = std.mem.zeroes(bun.sha.SHA256.Digest); + bun.sha.SHA256.hash(client_key, &sha_digest, JSC.VirtualMachine.get().rareData().boringEngine()); + return hmac(&sha_digest, auth_string).?; +} + +pub fn nonce(this: *SASL) []const u8 { + if (this.nonce_len == 0) { + var bytes: [nonce_byte_len]u8 = .{0} ** nonce_byte_len; + bun.csprng(&bytes); + this.nonce_len = @intCast(bun.base64.encode(&this.nonce_base64_bytes, &bytes)); + } + return this.nonce_base64_bytes[0..this.nonce_len]; +} + +pub fn deinit(this: *SASL) void { + this.nonce_len = 0; + this.salted_password_created = false; + this.server_signature_len = 0; + this.status = .init; +} + +// @sortImports + +const PostgresSQLConnection = @import("./PostgresSQLConnection.zig"); +const SASL = @This(); +const std = @import("std"); + +const bun = @import("bun"); +const JSC = bun.JSC; +const assert = bun.assert; +const Crypto = JSC.API.Bun.Crypto; diff --git a/src/sql/postgres/SSLMode.zig b/src/sql/postgres/SSLMode.zig new file mode 100644 index 0000000000..adc78ff605 --- /dev/null +++ b/src/sql/postgres/SSLMode.zig @@ -0,0 +1,9 @@ +pub const SSLMode = enum(u8) { + disable = 0, + prefer = 1, + require = 2, + verify_ca = 3, + verify_full = 4, +}; + +// @sortImports diff --git a/src/sql/postgres/Signature.zig b/src/sql/postgres/Signature.zig new file mode 100644 index 0000000000..37720ef626 --- /dev/null +++ b/src/sql/postgres/Signature.zig @@ -0,0 +1,113 @@ +fields: []const int4, +name: []const u8, +query: []const u8, +prepared_statement_name: []const u8, + +pub fn empty() Signature { + return Signature{ + .fields = &[_]int4{}, + .name = &[_]u8{}, + .query = &[_]u8{}, + .prepared_statement_name = &[_]u8{}, + }; +} + +pub fn deinit(this: *Signature) void { + if (this.prepared_statement_name.len > 0) { + bun.default_allocator.free(this.prepared_statement_name); + } + if (this.name.len > 0) { + bun.default_allocator.free(this.name); + } + if (this.fields.len > 0) { + bun.default_allocator.free(this.fields); + } + if (this.query.len > 0) { + 
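+ // note (inferred from Signature.empty() above): the len > 0 guards matter because
+ // empty() points these slices at zero-length static literals, which must not be freed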
bun.default_allocator.free(this.query); + } +} + +pub fn hash(this: *const Signature) u64 { + var hasher = std.hash.Wyhash.init(0); + hasher.update(this.name); + hasher.update(std.mem.sliceAsBytes(this.fields)); + return hasher.final(); +} + +pub fn generate(globalObject: *JSC.JSGlobalObject, query: []const u8, array_value: JSValue, columns: JSValue, prepared_statement_id: u64, unnamed: bool) !Signature { + var fields = std.ArrayList(int4).init(bun.default_allocator); + var name = try std.ArrayList(u8).initCapacity(bun.default_allocator, query.len); + + name.appendSliceAssumeCapacity(query); + + errdefer { + fields.deinit(); + name.deinit(); + } + + var iter = try QueryBindingIterator.init(array_value, columns, globalObject); + + while (try iter.next()) |value| { + if (value.isEmptyOrUndefinedOrNull()) { + // Allow postgres to decide the type + try fields.append(0); + try name.appendSlice(".null"); + continue; + } + + const tag = try types.Tag.fromJS(globalObject, value); + + switch (tag) { + .int8 => try name.appendSlice(".int8"), + .int4 => try name.appendSlice(".int4"), + // .int4_array => try name.appendSlice(".int4_array"), + .int2 => try name.appendSlice(".int2"), + .float8 => try name.appendSlice(".float8"), + .float4 => try name.appendSlice(".float4"), + .numeric => try name.appendSlice(".numeric"), + .json, .jsonb => try name.appendSlice(".json"), + .bool => try name.appendSlice(".bool"), + .timestamp => try name.appendSlice(".timestamp"), + .timestamptz => try name.appendSlice(".timestamptz"), + .bytea => try name.appendSlice(".bytea"), + else => try name.appendSlice(".string"), + } + + switch (tag) { + .bool, .int4, .int8, .float8, .int2, .numeric, .float4, .bytea => { + // We decide the type + try fields.append(@intFromEnum(tag)); + }, + else => { + // Allow postgres to decide the type + try fields.append(0); + }, + } + } + + if (iter.anyFailed()) { + return error.InvalidQueryBinding; + } + // max u64 length is 20, max prepared_statement_name length is 63 + const prepared_statement_name = if (unnamed) "" else try std.fmt.allocPrint(bun.default_allocator, "P{s}${d}", .{ name.items[0..@min(40, name.items.len)], prepared_statement_id }); + + return Signature{ + .prepared_statement_name = prepared_statement_name, + .name = name.items, + .fields = fields.items, + .query = try bun.default_allocator.dupe(u8, query), + }; +} + +// @sortImports + +const Signature = @This(); +const bun = @import("bun"); +const std = @import("std"); +const QueryBindingIterator = @import("./QueryBindingIterator.zig").QueryBindingIterator; + +const types = @import("./PostgresTypes.zig"); +const int4 = types.int4; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/SocketMonitor.zig b/src/sql/postgres/SocketMonitor.zig new file mode 100644 index 0000000000..7765d31017 --- /dev/null +++ b/src/sql/postgres/SocketMonitor.zig @@ -0,0 +1,23 @@ +pub fn write(data: []const u8) void { + if (comptime bun.Environment.isDebug) { + DebugSocketMonitorWriter.check.call(); + if (DebugSocketMonitorWriter.enabled) { + DebugSocketMonitorWriter.write(data); + } + } +} + +pub fn read(data: []const u8) void { + if (comptime bun.Environment.isDebug) { + DebugSocketMonitorReader.check.call(); + if (DebugSocketMonitorReader.enabled) { + DebugSocketMonitorReader.write(data); + } + } +} + +// @sortImports + +const DebugSocketMonitorReader = @import("./DebugSocketMonitorReader.zig"); +const DebugSocketMonitorWriter = @import("./DebugSocketMonitorWriter.zig"); +const bun = @import("bun"); diff --git 
a/src/sql/postgres/Status.zig b/src/sql/postgres/Status.zig new file mode 100644 index 0000000000..f4a0e9290e --- /dev/null +++ b/src/sql/postgres/Status.zig @@ -0,0 +1,11 @@ +pub const Status = enum { + disconnected, + connecting, + // Prevent sending the startup message multiple times. + // Particularly relevant for TLS connections. + sent_startup_message, + connected, + failed, +}; + +// @sortImports diff --git a/src/sql/postgres/TLSStatus.zig b/src/sql/postgres/TLSStatus.zig new file mode 100644 index 0000000000..a711af013a --- /dev/null +++ b/src/sql/postgres/TLSStatus.zig @@ -0,0 +1,11 @@ +pub const TLSStatus = union(enum) { + none, + pending, + + /// Number of bytes sent of the 8-byte SSL request message. + /// Since we may send a partial message, we need to know how many bytes were sent. + message_sent: u8, + + ssl_not_available, + ssl_ok, +}; diff --git a/src/sql/postgres/postgres_protocol.zig b/src/sql/postgres/postgres_protocol.zig deleted file mode 100644 index 111e96b0ac..0000000000 --- a/src/sql/postgres/postgres_protocol.zig +++ /dev/null @@ -1,1551 +0,0 @@ -const std = @import("std"); -const bun = @import("bun"); -const postgres = bun.api.Postgres; -const Data = postgres.Data; -const protocol = @This(); -const PostgresInt32 = postgres.PostgresInt32; -const PostgresShort = postgres.PostgresShort; -const String = bun.String; -const debug = postgres.debug; -const JSValue = JSC.JSValue; -const JSC = bun.JSC; -const short = postgres.short; -const int4 = postgres.int4; -const int8 = postgres.int8; -const PostgresInt64 = postgres.PostgresInt64; -const types = postgres.types; -const AnyPostgresError = postgres.AnyPostgresError; -pub const ArrayList = struct { - array: *std.ArrayList(u8), - - pub fn offset(this: @This()) usize { - return this.array.items.len; - } - - pub fn write(this: @This(), bytes: []const u8) AnyPostgresError!void { - try this.array.appendSlice(bytes); - } - - pub fn pwrite(this: @This(), bytes: []const u8, i: usize) AnyPostgresError!void { - @memcpy(this.array.items[i..][0..bytes.len], bytes); - } - - pub const Writer = NewWriter(@This()); -}; - -pub const StackReader = struct { - buffer: []const u8 = "", - offset: *usize, - message_start: *usize, - - pub fn markMessageStart(this: @This()) void { - this.message_start.* = this.offset.*; - } - - pub fn ensureLength(this: @This(), length: usize) bool { - return this.buffer.len >= (this.offset.* + length); - } - - pub fn init(buffer: []const u8, offset: *usize, message_start: *usize) protocol.NewReader(StackReader) { - return .{ - .wrapped = .{ - .buffer = buffer, - .offset = offset, - .message_start = message_start, - }, - }; - } - - pub fn peek(this: StackReader) []const u8 { - return this.buffer[this.offset.*..]; - } - pub fn skip(this: StackReader, count: usize) void { - if (this.offset.* + count > this.buffer.len) { - this.offset.* = this.buffer.len; - return; - } - - this.offset.* += count; - } - pub fn ensureCapacity(this: StackReader, count: usize) bool { - return this.buffer.len >= (this.offset.* + count); - } - pub fn read(this: StackReader, count: usize) AnyPostgresError!Data { - const offset = this.offset.*; - if (!this.ensureCapacity(count)) { - return error.ShortRead; - } - - this.skip(count); - return Data{ - .temporary = this.buffer[offset..this.offset.*], - }; - } - pub fn readZ(this: StackReader) AnyPostgresError!Data { - const remaining = this.peek(); - if (bun.strings.indexOfChar(remaining, 0)) |zero| { - this.skip(zero + 1); - return Data{ - .temporary = remaining[0..zero], - }; - } - - return 
error.ShortRead; - } -}; - -pub fn NewWriterWrap( - comptime Context: type, - comptime offsetFn_: (fn (ctx: Context) usize), - comptime writeFunction_: (fn (ctx: Context, bytes: []const u8) AnyPostgresError!void), - comptime pwriteFunction_: (fn (ctx: Context, bytes: []const u8, offset: usize) AnyPostgresError!void), -) type { - return struct { - wrapped: Context, - - const writeFn = writeFunction_; - const pwriteFn = pwriteFunction_; - const offsetFn = offsetFn_; - pub const Ctx = Context; - - pub const WrappedWriter = @This(); - - pub inline fn write(this: @This(), data: []const u8) AnyPostgresError!void { - try writeFn(this.wrapped, data); - } - - pub const LengthWriter = struct { - index: usize, - context: WrappedWriter, - - pub fn write(this: LengthWriter) AnyPostgresError!void { - try this.context.pwrite(&Int32(this.context.offset() - this.index), this.index); - } - - pub fn writeExcludingSelf(this: LengthWriter) AnyPostgresError!void { - try this.context.pwrite(&Int32(this.context.offset() -| (this.index + 4)), this.index); - } - }; - - pub inline fn length(this: @This()) AnyPostgresError!LengthWriter { - const i = this.offset(); - try this.int4(0); - return LengthWriter{ - .index = i, - .context = this, - }; - } - - pub inline fn offset(this: @This()) usize { - return offsetFn(this.wrapped); - } - - pub inline fn pwrite(this: @This(), data: []const u8, i: usize) AnyPostgresError!void { - try pwriteFn(this.wrapped, data, i); - } - - pub fn int4(this: @This(), value: PostgresInt32) !void { - try this.write(std.mem.asBytes(&@byteSwap(value))); - } - - pub fn int8(this: @This(), value: PostgresInt64) !void { - try this.write(std.mem.asBytes(&@byteSwap(value))); - } - - pub fn sint4(this: @This(), value: i32) !void { - try this.write(std.mem.asBytes(&@byteSwap(value))); - } - - pub fn @"f64"(this: @This(), value: f64) !void { - try this.write(std.mem.asBytes(&@byteSwap(@as(u64, @bitCast(value))))); - } - - pub fn @"f32"(this: @This(), value: f32) !void { - try this.write(std.mem.asBytes(&@byteSwap(@as(u32, @bitCast(value))))); - } - - pub fn short(this: @This(), value: anytype) !void { - try this.write(std.mem.asBytes(&@byteSwap(@as(u16, @intCast(value))))); - } - - pub fn string(this: @This(), value: []const u8) !void { - try this.write(value); - if (value.len == 0 or value[value.len - 1] != 0) - try this.write(&[_]u8{0}); - } - - pub fn bytes(this: @This(), value: []const u8) !void { - try this.write(value); - if (value.len == 0 or value[value.len - 1] != 0) - try this.write(&[_]u8{0}); - } - - pub fn @"bool"(this: @This(), value: bool) !void { - try this.write(if (value) "t" else "f"); - } - - pub fn @"null"(this: @This()) !void { - try this.int4(std.math.maxInt(PostgresInt32)); - } - - pub fn String(this: @This(), value: bun.String) !void { - if (value.isEmpty()) { - try this.write(&[_]u8{0}); - return; - } - - var sliced = value.toUTF8(bun.default_allocator); - defer sliced.deinit(); - const slice = sliced.slice(); - - try this.write(slice); - if (slice.len == 0 or slice[slice.len - 1] != 0) - try this.write(&[_]u8{0}); - } - }; -} - -pub const FieldType = enum(u8) { - /// Severity: the field contents are ERROR, FATAL, or PANIC (in an error message), or WARNING, NOTICE, DEBUG, INFO, or LOG (in a notice message), or a localized translation of one of these. Always present. - severity = 'S', - - /// Severity: the field contents are ERROR, FATAL, or PANIC (in an error message), or WARNING, NOTICE, DEBUG, INFO, or LOG (in a notice message). 
This is identical to the S field except that the contents are never localized. This is present only in messages generated by PostgreSQL versions 9.6 and later. - localized_severity = 'V', - - /// Code: the SQLSTATE code for the error (see Appendix A). Not localizable. Always present. - code = 'C', - - /// Message: the primary human-readable error message. This should be accurate but terse (typically one line). Always present. - message = 'M', - - /// Detail: an optional secondary error message carrying more detail about the problem. Might run to multiple lines. - detail = 'D', - - /// Hint: an optional suggestion what to do about the problem. This is intended to differ from Detail in that it offers advice (potentially inappropriate) rather than hard facts. Might run to multiple lines. - hint = 'H', - - /// Position: the field value is a decimal ASCII integer, indicating an error cursor position as an index into the original query string. The first character has index 1, and positions are measured in characters not bytes. - position = 'P', - - /// Internal position: this is defined the same as the P field, but it is used when the cursor position refers to an internally generated command rather than the one submitted by the client. The q field will always appear when this field appears. - internal_position = 'p', - - /// Internal query: the text of a failed internally-generated command. This could be, for example, an SQL query issued by a PL/pgSQL function. - internal = 'q', - - /// Where: an indication of the context in which the error occurred. Presently this includes a call stack traceback of active procedural language functions and internally-generated queries. The trace is one entry per line, most recent first. - where = 'W', - - /// Schema name: if the error was associated with a specific database object, the name of the schema containing that object, if any. - schema = 's', - - /// Table name: if the error was associated with a specific table, the name of the table. (Refer to the schema name field for the name of the table's schema.) - table = 't', - - /// Column name: if the error was associated with a specific table column, the name of the column. (Refer to the schema and table name fields to identify the table.) - column = 'c', - - /// Data type name: if the error was associated with a specific data type, the name of the data type. (Refer to the schema name field for the name of the data type's schema.) - datatype = 'd', - - /// Constraint name: if the error was associated with a specific constraint, the name of the constraint. Refer to fields listed above for the associated table or domain. (For this purpose, indexes are treated as constraints, even if they weren't created with constraint syntax.) - constraint = 'n', - - /// File: the file name of the source-code location where the error was reported. - file = 'F', - - /// Line: the line number of the source-code location where the error was reported. - line = 'L', - - /// Routine: the name of the source-code routine reporting the error. 
- routine = 'R', - - _, -}; - -pub const FieldMessage = union(FieldType) { - severity: String, - localized_severity: String, - code: String, - message: String, - detail: String, - hint: String, - position: String, - internal_position: String, - internal: String, - where: String, - schema: String, - table: String, - column: String, - datatype: String, - constraint: String, - file: String, - line: String, - routine: String, - - pub fn format(this: FieldMessage, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - switch (this) { - inline else => |str| { - try std.fmt.format(writer, "{}", .{str}); - }, - } - } - - pub fn deinit(this: *FieldMessage) void { - switch (this.*) { - inline else => |*message| { - message.deref(); - }, - } - } - - pub fn decodeList(comptime Context: type, reader: NewReader(Context)) !std.ArrayListUnmanaged(FieldMessage) { - var messages = std.ArrayListUnmanaged(FieldMessage){}; - while (true) { - const field_int = try reader.int(u8); - if (field_int == 0) break; - const field: FieldType = @enumFromInt(field_int); - - var message = try reader.readZ(); - defer message.deinit(); - if (message.slice().len == 0) break; - - try messages.append(bun.default_allocator, FieldMessage.init(field, message.slice()) catch continue); - } - - return messages; - } - - pub fn init(tag: FieldType, message: []const u8) !FieldMessage { - return switch (tag) { - .severity => FieldMessage{ .severity = String.createUTF8(message) }, - // Ignore this one for now. - // .localized_severity => FieldMessage{ .localized_severity = String.createUTF8(message) }, - .code => FieldMessage{ .code = String.createUTF8(message) }, - .message => FieldMessage{ .message = String.createUTF8(message) }, - .detail => FieldMessage{ .detail = String.createUTF8(message) }, - .hint => FieldMessage{ .hint = String.createUTF8(message) }, - .position => FieldMessage{ .position = String.createUTF8(message) }, - .internal_position => FieldMessage{ .internal_position = String.createUTF8(message) }, - .internal => FieldMessage{ .internal = String.createUTF8(message) }, - .where => FieldMessage{ .where = String.createUTF8(message) }, - .schema => FieldMessage{ .schema = String.createUTF8(message) }, - .table => FieldMessage{ .table = String.createUTF8(message) }, - .column => FieldMessage{ .column = String.createUTF8(message) }, - .datatype => FieldMessage{ .datatype = String.createUTF8(message) }, - .constraint => FieldMessage{ .constraint = String.createUTF8(message) }, - .file => FieldMessage{ .file = String.createUTF8(message) }, - .line => FieldMessage{ .line = String.createUTF8(message) }, - .routine => FieldMessage{ .routine = String.createUTF8(message) }, - else => error.UnknownFieldType, - }; - } -}; - -pub fn NewReaderWrap( - comptime Context: type, - comptime markMessageStartFn_: (fn (ctx: Context) void), - comptime peekFn_: (fn (ctx: Context) []const u8), - comptime skipFn_: (fn (ctx: Context, count: usize) void), - comptime ensureCapacityFn_: (fn (ctx: Context, count: usize) bool), - comptime readFunction_: (fn (ctx: Context, count: usize) AnyPostgresError!Data), - comptime readZ_: (fn (ctx: Context) AnyPostgresError!Data), -) type { - return struct { - wrapped: Context, - const readFn = readFunction_; - const readZFn = readZ_; - const ensureCapacityFn = ensureCapacityFn_; - const skipFn = skipFn_; - const peekFn = peekFn_; - const markMessageStartFn = markMessageStartFn_; - - pub const Ctx = Context; - - pub inline fn markMessageStart(this: @This()) void { - 
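- // Record the current read offset as the start of the in-flight message so the connection can rewind and re-parse from here after a short read (error.ShortRead).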
markMessageStartFn(this.wrapped); - } - - pub inline fn read(this: @This(), count: usize) AnyPostgresError!Data { - return try readFn(this.wrapped, count); - } - - pub inline fn eatMessage(this: @This(), comptime msg_: anytype) AnyPostgresError!void { - const msg = msg_[1..]; - try this.ensureCapacity(msg.len); - - var input = try readFn(this.wrapped, msg.len); - defer input.deinit(); - if (bun.strings.eqlComptime(input.slice(), msg)) return; - return error.InvalidMessage; - } - - pub fn skip(this: @This(), count: usize) AnyPostgresError!void { - skipFn(this.wrapped, count); - } - - pub fn peek(this: @This()) []const u8 { - return peekFn(this.wrapped); - } - - pub inline fn readZ(this: @This()) AnyPostgresError!Data { - return try readZFn(this.wrapped); - } - - pub inline fn ensureCapacity(this: @This(), count: usize) AnyPostgresError!void { - if (!ensureCapacityFn(this.wrapped, count)) { - return error.ShortRead; - } - } - - pub fn int(this: @This(), comptime Int: type) !Int { - var data = try this.read(@sizeOf((Int))); - defer data.deinit(); - if (comptime Int == u8) { - return @as(Int, data.slice()[0]); - } - return @byteSwap(@as(Int, @bitCast(data.slice()[0..@sizeOf(Int)].*))); - } - - pub fn peekInt(this: @This(), comptime Int: type) ?Int { - const remain = this.peek(); - if (remain.len < @sizeOf(Int)) { - return null; - } - return @byteSwap(@as(Int, @bitCast(remain[0..@sizeOf(Int)].*))); - } - - pub fn expectInt(this: @This(), comptime Int: type, comptime value: comptime_int) !bool { - const actual = try this.int(Int); - return actual == value; - } - - pub fn int4(this: @This()) !PostgresInt32 { - return this.int(PostgresInt32); - } - - pub fn short(this: @This()) !PostgresShort { - return this.int(PostgresShort); - } - - pub fn length(this: @This()) !PostgresInt32 { - const expected = try this.int(PostgresInt32); - if (expected > -1) { - try this.ensureCapacity(@intCast(expected -| 4)); - } - - return expected; - } - - pub const bytes = read; - - pub fn String(this: @This()) !bun.String { - var result = try this.readZ(); - defer result.deinit(); - return bun.String.fromUTF8(result.slice()); - } - }; -} - -pub fn NewReader(comptime Context: type) type { - return NewReaderWrap(Context, Context.markMessageStart, Context.peek, Context.skip, Context.ensureLength, Context.read, Context.readZ); -} - -pub fn NewWriter(comptime Context: type) type { - return NewWriterWrap(Context, Context.offset, Context.write, Context.pwrite); -} - -fn decoderWrap(comptime Container: type, comptime decodeFn: anytype) type { - return struct { - pub fn decode(this: *Container, context: anytype) AnyPostgresError!void { - const Context = @TypeOf(context); - try decodeFn(this, Context, NewReader(Context){ .wrapped = context }); - } - }; -} - -fn writeWrap(comptime Container: type, comptime writeFn: anytype) type { - return struct { - pub fn write(this: *Container, context: anytype) AnyPostgresError!void { - const Context = @TypeOf(context); - try writeFn(this, Context, NewWriter(Context){ .wrapped = context }); - } - }; -} - -pub const Authentication = union(enum) { - Ok: void, - ClearTextPassword: struct {}, - MD5Password: struct { - salt: [4]u8, - }, - KerberosV5: struct {}, - SCMCredential: struct {}, - GSS: struct {}, - GSSContinue: struct { - data: Data, - }, - SSPI: struct {}, - SASL: struct {}, - SASLContinue: struct { - data: Data, - r: []const u8, - s: []const u8, - i: []const u8, - - pub fn iterationCount(this: *const @This()) !u32 { - return try std.fmt.parseInt(u32, this.i, 0); - } - }, - SASLFinal: 
struct { - data: Data, - }, - Unknown: void, - - pub fn deinit(this: *@This()) void { - switch (this.*) { - .MD5Password => {}, - .SASL => {}, - .SASLContinue => { - this.SASLContinue.data.zdeinit(); - }, - .SASLFinal => { - this.SASLFinal.data.zdeinit(); - }, - else => {}, - } - } - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - const message_length = try reader.length(); - - switch (try reader.int4()) { - 0 => { - if (message_length != 8) return error.InvalidMessageLength; - this.* = .{ .Ok = {} }; - }, - 2 => { - if (message_length != 8) return error.InvalidMessageLength; - this.* = .{ - .KerberosV5 = .{}, - }; - }, - 3 => { - if (message_length != 8) return error.InvalidMessageLength; - this.* = .{ - .ClearTextPassword = .{}, - }; - }, - 5 => { - if (message_length != 12) return error.InvalidMessageLength; - var salt_data = try reader.bytes(4); - defer salt_data.deinit(); - this.* = .{ - .MD5Password = .{ - .salt = salt_data.slice()[0..4].*, - }, - }; - }, - 7 => { - if (message_length != 8) return error.InvalidMessageLength; - this.* = .{ - .GSS = .{}, - }; - }, - - 8 => { - if (message_length < 9) return error.InvalidMessageLength; - const bytes = try reader.read(message_length - 8); - this.* = .{ - .GSSContinue = .{ - .data = bytes, - }, - }; - }, - 9 => { - if (message_length != 8) return error.InvalidMessageLength; - this.* = .{ - .SSPI = .{}, - }; - }, - - 10 => { - if (message_length < 9) return error.InvalidMessageLength; - try reader.skip(message_length - 8); - this.* = .{ - .SASL = .{}, - }; - }, - - 11 => { - if (message_length < 9) return error.InvalidMessageLength; - var bytes = try reader.bytes(message_length - 8); - errdefer { - bytes.deinit(); - } - - var iter = bun.strings.split(bytes.slice(), ","); - var r: ?[]const u8 = null; - var i: ?[]const u8 = null; - var s: ?[]const u8 = null; - - while (iter.next()) |item| { - if (item.len > 2) { - const key = item[0]; - const after_equals = item[2..]; - if (key == 'r') { - r = after_equals; - } else if (key == 's') { - s = after_equals; - } else if (key == 'i') { - i = after_equals; - } - } - } - - if (r == null) { - debug("Missing r", .{}); - } - - if (s == null) { - debug("Missing s", .{}); - } - - if (i == null) { - debug("Missing i", .{}); - } - - this.* = .{ - .SASLContinue = .{ - .data = bytes, - .r = r orelse return error.InvalidMessage, - .s = s orelse return error.InvalidMessage, - .i = i orelse return error.InvalidMessage, - }, - }; - }, - - 12 => { - if (message_length < 9) return error.InvalidMessageLength; - const remaining: usize = message_length - 8; - - const bytes = try reader.read(remaining); - this.* = .{ - .SASLFinal = .{ - .data = bytes, - }, - }; - }, - - else => { - this.* = .{ .Unknown = {} }; - }, - } - } - - pub const decode = decoderWrap(Authentication, decodeInternal).decode; -}; - -pub const ParameterStatus = struct { - name: Data = .{ .empty = {} }, - value: Data = .{ .empty = {} }, - - pub fn deinit(this: *@This()) void { - this.name.deinit(); - this.value.deinit(); - } - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - const length = try reader.length(); - bun.assert(length >= 4); - - this.* = .{ - .name = try reader.readZ(), - .value = try reader.readZ(), - }; - } - - pub const decode = decoderWrap(ParameterStatus, decodeInternal).decode; -}; - -pub const BackendKeyData = struct { - process_id: u32 = 0, - secret_key: u32 = 0, - pub const decode = decoderWrap(BackendKeyData, 
decodeInternal).decode; - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - if (!try reader.expectInt(u32, 12)) { - return error.InvalidBackendKeyData; - } - - this.* = .{ - .process_id = @bitCast(try reader.int4()), - .secret_key = @bitCast(try reader.int4()), - }; - } -}; - -pub const ErrorResponse = struct { - messages: std.ArrayListUnmanaged(FieldMessage) = .{}, - - pub fn format(formatter: ErrorResponse, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - for (formatter.messages.items) |message| { - try std.fmt.format(writer, "{}\n", .{message}); - } - } - - pub fn deinit(this: *ErrorResponse) void { - for (this.messages.items) |*message| { - message.deinit(); - } - this.messages.deinit(bun.default_allocator); - } - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - var remaining_bytes = try reader.length(); - if (remaining_bytes < 4) return error.InvalidMessageLength; - remaining_bytes -|= 4; - - if (remaining_bytes > 0) { - this.* = .{ - .messages = try FieldMessage.decodeList(Container, reader), - }; - } - } - - pub const decode = decoderWrap(ErrorResponse, decodeInternal).decode; - - pub fn toJS(this: ErrorResponse, globalObject: *JSC.JSGlobalObject) JSValue { - var b = bun.StringBuilder{}; - defer b.deinit(bun.default_allocator); - - // Pre-calculate capacity to avoid reallocations - for (this.messages.items) |*msg| { - b.cap += switch (msg.*) { - inline else => |m| m.utf8ByteLength(), - } + 1; - } - b.allocate(bun.default_allocator) catch {}; - - // Build a more structured error message - var severity: String = String.dead; - var code: String = String.dead; - var message: String = String.dead; - var detail: String = String.dead; - var hint: String = String.dead; - var position: String = String.dead; - var where: String = String.dead; - var schema: String = String.dead; - var table: String = String.dead; - var column: String = String.dead; - var datatype: String = String.dead; - var constraint: String = String.dead; - var file: String = String.dead; - var line: String = String.dead; - var routine: String = String.dead; - - for (this.messages.items) |*msg| { - switch (msg.*) { - .severity => |str| severity = str, - .code => |str| code = str, - .message => |str| message = str, - .detail => |str| detail = str, - .hint => |str| hint = str, - .position => |str| position = str, - .where => |str| where = str, - .schema => |str| schema = str, - .table => |str| table = str, - .column => |str| column = str, - .datatype => |str| datatype = str, - .constraint => |str| constraint = str, - .file => |str| file = str, - .line => |str| line = str, - .routine => |str| routine = str, - else => {}, - } - } - - var needs_newline = false; - construct_message: { - if (!message.isEmpty()) { - _ = b.appendStr(message); - needs_newline = true; - break :construct_message; - } - if (!detail.isEmpty()) { - if (needs_newline) { - _ = b.append("\n"); - } else { - _ = b.append(" "); - } - needs_newline = true; - _ = b.appendStr(detail); - } - if (!hint.isEmpty()) { - if (needs_newline) { - _ = b.append("\n"); - } else { - _ = b.append(" "); - } - needs_newline = true; - _ = b.appendStr(hint); - } - } - - const possible_fields = .{ - .{ "detail", detail, void }, - .{ "hint", hint, void }, - .{ "column", column, void }, - .{ "constraint", constraint, void }, - .{ "datatype", datatype, void }, - // in the past this was set to i32 but postgres returns a strings lets keep it 
compatible - .{ "errno", code, void }, - .{ "position", position, i32 }, - .{ "schema", schema, void }, - .{ "table", table, void }, - .{ "where", where, void }, - }; - const error_code: JSC.Error = - // https://www.postgresql.org/docs/8.1/errcodes-appendix.html - if (code.eqlComptime("42601")) - .POSTGRES_SYNTAX_ERROR - else - .POSTGRES_SERVER_ERROR; - const err = error_code.fmt(globalObject, "{s}", .{b.allocatedSlice()[0..b.len]}); - - inline for (possible_fields) |field| { - if (!field.@"1".isEmpty()) { - const value = brk: { - if (field.@"2" == i32) { - if (field.@"1".toInt32()) |val| { - break :brk JSC.JSValue.jsNumberFromInt32(val); - } - } - - break :brk field.@"1".toJS(globalObject); - }; - - err.put(globalObject, JSC.ZigString.static(field.@"0"), value); - } - } - - return err; - } -}; - -pub const PortalOrPreparedStatement = union(enum) { - portal: []const u8, - prepared_statement: []const u8, - - pub fn slice(this: @This()) []const u8 { - return switch (this) { - .portal => this.portal, - .prepared_statement => this.prepared_statement, - }; - } - - pub fn tag(this: @This()) u8 { - return switch (this) { - .portal => 'P', - .prepared_statement => 'S', - }; - } -}; - -/// Close (F) -/// Byte1('C') -/// - Identifies the message as a Close command. -/// Int32 -/// - Length of message contents in bytes, including self. -/// Byte1 -/// - 'S' to close a prepared statement; or 'P' to close a portal. -/// String -/// - The name of the prepared statement or portal to close (an empty string selects the unnamed prepared statement or portal). -pub const Close = struct { - p: PortalOrPreparedStatement, - - fn writeInternal( - this: *const @This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - const p = this.p; - const count: u32 = @sizeOf((u32)) + 1 + p.slice().len + 1; - const header = [_]u8{ - 'C', - } ++ @byteSwap(count) ++ [_]u8{ - p.tag(), - }; - try writer.write(&header); - try writer.write(p.slice()); - try writer.write(&[_]u8{0}); - } - - pub const write = writeWrap(@This(), writeInternal); -}; - -pub const CloseComplete = [_]u8{'3'} ++ toBytes(Int32(4)); -pub const EmptyQueryResponse = [_]u8{'I'} ++ toBytes(Int32(4)); -pub const Terminate = [_]u8{'X'} ++ toBytes(Int32(4)); - -fn Int32(value: anytype) [4]u8 { - return @bitCast(@byteSwap(@as(int4, @intCast(value)))); -} - -const toBytes = std.mem.toBytes; - -pub const TransactionStatusIndicator = enum(u8) { - /// if idle (not in a transaction block) - I = 'I', - - /// if in a transaction block - T = 'T', - - /// if in a failed transaction block - E = 'E', - - _, -}; - -pub const ReadyForQuery = struct { - status: TransactionStatusIndicator = .I, - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - const length = try reader.length(); - bun.assert(length >= 4); - - const status = try reader.int(u8); - this.* = .{ - .status = @enumFromInt(status), - }; - } - - pub const decode = decoderWrap(ReadyForQuery, decodeInternal).decode; -}; - -pub const null_int4 = 4294967295; - -pub const DataRow = struct { - pub fn decode(context: anytype, comptime ContextType: type, reader: NewReader(ContextType), comptime forEach: fn (@TypeOf(context), index: u32, bytes: ?*Data) AnyPostgresError!bool) AnyPostgresError!void { - var remaining_bytes = try reader.length(); - remaining_bytes -|= 4; - - const remaining_fields: usize = @intCast(@max(try reader.short(), 0)); - - for (0..remaining_fields) |index| { - const byte_length = try reader.int4(); - switch (byte_length) { - 0 => { - 
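- // A zero length is an empty (but non-NULL) value; SQL NULL is sent as -1, which reads back as null_int4 (4294967295) through an unsigned int4.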
var empty = Data.Empty; - if (!try forEach(context, @intCast(index), &empty)) break; - }, - null_int4 => { - if (!try forEach(context, @intCast(index), null)) break; - }, - else => { - var bytes = try reader.bytes(@intCast(byte_length)); - if (!try forEach(context, @intCast(index), &bytes)) break; - }, - } - } - } -}; - -pub const BindComplete = [_]u8{'2'} ++ toBytes(Int32(4)); - -pub const ColumnIdentifier = union(enum) { - name: Data, - index: u32, - duplicate: void, - - pub fn init(name: Data) !@This() { - if (switch (name.slice().len) { - 1..."4294967295".len => true, - 0 => return .{ .name = .{ .empty = {} } }, - else => false, - }) might_be_int: { - // use a u64 to avoid overflow - var int: u64 = 0; - for (name.slice()) |byte| { - int = int * 10 + switch (byte) { - '0'...'9' => @as(u64, byte - '0'), - else => break :might_be_int, - }; - } - - // JSC only supports indexed property names up to 2^32 - if (int < std.math.maxInt(u32)) - return .{ .index = @intCast(int) }; - } - - return .{ .name = .{ .owned = try name.toOwned() } }; - } - - pub fn deinit(this: *@This()) void { - switch (this.*) { - .name => |*name| name.deinit(), - else => {}, - } - } -}; -pub const FieldDescription = struct { - /// JavaScriptCore treats numeric property names differently than string property names. - /// so we do the work to figure out if the property name is a number ahead of time. - name_or_index: ColumnIdentifier = .{ - .name = .{ .empty = {} }, - }, - table_oid: int4 = 0, - column_index: short = 0, - type_oid: int4 = 0, - binary: bool = false, - pub fn typeTag(this: @This()) types.Tag { - return @enumFromInt(@as(short, @truncate(this.type_oid))); - } - - pub fn deinit(this: *@This()) void { - this.name_or_index.deinit(); - } - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) AnyPostgresError!void { - var name = try reader.readZ(); - errdefer { - name.deinit(); - } - - // Field name (null-terminated string) - const field_name = try ColumnIdentifier.init(name); - // Table OID (4 bytes) - // If the field can be identified as a column of a specific table, the object ID of the table; otherwise zero. - const table_oid = try reader.int4(); - - // Column attribute number (2 bytes) - // If the field can be identified as a column of a specific table, the attribute number of the column; otherwise zero. - const column_index = try reader.short(); - - // Data type OID (4 bytes) - // The object ID of the field's data type. The type modifier (see pg_attribute.atttypmod). The meaning of the modifier is type-specific. - const type_oid = try reader.int4(); - - // Data type size (2 bytes) The data type size (see pg_type.typlen). Note that negative values denote variable-width types. - // Type modifier (4 bytes) The type modifier (see pg_attribute.atttypmod). The meaning of the modifier is type-specific. - try reader.skip(6); - - // Format code (2 bytes) - // The format code being used for the field. Currently will be zero (text) or one (binary). In a RowDescription returned from the statement variant of Describe, the format code is not yet known and will always be zero. 
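- // 0 = text format, 1 = binary format; anything else is error.UnknownFormatCode.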
- const binary = switch (try reader.short()) { - 0 => false, - 1 => true, - else => return error.UnknownFormatCode, - }; - this.* = .{ - .table_oid = table_oid, - .column_index = column_index, - .type_oid = type_oid, - .binary = binary, - .name_or_index = field_name, - }; - } - - pub const decode = decoderWrap(FieldDescription, decodeInternal).decode; -}; - -pub const RowDescription = struct { - fields: []FieldDescription = &[_]FieldDescription{}, - pub fn deinit(this: *@This()) void { - for (this.fields) |*field| { - field.deinit(); - } - - bun.default_allocator.free(this.fields); - } - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - var remaining_bytes = try reader.length(); - remaining_bytes -|= 4; - - const field_count: usize = @intCast(@max(try reader.short(), 0)); - var fields = try bun.default_allocator.alloc( - FieldDescription, - field_count, - ); - var remaining = fields; - errdefer { - for (fields[0 .. field_count - remaining.len]) |*field| { - field.deinit(); - } - - bun.default_allocator.free(fields); - } - while (remaining.len > 0) { - try remaining[0].decodeInternal(Container, reader); - remaining = remaining[1..]; - } - this.* = .{ - .fields = fields, - }; - } - - pub const decode = decoderWrap(RowDescription, decodeInternal).decode; -}; - -pub const ParameterDescription = struct { - parameters: []int4 = &[_]int4{}, - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - var remaining_bytes = try reader.length(); - remaining_bytes -|= 4; - - const count = try reader.short(); - const parameters = try bun.default_allocator.alloc(int4, @intCast(@max(count, 0))); - - var data = try reader.read(@as(usize, @intCast(@max(count, 0))) * @sizeOf((int4))); - defer data.deinit(); - const input_params: []align(1) const int4 = toInt32Slice(int4, data.slice()); - for (input_params, parameters) |src, *dest| { - dest.* = @byteSwap(src); - } - - this.* = .{ - .parameters = parameters, - }; - } - - pub const decode = decoderWrap(ParameterDescription, decodeInternal).decode; -}; - -// workaround for zig compiler TODO -fn toInt32Slice(comptime Int: type, slice: []const u8) []align(1) const Int { - return @as([*]align(1) const Int, @ptrCast(slice.ptr))[0 .. 
slice.len / @sizeOf((Int))]; -} - -pub const NotificationResponse = struct { - pid: int4 = 0, - channel: bun.ByteList = .{}, - payload: bun.ByteList = .{}, - - pub fn deinit(this: *@This()) void { - this.channel.deinitWithAllocator(bun.default_allocator); - this.payload.deinitWithAllocator(bun.default_allocator); - } - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - const length = try reader.length(); - bun.assert(length >= 4); - - this.* = .{ - .pid = try reader.int4(), - .channel = (try reader.readZ()).toOwned(), - .payload = (try reader.readZ()).toOwned(), - }; - } - - pub const decode = decoderWrap(NotificationResponse, decodeInternal).decode; -}; - -pub const CommandComplete = struct { - command_tag: Data = .{ .empty = {} }, - - pub fn deinit(this: *@This()) void { - this.command_tag.deinit(); - } - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - const length = try reader.length(); - bun.assert(length >= 4); - - const tag = try reader.readZ(); - this.* = .{ - .command_tag = tag, - }; - } - - pub const decode = decoderWrap(CommandComplete, decodeInternal).decode; -}; - -pub const Parse = struct { - name: []const u8 = "", - query: []const u8 = "", - params: []const int4 = &.{}, - - pub fn deinit(this: *Parse) void { - _ = this; - } - - pub fn writeInternal( - this: *const @This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - const parameters = this.params; - const count: usize = @sizeOf((u32)) + @sizeOf(u16) + (parameters.len * @sizeOf(u32)) + @max(zCount(this.name), 1) + @max(zCount(this.query), 1); - const header = [_]u8{ - 'P', - } ++ toBytes(Int32(count)); - try writer.write(&header); - try writer.string(this.name); - try writer.string(this.query); - try writer.short(parameters.len); - for (parameters) |parameter| { - try writer.int4(parameter); - } - } - - pub const write = writeWrap(@This(), writeInternal).write; -}; - -pub const ParseComplete = [_]u8{'1'} ++ toBytes(Int32(4)); - -pub const PasswordMessage = struct { - password: Data = .{ .empty = {} }, - - pub fn deinit(this: *PasswordMessage) void { - this.password.deinit(); - } - - pub fn writeInternal( - this: *const @This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - const password = this.password.slice(); - const count: usize = @sizeOf((u32)) + password.len + 1; - const header = [_]u8{ - 'p', - } ++ toBytes(Int32(count)); - try writer.write(&header); - try writer.string(password); - } - - pub const write = writeWrap(@This(), writeInternal).write; -}; - -pub const CopyData = struct { - data: Data = .{ .empty = {} }, - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - const length = try reader.length(); - - const data = try reader.read(@intCast(length -| 5)); - this.* = .{ - .data = data, - }; - } - - pub const decode = decoderWrap(CopyData, decodeInternal).decode; - - pub fn writeInternal( - this: *const @This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - const data = this.data.slice(); - const count: u32 = @sizeOf((u32)) + data.len + 1; - const header = [_]u8{ - 'd', - } ++ toBytes(Int32(count)); - try writer.write(&header); - try writer.string(data); - } - - pub const write = writeWrap(@This(), writeInternal).write; -}; - -pub const CopyDone = [_]u8{'c'} ++ toBytes(Int32(4)); -pub const Sync = [_]u8{'S'} ++ toBytes(Int32(4)); -pub const Flush = [_]u8{'H'} ++ toBytes(Int32(4)); 
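Aside: the one-liner constants above and below all follow the same framing rule. A frontend or backend message is a single ASCII tag byte followed by a big-endian Int32 length that counts the length field itself but not the tag, which is why every body-less message (CloseComplete, BindComplete, ParseComplete, Sync, Flush, CopyDone, ...) is `tag ++ toBytes(Int32(4))`. A minimal, self-contained sketch of that rule (not part of this patch; the `frame` helper is hypothetical):

```zig
const std = @import("std");

/// Build a message frame: one tag byte plus a big-endian i32 length
/// that includes the length field itself (4) but not the tag.
fn frame(tag: u8, body_len: u32) [5]u8 {
    var out: [5]u8 = undefined;
    out[0] = tag;
    std.mem.writeInt(i32, out[1..5], @intCast(4 + body_len), .big);
    return out;
}

test "Sync ('S') and Flush ('H') are body-less five-byte frames" {
    const sync = frame('S', 0);
    const flush = frame('H', 0);
    try std.testing.expectEqualSlices(u8, &[_]u8{ 'S', 0, 0, 0, 4 }, &sync);
    try std.testing.expectEqualSlices(u8, &[_]u8{ 'H', 0, 0, 0, 4 }, &flush);
}
```

The `SSLRequest` and `StartupMessage` that follow are the two exceptions: neither carries a tag byte, and `SSLRequest` is a fixed 8-byte packet whose payload is the magic code 80877103.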
-pub const SSLRequest = toBytes(Int32(8)) ++ toBytes(Int32(80877103)); -pub const NoData = [_]u8{'n'} ++ toBytes(Int32(4)); - -pub fn writeQuery(query: []const u8, comptime Context: type, writer: NewWriter(Context)) !void { - const count: u32 = @sizeOf((u32)) + @as(u32, @intCast(query.len)) + 1; - const header = [_]u8{ - 'Q', - } ++ toBytes(Int32(count)); - try writer.write(&header); - try writer.string(query); -} -pub const SASLInitialResponse = struct { - mechanism: Data = .{ .empty = {} }, - data: Data = .{ .empty = {} }, - - pub fn deinit(this: *SASLInitialResponse) void { - this.mechanism.deinit(); - this.data.deinit(); - } - - pub fn writeInternal( - this: *const @This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - const mechanism = this.mechanism.slice(); - const data = this.data.slice(); - const count: usize = @sizeOf(u32) + mechanism.len + 1 + data.len + @sizeOf(u32); - const header = [_]u8{ - 'p', - } ++ toBytes(Int32(count)); - try writer.write(&header); - try writer.string(mechanism); - try writer.int4(@truncate(data.len)); - try writer.write(data); - } - - pub const write = writeWrap(@This(), writeInternal).write; -}; - -pub const SASLResponse = struct { - data: Data = .{ .empty = {} }, - - pub fn deinit(this: *SASLResponse) void { - this.data.deinit(); - } - - pub fn writeInternal( - this: *const @This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - const data = this.data.slice(); - const count: usize = @sizeOf(u32) + data.len; - const header = [_]u8{ - 'p', - } ++ toBytes(Int32(count)); - try writer.write(&header); - try writer.write(data); - } - - pub const write = writeWrap(@This(), writeInternal).write; -}; - -pub const StartupMessage = struct { - user: Data, - database: Data, - options: Data = Data{ .empty = {} }, - - pub fn writeInternal( - this: *const @This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - const user = this.user.slice(); - const database = this.database.slice(); - const options = this.options.slice(); - const count: usize = @sizeOf((int4)) + @sizeOf((int4)) + zFieldCount("user", user) + zFieldCount("database", database) + zFieldCount("client_encoding", "UTF8") + options.len + 1; - - const header = toBytes(Int32(@as(u32, @truncate(count)))); - try writer.write(&header); - try writer.int4(196608); - - try writer.string("user"); - if (user.len > 0) - try writer.string(user); - - try writer.string("database"); - - if (database.len == 0) { - // The database to connect to. Defaults to the user name. 
- try writer.string(user); - } else { - try writer.string(database); - } - try writer.string("client_encoding"); - try writer.string("UTF8"); - if (options.len > 0) { - try writer.write(options); - } - try writer.write(&[_]u8{0}); - } - - pub const write = writeWrap(@This(), writeInternal).write; -}; - -fn zCount(slice: []const u8) usize { - return if (slice.len > 0) slice.len + 1 else 0; -} - -fn zFieldCount(prefix: []const u8, slice: []const u8) usize { - if (slice.len > 0) { - return zCount(prefix) + zCount(slice); - } - - return zCount(prefix); -} - -pub const Execute = struct { - max_rows: int4 = 0, - p: PortalOrPreparedStatement, - - pub fn writeInternal( - this: *const @This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - try writer.write("E"); - const length = try writer.length(); - if (this.p == .portal) - try writer.string(this.p.portal) - else - try writer.write(&[_]u8{0}); - try writer.int4(this.max_rows); - try length.write(); - } - - pub const write = writeWrap(@This(), writeInternal).write; -}; - -pub const Describe = struct { - p: PortalOrPreparedStatement, - - pub fn writeInternal( - this: *const @This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - const message = this.p.slice(); - try writer.write(&[_]u8{ - 'D', - }); - const length = try writer.length(); - try writer.write(&[_]u8{ - this.p.tag(), - }); - try writer.string(message); - try length.write(); - } - - pub const write = writeWrap(@This(), writeInternal).write; -}; - -pub const NegotiateProtocolVersion = struct { - version: int4 = 0, - unrecognized_options: std.ArrayListUnmanaged(String) = .{}, - - pub fn decodeInternal( - this: *@This(), - comptime Container: type, - reader: NewReader(Container), - ) !void { - const length = try reader.length(); - bun.assert(length >= 4); - - const version = try reader.int4(); - this.* = .{ - .version = version, - }; - - const unrecognized_options_count: u32 = @intCast(@max(try reader.int4(), 0)); - try this.unrecognized_options.ensureTotalCapacity(bun.default_allocator, unrecognized_options_count); - errdefer { - for (this.unrecognized_options.items) |*option| { - option.deinit(); - } - this.unrecognized_options.deinit(bun.default_allocator); - } - for (0..unrecognized_options_count) |_| { - var option = try reader.readZ(); - if (option.slice().len == 0) break; - defer option.deinit(); - this.unrecognized_options.appendAssumeCapacity( - String.fromUTF8(option), - ); - } - } -}; - -pub const NoticeResponse = struct { - messages: std.ArrayListUnmanaged(FieldMessage) = .{}, - pub fn deinit(this: *NoticeResponse) void { - for (this.messages.items) |*message| { - message.deinit(); - } - this.messages.deinit(bun.default_allocator); - } - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - var remaining_bytes = try reader.length(); - remaining_bytes -|= 4; - - if (remaining_bytes > 0) { - this.* = .{ - .messages = try FieldMessage.decodeList(Container, reader), - }; - } - } - pub const decode = decoderWrap(NoticeResponse, decodeInternal).decode; - - pub fn toJS(this: NoticeResponse, globalObject: *JSC.JSGlobalObject) JSValue { - var b = bun.StringBuilder{}; - defer b.deinit(bun.default_allocator); - - for (this.messages.items) |msg| { - b.cap += switch (msg) { - inline else => |m| m.utf8ByteLength(), - } + 1; - } - b.allocate(bun.default_allocator) catch {}; - - for (this.messages.items) |msg| { - var str = switch (msg) { - inline else => |m| m.toUTF8(bun.default_allocator), - }; - 
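- // toUTF8 may have allocated a temporary UTF-8 copy of the message; release it once the slice has been appended to the builder.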
defer str.deinit(); - _ = b.append(str.slice()); - _ = b.append("\n"); - } - - return JSC.ZigString.init(b.allocatedSlice()[0..b.len]).toJS(globalObject); - } -}; - -pub const CopyFail = struct { - message: Data = .{ .empty = {} }, - - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - _ = try reader.int4(); - - const message = try reader.readZ(); - this.* = .{ - .message = message, - }; - } - - pub const decode = decoderWrap(CopyFail, decodeInternal).decode; - - pub fn writeInternal( - this: *@This(), - comptime Context: type, - writer: NewWriter(Context), - ) !void { - const message = this.message.slice(); - const count: u32 = @sizeOf((u32)) + message.len + 1; - const header = [_]u8{ - 'f', - } ++ toBytes(Int32(count)); - try writer.write(&header); - try writer.string(message); - } - - pub const write = writeWrap(@This(), writeInternal).write; -}; - -pub const CopyInResponse = struct { - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - _ = reader; - _ = this; - TODO(@This()); - } - - pub const decode = decoderWrap(CopyInResponse, decodeInternal).decode; -}; - -pub const CopyOutResponse = struct { - pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { - _ = reader; - _ = this; - TODO(@This()); - } - - pub const decode = decoderWrap(CopyInResponse, decodeInternal).decode; -}; - -fn TODO(comptime Type: type) !void { - bun.Output.panic("TODO: not implemented {s}", .{bun.meta.typeBaseName(@typeName(Type))}); -} diff --git a/src/sql/postgres/protocol/ArrayList.zig b/src/sql/postgres/protocol/ArrayList.zig new file mode 100644 index 0000000000..0fff3a0c0f --- /dev/null +++ b/src/sql/postgres/protocol/ArrayList.zig @@ -0,0 +1,22 @@ +array: *std.ArrayList(u8), + +pub fn offset(this: @This()) usize { + return this.array.items.len; +} + +pub fn write(this: @This(), bytes: []const u8) AnyPostgresError!void { + try this.array.appendSlice(bytes); +} + +pub fn pwrite(this: @This(), bytes: []const u8, i: usize) AnyPostgresError!void { + @memcpy(this.array.items[i..][0..bytes.len], bytes); +} + +pub const Writer = NewWriter(@This()); + +// @sortImports + +const ArrayList = @This(); +const std = @import("std"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; +const NewWriter = @import("./NewWriter.zig").NewWriter; diff --git a/src/sql/postgres/protocol/Authentication.zig b/src/sql/postgres/protocol/Authentication.zig new file mode 100644 index 0000000000..91b838d332 --- /dev/null +++ b/src/sql/postgres/protocol/Authentication.zig @@ -0,0 +1,182 @@ +pub const Authentication = union(enum) { + Ok: void, + ClearTextPassword: struct {}, + MD5Password: struct { + salt: [4]u8, + }, + KerberosV5: struct {}, + SCMCredential: struct {}, + GSS: struct {}, + GSSContinue: struct { + data: Data, + }, + SSPI: struct {}, + SASL: struct {}, + SASLContinue: struct { + data: Data, + r: []const u8, + s: []const u8, + i: []const u8, + + pub fn iterationCount(this: *const @This()) !u32 { + return try std.fmt.parseInt(u32, this.i, 0); + } + }, + SASLFinal: struct { + data: Data, + }, + Unknown: void, + + pub fn deinit(this: *@This()) void { + switch (this.*) { + .MD5Password => {}, + .SASL => {}, + .SASLContinue => { + this.SASLContinue.data.zdeinit(); + }, + .SASLFinal => { + this.SASLFinal.data.zdeinit(); + }, + else => {}, + } + } + + pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + 
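+ // The int4 that follows the length selects the authentication request type: 0 = Ok, 2 = KerberosV5, 3 = CleartextPassword, 5 = MD5Password, 7 = GSS, 8 = GSSContinue, 9 = SSPI, 10 = SASL, 11 = SASLContinue, 12 = SASLFinal.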
const message_length = try reader.length(); + + switch (try reader.int4()) { + 0 => { + if (message_length != 8) return error.InvalidMessageLength; + this.* = .{ .Ok = {} }; + }, + 2 => { + if (message_length != 8) return error.InvalidMessageLength; + this.* = .{ + .KerberosV5 = .{}, + }; + }, + 3 => { + if (message_length != 8) return error.InvalidMessageLength; + this.* = .{ + .ClearTextPassword = .{}, + }; + }, + 5 => { + if (message_length != 12) return error.InvalidMessageLength; + var salt_data = try reader.bytes(4); + defer salt_data.deinit(); + this.* = .{ + .MD5Password = .{ + .salt = salt_data.slice()[0..4].*, + }, + }; + }, + 7 => { + if (message_length != 8) return error.InvalidMessageLength; + this.* = .{ + .GSS = .{}, + }; + }, + + 8 => { + if (message_length < 9) return error.InvalidMessageLength; + const bytes = try reader.read(message_length - 8); + this.* = .{ + .GSSContinue = .{ + .data = bytes, + }, + }; + }, + 9 => { + if (message_length != 8) return error.InvalidMessageLength; + this.* = .{ + .SSPI = .{}, + }; + }, + + 10 => { + if (message_length < 9) return error.InvalidMessageLength; + try reader.skip(message_length - 8); + this.* = .{ + .SASL = .{}, + }; + }, + + 11 => { + if (message_length < 9) return error.InvalidMessageLength; + var bytes = try reader.bytes(message_length - 8); + errdefer { + bytes.deinit(); + } + + var iter = bun.strings.split(bytes.slice(), ","); + var r: ?[]const u8 = null; + var i: ?[]const u8 = null; + var s: ?[]const u8 = null; + + while (iter.next()) |item| { + if (item.len > 2) { + const key = item[0]; + const after_equals = item[2..]; + if (key == 'r') { + r = after_equals; + } else if (key == 's') { + s = after_equals; + } else if (key == 'i') { + i = after_equals; + } + } + } + + if (r == null) { + debug("Missing r", .{}); + } + + if (s == null) { + debug("Missing s", .{}); + } + + if (i == null) { + debug("Missing i", .{}); + } + + this.* = .{ + .SASLContinue = .{ + .data = bytes, + .r = r orelse return error.InvalidMessage, + .s = s orelse return error.InvalidMessage, + .i = i orelse return error.InvalidMessage, + }, + }; + }, + + 12 => { + if (message_length < 9) return error.InvalidMessageLength; + const remaining: usize = message_length - 8; + + const bytes = try reader.read(remaining); + this.* = .{ + .SASLFinal = .{ + .data = bytes, + }, + }; + }, + + else => { + this.* = .{ .Unknown = {} }; + }, + } + } + + pub const decode = DecoderWrap(Authentication, decodeInternal).decode; +}; + +const debug = bun.Output.scoped(.Postgres, true); + +// @sortImports + +const bun = @import("bun"); +const std = @import("std"); +const Data = @import("../Data.zig").Data; +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/BackendKeyData.zig b/src/sql/postgres/protocol/BackendKeyData.zig new file mode 100644 index 0000000000..7df3e20971 --- /dev/null +++ b/src/sql/postgres/protocol/BackendKeyData.zig @@ -0,0 +1,20 @@ +process_id: u32 = 0, +secret_key: u32 = 0, +pub const decode = DecoderWrap(BackendKeyData, decodeInternal).decode; + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + if (!try reader.expectInt(u32, 12)) { + return error.InvalidBackendKeyData; + } + + this.* = .{ + .process_id = @bitCast(try reader.int4()), + .secret_key = @bitCast(try reader.int4()), + }; +} + +// @sortImports + +const BackendKeyData = @This(); +const DecoderWrap = 
@import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/Close.zig b/src/sql/postgres/protocol/Close.zig new file mode 100644 index 0000000000..baac29e9e8 --- /dev/null +++ b/src/sql/postgres/protocol/Close.zig @@ -0,0 +1,39 @@ +/// Close (F) +/// Byte1('C') +/// - Identifies the message as a Close command. +/// Int32 +/// - Length of message contents in bytes, including self. +/// Byte1 +/// - 'S' to close a prepared statement; or 'P' to close a portal. +/// String +/// - The name of the prepared statement or portal to close (an empty string selects the unnamed prepared statement or portal). +pub const Close = struct { + p: PortalOrPreparedStatement, + + fn writeInternal( + this: *const @This(), + comptime Context: type, + writer: NewWriter(Context), + ) !void { + const p = this.p; + const count: u32 = @sizeOf((u32)) + 1 + p.slice().len + 1; + const header = [_]u8{ + 'C', + } ++ @byteSwap(count) ++ [_]u8{ + p.tag(), + }; + try writer.write(&header); + try writer.write(p.slice()); + try writer.write(&[_]u8{0}); + } + + pub const write = WriteWrap(@This(), writeInternal); +}; + +// @sortImports + +const NewWriter = @import("./NewWriter.zig").NewWriter; + +const PortalOrPreparedStatement = @import("./PortalOrPreparedStatement.zig").PortalOrPreparedStatement; + +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; diff --git a/src/sql/postgres/protocol/ColumnIdentifier.zig b/src/sql/postgres/protocol/ColumnIdentifier.zig new file mode 100644 index 0000000000..026a6a843e --- /dev/null +++ b/src/sql/postgres/protocol/ColumnIdentifier.zig @@ -0,0 +1,40 @@ +pub const ColumnIdentifier = union(enum) { + name: Data, + index: u32, + duplicate: void, + + pub fn init(name: Data) !@This() { + if (switch (name.slice().len) { + 1..."4294967295".len => true, + 0 => return .{ .name = .{ .empty = {} } }, + else => false, + }) might_be_int: { + // use a u64 to avoid overflow + var int: u64 = 0; + for (name.slice()) |byte| { + int = int * 10 + switch (byte) { + '0'...'9' => @as(u64, byte - '0'), + else => break :might_be_int, + }; + } + + // JSC only supports indexed property names up to 2^32 + if (int < std.math.maxInt(u32)) + return .{ .index = @intCast(int) }; + } + + return .{ .name = .{ .owned = try name.toOwned() } }; + } + + pub fn deinit(this: *@This()) void { + switch (this.*) { + .name => |*name| name.deinit(), + else => {}, + } + } +}; + +// @sortImports + +const std = @import("std"); +const Data = @import("../Data.zig").Data; diff --git a/src/sql/postgres/protocol/CommandComplete.zig b/src/sql/postgres/protocol/CommandComplete.zig new file mode 100644 index 0000000000..a9299cd1a6 --- /dev/null +++ b/src/sql/postgres/protocol/CommandComplete.zig @@ -0,0 +1,25 @@ +command_tag: Data = .{ .empty = {} }, + +pub fn deinit(this: *@This()) void { + this.command_tag.deinit(); +} + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + const length = try reader.length(); + bun.assert(length >= 4); + + const tag = try reader.readZ(); + this.* = .{ + .command_tag = tag, + }; +} + +pub const decode = DecoderWrap(CommandComplete, decodeInternal).decode; + +// @sortImports + +const CommandComplete = @This(); +const bun = @import("bun"); +const Data = @import("../Data.zig").Data; +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/CopyData.zig 
b/src/sql/postgres/protocol/CopyData.zig new file mode 100644 index 0000000000..885bb2960e --- /dev/null +++ b/src/sql/postgres/protocol/CopyData.zig @@ -0,0 +1,40 @@ +data: Data = .{ .empty = {} }, + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + const length = try reader.length(); + + const data = try reader.read(@intCast(length -| 5)); + this.* = .{ + .data = data, + }; +} + +pub const decode = DecoderWrap(CopyData, decodeInternal).decode; + +pub fn writeInternal( + this: *const @This(), + comptime Context: type, + writer: NewWriter(Context), +) !void { + const data = this.data.slice(); + const count: u32 = @sizeOf((u32)) + data.len + 1; + const header = [_]u8{ + 'd', + } ++ toBytes(Int32(count)); + try writer.write(&header); + try writer.string(data); +} + +pub const write = WriteWrap(@This(), writeInternal).write; + +// @sortImports + +const CopyData = @This(); +const std = @import("std"); +const Data = @import("../Data.zig").Data; +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const Int32 = @import("../types/int_types.zig").Int32; +const NewReader = @import("./NewReader.zig").NewReader; +const NewWriter = @import("./NewWriter.zig").NewWriter; +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; +const toBytes = std.mem.toBytes; diff --git a/src/sql/postgres/protocol/CopyFail.zig b/src/sql/postgres/protocol/CopyFail.zig new file mode 100644 index 0000000000..f006cafb76 --- /dev/null +++ b/src/sql/postgres/protocol/CopyFail.zig @@ -0,0 +1,42 @@ +message: Data = .{ .empty = {} }, + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + _ = try reader.int4(); + + const message = try reader.readZ(); + this.* = .{ + .message = message, + }; +} + +pub const decode = DecoderWrap(CopyFail, decodeInternal).decode; + +pub fn writeInternal( + this: *@This(), + comptime Context: type, + writer: NewWriter(Context), +) !void { + const message = this.message.slice(); + const count: u32 = @sizeOf((u32)) + message.len + 1; + const header = [_]u8{ + 'f', + } ++ toBytes(Int32(count)); + try writer.write(&header); + try writer.string(message); +} + +pub const write = WriteWrap(@This(), writeInternal).write; + +// @sortImports + +const CopyFail = @This(); +const std = @import("std"); +const Data = @import("../Data.zig").Data; +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; +const NewWriter = @import("./NewWriter.zig").NewWriter; +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; +const toBytes = std.mem.toBytes; + +const int_types = @import("../types/int_types.zig"); +const Int32 = int_types.Int32; diff --git a/src/sql/postgres/protocol/CopyInResponse.zig b/src/sql/postgres/protocol/CopyInResponse.zig new file mode 100644 index 0000000000..47dbdd850f --- /dev/null +++ b/src/sql/postgres/protocol/CopyInResponse.zig @@ -0,0 +1,14 @@ +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + _ = reader; + _ = this; + bun.Output.panic("TODO: not implemented {s}", .{bun.meta.typeBaseName(@typeName(@This()))}); +} + +pub const decode = DecoderWrap(CopyInResponse, decodeInternal).decode; + +// @sortImports + +const CopyInResponse = @This(); +const bun = @import("bun"); +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/CopyOutResponse.zig 
b/src/sql/postgres/protocol/CopyOutResponse.zig new file mode 100644 index 0000000000..45650a3f41 --- /dev/null +++ b/src/sql/postgres/protocol/CopyOutResponse.zig @@ -0,0 +1,14 @@ +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + _ = reader; + _ = this; + bun.Output.panic("TODO: not implemented {s}", .{bun.meta.typeBaseName(@typeName(@This()))}); +} + +pub const decode = DecoderWrap(CopyOutResponse, decodeInternal).decode; + +// @sortImports + +const CopyOutResponse = @This(); +const bun = @import("bun"); +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/DataRow.zig b/src/sql/postgres/protocol/DataRow.zig new file mode 100644 index 0000000000..125a25f1f2 --- /dev/null +++ b/src/sql/postgres/protocol/DataRow.zig @@ -0,0 +1,33 @@ +pub fn decode(context: anytype, comptime ContextType: type, reader: NewReader(ContextType), comptime forEach: fn (@TypeOf(context), index: u32, bytes: ?*Data) AnyPostgresError!bool) AnyPostgresError!void { + var remaining_bytes = try reader.length(); + remaining_bytes -|= 4; + + const remaining_fields: usize = @intCast(@max(try reader.short(), 0)); + + for (0..remaining_fields) |index| { + const byte_length = try reader.int4(); + switch (byte_length) { + 0 => { + var empty = Data.Empty; + if (!try forEach(context, @intCast(index), &empty)) break; + }, + null_int4 => { + if (!try forEach(context, @intCast(index), null)) break; + }, + else => { + var bytes = try reader.bytes(@intCast(byte_length)); + if (!try forEach(context, @intCast(index), &bytes)) break; + }, + } + } +} + +pub const null_int4 = 4294967295; + +// @sortImports + +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; + +const Data = @import("../Data.zig").Data; + +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/DecoderWrap.zig b/src/sql/postgres/protocol/DecoderWrap.zig new file mode 100644 index 0000000000..fe2b78902f --- /dev/null +++ b/src/sql/postgres/protocol/DecoderWrap.zig @@ -0,0 +1,14 @@ +pub fn DecoderWrap(comptime Container: type, comptime decodeFn: anytype) type { + return struct { + pub fn decode(this: *Container, context: anytype) AnyPostgresError!void { + const Context = @TypeOf(context); + try decodeFn(this, Context, NewReader(Context){ .wrapped = context }); + } + }; +} + +// @sortImports + +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; + +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/Describe.zig b/src/sql/postgres/protocol/Describe.zig new file mode 100644 index 0000000000..4dc9fd2728 --- /dev/null +++ b/src/sql/postgres/protocol/Describe.zig @@ -0,0 +1,28 @@ +p: PortalOrPreparedStatement, + +pub fn writeInternal( + this: *const @This(), + comptime Context: type, + writer: NewWriter(Context), +) !void { + const message = this.p.slice(); + try writer.write(&[_]u8{ + 'D', + }); + const length = try writer.length(); + try writer.write(&[_]u8{ + this.p.tag(), + }); + try writer.string(message); + try length.write(); +} + +pub const write = WriteWrap(@This(), writeInternal).write; + +// @sortImports + +const NewWriter = @import("./NewWriter.zig").NewWriter; + +const PortalOrPreparedStatement = @import("./PortalOrPreparedStatement.zig").PortalOrPreparedStatement; + +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; diff --git 
a/src/sql/postgres/protocol/ErrorResponse.zig b/src/sql/postgres/protocol/ErrorResponse.zig new file mode 100644 index 0000000000..e70d2215a1 --- /dev/null +++ b/src/sql/postgres/protocol/ErrorResponse.zig @@ -0,0 +1,159 @@ +messages: std.ArrayListUnmanaged(FieldMessage) = .{}, + +pub fn format(formatter: ErrorResponse, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + for (formatter.messages.items) |message| { + try std.fmt.format(writer, "{}\n", .{message}); + } +} + +pub fn deinit(this: *ErrorResponse) void { + for (this.messages.items) |*message| { + message.deinit(); + } + this.messages.deinit(bun.default_allocator); +} + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + var remaining_bytes = try reader.length(); + if (remaining_bytes < 4) return error.InvalidMessageLength; + remaining_bytes -|= 4; + + if (remaining_bytes > 0) { + this.* = .{ + .messages = try FieldMessage.decodeList(Container, reader), + }; + } +} + +pub const decode = DecoderWrap(ErrorResponse, decodeInternal).decode; + +pub fn toJS(this: ErrorResponse, globalObject: *JSC.JSGlobalObject) JSValue { + var b = bun.StringBuilder{}; + defer b.deinit(bun.default_allocator); + + // Pre-calculate capacity to avoid reallocations + for (this.messages.items) |*msg| { + b.cap += switch (msg.*) { + inline else => |m| m.utf8ByteLength(), + } + 1; + } + b.allocate(bun.default_allocator) catch {}; + + // Build a more structured error message + var severity: String = String.dead; + var code: String = String.dead; + var message: String = String.dead; + var detail: String = String.dead; + var hint: String = String.dead; + var position: String = String.dead; + var where: String = String.dead; + var schema: String = String.dead; + var table: String = String.dead; + var column: String = String.dead; + var datatype: String = String.dead; + var constraint: String = String.dead; + var file: String = String.dead; + var line: String = String.dead; + var routine: String = String.dead; + + for (this.messages.items) |*msg| { + switch (msg.*) { + .severity => |str| severity = str, + .code => |str| code = str, + .message => |str| message = str, + .detail => |str| detail = str, + .hint => |str| hint = str, + .position => |str| position = str, + .where => |str| where = str, + .schema => |str| schema = str, + .table => |str| table = str, + .column => |str| column = str, + .datatype => |str| datatype = str, + .constraint => |str| constraint = str, + .file => |str| file = str, + .line => |str| line = str, + .routine => |str| routine = str, + else => {}, + } + } + + var needs_newline = false; + construct_message: { + if (!message.isEmpty()) { + _ = b.appendStr(message); + needs_newline = true; + break :construct_message; + } + if (!detail.isEmpty()) { + if (needs_newline) { + _ = b.append("\n"); + } else { + _ = b.append(" "); + } + needs_newline = true; + _ = b.appendStr(detail); + } + if (!hint.isEmpty()) { + if (needs_newline) { + _ = b.append("\n"); + } else { + _ = b.append(" "); + } + needs_newline = true; + _ = b.appendStr(hint); + } + } + + const possible_fields = .{ + .{ "detail", detail, void }, + .{ "hint", hint, void }, + .{ "column", column, void }, + .{ "constraint", constraint, void }, + .{ "datatype", datatype, void }, + // in the past this was set to i32 but postgres returns a strings lets keep it compatible + .{ "errno", code, void }, + .{ "position", position, i32 }, + .{ "schema", schema, void }, + .{ "table", table, void }, + .{ "where", where, void }, 
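+ // The third element is the desired JS type: i32 values are converted with toInt32() below; void means the field is attached as a string.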
+ }; + const error_code: JSC.Error = + // https://www.postgresql.org/docs/8.1/errcodes-appendix.html + if (code.eqlComptime("42601")) + .POSTGRES_SYNTAX_ERROR + else + .POSTGRES_SERVER_ERROR; + const err = error_code.fmt(globalObject, "{s}", .{b.allocatedSlice()[0..b.len]}); + + inline for (possible_fields) |field| { + if (!field.@"1".isEmpty()) { + const value = brk: { + if (field.@"2" == i32) { + if (field.@"1".toInt32()) |val| { + break :brk JSC.JSValue.jsNumberFromInt32(val); + } + } + + break :brk field.@"1".toJS(globalObject); + }; + + err.put(globalObject, JSC.ZigString.static(field.@"0"), value); + } + } + + return err; +} + +// @sortImports + +const ErrorResponse = @This(); +const std = @import("std"); +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const FieldMessage = @import("./FieldMessage.zig").FieldMessage; +const NewReader = @import("./NewReader.zig").NewReader; + +const bun = @import("bun"); +const String = bun.String; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/protocol/Execute.zig b/src/sql/postgres/protocol/Execute.zig new file mode 100644 index 0000000000..648d39da4f --- /dev/null +++ b/src/sql/postgres/protocol/Execute.zig @@ -0,0 +1,28 @@ +max_rows: int4 = 0, +p: PortalOrPreparedStatement, + +pub fn writeInternal( + this: *const @This(), + comptime Context: type, + writer: NewWriter(Context), +) !void { + try writer.write("E"); + const length = try writer.length(); + if (this.p == .portal) + try writer.string(this.p.portal) + else + try writer.write(&[_]u8{0}); + try writer.int4(this.max_rows); + try length.write(); +} + +pub const write = WriteWrap(@This(), writeInternal).write; + +// @sortImports + +const NewWriter = @import("./NewWriter.zig").NewWriter; +const PortalOrPreparedStatement = @import("./PortalOrPreparedStatement.zig").PortalOrPreparedStatement; +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; + +const int_types = @import("../types/int_types.zig"); +const int4 = int_types.int4; diff --git a/src/sql/postgres/protocol/FieldDescription.zig b/src/sql/postgres/protocol/FieldDescription.zig new file mode 100644 index 0000000000..860176c5b3 --- /dev/null +++ b/src/sql/postgres/protocol/FieldDescription.zig @@ -0,0 +1,70 @@ +/// JavaScriptCore treats numeric property names differently than string property names. +/// so we do the work to figure out if the property name is a number ahead of time. +name_or_index: ColumnIdentifier = .{ + .name = .{ .empty = {} }, +}, +table_oid: int4 = 0, +column_index: short = 0, +type_oid: int4 = 0, +binary: bool = false, +pub fn typeTag(this: @This()) types.Tag { + return @enumFromInt(@as(short, @truncate(this.type_oid))); +} + +pub fn deinit(this: *@This()) void { + this.name_or_index.deinit(); +} + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) AnyPostgresError!void { + var name = try reader.readZ(); + errdefer { + name.deinit(); + } + + // Field name (null-terminated string) + const field_name = try ColumnIdentifier.init(name); + // Table OID (4 bytes) + // If the field can be identified as a column of a specific table, the object ID of the table; otherwise zero. + const table_oid = try reader.int4(); + + // Column attribute number (2 bytes) + // If the field can be identified as a column of a specific table, the attribute number of the column; otherwise zero. + const column_index = try reader.short(); + + // Data type OID (4 bytes) + // The object ID of the field's data type. 
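+    // (typeTag() above truncates this OID to a short to match the types.Tag enum)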
+    const type_oid = try reader.int4(); + + // Data type size (2 bytes) The data type size (see pg_type.typlen). Note that negative values denote variable-width types. + // Type modifier (4 bytes) The type modifier (see pg_attribute.atttypmod). The meaning of the modifier is type-specific. + try reader.skip(6); + + // Format code (2 bytes) + // The format code being used for the field. Currently will be zero (text) or one (binary). In a RowDescription returned from the statement variant of Describe, the format code is not yet known and will always be zero. + const binary = switch (try reader.short()) { + 0 => false, + 1 => true, + else => return error.UnknownFormatCode, + }; + this.* = .{ + .table_oid = table_oid, + .column_index = column_index, + .type_oid = type_oid, + .binary = binary, + .name_or_index = field_name, + }; +} + +pub const decode = DecoderWrap(FieldDescription, decodeInternal).decode; + +// @sortImports + +const FieldDescription = @This(); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; +const ColumnIdentifier = @import("./ColumnIdentifier.zig").ColumnIdentifier; +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; + +const types = @import("../PostgresTypes.zig"); +const int4 = types.int4; +const short = types.short; diff --git a/src/sql/postgres/protocol/FieldMessage.zig b/src/sql/postgres/protocol/FieldMessage.zig new file mode 100644 index 0000000000..d3d2c1fdbf --- /dev/null +++ b/src/sql/postgres/protocol/FieldMessage.zig @@ -0,0 +1,87 @@ +pub const FieldMessage = union(FieldType) { + severity: String, + localized_severity: String, + code: String, + message: String, + detail: String, + hint: String, + position: String, + internal_position: String, + internal: String, + where: String, + schema: String, + table: String, + column: String, + datatype: String, + constraint: String, + file: String, + line: String, + routine: String, + + pub fn format(this: FieldMessage, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + switch (this) { + inline else => |str| { + try std.fmt.format(writer, "{}", .{str}); + }, + } + } + + pub fn deinit(this: *FieldMessage) void { + switch (this.*) { + inline else => |*message| { + message.deref(); + }, + } + } + + pub fn decodeList(comptime Context: type, reader: NewReader(Context)) !std.ArrayListUnmanaged(FieldMessage) { + var messages = std.ArrayListUnmanaged(FieldMessage){}; + while (true) { + const field_int = try reader.int(u8); + if (field_int == 0) break; + const field: FieldType = @enumFromInt(field_int); + + var message = try reader.readZ(); + defer message.deinit(); + if (message.slice().len == 0) break; + + try messages.append(bun.default_allocator, FieldMessage.init(field, message.slice()) catch continue); + } + + return messages; + } + + pub fn init(tag: FieldType, message: []const u8) !FieldMessage { + return switch (tag) { + .severity => FieldMessage{ .severity = String.createUTF8(message) }, + // Ignore this one for now.
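+            // ('V' duplicates the severity in 'S' except it is never localized, so it adds nothing here.)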
+ // .localized_severity => FieldMessage{ .localized_severity = String.createUTF8(message) }, + .code => FieldMessage{ .code = String.createUTF8(message) }, + .message => FieldMessage{ .message = String.createUTF8(message) }, + .detail => FieldMessage{ .detail = String.createUTF8(message) }, + .hint => FieldMessage{ .hint = String.createUTF8(message) }, + .position => FieldMessage{ .position = String.createUTF8(message) }, + .internal_position => FieldMessage{ .internal_position = String.createUTF8(message) }, + .internal => FieldMessage{ .internal = String.createUTF8(message) }, + .where => FieldMessage{ .where = String.createUTF8(message) }, + .schema => FieldMessage{ .schema = String.createUTF8(message) }, + .table => FieldMessage{ .table = String.createUTF8(message) }, + .column => FieldMessage{ .column = String.createUTF8(message) }, + .datatype => FieldMessage{ .datatype = String.createUTF8(message) }, + .constraint => FieldMessage{ .constraint = String.createUTF8(message) }, + .file => FieldMessage{ .file = String.createUTF8(message) }, + .line => FieldMessage{ .line = String.createUTF8(message) }, + .routine => FieldMessage{ .routine = String.createUTF8(message) }, + else => error.UnknownFieldType, + }; + } +}; + +// @sortImports + +const std = @import("std"); +const FieldType = @import("./FieldType.zig").FieldType; +const NewReader = @import("./NewReader.zig").NewReader; + +const bun = @import("bun"); +const String = bun.String; diff --git a/src/sql/postgres/protocol/FieldType.zig b/src/sql/postgres/protocol/FieldType.zig new file mode 100644 index 0000000000..b5e6c860fe --- /dev/null +++ b/src/sql/postgres/protocol/FieldType.zig @@ -0,0 +1,57 @@ +pub const FieldType = enum(u8) { + /// Severity: the field contents are ERROR, FATAL, or PANIC (in an error message), or WARNING, NOTICE, DEBUG, INFO, or LOG (in a notice message), or a localized translation of one of these. Always present. + severity = 'S', + + /// Severity: the field contents are ERROR, FATAL, or PANIC (in an error message), or WARNING, NOTICE, DEBUG, INFO, or LOG (in a notice message). This is identical to the S field except that the contents are never localized. This is present only in messages generated by PostgreSQL versions 9.6 and later. + localized_severity = 'V', + + /// Code: the SQLSTATE code for the error (see Appendix A). Not localizable. Always present. + code = 'C', + + /// Message: the primary human-readable error message. This should be accurate but terse (typically one line). Always present. + message = 'M', + + /// Detail: an optional secondary error message carrying more detail about the problem. Might run to multiple lines. + detail = 'D', + + /// Hint: an optional suggestion what to do about the problem. This is intended to differ from Detail in that it offers advice (potentially inappropriate) rather than hard facts. Might run to multiple lines. + hint = 'H', + + /// Position: the field value is a decimal ASCII integer, indicating an error cursor position as an index into the original query string. The first character has index 1, and positions are measured in characters not bytes. + position = 'P', + + /// Internal position: this is defined the same as the P field, but it is used when the cursor position refers to an internally generated command rather than the one submitted by the client. The q field will always appear when this field appears. + internal_position = 'p', + + /// Internal query: the text of a failed internally-generated command. 
This could be, for example, an SQL query issued by a PL/pgSQL function. + internal = 'q', + + /// Where: an indication of the context in which the error occurred. Presently this includes a call stack traceback of active procedural language functions and internally-generated queries. The trace is one entry per line, most recent first. + where = 'W', + + /// Schema name: if the error was associated with a specific database object, the name of the schema containing that object, if any. + schema = 's', + + /// Table name: if the error was associated with a specific table, the name of the table. (Refer to the schema name field for the name of the table's schema.) + table = 't', + + /// Column name: if the error was associated with a specific table column, the name of the column. (Refer to the schema and table name fields to identify the table.) + column = 'c', + + /// Data type name: if the error was associated with a specific data type, the name of the data type. (Refer to the schema name field for the name of the data type's schema.) + datatype = 'd', + + /// Constraint name: if the error was associated with a specific constraint, the name of the constraint. Refer to fields listed above for the associated table or domain. (For this purpose, indexes are treated as constraints, even if they weren't created with constraint syntax.) + constraint = 'n', + + /// File: the file name of the source-code location where the error was reported. + file = 'F', + + /// Line: the line number of the source-code location where the error was reported. + line = 'L', + + /// Routine: the name of the source-code routine reporting the error. + routine = 'R', + + _, +}; diff --git a/src/sql/postgres/protocol/NegotiateProtocolVersion.zig b/src/sql/postgres/protocol/NegotiateProtocolVersion.zig new file mode 100644 index 0000000000..9b80f0fdd2 --- /dev/null +++ b/src/sql/postgres/protocol/NegotiateProtocolVersion.zig @@ -0,0 +1,44 @@ +version: int4 = 0, +unrecognized_options: std.ArrayListUnmanaged(String) = .{}, + +pub fn decodeInternal( + this: *@This(), + comptime Container: type, + reader: NewReader(Container), +) !void { + const length = try reader.length(); + bun.assert(length >= 4); + + const version = try reader.int4(); + this.* = .{ + .version = version, + }; + + const unrecognized_options_count: u32 = @intCast(@max(try reader.int4(), 0)); + try this.unrecognized_options.ensureTotalCapacity(bun.default_allocator, unrecognized_options_count); + errdefer { + for (this.unrecognized_options.items) |*option| { + option.deinit(); + } + this.unrecognized_options.deinit(bun.default_allocator); + } + for (0..unrecognized_options_count) |_| { + var option = try reader.readZ(); + if (option.slice().len == 0) break; + defer option.deinit(); + this.unrecognized_options.appendAssumeCapacity( + String.fromUTF8(option), + ); + } +} + +// @sortImports + +const std = @import("std"); +const NewReader = @import("./NewReader.zig").NewReader; + +const int_types = @import("../types/int_types.zig"); +const int4 = int_types.int4; + +const bun = @import("bun"); +const String = bun.String; diff --git a/src/sql/postgres/protocol/NewReader.zig b/src/sql/postgres/protocol/NewReader.zig new file mode 100644 index 0000000000..932d4d334d --- /dev/null +++ b/src/sql/postgres/protocol/NewReader.zig @@ -0,0 +1,118 @@ +pub fn NewReaderWrap( + comptime Context: type, + comptime markMessageStartFn_: (fn (ctx: Context) void), + comptime peekFn_: (fn (ctx: Context) []const u8), + comptime skipFn_: (fn (ctx: Context, count: usize) void), + comptime 
ensureCapacityFn_: (fn (ctx: Context, count: usize) bool), + comptime readFunction_: (fn (ctx: Context, count: usize) AnyPostgresError!Data), + comptime readZ_: (fn (ctx: Context) AnyPostgresError!Data), +) type { + return struct { + wrapped: Context, + const readFn = readFunction_; + const readZFn = readZ_; + const ensureCapacityFn = ensureCapacityFn_; + const skipFn = skipFn_; + const peekFn = peekFn_; + const markMessageStartFn = markMessageStartFn_; + + pub const Ctx = Context; + + pub inline fn markMessageStart(this: @This()) void { + markMessageStartFn(this.wrapped); + } + + pub inline fn read(this: @This(), count: usize) AnyPostgresError!Data { + return try readFn(this.wrapped, count); + } + + pub inline fn eatMessage(this: @This(), comptime msg_: anytype) AnyPostgresError!void { + const msg = msg_[1..]; + try this.ensureCapacity(msg.len); + + var input = try readFn(this.wrapped, msg.len); + defer input.deinit(); + if (bun.strings.eqlComptime(input.slice(), msg)) return; + return error.InvalidMessage; + } + + pub fn skip(this: @This(), count: usize) AnyPostgresError!void { + skipFn(this.wrapped, count); + } + + pub fn peek(this: @This()) []const u8 { + return peekFn(this.wrapped); + } + + pub inline fn readZ(this: @This()) AnyPostgresError!Data { + return try readZFn(this.wrapped); + } + + pub inline fn ensureCapacity(this: @This(), count: usize) AnyPostgresError!void { + if (!ensureCapacityFn(this.wrapped, count)) { + return error.ShortRead; + } + } + + pub fn int(this: @This(), comptime Int: type) !Int { + var data = try this.read(@sizeOf((Int))); + defer data.deinit(); + if (comptime Int == u8) { + return @as(Int, data.slice()[0]); + } + return @byteSwap(@as(Int, @bitCast(data.slice()[0..@sizeOf(Int)].*))); + } + + pub fn peekInt(this: @This(), comptime Int: type) ?Int { + const remain = this.peek(); + if (remain.len < @sizeOf(Int)) { + return null; + } + return @byteSwap(@as(Int, @bitCast(remain[0..@sizeOf(Int)].*))); + } + + pub fn expectInt(this: @This(), comptime Int: type, comptime value: comptime_int) !bool { + const actual = try this.int(Int); + return actual == value; + } + + pub fn int4(this: @This()) !PostgresInt32 { + return this.int(PostgresInt32); + } + + pub fn short(this: @This()) !PostgresShort { + return this.int(PostgresShort); + } + + pub fn length(this: @This()) !PostgresInt32 { + const expected = try this.int(PostgresInt32); + if (expected > -1) { + try this.ensureCapacity(@intCast(expected -| 4)); + } + + return expected; + } + + pub const bytes = read; + + pub fn String(this: @This()) !bun.String { + var result = try this.readZ(); + defer result.deinit(); + return bun.String.fromUTF8(result.slice()); + } + }; +} + +pub fn NewReader(comptime Context: type) type { + return NewReaderWrap(Context, Context.markMessageStart, Context.peek, Context.skip, Context.ensureLength, Context.read, Context.readZ); +} + +// @sortImports + +const bun = @import("bun"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; +const Data = @import("../Data.zig").Data; + +const int_types = @import("../types/int_types.zig"); +const PostgresInt32 = int_types.PostgresInt32; +const PostgresShort = int_types.PostgresShort; diff --git a/src/sql/postgres/protocol/NewWriter.zig b/src/sql/postgres/protocol/NewWriter.zig new file mode 100644 index 0000000000..6f6a800328 --- /dev/null +++ b/src/sql/postgres/protocol/NewWriter.zig @@ -0,0 +1,125 @@ +pub fn NewWriterWrap( + comptime Context: type, + comptime offsetFn_: (fn (ctx: Context) usize), + comptime 
writeFunction_: (fn (ctx: Context, bytes: []const u8) AnyPostgresError!void), + comptime pwriteFunction_: (fn (ctx: Context, bytes: []const u8, offset: usize) AnyPostgresError!void), +) type { + return struct { + wrapped: Context, + + const writeFn = writeFunction_; + const pwriteFn = pwriteFunction_; + const offsetFn = offsetFn_; + pub const Ctx = Context; + + pub const WrappedWriter = @This(); + + pub inline fn write(this: @This(), data: []const u8) AnyPostgresError!void { + try writeFn(this.wrapped, data); + } + + pub const LengthWriter = struct { + index: usize, + context: WrappedWriter, + + pub fn write(this: LengthWriter) AnyPostgresError!void { + try this.context.pwrite(&Int32(this.context.offset() - this.index), this.index); + } + + pub fn writeExcludingSelf(this: LengthWriter) AnyPostgresError!void { + try this.context.pwrite(&Int32(this.context.offset() -| (this.index + 4)), this.index); + } + }; + + pub inline fn length(this: @This()) AnyPostgresError!LengthWriter { + const i = this.offset(); + try this.int4(0); + return LengthWriter{ + .index = i, + .context = this, + }; + } + + pub inline fn offset(this: @This()) usize { + return offsetFn(this.wrapped); + } + + pub inline fn pwrite(this: @This(), data: []const u8, i: usize) AnyPostgresError!void { + try pwriteFn(this.wrapped, data, i); + } + + pub fn int4(this: @This(), value: PostgresInt32) !void { + try this.write(std.mem.asBytes(&@byteSwap(value))); + } + + pub fn int8(this: @This(), value: PostgresInt64) !void { + try this.write(std.mem.asBytes(&@byteSwap(value))); + } + + pub fn sint4(this: @This(), value: i32) !void { + try this.write(std.mem.asBytes(&@byteSwap(value))); + } + + pub fn @"f64"(this: @This(), value: f64) !void { + try this.write(std.mem.asBytes(&@byteSwap(@as(u64, @bitCast(value))))); + } + + pub fn @"f32"(this: @This(), value: f32) !void { + try this.write(std.mem.asBytes(&@byteSwap(@as(u32, @bitCast(value))))); + } + + pub fn short(this: @This(), value: anytype) !void { + try this.write(std.mem.asBytes(&@byteSwap(@as(u16, @intCast(value))))); + } + + pub fn string(this: @This(), value: []const u8) !void { + try this.write(value); + if (value.len == 0 or value[value.len - 1] != 0) + try this.write(&[_]u8{0}); + } + + pub fn bytes(this: @This(), value: []const u8) !void { + try this.write(value); + if (value.len == 0 or value[value.len - 1] != 0) + try this.write(&[_]u8{0}); + } + + pub fn @"bool"(this: @This(), value: bool) !void { + try this.write(if (value) "t" else "f"); + } + + pub fn @"null"(this: @This()) !void { + try this.int4(std.math.maxInt(PostgresInt32)); + } + + pub fn String(this: @This(), value: bun.String) !void { + if (value.isEmpty()) { + try this.write(&[_]u8{0}); + return; + } + + var sliced = value.toUTF8(bun.default_allocator); + defer sliced.deinit(); + const slice = sliced.slice(); + + try this.write(slice); + if (slice.len == 0 or slice[slice.len - 1] != 0) + try this.write(&[_]u8{0}); + } + }; +} + +pub fn NewWriter(comptime Context: type) type { + return NewWriterWrap(Context, Context.offset, Context.write, Context.pwrite); +} + +// @sortImports + +const bun = @import("bun"); +const std = @import("std"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; + +const int_types = @import("../types/int_types.zig"); +const Int32 = int_types.Int32; +const PostgresInt32 = int_types.PostgresInt32; +const PostgresInt64 = int_types.PostgresInt64; diff --git a/src/sql/postgres/protocol/NoticeResponse.zig b/src/sql/postgres/protocol/NoticeResponse.zig new file mode 
100644 index 0000000000..1e84eef072 --- /dev/null +++ b/src/sql/postgres/protocol/NoticeResponse.zig @@ -0,0 +1,53 @@ +messages: std.ArrayListUnmanaged(FieldMessage) = .{}, +pub fn deinit(this: *NoticeResponse) void { + for (this.messages.items) |*message| { + message.deinit(); + } + this.messages.deinit(bun.default_allocator); +} +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + var remaining_bytes = try reader.length(); + remaining_bytes -|= 4; + + if (remaining_bytes > 0) { + this.* = .{ + .messages = try FieldMessage.decodeList(Container, reader), + }; + } +} +pub const decode = DecoderWrap(NoticeResponse, decodeInternal).decode; + +pub fn toJS(this: NoticeResponse, globalObject: *JSC.JSGlobalObject) JSValue { + var b = bun.StringBuilder{}; + defer b.deinit(bun.default_allocator); + + for (this.messages.items) |msg| { + b.cap += switch (msg) { + inline else => |m| m.utf8ByteLength(), + } + 1; + } + b.allocate(bun.default_allocator) catch {}; + + for (this.messages.items) |msg| { + var str = switch (msg) { + inline else => |m| m.toUTF8(bun.default_allocator), + }; + defer str.deinit(); + _ = b.append(str.slice()); + _ = b.append("\n"); + } + + return JSC.ZigString.init(b.allocatedSlice()[0..b.len]).toJS(globalObject); +} + +// @sortImports + +const NoticeResponse = @This(); +const bun = @import("bun"); +const std = @import("std"); +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const FieldMessage = @import("./FieldMessage.zig").FieldMessage; +const NewReader = @import("./NewReader.zig").NewReader; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/protocol/NotificationResponse.zig b/src/sql/postgres/protocol/NotificationResponse.zig new file mode 100644 index 0000000000..936490602d --- /dev/null +++ b/src/sql/postgres/protocol/NotificationResponse.zig @@ -0,0 +1,31 @@ +pid: int4 = 0, +channel: bun.ByteList = .{}, +payload: bun.ByteList = .{}, + +pub fn deinit(this: *@This()) void { + this.channel.deinitWithAllocator(bun.default_allocator); + this.payload.deinitWithAllocator(bun.default_allocator); +} + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + const length = try reader.length(); + bun.assert(length >= 4); + + this.* = .{ + .pid = try reader.int4(), + .channel = (try reader.readZ()).toOwned(), + .payload = (try reader.readZ()).toOwned(), + }; +} + +pub const decode = DecoderWrap(NotificationResponse, decodeInternal).decode; + +// @sortImports + +const NotificationResponse = @This(); +const bun = @import("bun"); +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; + +const types = @import("../PostgresTypes.zig"); +const int4 = types.int4; diff --git a/src/sql/postgres/protocol/ParameterDescription.zig b/src/sql/postgres/protocol/ParameterDescription.zig new file mode 100644 index 0000000000..8be2737fd6 --- /dev/null +++ b/src/sql/postgres/protocol/ParameterDescription.zig @@ -0,0 +1,37 @@ +parameters: []int4 = &[_]int4{}, + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + var remaining_bytes = try reader.length(); + remaining_bytes -|= 4; + + const count = try reader.short(); + const parameters = try bun.default_allocator.alloc(int4, @intCast(@max(count, 0))); + + var data = try reader.read(@as(usize, @intCast(@max(count, 0))) * @sizeOf((int4))); + defer data.deinit(); + const input_params: []align(1) 
const int4 = toInt32Slice(int4, data.slice()); + for (input_params, parameters) |src, *dest| { + dest.* = @byteSwap(src); + } + + this.* = .{ + .parameters = parameters, + }; +} + +pub const decode = DecoderWrap(ParameterDescription, decodeInternal).decode; + +// workaround for zig compiler TODO +fn toInt32Slice(comptime Int: type, slice: []const u8) []align(1) const Int { + return @as([*]align(1) const Int, @ptrCast(slice.ptr))[0 .. slice.len / @sizeOf((Int))]; +} + +// @sortImports + +const ParameterDescription = @This(); +const bun = @import("bun"); +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; + +const types = @import("../PostgresTypes.zig"); +const int4 = types.int4; diff --git a/src/sql/postgres/protocol/ParameterStatus.zig b/src/sql/postgres/protocol/ParameterStatus.zig new file mode 100644 index 0000000000..9575c0302d --- /dev/null +++ b/src/sql/postgres/protocol/ParameterStatus.zig @@ -0,0 +1,27 @@ +name: Data = .{ .empty = {} }, +value: Data = .{ .empty = {} }, + +pub fn deinit(this: *@This()) void { + this.name.deinit(); + this.value.deinit(); +} + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + const length = try reader.length(); + bun.assert(length >= 4); + + this.* = .{ + .name = try reader.readZ(), + .value = try reader.readZ(), + }; +} + +pub const decode = DecoderWrap(ParameterStatus, decodeInternal).decode; + +// @sortImports + +const ParameterStatus = @This(); +const bun = @import("bun"); +const Data = @import("../Data.zig").Data; +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/Parse.zig b/src/sql/postgres/protocol/Parse.zig new file mode 100644 index 0000000000..af14f63461 --- /dev/null +++ b/src/sql/postgres/protocol/Parse.zig @@ -0,0 +1,43 @@ +name: []const u8 = "", +query: []const u8 = "", +params: []const int4 = &.{}, + +pub fn deinit(this: *Parse) void { + _ = this; +} + +pub fn writeInternal( + this: *const @This(), + comptime Context: type, + writer: NewWriter(Context), +) !void { + const parameters = this.params; + const count: usize = @sizeOf((u32)) + @sizeOf(u16) + (parameters.len * @sizeOf(u32)) + @max(zCount(this.name), 1) + @max(zCount(this.query), 1); + const header = [_]u8{ + 'P', + } ++ toBytes(Int32(count)); + try writer.write(&header); + try writer.string(this.name); + try writer.string(this.query); + try writer.short(parameters.len); + for (parameters) |parameter| { + try writer.int4(parameter); + } +} + +pub const write = WriteWrap(@This(), writeInternal).write; + +// @sortImports + +const Parse = @This(); +const std = @import("std"); +const NewWriter = @import("./NewWriter.zig").NewWriter; +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; +const toBytes = std.mem.toBytes; + +const types = @import("../PostgresTypes.zig"); +const Int32 = types.Int32; +const int4 = types.int4; + +const zHelpers = @import("./zHelpers.zig"); +const zCount = zHelpers.zCount; diff --git a/src/sql/postgres/protocol/PasswordMessage.zig b/src/sql/postgres/protocol/PasswordMessage.zig new file mode 100644 index 0000000000..222e37b7da --- /dev/null +++ b/src/sql/postgres/protocol/PasswordMessage.zig @@ -0,0 +1,31 @@ +password: Data = .{ .empty = {} }, + +pub fn deinit(this: *PasswordMessage) void { + this.password.deinit(); +} + +pub fn writeInternal( + this: *const @This(), + comptime Context: type, + writer: NewWriter(Context), +) 
!void { + const password = this.password.slice(); + const count: usize = @sizeOf((u32)) + password.len + 1; + const header = [_]u8{ + 'p', + } ++ toBytes(Int32(count)); + try writer.write(&header); + try writer.string(password); +} + +pub const write = WriteWrap(@This(), writeInternal).write; + +// @sortImports + +const PasswordMessage = @This(); +const std = @import("std"); +const Data = @import("../Data.zig").Data; +const Int32 = @import("../types/int_types.zig").Int32; +const NewWriter = @import("./NewWriter.zig").NewWriter; +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; +const toBytes = std.mem.toBytes; diff --git a/src/sql/postgres/protocol/PortalOrPreparedStatement.zig b/src/sql/postgres/protocol/PortalOrPreparedStatement.zig new file mode 100644 index 0000000000..575f5a07bd --- /dev/null +++ b/src/sql/postgres/protocol/PortalOrPreparedStatement.zig @@ -0,0 +1,18 @@ +pub const PortalOrPreparedStatement = union(enum) { + portal: []const u8, + prepared_statement: []const u8, + + pub fn slice(this: @This()) []const u8 { + return switch (this) { + .portal => this.portal, + .prepared_statement => this.prepared_statement, + }; + } + + pub fn tag(this: @This()) u8 { + return switch (this) { + .portal => 'P', + .prepared_statement => 'S', + }; + } +}; diff --git a/src/sql/postgres/protocol/ReadyForQuery.zig b/src/sql/postgres/protocol/ReadyForQuery.zig new file mode 100644 index 0000000000..baee6bea3b --- /dev/null +++ b/src/sql/postgres/protocol/ReadyForQuery.zig @@ -0,0 +1,18 @@ +const ReadyForQuery = @This(); +status: TransactionStatusIndicator = .I, +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + const length = try reader.length(); + bun.assert(length >= 4); + + const status = try reader.int(u8); + this.* = .{ + .status = @enumFromInt(status), + }; +} + +pub const decode = DecoderWrap(ReadyForQuery, decodeInternal).decode; + +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; +const TransactionStatusIndicator = @import("./TransactionStatusIndicator.zig").TransactionStatusIndicator; +const bun = @import("bun"); diff --git a/src/sql/postgres/protocol/RowDescription.zig b/src/sql/postgres/protocol/RowDescription.zig new file mode 100644 index 0000000000..e3068d4aee --- /dev/null +++ b/src/sql/postgres/protocol/RowDescription.zig @@ -0,0 +1,44 @@ +fields: []FieldDescription = &[_]FieldDescription{}, +pub fn deinit(this: *@This()) void { + for (this.fields) |*field| { + field.deinit(); + } + + bun.default_allocator.free(this.fields); +} + +pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) !void { + var remaining_bytes = try reader.length(); + remaining_bytes -|= 4; + + const field_count: usize = @intCast(@max(try reader.short(), 0)); + var fields = try bun.default_allocator.alloc( + FieldDescription, + field_count, + ); + var remaining = fields; + errdefer { + for (fields[0 .. 
field_count - remaining.len]) |*field| { + field.deinit(); + } + + bun.default_allocator.free(fields); + } + while (remaining.len > 0) { + try remaining[0].decodeInternal(Container, reader); + remaining = remaining[1..]; + } + this.* = .{ + .fields = fields, + }; +} + +pub const decode = DecoderWrap(RowDescription, decodeInternal).decode; + +// @sortImports + +const FieldDescription = @import("./FieldDescription.zig"); +const RowDescription = @This(); +const bun = @import("bun"); +const DecoderWrap = @import("./DecoderWrap.zig").DecoderWrap; +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/SASLInitialResponse.zig b/src/sql/postgres/protocol/SASLInitialResponse.zig new file mode 100644 index 0000000000..8c5ee5cf14 --- /dev/null +++ b/src/sql/postgres/protocol/SASLInitialResponse.zig @@ -0,0 +1,36 @@ +mechanism: Data = .{ .empty = {} }, +data: Data = .{ .empty = {} }, + +pub fn deinit(this: *SASLInitialResponse) void { + this.mechanism.deinit(); + this.data.deinit(); +} + +pub fn writeInternal( + this: *const @This(), + comptime Context: type, + writer: NewWriter(Context), +) !void { + const mechanism = this.mechanism.slice(); + const data = this.data.slice(); + const count: usize = @sizeOf(u32) + mechanism.len + 1 + data.len + @sizeOf(u32); + const header = [_]u8{ + 'p', + } ++ toBytes(Int32(count)); + try writer.write(&header); + try writer.string(mechanism); + try writer.int4(@truncate(data.len)); + try writer.write(data); +} + +pub const write = WriteWrap(@This(), writeInternal).write; + +// @sortImports + +const SASLInitialResponse = @This(); +const std = @import("std"); +const Data = @import("../Data.zig").Data; +const Int32 = @import("../types/int_types.zig").Int32; +const NewWriter = @import("./NewWriter.zig").NewWriter; +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; +const toBytes = std.mem.toBytes; diff --git a/src/sql/postgres/protocol/SASLResponse.zig b/src/sql/postgres/protocol/SASLResponse.zig new file mode 100644 index 0000000000..314fabd9e2 --- /dev/null +++ b/src/sql/postgres/protocol/SASLResponse.zig @@ -0,0 +1,31 @@ +data: Data = .{ .empty = {} }, + +pub fn deinit(this: *SASLResponse) void { + this.data.deinit(); +} + +pub fn writeInternal( + this: *const @This(), + comptime Context: type, + writer: NewWriter(Context), +) !void { + const data = this.data.slice(); + const count: usize = @sizeOf(u32) + data.len; + const header = [_]u8{ + 'p', + } ++ toBytes(Int32(count)); + try writer.write(&header); + try writer.write(data); +} + +pub const write = WriteWrap(@This(), writeInternal).write; + +// @sortImports + +const SASLResponse = @This(); +const std = @import("std"); +const Data = @import("../Data.zig").Data; +const Int32 = @import("../types/int_types.zig").Int32; +const NewWriter = @import("./NewWriter.zig").NewWriter; +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; +const toBytes = std.mem.toBytes; diff --git a/src/sql/postgres/protocol/StackReader.zig b/src/sql/postgres/protocol/StackReader.zig new file mode 100644 index 0000000000..85fb93b5a9 --- /dev/null +++ b/src/sql/postgres/protocol/StackReader.zig @@ -0,0 +1,66 @@ +buffer: []const u8 = "", +offset: *usize, +message_start: *usize, + +pub fn markMessageStart(this: @This()) void { + this.message_start.* = this.offset.*; +} + +pub fn ensureLength(this: @This(), length: usize) bool { + return this.buffer.len >= (this.offset.* + length); +} + +pub fn init(buffer: []const u8, offset: *usize, message_start: *usize) NewReader(StackReader) { + return .{ + 
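+        // Package the cursor state behind the generic NewReader interface; the functions below advance `offset` through `buffer` in place.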
.wrapped = .{ + .buffer = buffer, + .offset = offset, + .message_start = message_start, + }, + }; +} + +pub fn peek(this: StackReader) []const u8 { + return this.buffer[this.offset.*..]; +} +pub fn skip(this: StackReader, count: usize) void { + if (this.offset.* + count > this.buffer.len) { + this.offset.* = this.buffer.len; + return; + } + + this.offset.* += count; +} +pub fn ensureCapacity(this: StackReader, count: usize) bool { + return this.buffer.len >= (this.offset.* + count); +} +pub fn read(this: StackReader, count: usize) AnyPostgresError!Data { + const offset = this.offset.*; + if (!this.ensureCapacity(count)) { + return error.ShortRead; + } + + this.skip(count); + return Data{ + .temporary = this.buffer[offset..this.offset.*], + }; +} +pub fn readZ(this: StackReader) AnyPostgresError!Data { + const remaining = this.peek(); + if (bun.strings.indexOfChar(remaining, 0)) |zero| { + this.skip(zero + 1); + return Data{ + .temporary = remaining[0..zero], + }; + } + + return error.ShortRead; +} + +// @sortImports + +const StackReader = @This(); +const bun = @import("bun"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; +const Data = @import("../Data.zig").Data; +const NewReader = @import("./NewReader.zig").NewReader; diff --git a/src/sql/postgres/protocol/StartupMessage.zig b/src/sql/postgres/protocol/StartupMessage.zig new file mode 100644 index 0000000000..d52f65a878 --- /dev/null +++ b/src/sql/postgres/protocol/StartupMessage.zig @@ -0,0 +1,52 @@ +user: Data, +database: Data, +options: Data = Data{ .empty = {} }, + +pub fn writeInternal( + this: *const @This(), + comptime Context: type, + writer: NewWriter(Context), +) !void { + const user = this.user.slice(); + const database = this.database.slice(); + const options = this.options.slice(); + const count: usize = @sizeOf((int4)) + @sizeOf((int4)) + zFieldCount("user", user) + zFieldCount("database", database) + zFieldCount("client_encoding", "UTF8") + options.len + 1; + + const header = toBytes(Int32(@as(u32, @truncate(count)))); + try writer.write(&header); + try writer.int4(196608); + + try writer.string("user"); + if (user.len > 0) + try writer.string(user); + + try writer.string("database"); + + if (database.len == 0) { + // The database to connect to. Defaults to the user name. 
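+        // (this mirrors libpq, where dbname defaults to the user name)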
+ try writer.string(user); + } else { + try writer.string(database); + } + try writer.string("client_encoding"); + try writer.string("UTF8"); + if (options.len > 0) { + try writer.write(options); + } + try writer.write(&[_]u8{0}); +} + +pub const write = WriteWrap(@This(), writeInternal).write; + +// @sortImports + +const std = @import("std"); +const Data = @import("../Data.zig").Data; +const NewWriter = @import("./NewWriter.zig").NewWriter; +const WriteWrap = @import("./WriteWrap.zig").WriteWrap; +const zFieldCount = @import("./zHelpers.zig").zFieldCount; +const toBytes = std.mem.toBytes; + +const int_types = @import("../types/int_types.zig"); +const Int32 = int_types.Int32; +const int4 = int_types.int4; diff --git a/src/sql/postgres/protocol/TransactionStatusIndicator.zig b/src/sql/postgres/protocol/TransactionStatusIndicator.zig new file mode 100644 index 0000000000..9650d394f1 --- /dev/null +++ b/src/sql/postgres/protocol/TransactionStatusIndicator.zig @@ -0,0 +1,12 @@ +pub const TransactionStatusIndicator = enum(u8) { + /// if idle (not in a transaction block) + I = 'I', + + /// if in a transaction block + T = 'T', + + /// if in a failed transaction block + E = 'E', + + _, +}; diff --git a/src/sql/postgres/protocol/WriteWrap.zig b/src/sql/postgres/protocol/WriteWrap.zig new file mode 100644 index 0000000000..0fc4470b69 --- /dev/null +++ b/src/sql/postgres/protocol/WriteWrap.zig @@ -0,0 +1,14 @@ +pub fn WriteWrap(comptime Container: type, comptime writeFn: anytype) type { + return struct { + pub fn write(this: *Container, context: anytype) AnyPostgresError!void { + const Context = @TypeOf(context); + try writeFn(this, Context, NewWriter(Context){ .wrapped = context }); + } + }; +} + +// @sortImports + +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; + +const NewWriter = @import("./NewWriter.zig").NewWriter; diff --git a/src/sql/postgres/protocol/zHelpers.zig b/src/sql/postgres/protocol/zHelpers.zig new file mode 100644 index 0000000000..9e28f9d0d1 --- /dev/null +++ b/src/sql/postgres/protocol/zHelpers.zig @@ -0,0 +1,11 @@ +pub fn zCount(slice: []const u8) usize { + return if (slice.len > 0) slice.len + 1 else 0; +} + +pub fn zFieldCount(prefix: []const u8, slice: []const u8) usize { + if (slice.len > 0) { + return zCount(prefix) + zCount(slice); + } + + return zCount(prefix); +} diff --git a/src/sql/postgres/types/PostgresString.zig b/src/sql/postgres/types/PostgresString.zig new file mode 100644 index 0000000000..f2e4cb4292 --- /dev/null +++ b/src/sql/postgres/types/PostgresString.zig @@ -0,0 +1,52 @@ +pub const to = 25; +pub const from = [_]short{1002}; + +pub fn toJSWithType( + globalThis: *JSC.JSGlobalObject, + comptime Type: type, + value: Type, +) AnyPostgresError!JSValue { + switch (comptime Type) { + [:0]u8, []u8, []const u8, [:0]const u8 => { + var str = bun.String.fromUTF8(value); + defer str.deinit(); + return str.toJS(globalThis); + }, + + bun.String => { + return value.toJS(globalThis); + }, + + *Data => { + var str = bun.String.fromUTF8(value.slice()); + defer str.deinit(); + defer value.deinit(); + return str.toJS(globalThis); + }, + + else => { + @compileError("unsupported type " ++ @typeName(Type)); + }, + } +} + +pub fn toJS( + globalThis: *JSC.JSGlobalObject, + value: anytype, +) !JSValue { + var str = try toJSWithType(globalThis, @TypeOf(value), value); + defer str.deinit(); + return str.toJS(globalThis); +} + +// @sortImports + +const bun = @import("bun"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; 
+const Data = @import("../Data.zig").Data; + +const int_types = @import("./int_types.zig"); +const short = int_types.short; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/postgres_types.zig b/src/sql/postgres/types/Tag.zig similarity index 71% rename from src/sql/postgres/postgres_types.zig rename to src/sql/postgres/types/Tag.zig index 7ab4dae90c..7c77c5a390 100644 --- a/src/sql/postgres/postgres_types.zig +++ b/src/sql/postgres/types/Tag.zig @@ -1,14 +1,3 @@ -const std = @import("std"); -const bun = @import("bun"); -const postgres = bun.api.Postgres; -const Data = postgres.Data; -const String = bun.String; -const JSValue = JSC.JSValue; -const JSC = bun.JSC; -const short = postgres.short; -const int4 = postgres.int4; -const AnyPostgresError = postgres.AnyPostgresError; - // select b.typname, b.oid, b.typarray // from pg_catalog.pg_type a // left join pg_catalog.pg_type b on b.oid = a.typelem @@ -353,10 +342,9 @@ pub const Tag = enum(short) { return .int8; } - if (tag.isArrayLike() and value.getLength(globalObject) > 0) { - return Tag.fromJS(globalObject, value.getIndex(globalObject, 0)); + if (tag.isArrayLike() and try value.getLength(globalObject) > 0) { + return Tag.fromJS(globalObject, try value.getIndex(globalObject, 0)); } - if (globalObject.hasException()) return error.JSError; // Ban these types: if (tag == .NumberObject) { @@ -403,153 +391,21 @@ pub const Tag = enum(short) { } }; -pub const string = struct { - pub const to = 25; - pub const from = [_]short{1002}; +const @"bool" = @import("./bool.zig"); - pub fn toJSWithType( - globalThis: *JSC.JSGlobalObject, - comptime Type: type, - value: Type, - ) AnyPostgresError!JSValue { - switch (comptime Type) { - [:0]u8, []u8, []const u8, [:0]const u8 => { - var str = String.fromUTF8(value); - defer str.deinit(); - return str.toJS(globalThis); - }, +// @sortImports - bun.String => { - return value.toJS(globalThis); - }, +const bun = @import("bun"); +const bytea = @import("./bytea.zig"); +const date = @import("./date.zig"); +const json = @import("./json.zig"); +const numeric = @import("./numeric.zig"); +const std = @import("std"); +const string = @import("./PostgresString.zig"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; - *Data => { - var str = String.fromUTF8(value.slice()); - defer str.deinit(); - defer value.deinit(); - return str.toJS(globalThis); - }, +const int_types = @import("./int_types.zig"); +const short = int_types.short; - else => { - @compileError("unsupported type " ++ @typeName(Type)); - }, - } - } - - pub fn toJS( - globalThis: *JSC.JSGlobalObject, - value: anytype, - ) !JSValue { - var str = try toJSWithType(globalThis, @TypeOf(value), value); - defer str.deinit(); - return str.toJS(globalThis); - } -}; - -pub const numeric = struct { - pub const to = 0; - pub const from = [_]short{ 21, 23, 26, 700, 701 }; - - pub fn toJS( - _: *JSC.JSGlobalObject, - value: anytype, - ) AnyPostgresError!JSValue { - return JSValue.jsNumber(value); - } -}; - -pub const json = struct { - pub const to = 114; - pub const from = [_]short{ 114, 3802 }; - - pub fn toJS( - globalObject: *JSC.JSGlobalObject, - value: *Data, - ) AnyPostgresError!JSValue { - defer value.deinit(); - var str = bun.String.fromUTF8(value.slice()); - defer str.deref(); - const parse_result = JSValue.parse(str.toJS(globalObject), globalObject); - if (parse_result.AnyPostgresError()) { - return globalObject.throwValue(parse_result); - } - - return parse_result; - } -}; - -pub const @"bool" = struct { 
- pub const to = 16; - pub const from = [_]short{16}; - - pub fn toJS( - _: *JSC.JSGlobalObject, - value: bool, - ) AnyPostgresError!JSValue { - return JSValue.jsBoolean(value); - } -}; - -pub const date = struct { - pub const to = 1184; - pub const from = [_]short{ 1082, 1114, 1184 }; - - // Postgres stores timestamp and timestampz as microseconds since 2000-01-01 - // This is a signed 64-bit integer. - const POSTGRES_EPOCH_DATE = 946684800000; - - pub fn fromBinary(bytes: []const u8) f64 { - const microseconds = std.mem.readInt(i64, bytes[0..8], .big); - const double_microseconds: f64 = @floatFromInt(microseconds); - return (double_microseconds / std.time.us_per_ms) + POSTGRES_EPOCH_DATE; - } - - pub fn fromJS(globalObject: *JSC.JSGlobalObject, value: JSValue) i64 { - const double_value = if (value.isDate()) - value.getUnixTimestamp() - else if (value.isNumber()) - value.asNumber() - else if (value.isString()) brk: { - var str = value.toBunString(globalObject) catch @panic("unreachable"); - defer str.deref(); - break :brk str.parseDate(globalObject); - } else return 0; - - const unix_timestamp: i64 = @intFromFloat(double_value); - return (unix_timestamp - POSTGRES_EPOCH_DATE) * std.time.us_per_ms; - } - - pub fn toJS( - globalObject: *JSC.JSGlobalObject, - value: anytype, - ) JSValue { - switch (@TypeOf(value)) { - i64 => { - // Convert from Postgres timestamp (μs since 2000-01-01) to Unix timestamp (ms) - const ms = @divFloor(value, std.time.us_per_ms) + POSTGRES_EPOCH_DATE; - return JSValue.fromDateNumber(globalObject, @floatFromInt(ms)); - }, - *Data => { - defer value.deinit(); - return JSValue.fromDateString(globalObject, value.sliceZ().ptr); - }, - else => @compileError("unsupported type " ++ @typeName(@TypeOf(value))), - } - } -}; - -pub const bytea = struct { - pub const to = 17; - pub const from = [_]short{17}; - - pub fn toJS( - globalObject: *JSC.JSGlobalObject, - value: *Data, - ) AnyPostgresError!JSValue { - defer value.deinit(); - - // var slice = value.slice()[@min(1, value.len)..]; - // _ = slice; - return JSValue.createBuffer(globalObject, value.slice(), null); - } -}; +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/types/bool.zig b/src/sql/postgres/types/bool.zig new file mode 100644 index 0000000000..0a00d07084 --- /dev/null +++ b/src/sql/postgres/types/bool.zig @@ -0,0 +1,20 @@ +pub const to = 16; +pub const from = [_]short{16}; + +pub fn toJS( + _: *JSC.JSGlobalObject, + value: bool, +) AnyPostgresError!JSValue { + return JSValue.jsBoolean(value); +} + +// @sortImports + +const bun = @import("bun"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; + +const int_types = @import("./int_types.zig"); +const short = int_types.short; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/types/bytea.zig b/src/sql/postgres/types/bytea.zig new file mode 100644 index 0000000000..dec468e524 --- /dev/null +++ b/src/sql/postgres/types/bytea.zig @@ -0,0 +1,25 @@ +pub const to = 17; +pub const from = [_]short{17}; + +pub fn toJS( + globalObject: *JSC.JSGlobalObject, + value: *Data, +) AnyPostgresError!JSValue { + defer value.deinit(); + + // var slice = value.slice()[@min(1, value.len)..]; + // _ = slice; + return JSValue.createBuffer(globalObject, value.slice(), null); +} + +// @sortImports + +const bun = @import("bun"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; +const Data = @import("../Data.zig").Data; + +const int_types = @import("./int_types.zig"); 
+const short = int_types.short; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/types/date.zig b/src/sql/postgres/types/date.zig new file mode 100644 index 0000000000..cdec908240 --- /dev/null +++ b/src/sql/postgres/types/date.zig @@ -0,0 +1,57 @@ +pub const to = 1184; +pub const from = [_]short{ 1082, 1114, 1184 }; + +// Postgres stores timestamp and timestamptz as microseconds since 2000-01-01 +// This is a signed 64-bit integer. +const POSTGRES_EPOCH_DATE = 946684800000; + +pub fn fromBinary(bytes: []const u8) f64 { + const microseconds = std.mem.readInt(i64, bytes[0..8], .big); + const double_microseconds: f64 = @floatFromInt(microseconds); + return (double_microseconds / std.time.us_per_ms) + POSTGRES_EPOCH_DATE; +} + +pub fn fromJS(globalObject: *JSC.JSGlobalObject, value: JSValue) i64 { + const double_value = if (value.isDate()) + value.getUnixTimestamp() + else if (value.isNumber()) + value.asNumber() + else if (value.isString()) brk: { + var str = value.toBunString(globalObject) catch @panic("unreachable"); + defer str.deref(); + break :brk str.parseDate(globalObject); + } else return 0; + + const unix_timestamp: i64 = @intFromFloat(double_value); + return (unix_timestamp - POSTGRES_EPOCH_DATE) * std.time.us_per_ms; +} + +pub fn toJS( + globalObject: *JSC.JSGlobalObject, + value: anytype, +) JSValue { + switch (@TypeOf(value)) { + i64 => { + // Convert from Postgres timestamp (μs since 2000-01-01) to Unix timestamp (ms) + const ms = @divFloor(value, std.time.us_per_ms) + POSTGRES_EPOCH_DATE; + return JSValue.fromDateNumber(globalObject, @floatFromInt(ms)); + }, + *Data => { + defer value.deinit(); + return JSValue.fromDateString(globalObject, value.sliceZ().ptr); + }, + else => @compileError("unsupported type " ++ @typeName(@TypeOf(value))), + } +} + +// @sortImports + +const bun = @import("bun"); +const std = @import("std"); +const Data = @import("../Data.zig").Data; + +const int_types = @import("./int_types.zig"); +const short = int_types.short; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/sql/postgres/types/int_types.zig b/src/sql/postgres/types/int_types.zig new file mode 100644 index 0000000000..5489a5309d --- /dev/null +++ b/src/sql/postgres/types/int_types.zig @@ -0,0 +1,10 @@ +pub const int4 = u32; +pub const PostgresInt32 = int4; +pub const int8 = i64; +pub const PostgresInt64 = int8; +pub const short = u16; +pub const PostgresShort = u16; + +pub fn Int32(value: anytype) [4]u8 { + return @bitCast(@byteSwap(@as(int4, @intCast(value)))); +} diff --git a/src/sql/postgres/types/json.zig b/src/sql/postgres/types/json.zig new file mode 100644 index 0000000000..0aaa37c173 --- /dev/null +++ b/src/sql/postgres/types/json.zig @@ -0,0 +1,29 @@ +pub const to = 114; +pub const from = [_]short{ 114, 3802 }; + +pub fn toJS( + globalObject: *JSC.JSGlobalObject, + value: *Data, +) AnyPostgresError!JSValue { + defer value.deinit(); + var str = bun.String.fromUTF8(value.slice()); + defer str.deref(); + const parse_result = JSValue.parse(str.toJS(globalObject), globalObject); + if (parse_result.isAnyError()) { + return globalObject.throwValue(parse_result); + } + + return parse_result; +} + +// @sortImports + +const bun = @import("bun"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; +const Data = @import("../Data.zig").Data; + +const int_types = @import("./int_types.zig"); +const short = int_types.short; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git
a/src/sql/postgres/types/numeric.zig b/src/sql/postgres/types/numeric.zig new file mode 100644 index 0000000000..01897396dc --- /dev/null +++ b/src/sql/postgres/types/numeric.zig @@ -0,0 +1,20 @@ +pub const to = 0; +pub const from = [_]short{ 21, 23, 26, 700, 701 }; + +pub fn toJS( + _: *JSC.JSGlobalObject, + value: anytype, +) AnyPostgresError!JSValue { + return JSValue.jsNumber(value); +} + +// @sortImports + +const bun = @import("bun"); +const AnyPostgresError = @import("../AnyPostgresError.zig").AnyPostgresError; + +const int_types = @import("./int_types.zig"); +const short = int_types.short; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/string.zig b/src/string.zig index 2025afe79a..4c95e61b68 100644 --- a/src/string.zig +++ b/src/string.zig @@ -514,18 +514,22 @@ pub const String = extern struct { } pub fn fromJS(value: bun.JSC.JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!String { + var scope: JSC.ExceptionValidationScope = undefined; + scope.init(globalObject, @src()); + defer scope.deinit(); var out: String = String.dead; - if (BunString__fromJS(globalObject, value, &out)) { - if (comptime bun.Environment.isDebug) { - bun.assert(out.tag != .Dead); - } - return out; + const ok = BunString__fromJS(globalObject, value, &out); + + // If there is a pending exception, but stringifying succeeds, we don't return JSError. + // We do need to always call hasException() to satisfy the need for an exception check. + const has_exception = scope.hasExceptionOrFalseWhenAssertionsAreDisabled(); + if (ok) { + bun.debugAssert(out.tag != .Dead); + } else { + bun.debugAssert(has_exception); } - if (comptime bun.Environment.isDebug) { - bun.assert(globalObject.hasException()); - } - return error.JSError; + return if (ok) out else error.JSError; } pub fn toJS(this: *const String, globalObject: *bun.JSC.JSGlobalObject) JSC.JSValue { @@ -549,7 +553,7 @@ pub const String = extern struct { /// calls toJS on all elements of `array`. pub fn toJSArray(globalObject: *bun.JSC.JSGlobalObject, array: []const bun.String) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); - return bun.jsc.fromJSHostValue(BunString__createArray(globalObject, array.ptr, array.len)); + return bun.jsc.fromJSHostCall(globalObject, @src(), BunString__createArray, .{ globalObject, array.ptr, array.len }); } pub fn toZigString(this: String) ZigString { @@ -662,16 +666,10 @@ pub const String = extern struct { return false; } - extern fn BunString__toJSON( - globalObject: *bun.JSC.JSGlobalObject, - this: *String, - ) JSC.JSValue; + extern fn BunString__toJSON(globalObject: *bun.JSC.JSGlobalObject, this: *String) JSC.JSValue; pub fn toJSByParseJSON(self: *String, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - const result = BunString__toJSON(globalObject, self); - if (result == .zero) return error.JSError; - return result; + return bun.jsc.fromJSHostCall(globalObject, @src(), BunString__toJSON, .{ globalObject, self }); } pub fn encodeInto(self: String, out: []u8, comptime enc: JSC.Node.Encoding) !usize { diff --git a/src/string/MutableString.zig b/src/string/MutableString.zig index 0cdb26f374..e98c675349 100644 --- a/src/string/MutableString.zig +++ b/src/string/MutableString.zig @@ -379,7 +379,6 @@ pub const BufferedWriter = struct { this.remain()[0 .. bytes.len * 2], []const u16, bytes, - true, ); this.context.list.items.len += @as(usize, decoded.written); return pending.len; @@ -393,7 +392,6 @@ pub const BufferedWriter = struct { this.remain()[0 .. 
bytes.len * 2], []const u16, bytes, - true, ); this.pos += @as(usize, decoded.written); } diff --git a/src/string/StringBuilder.zig b/src/string/StringBuilder.zig index 803fdcd4ef..25af0cbf96 100644 --- a/src/string/StringBuilder.zig +++ b/src/string/StringBuilder.zig @@ -66,7 +66,7 @@ pub fn append16(this: *StringBuilder, slice: []const u16, fallback_allocator: st var list = std.ArrayList(u8).init(fallback_allocator); var out = bun.strings.toUTF8ListWithTypeBun(&list, []const u16, slice, false) catch return null; out.append(0) catch return null; - return list.items[0 .. list.items.len - 1 :0]; + return out.items[0 .. out.items.len - 1 :0]; } } diff --git a/src/string/paths.zig b/src/string/paths.zig index da6073f39b..0a7eba3dc7 100644 --- a/src/string/paths.zig +++ b/src/string/paths.zig @@ -39,7 +39,7 @@ pub fn isWindowsAbsolutePathMissingDriveLetter(comptime T: type, chars: []const pub fn fromWPath(buf: []u8, utf16: []const u16) [:0]const u8 { bun.unsafeAssert(buf.len > 0); const to_copy = trimPrefixComptime(u16, utf16, bun.windows.long_path_prefix); - const encode_into_result = copyUTF16IntoUTF8(buf[0 .. buf.len - 1], []const u16, to_copy, false); + const encode_into_result = copyUTF16IntoUTF8(buf[0 .. buf.len - 1], []const u16, to_copy); bun.unsafeAssert(encode_into_result.written < buf.len); buf[encode_into_result.written] = 0; return buf[0..encode_into_result.written :0]; @@ -118,18 +118,6 @@ pub fn toNTPath16(wbuf: []u16, path: []const u16) [:0]u16 { return wbuf[0 .. toWPathNormalized16(wbuf[prefix.len..], path).len + prefix.len :0]; } -pub fn toNTMaxPath(buf: []u8, utf8: []const u8) [:0]const u8 { - if (!std.fs.path.isAbsoluteWindows(utf8) or utf8.len <= 260) { - @memcpy(buf[0..utf8.len], utf8); - buf[utf8.len] = 0; - return buf[0..utf8.len :0]; - } - - const prefix = bun.windows.nt_maxpath_prefix_u8; - buf[0..prefix.len].* = prefix; - return buf[0 .. toPathNormalized(buf[prefix.len..], utf8).len + prefix.len :0]; -} - pub fn addNTPathPrefix(wbuf: []u16, utf16: []const u16) [:0]u16 { wbuf[0..bun.windows.nt_object_prefix.len].* = bun.windows.nt_object_prefix; @memcpy(wbuf[bun.windows.nt_object_prefix.len..][0..utf16.len], utf16); @@ -155,6 +143,11 @@ pub const toNTDir = toNTPath; pub fn toExtendedPathNormalized(wbuf: []u16, utf8: []const u8) [:0]const u16 { bun.unsafeAssert(wbuf.len > 4); + if (hasPrefixComptime(utf8, bun.windows.long_path_prefix_u8) or + hasPrefixComptime(utf8, bun.windows.nt_object_prefix_u8)) + { + return toWPathNormalized(wbuf, utf8); + } wbuf[0..4].* = bun.windows.long_path_prefix; return wbuf[0 .. 
toWPathNormalized(wbuf[4..], utf8).len + 4 :0]; } @@ -168,8 +161,8 @@ pub fn toWPathNormalizeAutoExtend(wbuf: []u16, utf8: []const u8) [:0]const u16 { } pub fn toWPathNormalized(wbuf: []u16, utf8: []const u8) [:0]u16 { - const renormalized = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(renormalized); + const renormalized = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(renormalized); var path_to_use = normalizeSlashesOnly(renormalized, utf8, '\\'); @@ -195,8 +188,8 @@ pub fn toWPathNormalized16(wbuf: []u16, path: []const u16) [:0]u16 { } pub fn toPathNormalized(buf: []u8, utf8: []const u8) [:0]const u8 { - const renormalized = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(renormalized); + const renormalized = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(renormalized); var path_to_use = normalizeSlashesOnly(renormalized, utf8, '\\'); @@ -235,12 +228,12 @@ pub fn normalizeSlashesOnly(buf: []u8, utf8: []const u8, comptime desired_slash: pub fn toWDirNormalized(wbuf: []u16, utf8: []const u8) [:0]const u16 { var renormalized: ?*bun.PathBuffer = null; - defer if (renormalized) |r| bun.PathBufferPool.put(r); + defer if (renormalized) |r| bun.path_buffer_pool.put(r); var path_to_use = utf8; if (bun.strings.containsChar(utf8, '/')) { - renormalized = bun.PathBufferPool.get(); + renormalized = bun.path_buffer_pool.get(); @memcpy(renormalized.?[0..utf8.len], utf8); for (renormalized.?[0..utf8.len]) |*c| { if (c.* == '/') { @@ -447,6 +440,67 @@ pub fn removeLeadingDotSlash(slice: []const u8) callconv(bun.callconv_inline) [] return slice; } +// Copied from std, modified to accept input type +pub fn basename(comptime T: type, input: []const T) []const T { + if (comptime Environment.isWindows) { + return basenameWindows(T, input); + } + return basenamePosix(T, input); +} + +fn basenamePosix(comptime T: type, input: []const T) []const T { + if (input.len == 0) + return &.{}; + + var end_index: usize = input.len - 1; + while (input[end_index] == '/') { + if (end_index == 0) + return &.{}; + end_index -= 1; + } + var start_index: usize = end_index; + end_index += 1; + while (input[start_index] != '/') { + if (start_index == 0) + return input[0..end_index]; + start_index -= 1; + } + + return input[start_index + 1 .. end_index]; +} + +fn basenameWindows(comptime T: type, input: []const T) []const T { + if (input.len == 0) + return &.{}; + + var end_index: usize = input.len - 1; + while (true) { + const byte = input[end_index]; + if (byte == '/' or byte == '\\') { + if (end_index == 0) + return &.{}; + end_index -= 1; + continue; + } + if (byte == ':' and end_index == 1) { + return &.{}; + } + break; + } + + var start_index: usize = end_index; + end_index += 1; + while (input[start_index] != '/' and input[start_index] != '\\' and + !(input[start_index] == ':' and start_index == 1)) + { + if (start_index == 0) + return input[0..end_index]; + start_index -= 1; + } + + return input[start_index + 1 .. 
end_index]; +} + const bun = @import("bun"); const std = @import("std"); const Environment = bun.Environment; diff --git a/src/string/unicode.zig b/src/string/unicode.zig index 0404ff020d..42baa91591 100644 --- a/src/string/unicode.zig +++ b/src/string/unicode.zig @@ -428,7 +428,9 @@ pub fn toUTF8ListWithTypeBun(list: *std.ArrayList(u8), comptime Type: type, utf1 } pub const EncodeIntoResult = struct { + /// The number of u16s we read from the utf-16 buffer read: u32 = 0, + /// The number of u8s we wrote to the utf-8 buffer written: u32 = 0, }; pub fn allocateLatin1IntoUTF8(allocator: std.mem.Allocator, comptime Type: type, latin1_: Type) ![]u8 { @@ -1679,7 +1681,15 @@ pub fn latin1ToCodepointBytesAssumeNotASCII16(char: u32) u16 { return latin1_to_utf16_conversion_table[@as(u8, @truncate(char))]; } -pub fn copyUTF16IntoUTF8(buf: []u8, comptime Type: type, utf16: Type, comptime allow_partial_write: bool) EncodeIntoResult { +/// Copy a UTF-16 string as UTF-8 into `buf` +/// +/// This may not encode everything if `buf` is not big enough. +pub fn copyUTF16IntoUTF8(buf: []u8, comptime Type: type, utf16: Type) EncodeIntoResult { + return copyUTF16IntoUTF8Impl(buf, Type, utf16, false); +} + +/// See comment on `copyUTF16IntoUTF8WithBufferImpl` on what `allow_truncated_utf8_sequence` should do +pub fn copyUTF16IntoUTF8Impl(buf: []u8, comptime Type: type, utf16: Type, comptime allow_truncated_utf8_sequence: bool) EncodeIntoResult { if (comptime Type == []const u16) { if (bun.FeatureFlags.use_simdutf) { if (utf16.len == 0) @@ -1693,14 +1703,33 @@ pub fn copyUTF16IntoUTF8(buf: []u8, comptime Type: type, utf16: Type, comptime a else buf.len; - return copyUTF16IntoUTF8WithBuffer(buf, Type, utf16, trimmed, out_len, allow_partial_write); + return copyUTF16IntoUTF8WithBufferImpl(buf, Type, utf16, trimmed, out_len, allow_truncated_utf8_sequence); } } - return copyUTF16IntoUTF8WithBuffer(buf, Type, utf16, utf16, utf16.len, allow_partial_write); + return copyUTF16IntoUTF8WithBufferImpl(buf, Type, utf16, utf16, utf16.len, allow_truncated_utf8_sequence); } -pub fn copyUTF16IntoUTF8WithBuffer(buf: []u8, comptime Type: type, utf16: Type, trimmed: Type, out_len: usize, comptime allow_partial_write: bool) EncodeIntoResult { +pub fn copyUTF16IntoUTF8WithBuffer(buf: []u8, comptime Type: type, utf16: Type, trimmed: Type, out_len: usize) EncodeIntoResult { + return copyUTF16IntoUTF8WithBufferImpl(buf, Type, utf16, trimmed, out_len, false); +} + +/// Q: What does the `allow_truncated_utf8_sequence` parameter do? +/// A: If the output buffer can't fit everything, this function will write +/// incomplete utf-8 byte sequences if `allow_truncated_utf8_sequence` is +/// enabled. +/// +/// Q: Doesn't that mean this function would output invalid utf-8? Why would you +/// ever want to do that? +/// A: Yes. 
This is needed for writing a UTF-16 string to a node Buffer that +/// doesn't have enough space for all the bytes: +/// +/// ```js +/// let buffer = Buffer.allocUnsafe(1); +/// buffer.fill("\u0222"); +/// expect(buffer[0]).toBe(0xc8); +/// ``` +pub fn copyUTF16IntoUTF8WithBufferImpl(buf: []u8, comptime Type: type, utf16: Type, trimmed: Type, out_len: usize, comptime allow_truncated_utf8_sequence: bool) EncodeIntoResult { var remaining = buf; var utf16_remaining = utf16; var ended_on_non_ascii = false; @@ -1734,9 +1763,10 @@ pub fn copyUTF16IntoUTF8WithBuffer(buf: []u8, comptime Type: type, utf16: Type, const replacement = utf16CodepointWithFFFD(Type, utf16_remaining); const width: usize = replacement.utf8Width(); + bun.assert(width > 1); if (width > remaining.len) { ended_on_non_ascii = width > 1; - if (comptime allow_partial_write) switch (width) { + if (comptime allow_truncated_utf8_sequence) switch (width) { 2 => { if (remaining.len > 0) { //only first will be written diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 63a1d8991a..2a78d74483 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -2247,7 +2247,9 @@ pub const copyU16IntoU8 = unicode.copyU16IntoU8; pub const copyU8IntoU16 = unicode.copyU8IntoU16; pub const copyU8IntoU16WithAlignment = unicode.copyU8IntoU16WithAlignment; pub const copyUTF16IntoUTF8 = unicode.copyUTF16IntoUTF8; +pub const copyUTF16IntoUTF8Impl = unicode.copyUTF16IntoUTF8Impl; pub const copyUTF16IntoUTF8WithBuffer = unicode.copyUTF16IntoUTF8WithBuffer; +pub const copyUTF16IntoUTF8WithBufferImpl = unicode.copyUTF16IntoUTF8WithBufferImpl; pub const decodeCheck = unicode.decodeCheck; pub const decodeWTF8RuneT = unicode.decodeWTF8RuneT; pub const decodeWTF8RuneTMultibyte = unicode.decodeWTF8RuneTMultibyte; @@ -2327,7 +2329,6 @@ pub const startsWithWindowsDriveLetter = _paths.startsWithWindowsDriveLetter; pub const startsWithWindowsDriveLetterT = _paths.startsWithWindowsDriveLetterT; pub const toExtendedPathNormalized = _paths.toExtendedPathNormalized; pub const toKernel32Path = _paths.toKernel32Path; -pub const toNTMaxPath = _paths.toNTMaxPath; pub const toNTPath = _paths.toNTPath; pub const toNTPath16 = _paths.toNTPath16; pub const toPath = _paths.toPath; @@ -2345,6 +2346,7 @@ pub const withoutLeadingSlash = _paths.withoutLeadingSlash; pub const withoutNTPrefix = _paths.withoutNTPrefix; pub const withoutTrailingSlash = _paths.withoutTrailingSlash; pub const withoutTrailingSlashWindowsPath = _paths.withoutTrailingSlashWindowsPath; +pub const basename = _paths.basename; pub const log = bun.Output.scoped(.STR, true); pub const grapheme = @import("./grapheme.zig"); diff --git a/src/symbols.def b/src/symbols.def index ed8dce0cff..e323c71c96 100644 --- a/src/symbols.def +++ b/src/symbols.def @@ -635,3 +635,4 @@ EXPORTS ?GetName@Function@v8@@QEBA?AV?$Local@VValue@v8@@@2@XZ ?IsFunction@Value@v8@@QEBA_NXZ ?FromJustIsNothing@api_internal@v8@@YAXXZ + ?GetFunctionTemplateData@api_internal@v8@@YA?AV?$Local@VValue@v8@@@2@PEAVIsolate@2@V?$Local@VData@v8@@@2@@Z diff --git a/src/symbols.dyn b/src/symbols.dyn index bd733b5ede..52ac723141 100644 --- a/src/symbols.dyn +++ b/src/symbols.dyn @@ -7,6 +7,7 @@ __ZN2v812api_internal13DisposeGlobalEPm; __ZN2v812api_internal17FromJustIsNothingEv; __ZN2v812api_internal18GlobalizeReferenceEPNS_8internal7IsolateEm; + __ZN2v812api_internal23GetFunctionTemplateDataEPNS_7IsolateENS_5LocalINS_4DataEEE; __ZN2v814ObjectTemplate11NewInstanceENS_5LocalINS_7ContextEEE; 
__ZN2v814ObjectTemplate21SetInternalFieldCountEi; __ZN2v814ObjectTemplate3NewEPNS_7IsolateENS_5LocalINS_16FunctionTemplateEEE; diff --git a/src/symbols.txt b/src/symbols.txt index 56f7a8f68b..30da904b1f 100644 --- a/src/symbols.txt +++ b/src/symbols.txt @@ -6,6 +6,7 @@ __ZN2v812api_internal12ToLocalEmptyEv __ZN2v812api_internal13DisposeGlobalEPm __ZN2v812api_internal17FromJustIsNothingEv __ZN2v812api_internal18GlobalizeReferenceEPNS_8internal7IsolateEm +__ZN2v812api_internal23GetFunctionTemplateDataEPNS_7IsolateENS_5LocalINS_4DataEEE __ZN2v814ObjectTemplate11NewInstanceENS_5LocalINS_7ContextEEE __ZN2v814ObjectTemplate21SetInternalFieldCountEi __ZN2v814ObjectTemplate3NewEPNS_7IsolateENS_5LocalINS_16FunctionTemplateEEE diff --git a/src/sys.zig b/src/sys.zig index 211730b523..6ab9d10606 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -57,6 +57,7 @@ const Environment = bun.Environment; const JSC = bun.JSC; const MAX_PATH_BYTES = bun.MAX_PATH_BYTES; const SystemError = JSC.SystemError; +const FD = bun.FD; const linux = syscall; @@ -215,6 +216,7 @@ pub const Tag = enum(u8) { chmod, chown, clonefile, + clonefileat, close, copy_file_range, copyfile, @@ -343,10 +345,10 @@ pub const Error = struct { syscall: sys.Tag = sys.Tag.TODO, dest: []const u8 = "", - pub fn clone(this: *const Error, allocator: std.mem.Allocator) !Error { + pub fn clone(this: *const Error, allocator: std.mem.Allocator) Error { var copy = this.*; - copy.path = try allocator.dupe(u8, copy.path); - copy.dest = try allocator.dupe(u8, copy.dest); + copy.path = allocator.dupe(u8, copy.path) catch bun.outOfMemory(); + copy.dest = allocator.dupe(u8, copy.dest) catch bun.outOfMemory(); return copy; } @@ -424,14 +426,18 @@ pub const Error = struct { }; } - /// Only call this after it's been .clone()'d pub fn deinit(this: *Error) void { + this.deinitWithAllocator(bun.default_allocator); + } + + /// Only call this after it's been .clone()'d + pub fn deinitWithAllocator(this: *Error, allocator: std.mem.Allocator) void { if (this.path.len > 0) { - bun.default_allocator.free(this.path); + allocator.free(this.path); this.path = ""; } if (this.dest.len > 0) { - bun.default_allocator.free(this.dest); + allocator.free(this.dest); this.dest = ""; } } @@ -519,12 +525,24 @@ pub const Error = struct { return null; } + pub fn msg(this: Error) ?[]const u8 { + if (this.getErrorCodeTagName()) |resolved_errno| { + const code, const system_errno = resolved_errno; + if (coreutils_error_map.get(system_errno)) |label| { + return label; + } + return code; + } + return null; + } + /// Simpler formatting which does not allocate a message pub fn toShellSystemError(this: Error) SystemError { @setEvalBranchQuota(1_000_000); var err = SystemError{ .errno = @as(c_int, this.errno) * -1, .syscall = bun.String.static(@tagName(this.syscall)), + .message = .empty, }; // errno label @@ -560,6 +578,7 @@ pub const Error = struct { var err = SystemError{ .errno = -%@as(c_int, this.errno), .syscall = bun.String.static(@tagName(this.syscall)), + .message = .empty, }; // errno label @@ -626,11 +645,7 @@ pub const Error = struct { return Error{ .errno = todo_errno, .syscall = .TODO }; } - pub fn toJS(this: Error, ctx: *JSC.JSGlobalObject) JSC.C.JSObjectRef { - return this.toSystemError().toErrorInstance(ctx).asObjectRef(); - } - - pub fn toJSC(this: Error, ptr: *JSC.JSGlobalObject) JSC.JSValue { + pub fn toJS(this: Error, ptr: *JSC.JSGlobalObject) JSC.JSValue { return this.toSystemError().toErrorInstance(ptr); } }; @@ -652,8 +667,8 @@ pub fn getcwdZ(buf: *bun.PathBuffer) 
Maybe([:0]const u8) { buf[0] = 0; if (comptime Environment.isWindows) { - var wbuf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(wbuf); + var wbuf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(wbuf); const len: windows.DWORD = kernel32.GetCurrentDirectoryW(wbuf.len, wbuf); if (Result.errnoSysP(len, .getcwd, buf)) |err| return err; return Result{ .result = bun.strings.fromWPath(buf, wbuf[0..len]) }; @@ -745,8 +760,8 @@ pub fn chdirOSPath(path: bun.stringZ, destination: if (Environment.isPosix) bun. } if (comptime Environment.isWindows) { - const wbuf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(wbuf); + const wbuf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(wbuf); if (c.SetCurrentDirectoryW(bun.strings.toWDirPath(wbuf, destination)) == windows.FALSE) { log("SetCurrentDirectory({s}) = {d}", .{ destination, kernel32.GetLastError() }); return Maybe(void).errnoSysPD(0, .chdir, path, destination) orelse Maybe(void).success; @@ -893,8 +908,8 @@ pub fn lutimes(path: [:0]const u8, atime: JSC.Node.TimeLike, mtime: JSC.Node.Tim } pub fn mkdiratA(dir_fd: bun.FileDescriptor, file_path: []const u8) Maybe(void) { - const buf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(buf); + const buf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(buf); return mkdiratW(dir_fd, bun.strings.toWPathNormalized(buf, file_path)); } @@ -919,7 +934,7 @@ pub const mkdirat = if (Environment.isWindows) else mkdiratPosix; -pub fn mkdiratW(dir_fd: bun.FileDescriptor, file_path: []const u16, _: i32) Maybe(void) { +pub fn mkdiratW(dir_fd: bun.FileDescriptor, file_path: [:0]const u16, _: i32) Maybe(void) { const dir_to_make = openDirAtWindowsNtPath(dir_fd, file_path, .{ .iterable = false, .can_rename_or_delete = true, .create = true }); if (dir_to_make == .err) { return .{ .err = dir_to_make.err }; @@ -955,8 +970,8 @@ pub fn mkdir(file_path: [:0]const u8, flags: mode_t) Maybe(void) { .linux => Maybe(void).errnoSysP(syscall.mkdir(file_path, flags), .mkdir, file_path) orelse Maybe(void).success, .windows => { - const wbuf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(wbuf); + const wbuf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(wbuf); return Maybe(void).errnoSysP( bun.windows.CreateDirectoryW(bun.strings.toKernel32Path(wbuf, file_path).ptr, null), .mkdir, @@ -988,8 +1003,8 @@ pub fn mkdirA(file_path: []const u8, flags: mode_t) Maybe(void) { } if (comptime Environment.isWindows) { - const wbuf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(wbuf); + const wbuf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(wbuf); const wpath = bun.strings.toKernel32Path(wbuf, file_path); assertIsValidWindowsPath(u16, wpath); return Maybe(void).errnoSysP( @@ -1053,8 +1068,8 @@ pub fn normalizePathWindows( if (comptime T != u8 and T != u16) { @compileError("normalizePathWindows only supports u8 and u16 character types"); } - const wbuf = if (T != u16) bun.WPathBufferPool.get(); - defer if (T != u16) bun.WPathBufferPool.put(wbuf); + const wbuf = if (T != u16) bun.w_path_buffer_pool.get(); + defer if (T != u16) bun.w_path_buffer_pool.put(wbuf); var path = if (T == u16) path_ else bun.strings.convertUTF8toUTF16InBuffer(wbuf, path_); if (std.fs.path.isAbsoluteWindowsWTF16(path)) { @@ -1124,8 +1139,8 @@ pub fn normalizePathWindows( path = path[2..]; } - const buf1 = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(buf1); + const buf1 = bun.w_path_buffer_pool.get(); + 
defer bun.w_path_buffer_pool.put(buf1); @memcpy(buf1[0..base_path.len], base_path); buf1[base_path.len] = '\\'; @memcpy(buf1[base_path.len + 1 .. base_path.len + 1 + path.len], path); @@ -1279,8 +1294,8 @@ fn openDirAtWindowsT( path: []const T, options: WindowsOpenDirOptions, ) Maybe(bun.FileDescriptor) { - const wbuf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(wbuf); + const wbuf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(wbuf); const norm = switch (normalizePathWindows(T, dirFd, path, wbuf, .{})) { .err => |err| return .{ .err = err }, @@ -1598,8 +1613,8 @@ pub fn openFileAtWindowsT( path: []const T, options: NtCreateFileOptions, ) Maybe(bun.FileDescriptor) { - const wbuf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(wbuf); + const wbuf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(wbuf); const norm = switch (normalizePathWindows(T, dirFd, path, wbuf, .{})) { .err => |err| return .{ .err = err }, @@ -2542,11 +2557,11 @@ pub fn renameat2(from_dir: bun.FileDescriptor, from: [:0]const u8, to_dir: bun.F pub fn renameat(from_dir: bun.FileDescriptor, from: [:0]const u8, to_dir: bun.FileDescriptor, to: [:0]const u8) Maybe(void) { if (Environment.isWindows) { - const w_buf_from = bun.WPathBufferPool.get(); - const w_buf_to = bun.WPathBufferPool.get(); + const w_buf_from = bun.w_path_buffer_pool.get(); + const w_buf_to = bun.w_path_buffer_pool.get(); defer { - bun.WPathBufferPool.put(w_buf_from); - bun.WPathBufferPool.put(w_buf_to); + bun.w_path_buffer_pool.put(w_buf_from); + bun.w_path_buffer_pool.put(w_buf_to); } const rc = bun.windows.renameAtW( @@ -2586,8 +2601,10 @@ pub fn symlink(target: [:0]const u8, dest: [:0]const u8) Maybe(void) { while (true) { if (Maybe(void).errnoSys(syscall.symlink(target, dest), .symlink)) |err| { if (err.getErrno() == .INTR) continue; + log("symlink({s}, {s}) = {s}", .{ target, dest, @tagName(err.getErrno()) }); return err; } + log("symlink({s}, {s}) = 0", .{ target, dest }); return Maybe(void).success; } } @@ -2596,8 +2613,10 @@ pub fn symlinkat(target: [:0]const u8, dirfd: bun.FileDescriptor, dest: [:0]cons while (true) { if (Maybe(void).errnoSys(syscall.symlinkat(target, dirfd.cast(), dest), .symlinkat)) |err| { if (err.getErrno() == .INTR) continue; + log("symlinkat({s}, {}, {s}) = {s}", .{ target, dirfd, dest, @tagName(err.getErrno()) }); return err; } + log("symlinkat({s}, {}, {s}) = 0", .{ target, dirfd, dest }); return Maybe(void).success; } } @@ -2621,15 +2640,21 @@ pub const WindowsSymlinkOptions = packed struct { pub var has_failed_to_create_symlink = false; }; -pub fn symlinkOrJunction(dest: [:0]const u8, target: [:0]const u8) Maybe(void) { - if (comptime !Environment.isWindows) @compileError("symlinkOrJunction is windows only"); +/// Symlinks on Windows can be relative or absolute, and junctions can +/// only be absolute. Passing `null` for `abs_fallback_junction_target` +/// is saying `target` is already absolute. 
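+/// e.g. an illustrative caller (not a verbatim call site): `symlinkOrJunction(dest, "..\\pkg\\bin.exe", abs_target)`, where `abs_target` is `target` resolved against the parent directory of `dest`, so the junction fallback still points at a valid absolute path.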
+pub fn symlinkOrJunction(dest: [:0]const u8, target: [:0]const u8, abs_fallback_junction_target: ?[:0]const u8) Maybe(void) { + if (comptime !Environment.isWindows) { + // return symlink(target, dest); + @compileError("windows only plz!!"); + } if (!WindowsSymlinkOptions.has_failed_to_create_symlink) { - const sym16 = bun.WPathBufferPool.get(); - const target16 = bun.WPathBufferPool.get(); + const sym16 = bun.w_path_buffer_pool.get(); + const target16 = bun.w_path_buffer_pool.get(); defer { - bun.WPathBufferPool.put(sym16); - bun.WPathBufferPool.put(target16); + bun.w_path_buffer_pool.put(sym16); + bun.w_path_buffer_pool.put(target16); } const sym_path = bun.strings.toWPathNormalizeAutoExtend(sym16, dest); const target_path = bun.strings.toWPathNormalizeAutoExtend(target16, target); @@ -2638,14 +2663,26 @@ pub fn symlinkOrJunction(dest: [:0]const u8, target: [:0]const u8) Maybe(void) { return Maybe(void).success; }, .err => |err| { - if (err.getErrno() == .EXIST) { - return .{ .err = err }; + switch (err.getErrno()) { + .EXIST, .NOENT => { + // if the destination already exists, or a component + // of the destination doesn't exist, return the error + // without trying junctions. + return .{ .err = err }; + }, + else => { + // fallthrough to junction + }, } }, } } - return sys_uv.symlinkUV(target, dest, bun.windows.libuv.UV_FS_SYMLINK_JUNCTION); + return sys_uv.symlinkUV( + abs_fallback_junction_target orelse target, + dest, + bun.windows.libuv.UV_FS_SYMLINK_JUNCTION, + ); } pub fn symlinkW(dest: [:0]const u16, target: [:0]const u16, options: WindowsSymlinkOptions) Maybe(void) { @@ -2671,6 +2708,20 @@ pub fn symlinkW(dest: [:0]const u16, target: [:0]const u16, options: WindowsSyml } if (errno.toSystemErrno()) |err| { + switch (err) { + .ENOENT, + .EEXIST, + => { + return .{ + .err = .{ + .errno = @intFromEnum(err), + .syscall = .symlink, + }, + }; + }, + + else => {}, + } WindowsSymlinkOptions.has_failed_to_create_symlink = true; return .{ .err = .{ @@ -2699,12 +2750,46 @@ pub fn clonefile(from: [:0]const u8, to: [:0]const u8) Maybe(void) { while (true) { if (Maybe(void).errnoSys(c.clonefile(from, to, 0), .clonefile)) |err| { if (err.getErrno() == .INTR) continue; + log("clonefile({s}, {s}) = {s}", .{ from, to, @tagName(err.getErrno()) }); return err; } + log("clonefile({s}, {s}) = 0", .{ from, to }); return Maybe(void).success; } } +pub fn clonefileat(from: FD, from_path: [:0]const u8, to: FD, to_path: [:0]const u8) Maybe(void) { + if (comptime !Environment.isMac) { + @compileError("macOS only"); + } + + while (true) { + if (Maybe(void).errnoSys(c.clonefileat(from.cast(), from_path, to.cast(), to_path, 0), .clonefileat)) |err| { + if (err.getErrno() == .INTR) continue; + log( + \\clonefileat( + \\ {}, + \\ {s}, + \\ {}, + \\ {s}, + \\) = {s} + \\ + , .{ from, from_path, to, to_path, @tagName(err.getErrno()) }); + return err; + } + log( + \\clonefileat( + \\ {}, + \\ {s}, + \\ {}, + \\ {s}, + \\) = 0 + \\ + , .{ from, from_path, to, to_path }); + return .success; + } +} + pub fn copyfile(from: [:0]const u8, to: [:0]const u8, flags: posix.system.COPYFILE) Maybe(void) { if (comptime !Environment.isMac) @compileError("macOS only"); @@ -2730,8 +2815,10 @@ pub fn fcopyfile(fd_in: bun.FileDescriptor, fd_out: bun.FileDescriptor, flags: p } pub fn unlinkW(from: [:0]const u16) Maybe(void) { - if (windows.DeleteFileW(from.ptr) != 0) { - return .{ .err = Error.fromCode(bun.windows.getLastErrno(), .unlink) }; + const ret = windows.DeleteFileW(from); + if (Maybe(void).errnoSys(ret, .unlink)) |err| { + 
log("DeleteFileW({s}) = {s}", .{ bun.fmt.fmtPath(u16, from, .{}), @tagName(err.getErrno()) }); + return err; } return Maybe(void).success; @@ -2739,14 +2826,15 @@ pub fn unlinkW(from: [:0]const u16) Maybe(void) { pub fn unlink(from: [:0]const u8) Maybe(void) { if (comptime Environment.isWindows) { - const w_buf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(w_buf); - return unlinkW(bun.strings.toNTPath(w_buf, from)); + const w_buf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(w_buf); + return unlinkW(bun.strings.toWPathNormalizeAutoExtend(w_buf, from)); } while (true) { if (Maybe(void).errnoSysP(syscall.unlink(from), .unlink, from)) |err| { if (err.getErrno() == .INTR) continue; + log("unlink({s}) = {s}", .{ from, @tagName(err.getErrno()) }); return err; } @@ -2762,8 +2850,8 @@ pub fn rmdirat(dirfd: bun.FileDescriptor, to: anytype) Maybe(void) { pub fn unlinkatWithFlags(dirfd: bun.FileDescriptor, to: anytype, flags: c_uint) Maybe(void) { if (Environment.isWindows) { if (comptime std.meta.Elem(@TypeOf(to)) == u8) { - const w_buf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(w_buf); + const w_buf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(w_buf); return unlinkatWithFlags(dirfd, bun.strings.toNTPath(w_buf, bun.span(to)), flags); } @@ -2777,7 +2865,7 @@ pub fn unlinkatWithFlags(dirfd: bun.FileDescriptor, to: anytype, flags: c_uint) if (Maybe(void).errnoSysFP(syscall.unlinkat(dirfd.cast(), to, flags), .unlink, dirfd, to)) |err| { if (err.getErrno() == .INTR) continue; if (comptime Environment.allow_assert) - log("unlinkat({}, {s}) = {d}", .{ dirfd, bun.sliceTo(to, 0), @intFromEnum(err.getErrno()) }); + log("unlinkat({}, {s}) = {s}", .{ dirfd, bun.sliceTo(to, 0), @tagName(err.getErrno()) }); return err; } if (comptime Environment.allow_assert) @@ -2795,7 +2883,7 @@ pub fn unlinkat(dirfd: bun.FileDescriptor, to: anytype) Maybe(void) { if (Maybe(void).errnoSysFP(syscall.unlinkat(dirfd.cast(), to, 0), .unlink, dirfd, to)) |err| { if (err.getErrno() == .INTR) continue; if (comptime Environment.allow_assert) - log("unlinkat({}, {s}) = {d}", .{ dirfd, bun.sliceTo(to, 0), @intFromEnum(err.getErrno()) }); + log("unlinkat({}, {s}) = {s}", .{ dirfd, bun.sliceTo(to, 0), @tagName(err.getErrno()) }); return err; } if (comptime Environment.allow_assert) @@ -2922,6 +3010,7 @@ pub fn setsockopt(fd: bun.FileDescriptor, level: c_int, optname: u32, value: i32 pub fn setNoSigpipe(fd: bun.FileDescriptor) Maybe(void) { if (comptime Environment.isMac) { + log("setNoSigpipe({})", .{fd}); return switch (setsockopt(fd, std.posix.SOL.SOCKET, std.posix.SO.NOSIGPIPE, 1)) { .result => .{ .result = {} }, .err => |err| .{ .err = err }, @@ -2932,13 +3021,49 @@ pub fn setNoSigpipe(fd: bun.FileDescriptor) Maybe(void) { } const socketpair_t = if (Environment.isLinux) i32 else c_uint; +const NonblockingStatus = enum { blocking, nonblocking }; /// libc socketpair() except it defaults to: /// - SOCK_CLOEXEC on Linux /// - SO_NOSIGPIPE on macOS /// /// On POSIX it otherwise makes it do O_CLOEXEC. 
-pub fn socketpair(domain: socketpair_t, socktype: socketpair_t, protocol: socketpair_t, nonblocking_status: enum { blocking, nonblocking }) Maybe([2]bun.FileDescriptor) { +pub fn socketpair(domain: socketpair_t, socktype: socketpair_t, protocol: socketpair_t, nonblocking_status: NonblockingStatus) Maybe([2]bun.FileDescriptor) { + return socketpairImpl(domain, socktype, protocol, nonblocking_status, false); +} + +/// We can't actually use SO_NOSIGPIPE for the stdout of a +/// subprocess we don't control because they have different +/// semantics. +/// +/// For example, when running the shell script: +/// `grep hi src/js_parser.zig | echo hi` +/// +/// The `echo hi` command will terminate first and close its +/// end of the socketpair. +/// +/// With SO_NOSIGPIPE, when `grep` continues and tries to write to +/// stdout, `EPIPE` is returned and then `grep` handles this +/// and prints `grep: stdout: Broken pipe`. +/// +/// So the solution is to NOT set SO_NOSIGPIPE in that scenario. +/// +/// I think this only applies to stdout/stderr, not stdin. `read(...)` +/// and `recv(...)` do not return EPIPE as error codes. +pub fn socketpairForShell(domain: socketpair_t, socktype: socketpair_t, protocol: socketpair_t, nonblocking_status: NonblockingStatus) Maybe([2]bun.FileDescriptor) { + return socketpairImpl(domain, socktype, protocol, nonblocking_status, true); +} + +pub const ShellSigpipeConfig = enum { + /// Only SO_NOSIGPIPE for the socket in the pair + /// that *we're* going to use, don't touch the one + /// we hand off to the subprocess + spawn, + /// off completely + pipeline, +}; + +pub fn socketpairImpl(domain: socketpair_t, socktype: socketpair_t, protocol: socketpair_t, nonblocking_status: NonblockingStatus, for_shell: bool) Maybe([2]bun.FileDescriptor) { if (comptime !Environment.isPosix) @compileError("linux only!"); var fds_i: [2]syscall.fd_t = .{ 0, 0 }; @@ -2980,10 +3105,15 @@ pub fn socketpair(domain: socketpair_t, socktype: socketpair_t, protocol: socket } if (comptime Environment.isMac) { - inline for (0..2) |i| { - switch (setNoSigpipe(.fromNative(fds_i[i]))) { - .err => |err| break :err err, - else => {}, + if (for_shell) { + // see the comment on `socketpairForShell` for why we don't + // set SO_NOSIGPIPE here + } else { + inline for (0..2) |i| { + switch (setNoSigpipe(.fromNative(fds_i[i]))) { + .err => |err| break :err err, + else => {}, + } } } } @@ -3177,8 +3307,8 @@ pub fn getFileAttributes(path: anytype) ?WindowsFileAttributes { const attributes: WindowsFileAttributes = @bitCast(dword); return attributes; } else { - const wbuf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(wbuf); + const wbuf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(wbuf); const path_to_use = bun.strings.toKernel32Path(wbuf, path); return getFileAttributes(path_to_use); } @@ -3379,13 +3509,19 @@ pub const ExistsAtType = enum { }; pub fn existsAtType(fd: bun.FileDescriptor, subpath: anytype) Maybe(ExistsAtType) { if (comptime Environment.isWindows) { - const wbuf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(wbuf); - const path = if (std.meta.Child(@TypeOf(subpath)) == u16) + const wbuf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(wbuf); + var path = if (std.meta.Child(@TypeOf(subpath)) == u16) bun.strings.toNTPath16(wbuf, subpath) else bun.strings.toNTPath(wbuf, subpath); + // trim leading .\ + // NtQueryAttributesFile expects relative paths to not start with .\ + if (path.len > 2 and path[0] == '.' 
and path[1] == '\\') { + path = path[2..]; + } + const path_len_bytes: u16 = @truncate(path.len * 2); var nt_name = w.UNICODE_STRING{ .Length = path_len_bytes, @@ -3435,8 +3571,8 @@ pub fn existsAtType(fd: bun.FileDescriptor, subpath: anytype) Maybe(ExistsAtType } if (std.meta.sentinel(@TypeOf(subpath)) == null) { - const path_buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(path_buf); + const path_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(path_buf); @memcpy(path_buf[0..subpath.len], subpath); path_buf[subpath.len] = 0; const slice: [:0]const u8 = @ptrCast(path_buf); @@ -3640,28 +3776,69 @@ pub fn dup(fd: bun.FileDescriptor) Maybe(bun.FileDescriptor) { return dupWithFlags(fd, 0); } -pub fn linkat(dir_fd: bun.FileDescriptor, basename: []const u8, dest_dir_fd: bun.FileDescriptor, dest_name: []const u8) Maybe(void) { - return Maybe(void).errnoSysP( - std.c.linkat( - @intCast(dir_fd), - &(std.posix.toPosixPath(basename) catch return .{ - .err = .{ - .errno = @intFromEnum(E.NOMEM), - .syscall = .open, - }, - }), - @intCast(dest_dir_fd), - &(std.posix.toPosixPath(dest_name) catch return .{ - .err = .{ - .errno = @intFromEnum(E.NOMEM), - .syscall = .open, - }, - }), - 0, - ), - .link, - basename, - ) orelse Maybe(void).success; +pub fn link(comptime T: type, src: [:0]const T, dest: [:0]const T) Maybe(void) { + if (comptime Environment.isWindows) { + if (T == u8) { + return sys_uv.link(src, dest); + } + + const ret = bun.windows.CreateHardLinkW(dest, src, null); + if (Maybe(void).errnoSys(ret, .link)) |err| { + log("CreateHardLinkW({s}, {s}) = {s}", .{ + bun.fmt.fmtPath(T, dest, .{}), + bun.fmt.fmtPath(T, src, .{}), + @tagName(err.getErrno()), + }); + return err; + } + + log("CreateHardLinkW({s}, {s}) = 0", .{ + bun.fmt.fmtPath(T, dest, .{}), + bun.fmt.fmtPath(T, src, .{}), + }); + return .success; + } + + if (T == u16) { + @compileError("unexpected path type"); + } + + const ret = std.c.link(src, dest); + if (Maybe(void).errnoSysP(ret, .link, src)) |err| { + log("link({s}, {s}) = {s}", .{ src, dest, @tagName(err.getErrno()) }); + return err; + } + log("link({s}, {s}) = 0", .{ src, dest }); + return .success; +} + +pub fn linkat(src: bun.FileDescriptor, src_path: []const u8, dest: bun.FileDescriptor, dest_path: []const u8) Maybe(void) { + return linkatZ( + src, + &(std.posix.toPosixPath(src_path) catch return .{ + .err = .{ + .errno = @intFromEnum(E.NOMEM), + .syscall = .link, + }, + }), + dest, + &(std.posix.toPosixPath(dest_path) catch return .{ + .err = .{ + .errno = @intFromEnum(E.NOMEM), + .syscall = .link, + }, + }), + ); +} + +pub fn linkatZ(src: FD, src_path: [:0]const u8, dest: FD, dest_path: [:0]const u8) Maybe(void) { + const ret = std.c.linkat(src.cast(), src_path, dest.cast(), dest_path, 0); + if (Maybe(void).errnoSysP(ret, .link, src_path)) |err| { + log("linkat({}, {s}, {}, {s}) = {s}", .{ src, src_path, dest, dest_path, @tagName(err.getErrno()) }); + return err; + } + log("linkat({}, {s}, {}, {s}) = 0", .{ src, src_path, dest, dest_path }); + return .success; } pub fn linkatTmpfile(tmpfd: bun.FileDescriptor, dirfd: bun.FileDescriptor, name: [:0]const u8) Maybe(void) { @@ -3854,6 +4031,7 @@ pub fn getFileSize(fd: bun.FileDescriptor) Maybe(usize) { } pub fn isPollable(mode: mode_t) bool { + if (comptime bun.Environment.isWindows) return false; return posix.S.ISFIFO(mode) or posix.S.ISSOCK(mode); } diff --git a/src/transpiler.zig b/src/transpiler.zig index 848e389289..884468d32d 100644 --- a/src/transpiler.zig +++ b/src/transpiler.zig @@ -23,7 
+23,7 @@ const Fs = @import("fs.zig"); const schema = @import("api/schema.zig"); const Api = schema.Api; const _resolver = @import("./resolver/resolver.zig"); -const MimeType = @import("./http/mime_type.zig"); +const MimeType = @import("./http/MimeType.zig"); const runtime = @import("./runtime.zig"); const MacroRemap = @import("./resolver/package_json.zig").MacroMap; const DebugLogs = _resolver.DebugLogs; @@ -241,7 +241,7 @@ pub const PluginRunner = struct { if (!path_value.isString()) { return JSC.ErrorableString.err( error.JSErrorObject, - bun.String.static("Expected \"path\" to be a string in onResolve plugin").toErrorInstance(this.global_object).asVoid(), + bun.String.static("Expected \"path\" to be a string in onResolve plugin").toErrorInstance(this.global_object), ); } @@ -250,7 +250,7 @@ pub const PluginRunner = struct { if (file_path.length() == 0) { return JSC.ErrorableString.err( error.JSErrorObject, - bun.String.static("Expected \"path\" to be a non-empty string in onResolve plugin").toErrorInstance(this.global_object).asVoid(), + bun.String.static("Expected \"path\" to be a non-empty string in onResolve plugin").toErrorInstance(this.global_object), ); } else if // TODO: validate this better @@ -261,7 +261,7 @@ pub const PluginRunner = struct { { return JSC.ErrorableString.err( error.JSErrorObject, - bun.String.static("\"path\" is invalid in onResolve plugin").toErrorInstance(this.global_object).asVoid(), + bun.String.static("\"path\" is invalid in onResolve plugin").toErrorInstance(this.global_object), ); } var static_namespace = true; @@ -270,7 +270,7 @@ pub const PluginRunner = struct { if (!namespace_value.isString()) { return JSC.ErrorableString.err( error.JSErrorObject, - bun.String.static("Expected \"namespace\" to be a string").toErrorInstance(this.global_object).asVoid(), + bun.String.static("Expected \"namespace\" to be a string").toErrorInstance(this.global_object), ); } diff --git a/src/valkey/js_valkey_functions.zig b/src/valkey/js_valkey_functions.zig index 4abe8b7b1f..e3cac8e08c 100644 --- a/src/valkey/js_valkey_functions.zig +++ b/src/valkey/js_valkey_functions.zig @@ -6,7 +6,7 @@ pub fn jsSend(this: *JSValkeyClient, globalObject: *JSC.JSGlobalObject, callfram if (!args_array.isObject() or !args_array.isArray()) { return globalObject.throw("Arguments must be an array", .{}); } - var iter = args_array.arrayIterator(globalObject); + var iter = try args_array.arrayIterator(globalObject); var args = try std.ArrayList(JSArgument).initCapacity(bun.default_allocator, iter.len); defer { for (args.items) |*item| { @@ -15,7 +15,7 @@ pub fn jsSend(this: *JSValkeyClient, globalObject: *JSC.JSGlobalObject, callfram args.deinit(); } - while (iter.next()) |arg_js| { + while (try iter.next()) |arg_js| { args.appendAssumeCapacity(try fromJS(globalObject, arg_js) orelse { return globalObject.throwInvalidArgumentType("sendCommand", "argument", "string or buffer"); }); @@ -390,7 +390,7 @@ pub fn hmget(this: *JSValkeyClient, globalObject: *JSC.JSGlobalObject, callframe return globalObject.throw("Fields must be an array", .{}); } - var iter = fields_array.arrayIterator(globalObject); + var iter = try fields_array.arrayIterator(globalObject); var args = try std.ArrayList(JSC.ZigString.Slice).initCapacity(bun.default_allocator, iter.len + 1); defer { for (args.items) |item| { @@ -402,7 +402,7 @@ pub fn hmget(this: *JSValkeyClient, globalObject: *JSC.JSGlobalObject, callframe args.appendAssumeCapacity(JSC.ZigString.Slice.fromUTF8NeverFree(key.slice())); // Add field names as arguments - 
while (iter.next()) |field_js| { + while (try iter.next()) |field_js| { const field_str = try field_js.toBunString(globalObject); defer field_str.deref(); @@ -495,7 +495,7 @@ pub fn hmset(this: *JSValkeyClient, globalObject: *JSC.JSGlobalObject, callframe return globalObject.throw("Arguments must be an array of alternating field names and values", .{}); } - var iter = array_arg.arrayIterator(globalObject); + var iter = try array_arg.arrayIterator(globalObject); if (iter.len % 2 != 0) { return globalObject.throw("Arguments must be an array of alternating field names and values", .{}); } @@ -514,7 +514,7 @@ pub fn hmset(this: *JSValkeyClient, globalObject: *JSC.JSGlobalObject, callframe args.appendAssumeCapacity(key_slice); // Add field-value pairs - while (iter.next()) |field_js| { + while (try iter.next()) |field_js| { // Add field name const field_str = try field_js.toBunString(globalObject); defer field_str.deref(); @@ -522,7 +522,7 @@ pub fn hmset(this: *JSValkeyClient, globalObject: *JSC.JSGlobalObject, callframe args.appendAssumeCapacity(field_slice); // Add value - if (iter.next()) |value_js| { + if (try iter.next()) |value_js| { const value_str = try value_js.toBunString(globalObject); defer value_str.deref(); const value_slice = value_str.toUTF8WithoutRef(bun.default_allocator); diff --git a/src/valkey/valkey.zig b/src/valkey/valkey.zig index b84ffe8db3..af0947becc 100644 --- a/src/valkey/valkey.zig +++ b/src/valkey/valkey.zig @@ -346,7 +346,7 @@ pub const ValkeyClient = struct { pub fn flushData(this: *ValkeyClient) bool { const chunk = this.write_buffer.remaining(); if (chunk.len == 0) return false; - const wrote = this.socket.write(chunk, false); + const wrote = this.socket.write(chunk); if (wrote > 0) { this.write_buffer.consume(@intCast(wrote)); } @@ -795,7 +795,7 @@ pub const ValkeyClient = struct { // Optimization: avoid cloning the data an extra time. defer this.allocator.free(data); - const wrote = this.socket.write(data, false); + const wrote = this.socket.write(data); const unwritten = data[@intCast(@max(wrote, 0))..]; if (unwritten.len > 0) { diff --git a/src/valkey/valkey_protocol.zig b/src/valkey/valkey_protocol.zig index f7f3e76e87..8d82f8a2f7 100644 --- a/src/valkey/valkey_protocol.zig +++ b/src/valkey/valkey_protocol.zig @@ -255,7 +255,7 @@ pub const RESPValue = union(RESPType) { fn valkeyStrToJSValue(globalObject: *JSC.JSGlobalObject, str: []const u8, options: *const ToJSOptions) bun.JSError!JSC.JSValue { if (options.return_as_buffer) { // TODO: handle values > 4.7 GB - const buf = JSC.ArrayBuffer.createBuffer(globalObject, str); + const buf = try JSC.ArrayBuffer.createBuffer(globalObject, str); return buf.toJS(globalObject); } else { return bun.String.createUTF8ForJS(globalObject, str); @@ -278,7 +278,7 @@ pub const RESPValue = union(RESPType) { var js_array = try JSC.JSValue.createEmptyArray(globalObject, array.len); for (array, 0..) |*item, i| { const js_item = try item.toJSWithOptions(globalObject, options); - js_array.putIndex(globalObject, @intCast(i), js_item); + try js_array.putIndex(globalObject, @intCast(i), js_item); } return js_array; }, @@ -303,7 +303,7 @@ pub const RESPValue = union(RESPType) { var js_array = try JSC.JSValue.createEmptyArray(globalObject, set.len); for (set, 0..) 
|*item, i| { const js_item = try item.toJSWithOptions(globalObject, options); - js_array.putIndex(globalObject, @intCast(i), js_item); + try js_array.putIndex(globalObject, @intCast(i), js_item); } return js_array; }, @@ -323,7 +323,7 @@ pub const RESPValue = union(RESPType) { var data_array = try JSC.JSValue.createEmptyArray(globalObject, push.data.len); for (push.data, 0..) |*item, i| { const js_item = try item.toJSWithOptions(globalObject, options); - data_array.putIndex(globalObject, @intCast(i), js_item); + try data_array.putIndex(globalObject, @intCast(i), js_item); } js_obj.put(globalObject, "data", data_array); diff --git a/src/walker_skippable.zig b/src/walker_skippable.zig index 679c5192dd..a352a32059 100644 --- a/src/walker_skippable.zig +++ b/src/walker_skippable.zig @@ -6,6 +6,9 @@ const path = std.fs.path; const DirIterator = bun.DirIterator; const Environment = bun.Environment; const OSPathSlice = bun.OSPathSlice; +const OSPathSliceZ = bun.OSPathSliceZ; +const OOM = bun.OOM; +const FD = bun.FD; stack: std.ArrayList(StackItem), name_buffer: NameBufferList, @@ -16,17 +19,16 @@ seed: u64 = 0, const NameBufferList = std.ArrayList(bun.OSPathChar); -const Dir = std.fs.Dir; const WrappedIterator = DirIterator.NewWrappedIterator(if (Environment.isWindows) .u16 else .u8); pub const WalkerEntry = struct { /// The containing directory. This can be used to operate directly on `basename` /// rather than `path`, avoiding `error.NameTooLong` for deeply nested paths. /// The directory remains open until `next` or `deinit` is called. - dir: Dir, - basename: OSPathSlice, - path: OSPathSlice, - kind: Dir.Entry.Kind, + dir: FD, + basename: OSPathSliceZ, + path: OSPathSliceZ, + kind: std.fs.Dir.Entry.Kind, }; const StackItem = struct { @@ -37,13 +39,13 @@ const StackItem = struct { /// After each call to this function, and on deinit(), the memory returned /// from this function becomes invalid. A copy must be made in order to keep /// a reference to the path. 
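/// An iteration sketch for the `Maybe`-based signature below (illustrative, not a verbatim call site): `while (true) switch (walker.next()) { .err => |err| return .initErr(err), .result => |entry| { const e = entry orelse break; _ = e.path; }, };`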
-pub fn next(self: *Walker) !?WalkerEntry { +pub fn next(self: *Walker) bun.sys.Maybe(?WalkerEntry) { while (self.stack.items.len != 0) { // `top` becomes invalid after appending to `self.stack` var top = &self.stack.items[self.stack.items.len - 1]; var dirname_len = top.dirname_len; switch (top.iter.next()) { - .err => |err| return bun.errnoToZigErr(err.errno), + .err => |err| return .initErr(err), .result => |res| { if (res) |base| { switch (base.kind) { @@ -79,37 +81,32 @@ pub fn next(self: *Walker) !?WalkerEntry { self.name_buffer.shrinkRetainingCapacity(dirname_len); if (self.name_buffer.items.len != 0) { - try self.name_buffer.append(path.sep); + self.name_buffer.append(path.sep) catch bun.outOfMemory(); dirname_len += 1; } - try self.name_buffer.appendSlice(base.name.slice()); + self.name_buffer.appendSlice(base.name.slice()) catch bun.outOfMemory(); const cur_len = self.name_buffer.items.len; - try self.name_buffer.append(0); - self.name_buffer.shrinkRetainingCapacity(cur_len); + self.name_buffer.append(0) catch bun.outOfMemory(); if (base.kind == .directory) { - var new_dir = (if (Environment.isWindows) - top.iter.iter.dir.openDirW(base.name.sliceAssumeZ(), .{ .iterate = true }) - else - top.iter.iter.dir.openDir(base.name.slice(), .{ .iterate = true })) catch |err| switch (err) { - error.NameTooLong => unreachable, // no path sep in base.name - else => |e| return e, + const new_dir = switch (bun.openDirForIterationOSPath(top.iter.iter.dir, base.name.slice())) { + .result => |fd| fd, + .err => |err| return .initErr(err), }; { - errdefer new_dir.close(); - try self.stack.append(StackItem{ + self.stack.append(StackItem{ .iter = DirIterator.iterate(new_dir, if (Environment.isWindows) .u16 else .u8), - .dirname_len = self.name_buffer.items.len, - }); + .dirname_len = cur_len, + }) catch bun.outOfMemory(); top = &self.stack.items[self.stack.items.len - 1]; } } - return WalkerEntry{ + return .initResult(WalkerEntry{ .dir = top.iter.iter.dir, - .basename = self.name_buffer.items[dirname_len..], - .path = self.name_buffer.items, + .basename = self.name_buffer.items[dirname_len..cur_len :0], + .path = self.name_buffer.items[0..cur_len :0], .kind = base.kind, - }; + }); } else { var item = self.stack.pop().?; if (self.stack.items.len != 0) { @@ -119,7 +116,7 @@ pub fn next(self: *Walker) !?WalkerEntry { }, } } - return null; + return .initResult(null); } pub fn deinit(self: *Walker) void { @@ -142,11 +139,11 @@ pub fn deinit(self: *Walker) void { /// The order of returned file system entries is undefined. /// `self` will not be closed after walking it. 
pub fn walk( - self: Dir, + self: FD, allocator: Allocator, skip_filenames: []const OSPathSlice, skip_dirnames: []const OSPathSlice, -) !Walker { +) OOM!Walker { var name_buffer = NameBufferList.init(allocator); errdefer name_buffer.deinit(); diff --git a/src/watcher/INotifyWatcher.zig b/src/watcher/INotifyWatcher.zig index cba39c4e6c..3a75557215 100644 --- a/src/watcher/INotifyWatcher.zig +++ b/src/watcher/INotifyWatcher.zig @@ -51,7 +51,7 @@ pub const Event = extern struct { const largest_size = std.mem.alignForward(usize, @sizeOf(Event) + bun.MAX_PATH_BYTES, @alignOf(Event)); pub fn name(event: *align(1) Event) [:0]u8 { - if (comptime Environment.allow_assert) bun.assert(event.name_len > 0); + if (comptime Environment.allow_assert) bun.assertf(event.name_len > 0, "INotifyWatcher.Event.name() called with name_len == 0; check name_len before calling this function.", .{}); const name_first_char_ptr = std.mem.asBytes(&event.name_len).ptr + @sizeOf(u32); return bun.sliceTo(@as([*:0]u8, @ptrCast(name_first_char_ptr)), 0); } @@ -192,7 +192,7 @@ pub fn read(this: *INotifyWatcher) bun.JSC.Maybe([]const *align(1) Event) { this.eventlist_ptrs[count] = event; i += event.size(); count += 1; - if (!Environment.enable_logs) + if (Environment.enable_logs) log("{} read event {} {} {} {}", .{ this.fd, event.watch_descriptor, diff --git a/src/watcher/WindowsWatcher.zig b/src/watcher/WindowsWatcher.zig index c53666468c..e9e88ec19e 100644 --- a/src/watcher/WindowsWatcher.zig +++ b/src/watcher/WindowsWatcher.zig @@ -216,7 +216,7 @@ pub fn watchLoopCycle(this: *bun.Watcher) bun.JSC.Maybe(void) { const item_paths = this.watchlist.items(.file_path); log("number of watched items: {d}", .{item_paths.len}); while (iter.next()) |event| { - const convert_res = bun.strings.copyUTF16IntoUTF8(buf[base_idx..], []const u16, event.filename, false); + const convert_res = bun.strings.copyUTF16IntoUTF8(buf[base_idx..], []const u16, event.filename); const eventpath = buf[0 .. base_idx + convert_res.written]; log("watcher update event: (filename: {s}, action: {s}", .{ eventpath, @tagName(event.action) }); diff --git a/src/which.zig b/src/which.zig index 34bed1b26b..2bb4f57340 100644 --- a/src/which.zig +++ b/src/which.zig @@ -20,8 +20,8 @@ pub fn which(buf: *bun.PathBuffer, path: []const u8, cwd: []const u8, bin: []con bun.Output.scoped(.which, true)("path={s} cwd={s} bin={s}", .{ path, cwd, bin }); if (bun.Environment.os == .windows) { - const convert_buf = bun.WPathBufferPool.get(); - defer bun.WPathBufferPool.put(convert_buf); + const convert_buf = bun.w_path_buffer_pool.get(); + defer bun.w_path_buffer_pool.put(convert_buf); const result = whichWin(convert_buf, path, cwd, bin) orelse return null; const result_converted = bun.strings.convertUTF16toUTF8InBuffer(buf, result) catch unreachable; buf[result_converted.len] = 0; @@ -133,8 +133,8 @@ fn searchBinInPath(buf: *bun.WPathBuffer, path_buf: *bun.PathBuffer, path: []con /// It is similar to Get-Command in powershell. 
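/// e.g. a bare name like `bun` is probed against each PATH entry with Windows executable extensions, so it may resolve to `bun.EXE` or a `.CMD` shim (illustrative; see `check_windows_extensions` below).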
pub fn whichWin(buf: *bun.WPathBuffer, path: []const u8, cwd: []const u8, bin: []const u8) ?[:0]const u16 { if (bin.len == 0) return null; - const path_buf = bun.PathBufferPool.get(); - defer bun.PathBufferPool.put(path_buf); + const path_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(path_buf); const check_windows_extensions = !endsWithExtension(bin); diff --git a/src/windows-app-info.rc b/src/windows-app-info.rc index ed6473fbeb..e060d39d81 100644 --- a/src/windows-app-info.rc +++ b/src/windows-app-info.rc @@ -18,8 +18,9 @@ BEGIN VALUE "InternalName", "bun\0" VALUE "OriginalFilename", "bun.exe\0" VALUE "ProductName", "Bun\0" - VALUE "ProductVersion", "@Bun_VERSION_WITH_TAG@\0" - VALUE "LegalCopyright", "https://bun.sh/docs/project/licensing\0" + VALUE "ProductVersion", "@Bun_VERSION_WITH_TAG@\0" + VALUE "CompanyName", "Oven\0" + VALUE "LegalCopyright", "https://bun.com/docs/project/licensing\0" END END BLOCK "VarFileInfo" diff --git a/test/AGENTS.md b/test/AGENTS.md new file mode 120000 index 0000000000..681311eb9c --- /dev/null +++ b/test/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/test/CLAUDE.md b/test/CLAUDE.md new file mode 100644 index 0000000000..9e7601fb2e --- /dev/null +++ b/test/CLAUDE.md @@ -0,0 +1,147 @@ +To run tests: + +```sh +bun bd test <...test file> +``` + +To run a command with your debug build of Bun: + +```sh +bun bd <...cmd> +``` + +Note that compiling Bun may take up to 2.5 minutes. It is slow! + +**CRITICAL**: Do not use `bun test` to run tests. It will not have your changes. `bun bd test <...test file>` is the correct command, which compiles your code automatically. + +## Testing style + +Use `bun:test` with files that end in `*.test.ts`. + +**Do not write flaky tests**. Unless explicitly asked, **never wait for time to pass in tests**. Always wait for the condition to be met instead of waiting for an arbitrary amount of time. **Never use hardcoded port numbers**. Always use `port: 0` to get a random port. + +### Spawning processes + +#### Spawning Bun in tests + +When spawning Bun processes, use `bunExe` and `bunEnv` from `harness`. This ensures the same build of Bun is used to run the test and ensures debug logging is silenced. + +```ts +import { bunEnv, bunExe, tempDirWithFiles } from "harness"; +import { test, expect } from "bun:test"; + +test("spawns a Bun process", async () => { + const dir = tempDirWithFiles("my-test-prefix", { + "my.fixture.ts": ` + console.log("Hello, world!"); + `, + }); + + await using proc = Bun.spawn({ + cmd: [bunExe(), "my.fixture.ts"], + env: bunEnv, + cwd: dir, + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + expect(stdout).toBe("Hello, world!\n"); + expect(stderr).toBe(""); + expect(exitCode).toBe(0); +}); +``` + +When a test spawns a Bun process, we like for the spawned script to end in `*.fixture.ts` (e.g. `my.fixture.ts` above). This is a convention that helps us identify the file as a test fixture and not a test itself. + +Generally, `await using` or `using` is a good idea to ensure proper resource cleanup. This works in most Bun APIs like Bun.listen, Bun.connect, Bun.spawn, Bun.serve, etc. + +#### Async/await in tests + +Prefer async/await over callbacks. + +When callbacks must be used and it's just a single callback, use `Promise.withResolvers` to create a promise that can be resolved or rejected from a callback. 
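+For example, waiting for a WebSocket `open` event (URL and port here are illustrative):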
+ +```ts +const ws = new WebSocket("ws://localhost:8080"); +const { promise, resolve, reject } = Promise.withResolvers(); +ws.onopen = resolve; +ws.onclose = reject; +await promise; +``` + +If it's several callbacks, it's okay to use callbacks. We aren't a stickler for this. + +### Creating temporary files + +Use `tempDirWithFiles` to create a temporary directory with files. + +```ts +import { tempDirWithFiles } from "harness"; +import { test, expect } from "bun:test"; +import path from "node:path"; + +test("creates a temporary directory with files", async () => { + const dir = tempDirWithFiles("my-test-prefix", { + "file.txt": "Hello, world!", + }); + + expect(await Bun.file(path.join(dir, "file.txt")).text()).toBe( + "Hello, world!", + ); +}); +``` + +### Strings + +To create a repetitive string, use `Buffer.alloc(count, fill).toString()` instead of `"A".repeat(count)`. "".repeat is very slow in debug JavaScriptCore builds. + +### Test Organization + +- Use `describe` blocks for grouping related tests +- Regression tests go in `/test/regression/issue/` with issue number +- Unit tests for specific features are organized by module (e.g., `/test/js/bun/`, `/test/js/node/`) +- Integration tests are in `/test/integration/` + +### Common Imports from `harness` + +```ts +import { + bunExe, // Path to Bun executable + bunEnv, // Environment variables for Bun + tempDirWithFiles, // Create temporary test directories with files + tmpdirSync, // Create empty temporary directory + isMacOS, // Platform checks + isWindows, + isPosix, + gcTick, // Trigger garbage collection + withoutAggressiveGC, // Disable aggressive GC for performance tests +} from "harness"; +``` + +### Error Testing + +Always check exit codes and test error scenarios: + +```ts +test("handles errors", async () => { + await using proc = Bun.spawn({ + cmd: [bunExe(), "run", "invalid.js"], + env: bunEnv, + }); + + const exitCode = await proc.exited; + expect(exitCode).not.toBe(0); + + // For synchronous errors + expect(() => someFunction()).toThrow("Expected error message"); +}); +``` + +### Test Utilities + +- Use `describe.each()` for parameterized tests +- Use `toMatchSnapshot()` for snapshot testing +- Use `beforeAll()`, `afterEach()`, `beforeEach()` for setup/teardown +- Track resources (servers, clients) in arrays for cleanup in `afterEach()` diff --git a/test/bake/bake-harness.ts b/test/bake/bake-harness.ts index de46d788bc..b44f64d090 100644 --- a/test/bake/bake-harness.ts +++ b/test/bake/bake-harness.ts @@ -18,7 +18,7 @@ import { Matchers } from "bun:test"; import { EventEmitter } from "node:events"; // @ts-ignore import { dedent } from "../bundler/expectBundled.ts"; -import { bunEnv, isCI, isWindows, mergeWindowEnvs } from "harness"; +import { bunEnv, bunExe, isCI, isWindows, mergeWindowEnvs } from "harness"; import { expect } from "bun:test"; import { exitCodeMapStrings } from "./exit-code-map.mjs"; @@ -34,7 +34,7 @@ const verboseSynchronization = process.env.BUN_DEV_SERVER_VERBOSE_SYNC * Can be set in fast development environments to improve iteration time. * In CI/Windows it appears that sometimes these tests dont wait enough * for things to happen, so the extra delay reduces flakiness. - * + * * Needs much more investigation. */ const fastBatches = !!process.env.BUN_DEV_SERVER_FAST_BATCHES; @@ -128,7 +128,11 @@ export interface DevServerTest { */ mainDir?: string; - skip?: ('win32'|'darwin'|'linux'|'ci')[], + skip?: ("win32" | "darwin" | "linux" | "ci")[]; + /** + * Only run this test. 
+ */ + only?: boolean; } let interactive = false; @@ -310,14 +314,11 @@ export class Dev extends EventEmitter { const wait = this.waitForHotReload(wantsHmrEvent); const b = { write: resetSeenFilesWithResolvers, - [Symbol.asyncDispose]: async() => { + [Symbol.asyncDispose]: async () => { if (wantsHmrEvent && interactive) { await seenFiles.promise; } else if (wantsHmrEvent) { - await Promise.race([ - seenFiles.promise, - Bun.sleep(1000), - ]); + await Promise.race([seenFiles.promise, Bun.sleep(1000)]); } if (!fastBatches) { // Wait an extra delay to avoid double-triggering events. @@ -348,10 +349,12 @@ export class Dev extends EventEmitter { return withAnnotatedStack(snapshot, async () => { await maybeWaitInteractive("write " + file); const isDev = this.nodeEnv === "development"; - await using _wait = isDev ? await this.batchChanges({ - errors: options.errors, - snapshot: snapshot, - }) : null; + await using _wait = isDev + ? await this.batchChanges({ + errors: options.errors, + snapshot: snapshot, + }) + : null; await Bun.write( this.join(file), @@ -384,10 +387,12 @@ export class Dev extends EventEmitter { return withAnnotatedStack(snapshot, async () => { await maybeWaitInteractive("delete " + file); const isDev = this.nodeEnv === "development"; - await using _wait = isDev ? await this.batchChanges({ - errors: options.errors, - snapshot: snapshot, - }) : null; + await using _wait = isDev + ? await this.batchChanges({ + errors: options.errors, + snapshot: snapshot, + }) + : null; const filePath = this.join(file); if (!fs.existsSync(filePath)) { @@ -411,10 +416,12 @@ export class Dev extends EventEmitter { return withAnnotatedStack(snapshot, async () => { await maybeWaitInteractive("patch " + file); const isDev = this.nodeEnv === "development"; - await using _wait = isDev ? await this.batchChanges({ - errors: errors, - snapshot: snapshot, - }) : null; + await using _wait = isDev + ? await this.batchChanges({ + errors: errors, + snapshot: snapshot, + }) + : null; const filename = this.join(file); const source = fs.readFileSync(filename, "utf8"); @@ -477,11 +484,13 @@ export class Dev extends EventEmitter { if (wantsHmrEvent) { await Bun.sleep(500); if (seenMainEvent) return; - console.warn("\x1b[33mWARN: Dev Server did not pick up any changed files. Consider wrapping this call in expectNoWebSocketActivity\x1b[35m"); + console.warn( + "\x1b[33mWARN: Dev Server did not pick up any changed files. Consider wrapping this call in expectNoWebSocketActivity\x1b[35m", + ); } cleanupAndResolve(); } - }; + } dev.on("watch_synchronization", onEvent); }); } @@ -552,10 +561,10 @@ export class Dev extends EventEmitter { * Run a stress test. The function should perform I/O in a loop, for about a * couple of seconds. In CI, this round is run once. In development, this can * be run forever using `DEV_SERVER_STRESS=FILTER`. 
- * + * * Tests using this should go in `stress.test.ts` */ - async stressTest(round: () => (Promise | void)) { + async stressTest(round: () => Promise | void) { if (!this.stressTestEndurance) { await round(); await Bun.sleep(250); @@ -567,16 +576,18 @@ export class Dev extends EventEmitter { const endTime = Date.now() + 10 * 60 * 1000; let iteration = 0; - - using log = new TrailingLog; + + using log = new TrailingLog(); while (Date.now() < endTime) { const timeRemaining = endTime - Date.now(); const minutes = Math.floor(timeRemaining / 60000); const seconds = Math.floor((timeRemaining % 60000) / 1000); - log.setMessage(`[STRESS] Time remaining: ${minutes}:${seconds.toString().padStart(2, '0')}. Iteration ${++iteration}`); + log.setMessage( + `[STRESS] Time remaining: ${minutes}:${seconds.toString().padStart(2, "0")}. Iteration ${++iteration}`, + ); await round(); - + if (this.output.panicked) { throw new Error("DevServer panicked in stress test"); } @@ -749,7 +760,10 @@ export class Client extends EventEmitter { hmr = false; webSocketMessagesAllowed = true; - constructor(url: string, options: { storeHotChunks?: boolean; hmr: boolean; expectErrors?: boolean; allowUnlimitedReloads?: boolean, }) { + constructor( + url: string, + options: { storeHotChunks?: boolean; hmr: boolean; expectErrors?: boolean; allowUnlimitedReloads?: boolean }, + ) { super(); activeClient = this; const proc = Bun.spawn({ @@ -822,6 +836,15 @@ export class Client extends EventEmitter { }); } + elemsText(selector: string): Promise { + return withAnnotatedStack(snapshotCallerLocation(), async () => { + const elems = await this.js< + string[] + >`Array.from(document.querySelectorAll(${selector})).map(elem => elem.innerHTML)`; + return elems; + }); + } + async [Symbol.asyncDispose]() { if (activeClient === this) { activeClient = null; @@ -946,7 +969,14 @@ export class Client extends EventEmitter { expectErrorOverlay(errors: ErrorSpec[], caller: string | null = null) { return withAnnotatedStack(caller ?? 
snapshotCallerLocationMayFail(), async () => { this.suppressInteractivePrompt = true; - const hasVisibleModal = await this.js`document.querySelector("bun-hmr")?.style.display === "block"`; + let retries = 0; + let hasVisibleModal = false; + while (retries < 5) { + hasVisibleModal = await this.js`document.querySelector("bun-hmr")?.style.display === "block"`; + if (hasVisibleModal) break; + await Bun.sleep(200); + retries++; + } this.suppressInteractivePrompt = false; if (errors && errors.length > 0) { if (!hasVisibleModal) { @@ -1333,6 +1363,12 @@ if (!fs.existsSync(tempDir)) { fs.mkdirSync(tempDir, { recursive: true }); } +// Create a cache directory for React dependencies +const reactCacheDir = path.join(tempDir, ".react-cache"); +if (!fs.existsSync(reactCacheDir)) { + fs.mkdirSync(reactCacheDir, { recursive: true }); +} + function cleanTestDir(dir: string) { if (!fs.existsSync(dir)) return; const files = fs.readdirSync(dir); @@ -1342,6 +1378,43 @@ function cleanTestDir(dir: string) { } } +async function installReactWithCache(root: string) { + const cacheFiles = ["node_modules", "package.json", "bun.lock"]; + const cacheValid = cacheFiles.every(file => fs.existsSync(path.join(reactCacheDir, file))); + + if (cacheValid) { + // Copy from cache + for (const file of cacheFiles) { + const src = path.join(reactCacheDir, file); + const dest = path.join(root, file); + if (fs.statSync(src).isDirectory()) { + fs.cpSync(src, dest, { recursive: true }); + } else { + fs.copyFileSync(src, dest); + } + } + } else { + // Install fresh and populate cache + await Bun.$`${bunExe()} i react@experimental react-dom@experimental react-server-dom-bun react-refresh@experimental && ${bunExe()} install` + .cwd(root) + .env({ ...bunEnv }) + .throws(true); + + // Copy to cache for future use + for (const file of cacheFiles) { + const src = path.join(root, file); + const dest = path.join(reactCacheDir, file); + if (fs.existsSync(src)) { + if (fs.statSync(src).isDirectory()) { + fs.cpSync(src, dest, { recursive: true, force: true }); + } else { + fs.copyFileSync(src, dest); + } + } + } + } +} + const devTestRoot = path.join(import.meta.dir, "dev").replaceAll("\\", "/"); const prodTestRoot = path.join(import.meta.dir, "dev").replaceAll("\\", "/"); const counts: Record = {}; @@ -1495,7 +1568,7 @@ class OutputLineStream extends EventEmitter { export function indexHtmlScript(htmlFiles: string[]) { return [ - ...htmlFiles.map((file, i) => `import html${i} from ${JSON.stringify("./" + file.replaceAll(path.sep, '/'))};`), + ...htmlFiles.map((file, i) => `import html${i} from ${JSON.stringify("./" + file.replaceAll(path.sep, "/"))};`), "export default {", " static: {", ...(htmlFiles.length === 1 @@ -1518,10 +1591,7 @@ export function indexHtmlScript(htmlFiles: string[]) { ].join("\n"); } -const skipTargets = [ - process.platform, - isCI ? 'ci' : null, -].filter(Boolean); +const skipTargets = [process.platform, isCI ? "ci" : null].filter(Boolean); function testImpl( description: string, @@ -1560,6 +1630,10 @@ function testImpl( x => path.join(root, x), ); await writeAll(root, options.files); + const runInstall = options.framework === "react"; + if (runInstall) { + await installReactWithCache(root); + } if (options.files["bun.app.ts"] == undefined && htmlFiles.length === 0) { if (!options.framework) { throw new Error("Must specify one of: `options.framework`, `*.html`, or `bun.app.ts`"); @@ -1743,14 +1817,15 @@ function testImpl( jest.test.todo(name, run); return options; } - jest.test( + + (options.only ? 
jest.test.only : jest.test)( name, run, isStressTest ? 11 * 60 * 1000 : interactive - ? interactive_timeout - : (options.timeoutMultiplier ?? 1) * (isWindows ? 15_000 : 10_000) * (Bun.version.includes("debug") ? 2 : 1), + ? interactive_timeout + : (options.timeoutMultiplier ?? 1) * (isWindows ? 15_000 : 10_000) * (Bun.version.includes("debug") ? 2 : 1), ); return options; } catch { @@ -1806,20 +1881,20 @@ class TrailingLog { } #wrapLog(method: keyof Console) { - const m: Function = this.realConsole[method] = console[method]; + const m: Function = (this.realConsole[method] = console[method]); return (...args: any[]) => { if (this.lines > 0) { - process.stderr.write('\u001B[?2026h' + this.#clear()); + process.stderr.write("\u001B[?2026h" + this.#clear()); this.realConsole[method](...args); - process.stderr.write(this.message + '\u001B[?2026l'); + process.stderr.write(this.message + "\u001B[?2026l"); } else { m.apply(console, args); } - } + }; } #clear() { - return '\x1b[2K' + ("\x1b[1A\x1b[2K").repeat(this.lines) + '\r'; + return "\x1b[2K" + "\x1b[1A\x1b[2K".repeat(this.lines) + "\r"; } [Symbol.dispose] = () => { @@ -1832,12 +1907,12 @@ class TrailingLog { console.warn = this.realConsole.warn; console.info = this.realConsole.info; console.debug = this.realConsole.debug; - } + }; setMessage(message: string) { this.message = message.trim() + "\n"; this.lines = this.message.split("\n").length - 1; - process.stderr.write('\u001B[?2026h' + this.#clear() + this.message + '\u001B[?2026l'); + process.stderr.write("\u001B[?2026h" + this.#clear() + this.message + "\u001B[?2026l"); } } @@ -1859,6 +1934,17 @@ export function devTest(description: string, options: T return testImpl(description, options, "development", caller); } +devTest.only = function (description: string, options: DevServerTest) { + // Capture the caller name as part of the test tempdir + const callerLocation = snapshotCallerLocation(); + const caller = stackTraceFileName(callerLocation); + assert( + caller.startsWith(devTestRoot) || caller.includes("dev-and-prod"), + "dev server tests must be in test/bake/dev, not " + caller, + ); + return testImpl(description, { ...options, only: true }, "development", caller); +}; + export function prodTest(description: string, options: T): T { const callerLocation = snapshotCallerLocation(); const caller = stackTraceFileName(callerLocation); diff --git a/test/bake/client-fixture.mjs b/test/bake/client-fixture.mjs index d5bb42beb0..39f3968670 100644 --- a/test/bake/client-fixture.mjs +++ b/test/bake/client-fixture.mjs @@ -69,11 +69,12 @@ function createWindow(windowUrl) { window.internal = internal; }; + const original_window_fetch = window.fetch; window.fetch = async function (url, options) { if (typeof url === "string") { url = new URL(url, windowUrl).href; } - return fetch(url, options); + return await original_window_fetch(url, options); }; // Provide WebSocket diff --git a/test/bake/dev/bundle.test.ts b/test/bake/dev/bundle.test.ts index 2e1a4f59c4..075ade9c32 100644 --- a/test/bake/dev/bundle.test.ts +++ b/test/bake/dev/bundle.test.ts @@ -352,6 +352,7 @@ devTest("import.meta.main", { }, }); devTest("commonjs forms", { + timeoutMultiplier: 2, files: { "index.html": emptyHtmlFile({ styles: [], @@ -366,32 +367,50 @@ devTest("commonjs forms", { `, }, async test(dev) { + console.log("Initial"); await using c = await dev.client("/"); + console.log(" expecting message"); await c.expectMessage({ field: {} }); + console.log(" expecting reload"); await c.expectReload(async () => { + console.log(" 
writing"); await dev.write("cjs.js", `exports.field = "1";`); + console.log(" now reloading"); }); + console.log(" expecting message"); await c.expectMessage({ field: "1" }); + console.log("Second"); + console.log(" expecting reload"); await c.expectReload(async () => { + console.log(" writing"); await dev.write("cjs.js", `let theExports = exports; theExports.field = "2";`); }); + console.log(" expecting message"); await c.expectMessage({ field: "2" }); + console.log("Third"); + console.log(" expecting reload"); await c.expectReload(async () => { + console.log(" writing"); await dev.write("cjs.js", `let theModule = module; theModule.exports.field = "3";`); }); + console.log(" expecting message"); await c.expectMessage({ field: "3" }); + console.log("Fourth"); await c.expectReload(async () => { await dev.write("cjs.js", `let { exports } = module; exports.field = "4";`); }); await c.expectMessage({ field: "4" }); + console.log("Fifth"); await c.expectReload(async () => { await dev.write("cjs.js", `var { exports } = module; exports.field = "4.5";`); }); await c.expectMessage({ field: "4.5" }); + console.log("Sixth"); await c.expectReload(async () => { await dev.write("cjs.js", `let theExports = module.exports; theExports.field = "5";`); }); await c.expectMessage({ field: "5" }); + console.log("Seventh"); await c.expectReload(async () => { await dev.write("cjs.js", `require; eval("module.exports.field = '6'");`); }); diff --git a/test/bake/dev/react-spa.test.ts b/test/bake/dev/react-spa.test.ts index 8944361666..9e951c0165 100644 --- a/test/bake/dev/react-spa.test.ts +++ b/test/bake/dev/react-spa.test.ts @@ -421,8 +421,330 @@ devTest("custom hook tracking", { } `, }, + async test(dev) { await using c = await dev.client("/", {}); await c.expectMessage("PASS"); }, }); + +devTest("react component with hooks and mutual recursion renders without error", { + files: { + ...reactAndRefreshStub, + "index.tsx": ` + import ComponentWithConst, { helper } from './component-with-const'; + import ComponentWithLet, { getCounter } from './component-with-let'; + import ComponentWithVar, { getGlobalState } from './component-with-var'; + import MathComponent, { utilityFunction } from './component-with-function'; + import ProcessorComponent, { DataProcessor } from './component-with-class'; + + function useThis() { + return null; + } + + function useFakeState(initial) { + return [initial, () => {}]; + } + + function useFakeEffect(fn) { + fn(); + } + + export default function AA({ depth = 0 }: { depth: number }) { + const [count, setCount] = useFakeState(0); + useThis(); + useFakeEffect(() => {}); + return depth === 0 && + } + + function B() { + const [value, setValue] = useFakeState(42); + useFakeEffect(() => {}); + return + } + + // Call B outside the function body to test statement -> expression transform + B(); + + // Call all imported default functions outside their bodies + ComponentWithConst(); + ComponentWithLet(); + ComponentWithVar(); + MathComponent({ input: 10 }); + ProcessorComponent({ text: "test" }); + + // Use all the imported components and their non-default exports + console.log("ComponentWithConst:", ComponentWithConst()); + console.log("helper:", helper()); + + console.log("ComponentWithLet:", ComponentWithLet()); + console.log("getCounter:", getCounter()); + + console.log("ComponentWithVar:", ComponentWithVar()); + console.log("getGlobalState:", getGlobalState()); + + console.log("MathComponent:", MathComponent({ input: 10 })); + console.log("utilityFunction:", utilityFunction(15)); + + 
console.log("ProcessorComponent:", ProcessorComponent({ text: "test" })); + const processor = new DataProcessor(); + console.log("DataProcessor:", processor.process("world")); + + console.log("PASS"); + `, + "component-with-const.tsx": ` + const helperValue = "helper-result"; + + function useFakeState(initial) { + return [initial, () => {}]; + } + + function useFakeCallback(fn) { + return fn; + } + + export default function Component() { + const [state, setState] = useFakeState(helperValue); + const [count, setCount] = useFakeState(0); + const callback = useFakeCallback(() => {}); + return helperValue; + } + + export const helper = () => helperValue; + + // Call Component outside its body to test statement -> expression transform + Component(); + const result1 = Component(); + helper(); + `, + "component-with-let.tsx": ` + let counter = 0; + + function useFakeState(initial) { + return [initial, () => {}]; + } + + function useFakeEffect(fn, deps) { + fn(); + } + + function useFakeMemo(fn, deps) { + return fn(); + } + + export default function Counter() { + const [localCount, setLocalCount] = useFakeState(0); + const [multiplier, setMultiplier] = useFakeState(1); + useFakeEffect(() => { + setLocalCount(counter * multiplier); + }, [multiplier]); + const memoized = useFakeMemo(() => counter * 2, [counter]); + return ++counter; + } + + export const getCounter = () => counter; + + // Call Counter outside its body multiple times + Counter(); + Counter(); + const currentCount = Counter(); + getCounter(); + + // Test with different call patterns + [1, 2, 3].forEach(() => Counter()); + const counters = [Counter, Counter, Counter].map(fn => fn()); + `, + "component-with-var.tsx": ` + var globalState = { value: 42 }; + + function useFakeState(initial) { + return [initial, () => {}]; + } + + function useFakeMemo(fn, deps) { + return fn(); + } + + function useFakeRef(initial) { + return { current: initial }; + } + + export default function StateComponent() { + const [localState, setLocalState] = useFakeState(globalState.value); + const [factor, setFactor] = useFakeState(2); + const computed = useFakeMemo(() => localState * factor, [localState, factor]); + const ref = useFakeRef(null); + return globalState.value; + } + + export const getGlobalState = () => globalState; + + // Call StateComponent outside its body + StateComponent(); + const state1 = StateComponent(); + const state2 = StateComponent(); + getGlobalState(); + + // Test with object method calls + const obj = { fn: StateComponent }; + obj.fn(); + + // Test with array of functions + const fns = [StateComponent, getGlobalState]; + fns[0](); + fns[1](); + `, + "component-with-function.tsx": ` + function multiply(x: number) { + return x * 2; + } + + function useFakeState(initial) { + return [initial, () => {}]; + } + + function useFakeCallback(fn, deps) { + return fn; + } + + function useFakeReducer(reducer, initial) { + return [initial, () => {}]; + } + + export default function MathComponent({ input }: { input: number }) { + const [result, setResult] = useFakeState(0); + const [operations, setOperations] = useFakeState(0); + const [state, dispatch] = useFakeReducer((s, a) => s, {}); + + const calculate = useFakeCallback(() => { + const value = multiply(input); + setResult(value); + setOperations(prev => prev + 1); + return value; + }, [input]); + + return multiply(input); + } + + export const utilityFunction = multiply; + + // Call MathComponent outside its body with various patterns + MathComponent({ input: 5 }); + MathComponent({ input: 10 }); 
+ const result1 = MathComponent({ input: 15 }); + utilityFunction(20); + + // Test with function composition + const compose = (fn: Function) => fn({ input: 25 }); + compose(MathComponent); + + // Test with conditional calls + const shouldCall = true; + if (shouldCall) { + MathComponent({ input: 30 }); + } + + // Test with ternary + const ternaryResult = true ? MathComponent({ input: 35 }) : null; + + // Test with logical operators + true && MathComponent({ input: 40 }); + false || MathComponent({ input: 45 }); + `, + "component-with-class.tsx": ` + class Processor { + process(data: string) { + return data.toUpperCase(); + } + } + + function useFakeState(initial) { + return [initial, () => {}]; + } + + function useFakeReducer(reducer, initial) { + return [initial, () => {}]; + } + + function useFakeRef(initial) { + return { current: initial }; + } + + function useFakeContext() { + return {}; + } + + const reducer = (state: any, action: any) => { + switch (action.type) { + case 'process': + return { ...state, processed: action.payload }; + default: + return state; + } + }; + + export default function ProcessorComponent({ text }: { text: string }) { + const [state, setState] = useFakeState({ text, processed: '' }); + const [history, dispatch] = useFakeReducer(reducer, { processed: [] }); + const processorRef = useFakeRef(new Processor()); + const context = useFakeContext(); + + const processor = new Processor(); + const result = processor.process(text); + + dispatch({ type: 'process', payload: result }); + + return processor.process(text); + } + + export const DataProcessor = Processor; + + // Call ProcessorComponent outside its body + ProcessorComponent({ text: "hello" }); + ProcessorComponent({ text: "world" }); + const processed1 = ProcessorComponent({ text: "test1" }); + const processed2 = ProcessorComponent({ text: "test2" }); + + // Test with new DataProcessor + const proc1 = new DataProcessor(); + const proc2 = new DataProcessor(); + proc1.process("data1"); + proc2.process("data2"); + + // Test with function binding + const boundProcessor = ProcessorComponent.bind(null); + boundProcessor({ text: "bound" }); + + // Test with apply/call + ProcessorComponent.call(null, { text: "called" }); + ProcessorComponent.apply(null, [{ text: "applied" }]); + + // Test with destructuring + const { process } = new DataProcessor(); + + // Test with spread operator + const args = [{ text: "spread" }]; + ProcessorComponent(...args); + `, + "index.html": emptyHtmlFile({ + scripts: ["index.tsx"], + body: `
              `, + }), + }, + async test(dev) { + await using c = await dev.client("/", {}); + await c.expectMessage( + "ComponentWithConst:", + "helper:", + "ComponentWithLet:", + "getCounter:", + "ComponentWithVar:", + "getGlobalState:", + "MathComponent:", + "utilityFunction:", + "ProcessorComponent:", + "DataProcessor:", + "PASS", + ); + }, +}); diff --git a/test/bake/dev/ssg-pages-router.test.ts b/test/bake/dev/ssg-pages-router.test.ts new file mode 100644 index 0000000000..af60b9e88b --- /dev/null +++ b/test/bake/dev/ssg-pages-router.test.ts @@ -0,0 +1,295 @@ +// Test SSG pages router functionality +import { expect } from "bun:test"; +import { devTest } from "../bake-harness"; + +devTest("SSG pages router - multiple static pages", { + framework: "react", + files: { + "pages/about.tsx": ` + export default function AboutPage() { + return
+ <div>
+   <h1>About Page</h1>
+ </div>
              ; + } + `, + "pages/contact.tsx": ` + export default function ContactPage() { + return
+ <div>
+   <h1>Contact Page</h1>
+ </div>
              ; + } + `, + }, + async test(dev) { + // Test about page + await using c2 = await dev.client("/about"); + expect(await c2.elemText("h1")).toBe("About Page"); + + // Test contact page + await using c3 = await dev.client("/contact"); + expect(await c3.elemText("h1")).toBe("Contact Page"); + }, +}); + +devTest("SSG pages router - dynamic routes with [slug]", { + framework: "react", + files: { + "pages/[slug].tsx": ` + type Props = Bun.SSGProps; + + const Page: Bun.SSGPage = async ({ params }) => { + return ( +
<div>
+   <h1>Dynamic Page: {params.slug}</h1>
+   <p>Slug value: {params.slug}</p>
+ </div>
              + ); + }; + + export default Page; + + export const getStaticPaths: Bun.GetStaticPaths = async () => { + return { + paths: [ + { params: { slug: "first-post" } }, + { params: { slug: "second-post" } }, + { params: { slug: "third-post" } }, + ], + }; + }; + `, + }, + async test(dev) { + // Test dynamic routes + await using c1 = await dev.client("/first-post"); + expect(await c1.elemText("h1")).toBe("Dynamic Page: first-post"); + expect(await c1.elemText("p")).toBe("Slug value: first-post"); + + await using c2 = await dev.client("/second-post"); + expect(await c2.elemText("h1")).toBe("Dynamic Page: second-post"); + + await using c3 = await dev.client("/third-post"); + expect(await c3.elemText("h1")).toBe("Dynamic Page: third-post"); + }, +}); + +devTest("SSG pages router - nested routes", { + framework: "react", + files: { + "pages/blog/index.tsx": ` + export default function BlogIndex() { + return
+ <div>
+   <h1>Blog Index</h1>
+ </div>
              ; + } + `, + "pages/blog/[id].tsx": ` + const BlogPost: Bun.SSGPage = ({ params }) => { + return
+ <div>
+   <h1>Blog Post {params.id}</h1>
+ </div>
              ; + }; + + export default BlogPost; + + export const getStaticPaths: Bun.GetStaticPaths = async () => { + return { + paths: [ + { params: { id: "1" } }, + { params: { id: "2" } }, + ], + }; + }; + `, + "pages/blog/categories/[category].tsx": ` + const CategoryPage: Bun.SSGPage = ({ params }) => { + return
+ <div>
+   <h1>Category: {params.category}</h1>
+ </div>
              ; + }; + + export default CategoryPage; + + export const getStaticPaths: Bun.GetStaticPaths = async () => { + return { + paths: [ + { params: { category: "tech" } }, + { params: { category: "lifestyle" } }, + ], + }; + }; + `, + }, + async test(dev) { + // Test blog index + await using c1 = await dev.client("/blog"); + expect(await c1.elemText("h1")).toBe("Blog Index"); + + // Test blog posts + await using c2 = await dev.client("/blog/1"); + expect(await c2.elemText("h1")).toBe("Blog Post 1"); + + await using c3 = await dev.client("/blog/2"); + expect(await c3.elemText("h1")).toBe("Blog Post 2"); + + // Test categories + await using c4 = await dev.client("/blog/categories/tech"); + expect(await c4.elemText("h1")).toBe("Category: tech"); + + await using c5 = await dev.client("/blog/categories/lifestyle"); + expect(await c5.elemText("h1")).toBe("Category: lifestyle"); + }, +}); + +devTest("SSG pages router - hot reload on page changes", { + framework: "react", + files: { + "pages/index.tsx": ` + export default function IndexPage() { + return
+ <div>
+   <h1>Welcome to SSG</h1>
+ </div>
              ; + } + `, + }, + async test(dev) { + await using c = await dev.client("/"); + expect(await c.elemText("h1")).toBe("Welcome to SSG"); + + // Update the page + await dev.write( + "pages/index.tsx", + ` + export default function IndexPage() { + console.log("updated load"); + return
+ <div>
+   <h1>Updated Content</h1>
+ </div>
              ; + } + `, + ); + + // this %c%s%c is a react devtools thing and I don't know how to turn it off + await c.expectMessage("%c%s%c updated load"); + expect(await c.elemText("h1")).toBe("Updated Content"); + }, +}); + +devTest("SSG pages router - data fetching with async components", { + framework: "react", + files: { + "pages/data.tsx": ` + async function fetchData() { + // Simulate API call + return new Promise(resolve => { + setTimeout(() => { + resolve({ message: "Data from API", items: ["Item 1", "Item 2", "Item 3"] }); + }, 10); + }); + } + + export default async function DataPage() { + const data = await fetchData(); + + return ( +
<div>
+   <h1>{data.message}</h1>
+   <ul>
+     {data.items.map((item, index) => (
+       <li key={index}>{item}</li>
+     ))}
+   </ul>
+ </div>
              + ); + } + `, + }, + async test(dev) { + await using c = await dev.client("/data"); + expect(await c.elemText("h1")).toBe("Data from API"); + + const items = await c.elemsText("li"); + expect(items).toEqual(["Item 1", "Item 2", "Item 3"]); + }, +}); + +devTest("SSG pages router - multiple dynamic segments", { + framework: "react", + files: { + "pages/[category]/[year]/[slug].tsx": ` + const ArticlePage: Bun.SSGPage = ({ params }) => { + return ( +
<div>
+   <h1>{params.slug}</h1>
+   <p>Category: {params.category}</p>
+   <p>Year: {params.year}</p>
+ </div>
              + ); + }; + + export default ArticlePage; + + export const getStaticPaths: Bun.GetStaticPaths = async () => { + return { + paths: [ + { params: { category: "tech", year: "2024", slug: "bun-release" } }, + { params: { category: "news", year: "2024", slug: "breaking-story" } }, + { params: { category: "tech", year: "2023", slug: "year-review" } }, + ], + }; + }; + `, + }, + async test(dev) { + // Test first path + await using c1 = await dev.client("/tech/2024/bun-release"); + expect(await c1.elemText("h1")).toBe("bun-release"); + expect(await c1.elemsText("p")).toEqual(["Category: tech", "Year: 2024"]); + + // Test second path + await using c2 = await dev.client("/news/2024/breaking-story"); + expect(await c2.elemText("h1")).toBe("breaking-story"); + expect(await c2.elemsText("p")).toEqual(["Category: news", "Year: 2024"]); + + // Test third path + await using c3 = await dev.client("/tech/2023/year-review"); + expect(await c3.elemText("h1")).toBe("year-review"); + expect(await c3.elemsText("p")).toEqual(["Category: tech", "Year: 2023"]); + }, +}); + +devTest("SSG pages router - file loading with Bun.file", { + framework: "react", + fixture: "ssg-pages-router", + files: { + "pages/[slug].tsx": ` + import { join } from "path"; + + const PostPage: Bun.SSGPage = async ({ params }) => { + const content = await Bun.file( + join(process.cwd(), "posts", params.slug + ".txt") + ).text(); + + return ( +
<div>
+   <h1>{params.slug}</h1>
+   <div>{content}</div>
+ </div>
              + ); + }; + + export default PostPage; + + export const getStaticPaths: Bun.GetStaticPaths = async () => { + const glob = new Bun.Glob("**/*.txt"); + const paths = []; + + for (const file of Array.from(glob.scanSync({ cwd: join(process.cwd(), "posts") }))) { + const slug = file.replace(/\\.txt$/, ""); + paths.push({ params: { slug } }); + } + + return { paths }; + }; + `, + "posts/hello-world.txt": "This is the content of hello world post", + "posts/second-post.txt": "This is the second post content", + }, + async test(dev) { + // Test first post + await using c1 = await dev.client("/hello-world"); + expect(await c1.elemText("h1")).toBe("hello-world"); + expect(await c1.elemText("div div")).toBe("This is the content of hello world post"); + + // Test second post + await using c2 = await dev.client("/second-post"); + expect(await c2.elemText("h1")).toBe("second-post"); + expect(await c2.elemText("div div")).toBe("This is the second post content"); + }, +}); diff --git a/test/bake/dev/vfile.test.ts b/test/bake/dev/vfile.test.ts new file mode 100644 index 0000000000..beed2583d8 --- /dev/null +++ b/test/bake/dev/vfile.test.ts @@ -0,0 +1,65 @@ +import { describe, expect } from "bun:test"; +import { devTest, minimalFramework } from "../bake-harness"; + +/** + * Enure that node builtins imported on the server behave properly + */ +describe("node builtin test", () => { + /** + * + * This creates a minimal reproduction of an issue when VFile was imported on the dev server. + * + * The issue was that it was importing node:process and this was not correctly handled + */ + devTest("vfile import in server component", { + framework: minimalFramework, + files: { + "node_modules/vfile/package.json": JSON.stringify({ + name: "vfile", + version: "6.0.3", + type: "module", + exports: { + ".": "./lib/index.js", + }, + }), + "node_modules/vfile/lib/process.js": ` + export { default as minproc } from 'process'; + `, + "node_modules/vfile/lib/index.js": ` + // Minimal VFile implementation for testing + import { minproc } from './process.js'; + + export class VFile { + constructor(value) { + this.value = value; + this.data = {}; + this.messages = []; + this.history = []; + this.cwd = minproc.cwd(); + } + } + `, + "routes/test.ts": ` + import { VFile } from "vfile"; + + export default function (req, meta) { + const foo = new VFile("hello world"); + console.log(foo.value); + + return new Response(\`VFile content: \${foo.value}\`, { + headers: { "Content-Type": "text/plain" } + }); + } + `, + }, + async test(dev) { + // Test that the dev server can bundle the page without errors + const response = await dev.fetch("/test"); + expect(response.status).toBe(200); + + // Check that VFile is properly bundled and works + const text = await response.text(); + expect(text).toBe("VFile content: hello world"); + }, + }); +}); diff --git a/test/bun.lock b/test/bun.lock index 1b3732741f..fb4796cfb1 100644 --- a/test/bun.lock +++ b/test/bun.lock @@ -32,7 +32,7 @@ "commander": "12.1.0", "detect-libc": "2.0.3", "devalue": "5.1.1", - "duckdb": "1.1.3", + "duckdb": "1.3.1", "es-module-lexer": "1.3.0", "esbuild": "0.18.6", "express": "4.18.2", @@ -84,6 +84,8 @@ "typeorm": "0.3.20", "typescript": "5.0.2", "undici": "5.20.0", + "unzipper": "0.12.3", + "uuid": "11.1.0", "v8-heapsnapshot": "1.3.1", "verdaccio": "6.0.0", "vitest": "0.32.2", @@ -326,6 +328,8 @@ "@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", 
"strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], + "@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="], + "@jest/schemas": ["@jest/schemas@29.6.3", "", { "dependencies": { "@sinclair/typebox": "^0.27.8" } }, "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA=="], "@jimp/core": ["@jimp/core@1.6.0", "", { "dependencies": { "@jimp/file-ops": "1.6.0", "@jimp/types": "1.6.0", "@jimp/utils": "1.6.0", "await-to-js": "^3.0.0", "exif-parser": "^0.1.12", "file-type": "^16.0.0", "mime": "3" } }, "sha512-EQQlKU3s9QfdJqiSrZWNTxBs3rKXgO2W+GxNXDtwchF3a4IqxDheFX1ti+Env9hdJXDiYLp2jTRjlxhPthsk8w=="], @@ -400,7 +404,7 @@ "@lukeed/csprng": ["@lukeed/csprng@1.1.0", "", {}, "sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA=="], - "@mapbox/node-pre-gyp": ["@mapbox/node-pre-gyp@1.0.11", "", { "dependencies": { "detect-libc": "^2.0.0", "https-proxy-agent": "^5.0.0", "make-dir": "^3.1.0", "node-fetch": "^2.6.7", "nopt": "^5.0.0", "npmlog": "^5.0.1", "rimraf": "^3.0.2", "semver": "^7.3.5", "tar": "^6.1.11" }, "bin": { "node-pre-gyp": "bin/node-pre-gyp" } }, "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ=="], + "@mapbox/node-pre-gyp": ["@mapbox/node-pre-gyp@2.0.0", "", { "dependencies": { "consola": "^3.2.3", "detect-libc": "^2.0.0", "https-proxy-agent": "^7.0.5", "node-fetch": "^2.6.7", "nopt": "^8.0.0", "semver": "^7.5.3", "tar": "^7.4.0" }, "bin": { "node-pre-gyp": "bin/node-pre-gyp" } }, "sha512-llMXd39jtP0HpQLVI37Bf1m2ADlEb35GYSh1SDSLsBhR+5iCxiNGlT31yqbNtVHygHAtMy6dWFERpU2JgufhPg=="], "@mongodb-js/saslprep": ["@mongodb-js/saslprep@1.1.7", "", { "dependencies": { "sparse-bitfield": "^3.0.3" } }, "sha512-dCHW/oEX0KJ4NjDULBo3JiOaK5+6axtpBbS+ao2ZInoAL9/YRQLhXzSNAFz7hP4nzLkIqsfYAK/PDE3+XHny0Q=="], @@ -866,7 +870,7 @@ "aproba": ["aproba@2.0.0", "", {}, "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ=="], - "are-we-there-yet": ["are-we-there-yet@2.0.0", "", { "dependencies": { "delegates": "^1.0.0", "readable-stream": "^3.6.0" } }, "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw=="], + "are-we-there-yet": ["are-we-there-yet@3.0.1", "", { "dependencies": { "delegates": "^1.0.0", "readable-stream": "^3.6.0" } }, "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg=="], "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], @@ -950,6 +954,8 @@ "bl": ["bl@5.1.0", "", { "dependencies": { "buffer": "^6.0.3", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ=="], + "bluebird": ["bluebird@3.7.2", "", {}, "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg=="], + "blueimp-md5": ["blueimp-md5@2.19.0", "", {}, "sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w=="], "bmp-ts": ["bmp-ts@1.0.9", "", {}, 
"sha512-cTEHk2jLrPyi+12M3dhpEbnnPOsaZuq7C45ylbbQIiWgDFZq4UVYPEY5mlqjvsj/6gJv9qX5sa+ebDzLXT28Vw=="], @@ -1184,7 +1190,9 @@ "dset": ["dset@3.1.4", "", {}, "sha512-2QF/g9/zTaPDc3BjNcVTGoBbXBgYfMTTceLaYcFJ/W9kggFUkhxD/hMEeuLKbugyef9SqAx8cpgwlIP/jinUTA=="], - "duckdb": ["duckdb@1.1.3", "", { "dependencies": { "@mapbox/node-pre-gyp": "^1.0.0", "node-addon-api": "^7.0.0", "node-gyp": "^9.3.0" } }, "sha512-tIpZr2NsSkYmfGC1ETl75RuVsaDyjvR3yAOrECcIyw7bdluzcyzEXOXoiuT+4t54hT+CppZv43gk/HiZdKW9Vw=="], + "duckdb": ["duckdb@1.3.1", "", { "dependencies": { "@mapbox/node-pre-gyp": "^2.0.0", "node-addon-api": "^7.0.0", "node-gyp": "^9.3.0" } }, "sha512-wSCxu6zSkHkGHtLrI5MmHYUOpbi08s2eIY/QCg2f1YsSyohjA3MRnUMdDb88oqgLa7/h+/wHuIe1RXRu4k04Sw=="], + + "duplexer2": ["duplexer2@0.1.4", "", { "dependencies": { "readable-stream": "^2.0.2" } }, "sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA=="], "duplexify": ["duplexify@4.1.3", "", { "dependencies": { "end-of-stream": "^1.4.1", "inherits": "^2.0.3", "readable-stream": "^3.1.1", "stream-shift": "^1.0.2" } }, "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA=="], @@ -1366,7 +1374,7 @@ "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], - "gauge": ["gauge@3.0.2", "", { "dependencies": { "aproba": "^1.0.3 || ^2.0.0", "color-support": "^1.1.2", "console-control-strings": "^1.0.0", "has-unicode": "^2.0.1", "object-assign": "^4.1.1", "signal-exit": "^3.0.0", "string-width": "^4.2.3", "strip-ansi": "^6.0.1", "wide-align": "^1.1.2" } }, "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q=="], + "gauge": ["gauge@4.0.4", "", { "dependencies": { "aproba": "^1.0.3 || ^2.0.0", "color-support": "^1.1.3", "console-control-strings": "^1.1.0", "has-unicode": "^2.0.1", "signal-exit": "^3.0.7", "string-width": "^4.2.3", "strip-ansi": "^6.0.1", "wide-align": "^1.1.5" } }, "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg=="], "generate-function": ["generate-function@2.3.1", "", { "dependencies": { "is-property": "^1.0.2" } }, "sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ=="], @@ -1700,8 +1708,6 @@ "magicast": ["magicast@0.3.5", "", { "dependencies": { "@babel/parser": "^7.25.4", "@babel/types": "^7.25.4", "source-map-js": "^1.2.0" } }, "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ=="], - "make-dir": ["make-dir@3.1.0", "", { "dependencies": { "semver": "^6.0.0" } }, "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw=="], - "make-fetch-happen": ["make-fetch-happen@13.0.1", "", { "dependencies": { "@npmcli/agent": "^2.0.0", "cacache": "^18.0.0", "http-cache-semantics": "^4.1.1", "is-lambda": "^1.0.1", "minipass": "^7.0.2", "minipass-fetch": "^3.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", "proc-log": "^4.2.0", "promise-retry": "^2.0.1", "ssri": "^10.0.0" } }, "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA=="], "markdown-table": ["markdown-table@3.0.4", "", {}, "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw=="], @@ -1886,6 +1892,8 @@ "node-gyp-build-optional-packages": ["node-gyp-build-optional-packages@5.0.7", "", { "bin": { 
"node-gyp-build-optional-packages": "bin.js", "node-gyp-build-optional-packages-test": "build-test.js", "node-gyp-build-optional-packages-optional": "optional.js" } }, "sha512-YlCCc6Wffkx0kHkmam79GKvDQ6x+QZkMjFGrIMxgFNILFvGSbCp2fCBC55pGTT9gVaz8Na5CLmxt/urtzRv36w=="], + "node-int64": ["node-int64@0.4.0", "", {}, "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw=="], + "node-mock-http": ["node-mock-http@1.0.0", "", {}, "sha512-0uGYQ1WQL1M5kKvGRXWQ3uZCHtLTO8hln3oBjIusM75WoesZ909uQJs/Hb946i2SS+Gsrhkaa6iAO17jRIv6DQ=="], "node-releases": ["node-releases@2.0.14", "", {}, "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw=="], @@ -1898,7 +1906,7 @@ "npm-run-path": ["npm-run-path@4.0.1", "", { "dependencies": { "path-key": "^3.0.0" } }, "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw=="], - "npmlog": ["npmlog@5.0.1", "", { "dependencies": { "are-we-there-yet": "^2.0.0", "console-control-strings": "^1.1.0", "gauge": "^3.0.0", "set-blocking": "^2.0.0" } }, "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw=="], + "npmlog": ["npmlog@6.0.2", "", { "dependencies": { "are-we-there-yet": "^3.0.0", "console-control-strings": "^1.1.0", "gauge": "^4.0.3", "set-blocking": "^2.0.0" } }, "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg=="], "nwsapi": ["nwsapi@2.2.16", "", {}, "sha512-F1I/bimDpj3ncaNDhfyMWuFqmQDBwDB0Fogc2qpL3BWvkQteFD/8BzWuIRl83rq0DXfm8SGt/HFhLXZyljTXcQ=="], @@ -2518,6 +2526,8 @@ "unstorage": ["unstorage@1.15.0", "", { "dependencies": { "anymatch": "^3.1.3", "chokidar": "^4.0.3", "destr": "^2.0.3", "h3": "^1.15.0", "lru-cache": "^10.4.3", "node-fetch-native": "^1.6.6", "ofetch": "^1.4.1", "ufo": "^1.5.4" }, "peerDependencies": { "@azure/app-configuration": "^1.8.0", "@azure/cosmos": "^4.2.0", "@azure/data-tables": "^13.3.0", "@azure/identity": "^4.6.0", "@azure/keyvault-secrets": "^4.9.0", "@azure/storage-blob": "^12.26.0", "@capacitor/preferences": "^6.0.3", "@deno/kv": ">=0.9.0", "@netlify/blobs": "^6.5.0 || ^7.0.0 || ^8.1.0", "@planetscale/database": "^1.19.0", "@upstash/redis": "^1.34.3", "@vercel/blob": ">=0.27.1", "@vercel/kv": "^1.0.1", "aws4fetch": "^1.0.20", "db0": ">=0.2.1", "idb-keyval": "^6.2.1", "ioredis": "^5.4.2", "uploadthing": "^7.4.4" }, "optionalPeers": ["@azure/app-configuration", "@azure/cosmos", "@azure/data-tables", "@azure/identity", "@azure/keyvault-secrets", "@azure/storage-blob", "@capacitor/preferences", "@deno/kv", "@netlify/blobs", "@planetscale/database", "@upstash/redis", "@vercel/blob", "@vercel/kv", "aws4fetch", "db0", "idb-keyval", "ioredis", "uploadthing"] }, "sha512-m40eHdGY/gA6xAPqo8eaxqXgBuzQTlAKfmB1iF7oCKXE1HfwHwzDJBywK+qQGn52dta+bPlZluPF7++yR3p/bg=="], + "unzipper": ["unzipper@0.12.3", "", { "dependencies": { "bluebird": "~3.7.2", "duplexer2": "~0.1.4", "fs-extra": "^11.2.0", "graceful-fs": "^4.2.2", "node-int64": "^0.4.0" } }, "sha512-PZ8hTS+AqcGxsaQntl3IRBw65QrBI6lxzqDEL7IAo/XCEqRTKGfOX56Vea5TH9SZczRVxuzk1re04z/YjuYCJA=="], + "update-browserslist-db": ["update-browserslist-db@1.0.16", "", { "dependencies": { "escalade": "^3.1.2", "picocolors": "^1.0.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-KVbTxlBYlckhF5wgfyZXTWnMn7MMZjMu9XG8bPlliUOP9ThaF4QnhP8qrjrH7DRzHfSk0oQv1wToW+iA5GajEQ=="], "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, 
"sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], @@ -2534,7 +2544,7 @@ "utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="], - "uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="], + "uuid": ["uuid@11.1.0", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A=="], "v8-compile-cache": ["v8-compile-cache@2.4.0", "", {}, "sha512-ocyWc3bAHBB/guyqJQVI5o4BZkPhznPYUG2ea80Gond/BgNWpap8TOmLSeeQG7bnh2KMISxskdADG59j7zruhw=="], @@ -2744,11 +2754,11 @@ "@jridgewell/trace-mapping/@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.4.15", "", {}, "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg=="], - "@mapbox/node-pre-gyp/https-proxy-agent": ["https-proxy-agent@5.0.1", "", { "dependencies": { "agent-base": "6", "debug": "4" } }, "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA=="], + "@mapbox/node-pre-gyp/https-proxy-agent": ["https-proxy-agent@7.0.6", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="], - "@mapbox/node-pre-gyp/nopt": ["nopt@5.0.0", "", { "dependencies": { "abbrev": "1" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ=="], + "@mapbox/node-pre-gyp/nopt": ["nopt@8.1.0", "", { "dependencies": { "abbrev": "^3.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A=="], - "@mapbox/node-pre-gyp/semver": ["semver@7.6.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A=="], + "@mapbox/node-pre-gyp/tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], "@nestjs/common/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], @@ -2928,6 +2938,8 @@ "duckdb/node-gyp": ["node-gyp@9.4.1", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "glob": "^7.1.4", "graceful-fs": "^4.2.6", "make-fetch-happen": "^10.0.3", "nopt": "^6.0.0", "npmlog": "^6.0.0", "rimraf": "^3.0.2", "semver": "^7.3.5", "tar": "^6.1.2", "which": "^2.0.2" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-OQkWKbjQKbGkMf/xqI1jjy3oCTgMKJac58G2+bjZb3fza6gW2YrCSdMQYaoTb70crvE//Gngr4f0AgVHmqHvBQ=="], + "duplexer2/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="], + "duplexify/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", 
"util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], "ecc-jsbn/jsbn": ["jsbn@0.1.1", "", {}, "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg=="], @@ -3014,8 +3026,6 @@ "jsonwebtoken/semver": ["semver@7.6.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w=="], - "make-dir/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - "make-fetch-happen/proc-log": ["proc-log@4.2.0", "", {}, "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA=="], "micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], @@ -3198,6 +3208,8 @@ "typeorm/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "typeorm/uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="], + "unbzip2-stream/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], "unstorage/chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="], @@ -3278,11 +3290,19 @@ "@isaacs/cliui/wrap-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], - "@mapbox/node-pre-gyp/https-proxy-agent/agent-base": ["agent-base@6.0.2", "", { "dependencies": { "debug": "4" } }, "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ=="], + "@mapbox/node-pre-gyp/https-proxy-agent/agent-base": ["agent-base@7.1.3", "", {}, "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw=="], - "@mapbox/node-pre-gyp/https-proxy-agent/debug": ["debug@4.3.5", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg=="], + "@mapbox/node-pre-gyp/https-proxy-agent/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], - "@mapbox/node-pre-gyp/nopt/abbrev": ["abbrev@1.1.1", "", {}, "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="], + "@mapbox/node-pre-gyp/nopt/abbrev": ["abbrev@3.0.1", "", {}, "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="], + + "@mapbox/node-pre-gyp/tar/chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], + + "@mapbox/node-pre-gyp/tar/minizlib": ["minizlib@3.0.2", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA=="], + + "@mapbox/node-pre-gyp/tar/mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, 
"sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], + + "@mapbox/node-pre-gyp/tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], "@npmcli/agent/https-proxy-agent/debug": ["debug@4.3.5", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg=="], @@ -3456,12 +3476,16 @@ "duckdb/node-gyp/nopt": ["nopt@6.0.0", "", { "dependencies": { "abbrev": "^1.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ZwLpbTgdhuZUnZzjd7nb1ZV+4DoiC6/sfiVKok72ym/4Tlf+DFdlHYmT2JPmcNNWV6Pi3SDf1kT+A4r9RTuT9g=="], - "duckdb/node-gyp/npmlog": ["npmlog@6.0.2", "", { "dependencies": { "are-we-there-yet": "^3.0.0", "console-control-strings": "^1.1.0", "gauge": "^4.0.3", "set-blocking": "^2.0.0" } }, "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg=="], - "duckdb/node-gyp/semver": ["semver@7.6.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A=="], "duckdb/node-gyp/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + "duplexer2/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], + + "duplexer2/readable-stream/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="], + + "duplexer2/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="], + "engine.io-client/debug/ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], "engine.io/debug/ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], @@ -3706,8 +3730,6 @@ "@inquirer/core/wrap-ansi/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], - "@mapbox/node-pre-gyp/https-proxy-agent/debug/ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], - "@npmcli/agent/https-proxy-agent/debug/ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], "@npmcli/agent/socks-proxy-agent/debug/ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], @@ -3756,10 +3778,6 @@ "duckdb/node-gyp/nopt/abbrev": ["abbrev@1.1.1", "", {}, "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="], - "duckdb/node-gyp/npmlog/are-we-there-yet": ["are-we-there-yet@3.0.1", "", { "dependencies": { "delegates": "^1.0.0", "readable-stream": "^3.6.0" } }, "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg=="], - - "duckdb/node-gyp/npmlog/gauge": ["gauge@4.0.4", "", { "dependencies": { "aproba": "^1.0.3 || ^2.0.0", "color-support": 
"^1.1.3", "console-control-strings": "^1.1.0", "has-unicode": "^2.0.1", "signal-exit": "^3.0.7", "string-width": "^4.2.3", "strip-ansi": "^6.0.1", "wide-align": "^1.1.5" } }, "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg=="], - "duckdb/node-gyp/which/isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], "jest-diff/chalk/supports-color/has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], @@ -3922,12 +3940,6 @@ "duckdb/node-gyp/make-fetch-happen/https-proxy-agent/debug": ["debug@4.3.5", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg=="], - "duckdb/node-gyp/npmlog/are-we-there-yet/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - - "duckdb/node-gyp/npmlog/gauge/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], - - "duckdb/node-gyp/npmlog/gauge/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - "duckdb/node-gyp/make-fetch-happen/cacache/glob/minimatch": ["minimatch@5.1.6", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g=="], "duckdb/node-gyp/make-fetch-happen/cacache/unique-filename/unique-slug": ["unique-slug@3.0.0", "", { "dependencies": { "imurmurhash": "^0.1.4" } }, "sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w=="], @@ -3936,10 +3948,6 @@ "duckdb/node-gyp/make-fetch-happen/https-proxy-agent/debug/ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], - "duckdb/node-gyp/npmlog/gauge/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], - - "duckdb/node-gyp/npmlog/gauge/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - "duckdb/node-gyp/make-fetch-happen/cacache/glob/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], "duckdb/node-gyp/make-fetch-happen/http-proxy-agent/agent-base/debug/ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], diff --git a/test/bundler/__snapshots__/bun-build-api.test.ts.snap b/test/bundler/__snapshots__/bun-build-api.test.ts.snap index 3f625ec089..e485dbc149 100644 --- a/test/bundler/__snapshots__/bun-build-api.test.ts.snap +++ b/test/bundler/__snapshots__/bun-build-api.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, 
https://bun.sh/docs/test/snapshots exports[`Bun.build Bun.write(BuildArtifact) 1`] = ` "var __defProp = Object.defineProperty; diff --git a/test/bundler/bun-build-api.test.ts b/test/bundler/bun-build-api.test.ts index 4ad15e2595..7e8b2c2330 100644 --- a/test/bundler/bun-build-api.test.ts +++ b/test/bundler/bun-build-api.test.ts @@ -1,7 +1,7 @@ import assert from "assert"; import { describe, expect, test } from "bun:test"; import { readFileSync, writeFileSync } from "fs"; -import { bunEnv, bunExe, tempDirWithFiles } from "harness"; +import { bunEnv, bunExe, tempDirWithFiles, tempDirWithFilesAnon } from "harness"; import path, { join } from "path"; import { buildNoThrow } from "./buildNoThrow"; @@ -632,3 +632,163 @@ test("onEnd Plugin does not crash", async () => { })(), ).rejects.toThrow("On-end callbacks is not implemented yet. See https://github.com/oven-sh/bun/issues/2771"); }); + +test("macro with nested object", async () => { + const dir = tempDirWithFilesAnon({ + "index.ts": ` +import { testMacro } from "./macro" assert { type: "macro" }; + +export const testConfig = testMacro({ + borderRadius: { + 1: "4px", + 2: "8px", + }, +}); + `, + "macro.ts": ` +export function testMacro(val: any) { + return val; +} + `, + }); + + const build = await Bun.build({ + entrypoints: [join(dir, "index.ts")], + minify: true, + }); + + expect(build.outputs).toHaveLength(1); + expect(build.outputs[0].kind).toBe("entry-point"); + expect(await build.outputs[0].text()).toEqualIgnoringWhitespace( + `var t={borderRadius:{"1":"4px","2":"8px"}};export{t as testConfig};\n`, + ); +}); + +// Since NODE_PATH has to be set, we need to run this test outside the bundler tests. +test("regression/NODE_PATHBuild api", async () => { + const dir = tempDirWithFiles("node-path-build", { + "entry.js": ` + import MyClass from 'MyClass'; + console.log(new MyClass().constructor.name); + `, + "src/MyClass.js": ` + export default class MyClass {} + `, + "build.js": ` + import { join } from "path"; + + const build = await Bun.build({ + entrypoints: [join(import.meta.dir, "entry.js")], + outdir: join(import.meta.dir, "out"), + }); + + if (!build.success) { + console.error("Build failed:", build.logs); + process.exit(1); + } + + // Run the built file + const runProc = Bun.spawn({ + cmd: [process.argv[0], join(import.meta.dir, "out", "entry.js")], + stdout: "pipe", + stderr: "pipe", + }); + + await runProc.exited; + const runOutput = await new Response(runProc.stdout).text(); + const runError = await new Response(runProc.stderr).text(); + + if (runError) { + console.error("Run error:", runError); + process.exit(1); + } + + console.log(runOutput.trim()); + + `, + }); + + // Run the build script with NODE_PATH set + const proc = Bun.spawn({ + cmd: [bunExe(), join(dir, "build.js")], + env: { + ...bunEnv, + NODE_PATH: join(dir, "src"), + }, + stdout: "pipe", + stderr: "pipe", + cwd: dir, + }); + + await proc.exited; + const output = await new Response(proc.stdout).text(); + const error = await new Response(proc.stderr).text(); + + expect(error).toBe(""); + expect(output.trim()).toBe("MyClass"); +}); + +test("regression/GlobalThis", async () => { + const dir = tempDirWithFiles("global-this-regression", { + "entry.js": ` + function identity(x) { + return x; + } + import * as mod1 from 'assert'; + identity(mod1); +import * as mod2 from 'buffer'; +identity(mod2); +import * as mod3 from 'console'; +identity(mod3); +import * as mod4 from 'constants'; +identity(mod4); +import * as mod5 from 'crypto'; +identity(mod5); +import * as mod6 from 
'domain'; +identity(mod6); +import * as mod7 from 'events'; +identity(mod7); +import * as mod8 from 'http'; +identity(mod8); +import * as mod9 from 'https'; +identity(mod9); +import * as mod10 from 'net'; +identity(mod10); +import * as mod11 from 'os'; +identity(mod11); +import * as mod12 from 'path'; +identity(mod12); +import * as mod13 from 'process'; +identity(mod13); +import * as mod14 from 'punycode'; +identity(mod14); +import * as mod15 from 'stream'; +identity(mod15); +import * as mod16 from 'string_decoder'; +identity(mod16); +import * as mod17 from 'sys'; +identity(mod17); +import * as mod18 from 'timers'; +identity(mod18); +import * as mod20 from 'tty'; +identity(mod20); +import * as mod21 from 'url'; +identity(mod21); +import * as mod22 from 'util'; +identity(mod22); +import * as mod23 from 'zlib'; +identity(mod23); + `, + }); + + const build = await Bun.build({ + entrypoints: [join(dir, "entry.js")], + target: "browser", + }); + + expect(build.success).toBe(true); + const text = await build.outputs[0].text(); + expect(text).not.toContain("process.env."); + expect(text).not.toContain(" global."); + expect(text).toContain(" globalThis."); +}); diff --git a/test/bundler/bundler_browser.test.ts b/test/bundler/bundler_browser.test.ts index 1a3460431b..53829e84f5 100644 --- a/test/bundler/bundler_browser.test.ts +++ b/test/bundler/bundler_browser.test.ts @@ -183,7 +183,9 @@ describe("bundler", () => { target: "browser", run: { stdout: - "{\n assert: {\n throws: [Getter/Setter],\n strictEqual: [Getter/Setter],\n strict: [Getter/Setter],\n rejects: [Getter/Setter],\n ok: [Getter/Setter],\n notStrictEqual: [Getter/Setter],\n notEqual: [Getter/Setter],\n notDeepStrictEqual: [Getter/Setter],\n notDeepEqual: [Getter/Setter],\n match: [Getter/Setter],\n ifError: [Getter/Setter],\n fail: [Getter/Setter],\n equal: [Getter/Setter],\n doesNotThrow: [Getter/Setter],\n doesNotReject: [Getter/Setter],\n doesNotMatch: [Getter/Setter],\n default: [Getter/Setter],\n deepStrictEqual: [Getter/Setter],\n deepEqual: [Getter/Setter],\n CallTracker: [Getter/Setter],\n AssertionError: [Getter/Setter],\n },\n buffer: {\n transcode: [Getter/Setter],\n resolveObjectURL: [Getter/Setter],\n kStringMaxLength: [Getter/Setter],\n kMaxLength: [Getter/Setter],\n isUtf8: [Getter/Setter],\n isAscii: [Getter/Setter],\n default: [Getter/Setter],\n constants: [Getter/Setter],\n btoa: [Getter/Setter],\n atob: [Getter/Setter],\n INSPECT_MAX_BYTES: [Getter/Setter],\n File: [Getter/Setter],\n Buffer: [Getter/Setter],\n Blob: [Getter/Setter],\n },\n child_process: [Function: child_process],\n cluster: [Function: cluster],\n console2: {\n default: [Getter/Setter],\n },\n constants: {\n X_OK: [Getter/Setter],\n W_OK: [Getter/Setter],\n UV_UDP_REUSEADDR: [Getter/Setter],\n S_IXUSR: [Getter/Setter],\n S_IXOTH: [Getter/Setter],\n S_IXGRP: [Getter/Setter],\n S_IWUSR: [Getter/Setter],\n S_IWOTH: [Getter/Setter],\n S_IWGRP: [Getter/Setter],\n S_IRWXU: [Getter/Setter],\n S_IRWXO: [Getter/Setter],\n S_IRWXG: [Getter/Setter],\n S_IRUSR: [Getter/Setter],\n S_IROTH: [Getter/Setter],\n S_IRGRP: [Getter/Setter],\n S_IFSOCK: [Getter/Setter],\n S_IFREG: [Getter/Setter],\n S_IFMT: [Getter/Setter],\n S_IFLNK: [Getter/Setter],\n S_IFIFO: [Getter/Setter],\n S_IFDIR: [Getter/Setter],\n S_IFCHR: [Getter/Setter],\n S_IFBLK: [Getter/Setter],\n SSL_OP_TLS_ROLLBACK_BUG: [Getter/Setter],\n SSL_OP_TLS_D5_BUG: [Getter/Setter],\n SSL_OP_TLS_BLOCK_PADDING_BUG: [Getter/Setter],\n SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG: [Getter/Setter],\n 
SSL_OP_SSLEAY_080_CLIENT_DH_BUG: [Getter/Setter],\n SSL_OP_SINGLE_ECDH_USE: [Getter/Setter],\n SSL_OP_SINGLE_DH_USE: [Getter/Setter],\n SSL_OP_PKCS1_CHECK_2: [Getter/Setter],\n SSL_OP_PKCS1_CHECK_1: [Getter/Setter],\n SSL_OP_NO_TLSv1_2: [Getter/Setter],\n SSL_OP_NO_TLSv1_1: [Getter/Setter],\n SSL_OP_NO_TLSv1: [Getter/Setter],\n SSL_OP_NO_TICKET: [Getter/Setter],\n SSL_OP_NO_SSLv3: [Getter/Setter],\n SSL_OP_NO_SSLv2: [Getter/Setter],\n SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: [Getter/Setter],\n SSL_OP_NO_QUERY_MTU: [Getter/Setter],\n SSL_OP_NO_COMPRESSION: [Getter/Setter],\n SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: [Getter/Setter],\n SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: [Getter/Setter],\n SSL_OP_NETSCAPE_CHALLENGE_BUG: [Getter/Setter],\n SSL_OP_NETSCAPE_CA_DN_BUG: [Getter/Setter],\n SSL_OP_MSIE_SSLV2_RSA_PADDING: [Getter/Setter],\n SSL_OP_MICROSOFT_SESS_ID_BUG: [Getter/Setter],\n SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER: [Getter/Setter],\n SSL_OP_LEGACY_SERVER_CONNECT: [Getter/Setter],\n SSL_OP_EPHEMERAL_RSA: [Getter/Setter],\n SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: [Getter/Setter],\n SSL_OP_CRYPTOPRO_TLSEXT_BUG: [Getter/Setter],\n SSL_OP_COOKIE_EXCHANGE: [Getter/Setter],\n SSL_OP_CISCO_ANYCONNECT: [Getter/Setter],\n SSL_OP_CIPHER_SERVER_PREFERENCE: [Getter/Setter],\n SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: [Getter/Setter],\n SSL_OP_ALL: [Getter/Setter],\n SIGXFSZ: [Getter/Setter],\n SIGXCPU: [Getter/Setter],\n SIGWINCH: [Getter/Setter],\n SIGVTALRM: [Getter/Setter],\n SIGUSR2: [Getter/Setter],\n SIGUSR1: [Getter/Setter],\n SIGURG: [Getter/Setter],\n SIGTTOU: [Getter/Setter],\n SIGTTIN: [Getter/Setter],\n SIGTSTP: [Getter/Setter],\n SIGTRAP: [Getter/Setter],\n SIGTERM: [Getter/Setter],\n SIGSYS: [Getter/Setter],\n SIGSTOP: [Getter/Setter],\n SIGSEGV: [Getter/Setter],\n SIGQUIT: [Getter/Setter],\n SIGPROF: [Getter/Setter],\n SIGPIPE: [Getter/Setter],\n SIGKILL: [Getter/Setter],\n SIGIOT: [Getter/Setter],\n SIGIO: [Getter/Setter],\n SIGINT: [Getter/Setter],\n SIGILL: [Getter/Setter],\n SIGHUP: [Getter/Setter],\n SIGFPE: [Getter/Setter],\n SIGCONT: [Getter/Setter],\n SIGCHLD: [Getter/Setter],\n SIGBUS: [Getter/Setter],\n SIGALRM: [Getter/Setter],\n SIGABRT: [Getter/Setter],\n R_OK: [Getter/Setter],\n RSA_X931_PADDING: [Getter/Setter],\n RSA_SSLV23_PADDING: [Getter/Setter],\n RSA_PKCS1_PSS_PADDING: [Getter/Setter],\n RSA_PKCS1_PADDING: [Getter/Setter],\n RSA_PKCS1_OAEP_PADDING: [Getter/Setter],\n RSA_NO_PADDING: [Getter/Setter],\n POINT_CONVERSION_UNCOMPRESSED: [Getter/Setter],\n POINT_CONVERSION_HYBRID: [Getter/Setter],\n POINT_CONVERSION_COMPRESSED: [Getter/Setter],\n O_WRONLY: [Getter/Setter],\n O_TRUNC: [Getter/Setter],\n O_SYNC: [Getter/Setter],\n O_SYMLINK: [Getter/Setter],\n O_RDWR: [Getter/Setter],\n O_RDONLY: [Getter/Setter],\n O_NONBLOCK: [Getter/Setter],\n O_NOFOLLOW: [Getter/Setter],\n O_NOCTTY: [Getter/Setter],\n O_EXCL: [Getter/Setter],\n O_DIRECTORY: [Getter/Setter],\n O_CREAT: [Getter/Setter],\n O_APPEND: [Getter/Setter],\n NPN_ENABLED: [Getter/Setter],\n F_OK: [Getter/Setter],\n EXDEV: [Getter/Setter],\n EWOULDBLOCK: [Getter/Setter],\n ETXTBSY: [Getter/Setter],\n ETIMEDOUT: [Getter/Setter],\n ETIME: [Getter/Setter],\n ESTALE: [Getter/Setter],\n ESRCH: [Getter/Setter],\n ESPIPE: [Getter/Setter],\n EROFS: [Getter/Setter],\n ERANGE: [Getter/Setter],\n EPROTOTYPE: [Getter/Setter],\n EPROTONOSUPPORT: [Getter/Setter],\n EPROTO: [Getter/Setter],\n EPIPE: [Getter/Setter],\n EPERM: [Getter/Setter],\n EOVERFLOW: [Getter/Setter],\n EOPNOTSUPP: [Getter/Setter],\n ENXIO: [Getter/Setter],\n 
ENOTTY: [Getter/Setter],\n ENOTSUP: [Getter/Setter],\n ENOTSOCK: [Getter/Setter],\n ENOTEMPTY: [Getter/Setter],\n ENOTDIR: [Getter/Setter],\n ENOTCONN: [Getter/Setter],\n ENOSYS: [Getter/Setter],\n ENOSTR: [Getter/Setter],\n ENOSR: [Getter/Setter],\n ENOSPC: [Getter/Setter],\n ENOPROTOOPT: [Getter/Setter],\n ENOMSG: [Getter/Setter],\n ENOMEM: [Getter/Setter],\n ENOLINK: [Getter/Setter],\n ENOLCK: [Getter/Setter],\n ENOEXEC: [Getter/Setter],\n ENOENT: [Getter/Setter],\n ENODEV: [Getter/Setter],\n ENODATA: [Getter/Setter],\n ENOBUFS: [Getter/Setter],\n ENGINE_METHOD_STORE: [Getter/Setter],\n ENGINE_METHOD_RAND: [Getter/Setter],\n ENGINE_METHOD_PKEY_METHS: [Getter/Setter],\n ENGINE_METHOD_PKEY_ASN1_METHS: [Getter/Setter],\n ENGINE_METHOD_NONE: [Getter/Setter],\n ENGINE_METHOD_ECDSA: [Getter/Setter],\n ENGINE_METHOD_ECDH: [Getter/Setter],\n ENGINE_METHOD_DSA: [Getter/Setter],\n ENGINE_METHOD_DIGESTS: [Getter/Setter],\n ENGINE_METHOD_DH: [Getter/Setter],\n ENGINE_METHOD_CIPHERS: [Getter/Setter],\n ENGINE_METHOD_ALL: [Getter/Setter],\n ENFILE: [Getter/Setter],\n ENETUNREACH: [Getter/Setter],\n ENETRESET: [Getter/Setter],\n ENETDOWN: [Getter/Setter],\n ENAMETOOLONG: [Getter/Setter],\n EMULTIHOP: [Getter/Setter],\n EMSGSIZE: [Getter/Setter],\n EMLINK: [Getter/Setter],\n EMFILE: [Getter/Setter],\n ELOOP: [Getter/Setter],\n EISDIR: [Getter/Setter],\n EISCONN: [Getter/Setter],\n EIO: [Getter/Setter],\n EINVAL: [Getter/Setter],\n EINTR: [Getter/Setter],\n EINPROGRESS: [Getter/Setter],\n EILSEQ: [Getter/Setter],\n EIDRM: [Getter/Setter],\n EHOSTUNREACH: [Getter/Setter],\n EFBIG: [Getter/Setter],\n EFAULT: [Getter/Setter],\n EEXIST: [Getter/Setter],\n EDQUOT: [Getter/Setter],\n EDOM: [Getter/Setter],\n EDESTADDRREQ: [Getter/Setter],\n EDEADLK: [Getter/Setter],\n ECONNRESET: [Getter/Setter],\n ECONNREFUSED: [Getter/Setter],\n ECONNABORTED: [Getter/Setter],\n ECHILD: [Getter/Setter],\n ECANCELED: [Getter/Setter],\n EBUSY: [Getter/Setter],\n EBADMSG: [Getter/Setter],\n EBADF: [Getter/Setter],\n EALREADY: [Getter/Setter],\n EAGAIN: [Getter/Setter],\n EAFNOSUPPORT: [Getter/Setter],\n EADDRNOTAVAIL: [Getter/Setter],\n EADDRINUSE: [Getter/Setter],\n EACCES: [Getter/Setter],\n E2BIG: [Getter/Setter],\n DH_UNABLE_TO_CHECK_GENERATOR: [Getter/Setter],\n DH_NOT_SUITABLE_GENERATOR: [Getter/Setter],\n DH_CHECK_P_NOT_SAFE_PRIME: [Getter/Setter],\n DH_CHECK_P_NOT_PRIME: [Getter/Setter],\n },\n crypto: {\n webcrypto: [Getter/Setter],\n rng: [Getter/Setter],\n randomUUID: [Getter/Setter],\n randomFillSync: [Getter/Setter],\n randomFill: [Getter/Setter],\n randomBytes: [Getter/Setter],\n publicEncrypt: [Getter/Setter],\n publicDecrypt: [Getter/Setter],\n pseudoRandomBytes: [Getter/Setter],\n prng: [Getter/Setter],\n privateEncrypt: [Getter/Setter],\n privateDecrypt: [Getter/Setter],\n pbkdf2Sync: [Getter/Setter],\n pbkdf2: [Getter/Setter],\n listCiphers: [Getter/Setter],\n getRandomValues: [Getter/Setter],\n getHashes: [Getter/Setter],\n getDiffieHellman: [Getter/Setter],\n getCurves: [Getter/Setter],\n getCiphers: [Getter/Setter],\n createVerify: [Getter/Setter],\n createSign: [Getter/Setter],\n createHmac: [Getter/Setter],\n createHash: [Getter/Setter],\n createECDH: [Getter/Setter],\n createDiffieHellmanGroup: [Getter/Setter],\n createDiffieHellman: [Getter/Setter],\n createDecipheriv: [Getter/Setter],\n createDecipher: [Getter/Setter],\n createCredentials: [Getter/Setter],\n createCipheriv: [Getter/Setter],\n createCipher: [Getter/Setter],\n constants: [Getter/Setter],\n Verify: [Getter/Setter],\n Sign: 
[Getter/Setter],\n Hmac: [Getter/Setter],\n Hash: [Getter/Setter],\n DiffieHellmanGroup: [Getter/Setter],\n DiffieHellman: [Getter/Setter],\n Decipheriv: [Getter/Setter],\n Decipher: [Getter/Setter],\n DEFAULT_ENCODING: [Getter/Setter],\n Cipheriv: [Getter/Setter],\n Cipher: [Getter/Setter],\n },\n dgram: [Function: dgram],\n dns: [Function: dns],\n domain: {\n createDomain: [Getter/Setter],\n create: [Getter/Setter],\n },\n events: {\n setMaxListeners: [Getter/Setter],\n once: [Getter/Setter],\n listenerCount: [Getter/Setter],\n init: [Getter/Setter],\n getMaxListeners: [Getter/Setter],\n getEventListeners: [Getter/Setter],\n default: [Getter/Setter],\n captureRejectionSymbol: [Getter/Setter],\n addAbortListener: [Getter/Setter],\n EventEmitter: [Getter/Setter],\n },\n fs: [Function: fs],\n http: {\n request: [Getter/Setter],\n globalAgent: [Getter/Setter],\n get: [Getter/Setter],\n STATUS_CODES: [Getter/Setter],\n METHODS: [Getter/Setter],\n IncomingMessage: [Getter/Setter],\n ClientRequest: [Getter/Setter],\n Agent: [Getter/Setter],\n },\n https: {\n validateHeaderValue: [Getter/Setter],\n validateHeaderName: [Getter/Setter],\n setMaxIdleHTTPParsers: [Getter/Setter],\n request: [Getter/Setter],\n maxHeaderSize: [Getter/Setter],\n globalAgent: [Getter/Setter],\n get: [Getter/Setter],\n createServer: [Getter/Setter],\n ServerResponse: [Getter/Setter],\n Server: [Getter/Setter],\n STATUS_CODES: [Getter/Setter],\n OutgoingMessage: [Getter/Setter],\n METHODS: [Getter/Setter],\n IncomingMessage: [Getter/Setter],\n ClientRequest: [Getter/Setter],\n Agent: [Getter/Setter],\n },\n module: [Function: module2],\n net: {\n isIPv6: [Getter/Setter],\n isIPv4: [Getter/Setter],\n isIP: [Getter/Setter],\n },\n os: {\n uptime: [Getter/Setter],\n type: [Getter/Setter],\n totalmem: [Getter/Setter],\n tmpdir: [Getter/Setter],\n tmpDir: [Getter/Setter],\n release: [Getter/Setter],\n platform: [Getter/Setter],\n networkInterfaces: [Getter/Setter],\n loadavg: [Getter/Setter],\n hostname: [Getter/Setter],\n homedir: [Getter/Setter],\n getNetworkInterfaces: [Getter/Setter],\n freemem: [Getter/Setter],\n endianness: [Getter/Setter],\n cpus: [Getter/Setter],\n arch: [Getter/Setter],\n EOL: [Getter/Setter],\n },\n path: {\n sep: [Getter/Setter],\n resolve: [Getter/Setter],\n relative: [Getter/Setter],\n posix: [Getter/Setter],\n parse: [Getter/Setter],\n normalize: [Getter/Setter],\n join: [Getter/Setter],\n isAbsolute: [Getter/Setter],\n format: [Getter/Setter],\n extname: [Getter/Setter],\n dirname: [Getter/Setter],\n delimiter: [Getter/Setter],\n default: [Getter/Setter],\n basename: [Getter/Setter],\n _makeLong: [Getter/Setter],\n },\n perf_hooks: [Function: perf_hooks],\n process: {\n versions: [Getter/Setter],\n version: [Getter/Setter],\n umask: [Getter/Setter],\n title: [Getter/Setter],\n removeListener: [Getter/Setter],\n removeAllListeners: [Getter/Setter],\n prependOnceListener: [Getter/Setter],\n prependListener: [Getter/Setter],\n once: [Getter/Setter],\n on: [Getter/Setter],\n off: [Getter/Setter],\n nextTick: [Getter/Setter],\n listeners: [Getter/Setter],\n env: [Getter/Setter],\n emit: [Getter/Setter],\n cwd: [Getter/Setter],\n chdir: [Getter/Setter],\n browser: [Getter/Setter],\n binding: [Getter/Setter],\n argv: [Getter/Setter],\n addListener: [Getter/Setter],\n },\n punycode: {\n default: [Getter/Setter],\n },\n querystring: {\n unescapeBuffer: [Getter/Setter],\n unescape: [Getter/Setter],\n stringify: [Getter/Setter],\n parse: [Getter/Setter],\n escape: [Getter/Setter],\n encode: 
[Getter/Setter],\n default: [Getter/Setter],\n decode: [Getter/Setter],\n },\n readline: [Function: readline],\n repl: [Function: repl],\n stream: Function {\n default: [Stream: Readable],\n length: [Getter],\n name: [Getter],\n prototype: [Getter],\n ReadableState: [Getter],\n _fromList: [Getter],\n from: [Getter],\n fromWeb: [Getter],\n toWeb: [Getter],\n wrap: [Getter],\n _uint8ArrayToBuffer: [Getter],\n _isUint8Array: [Getter],\n isDisturbed: [Getter],\n isErrored: [Getter],\n isReadable: [Getter],\n Readable: [Getter],\n Writable: [Getter],\n Duplex: [Getter],\n Transform: [Getter],\n PassThrough: [Getter],\n addAbortSignal: [Getter],\n finished: [Getter],\n destroy: [Getter],\n pipeline: [Getter],\n compose: [Getter],\n Stream: [Getter],\n isDestroyed: [Function: isDestroyed],\n isWritable: [Function: isWritable],\n setDefaultHighWaterMark: [Function: setDefaultHighWaterMark],\n getDefaultHighWaterMark: [Function: getDefaultHighWaterMark],\n promises: [Getter],\n },\n string_decoder: {\n default: [Getter/Setter],\n StringDecoder: [Getter/Setter],\n },\n sys: {\n types: [Getter/Setter],\n promisify: [Getter/Setter],\n log: [Getter/Setter],\n isUndefined: [Getter/Setter],\n isSymbol: [Getter/Setter],\n isString: [Getter/Setter],\n isRegExp: [Getter/Setter],\n isPrimitive: [Getter/Setter],\n isObject: [Getter/Setter],\n isNumber: [Getter/Setter],\n isNullOrUndefined: [Getter/Setter],\n isNull: [Getter/Setter],\n isFunction: [Getter/Setter],\n isError: [Getter/Setter],\n isDate: [Getter/Setter],\n isBuffer: [Getter/Setter],\n isBoolean: [Getter/Setter],\n isArray: [Getter/Setter],\n inspect: [Getter/Setter],\n inherits: [Getter/Setter],\n format: [Getter/Setter],\n deprecate: [Getter/Setter],\n default: [Getter/Setter],\n debuglog: [Getter/Setter],\n callbackifyOnRejected: [Getter/Setter],\n callbackify: [Getter/Setter],\n _extend: [Getter/Setter],\n TextEncoder: [Getter/Setter],\n TextDecoder: [Getter/Setter],\n },\n timers: {\n setTimeout: [Getter/Setter],\n setInterval: [Getter/Setter],\n setImmediate: [Getter/Setter],\n promises: [Getter/Setter],\n clearTimeout: [Getter/Setter],\n clearInterval: [Getter/Setter],\n clearImmediate: [Getter/Setter],\n _unrefActive: [Getter/Setter],\n },\n tls: [Function: tls],\n tty: {\n isatty: [Getter/Setter],\n WriteStream: [Getter/Setter],\n ReadStream: [Getter/Setter],\n },\n url: {\n resolveObject: [Getter/Setter],\n resolve: [Getter/Setter],\n parse: [Getter/Setter],\n format: [Getter/Setter],\n default: [Getter/Setter],\n Url: [Getter/Setter],\n URLSearchParams: [Getter/Setter],\n URL: [Getter/Setter],\n },\n util: {\n types: [Getter/Setter],\n promisify: [Getter/Setter],\n log: [Getter/Setter],\n isUndefined: [Getter/Setter],\n isSymbol: [Getter/Setter],\n isString: [Getter/Setter],\n isRegExp: [Getter/Setter],\n isPrimitive: [Getter/Setter],\n isObject: [Getter/Setter],\n isNumber: [Getter/Setter],\n isNullOrUndefined: [Getter/Setter],\n isNull: [Getter/Setter],\n isFunction: [Getter/Setter],\n isError: [Getter/Setter],\n isDate: [Getter/Setter],\n isBuffer: [Getter/Setter],\n isBoolean: [Getter/Setter],\n isArray: [Getter/Setter],\n inspect: [Getter/Setter],\n inherits: [Getter/Setter],\n format: [Getter/Setter],\n deprecate: [Getter/Setter],\n debuglog: [Getter/Setter],\n callbackifyOnRejected: [Getter/Setter],\n callbackify: [Getter/Setter],\n _extend: [Getter/Setter],\n TextEncoder: [Getter/Setter],\n TextDecoder: [Getter/Setter],\n },\n v8: [Function: v8],\n vm: [Function: vm],\n zlib: {\n default: [Getter/Setter],\n },\n}", + "{\n 
assert: {\n throws: [Getter/Setter],\n strictEqual: [Getter/Setter],\n strict: [Getter/Setter],\n rejects: [Getter/Setter],\n ok: [Getter/Setter],\n notStrictEqual: [Getter/Setter],\n notEqual: [Getter/Setter],\n notDeepStrictEqual: [Getter/Setter],\n notDeepEqual: [Getter/Setter],\n match: [Getter/Setter],\n ifError: [Getter/Setter],\n fail: [Getter/Setter],\n equal: [Getter/Setter],\n doesNotThrow: [Getter/Setter],\n doesNotReject: [Getter/Setter],\n doesNotMatch: [Getter/Setter],\n default: [Getter/Setter],\n deepStrictEqual: [Getter/Setter],\n deepEqual: [Getter/Setter],\n CallTracker: [Getter/Setter],\n AssertionError: [Getter/Setter],\n },\n buffer: {\n transcode: [Getter/Setter],\n resolveObjectURL: [Getter/Setter],\n kStringMaxLength: [Getter/Setter],\n kMaxLength: [Getter/Setter],\n isUtf8: [Getter/Setter],\n isAscii: [Getter/Setter],\n default: [Getter/Setter],\n constants: [Getter/Setter],\n btoa: [Getter/Setter],\n atob: [Getter/Setter],\n INSPECT_MAX_BYTES: [Getter/Setter],\n File: [Getter/Setter],\n Buffer: [Getter/Setter],\n Blob: [Getter/Setter],\n },\n child_process: [Function: child_process],\n cluster: [Function: cluster],\n console2: {\n default: [Getter/Setter],\n },\n constants: {\n X_OK: [Getter/Setter],\n W_OK: [Getter/Setter],\n UV_UDP_REUSEADDR: [Getter/Setter],\n S_IXUSR: [Getter/Setter],\n S_IXOTH: [Getter/Setter],\n S_IXGRP: [Getter/Setter],\n S_IWUSR: [Getter/Setter],\n S_IWOTH: [Getter/Setter],\n S_IWGRP: [Getter/Setter],\n S_IRWXU: [Getter/Setter],\n S_IRWXO: [Getter/Setter],\n S_IRWXG: [Getter/Setter],\n S_IRUSR: [Getter/Setter],\n S_IROTH: [Getter/Setter],\n S_IRGRP: [Getter/Setter],\n S_IFSOCK: [Getter/Setter],\n S_IFREG: [Getter/Setter],\n S_IFMT: [Getter/Setter],\n S_IFLNK: [Getter/Setter],\n S_IFIFO: [Getter/Setter],\n S_IFDIR: [Getter/Setter],\n S_IFCHR: [Getter/Setter],\n S_IFBLK: [Getter/Setter],\n SSL_OP_TLS_ROLLBACK_BUG: [Getter/Setter],\n SSL_OP_TLS_D5_BUG: [Getter/Setter],\n SSL_OP_TLS_BLOCK_PADDING_BUG: [Getter/Setter],\n SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG: [Getter/Setter],\n SSL_OP_SSLEAY_080_CLIENT_DH_BUG: [Getter/Setter],\n SSL_OP_SINGLE_ECDH_USE: [Getter/Setter],\n SSL_OP_SINGLE_DH_USE: [Getter/Setter],\n SSL_OP_PKCS1_CHECK_2: [Getter/Setter],\n SSL_OP_PKCS1_CHECK_1: [Getter/Setter],\n SSL_OP_NO_TLSv1_2: [Getter/Setter],\n SSL_OP_NO_TLSv1_1: [Getter/Setter],\n SSL_OP_NO_TLSv1: [Getter/Setter],\n SSL_OP_NO_TICKET: [Getter/Setter],\n SSL_OP_NO_SSLv3: [Getter/Setter],\n SSL_OP_NO_SSLv2: [Getter/Setter],\n SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: [Getter/Setter],\n SSL_OP_NO_QUERY_MTU: [Getter/Setter],\n SSL_OP_NO_COMPRESSION: [Getter/Setter],\n SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: [Getter/Setter],\n SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: [Getter/Setter],\n SSL_OP_NETSCAPE_CHALLENGE_BUG: [Getter/Setter],\n SSL_OP_NETSCAPE_CA_DN_BUG: [Getter/Setter],\n SSL_OP_MSIE_SSLV2_RSA_PADDING: [Getter/Setter],\n SSL_OP_MICROSOFT_SESS_ID_BUG: [Getter/Setter],\n SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER: [Getter/Setter],\n SSL_OP_LEGACY_SERVER_CONNECT: [Getter/Setter],\n SSL_OP_EPHEMERAL_RSA: [Getter/Setter],\n SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: [Getter/Setter],\n SSL_OP_CRYPTOPRO_TLSEXT_BUG: [Getter/Setter],\n SSL_OP_COOKIE_EXCHANGE: [Getter/Setter],\n SSL_OP_CISCO_ANYCONNECT: [Getter/Setter],\n SSL_OP_CIPHER_SERVER_PREFERENCE: [Getter/Setter],\n SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: [Getter/Setter],\n SSL_OP_ALL: [Getter/Setter],\n SIGXFSZ: [Getter/Setter],\n SIGXCPU: [Getter/Setter],\n SIGWINCH: [Getter/Setter],\n SIGVTALRM: [Getter/Setter],\n 
SIGUSR2: [Getter/Setter],\n SIGUSR1: [Getter/Setter],\n SIGURG: [Getter/Setter],\n SIGTTOU: [Getter/Setter],\n SIGTTIN: [Getter/Setter],\n SIGTSTP: [Getter/Setter],\n SIGTRAP: [Getter/Setter],\n SIGTERM: [Getter/Setter],\n SIGSYS: [Getter/Setter],\n SIGSTOP: [Getter/Setter],\n SIGSEGV: [Getter/Setter],\n SIGQUIT: [Getter/Setter],\n SIGPROF: [Getter/Setter],\n SIGPIPE: [Getter/Setter],\n SIGKILL: [Getter/Setter],\n SIGIOT: [Getter/Setter],\n SIGIO: [Getter/Setter],\n SIGINT: [Getter/Setter],\n SIGILL: [Getter/Setter],\n SIGHUP: [Getter/Setter],\n SIGFPE: [Getter/Setter],\n SIGCONT: [Getter/Setter],\n SIGCHLD: [Getter/Setter],\n SIGBUS: [Getter/Setter],\n SIGALRM: [Getter/Setter],\n SIGABRT: [Getter/Setter],\n R_OK: [Getter/Setter],\n RSA_X931_PADDING: [Getter/Setter],\n RSA_SSLV23_PADDING: [Getter/Setter],\n RSA_PKCS1_PSS_PADDING: [Getter/Setter],\n RSA_PKCS1_PADDING: [Getter/Setter],\n RSA_PKCS1_OAEP_PADDING: [Getter/Setter],\n RSA_NO_PADDING: [Getter/Setter],\n POINT_CONVERSION_UNCOMPRESSED: [Getter/Setter],\n POINT_CONVERSION_HYBRID: [Getter/Setter],\n POINT_CONVERSION_COMPRESSED: [Getter/Setter],\n O_WRONLY: [Getter/Setter],\n O_TRUNC: [Getter/Setter],\n O_SYNC: [Getter/Setter],\n O_SYMLINK: [Getter/Setter],\n O_RDWR: [Getter/Setter],\n O_RDONLY: [Getter/Setter],\n O_NONBLOCK: [Getter/Setter],\n O_NOFOLLOW: [Getter/Setter],\n O_NOCTTY: [Getter/Setter],\n O_EXCL: [Getter/Setter],\n O_DIRECTORY: [Getter/Setter],\n O_CREAT: [Getter/Setter],\n O_APPEND: [Getter/Setter],\n NPN_ENABLED: [Getter/Setter],\n F_OK: [Getter/Setter],\n EXDEV: [Getter/Setter],\n EWOULDBLOCK: [Getter/Setter],\n ETXTBSY: [Getter/Setter],\n ETIMEDOUT: [Getter/Setter],\n ETIME: [Getter/Setter],\n ESTALE: [Getter/Setter],\n ESRCH: [Getter/Setter],\n ESPIPE: [Getter/Setter],\n EROFS: [Getter/Setter],\n ERANGE: [Getter/Setter],\n EPROTOTYPE: [Getter/Setter],\n EPROTONOSUPPORT: [Getter/Setter],\n EPROTO: [Getter/Setter],\n EPIPE: [Getter/Setter],\n EPERM: [Getter/Setter],\n EOVERFLOW: [Getter/Setter],\n EOPNOTSUPP: [Getter/Setter],\n ENXIO: [Getter/Setter],\n ENOTTY: [Getter/Setter],\n ENOTSUP: [Getter/Setter],\n ENOTSOCK: [Getter/Setter],\n ENOTEMPTY: [Getter/Setter],\n ENOTDIR: [Getter/Setter],\n ENOTCONN: [Getter/Setter],\n ENOSYS: [Getter/Setter],\n ENOSTR: [Getter/Setter],\n ENOSR: [Getter/Setter],\n ENOSPC: [Getter/Setter],\n ENOPROTOOPT: [Getter/Setter],\n ENOMSG: [Getter/Setter],\n ENOMEM: [Getter/Setter],\n ENOLINK: [Getter/Setter],\n ENOLCK: [Getter/Setter],\n ENOEXEC: [Getter/Setter],\n ENOENT: [Getter/Setter],\n ENODEV: [Getter/Setter],\n ENODATA: [Getter/Setter],\n ENOBUFS: [Getter/Setter],\n ENGINE_METHOD_STORE: [Getter/Setter],\n ENGINE_METHOD_RAND: [Getter/Setter],\n ENGINE_METHOD_PKEY_METHS: [Getter/Setter],\n ENGINE_METHOD_PKEY_ASN1_METHS: [Getter/Setter],\n ENGINE_METHOD_NONE: [Getter/Setter],\n ENGINE_METHOD_ECDSA: [Getter/Setter],\n ENGINE_METHOD_ECDH: [Getter/Setter],\n ENGINE_METHOD_DSA: [Getter/Setter],\n ENGINE_METHOD_DIGESTS: [Getter/Setter],\n ENGINE_METHOD_DH: [Getter/Setter],\n ENGINE_METHOD_CIPHERS: [Getter/Setter],\n ENGINE_METHOD_ALL: [Getter/Setter],\n ENFILE: [Getter/Setter],\n ENETUNREACH: [Getter/Setter],\n ENETRESET: [Getter/Setter],\n ENETDOWN: [Getter/Setter],\n ENAMETOOLONG: [Getter/Setter],\n EMULTIHOP: [Getter/Setter],\n EMSGSIZE: [Getter/Setter],\n EMLINK: [Getter/Setter],\n EMFILE: [Getter/Setter],\n ELOOP: [Getter/Setter],\n EISDIR: [Getter/Setter],\n EISCONN: [Getter/Setter],\n EIO: [Getter/Setter],\n EINVAL: [Getter/Setter],\n EINTR: [Getter/Setter],\n EINPROGRESS: 
[Getter/Setter],\n EILSEQ: [Getter/Setter],\n EIDRM: [Getter/Setter],\n EHOSTUNREACH: [Getter/Setter],\n EFBIG: [Getter/Setter],\n EFAULT: [Getter/Setter],\n EEXIST: [Getter/Setter],\n EDQUOT: [Getter/Setter],\n EDOM: [Getter/Setter],\n EDESTADDRREQ: [Getter/Setter],\n EDEADLK: [Getter/Setter],\n ECONNRESET: [Getter/Setter],\n ECONNREFUSED: [Getter/Setter],\n ECONNABORTED: [Getter/Setter],\n ECHILD: [Getter/Setter],\n ECANCELED: [Getter/Setter],\n EBUSY: [Getter/Setter],\n EBADMSG: [Getter/Setter],\n EBADF: [Getter/Setter],\n EALREADY: [Getter/Setter],\n EAGAIN: [Getter/Setter],\n EAFNOSUPPORT: [Getter/Setter],\n EADDRNOTAVAIL: [Getter/Setter],\n EADDRINUSE: [Getter/Setter],\n EACCES: [Getter/Setter],\n E2BIG: [Getter/Setter],\n DH_UNABLE_TO_CHECK_GENERATOR: [Getter/Setter],\n DH_NOT_SUITABLE_GENERATOR: [Getter/Setter],\n DH_CHECK_P_NOT_SAFE_PRIME: [Getter/Setter],\n DH_CHECK_P_NOT_PRIME: [Getter/Setter],\n },\n crypto: {\n webcrypto: [Getter/Setter],\n rng: [Getter/Setter],\n randomUUID: [Getter/Setter],\n randomFillSync: [Getter/Setter],\n randomFill: [Getter/Setter],\n randomBytes: [Getter/Setter],\n publicEncrypt: [Getter/Setter],\n publicDecrypt: [Getter/Setter],\n pseudoRandomBytes: [Getter/Setter],\n prng: [Getter/Setter],\n privateEncrypt: [Getter/Setter],\n privateDecrypt: [Getter/Setter],\n pbkdf2Sync: [Getter/Setter],\n pbkdf2: [Getter/Setter],\n listCiphers: [Getter/Setter],\n getRandomValues: [Getter/Setter],\n getHashes: [Getter/Setter],\n getDiffieHellman: [Getter/Setter],\n getCurves: [Getter/Setter],\n getCiphers: [Getter/Setter],\n default: [Getter/Setter],\n createVerify: [Getter/Setter],\n createSign: [Getter/Setter],\n createHmac: [Getter/Setter],\n createHash: [Getter/Setter],\n createECDH: [Getter/Setter],\n createDiffieHellmanGroup: [Getter/Setter],\n createDiffieHellman: [Getter/Setter],\n createDecipheriv: [Getter/Setter],\n createDecipher: [Getter/Setter],\n createCredentials: [Getter/Setter],\n createCipheriv: [Getter/Setter],\n createCipher: [Getter/Setter],\n constants: [Getter/Setter],\n Verify: [Getter/Setter],\n Sign: [Getter/Setter],\n Hmac: [Getter/Setter],\n Hash: [Getter/Setter],\n DiffieHellmanGroup: [Getter/Setter],\n DiffieHellman: [Getter/Setter],\n Decipheriv: [Getter/Setter],\n Decipher: [Getter/Setter],\n DEFAULT_ENCODING: [Getter/Setter],\n Cipheriv: [Getter/Setter],\n Cipher: [Getter/Setter],\n },\n dgram: [Function: dgram],\n dns: [Function: dns],\n domain: {\n createDomain: [Getter/Setter],\n create: [Getter/Setter],\n },\n events: {\n setMaxListeners: [Getter/Setter],\n once: [Getter/Setter],\n listenerCount: [Getter/Setter],\n init: [Getter/Setter],\n getMaxListeners: [Getter/Setter],\n getEventListeners: [Getter/Setter],\n default: [Getter/Setter],\n captureRejectionSymbol: [Getter/Setter],\n addAbortListener: [Getter/Setter],\n EventEmitter: [Getter/Setter],\n },\n fs: [Function: fs],\n http: {\n request: [Getter/Setter],\n globalAgent: [Getter/Setter],\n get: [Getter/Setter],\n default: [Getter/Setter],\n STATUS_CODES: [Getter/Setter],\n METHODS: [Getter/Setter],\n IncomingMessage: [Getter/Setter],\n ClientRequest: [Getter/Setter],\n Agent: [Getter/Setter],\n },\n https: {\n validateHeaderValue: [Getter/Setter],\n validateHeaderName: [Getter/Setter],\n setMaxIdleHTTPParsers: [Getter/Setter],\n request: [Getter/Setter],\n maxHeaderSize: [Getter/Setter],\n globalAgent: [Getter/Setter],\n get: [Getter/Setter],\n default: [Getter/Setter],\n createServer: [Getter/Setter],\n ServerResponse: [Getter/Setter],\n Server: [Getter/Setter],\n 
STATUS_CODES: [Getter/Setter],\n OutgoingMessage: [Getter/Setter],\n METHODS: [Getter/Setter],\n IncomingMessage: [Getter/Setter],\n ClientRequest: [Getter/Setter],\n Agent: [Getter/Setter],\n },\n module: [Function: module2],\n net: {\n isIPv6: [Getter/Setter],\n isIPv4: [Getter/Setter],\n isIP: [Getter/Setter],\n default: [Getter/Setter],\n },\n os: {\n uptime: [Getter/Setter],\n type: [Getter/Setter],\n totalmem: [Getter/Setter],\n tmpdir: [Getter/Setter],\n tmpDir: [Getter/Setter],\n release: [Getter/Setter],\n platform: [Getter/Setter],\n networkInterfaces: [Getter/Setter],\n loadavg: [Getter/Setter],\n hostname: [Getter/Setter],\n homedir: [Getter/Setter],\n getNetworkInterfaces: [Getter/Setter],\n freemem: [Getter/Setter],\n endianness: [Getter/Setter],\n cpus: [Getter/Setter],\n arch: [Getter/Setter],\n EOL: [Getter/Setter],\n },\n path: {\n sep: [Getter/Setter],\n resolve: [Getter/Setter],\n relative: [Getter/Setter],\n posix: [Getter/Setter],\n parse: [Getter/Setter],\n normalize: [Getter/Setter],\n join: [Getter/Setter],\n isAbsolute: [Getter/Setter],\n format: [Getter/Setter],\n extname: [Getter/Setter],\n dirname: [Getter/Setter],\n delimiter: [Getter/Setter],\n default: [Getter/Setter],\n basename: [Getter/Setter],\n _makeLong: [Getter/Setter],\n },\n perf_hooks: [Function: perf_hooks],\n process: {\n versions: [Getter/Setter],\n version: [Getter/Setter],\n umask: [Getter/Setter],\n title: [Getter/Setter],\n removeListener: [Getter/Setter],\n removeAllListeners: [Getter/Setter],\n prependOnceListener: [Getter/Setter],\n prependListener: [Getter/Setter],\n once: [Getter/Setter],\n on: [Getter/Setter],\n off: [Getter/Setter],\n nextTick: [Getter/Setter],\n listeners: [Getter/Setter],\n env: [Getter/Setter],\n emit: [Getter/Setter],\n cwd: [Getter/Setter],\n chdir: [Getter/Setter],\n browser: [Getter/Setter],\n binding: [Getter/Setter],\n argv: [Getter/Setter],\n addListener: [Getter/Setter],\n },\n punycode: {\n default: [Getter/Setter],\n },\n querystring: {\n unescapeBuffer: [Getter/Setter],\n unescape: [Getter/Setter],\n stringify: [Getter/Setter],\n parse: [Getter/Setter],\n escape: [Getter/Setter],\n encode: [Getter/Setter],\n default: [Getter/Setter],\n decode: [Getter/Setter],\n },\n readline: [Function: readline],\n repl: [Function: repl],\n stream: Function {\n default: [Stream: Readable],\n length: [Getter],\n name: [Getter],\n prototype: [Getter],\n ReadableState: [Getter],\n _fromList: [Getter],\n from: [Getter],\n fromWeb: [Getter],\n toWeb: [Getter],\n wrap: [Getter],\n _uint8ArrayToBuffer: [Getter],\n _isUint8Array: [Getter],\n isDisturbed: [Getter],\n isErrored: [Getter],\n isReadable: [Getter],\n Readable: [Getter],\n Writable: [Getter],\n Duplex: [Getter],\n Transform: [Getter],\n PassThrough: [Getter],\n addAbortSignal: [Getter],\n finished: [Getter],\n destroy: [Getter],\n pipeline: [Getter],\n compose: [Getter],\n Stream: [Getter],\n isDestroyed: [Function: isDestroyed],\n isWritable: [Function: isWritable],\n setDefaultHighWaterMark: [Function: setDefaultHighWaterMark],\n getDefaultHighWaterMark: [Function: getDefaultHighWaterMark],\n promises: [Getter],\n },\n string_decoder: {\n default: [Getter/Setter],\n StringDecoder: [Getter/Setter],\n },\n sys: {\n types: [Getter/Setter],\n promisify: [Getter/Setter],\n log: [Getter/Setter],\n isUndefined: [Getter/Setter],\n isSymbol: [Getter/Setter],\n isString: [Getter/Setter],\n isRegExp: [Getter/Setter],\n isPrimitive: [Getter/Setter],\n isObject: [Getter/Setter],\n isNumber: [Getter/Setter],\n 
isNullOrUndefined: [Getter/Setter],\n isNull: [Getter/Setter],\n isFunction: [Getter/Setter],\n isError: [Getter/Setter],\n isDate: [Getter/Setter],\n isBuffer: [Getter/Setter],\n isBoolean: [Getter/Setter],\n isArray: [Getter/Setter],\n inspect: [Getter/Setter],\n inherits: [Getter/Setter],\n format: [Getter/Setter],\n deprecate: [Getter/Setter],\n default: [Getter/Setter],\n debuglog: [Getter/Setter],\n callbackifyOnRejected: [Getter/Setter],\n callbackify: [Getter/Setter],\n _extend: [Getter/Setter],\n TextEncoder: [Getter/Setter],\n TextDecoder: [Getter/Setter],\n },\n timers: {\n setTimeout: [Getter/Setter],\n setInterval: [Getter/Setter],\n setImmediate: [Getter/Setter],\n promises: [Getter/Setter],\n clearTimeout: [Getter/Setter],\n clearInterval: [Getter/Setter],\n clearImmediate: [Getter/Setter],\n _unrefActive: [Getter/Setter],\n },\n tls: [Function: tls],\n tty: {\n isatty: [Getter/Setter],\n default: [Getter/Setter],\n WriteStream: [Getter/Setter],\n ReadStream: [Getter/Setter],\n },\n url: {\n resolveObject: [Getter/Setter],\n resolve: [Getter/Setter],\n parse: [Getter/Setter],\n format: [Getter/Setter],\n default: [Getter/Setter],\n Url: [Getter/Setter],\n URLSearchParams: [Getter/Setter],\n URL: [Getter/Setter],\n },\n util: {\n types: [Getter/Setter],\n promisify: [Getter/Setter],\n log: [Getter/Setter],\n isUndefined: [Getter/Setter],\n isSymbol: [Getter/Setter],\n isString: [Getter/Setter],\n isRegExp: [Getter/Setter],\n isPrimitive: [Getter/Setter],\n isObject: [Getter/Setter],\n isNumber: [Getter/Setter],\n isNullOrUndefined: [Getter/Setter],\n isNull: [Getter/Setter],\n isFunction: [Getter/Setter],\n isError: [Getter/Setter],\n isDate: [Getter/Setter],\n isBuffer: [Getter/Setter],\n isBoolean: [Getter/Setter],\n isArray: [Getter/Setter],\n inspect: [Getter/Setter],\n inherits: [Getter/Setter],\n format: [Getter/Setter],\n deprecate: [Getter/Setter],\n default: [Getter/Setter],\n debuglog: [Getter/Setter],\n callbackifyOnRejected: [Getter/Setter],\n callbackify: [Getter/Setter],\n _extend: [Getter/Setter],\n TextEncoder: [Getter/Setter],\n TextDecoder: [Getter/Setter],\n },\n v8: [Function: v8],\n vm: [Function: vm],\n zlib: {\n default: [Getter/Setter],\n },\n}", + + validate(ctx) {}, }, }); itBundled("browser/NodePolyfillExternal", { diff --git a/test/bundler/bundler_html_server.test.ts b/test/bundler/bundler_html_server.test.ts new file mode 100644 index 0000000000..122709df27 --- /dev/null +++ b/test/bundler/bundler_html_server.test.ts @@ -0,0 +1,121 @@ +import { describe } from "bun:test"; +import { itBundled } from "./expectBundled"; + +describe("bundler", () => { + itBundled("compile/HTMLServerBasic", { + compile: true, + files: { + "/entry.ts": /* js */ ` + import index from "./index.html"; + + using server = Bun.serve({ + port: 0, + routes: { + "/": index, + }, + }); + + const res = await fetch(server.url); + console.log("Status:", res.status); + console.log("Content-Type:", res.headers.get("content-type")); + + const html = await res.text(); + console.log("Has HTML tag:", html.includes("<html>")); + console.log("Has h1:", html.includes("<h1>Hello HTML</h1>")); + + `, + "/index.html": /* html */ ` + <!DOCTYPE html> + <html> + <head> + <title>Test Page</title> + <link rel="stylesheet" href="./styles.css" /> + </head> + <body> + <h1>Hello HTML</h1> + <script type="module" src="./app.js"></script> + </body> + </html> + `, + "/styles.css": /* css */ ` + body { + background: blue; + } + `, + "/app.js": /* js */ ` + console.log("Client app loaded"); + `, + }, + run: { + stdout: "Status: 200\nContent-Type: text/html;charset=utf-8\nHas HTML tag: true\nHas h1: true", + }, + }); + + itBundled("compile/HTMLServerMultipleRoutes", { + compile: true, + files: { + "/entry.ts": /* js */ ` + import home from "./home.html"; + import about from "./about.html"; + + using server = Bun.serve({ + port: 0, + routes: { + "/": home, + "/about": about, + }, + }); + + // Test home route + const homeRes = await fetch(server.url); + console.log("Home status:", homeRes.status); + const homeHtml = await homeRes.text(); + console.log("Home has content:", homeHtml.includes("Home Page")); + + // Test about route + const aboutRes = await fetch(server.url + "about"); + console.log("About status:", aboutRes.status); + const aboutHtml = await aboutRes.text(); + console.log("About has content:", aboutHtml.includes("About Page")); + `, + "/home.html": /* html */ ` + <!DOCTYPE html> + <html> + <head> + <title>Home</title> + <link rel="stylesheet" href="./styles.css" /> + </head> + <body>
+ <h1>Home Page</h1> + <script type="module" src="./app.js"></script> + </body> + </html> + `, + "/about.html": /* html */ ` + <!DOCTYPE html> + <html> + <head> + <title>About</title> + <link rel="stylesheet" href="./styles.css" /> + </head> + <body>
+ <h1>About Page</h1> + <script type="module" src="./app.js"></script> + </body> + </html> + `, + "/styles.css": /* css */ ` + body { + margin: 0; + font-family: sans-serif; + } + `, + "/app.js": /* js */ ` + console.log("App loaded"); + `, + }, + run: { + stdout: "Home status: 200\nHome has content: true\nAbout status: 200\nAbout has content: true", + }, + }); +}); diff --git a/test/bundler/bundler_plugin.test.ts b/test/bundler/bundler_plugin.test.ts index e2cdcbffa9..44188998ed 100644 --- a/test/bundler/bundler_plugin.test.ts +++ b/test/bundler/bundler_plugin.test.ts @@ -820,4 +820,80 @@ describe("bundler", () => { }, }; }); + + itBundled("plugin/FileLoaderWithCustomContents", { + files: { + "index.html": /* html */ ` + <!DOCTYPE html> + <html> + <head> + <title>Test</title> + <script type="module" src="./script.js"></script> + </head> + <body> + <img src="./image.jpeg" /> + </body> + </html> + `, + "script.js": /* js */ ` + console.log("Script loaded"); + `, + "image.jpeg": "actual image data would be here", + }, + entryPoints: ["./index.html"], + outdir: "/out", + plugins(build) { + // This plugin intercepts .jpeg files and returns them with custom contents + // This previously caused a crash because additional_files wasn't populated + build.onLoad({ filter: /\.jpe?g$/ }, async args => { + return { + loader: "file", + contents: "custom image contents", + }; + }); + }, + onAfterBundle(api) { + // Verify the build succeeded and files were created + api.assertFileExists("index.html"); + // The image should be copied with a hashed name + const html = api.readFile("index.html"); + expect(html).toContain('src="'); + expect(html).toContain('.jpeg"'); + }, + }); + + itBundled("plugin/FileLoaderMultipleAssets", { + files: { + "index.js": /* js */ ` + import imgUrl from "./image.png"; + import wasmUrl from "./module.wasm"; + console.log(imgUrl, wasmUrl); + `, + "image.png": "png data", + "module.wasm": "wasm data", + }, + entryPoints: ["./index.js"], + outdir: "/out", + plugins(build) { + // Test multiple file types with custom contents + build.onLoad({ filter: /\.(png|wasm)$/ }, async args => { + const ext = args.path.split(".").pop(); + return { + loader: "file", + contents: `custom ${ext} contents`, + }; + }); + }, + run: { + stdout: /\.(png|wasm)/, + }, + onAfterBundle(api) { + // Verify the build succeeded and files were created + api.assertFileExists("index.js"); + const js = api.readFile("index.js"); + // Should contain references to the copied files + expect(js).toContain('.png"'); + expect(js).toContain('.wasm"'); + }, + }); }); diff --git a/test/bundler/bundler_regressions.test.ts b/test/bundler/bundler_regressions.test.ts index 315515b0d9..e4efcf36f8 100644 --- a/test/bundler/bundler_regressions.test.ts +++ b/test/bundler/bundler_regressions.test.ts @@ -225,6 +225,26 @@ describe("bundler", () => { }, entryPointsRaw: ["test/entry.ts", "--external", "*"], }); + itBundled(`regression/NODE_PATHBuild cli`, { + files: { + "/entry.js": ` + import MyClass from 'MyClass'; + console.log(new MyClass().constructor.name); + `, + "/src/MyClass.js": ` + export default class MyClass {} + `, + }, + entryPoints: ["/entry.js"], + backend: "cli", + env: { + NODE_PATH: "{{root}}/src", + }, + run: { + stdout: "MyClass", + }, + }); + itBundled("regression/NamespaceTracking#12337", { files: { "/entry.ts": /* ts */ ` diff --git a/test/bundler/bundler_splitting.test.ts b/test/bundler/bundler_splitting.test.ts new file mode 100644 index 0000000000..3f98a07a79 --- /dev/null +++ b/test/bundler/bundler_splitting.test.ts @@ -0,0 +1,308 @@ +import { describe } from "bun:test"; +import { itBundled } from "./expectBundled"; + +describe("bundler", () => { + itBundled("splitting/DynamicImportCSSFile", { + files: { + "/client.tsx":
`import('./test')`, + "/test.ts": ` + import './test.css' + console.log('test.ts loaded') + `, + "/test.css": `.aaa { color: red; }`, + }, + entryPoints: ["/client.tsx"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: { + file: "/out/client.js", + stdout: "test.ts loaded", + }, + }); + + itBundled("splitting/DynamicImportMultipleCSSImports", { + files: { + "/entry.js": ` + import('./module1').then(() => console.log('module1 loaded')); + import('./module2').then(() => console.log('module2 loaded')); + `, + "/module1.js": ` + import './styles1.css' + console.log('module1.js executed') + `, + "/module2.js": ` + import './styles2.css' + console.log('module2.js executed') + `, + "/styles1.css": `.class1 { color: red; }`, + "/styles2.css": `.class2 { color: blue; }`, + }, + entryPoints: ["/entry.js"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: { + file: "/out/entry.js", + stdout: "module1.js executed\nmodule1 loaded\nmodule2.js executed\nmodule2 loaded", + }, + }); + + itBundled("splitting/StaticAndDynamicCSSImports", { + files: { + "/entry.js": ` + import './static.css'; + import('./dynamic').then(() => console.log('dynamic module loaded')); + `, + "/dynamic.js": ` + import './dynamic.css' + console.log('dynamic.js executed') + `, + "/static.css": `.static { color: green; }`, + "/dynamic.css": `.dynamic { color: purple; }`, + }, + entryPoints: ["/entry.js"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: { + file: "/out/entry.js", + stdout: "dynamic.js executed\ndynamic module loaded", + }, + }); + + itBundled("splitting/NestedDynamicImportWithCSS", { + files: { + "/entry.js": ` + import('./level1').then(() => console.log('level1 loaded')); + `, + "/level1.js": ` + import './level1.css' + console.log('level1.js executed') + import('./level2').then(() => console.log('level2 loaded from level1')); + `, + "/level2.js": ` + import './level2.css' + console.log('level2.js executed') + `, + "/level1.css": `.level1 { color: red; }`, + "/level2.css": `.level2 { color: blue; }`, + }, + entryPoints: ["/entry.js"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: { + file: "/out/entry.js", + stdout: "level1.js executed\nlevel1 loaded\nlevel2.js executed\nlevel2 loaded from level1", + }, + }); + + itBundled("splitting/SharedCSSBetweenChunks", { + files: { + "/entry.js": ` + import('./moduleA').then(() => console.log('moduleA loaded')); + import('./moduleB').then(() => console.log('moduleB loaded')); + `, + "/moduleA.js": ` + import './shared.css' + import './moduleA.css' + console.log('moduleA.js executed') + `, + "/moduleB.js": ` + import './shared.css' + import './moduleB.css' + console.log('moduleB.js executed') + `, + "/shared.css": `.shared { color: green; }`, + "/moduleA.css": `.moduleA { color: red; }`, + "/moduleB.css": `.moduleB { color: blue; }`, + }, + entryPoints: ["/entry.js"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: { + file: "/out/entry.js", + stdout: "moduleA.js executed\nmoduleA loaded\nmoduleB.js executed\nmoduleB loaded", + }, + }); + + itBundled("splitting/DynamicImportChainWithCSS", { + files: { + "/entry.js": ` + const chain = () => import('./chain1') + .then(() => { + console.log('chain1 loaded'); + return import('./chain2'); + }) + .then(() => { + console.log('chain2 loaded'); + return import('./chain3'); + }) + 
.then(() => { + console.log('chain3 loaded'); + }); + chain(); + `, + "/chain1.js": ` + import './chain1.css' + console.log('chain1.js executed') + `, + "/chain2.js": ` + import './chain2.css' + console.log('chain2.js executed') + `, + "/chain3.js": ` + import './chain3.css' + console.log('chain3.js executed') + `, + "/chain1.css": `.chain1 { color: red; }`, + "/chain2.css": `.chain2 { color: green; }`, + "/chain3.css": `.chain3 { color: blue; }`, + }, + entryPoints: ["/entry.js"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: { + file: "/out/entry.js", + stdout: "chain1.js executed\nchain1 loaded\nchain2.js executed\nchain2 loaded\nchain3.js executed\nchain3 loaded", + }, + }); + + itBundled("splitting/ConditionalDynamicImportWithCSS", { + files: { + "/entry.js": ` + const condition = true; + if (condition) { + import('./moduleTrue').then(() => console.log('true branch loaded')); + } else { + import('./moduleFalse').then(() => console.log('false branch loaded')); + } + `, + "/moduleTrue.js": ` + import './true.css' + console.log('moduleTrue.js executed') + `, + "/moduleFalse.js": ` + import './false.css' + console.log('moduleFalse.js executed') + `, + "/true.css": `.true { color: green; }`, + "/false.css": `.false { color: red; }`, + }, + entryPoints: ["/entry.js"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: { + file: "/out/entry.js", + stdout: "moduleTrue.js executed\ntrue branch loaded", + }, + }); + + itBundled("splitting/MultipleEntryPointsWithSharedCSS", { + files: { + "/entry1.js": ` + import './shared.css' + import './entry1.css' + console.log('entry1.js executed') + `, + "/entry2.js": ` + import './shared.css' + import './entry2.css' + console.log('entry2.js executed') + `, + "/shared.css": `.shared { font-size: 16px; }`, + "/entry1.css": `.entry1 { color: red; }`, + "/entry2.css": `.entry2 { color: blue; }`, + }, + entryPoints: ["/entry1.js", "/entry2.js"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: [ + { + file: "/out/entry1.js", + stdout: "entry1.js executed", + }, + { + file: "/out/entry2.js", + stdout: "entry2.js executed", + }, + ], + }); + + itBundled("splitting/DynamicImportWithOnlyCSSNoJS", { + files: { + "/entry.js": ` + import('./styles.css').then(() => console.log('CSS import succeeded')).catch(err => console.log('CSS import failed:', err.message)); + `, + "/styles.css": `.styles { color: blue; }`, + }, + entryPoints: ["/entry.js"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: { + file: "/out/entry.js", + stdout: "CSS import succeeded", + }, + }); + + itBundled("splitting/CircularDynamicImportsWithCSS", { + files: { + "/entry.js": ` + import('./a').then(module => { + console.log('a loaded from entry'); + return import('./b'); + }).then(module => { + console.log('b loaded from entry, value:', module.bValue); + }); + `, + "/a.js": ` + import './a.css' + console.log('a.js executed') + `, + "/b.js": ` + import './b.css' + console.log('b.js executed') + export const bValue = 'B'; + // Import a to create circular dependency + import * as A from './a'; + console.log('b.js imports a', A); + `, + "/a.css": `.a { color: red; }`, + "/b.css": `.b { color: blue; }`, + }, + entryPoints: ["/entry.js"], + splitting: true, + outdir: "/out", + target: "browser", + env: "inline", + format: "esm", + run: { + file: "/out/entry.js", + stdout: "a.js executed\na loaded from 
entry\nb.js executed\nb.js imports a {}\nb loaded from entry, value: B", + }, + }); +}); diff --git a/test/bundler/cli.test.ts b/test/bundler/cli.test.ts index 43d8cbd1d9..094304945c 100644 --- a/test/bundler/cli.test.ts +++ b/test/bundler/cli.test.ts @@ -63,7 +63,7 @@ describe("bun build", () => { const tmpdir = tmpdirSync(); const baseDir = `${tmpdir}/bun-build-dirname-filename-${Date.now()}`; fs.mkdirSync(baseDir, { recursive: true }); - fs.mkdirSync(path.join(baseDir, "我")), { recursive: true }; + (fs.mkdirSync(path.join(baseDir, "我")), { recursive: true }); fs.writeFileSync(path.join(baseDir, "我", "我.ts"), "console.log(__dirname); console.log(__filename);"); const { exitCode } = Bun.spawnSync({ cmd: [bunExe(), "build", path.join(baseDir, "我/我.ts"), "--compile", "--outfile", path.join(baseDir, "exe.exe")], @@ -201,7 +201,14 @@ test("you can use --outfile=... and --sourcemap", () => { const outputContent = fs.readFileSync(outFile, "utf8"); expect(outputContent).toContain("//# sourceMappingURL=out.js.map"); - expect(stdout.toString()).toMatchInlineSnapshot(); + expect(stdout.toString().replace(/\d{1,}ms/, "0.000000001ms")).toMatchInlineSnapshot(` + "Bundled 1 module in 0.000000001ms + + out.js 120 bytes (entry point) + out.js.map 213 bytes (source map) + + " + `); }); test("some log cases", () => { diff --git a/test/bundler/esbuild/__snapshots__/css.test.ts.snap b/test/bundler/esbuild/__snapshots__/css.test.ts.snap index bebc1bf66c..31fedbf0e0 100644 --- a/test/bundler/esbuild/__snapshots__/css.test.ts.snap +++ b/test/bundler/esbuild/__snapshots__/css.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`esbuild-bundler css/CSSAtImportConditionsFromExternalRepo: /out/001/default/style.css 1`] = ` "/* 001/default/a.css */ diff --git a/test/bundler/esbuild/default.test.ts b/test/bundler/esbuild/default.test.ts index 21e213bfe2..a17df2e2ff 100644 --- a/test/bundler/esbuild/default.test.ts +++ b/test/bundler/esbuild/default.test.ts @@ -1,6 +1,6 @@ import assert from "assert"; import { describe, expect } from "bun:test"; -import { osSlashes } from "harness"; +import { isMacOS, isMusl, osSlashes } from "harness"; import path from "path"; import { dedent, ESBUILD_PATH, itBundled } from "../expectBundled"; @@ -198,7 +198,7 @@ describe("bundler", () => { onAfterBundle(api) { api.appendFile( "/out.js", - dedent/* js */ ` + dedent /* js */ ` import { strictEqual } from "node:assert"; strictEqual(globalName.default, 123, ".default"); strictEqual(globalName.v, 234, ".v"); @@ -299,7 +299,7 @@ describe("bundler", () => { export default 3; export const a2 = 4; `, - "/test.js": String.raw/* js */ ` + "/test.js": String.raw /* js */ ` import { deepEqual } from 'node:assert'; globalThis.deepEqual = deepEqual; await import ('./out.js'); @@ -1581,6 +1581,29 @@ describe("bundler", () => { "/entry.js": ["Top-level return cannot be used inside an ECMAScript module"], }, }); + itBundled("default/CircularTLADependency", { + files: { + "/entry.js": /* js */ ` + const { A } = await import('./a.js'); + console.log(A); + `, + "/a.js": /* js */ ` + import { B } from './b.js'; + export const A = 'hi'; + `, + "/b.js": /* js */ ` + import { A } from './a.js'; + + // TLA that should mark the wrapper closure for a.js as async + await 1; + + export const B = 'hello'; + `, + }, + run: { + stdout: "hi\n", + }, + }); itBundled("default/ThisOutsideFunctionRenamedToExports", { files: { "/entry.js": /* js */ ` @@ -5290,6 +5313,7 @@ 
describe("bundler", () => { }, }); const RequireShimSubstitutionBrowser = itBundled("default/RequireShimSubstitutionBrowser", { + todo: isMacOS || isMusl, files: { "/entry.js": /* js */ ` Promise.all([ @@ -5351,12 +5375,13 @@ describe("bundler", () => { number 567 string ${JSON.stringify(osSlashes("/node_modules/some-path/index.js"))} string ${JSON.stringify(osSlashes("/node_modules/second-path/index.js"))} - object {"default":123} - object {"default":567} + object {"default":123,"module.exports":123} + object {"default":567,"module.exports":567} `, }, }); itBundled("default/RequireShimSubstitutionNode", { + todo: isMacOS || isMusl, files: RequireShimSubstitutionBrowser.options.files, runtimeFiles: RequireShimSubstitutionBrowser.options.runtimeFiles, target: "node", @@ -5377,8 +5402,8 @@ describe("bundler", () => { number 567 string ${JSON.stringify(osSlashes("/node_modules/some-path/index.js"))} string ${JSON.stringify(osSlashes("/node_modules/second-path/index.js"))} - object {"default":123} - object {"default":567} + object {"default":123,"module.exports":123} + object {"default":567,"module.exports":567} `, }, }); diff --git a/test/bundler/expectBundled.ts b/test/bundler/expectBundled.ts index fb9d111679..c4126316e8 100644 --- a/test/bundler/expectBundled.ts +++ b/test/bundler/expectBundled.ts @@ -826,10 +826,13 @@ function expectBundled( } const bundlerEnv = { ...bunEnv, ...env }; - // remove undefined keys instead of passing "undefined" + // remove undefined keys instead of passing "undefined" and resolve {{root}} for (const key in bundlerEnv) { - if (bundlerEnv[key] === undefined) { + const value = bundlerEnv[key]; + if (value === undefined) { delete bundlerEnv[key]; + } else if (typeof value === "string") { + bundlerEnv[key] = value.replaceAll("{{root}}", root); } } diff --git a/test/bundler/transpiler/__snapshots__/transpiler.test.js.snap b/test/bundler/transpiler/__snapshots__/transpiler.test.js.snap index 8e21dd38d1..a78b2167dd 100644 --- a/test/bundler/transpiler/__snapshots__/transpiler.test.js.snap +++ b/test/bundler/transpiler/__snapshots__/transpiler.test.js.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`Bun.Transpiler using statements work right 1`] = ` "let __bun_temp_ref_1$ = []; diff --git a/test/bundler/transpiler/macro-test.test.ts b/test/bundler/transpiler/macro-test.test.ts index 5eaa5d747c..bc5ea871f3 100644 --- a/test/bundler/transpiler/macro-test.test.ts +++ b/test/bundler/transpiler/macro-test.test.ts @@ -8,6 +8,7 @@ import defaultMacro, { identity, identity as identity1, identity as identity2, + ireturnapromise, } from "./macro.ts" assert { type: "macro" }; import * as macros from "./macro.ts" assert { type: "macro" }; @@ -124,3 +125,7 @@ test("namespace import", () => { // test("template string latin1", () => { // expect(identity(`©${""}`)).toBe("©"); // }); + +test("ireturnapromise", async () => { + expect(await ireturnapromise()).toEqual("aaa"); +}); diff --git a/test/bundler/transpiler/macro.ts b/test/bundler/transpiler/macro.ts index 430fab84ee..9da8d72c5a 100644 --- a/test/bundler/transpiler/macro.ts +++ b/test/bundler/transpiler/macro.ts @@ -17,3 +17,9 @@ export function addStringsUTF16(arg: string) { export default function() { return "defaultdefaultdefault"; } + +export async function ireturnapromise() { + const { promise, resolve } = Promise.withResolvers(); + setTimeout(() => resolve("aaa"), 100); + return promise; +} diff --git 
a/test/cli/create/__snapshots__/create-jsx.test.ts.snap b/test/cli/create/__snapshots__/create-jsx.test.ts.snap index 2d13b2dc65..793466f6ca 100644 --- a/test/cli/create/__snapshots__/create-jsx.test.ts.snap +++ b/test/cli/create/__snapshots__/create-jsx.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`development: true react spa (no tailwind) dev server 1`] = ` " diff --git a/test/cli/inspect/__snapshots__/inspect.test.ts.snap b/test/cli/inspect/__snapshots__/inspect.test.ts.snap index 1b8aaf67ee..af9ed4cfee 100644 --- a/test/cli/inspect/__snapshots__/inspect.test.ts.snap +++ b/test/cli/inspect/__snapshots__/inspect.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`junit reporter 1`] = ` " diff --git a/test/cli/install/__snapshots__/bun-audit.test.ts.snap b/test/cli/install/__snapshots__/bun-audit.test.ts.snap index d7b514d5f5..35cf46d7bd 100644 --- a/test/cli/install/__snapshots__/bun-audit.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-audit.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`\`bun audit\` should exit code 1 when there are vulnerabilities: bun-audit-expect-vulnerabilities-found 1`] = ` "minimist <0.2.4 diff --git a/test/cli/install/__snapshots__/bun-install-dep.test.ts.snap b/test/cli/install/__snapshots__/bun-install-dep.test.ts.snap index 94cd72502d..96abf0eb3e 100644 --- a/test/cli/install/__snapshots__/bun-install-dep.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-install-dep.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`npa @scoped/package 1`] = ` { diff --git a/test/cli/install/__snapshots__/bun-install-registry.test.ts.snap b/test/cli/install/__snapshots__/bun-install-registry.test.ts.snap index 0c7bcd1b7c..5f05a34fcd 100644 --- a/test/cli/install/__snapshots__/bun-install-registry.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-install-registry.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`auto-install symlinks (and junctions) are created correctly in the install cache 1`] = ` "{ diff --git a/test/cli/install/__snapshots__/bun-install.test.ts.snap b/test/cli/install/__snapshots__/bun-install.test.ts.snap index f82188350b..eb0f27c652 100644 --- a/test/cli/install/__snapshots__/bun-install.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-install.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`should report error on invalid format for package.json 1`] = ` "1 | foo diff --git a/test/cli/install/__snapshots__/bun-lock.test.ts.snap b/test/cli/install/__snapshots__/bun-lock.test.ts.snap index 61cfd88fdd..879aa0517c 100644 --- a/test/cli/install/__snapshots__/bun-lock.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-lock.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`should write plaintext lockfiles 1`] = ` "{ diff --git a/test/cli/install/__snapshots__/bun-pm.test.ts.snap b/test/cli/install/__snapshots__/bun-pm.test.ts.snap index e548c4ef52..4298b81516 100644 --- a/test/cli/install/__snapshots__/bun-pm.test.ts.snap +++ 
b/test/cli/install/__snapshots__/bun-pm.test.ts.snap @@ -1,3 +1,3 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`bun pm migrate 1`] = `"E7F4C15F76D43059-37ed01456afdc149-B17A9541F8322712-04892ad4e094e703"`; diff --git a/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap b/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap index 922c072bd8..6f2caec2ca 100644 --- a/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`dependency on workspace without version in package.json: version: * 1`] = ` "{ diff --git a/test/cli/install/__snapshots__/catalogs.test.ts.snap b/test/cli/install/__snapshots__/catalogs.test.ts.snap index e4af894193..815c94e560 100644 --- a/test/cli/install/__snapshots__/catalogs.test.ts.snap +++ b/test/cli/install/__snapshots__/catalogs.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`basic detect changes (bun.lock) 1`] = ` "{ diff --git a/test/cli/install/__snapshots__/semver.test.ts.snap b/test/cli/install/__snapshots__/semver.test.ts.snap index 85897d6404..59c2bd81a2 100644 --- a/test/cli/install/__snapshots__/semver.test.ts.snap +++ b/test/cli/install/__snapshots__/semver.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`Bun.semver.satisfies() pre-release snapshot 1`] = ` [ diff --git a/test/cli/install/bun-add.test.ts b/test/cli/install/bun-add.test.ts index 6587d1e0f1..b8455aec02 100644 --- a/test/cli/install/bun-add.test.ts +++ b/test/cli/install/bun-add.test.ts @@ -2266,6 +2266,6 @@ it("should add local tarball dependency", async () => { const package_json = await file(join(package_dir, "node_modules", "baz", "package.json")).json(); expect(package_json.name).toBe("baz"); expect(package_json.version).toBe("0.0.3"); - expect(await file(join(package_dir, "package.json")).text()).toInclude('"baz-0.0.3.tgz"'), - await access(join(package_dir, "bun.lockb")); + (expect(await file(join(package_dir, "package.json")).text()).toInclude('"baz-0.0.3.tgz"'), + await access(join(package_dir, "bun.lockb"))); }); diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts index 4eab58b334..b48ddf49b7 100644 --- a/test/cli/install/bun-install.test.ts +++ b/test/cli/install/bun-install.test.ts @@ -486,7 +486,7 @@ it("should work when moving workspace packages", async () => { await Bun.$`${bunExe()} i`.env(bunEnv).cwd(package_dir); - await Bun.$/* sh */ ` + await Bun.$ /* sh */ ` mkdir config # change workspaces from "packages/*" to "config/*" @@ -558,7 +558,7 @@ it("should work when renaming a single workspace package", async () => { await Bun.$`${bunExe()} i`.env(bunEnv).cwd(package_dir); - await Bun.$/* sh */ ` + await Bun.$ /* sh */ ` echo ${JSON.stringify({ "name": "my-workspace", version: "0.0.1", diff --git a/test/cli/install/bun-pack.test.ts b/test/cli/install/bun-pack.test.ts index ea3693d1e6..31fa4e005d 100644 --- a/test/cli/install/bun-pack.test.ts +++ b/test/cli/install/bun-pack.test.ts @@ -12,7 +12,7 @@ beforeEach(() => { packageDir = tmpdirSync(); }); -async function packExpectError(cwd: string, env: NodeJS.ProcessEnv, ...args: string[]) { +async function packExpectError(cwd: string, env: NodeJS.Dict<string>, 
...args: string[]) { const { stdout, stderr, exited } = spawn({ cmd: [bunExe(), "pm", "pack", ...args], cwd, diff --git a/test/cli/install/bun-pm-version.test.ts b/test/cli/install/bun-pm-version.test.ts new file mode 100644 index 0000000000..6f37ad2a9e --- /dev/null +++ b/test/cli/install/bun-pm-version.test.ts @@ -0,0 +1,877 @@ +import { spawn, spawnSync } from "bun"; +import { describe, expect, it } from "bun:test"; +import { bunEnv, bunExe, tempDirWithFiles } from "harness"; +import { join } from "node:path"; + +describe("bun pm version", () => { + let i = 0; + + function setupTest() { + const testDir = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify( + { + name: "test-package", + version: "1.0.0", + }, + null, + 2, + ), + }); + return testDir; + } + + function setupGitTest() { + const testDir = setupTest(); + + spawnSync({ + cmd: ["git", "init"], + cwd: testDir, + env: bunEnv, + }); + + spawnSync({ + cmd: ["git", "config", "user.name", "Test User"], + cwd: testDir, + env: bunEnv, + }); + + spawnSync({ + cmd: ["git", "config", "user.email", "test@example.com"], + cwd: testDir, + env: bunEnv, + }); + + spawnSync({ + cmd: ["git", "add", "package.json"], + cwd: testDir, + env: bunEnv, + }); + + spawnSync({ + cmd: ["git", "commit", "-m", "Initial commit"], + cwd: testDir, + env: bunEnv, + }); + + return testDir; + } + + function setupMonorepoTest() { + const testDir = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify( + { + name: "monorepo-root", + version: "1.0.0", + workspaces: ["packages/*"], + }, + null, + 2, + ), + "packages/pkg-a/package.json": JSON.stringify( + { + name: "@test/pkg-a", + version: "2.0.0", + }, + null, + 2, + ), + "packages/pkg-b/package.json": JSON.stringify( + { + name: "@test/pkg-b", + version: "3.0.0", + dependencies: { + "@test/pkg-a": "workspace:*", + }, + }, + null, + 2, + ), + }); + + return testDir; + } + + async function runCommand(args: string[], cwd: string, expectSuccess = true) { + const result = spawn({ + cmd: args, + cwd, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + const [output, error] = await Promise.all([new Response(result.stdout).text(), new Response(result.stderr).text()]); + + const code = await result.exited; + + return { output, error, code }; + } + + describe("help and version previews", () => { + it("should show help when no arguments provided", async () => { + const testDir = setupTest(); + + const { output, code } = await runCommand([bunExe(), "pm", "version"], testDir); + + expect(code).toBe(0); + expect(output).toContain("bun pm version"); + expect(output).toContain("Current package version: v1.0.0"); + expect(output).toContain("patch"); + expect(output).toContain("minor"); + expect(output).toContain("major"); + }); + + it("shows help with version previews", async () => { + const testDir1 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "2.5.3" }, null, 2), + }); + + const { output: output1, code: code1 } = await runCommand([bunExe(), "pm", "version"], testDir1); + + expect(code1).toBe(0); + expect(output1).toContain("Current package version: v2.5.3"); + expect(output1).toContain("patch 2.5.3 → 2.5.4"); + expect(output1).toContain("minor 2.5.3 → 2.6.0"); + expect(output1).toContain("major 2.5.3 → 3.0.0"); + + const testDir2 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "1.0.0-alpha.0" }, null, 2), + }); + + const { output: output2, code: code2 } = await runCommand([bunExe(), "pm", 
"version"], testDir2); + + expect(code2).toBe(0); + expect(output2).toContain("prepatch"); + expect(output2).toContain("preminor"); + expect(output2).toContain("premajor"); + expect(output2).toContain("1.0.1-alpha.0"); + expect(output2).toContain("1.1.0-alpha.0"); + expect(output2).toContain("2.0.0-alpha.0"); + + const testDir3 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "1.0.0" }, null, 2), + }); + + const { output: output3, code: code3 } = await runCommand( + [bunExe(), "pm", "version", "--preid", "beta"], + testDir3, + ); + + expect(code3).toBe(0); + expect(output3).toContain("prepatch"); + expect(output3).toContain("preminor"); + expect(output3).toContain("premajor"); + expect(output3).toContain("1.0.1-beta.0"); + expect(output3).toContain("1.1.0-beta.0"); + expect(output3).toContain("2.0.0-beta.0"); + + const testDir4 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test" }, null, 2), + }); + + const { output: output4 } = await runCommand([bunExe(), "pm", "version"], testDir4); + + expect(output4).not.toContain("Current package version:"); + expect(output4).toContain("patch 1.0.0 → 1.0.1"); + }); + }); + + describe("basic version incrementing", () => { + it("should increment versions correctly", async () => { + const testDir = setupTest(); + + const { output: patchOutput, code: patchCode } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version"], + testDir, + ); + expect(patchCode).toBe(0); + expect(patchOutput.trim()).toBe("v1.0.1"); + + const { output: minorOutput, code: minorCode } = await runCommand( + [bunExe(), "pm", "version", "minor", "--no-git-tag-version"], + testDir, + ); + expect(minorCode).toBe(0); + expect(minorOutput.trim()).toBe("v1.1.0"); + + const { output: majorOutput, code: majorCode } = await runCommand( + [bunExe(), "pm", "version", "major", "--no-git-tag-version"], + testDir, + ); + expect(majorCode).toBe(0); + expect(majorOutput.trim()).toBe("v2.0.0"); + + const packageJson = await Bun.file(`${testDir}/package.json`).json(); + expect(packageJson.version).toBe("2.0.0"); + }); + + it("should set specific version", async () => { + const testDir = setupTest(); + + const { output, code } = await runCommand([bunExe(), "pm", "version", "3.2.1", "--no-git-tag-version"], testDir); + + expect(code).toBe(0); + expect(output.trim()).toBe("v3.2.1"); + + const packageJson = await Bun.file(`${testDir}/package.json`).json(); + expect(packageJson.version).toBe("3.2.1"); + }); + + it("handles empty package.json", async () => { + const testDir = tempDirWithFiles(`version-${i++}`, { + "package.json": "{}", + }); + + const { output, code } = await runCommand([bunExe(), "pm", "version", "patch", "--no-git-tag-version"], testDir); + + expect(code).toBe(0); + expect(output.trim()).toBe("v0.0.1"); + + const packageJson = await Bun.file(`${testDir}/package.json`).json(); + expect(packageJson.version).toBe("0.0.1"); + }); + }); + + describe("error handling", () => { + it("handles various error conditions", async () => { + const testDir2 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "invalid-version" }, null, 2), + }); + + const { error: error2, code: code2 } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version"], + testDir2, + false, + ); + expect(error2).toContain("is not a valid semver"); + expect(code2).toBe(1); + + const testDir3 = setupTest(); + + const { error: error3, code: code3 } = await 
runCommand( + [bunExe(), "pm", "version", "invalid-arg", "--no-git-tag-version"], + testDir3, + false, + ); + expect(error3).toContain("Invalid version argument"); + expect(code3).toBe(1); + + const testDir4 = setupTest(); + + const { error: error4, code: code4 } = await runCommand( + [bunExe(), "pm", "version", "1.0.0", "--no-git-tag-version"], + testDir4, + false, + ); + expect(error4).toContain("Version not changed"); + expect(code4).toBe(1); + + const { output: output5, code: code5 } = await runCommand( + [bunExe(), "pm", "version", "1.0.0", "--no-git-tag-version", "--allow-same-version"], + testDir4, + ); + expect(output5.trim()).toBe("v1.0.0"); + expect(code5).toBe(0); + }); + + it("handles missing package.json like npm", async () => { + const testDir = tempDirWithFiles(`version-${i++}`, { + "README.md": "# Test project", + }); + + const { error, code } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version"], + testDir, + false, + ); + expect(error).toContain("package.json"); + expect(code).toBe(1); + // it's an earlier check that all "bun pm *" commands do, so it's not "bun pm version" specific + // expect(error.includes("ENOENT") || error.includes("no such file")).toBe(true); + }); + + it("handles empty string package.json like npm", async () => { + const testDir = tempDirWithFiles(`version-${i++}`, { + "package.json": '""', + }); + + const { error, code } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version"], + testDir, + false, + ); + expect(error).toContain("Failed to parse package.json"); + expect(code).toBe(1); + }); + + it("handles malformed JSON like npm", async () => { + const testDir = tempDirWithFiles(`version-${i++}`, { + "package.json": '{ "name": "test", invalid json }', + }); + + const { error, code } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version"], + testDir, + false, + ); + expect(error).toContain("Failed to parse package.json"); + expect(code).toBe(1); + }); + }); + + describe("git integration", () => { + it("creates git commits and tags by default", async () => { + const testDir1 = setupGitTest(); + + const { + output: output1, + code: code1, + error: stderr1, + } = await runCommand([bunExe(), "pm", "version", "patch"], testDir1); + + expect(stderr1.trim()).toBe(""); + expect(output1.trim()).toBe("v1.0.1"); + expect(code1).toBe(0); + + const { output: tagOutput } = await runCommand(["git", "tag", "-l"], testDir1); + expect(tagOutput).toContain("v1.0.1"); + + const { output: logOutput } = await runCommand(["git", "log", "--oneline"], testDir1); + expect(logOutput).toContain("v1.0.1"); + }); + + it("supports custom commit messages", async () => { + const testDir2 = setupGitTest(); + + const { + output: output2, + error: error2, + code: code2, + } = await runCommand([bunExe(), "pm", "version", "patch", "--message", "Custom release message"], testDir2); + expect(error2).toBe(""); + + const { output: gitLogOutput } = await runCommand(["git", "log", "--oneline"], testDir2); + expect(gitLogOutput).toContain("Custom release message"); + + expect(code2).toBe(0); + expect(output2.trim()).toBe("v1.0.1"); + }); + + it("fails when git working directory is not clean", async () => { + const testDir3 = setupGitTest(); + + await Bun.write(join(testDir3, "untracked.txt"), "untracked content"); + + const { error: error3, code: code3 } = await runCommand([bunExe(), "pm", "version", "patch"], testDir3, false); + + expect(error3).toContain("Git working directory not clean"); + expect(code3).toBe(1); + }); + 
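+ // Note: --force only bypasses the clean-working-tree check above; the commit and tag are still created, as the next test verifies.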
+ it("allows dirty working directory with --force flag", async () => { + const testDir = setupGitTest(); + + await Bun.write(join(testDir, "untracked.txt"), "untracked content"); + + const { output, code, error } = await runCommand([bunExe(), "pm", "version", "patch", "--force"], testDir); + + expect(code).toBe(0); + expect(error.trim()).toBe(""); + expect(output.trim()).toBe("v1.0.1"); + + const { output: tagOutput } = await runCommand(["git", "tag", "-l"], testDir); + expect(tagOutput).toContain("v1.0.1"); + + const { output: logOutput } = await runCommand(["git", "log", "--oneline"], testDir); + expect(logOutput).toContain("v1.0.1"); + }); + + it("works without git when no repo is present", async () => { + const testDir4 = setupTest(); + + const { output: output4, code: code4 } = await runCommand([bunExe(), "pm", "version", "patch"], testDir4); + + expect(code4).toBe(0); + expect(output4.trim()).toBe("v1.0.1"); + + const packageJson = await Bun.file(`${testDir4}/package.json`).json(); + expect(packageJson.version).toBe("1.0.1"); + }); + + it("respects --no-git-tag-version flag", async () => { + const testDir5 = setupGitTest(); + const { output: output5, code: code5 } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version"], + testDir5, + ); + + expect(code5).toBe(0); + expect(output5.trim()).toBe("v1.0.1"); + + const packageJson5 = await Bun.file(`${testDir5}/package.json`).json(); + expect(packageJson5.version).toBe("1.0.1"); + + const { output: tagOutput5 } = await runCommand(["git", "tag", "-l"], testDir5); + expect(tagOutput5.trim()).toBe(""); + + const { output: logOutput5 } = await runCommand(["git", "log", "--oneline"], testDir5); + expect(logOutput5).toContain("Initial commit"); + expect(logOutput5).not.toContain("v1.0.1"); + }); + + it("respects --git-tag-version=false flag", async () => { + const testDir6 = setupGitTest(); + const { output: output6, code: code6 } = await runCommand( + [bunExe(), "pm", "version", "patch", "--git-tag-version=false"], + testDir6, + ); + + expect(code6).toBe(0); + expect(output6.trim()).toBe("v1.0.1"); + + const packageJson6 = await Bun.file(`${testDir6}/package.json`).json(); + expect(packageJson6.version).toBe("1.0.1"); + + const { output: tagOutput6 } = await runCommand(["git", "tag", "-l"], testDir6); + expect(tagOutput6.trim()).toBe(""); + + const { output: logOutput6 } = await runCommand(["git", "log", "--oneline"], testDir6); + expect(logOutput6).toContain("Initial commit"); + expect(logOutput6).not.toContain("v1.0.1"); + }); + + it("respects --git-tag-version=true flag", async () => { + const testDir7 = setupGitTest(); + const { output: output7, code: code7 } = await runCommand( + [bunExe(), "pm", "version", "patch", "--git-tag-version=true"], + testDir7, + ); + + expect(code7).toBe(0); + expect(output7.trim()).toBe("v1.0.1"); + + const packageJson7 = await Bun.file(`${testDir7}/package.json`).json(); + expect(packageJson7.version).toBe("1.0.1"); + + const { output: tagOutput7 } = await runCommand(["git", "tag", "-l"], testDir7); + expect(tagOutput7).toContain("v1.0.1"); + + const { output: logOutput7 } = await runCommand(["git", "log", "--oneline"], testDir7); + expect(logOutput7).toContain("v1.0.1"); + }); + + it("supports %s substitution in commit messages", async () => { + const testDir8 = setupGitTest(); + const { output: output8, code: code8 } = await runCommand( + [bunExe(), "pm", "version", "patch", "--message", "Bump version to %s"], + testDir8, + ); + + expect(code8).toBe(0); + 
expect(output8.trim()).toBe("v1.0.1"); + + const { output: logOutput8 } = await runCommand(["git", "log", "--oneline", "-1"], testDir8); + expect(logOutput8).toContain("Bump version to 1.0.1"); + + const testDir9 = setupGitTest(); + const { output: output9, code: code9 } = await runCommand( + [bunExe(), "pm", "version", "2.5.0", "-m", "Release %s with fixes"], + testDir9, + ); + + expect(code9).toBe(0); + expect(output9.trim()).toBe("v2.5.0"); + + const { output: logOutput9 } = await runCommand(["git", "log", "--oneline", "-1"], testDir9); + expect(logOutput9).toContain("Release 2.5.0 with fixes"); + }); + }); + + describe("JSON formatting preservation", () => { + it("preserves JSON formatting correctly", async () => { + const originalJson1 = `{ + "name": "test", + "version": "1.0.0", + "scripts": { + "test": "echo test" + }, + "dependencies": { + "lodash": "^4.17.21" + } + }`; + + const testDir1 = tempDirWithFiles(`version-${i++}`, { + "package.json": originalJson1, + }); + + const { output: output1, code: code1 } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version"], + testDir1, + ); + + expect(code1).toBe(0); + expect(output1.trim()).toBe("v1.0.1"); + + const updatedJson1 = await Bun.file(`${testDir1}/package.json`).text(); + + expect(updatedJson1).toContain(' "version": "1.0.1"'); + expect(updatedJson1).toContain('"name": "test"'); + expect(updatedJson1).toContain(' "test": "echo test"'); + + expect(JSON.parse(updatedJson1)).toMatchObject({ + name: "test", + version: "1.0.1", + scripts: { + test: "echo test", + }, + }); + }); + }); + + describe("prerelease handling", () => { + it("handles custom preid and prerelease scenarios", async () => { + const testDir1 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "1.0.0" }, null, 2), + }); + + const { output: output1, code: code1 } = await runCommand( + [bunExe(), "pm", "version", "prerelease", "--preid", "beta", "--no-git-tag-version"], + testDir1, + ); + + expect(code1).toBe(0); + expect(output1.trim()).toBe("v1.0.1-beta.0"); + + const testDir3 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "1.0.0" }, null, 2), + }); + + const { output: output3, code: code3 } = await runCommand( + [bunExe(), "pm", "version", "prerelease", "--no-git-tag-version"], + testDir3, + ); + + expect(code3).toBe(0); + expect(output3.trim()).toBe("v1.0.1-0"); + + const testDir5 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "1.0.0-alpha" }, null, 2), + }); + + const { output: output5, code: code5 } = await runCommand( + [bunExe(), "pm", "version", "prerelease", "--no-git-tag-version"], + testDir5, + ); + + expect(code5).toBe(0); + expect(output5.trim()).toBe("v1.0.0-alpha.1"); + + const testDir6 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "1.0.0-3" }, null, 2), + }); + + const { output: output6, code: code6 } = await runCommand( + [bunExe(), "pm", "version", "prerelease", "--no-git-tag-version"], + testDir6, + ); + + expect(code6).toBe(0); + expect(output6.trim()).toBe("v1.0.0-4"); + }); + + it("should preserve prerelease identifiers correctly", async () => { + const scenarios = [ + { + version: "1.0.3-alpha.1", + preid: "beta", + expected: { + patch: "1.0.3-alpha.1 → 1.0.4", + minor: "1.0.3-alpha.1 → 1.1.0", + major: "1.0.3-alpha.1 → 2.0.0", + prerelease: "1.0.3-alpha.1 → 1.0.3-beta.2", + prepatch: "1.0.3-alpha.1 → 1.0.4-beta.0", + preminor: 
"1.0.3-alpha.1 → 1.1.0-beta.0", + premajor: "1.0.3-alpha.1 → 2.0.0-beta.0", + }, + }, + { + version: "1.0.3-1", + preid: "abcd", + expected: { + patch: "1.0.3-1 → 1.0.4", + minor: "1.0.3-1 → 1.1.0", + major: "1.0.3-1 → 2.0.0", + prerelease: "1.0.3-1 → 1.0.3-abcd.2", + prepatch: "1.0.3-1 → 1.0.4-abcd.0", + preminor: "1.0.3-1 → 1.1.0-abcd.0", + premajor: "1.0.3-1 → 2.0.0-abcd.0", + }, + }, + { + version: "2.5.0-rc.3", + preid: "next", + expected: { + patch: "2.5.0-rc.3 → 2.5.1", + minor: "2.5.0-rc.3 → 2.6.0", + major: "2.5.0-rc.3 → 3.0.0", + prerelease: "2.5.0-rc.3 → 2.5.0-next.4", + prepatch: "2.5.0-rc.3 → 2.5.1-next.0", + preminor: "2.5.0-rc.3 → 2.6.0-next.0", + premajor: "2.5.0-rc.3 → 3.0.0-next.0", + }, + }, + { + version: "1.0.0-a", + preid: "b", + expected: { + patch: "1.0.0-a → 1.0.1", + minor: "1.0.0-a → 1.1.0", + major: "1.0.0-a → 2.0.0", + prerelease: "1.0.0-a → 1.0.0-b.1", + prepatch: "1.0.0-a → 1.0.1-b.0", + preminor: "1.0.0-a → 1.1.0-b.0", + premajor: "1.0.0-a → 2.0.0-b.0", + }, + }, + ]; + + for (const scenario of scenarios) { + const testDir = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: scenario.version }, null, 2), + }); + + const { output, code } = await runCommand( + [bunExe(), "pm", "version", "--no-git-tag-version", `--preid=${scenario.preid}`], + testDir, + ); + + expect(code).toBe(0); + expect(output).toContain(`Current package version: v${scenario.version}`); + + for (const [incrementType, expectedTransformation] of Object.entries(scenario.expected)) { + expect(output).toContain(`${incrementType.padEnd(10)} ${expectedTransformation}`); + } + } + + const testDir2 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "1.0.3-alpha.1" }, null, 2), + }); + + const { output: output2, code: code2 } = await runCommand( + [bunExe(), "pm", "version", "--no-git-tag-version"], + testDir2, + ); + + expect(code2).toBe(0); + expect(output2).toContain("prerelease 1.0.3-alpha.1 → 1.0.3-alpha.2"); + }); + }); + + describe("lifecycle scripts", () => { + it("runs lifecycle scripts in correct order and handles failures", async () => { + const testDir1 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify( + { + name: "test", + version: "1.0.0", + scripts: { + preversion: "echo 'step1' >> lifecycle.log", + version: "echo 'step2' >> lifecycle.log", + postversion: "echo 'step3' >> lifecycle.log", + }, + }, + null, + 2, + ), + }); + + await Bun.spawn([bunExe(), "pm", "version", "patch", "--no-git-tag-version"], { + cwd: testDir1, + env: bunEnv, + stderr: "ignore", + stdout: "ignore", + }).exited; + + expect(await Bun.file(join(testDir1, "lifecycle.log")).exists()).toBe(true); + const logContent = await Bun.file(join(testDir1, "lifecycle.log")).text(); + expect(logContent.trim()).toBe("step1\nstep2\nstep3"); + + const testDir2 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify( + { + name: "test", + version: "1.0.0", + scripts: { + preversion: "echo $npm_lifecycle_event > event.log && echo $npm_lifecycle_script > script.log", + }, + }, + null, + 2, + ), + }); + + await Bun.spawn([bunExe(), "pm", "version", "patch", "--no-git-tag-version"], { + cwd: testDir2, + env: bunEnv, + stderr: "ignore", + stdout: "ignore", + }).exited; + + expect(Bun.file(join(testDir2, "event.log")).exists()).resolves.toBe(true); + expect(Bun.file(join(testDir2, "script.log")).exists()).resolves.toBe(true); + + const eventContent = await Bun.file(join(testDir2, "event.log")).text(); + const 
+ expect(eventContent.trim()).toBe("preversion"); + expect(scriptContent.trim()).toContain("echo $npm_lifecycle_event"); + + const testDir3 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify( + { + name: "test", + version: "1.0.0", + scripts: { + preversion: "exit 1", + }, + }, + null, + 2, + ), + }); + + const proc = Bun.spawn([bunExe(), "pm", "version", "minor", "--no-git-tag-version"], { + cwd: testDir3, + env: bunEnv, + stderr: "pipe", + stdout: "ignore", + }); + + await proc.exited; + expect(proc.exitCode).toBe(1); + expect(await proc.stderr.text()).toContain('script "preversion" exited with code 1'); + + const packageJson = await Bun.file(join(testDir3, "package.json")).json(); + expect(packageJson.version).toBe("1.0.0"); + + const testDir4 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify( + { + name: "test", + version: "1.0.0", + scripts: { + preversion: "mkdir -p build && echo 'built' > build/output.txt", + version: "cp build/output.txt version-output.txt", + postversion: "rm -rf build", + }, + }, + null, + 2, + ), + }); + + await Bun.spawn([bunExe(), "pm", "version", "patch", "--no-git-tag-version"], { + cwd: testDir4, + env: bunEnv, + stderr: "ignore", + stdout: "ignore", + }).exited; + + await expect(Bun.file(join(testDir4, "version-output.txt")).exists()).resolves.toBe(true); + await expect(Bun.file(join(testDir4, "build")).exists()).resolves.toBe(false); + + const content = await Bun.file(join(testDir4, "version-output.txt")).text(); + expect(content.trim()).toBe("built"); + + const testDir5 = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify( + { + name: "test", + version: "1.0.0", + scripts: { + preversion: "echo 'should not run' >> ignored.log", + version: "echo 'should not run' >> ignored.log", + postversion: "echo 'should not run' >> ignored.log", + }, + }, + null, + 2, + ), + }); + + const { output: output5, code: code5 } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version", "--ignore-scripts"], + testDir5, + ); + + expect(code5).toBe(0); + expect(output5.trim()).toBe("v1.0.1"); + + const packageJson5 = await Bun.file(join(testDir5, "package.json")).json(); + expect(packageJson5.version).toBe("1.0.1"); + + expect(await Bun.file(join(testDir5, "ignored.log")).exists()).toBe(false); + }); + }); + + describe("workspace and directory handling", () => { + it("should version workspace packages individually", async () => { + const testDir = setupMonorepoTest(); + + const { output: outputA, code: codeA } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version"], + join(testDir, "packages", "pkg-a"), + ); + + expect(codeA).toBe(0); + expect(outputA.trim()).toBe("v2.0.1"); + + const rootPackageJson = await Bun.file(`${testDir}/package.json`).json(); + expect(rootPackageJson.version).toBe("1.0.0"); + + const pkgAJson = await Bun.file(`${testDir}/packages/pkg-a/package.json`).json(); + const pkgBJson = await Bun.file(`${testDir}/packages/pkg-b/package.json`).json(); + + expect(pkgAJson.version).toBe("2.0.1"); + expect(pkgBJson.version).toBe("3.0.0"); + }); + + it("should work from subdirectories", async () => { + const testDir = tempDirWithFiles(`version-${i++}`, { + "package.json": JSON.stringify({ name: "test", version: "1.0.0" }, null, 2), + "src/index.js": "console.log('hello');", + }); + + const { output, code } = await runCommand( + [bunExe(), "pm", "version", "patch", "--no-git-tag-version"], + 
join(testDir, "src"), + ); + + expect(code).toBe(0); + expect(output.trim()).toBe("v1.0.1"); + + const packageJson = await Bun.file(`${testDir}/package.json`).json(); + expect(packageJson.version).toBe("1.0.1"); + + const monorepoDir = setupMonorepoTest(); + + await Bun.write(join(monorepoDir, "packages", "pkg-a", "lib", "index.js"), ""); + + const { output: output2, code: code2 } = await runCommand( + [bunExe(), "pm", "version", "minor", "--no-git-tag-version"], + join(monorepoDir, "packages", "pkg-a", "lib"), + ); + + expect(code2).toBe(0); + expect(output2.trim()).toBe("v2.1.0"); + + const rootJson = await Bun.file(`${monorepoDir}/package.json`).json(); + const pkgAJson = await Bun.file(`${monorepoDir}/packages/pkg-a/package.json`).json(); + const pkgBJson = await Bun.file(`${monorepoDir}/packages/pkg-b/package.json`).json(); + + expect(rootJson.version).toBe("1.0.0"); + expect(pkgAJson.version).toBe("2.1.0"); + expect(pkgBJson.version).toBe("3.0.0"); + }); + }); +}); diff --git a/test/cli/install/bunx.test.ts b/test/cli/install/bunx.test.ts index 4b0c9e609e..2bd65f5791 100644 --- a/test/cli/install/bunx.test.ts +++ b/test/cli/install/bunx.test.ts @@ -497,3 +497,23 @@ it("should handle postinstall scripts correctly with symlinked bunx", async () = expect(out.trim()).not.toContain(Bun.version); expect(exited).toBe(0); }); + +it("should handle package that requires node 24", async () => { + const subprocess = spawn({ + cmd: [bunExe(), "x", "--bun", "@angular/cli@latest", "--help"], + cwd: x_dir, + stdout: "pipe", + stdin: "inherit", + stderr: "pipe", + env, + }); + + let [err, out, exited] = await Promise.all([ + new Response(subprocess.stderr).text(), + new Response(subprocess.stdout).text(), + subprocess.exited, + ]); + expect(err).not.toContain("error:"); + expect(out.trim()).not.toContain(Bun.version); + expect(exited).toBe(0); +}); diff --git a/test/cli/install/isolated-install.test.ts b/test/cli/install/isolated-install.test.ts new file mode 100644 index 0000000000..bf8aac23f6 --- /dev/null +++ b/test/cli/install/isolated-install.test.ts @@ -0,0 +1,433 @@ +import { file, write } from "bun"; +import { afterAll, beforeAll, describe, expect, setDefaultTimeout, test } from "bun:test"; +import { existsSync, readlinkSync } from "fs"; +import { VerdaccioRegistry, bunEnv, readdirSorted, runBunInstall } from "harness"; +import { join } from "path"; + +const registry = new VerdaccioRegistry(); + +beforeAll(async () => { + setDefaultTimeout(10 * 60 * 1000); + await registry.start(); +}); + +afterAll(() => { + registry.stop(); +}); + +describe("basic", () => { + test("single dependency", async () => { + const { packageJson, packageDir } = await registry.createTestDir(); + + await write( + packageJson, + JSON.stringify({ + name: "test-pkg-1", + workspaces: { + nodeLinker: "isolated", + }, + dependencies: { + "no-deps": "1.0.0", + }, + }), + ); + + await runBunInstall(bunEnv, packageDir); + + expect(readlinkSync(join(packageDir, "node_modules", "no-deps"))).toBe( + join(".bun", "no-deps@1.0.0", "node_modules", "no-deps"), + ); + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "no-deps"))).toBe( + join("..", "no-deps@1.0.0", "node_modules", "no-deps"), + ); + expect( + await file( + join(packageDir, "node_modules", ".bun", "no-deps@1.0.0", "node_modules", "no-deps", "package.json"), + ).json(), + ).toEqual({ + name: "no-deps", + version: "1.0.0", + }); + }); + + test("scope package", async () => { + const { packageJson, packageDir } = await registry.createTestDir(); + 
+ await write( + packageJson, + JSON.stringify({ + name: "test-pkg-2", + workspaces: { + nodeLinker: "isolated", + }, + dependencies: { + "@types/is-number": "1.0.0", + }, + }), + ); + + await runBunInstall(bunEnv, packageDir); + + expect(readlinkSync(join(packageDir, "node_modules", "@types", "is-number"))).toBe( + join("..", ".bun", "@types+is-number@1.0.0", "node_modules", "@types", "is-number"), + ); + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "@types", "is-number"))).toBe( + join("..", "..", "@types+is-number@1.0.0", "node_modules", "@types", "is-number"), + ); + expect( + await file( + join( + packageDir, + "node_modules", + ".bun", + "@types+is-number@1.0.0", + "node_modules", + "@types", + "is-number", + "package.json", + ), + ).json(), + ).toEqual({ + name: "@types/is-number", + version: "1.0.0", + }); + }); + + test("transitive dependencies", async () => { + const { packageJson, packageDir } = await registry.createTestDir(); + + await write( + packageJson, + JSON.stringify({ + name: "test-pkg-3", + workspaces: { + nodeLinker: "isolated", + }, + dependencies: { + "two-range-deps": "1.0.0", + }, + }), + ); + + await runBunInstall(bunEnv, packageDir); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bun", "two-range-deps"]); + expect(readlinkSync(join(packageDir, "node_modules", "two-range-deps"))).toBe( + join(".bun", "two-range-deps@1.0.0", "node_modules", "two-range-deps"), + ); + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "two-range-deps"))).toBe( + join("..", "two-range-deps@1.0.0", "node_modules", "two-range-deps"), + ); + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "no-deps"))).toBe( + join("..", "no-deps@1.1.0", "node_modules", "no-deps"), + ); + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "@types", "is-number"))).toBe( + join("..", "..", "@types+is-number@2.0.0", "node_modules", "@types", "is-number"), + ); + expect( + await file( + join( + packageDir, + "node_modules", + ".bun", + "two-range-deps@1.0.0", + "node_modules", + "two-range-deps", + "package.json", + ), + ).json(), + ).toEqual({ + name: "two-range-deps", + version: "1.0.0", + dependencies: { + "no-deps": "^1.0.0", + "@types/is-number": ">=1.0.0", + }, + }); + expect( + await readdirSorted(join(packageDir, "node_modules", ".bun", "two-range-deps@1.0.0", "node_modules")), + ).toEqual(["@types", "no-deps", "two-range-deps"]); + expect( + readlinkSync( + join(packageDir, "node_modules", ".bun", "two-range-deps@1.0.0", "node_modules", "@types", "is-number"), + ), + ).toBe(join("..", "..", "..", "@types+is-number@2.0.0", "node_modules", "@types", "is-number")); + expect( + readlinkSync(join(packageDir, "node_modules", ".bun", "two-range-deps@1.0.0", "node_modules", "no-deps")), + ).toBe(join("..", "..", "no-deps@1.1.0", "node_modules", "no-deps")); + expect( + await file( + join(packageDir, "node_modules", ".bun", "no-deps@1.1.0", "node_modules", "no-deps", "package.json"), + ).json(), + ).toEqual({ + name: "no-deps", + version: "1.1.0", + }); + expect( + await file( + join( + packageDir, + "node_modules", + ".bun", + "@types+is-number@2.0.0", + "node_modules", + "@types", + "is-number", + "package.json", + ), + ).json(), + ).toEqual({ + name: "@types/is-number", + version: "2.0.0", + }); + }); +}); + +test("handles cyclic dependencies", async () => { + const { packageJson, packageDir } = await registry.createTestDir(); + + await write( + packageJson, + 
JSON.stringify({ + name: "test-pkg-cyclic", + workspaces: { + nodeLinker: "isolated", + }, + dependencies: { + "a-dep-b": "1.0.0", + }, + }), + ); + + await runBunInstall(bunEnv, packageDir); + + expect(readlinkSync(join(packageDir, "node_modules", "a-dep-b"))).toBe( + join(".bun", "a-dep-b@1.0.0", "node_modules", "a-dep-b"), + ); + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "a-dep-b"))).toBe( + join("..", "a-dep-b@1.0.0", "node_modules", "a-dep-b"), + ); + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "b-dep-a"))).toBe( + join("..", "b-dep-a@1.0.0", "node_modules", "b-dep-a"), + ); + expect( + await file( + join(packageDir, "node_modules", ".bun", "a-dep-b@1.0.0", "node_modules", "a-dep-b", "package.json"), + ).json(), + ).toEqual({ + name: "a-dep-b", + version: "1.0.0", + dependencies: { + "b-dep-a": "1.0.0", + }, + }); + + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "a-dep-b@1.0.0", "node_modules", "b-dep-a"))).toBe( + join("..", "..", "b-dep-a@1.0.0", "node_modules", "b-dep-a"), + ); + expect( + await file( + join(packageDir, "node_modules", ".bun", "a-dep-b@1.0.0", "node_modules", "b-dep-a", "package.json"), + ).json(), + ).toEqual({ + name: "b-dep-a", + version: "1.0.0", + dependencies: { + "a-dep-b": "1.0.0", + }, + }); +}); + +test("can install folder dependencies", async () => { + const { packageJson, packageDir } = await registry.createTestDir(); + + await write( + packageJson, + JSON.stringify({ + name: "test-pkg-folder-deps", + workspaces: { + nodeLinker: "isolated", + }, + dependencies: { + "folder-dep": "file:./pkg-1", + }, + }), + ); + + await write(join(packageDir, "pkg-1", "package.json"), JSON.stringify({ name: "folder-dep", version: "1.0.0" })); + + await runBunInstall(bunEnv, packageDir); + + expect(readlinkSync(join(packageDir, "node_modules", "folder-dep"))).toBe( + join(".bun", "folder-dep@file+pkg-1", "node_modules", "folder-dep"), + ); + expect( + await file( + join(packageDir, "node_modules", ".bun", "folder-dep@file+pkg-1", "node_modules", "folder-dep", "package.json"), + ).json(), + ).toEqual({ + name: "folder-dep", + version: "1.0.0", + }); + + await write(join(packageDir, "pkg-1", "index.js"), "module.exports = 'hello from pkg-1';"); + + await runBunInstall(bunEnv, packageDir, { savesLockfile: false }); + expect(readlinkSync(join(packageDir, "node_modules", "folder-dep"))).toBe( + join(".bun", "folder-dep@file+pkg-1", "node_modules", "folder-dep"), + ); + expect( + await file( + join(packageDir, "node_modules", ".bun", "folder-dep@file+pkg-1", "node_modules", "folder-dep", "index.js"), + ).text(), + ).toBe("module.exports = 'hello from pkg-1';"); +}); + +describe("isolated workspaces", () => { + test("basic", async () => { + const { packageJson, packageDir } = await registry.createTestDir(); + + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "test-pkg-workspaces", + workspaces: { + nodeLinker: "isolated", + packages: ["pkg-1", "pkg-2"], + }, + dependencies: { + "no-deps": "1.0.0", + }, + }), + ), + write( + join(packageDir, "pkg-1", "package.json"), + JSON.stringify({ + name: "pkg-1", + version: "1.0.0", + dependencies: { + "a-dep": "1.0.1", + "pkg-2": "workspace:", + "@types/is-number": "1.0.0", + }, + }), + ), + write( + join(packageDir, "pkg-2", "package.json"), + JSON.stringify({ + name: "pkg-2", + version: "1.0.0", + dependencies: { + "b-dep-a": "1.0.0", + }, + }), + ), + ]); + + await runBunInstall(bunEnv, packageDir); + + 
expect(existsSync(join(packageDir, "node_modules", "pkg-1"))).toBeFalse(); + expect(readlinkSync(join(packageDir, "pkg-1", "node_modules", "pkg-2"))).toBe(join("..", "..", "pkg-2")); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bun", "no-deps"]); + expect(readlinkSync(join(packageDir, "node_modules", "no-deps"))).toBe( + join(".bun", "no-deps@1.0.0", "node_modules", "no-deps"), + ); + + expect(await readdirSorted(join(packageDir, "pkg-1", "node_modules"))).toEqual(["@types", "a-dep", "pkg-2"]); + expect(await readdirSorted(join(packageDir, "pkg-2", "node_modules"))).toEqual(["b-dep-a"]); + expect(await readdirSorted(join(packageDir, "node_modules", ".bun"))).toEqual([ + "@types+is-number@1.0.0", + "a-dep-b@1.0.0", + "a-dep@1.0.1", + "b-dep-a@1.0.0", + "no-deps@1.0.0", + "node_modules", + ]); + + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "no-deps"))).toBe( + join("..", "no-deps@1.0.0", "node_modules", "no-deps"), + ); + expect( + await file( + join(packageDir, "node_modules", ".bun", "no-deps@1.0.0", "node_modules", "no-deps", "package.json"), + ).json(), + ).toEqual({ + name: "no-deps", + version: "1.0.0", + }); + }); +}); + +test("many transitive dependencies", async () => { + const { packageJson, packageDir } = await registry.createTestDir(); + + await write( + packageJson, + JSON.stringify({ + name: "test-pkg-many-transitive-deps", + workspaces: { + nodeLinker: "isolated", + }, + dependencies: { + "alias-loop-1": "1.0.0", + "alias-loop-2": "1.0.0", + "1-peer-dep-a": "1.0.0", + "basic-1": "1.0.0", + "is-number": "1.0.0", + }, + }), + ); + + await runBunInstall(bunEnv, packageDir); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ + ".bun", + "1-peer-dep-a", + "alias-loop-1", + "alias-loop-2", + "basic-1", + "is-number", + ]); + expect(readlinkSync(join(packageDir, "node_modules", "alias-loop-1"))).toBe( + join(".bun", "alias-loop-1@1.0.0", "node_modules", "alias-loop-1"), + ); + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "alias-loop-1"))).toBe( + join("..", "alias-loop-1@1.0.0", "node_modules", "alias-loop-1"), + ); + expect(readlinkSync(join(packageDir, "node_modules", ".bun", "node_modules", "alias-loop-2"))).toBe( + join("..", "alias-loop-2@1.0.0", "node_modules", "alias-loop-2"), + ); + expect( + await file( + join(packageDir, "node_modules", ".bun", "alias-loop-1@1.0.0", "node_modules", "alias-loop-1", "package.json"), + ).json(), + ).toEqual({ + name: "alias-loop-1", + version: "1.0.0", + dependencies: { + "alias1": "npm:alias-loop-2@*", + }, + }); + expect( + await file( + join(packageDir, "node_modules", ".bun", "alias-loop-2@1.0.0", "node_modules", "alias-loop-2", "package.json"), + ).json(), + ).toEqual({ + name: "alias-loop-2", + version: "1.0.0", + dependencies: { + "alias2": "npm:alias-loop-1@*", + }, + }); + // expect(await readdirSorted(join(packageDir, "node_modules", ".bun", "alias-loop-1@1.0.0", "node_modules"))).toEqual([ + // "alias1", + // "alias-loop-1", + // ]); + // expect(readlinkSync(join(packageDir, "node_modules", ".bun", "alias-loop-1@1.0.0", "node_modules", "alias1"))).toBe( + // join("..", "..", "alias-loop-2@1.0.0", "node_modules", "alias-loop-2"), + // ); + // expect(readlinkSync(join(packageDir, "node_modules", ".bun", "alias-loop-2@1.0.0", "node_modules", "alias2"))).toBe( + // join("..", "..", "alias-loop-1@1.0.0", "node_modules", "alias-loop-1"), + // ); +}); diff --git 
a/test/cli/install/registry/packages/a-dep-b/a-dep-b-1.0.0.tgz b/test/cli/install/registry/packages/a-dep-b/a-dep-b-1.0.0.tgz new file mode 100644 index 0000000000..a97705bcbe Binary files /dev/null and b/test/cli/install/registry/packages/a-dep-b/a-dep-b-1.0.0.tgz differ diff --git a/test/cli/install/registry/packages/a-dep-b/package.json b/test/cli/install/registry/packages/a-dep-b/package.json new file mode 100644 index 0000000000..6313607fcf --- /dev/null +++ b/test/cli/install/registry/packages/a-dep-b/package.json @@ -0,0 +1,44 @@ +{ + "name": "a-dep-b", + "versions": { + "1.0.0": { + "name": "a-dep-b", + "version": "1.0.0", + "dependencies": { + "b-dep-a": "1.0.0" + }, + "_id": "a-dep-b@1.0.0", + "_integrity": "sha512-PW1l4ruYaxcIw4rMkOVzb9zcR2srZhTPv2H2aH7QFc7vVxkD7EEMGHg1GPT8ycLFb8vriydUXEPwOy1FcbodaQ==", + "_nodeVersion": "22.6.0", + "_npmVersion": "10.8.3", + "integrity": "sha512-PW1l4ruYaxcIw4rMkOVzb9zcR2srZhTPv2H2aH7QFc7vVxkD7EEMGHg1GPT8ycLFb8vriydUXEPwOy1FcbodaQ==", + "shasum": "ed69ada9bf7341ed905c41f1282bd87713cc315f", + "dist": { + "integrity": "sha512-PW1l4ruYaxcIw4rMkOVzb9zcR2srZhTPv2H2aH7QFc7vVxkD7EEMGHg1GPT8ycLFb8vriydUXEPwOy1FcbodaQ==", + "shasum": "ed69ada9bf7341ed905c41f1282bd87713cc315f", + "tarball": "http://http://localhost:4873/a-dep-b/-/a-dep-b-1.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2025-06-01T20:45:08.728Z", + "created": "2025-06-01T20:45:08.728Z", + "1.0.0": "2025-06-01T20:45:08.728Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "a-dep-b-1.0.0.tgz": { + "shasum": "ed69ada9bf7341ed905c41f1282bd87713cc315f", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "a-dep-b", + "readme": "" +} \ No newline at end of file diff --git a/test/cli/install/registry/packages/b-dep-a/b-dep-a-1.0.0.tgz b/test/cli/install/registry/packages/b-dep-a/b-dep-a-1.0.0.tgz new file mode 100644 index 0000000000..7dd885257c Binary files /dev/null and b/test/cli/install/registry/packages/b-dep-a/b-dep-a-1.0.0.tgz differ diff --git a/test/cli/install/registry/packages/b-dep-a/package.json b/test/cli/install/registry/packages/b-dep-a/package.json new file mode 100644 index 0000000000..349d09efc8 --- /dev/null +++ b/test/cli/install/registry/packages/b-dep-a/package.json @@ -0,0 +1,44 @@ +{ + "name": "b-dep-a", + "versions": { + "1.0.0": { + "name": "b-dep-a", + "version": "1.0.0", + "dependencies": { + "a-dep-b": "1.0.0" + }, + "_id": "b-dep-a@1.0.0", + "_integrity": "sha512-1owp4Wy5QE893BGgjDQGZm9Oayk38MA++fXmPTQA1WY/NFQv7CcCVpK2Ht/4mU4KejDeHOxaAj7qbzv1dSQA2w==", + "_nodeVersion": "22.6.0", + "_npmVersion": "10.8.3", + "integrity": "sha512-1owp4Wy5QE893BGgjDQGZm9Oayk38MA++fXmPTQA1WY/NFQv7CcCVpK2Ht/4mU4KejDeHOxaAj7qbzv1dSQA2w==", + "shasum": "3d94682ad5231596f47745e03ef3d59af5945e1d", + "dist": { + "integrity": "sha512-1owp4Wy5QE893BGgjDQGZm9Oayk38MA++fXmPTQA1WY/NFQv7CcCVpK2Ht/4mU4KejDeHOxaAj7qbzv1dSQA2w==", + "shasum": "3d94682ad5231596f47745e03ef3d59af5945e1d", + "tarball": "http://http://localhost:4873/b-dep-a/-/b-dep-a-1.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2025-06-01T20:45:23.481Z", + "created": "2025-06-01T20:45:23.481Z", + "1.0.0": "2025-06-01T20:45:23.481Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "b-dep-a-1.0.0.tgz": { + "shasum": "3d94682ad5231596f47745e03ef3d59af5945e1d", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "b-dep-a", + "readme": "" +} \ 
No newline at end of file diff --git a/test/cli/install/registry/packages/diff-peer-1/diff-peer-1-1.0.0.tgz b/test/cli/install/registry/packages/diff-peer-1/diff-peer-1-1.0.0.tgz new file mode 100644 index 0000000000..6efe2d9c80 Binary files /dev/null and b/test/cli/install/registry/packages/diff-peer-1/diff-peer-1-1.0.0.tgz differ diff --git a/test/cli/install/registry/packages/diff-peer-1/package.json b/test/cli/install/registry/packages/diff-peer-1/package.json new file mode 100644 index 0000000000..3630c213f8 --- /dev/null +++ b/test/cli/install/registry/packages/diff-peer-1/package.json @@ -0,0 +1,42 @@ +{ + "name": "diff-peer-1", + "versions": { + "1.0.0": { + "name": "diff-peer-1", + "version": "1.0.0", + "dependencies": { + "has-peer": "1.0.0", + "peer-no-deps": "1.0.0" + }, + "_id": "diff-peer-1@1.0.0", + "_nodeVersion": "23.10.0", + "_npmVersion": "10.9.2", + "dist": { + "integrity": "sha512-a9nTh3aUOE6VDmn23Q9v6JUqBGnsnSBGcZ7P5Qff+5YuJ3KhWd0rbY/+DLDpwO7zAsTzKP1Bs9KtWDwQHzocVA==", + "shasum": "2a72f1f0e12b5a7790c26cce6b0e018b47e06c90", + "tarball": "http://localhost:4873/diff-peer-1/-/diff-peer-1-1.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2025-06-08T19:48:23.111Z", + "created": "2025-06-08T19:48:23.111Z", + "1.0.0": "2025-06-08T19:48:23.111Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "diff-peer-1-1.0.0.tgz": { + "shasum": "2a72f1f0e12b5a7790c26cce6b0e018b47e06c90", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "diff-peer-1", + "readme": "ERROR: No README data found!" +} \ No newline at end of file diff --git a/test/cli/install/registry/packages/diff-peer-2/diff-peer-2-1.0.0.tgz b/test/cli/install/registry/packages/diff-peer-2/diff-peer-2-1.0.0.tgz new file mode 100644 index 0000000000..20e7f30b3f Binary files /dev/null and b/test/cli/install/registry/packages/diff-peer-2/diff-peer-2-1.0.0.tgz differ diff --git a/test/cli/install/registry/packages/diff-peer-2/package.json b/test/cli/install/registry/packages/diff-peer-2/package.json new file mode 100644 index 0000000000..73646097ba --- /dev/null +++ b/test/cli/install/registry/packages/diff-peer-2/package.json @@ -0,0 +1,42 @@ +{ + "name": "diff-peer-2", + "versions": { + "1.0.0": { + "name": "diff-peer-2", + "version": "1.0.0", + "dependencies": { + "has-peer": "1.0.0", + "peer-no-deps": "1.0.1" + }, + "_id": "diff-peer-2@1.0.0", + "_nodeVersion": "23.10.0", + "_npmVersion": "10.9.2", + "dist": { + "integrity": "sha512-SPuo1oUuIxLXS9SJa35qU74g3rhBuK5mbdI1HGdRKQJXByDrF+msNAitd0v1g+tDknVHP9otSZllp7XelLQorQ==", + "shasum": "4d0819fe19cb838ed81b346c1f07b823158a541f", + "tarball": "http://localhost:4873/diff-peer-2/-/diff-peer-2-1.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2025-06-08T19:48:32.766Z", + "created": "2025-06-08T19:48:32.766Z", + "1.0.0": "2025-06-08T19:48:32.766Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "diff-peer-2-1.0.0.tgz": { + "shasum": "4d0819fe19cb838ed81b346c1f07b823158a541f", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "diff-peer-2", + "readme": "ERROR: No README data found!" 
+} \ No newline at end of file diff --git a/test/cli/install/registry/packages/has-peer/has-peer-1.0.0.tgz b/test/cli/install/registry/packages/has-peer/has-peer-1.0.0.tgz new file mode 100644 index 0000000000..e5796727d9 Binary files /dev/null and b/test/cli/install/registry/packages/has-peer/has-peer-1.0.0.tgz differ diff --git a/test/cli/install/registry/packages/has-peer/package.json b/test/cli/install/registry/packages/has-peer/package.json new file mode 100644 index 0000000000..bd11b78261 --- /dev/null +++ b/test/cli/install/registry/packages/has-peer/package.json @@ -0,0 +1,41 @@ +{ + "name": "has-peer", + "versions": { + "1.0.0": { + "name": "has-peer", + "version": "1.0.0", + "peerDependencies": { + "peer-no-deps": "^1.0.0" + }, + "_id": "has-peer@1.0.0", + "_nodeVersion": "23.10.0", + "_npmVersion": "10.9.2", + "dist": { + "integrity": "sha512-Q7Sg8KeLCUYEurarnoM/c31svn1IvmwYtkZ7DQdzJg4qzONeXs5u/q32iguDmzGS330ch/GnTiwnUVdhIuB8cQ==", + "shasum": "e0a4f8b2812eec8eada2aef68b71cdf572236702", + "tarball": "http://localhost:4873/has-peer/-/has-peer-1.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2025-06-08T19:49:59.426Z", + "created": "2025-06-08T19:49:59.426Z", + "1.0.0": "2025-06-08T19:49:59.426Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "has-peer-1.0.0.tgz": { + "shasum": "e0a4f8b2812eec8eada2aef68b71cdf572236702", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "has-peer", + "readme": "ERROR: No README data found!" +} \ No newline at end of file diff --git a/test/cli/install/registry/packages/peer-no-deps/package.json b/test/cli/install/registry/packages/peer-no-deps/package.json new file mode 100644 index 0000000000..844368ab96 --- /dev/null +++ b/test/cli/install/registry/packages/peer-no-deps/package.json @@ -0,0 +1,74 @@ +{ + "name": "peer-no-deps", + "versions": { + "1.0.0": { + "name": "peer-no-deps", + "version": "1.0.0", + "_id": "peer-no-deps@1.0.0", + "_nodeVersion": "23.10.0", + "_npmVersion": "10.9.2", + "dist": { + "integrity": "sha512-SfaNgbuAdCAj30SPPmdUNQLMFYoQcBD2dS7cxyv+dutkDyCY/ZzxGwK2syEkzN7QuZNdXouiNRx43mdxC/YpfA==", + "shasum": "508a718b20f2e452919a86fc2add84c008f120d2", + "tarball": "http://localhost:4873/peer-no-deps/-/peer-no-deps-1.0.0.tgz" + }, + "contributors": [] + }, + "1.0.1": { + "name": "peer-no-deps", + "version": "1.0.1", + "_id": "peer-no-deps@1.0.1", + "_nodeVersion": "23.10.0", + "_npmVersion": "10.9.2", + "dist": { + "integrity": "sha512-V/R/oUJEjX8GWwGs6Ayye+6alHjRj0eKkpDJPzywgUjTt0iQIaTDSRCgieMfHLgB1JSFs2ogyppAXX5cwQ7lWw==", + "shasum": "7f21c80e4f2ec05c453a73aa78f995e21d8008d1", + "tarball": "http://localhost:4873/peer-no-deps/-/peer-no-deps-1.0.1.tgz" + }, + "contributors": [] + }, + "2.0.0": { + "name": "peer-no-deps", + "version": "2.0.0", + "_id": "peer-no-deps@2.0.0", + "_nodeVersion": "23.10.0", + "_npmVersion": "10.9.2", + "dist": { + "integrity": "sha512-CR+AY66qH9+QUbKt7dxuH4iw36/mFIkpk1I8Lf+2DfucwGRcc0qwYswXQy+70jtz7ylHkmUMbhhgcMsIdsfK+w==", + "shasum": "5ae71b940adc2f9a1b346897183e7042591735c0", + "tarball": "http://localhost:4873/peer-no-deps/-/peer-no-deps-2.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2025-06-08T22:04:06.599Z", + "created": "2025-06-08T19:50:19.891Z", + "1.0.0": "2025-06-08T19:50:19.891Z", + "1.0.1": "2025-06-08T19:50:23.698Z", + "2.0.0": "2025-06-08T22:04:06.599Z" + }, + "users": {}, + "dist-tags": { + "latest": "2.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + 
"_attachments": { + "peer-no-deps-1.0.0.tgz": { + "shasum": "508a718b20f2e452919a86fc2add84c008f120d2", + "version": "1.0.0" + }, + "peer-no-deps-1.0.1.tgz": { + "shasum": "7f21c80e4f2ec05c453a73aa78f995e21d8008d1", + "version": "1.0.1" + }, + "peer-no-deps-2.0.0.tgz": { + "shasum": "5ae71b940adc2f9a1b346897183e7042591735c0", + "version": "2.0.0" + } + }, + "_rev": "", + "_id": "peer-no-deps", + "readme": "ERROR: No README data found!" +} \ No newline at end of file diff --git a/test/cli/install/registry/packages/peer-no-deps/peer-no-deps-1.0.0.tgz b/test/cli/install/registry/packages/peer-no-deps/peer-no-deps-1.0.0.tgz new file mode 100644 index 0000000000..5d240567bb Binary files /dev/null and b/test/cli/install/registry/packages/peer-no-deps/peer-no-deps-1.0.0.tgz differ diff --git a/test/cli/install/registry/packages/peer-no-deps/peer-no-deps-1.0.1.tgz b/test/cli/install/registry/packages/peer-no-deps/peer-no-deps-1.0.1.tgz new file mode 100644 index 0000000000..f512bfe1db Binary files /dev/null and b/test/cli/install/registry/packages/peer-no-deps/peer-no-deps-1.0.1.tgz differ diff --git a/test/cli/install/registry/packages/peer-no-deps/peer-no-deps-2.0.0.tgz b/test/cli/install/registry/packages/peer-no-deps/peer-no-deps-2.0.0.tgz new file mode 100644 index 0000000000..a610fdfec1 Binary files /dev/null and b/test/cli/install/registry/packages/peer-no-deps/peer-no-deps-2.0.0.tgz differ diff --git a/test/cli/run/garbage-env.c b/test/cli/run/garbage-env.c new file mode 100644 index 0000000000..de651b305b --- /dev/null +++ b/test/cli/run/garbage-env.c @@ -0,0 +1,133 @@ +#include +#include +#include +#include +#include +#include + +int main() { + int stdout_pipe[2], stderr_pipe[2]; + pid_t pid; + int status; + char stdout_buffer[4096] = {0}; + char stderr_buffer[4096] = {0}; + + // Create pipes for stdout and stderr + if (pipe(stdout_pipe) == -1 || pipe(stderr_pipe) == -1) { + perror("pipe"); + return 1; + } + + // Create garbage environment variables with stack buffers containing + // arbitrary bytes + char garbage1[64]; + char garbage2[64]; + char garbage3[64]; + char garbage4[64]; + char garbage5[64]; + + // Fill with arbitrary non-ASCII/UTF-8 bytes + for (int i = 0; i < 63; i++) { + garbage1[i] = (char)(0x80 + (i % 128)); // Invalid UTF-8 start bytes + garbage2[i] = (char)(0xFF - (i % 256)); // High bytes + garbage3[i] = (char)(i * 3 + 128); // Mixed garbage + garbage4[i] = (char)(0xC0 | (i & 0x1F)); // Invalid UTF-8 sequences + } + garbage1[63] = '\0'; + garbage2[63] = '\0'; + garbage3[63] = '\0'; + garbage4[63] = '\0'; + + for (int i = 0; i < 10; i++) { + garbage5[i] = (char)(0x80 + (i % 128)); + } + garbage5[10] = '='; + garbage5[11] = 0x81; + garbage5[12] = 0xF5; + garbage5[13] = 0xC1; + garbage5[14] = 0xC2; + + char *garbage_env[] = { + garbage5, + // garbage1, + // garbage2, + // garbage3, + // garbage4, + "PATH=/usr/bin:/bin", // Keep PATH so we can find commands + "BUN_DEBUG_QUIET_LOGS=1", "OOGA=booga", "OOGA=laskdjflsdf", NULL}; + + pid = fork(); + + if (pid == -1) { + perror("fork"); + return 1; + } + + if (pid == 0) { + // Child process + close(stdout_pipe[0]); // Close read end + close(stderr_pipe[0]); // Close read end + + // Redirect stdout and stderr to pipes + dup2(stdout_pipe[1], STDOUT_FILENO); + dup2(stderr_pipe[1], STDERR_FILENO); + + close(stdout_pipe[1]); + close(stderr_pipe[1]); + + char *BUN_PATH = getenv("BUN_PATH"); + if (BUN_PATH == NULL) { + fprintf(stderr, "Missing BUN_PATH!\n"); + fflush(stderr); + exit(1); + } + execve(BUN_PATH, + (char *[]){"bun-debug", 
"-e", "console.log(process.env)", NULL}, + garbage_env); + + // If both fail, exit with error + perror("execve"); + exit(127); + } else { + // Parent process + close(stdout_pipe[1]); // Close write end + close(stderr_pipe[1]); // Close write end + + // Read from stdout pipe + ssize_t stdout_bytes = + read(stdout_pipe[0], stdout_buffer, sizeof(stdout_buffer) - 1); + if (stdout_bytes > 0) { + stdout_buffer[stdout_bytes] = '\0'; + } + + // Read from stderr pipe + ssize_t stderr_bytes = + read(stderr_pipe[0], stderr_buffer, sizeof(stderr_buffer) - 1); + if (stderr_bytes > 0) { + stderr_buffer[stderr_bytes] = '\0'; + } + + close(stdout_pipe[0]); + close(stderr_pipe[0]); + + // Wait for child process + waitpid(pid, &status, 0); + + // Print results + printf("=== PROCESS OUTPUT ===\n"); + printf("Exit code: %d\n", WEXITSTATUS(status)); + + printf("\n=== STDOUT ===\n"); + printf("%s", stdout_buffer); + fflush(stdout); + + if (stderr_bytes > 0) { + fprintf(stderr, "\n=== STDERR ===\n"); + fprintf(stderr, "%s", stderr_buffer); + fflush(stderr); + } + exit(status); + } + + return 0; +} \ No newline at end of file diff --git a/test/cli/run/garbage-env.test.ts b/test/cli/run/garbage-env.test.ts new file mode 100644 index 0000000000..299e0f213b --- /dev/null +++ b/test/cli/run/garbage-env.test.ts @@ -0,0 +1,25 @@ +import { describe, expect, test } from "bun:test"; +import { bunExe, isPosix } from "harness"; +import path from "path"; + +describe.if(isPosix)("garbage env", () => { + test("garbage env", async () => { + const cfile = path.join(import.meta.dirname, "garbage-env.c"); + { + const cc = Bun.which("clang") || Bun.which("gcc") || Bun.which("cc"); + const { exitCode, stderr } = await Bun.$`${cc} -o garbage-env ${cfile}`; + const stderrText = stderr.toString(); + if (stderrText.length > 0) { + console.error(stderrText); + } + expect(exitCode).toBe(0); + } + + const { exitCode, stderr } = await Bun.$`./garbage-env`.env({ BUN_PATH: bunExe() }); + const stderrText = stderr.toString(); + if (stderrText.length > 0) { + console.error(stderrText); + } + expect(exitCode).toBe(0); + }); +}); diff --git a/test/cli/run/run-eval.test.ts b/test/cli/run/run-eval.test.ts index 6df01acc37..90219278ca 100644 --- a/test/cli/run/run-eval.test.ts +++ b/test/cli/run/run-eval.test.ts @@ -64,6 +64,15 @@ for (const flag of ["-e", "--print"]) { testProcessArgv(["--", "abc", "def"], [exe, "abc", "def"]); // testProcessArgv(["--", "abc", "--", "def"], [exe, "abc", "--", "def"]); }); + + test("process._eval", async () => { + const code = flag === "--print" ? "process._eval" : "console.log(process._eval)"; + const { stdout } = Bun.spawnSync({ + cmd: [bunExe(), flag, code], + env: bunEnv, + }); + expect(stdout.toString("utf8")).toEqual(code + "\n"); + }); }); } @@ -140,6 +149,18 @@ function group(run: (code: string) => SyncSubprocess<"pipe", "inherit">) { const exe = isWindows ? 
bunExe().replaceAll("/", "\\") : bunExe(); expect(JSON.parse(stdout.toString("utf8"))).toEqual([exe, "-"]); }); + + test("process._eval", async () => { + const code = "console.log(process._eval)"; + const { stdout } = run(code); + + // the file piping one on windows can include extra carriage returns + if (isWindows) { + expect(stdout.toString("utf8")).toInclude(code); + } else { + expect(stdout.toString("utf8")).toEqual(code + "\n"); + } + }); } describe("bun run - < file-path.js", () => { @@ -196,3 +217,18 @@ describe("echo | bun run -", () => { group(run); }); + +test("process._eval (undefined for normal run)", async () => { + const cwd = tmpdirSync(); + const file = join(cwd, "test.js"); + writeFileSync(file, "console.log(typeof process._eval)"); + + const { stdout } = Bun.spawnSync({ + cmd: [bunExe(), "run", file], + cwd: cwd, + env: bunEnv, + }); + expect(stdout.toString("utf8")).toEqual("undefined\n"); + + rmSync(cwd, { recursive: true, force: true }); +}); diff --git a/test/cli/test/__snapshots__/coverage.test.ts.snap b/test/cli/test/__snapshots__/coverage.test.ts.snap index 12122efc4d..fa77d0dba3 100644 --- a/test/cli/test/__snapshots__/coverage.test.ts.snap +++ b/test/cli/test/__snapshots__/coverage.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`lcov coverage reporter 1`] = ` "TN: diff --git a/test/cli/test/bun-test.test.ts b/test/cli/test/bun-test.test.ts index 18f47e3388..b32c0fdfab 100644 --- a/test/cli/test/bun-test.test.ts +++ b/test/cli/test/bun-test.test.ts @@ -883,6 +883,61 @@ describe("bun test", () => { test.todo("check formatting for %p", () => {}); }); + test("Prints error when no test matches", () => { + const stderr = runTest({ + args: ["-t", "not-a-test"], + input: ` + import { test, expect } from "bun:test"; + test("test", () => {}); + `, + expectExitCode: 1, + }); + expect( + stderr + .replace(/bun-test-(.*)\.test\.ts/, "bun-test-*.test.ts") + .trim() + .replace(/\[.*\ms\]/, "[xx ms]"), + ).toMatchInlineSnapshot(` + "bun-test-*.test.ts: + + error: regex "not-a-test" matched 0 tests. 
Searched 1 file (skipping 1 test) [xx ms]" + `); + }); + + test("Does not print the regex error when a test fails", () => { + const stderr = runTest({ + args: ["-t", "not-a-test"], + input: ` + import { test, expect } from "bun:test"; + test("not-a-test", () => { + expect(false).toBe(true); + }); + `, + expectExitCode: 1, + }); + expect(stderr).not.toContain("error: regex"); + expect(stderr).toContain("1 fail"); + }); + + test("Does not print the regex error when a test matches and a test passes", () => { + const stderr = runTest({ + args: ["-t", "not-a-test"], + input: ` + import { test, expect } from "bun:test"; + test("not-a-test", () => { + expect(false).toBe(true); + }); + test("not-a-test", () => { + expect(true).toBe(true); + }); + `, + expectExitCode: 1, + }); + expect(stderr).not.toContain("error: regex"); + expect(stderr).toContain("1 fail"); + expect(stderr).toContain("1 pass"); + }); + test("path to a non-test.ts file will work", () => { const stderr = runTest({ args: ["./index.ts"], @@ -944,21 +999,26 @@ function runTest({ cwd, args = [], env = {}, + expectExitCode = undefined, }: { input?: string | (string | { filename: string; contents: string })[]; cwd?: string; args?: string[]; env?: Record; + expectExitCode?: number; } = {}): string { cwd ??= createTest(input); try { - const { stderr } = spawnSync({ + const { stderr, exitCode } = spawnSync({ cwd, cmd: [bunExe(), "test", ...args], env: { ...bunEnv, ...env }, stderr: "pipe", stdout: "ignore", }); + if (expectExitCode !== undefined) { + expect(exitCode).toBe(expectExitCode); + } return stderr.toString(); } finally { rmSync(cwd, { recursive: true }); diff --git a/test/config/bunfig/fixtures/preload/many/index.ts b/test/config/bunfig/fixtures/preload/many/index.ts new file mode 100644 index 0000000000..8f356d1362 --- /dev/null +++ b/test/config/bunfig/fixtures/preload/many/index.ts @@ -0,0 +1 @@ +console.log(globalThis.preload); \ No newline at end of file diff --git a/test/config/bunfig/fixtures/preload/many/preload1.ts b/test/config/bunfig/fixtures/preload/many/preload1.ts new file mode 100644 index 0000000000..63141e2cd2 --- /dev/null +++ b/test/config/bunfig/fixtures/preload/many/preload1.ts @@ -0,0 +1 @@ +(globalThis.preload ??= []).push("multi/preload1.ts"); diff --git a/test/config/bunfig/fixtures/preload/many/preload2.ts b/test/config/bunfig/fixtures/preload/many/preload2.ts new file mode 100644 index 0000000000..59d054a998 --- /dev/null +++ b/test/config/bunfig/fixtures/preload/many/preload2.ts @@ -0,0 +1 @@ +(globalThis.preload ??= []).push("multi/preload2.ts"); diff --git a/test/config/bunfig/fixtures/preload/many/preload3.ts b/test/config/bunfig/fixtures/preload/many/preload3.ts new file mode 100644 index 0000000000..c5da4a3366 --- /dev/null +++ b/test/config/bunfig/fixtures/preload/many/preload3.ts @@ -0,0 +1 @@ +(globalThis.preload ??= []).push("multi/preload3.ts"); diff --git a/test/config/bunfig/preload.test.ts b/test/config/bunfig/preload.test.ts index 895be5ea57..44daf6c91d 100644 --- a/test/config/bunfig/preload.test.ts +++ b/test/config/bunfig/preload.test.ts @@ -7,13 +7,17 @@ const fixturePath = (...segs: string[]) => resolve(import.meta.dirname, "fixture type Opts = { args?: string[]; cwd?: string; + env?: Record; }; type Out = [stdout: string, stderr: string, exitCode: number]; -const run = (file: string, { args = [], cwd }: Opts = {}): Promise => { +const run = (file: string, { args = [], cwd, env = {} }: Opts = {}): Promise => { const res = Bun.spawn([bunExe(), ...args, file], { cwd, stdio: ["ignore", 
"pipe", "pipe"], - env: bunEnv, + env: { + ...env, + ...bunEnv, + }, } satisfies SpawnOptions.OptionsObject<"ignore", "pipe", "pipe">); return Promise.all([ @@ -134,3 +138,65 @@ describe("Given a `bunfig.toml` file with a relative path without a leading './' expect(code).toBe(0); }); }); // + +describe("Test that all the aliases for --preload work", () => { + const dir = fixturePath("many"); + + it.each(["--preload=./preload1.ts", "--require=./preload1.ts", "--import=./preload1.ts"])( + "When `bun run` is run with %s, the preload is executed", + async flag => { + const [out, err, code] = await run("index.ts", { args: [flag], cwd: dir }); + expect(err).toBeEmpty(); + expect(out).toBe('[ "multi/preload1.ts" ]'); + expect(code).toBe(0); + }, + ); + + it.each(["1", "2", "3", "4"])( + "When multiple preload flags are used, they execute in order: --preload, --require, --import (#%s)", + async i => { + let args: string[] = []; + if (i === "1") args = ["--preload", "./preload1.ts", "--require", "./preload2.ts", "--import", "./preload3.ts"]; + if (i === "2") args = ["--import", "./preload3.ts", "--preload=./preload1.ts", "--require", "./preload2.ts"]; + if (i === "3") args = ["--require", "./preload2.ts", "--import", "./preload3.ts", "--preload", "./preload1.ts"]; + if (i === "4") args = ["--require", "./preload1.ts", "--import", "./preload3.ts", "--require", "./preload2.ts"]; + const [out, err, code] = await run("index.ts", { args, cwd: dir }); + expect(err).toBeEmpty(); + expect(out).toBe('[ "multi/preload1.ts", "multi/preload2.ts", "multi/preload3.ts" ]'); + expect(code).toBe(0); + }, + ); + + it("Duplicate preload flags are only executed once", async () => { + const args = ["--preload", "./preload1.ts", "--require", "./preload1.ts", "--import", "./preload1.ts"]; + const [out, err, code] = await run("index.ts", { args, cwd: dir }); + expect(err).toBeEmpty(); + expect(out).toBe('[ "multi/preload1.ts" ]'); + expect(code).toBe(0); + }); + + it("Test double preload flags", async () => { + const dir = fixturePath("many"); + const args = [ + "--preload", + "./preload1.ts", + "--preload=./preload2.ts", + "--preload", + "./preload3.ts", + "-r", + "./preload3.ts", + ]; + const [out, err, code] = await run("index.ts", { args, cwd: dir }); + expect(err).toBeEmpty(); + expect(out).toMatchInlineSnapshot(`"[ "multi/preload1.ts", "multi/preload2.ts", "multi/preload3.ts" ]"`); + expect(code).toBe(0); + }); +}); // + +test("Test BUN_INSPECT_PRELOAD is used to set preloads", async () => { + const dir = fixturePath("many"); + const [out, err, code] = await run("index.ts", { args: [], cwd: dir, env: { BUN_INSPECT_PRELOAD: "./preload1.ts" } }); + expect(err).toBeEmpty(); + expect(out).toMatchInlineSnapshot(`"[ "multi/preload1.ts" ]"`); + expect(code).toBe(0); +}); // diff --git a/test/harness.ts b/test/harness.ts index ee3cb2c0cb..be7dc5b950 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -64,7 +64,7 @@ export const bunEnv: NodeJS.Dict = { const ciEnv = { ...bunEnv }; if (isASAN) { - bunEnv.ASAN_OPTIONS ??= "allow_user_segv_handler=1"; + bunEnv.ASAN_OPTIONS ??= "allow_user_segv_handler=1:disable_coredump=0"; } if (isWindows) { @@ -262,6 +262,12 @@ export function tempDirWithFiles( return base; } +export function tempDirWithFilesAnon(filesOrAbsolutePathToCopyFolderFrom: DirectoryTree | string): string { + const base = tmpdirSync(); + makeTreeSync(base, filesOrAbsolutePathToCopyFolderFrom); + return base; +} + export function bunRun(file: string, env?: Record | NodeJS.ProcessEnv) { var path = require("path"); 
const result = Bun.spawnSync([bunExe(), file], { @@ -1119,7 +1125,7 @@ export function tmpdirSync(pattern: string = "bun.test."): string { } export async function runBunInstall( - env: NodeJS.ProcessEnv, + env: NodeJS.Dict, cwd: string, options?: { allowWarnings?: boolean; @@ -1207,7 +1213,7 @@ export async function runBunUpdate( return { out: out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/), err, exitCode }; } -export async function pack(cwd: string, env: NodeJS.ProcessEnv, ...args: string[]) { +export async function pack(cwd: string, env: NodeJS.Dict, ...args: string[]) { const { stdout, stderr, exited } = Bun.spawn({ cmd: [bunExe(), "pm", "pack", ...args], cwd, @@ -1641,7 +1647,7 @@ export class VerdaccioRegistry { async writeBunfig(dir: string, opts: BunfigOpts = {}) { let bunfig = ` [install] - cache = "${join(dir, ".bun-cache")}" + cache = "${join(dir, ".bun-cache").replaceAll("\\", "\\\\")}" `; if ("saveTextLockfile" in opts) { bunfig += `saveTextLockfile = ${opts.saveTextLockfile} diff --git a/test/integration/bun-types/bun-types.test.ts b/test/integration/bun-types/bun-types.test.ts index 4564d0051b..d8e355a007 100644 --- a/test/integration/bun-types/bun-types.test.ts +++ b/test/integration/bun-types/bun-types.test.ts @@ -29,7 +29,7 @@ beforeAll(async () => { await $` cd ${BUN_TYPES_PACKAGE_ROOT} bun install - + # temp package.json with @types/bun name and version cp package.json package.json.backup `; @@ -100,7 +100,7 @@ describe("@types/bun integration test", () => { ), ); - const p = await $` + const p = await $` cd ${FIXTURE_DIR} bun run check `; @@ -124,6 +124,10 @@ describe("@types/bun integration test", () => { "Overload 1 of 3, '(underlyingSource: UnderlyingByteSource, strategy?: { highWaterMark?: number", // This line truncates because we've seen TypeScript emit differing messages in different environments `Type '"direct"' is not assignable to type '"bytes"'`, "error TS2339: Property 'write' does not exist on type 'ReadableByteStreamController'.", + "error TS2339: Property 'json' does not exist on type 'ReadableStream>'.", + "error TS2339: Property 'bytes' does not exist on type 'ReadableStream>'.", + "error TS2339: Property 'text' does not exist on type 'ReadableStream>'.", + "error TS2339: Property 'blob' does not exist on type 'ReadableStream>'.", "websocket.ts", `error TS2353: Object literal may only specify known properties, and 'protocols' does not exist in type 'string[]'.`, diff --git a/test/integration/bun-types/fixture/[slug].tsx b/test/integration/bun-types/fixture/[slug].tsx new file mode 100644 index 0000000000..02ebd5fd7f --- /dev/null +++ b/test/integration/bun-types/fixture/[slug].tsx @@ -0,0 +1,38 @@ +import { join } from "path"; +import { expectType } from "./utilities"; + +// we're just checking types here really +declare function markdownToJSX(markdown: string): React.ReactNode; + +type Params = { + slug: string; +}; + +const Index: Bun.__experimental.SSGPage = async ({ params }) => { + expectType(params.slug).is(); + + const content = await Bun.file(join(process.cwd(), "posts", params.slug + ".md")).text(); + const node = markdownToJSX(content); + + return
              <>{node}</>
              ; +}; + +expectType(Index.displayName).is(); + +export default Index; + +export const getStaticPaths: Bun.__experimental.GetStaticPaths = async () => { + const glob = new Bun.Glob("**/*.md"); + const postsDir = join(process.cwd(), "posts"); + const paths: Bun.__experimental.SSGPaths = []; + + for (const file of glob.scanSync({ cwd: postsDir })) { + const slug = file.replace(/\.md$/, ""); + + paths.push({ + params: { slug }, + }); + } + + return { paths }; +}; diff --git a/test/integration/bun-types/fixture/spawn.ts b/test/integration/bun-types/fixture/spawn.ts index b999e6be8a..6c9273df00 100644 --- a/test/integration/bun-types/fixture/spawn.ts +++ b/test/integration/bun-types/fixture/spawn.ts @@ -49,7 +49,7 @@ function depromise(_promise: Promise): T { tsd.expectType(proc.pid).is(); - tsd.expectType(proc.stdout).is>>(); + tsd.expectType(proc.stdout).is>(); tsd.expectType(proc.stderr).is(); tsd.expectType(proc.stdin).is(); } diff --git a/test/integration/bun-types/fixture/sql.ts b/test/integration/bun-types/fixture/sql.ts index 245d4d1602..20aab93e96 100644 --- a/test/integration/bun-types/fixture/sql.ts +++ b/test/integration/bun-types/fixture/sql.ts @@ -29,12 +29,13 @@ const sql2 = new Bun.SQL("postgres://localhost:5432/mydb"); const sql3 = new Bun.SQL(new URL("postgres://localhost:5432/mydb")); const sql4 = new Bun.SQL({ url: "postgres://localhost:5432/mydb", idleTimeout: 1000 }); -const query1 = sql1`SELECT * FROM users WHERE id = ${1}`; +const query1 = sql1`SELECT * FROM users WHERE id = ${1}`; const query2 = sql2({ foo: "bar" }); query1.cancel().simple().execute().raw().values(); -const _promise: Promise = query1; +expectType(query1).extends>(); +expectType(query1).extends>(); sql1.connect(); sql1.close(); @@ -50,33 +51,74 @@ sql1.begin(async txn => { }); }); -sql1.transaction(async txn => { - txn`SELECT 3`; -}); +expectType( + sql1.transaction(async txn => { + txn`SELECT 3`; + }), +).is>(); -sql1.begin("read write", async txn => { - txn`SELECT 4`; -}); +expectType( + sql1.begin("read write", async txn => { + txn`SELECT 4`; + }), +).is>(); -sql1.transaction("read write", async txn => { - txn`SELECT 5`; -}); +expectType( + sql1.transaction("read write", async txn => { + txn`SELECT 5`; + }), +).is>(); -sql1.beginDistributed("foo", async txn => { - txn`SELECT 6`; -}); +expectType( + sql1.beginDistributed("foo", async txn => { + txn`SELECT 6`; + }), +).is>(); -sql1.distributed("bar", async txn => { - txn`SELECT 7`; -}); +expectType( + sql1.distributed("bar", async txn => { + txn`SELECT 7`; + }), +).is>(); -sql1.unsafe("SELECT * FROM users"); -sql1.file("query.sql", [1, 2, 3]); +expectType( + sql1.beginDistributed("foo", async txn => { + txn`SELECT 8`; + }), +).is>(); + +{ + const tx = await sql1.transaction(async txn => { + return [await txn<[9]>`SELECT 9`, await txn<[10]>`SELECT 10`]; + }); + + expectType(tx).is(); +} + +{ + const tx = await sql1.begin(async txn => { + return [await txn<[9]>`SELECT 9`, await txn<[10]>`SELECT 10`]; + }); + + expectType(tx).is(); +} + +{ + const tx = await sql1.distributed("name", async txn => { + return [await txn<[9]>`SELECT 9`, await txn<[10]>`SELECT 10`]; + }); + + expectType(tx).is(); +} + +expectType(sql1.unsafe("SELECT * FROM users")).is>(); +expectType(sql1.unsafe<{ id: string }[]>("SELECT * FROM users")).is>(); +expectType(sql1.file("query.sql", [1, 2, 3])).is>(); sql1.reserve().then(reserved => { reserved.release(); - reserved[Symbol.dispose]?.(); - reserved`SELECT 8`; + + expectType(reserved<[8]>`SELECT 8`).is>(); }); 
sql1.begin(async txn => { @@ -109,45 +151,48 @@ sql1.begin("read write", 123); // @ts-expect-error sql1.transaction("read write", 123); -const sqlQueryAny: Bun.SQLQuery = {} as any; -const sqlQueryNumber: Bun.SQLQuery = {} as any; -const sqlQueryString: Bun.SQLQuery = {} as any; +const sqlQueryAny: Bun.SQL.Query = {} as any; +const sqlQueryNumber: Bun.SQL.Query = {} as any; +const sqlQueryString: Bun.SQL.Query = {} as any; expectAssignable>(sqlQueryAny); expectAssignable>(sqlQueryNumber); expectAssignable>(sqlQueryString); -expectType(sqlQueryNumber).is>(); -expectType(sqlQueryString).is>(); -expectType(sqlQueryNumber).is>(); +expectType(sqlQueryNumber).is>(); +expectType(sqlQueryString).is>(); +expectType(sqlQueryNumber).is>(); const queryA = sql`SELECT 1`; -expectType(queryA).is(); +expectType(queryA).is>(); +expectType(await queryA).is(); + const queryB = sql({ foo: "bar" }); -expectType(queryB).is(); +expectType(queryB).is>(); expectType(sql).is(); -const opts2: Bun.SQLOptions = { url: "postgres://localhost" }; -expectType(opts2).is(); +const opts2 = { url: "postgres://localhost" } satisfies Bun.SQL.Options; +expectType(opts2).extends(); -const txCb: Bun.SQLTransactionContextCallback = async sql => [sql`SELECT 1`]; -const spCb: Bun.SQLSavepointContextCallback = async sql => [sql`SELECT 2`]; -expectType(txCb).is(); -expectType(spCb).is(); +const txCb = (async sql => [sql<[1]>`SELECT 1`]) satisfies Bun.SQL.TransactionContextCallback; +const spCb = (async sql => [sql<[2]>`SELECT 2`]) satisfies Bun.SQL.SavepointContextCallback; -expectType(queryA.cancel()).is(); -expectType(queryA.simple()).is(); -expectType(queryA.execute()).is(); -expectType(queryA.raw()).is(); -expectType(queryA.values()).is(); +expectType(await sql.begin(txCb)).is<[1][]>(); +expectType(await sql.begin(spCb)).is<[2][]>(); -declare const queryNum: Bun.SQLQuery; -expectType(queryNum.cancel()).is>(); -expectType(queryNum.simple()).is>(); -expectType(queryNum.execute()).is>(); -expectType(queryNum.raw()).is>(); -expectType(queryNum.values()).is>(); +expectType(queryA.cancel()).is>(); +expectType(queryA.simple()).is>(); +expectType(queryA.execute()).is>(); +expectType(queryA.raw()).is>(); +expectType(queryA.values()).is>(); + +declare const queryNum: Bun.SQL.Query; +expectType(queryNum.cancel()).is>(); +expectType(queryNum.simple()).is>(); +expectType(queryNum.execute()).is>(); +expectType(queryNum.raw()).is>(); +expectType(queryNum.values()).is>(); expectType(await queryNum.cancel()).is(); expectType(await queryNum.simple()).is(); @@ -155,29 +200,54 @@ expectType(await queryNum.execute()).is(); expectType(await queryNum.raw()).is(); expectType(await queryNum.values()).is(); -const _sqlInstance: Bun.SQL = Bun.sql; +expectType({ + password: () => "hey", + pass: async () => "hey", +}); -expectType(sql({ name: "Alice", email: "alice@example.com" })).is(); +expectType({ + password: "hey", +}); + +expectType(sql({ name: "Alice", email: "alice@example.com" })).is< + Bun.SQL.Helper<{ + name: string; + email: string; + }> +>(); expectType( sql([ { name: "Alice", email: "alice@example.com" }, { name: "Bob", email: "bob@example.com" }, ]), -).is(); +).is< + Bun.SQL.Helper<{ + name: string; + email: string; + }> +>(); -const user = { name: "Alice", email: "alice@example.com", age: 25 }; -expectType(sql(user, "name", "email")).is(); +const userWithAge = { name: "Alice", email: "alice@example.com", age: 25 }; + +expectType(sql(userWithAge, "name", "email")).is< + Bun.SQL.Helper<{ + name: string; + email: string; + }> +>(); const users = 
[ { id: 1, name: "Alice" }, { id: 2, name: "Bob" }, ]; -expectType(sql(users, "id")).is(); +expectType(sql(users, "id")).is>(); -expectType(sql([1, 2, 3])).is(); +expectType(sql([1, 2, 3])).is>(); +expectType(sql([1, 2, 3] as const)).is>(); -expectType(sql("users")).is(); +expectType(sql("users")).is>(); +expectType(sql<1>("users")).is>(); // @ts-expect-error - missing key in object sql(user, "notAKey"); @@ -190,3 +260,8 @@ sql(users, "notAKey"); // @ts-expect-error - array of numbers, extra key argument sql([1, 2, 3], "notAKey"); + +// check the deprecated stuff still exists +expectType>(); +expectType>(); +expectType>(); diff --git a/test/integration/bun-types/fixture/streams.ts b/test/integration/bun-types/fixture/streams.ts index 19020c7c23..dea6178a00 100644 --- a/test/integration/bun-types/fixture/streams.ts +++ b/test/integration/bun-types/fixture/streams.ts @@ -38,3 +38,10 @@ await writer.close(); for await (const chunk of uint8Transform.readable) { expectType(chunk).is>(); } + +declare const stream: ReadableStream; + +expectType(stream.json()).is>(); +expectType(stream.bytes()).is>(); +expectType(stream.text()).is>(); +expectType(stream.blob()).is>(); diff --git a/test/integration/bun-types/fixture/utilities.ts b/test/integration/bun-types/fixture/utilities.ts index 4a81e3acdf..881f4df3e4 100644 --- a/test/integration/bun-types/fixture/utilities.ts +++ b/test/integration/bun-types/fixture/utilities.ts @@ -26,10 +26,12 @@ export function expectType(arg: T): { * expectType(my_Uint8Array).is(); // pass * ``` */ - is(...args: IfEquals extends true ? [] : [expected: X, butGot: T]): void; + is(...args: IfEquals extends true ? [] : [expected: X, but_got: T]): void; + extends(...args: T extends X ? [] : [expected: T, but_got: X]): void; }; + export function expectType(arg?: T) { - return { is() {} }; + return { is() {}, extends() {} }; } export declare const expectAssignable: (expression: T) => void; diff --git a/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap b/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap index 0f2f17408e..e88fa04fe8 100644 --- a/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap +++ b/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`ssr works for 100-ish requests 1`] = ` { diff --git a/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap b/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap index fb0bfc5e8c..46f3ea6329 100644 --- a/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap +++ b/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` { diff --git a/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap b/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap index 1e3b9f6cba..01c4b4859a 100644 --- a/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap +++ b/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`next build works: bun 1`] = ` { diff --git 
a/test/integration/sass/__snapshots__/sass.test.ts.snap b/test/integration/sass/__snapshots__/sass.test.ts.snap index 5160965305..56d639fe8d 100644 --- a/test/integration/sass/__snapshots__/sass.test.ts.snap +++ b/test/integration/sass/__snapshots__/sass.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`sass source maps 1`] = ` { diff --git a/test/integration/svelte/__snapshots__/server-side.test.ts.snap b/test/integration/svelte/__snapshots__/server-side.test.ts.snap index 79744a952b..9f29e19be9 100644 --- a/test/integration/svelte/__snapshots__/server-side.test.ts.snap +++ b/test/integration/svelte/__snapshots__/server-side.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`When bun-plugin-svelte is enabled via Bun.plugin() can be render()-ed 1`] = ` { diff --git a/test/internal/ban-words.test.ts b/test/internal/ban-words.test.ts index f23b5535ab..5a06632661 100644 --- a/test/internal/ban-words.test.ts +++ b/test/internal/ban-words.test.ts @@ -34,15 +34,19 @@ const words: Record [String.raw`: [a-zA-Z0-9_\.\*\?\[\]\(\)]+ = undefined,`]: { reason: "Do not default a struct field to undefined", limit: 242, regex: true }, "usingnamespace": { reason: "Zig 0.15 will remove `usingnamespace`" }, - "catch unreachable": { reason: "For out-of-memory, prefer 'catch bun.outOfMemory()'", limit: 1860 }, + "catch unreachable": { reason: "For out-of-memory, prefer 'catch bun.outOfMemory()'", limit: 1865 }, - "std.fs.Dir": { reason: "Prefer bun.sys + bun.FD instead of std.fs", limit: 179 }, + "std.fs.Dir": { reason: "Prefer bun.sys + bun.FD instead of std.fs", limit: 170 }, "std.fs.cwd": { reason: "Prefer bun.FD.cwd()", limit: 102 }, "std.fs.File": { reason: "Prefer bun.sys + bun.FD instead of std.fs", limit: 62 }, ".stdFile()": { reason: "Prefer bun.sys + bun.FD instead of std.fs.File. Zig hides 'errno' when Bun wants to match libuv", limit: 18 }, - ".stdDir()": { reason: "Prefer bun.sys + bun.FD instead of std.fs.File. Zig hides 'errno' when Bun wants to match libuv", limit: 49 }, - ".arguments_old(": { reason: "Please migrate to .argumentsAsArray() or another argument API", limit: 284 }, - "// autofix": { reason: "Evaluate if this variable should be deleted entirely or explicitly discarded.", limit: 175 }, + ".stdDir()": { reason: "Prefer bun.sys + bun.FD instead of std.fs.File. Zig hides 'errno' when Bun wants to match libuv", limit: 40 }, + ".arguments_old(": { reason: "Please migrate to .argumentsAsArray() or another argument API", limit: 280 }, + "// autofix": { reason: "Evaluate if this variable should be deleted entirely or explicitly discarded.", limit: 173 }, + + "global.hasException": { reason: "Incompatible with strict exception checks. Use a CatchScope instead.", limit: 28 }, + "globalObject.hasException": { reason: "Incompatible with strict exception checks. Use a CatchScope instead.", limit: 47 }, + "globalThis.hasException": { reason: "Incompatible with strict exception checks. 
Use a CatchScope instead.", limit: 140 }, }; const words_keys = [...Object.keys(words)]; diff --git a/test/js/bun/console/__snapshots__/bun-inspect-table.test.ts.snap b/test/js/bun/console/__snapshots__/bun-inspect-table.test.ts.snap index 0dbd06b2d1..f166e92f89 100644 --- a/test/js/bun/console/__snapshots__/bun-inspect-table.test.ts.snap +++ b/test/js/bun/console/__snapshots__/bun-inspect-table.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`inspect.table { a: 1, b: 2 } 1`] = ` "┌───┬────────┐ diff --git a/test/js/bun/console/__snapshots__/console-table.test.ts.snap b/test/js/bun/console/__snapshots__/console-table.test.ts.snap index 28d4755f7e..256621eb70 100644 --- a/test/js/bun/console/__snapshots__/console-table.test.ts.snap +++ b/test/js/bun/console/__snapshots__/console-table.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`console.table expected output for: not object (number) 1`] = ` "42 diff --git a/test/js/bun/ffi/__snapshots__/cc.test.ts.snap b/test/js/bun/ffi/__snapshots__/cc.test.ts.snap index 7f08a1de50..619f2abb61 100644 --- a/test/js/bun/ffi/__snapshots__/cc.test.ts.snap +++ b/test/js/bun/ffi/__snapshots__/cc.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`can run a .c file: cc-fixture-stderr 1`] = `"Hello, World!"`; diff --git a/test/js/bun/glob/__snapshots__/scan.test.ts.snap b/test/js/bun/glob/__snapshots__/scan.test.ts.snap index 91b9cce320..b2ae59fed8 100644 --- a/test/js/bun/glob/__snapshots__/scan.test.ts.snap +++ b/test/js/bun/glob/__snapshots__/scan.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`fast-glob e2e tests patterns regular fixtures/*: fixtures/* 1`] = ` [ diff --git a/test/js/bun/http/bun-serve-html-manifest.test.ts b/test/js/bun/http/bun-serve-html-manifest.test.ts new file mode 100644 index 0000000000..7173fd64ad --- /dev/null +++ b/test/js/bun/http/bun-serve-html-manifest.test.ts @@ -0,0 +1,361 @@ +import { describe, expect, it } from "bun:test"; +import { bunEnv, bunExe, rmScope, tempDirWithFiles } from "harness"; +import { join } from "node:path"; +import { StringDecoder } from "node:string_decoder"; + +describe("Bun.serve HTML manifest", () => { + it("serves HTML import with manifest", async () => { + const dir = tempDirWithFiles("serve-html", { + "server.ts": ` + import index from "./index.html"; + + const server = Bun.serve({ + port: 0, + routes: { + "/": index, + }, + }); + + console.log("PORT=" + server.port); + + // Test the manifest structure + console.log("Manifest type:", typeof index); + console.log("Has index:", "index" in index); + console.log("Has files:", "files" in index); + if (index.files) { + console.log("File count:", index.files.length); + } + `, + "index.html": ` + + + Test + + + +

              <h1>Hello World</h1>

              + + +`, + "styles.css": `body { background: red; }`, + "app.js": `console.log("Hello from app");`, + }); + + using cleanup = { [Symbol.dispose]: () => rmScope(dir) }; + + const proc = Bun.spawn({ + cmd: [bunExe(), "run", join(dir, "server.ts")], + cwd: dir, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + stdin: "ignore", + }); + + const { stdout, stderr, exited } = proc; + + // Read stdout line by line until we get the PORT + let port: number | undefined; + const reader = stdout.getReader(); + const decoder = new StringDecoder("utf8"); + let buffer = ""; + + while (!port) { + const { done, value } = await reader.read(); + if (done) break; + + buffer += decoder.write(value); + const lines = buffer.split("\n"); + buffer = lines.pop() || ""; + + for (const line of lines) { + const portMatch = line.match(/PORT=(\d+)/); + if (portMatch) { + port = parseInt(portMatch[1]); + break; + } + } + } + + reader.releaseLock(); + expect(port).toBeDefined(); + + if (port) { + // Test the server + const res = await fetch(`http://localhost:${port}/`); + expect(res.status).toBe(200); + expect(res.headers.get("content-type")).toContain("text/html"); + + const html = await res.text(); + expect(html).toContain("Hello World"); + expect(html).toContain(" { + const dir = tempDirWithFiles("serve-html-bundled", { + "build.ts": ` + const result = await Bun.build({ + entrypoints: ["./server.ts"], + target: "bun", + outdir: "./dist", + }); + + if (!result.success) { + console.error("Build failed"); + process.exit(1); + } + + console.log("Build complete"); + `, + "server.ts": ` + import index from "./index.html"; + import about from "./about.html"; + + const server = Bun.serve({ + port: 0, + routes: { + "/": index, + "/about": about, + }, + }); + + console.log("PORT=" + server.port); + `, + "index.html": ` + + + Home + + + +

              <h1>Home Page</h1>

              + + +`, + "about.html": ` + + + About + + + +

              <h1>About Page</h1>

              + + +`, + "shared.css": `body { margin: 0; }`, + "app.js": `console.log("App loaded");`, + }); + + using cleanup = { [Symbol.dispose]: () => rmScope(dir) }; + + // Build first + const buildProc = Bun.spawn({ + cmd: [bunExe(), "run", join(dir, "build.ts")], + cwd: dir, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + stdin: "ignore", + }); + + await buildProc.exited; + expect(buildProc.exitCode).toBe(0); + + // Run the built server + const serverProc = Bun.spawn({ + cmd: [bunExe(), "run", join(dir, "dist", "server.js")], + cwd: join(dir, "dist"), + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + stdin: "ignore", + }); + + // Read stdout line by line until we get the PORT + let port: number | undefined; + const reader = serverProc.stdout.getReader(); + const decoder = new StringDecoder("utf8"); + let buffer = ""; + + while (!port) { + const { done, value } = await reader.read(); + if (done) break; + + buffer += decoder.write(value); + const lines = buffer.split("\n"); + buffer = lines.pop() || ""; + + for (const line of lines) { + const portMatch = line.match(/PORT=(\d+)/); + if (portMatch) { + port = parseInt(portMatch[1]); + break; + } + } + } + + reader.releaseLock(); + expect(port).toBeDefined(); + + if (port) { + // Test both routes + const homeRes = await fetch(`http://localhost:${port}/`); + expect(homeRes.status).toBe(200); + const homeHtml = await homeRes.text(); + expect(homeHtml).toContain("Home Page"); + + const aboutRes = await fetch(`http://localhost:${port}/about`); + expect(aboutRes.status).toBe(200); + const aboutHtml = await aboutRes.text(); + expect(aboutHtml).toContain("About Page"); + } + + serverProc.kill(); + await serverProc.exited; + }); + + it("validates manifest files exist", async () => { + const dir = tempDirWithFiles("serve-html-validate", { + "test.ts": ` + // Create a fake manifest + const fakeManifest = { + index: "./index.html", + files: [ + { + input: "index.html", + path: "./does-not-exist.html", + loader: "html", + isEntry: true, + headers: { + etag: "test123", + "content-type": "text/html;charset=utf-8" + } + } + ] + }; + + try { + const server = Bun.serve({ + port: 0, + routes: { + "/": fakeManifest, + }, + }); + console.log("ERROR: Server started when it should have failed"); + server.stop(); + } catch (error) { + console.log("SUCCESS: Manifest validation failed as expected"); + } + `, + }); + + using cleanup = { [Symbol.dispose]: () => rmScope(dir) }; + + const proc = Bun.spawn({ + cmd: [bunExe(), "run", join(dir, "test.ts")], + cwd: dir, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + stdin: "ignore", + }); + + const out = await new Response(proc.stdout).text(); + await proc.exited; + + expect(out).toContain("SUCCESS: Manifest validation failed as expected"); + }); + + it("serves manifest with proper headers", async () => { + const dir = tempDirWithFiles("serve-html-headers", { + "server.ts": ` + import index from "./index.html"; + + using server = Bun.serve({ + port: 0, + routes: { + "/": index, + }, + }); + + console.log("PORT=" + server.port); + + // Check manifest structure + if (index.files) { + for (const file of index.files) { + console.log("File:", file.path, "Loader:", file.loader); + if (file.headers) { + console.log(" Content-Type:", file.headers["content-type"]); + console.log(" Has ETag:", !!file.headers.etag); + } + } + } + `, + "index.html": ` + + + Test + + + +

              <h1>Test</h1>

              + +`, + "test.css": `h1 { color: red; }`, + }); + + using cleanup = { [Symbol.dispose]: () => rmScope(dir) }; + + // Build first to generate the manifest + await using buildProc = Bun.spawn({ + cmd: [bunExe(), "build", join(dir, "server.ts"), "--outdir", join(dir, "dist"), "--target", "bun"], + cwd: dir, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + stdin: "ignore", + }); + + await buildProc.exited; + expect(buildProc.exitCode).toBe(0); + + // Run the built server + await using proc = Bun.spawn({ + cmd: [bunExe(), "run", join(dir, "dist", "server.js")], + cwd: join(dir, "dist"), + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + stdin: "ignore", + }); + + // Read stdout line by line to collect all output + const out = await new Response(proc.stdout).text(); + expect(await proc.exited).toBe(0); + + expect( + out + .trim() + .replaceAll(/PORT=\d+/g, "PORT=99999") + .replaceAll(/.\/index-[a-z0-9]+\.js/g, "index-[hash].js") + .replaceAll(/.\/index-[a-z0-9]+\.css/g, "index-[hash].css"), + ).toMatchInlineSnapshot(` + "PORT=99999 + File: index-[hash].js Loader: js + Content-Type: text/javascript;charset=utf-8 + Has ETag: true + File: ./index.html Loader: html + Content-Type: text/html;charset=utf-8 + Has ETag: true + File: index-[hash].css Loader: css + Content-Type: text/css;charset=utf-8 + Has ETag: true" + `); + }); +}); diff --git a/test/js/bun/http/bun-serve-routes.test.ts b/test/js/bun/http/bun-serve-routes.test.ts index 9c4e29655b..f2e851bd61 100644 --- a/test/js/bun/http/bun-serve-routes.test.ts +++ b/test/js/bun/http/bun-serve-routes.test.ts @@ -561,7 +561,7 @@ it("throws a validation error when passing invalid routes", () => { }); \`\`\` - See https://bun.sh/docs/api/http for more information." + See https://bun.com/docs/api/http for more information." 
`); }); @@ -586,7 +586,7 @@ it("throws a validation error when routes object is empty and fetch is not speci return new Response("Hello") } - Learn more at https://bun.sh/docs/api/http" + Learn more at https://bun.com/docs/api/http" `); }); @@ -611,7 +611,7 @@ it("throws a validation error when routes object is undefined and fetch is not s return new Response("Hello") } - Learn more at https://bun.sh/docs/api/http" + Learn more at https://bun.com/docs/api/http" `); }); diff --git a/test/js/bun/s3/s3-storage-class.test.ts b/test/js/bun/s3/s3-storage-class.test.ts index 838e8873b1..285bc032c9 100644 --- a/test/js/bun/s3/s3-storage-class.test.ts +++ b/test/js/bun/s3/s3-storage-class.test.ts @@ -175,7 +175,7 @@ describe("s3 - Storage class", () => { const smallFile = Buffer.alloc(10 * 1024); for (let i = 0; i < 10; i++) { - await writer.write(smallFile); + writer.write(smallFile); } await writer.end(); @@ -249,7 +249,7 @@ describe("s3 - Storage class", () => { const bigFile = Buffer.alloc(10 * 1024 * 1024); for (let i = 0; i < 10; i++) { - await writer.write(bigFile); + writer.write(bigFile); } await writer.end(); diff --git a/test/js/bun/s3/s3.test.ts b/test/js/bun/s3/s3.test.ts index 5582a37b32..49e37a188a 100644 --- a/test/js/bun/s3/s3.test.ts +++ b/test/js/bun/s3/s3.test.ts @@ -3,7 +3,7 @@ import { S3Client, s3 as defaultS3, file, randomUUIDv7, which } from "bun"; import { afterEach, beforeEach, describe, expect, it } from "bun:test"; import child_process from "child_process"; import { randomUUID } from "crypto"; -import { getSecret, tempDirWithFiles } from "harness"; +import { bunRun, getSecret, tempDirWithFiles } from "harness"; import path from "path"; const s3 = (...args) => defaultS3.file(...args); const S3 = (...args) => new S3Client(...args); @@ -21,8 +21,11 @@ function isDockerEnabled(): boolean { return false; } } - -const allCredentials = [ +type S3Credentials = S3Options & { + service: string; +}; +let minioCredentials: S3Credentials | undefined; +const allCredentials: S3Credentials[] = [ { accessKeyId: getSecret("S3_R2_ACCESS_KEY"), secretAccessKey: getSecret("S3_R2_SECRET_KEY"), @@ -73,13 +76,14 @@ if (isDockerEnabled()) { stdio: "ignore", }); - allCredentials.push({ + minioCredentials = { endpoint: "http://localhost:9000", // MinIO endpoint accessKeyId: "minioadmin", secretAccessKey: "minioadmin", bucket: "buntest", service: "MinIO" as string, - }); + }; + allCredentials.push(minioCredentials); } describe("Virtual Hosted-Style", () => { @@ -565,6 +569,26 @@ for (let credentials of allCredentials) { await writer.end(); expect(await s3file.text()).toBe(mediumPayload.repeat(2)); }); + it("should be able to upload large files using flush and partSize", async () => { + const s3file = file(tmp_filename, options); + + const writer = s3file.writer({ + //@ts-ignore + partSize: mediumPayload.length, + }); + writer.write(mediumPayload); + writer.write(mediumPayload); + let total = 0; + while (true) { + const flushed = await writer.flush(); + if (flushed === 0) break; + expect(flushed).toBe(Buffer.byteLength(mediumPayload)); + total += flushed; + } + expect(total).toBe(Buffer.byteLength(mediumPayload) * 2); + await writer.end(); + expect(await s3file.text()).toBe(mediumPayload.repeat(2)); + }); it("should be able to upload large files in one go using Bun.write", async () => { { await Bun.write(file(tmp_filename, options), bigPayload); @@ -680,6 +704,26 @@ for (let credentials of allCredentials) { } }, 10_000); + it("should be able to upload large files using flush and partSize", async () 
=> { + const s3file = s3(tmp_filename, options); + + const writer = s3file.writer({ + partSize: mediumPayload.length, + }); + writer.write(mediumPayload); + writer.write(mediumPayload); + let total = 0; + while (true) { + const flushed = await writer.flush(); + if (flushed === 0) break; + expect(flushed).toBe(Buffer.byteLength(mediumPayload)); + total += flushed; + } + expect(total).toBe(Buffer.byteLength(mediumPayload) * 2); + await writer.end(); + expect(await s3file.text()).toBe(mediumPayload.repeat(2)); + }); + it("should be able to upload large files in one go using S3File.write", async () => { { const s3File = s3(tmp_filename, options); @@ -1292,3 +1336,34 @@ for (let credentials of allCredentials) { }); }); } +describe.skipIf(!minioCredentials)("http endpoint should work when using env variables", () => { + const testDir = tempDirWithFiles("minio-credential-test", { + "index.mjs": ` + import { s3, randomUUIDv7 } from "bun"; + import { expect } from "bun:test"; + const name = randomUUIDv7("hex") + ".txt"; + const s3file = s3.file(name); + await s3file.write("Hello Bun!"); + try { + const text = await s3file.text(); + expect(text).toBe("Hello Bun!"); + process.stdout.write(text); + } finally { + await s3file.unlink(); + } + `, + }); + for (const endpoint of ["S3_ENDPOINT", "AWS_ENDPOINT"]) { + it(endpoint, async () => { + const { stdout, stderr } = await bunRun(path.join(testDir, "index.mjs"), { + // @ts-ignore + [endpoint]: minioCredentials!.endpoint as string, + "S3_BUCKET": minioCredentials!.bucket as string, + "S3_ACCESS_KEY_ID": minioCredentials!.accessKeyId as string, + "S3_SECRET_ACCESS_KEY": minioCredentials!.secretAccessKey as string, + }); + expect(stderr).toBe(""); + expect(stdout).toBe("Hello Bun!"); + }); + } +}); diff --git a/test/js/bun/shell/bunshell.test.ts b/test/js/bun/shell/bunshell.test.ts index 235cf0f511..d4ec55bb0c 100644 --- a/test/js/bun/shell/bunshell.test.ts +++ b/test/js/bun/shell/bunshell.test.ts @@ -7,7 +7,7 @@ import { $ } from "bun"; import { afterAll, beforeAll, describe, expect, it, test } from "bun:test"; import { mkdir, rm, stat } from "fs/promises"; -import { bunExe, isWindows, runWithErrorPromise, tempDirWithFiles, tmpdirSync } from "harness"; +import { bunExe, isPosix, isWindows, runWithErrorPromise, tempDirWithFiles, tmpdirSync } from "harness"; import { join, sep } from "path"; import { createTestBuilder, sortedShellOutput } from "./util"; const TestBuilder = createTestBuilder(import.meta.path); @@ -582,7 +582,7 @@ bar\n`, describe("escaped_newline", () => { const printArgs = /* ts */ `console.log(JSON.stringify(process.argv))`; - TestBuilder.command/* sh */ `${BUN} run ./code.ts hi hello \ + TestBuilder.command /* sh */ `${BUN} run ./code.ts hi hello \ on a newline! ` .ensureTempDir() @@ -590,7 +590,7 @@ bar\n`, .stdout(out => expect(JSON.parse(out).slice(2)).toEqual(["hi", "hello", "on", "a", "newline!"])) .runAsTest("single"); - TestBuilder.command/* sh */ `${BUN} run ./code.ts hi hello \ + TestBuilder.command /* sh */ `${BUN} run ./code.ts hi hello \ on a newline! \ and \ a few \ @@ -603,7 +603,7 @@ bar\n`, ) .runAsTest("many"); - TestBuilder.command/* sh */ `${BUN} run ./code.ts hi hello \ + TestBuilder.command /* sh */ `${BUN} run ./code.ts hi hello \ on a newline! 
\ ooga" booga" @@ -974,6 +974,132 @@ describe("deno_task", () => { TestBuilder.command`echo 1 | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stderr)' 2> output.txt` .fileEquals("output.txt", "1\n") .runAsTest("pipe with redirect stderr to file"); + + if (isPosix) { + TestBuilder.command`ls . | echo hi`.exitCode(0).stdout("hi\n").runAsTest("broken pipe builtin"); + TestBuilder.command`grep hi src/js_parser.zig | echo hi` + .exitCode(0) + .stdout("hi\n") + .stderr("") + .runAsTest("broken pipe subproc"); + } + + TestBuilder.command`${BUN} -e 'process.exit(1)' | ${BUN} -e 'console.log("hi")'` + .exitCode(0) + .stdout("hi\n") + .runAsTest("last exit code"); + + TestBuilder.command`ls sldkfjlskdjflksdjflksjdf | ${BUN} -e 'console.log("hi")'` + .exitCode(0) + .stdout("hi\n") + .stderr("ls: sldkfjlskdjflksdjflksjdf: No such file or directory\n") + .runAsTest("last exit code"); + + TestBuilder.command`ksldfjsdflsdfjskdfjlskdjflksdf | ${BUN} -e 'console.log("hi")'` + .exitCode(0) + .stdout("hi\n") + .stderr("bun: command not found: ksldfjsdflsdfjskdfjlskdjflksdf\n") + .runAsTest("last exit code 2"); + + TestBuilder.command`echo hi | ${BUN} -e 'process.exit(69)'`.exitCode(69).stdout("").runAsTest("last exit code 3"); + + describe("pipeline stack behavior", () => { + // Test deep pipeline chains to stress the stack implementation + TestBuilder.command`echo 1 | echo 2 | echo 3 | echo 4 | echo 5 | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("5\n") + .runAsTest("deep pipeline chain"); + + // Test very deep chains that could overflow a recursion-based implementation + TestBuilder.command`echo start | echo 1 | echo 2 | echo 3 | echo 4 | echo 5 | echo 6 | echo 7 | echo 8 | echo 9 | echo 10 | echo 11 | echo 12 | echo 13 | echo 14 | echo 15 | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("15\n") + .runAsTest("very deep pipeline chain"); + + // Test nested pipelines in subshells + TestBuilder.command`echo outer | (echo inner1 | echo inner2) | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("inner2\n") + .runAsTest("nested pipeline in subshell"); + + // Test nested pipelines with command substitution + TestBuilder.command`echo $(echo nested | echo pipe) | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("pipe\n") + .runAsTest("nested pipeline in command substitution"); + + // Test multiple nested pipelines + TestBuilder.command`(echo a | echo b) | (echo c | echo d) | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("d\n") + .runAsTest("multiple nested pipelines"); + + // Test pipeline with conditional that contains another pipeline + TestBuilder.command`echo test | (echo inner | echo nested && echo after) | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("nested\nafter\n") + .runAsTest("pipeline with conditional containing pipeline"); + + // Test deeply nested subshells with pipelines + TestBuilder.command`echo start | (echo l1 | (echo l2 | (echo l3 | echo final))) | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("final\n") + .runAsTest("deeply nested subshells with pipelines"); + + // Test pipeline stack unwinding with early termination + TestBuilder.command`echo 1 | echo 2 | echo 3 | false | echo 4 | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("4\n") + .runAsTest("pipeline with failing command"); + + // Test interleaved pipelines and conditionals + TestBuilder.command`echo a | echo b && echo c | 
echo d | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("b\nd\n") + .runAsTest("interleaved pipelines and conditionals"); + + // Test pipeline with background process (when supported) + TestBuilder.command`echo foreground | echo pipe && (echo background &) | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("pipe\n") + .todo("background processes not fully supported") + .runAsTest("pipeline with background process"); + + // Test rapid pipeline creation and destruction + TestBuilder.command`echo 1 | echo 2; echo 3 | echo 4; echo 5 | echo 6 | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("2\n4\n6\n") + .runAsTest("rapid pipeline creation"); + + // Test pipeline stack with error propagation + TestBuilder.command`echo start | nonexistent_command | echo after || echo fallback` + .stdout("after\n") + .stderr("bun: command not found: nonexistent_command\n") + .runAsTest("pipeline error propagation"); + + // Test nested pipeline with mixed success/failure + TestBuilder.command`(echo success | echo works) | (nonexistent | echo backup) || echo final_fallback` + .stdout("backup\n") + .stderr(s => s.includes("command not found")) + .runAsTest("nested pipeline mixed success failure"); + + TestBuilder.command`echo 0 | echo 1 | echo 2 | echo 3 | echo 4 | echo 5 | echo 6 | echo 7 | echo 8 | echo 9 | echo 10 | echo 11 | echo 12 | echo 13 | echo 14 | echo 15 | echo 16 | echo 17 | echo 18 | echo 19 | echo 20 | echo 21 | echo 22 | echo 23 | echo 24 | echo 25 | echo 26 | echo 27 | echo 28 | echo 29 | echo 30 | echo 31 | echo 32 | echo 33 | echo 34 | echo 35 | echo 36 | echo 37 | echo 38 | echo 39 | echo 40 | echo 41 | echo 42 | echo 43 | echo 44 | echo 45 | echo 46 | echo 47 | echo 48 | echo 49 | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("49\n") + .runAsTest("long pipeline builtin"); + + TestBuilder.command`echo 0 | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | cat | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("0\n") + .runAsTest("long pipeline"); + + // Test pipeline stack consistency with complex nesting + TestBuilder.command`echo outer | (echo inner1 | echo inner2 | (echo deep1 | echo deep2) | echo inner3) | echo final | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("final\n") + .runAsTest("complex nested pipeline consistency"); + + // Test pipeline interruption and resumption + TestBuilder.command`echo start | (echo pause; echo resume) | echo end | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("end\n") + .runAsTest("pipeline interruption resumption"); + + // Test extremely deep nested pipeline - this would cause stack overflow with recursion + TestBuilder.command`echo level0 | (echo level1 | (echo level2 | (echo level3 | (echo level4 | (echo level5 | (echo level6 | (echo level7 | (echo level8 | (echo level9 | (echo level10 | (echo level11 | (echo level12 | (echo level13 | (echo level14 | (echo level15 | (echo level16 | (echo level17 | (echo level18 | (echo level19 | echo deep_final))))))))))))))))))) | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("deep_final\n") + .runAsTest("extremely deep nested pipeline"); + + // Test pathological case: deep nesting + 
long chains + TestBuilder.command`echo start | (echo n1 | echo n2 | echo n3 | (echo deep1 | echo deep2 | echo deep3 | (echo deeper1 | echo deeper2 | echo deeper3 | (echo deepest1 | echo deepest2 | echo deepest_final)))) | BUN_TEST_VAR=1 ${BUN} -e 'process.stdin.pipe(process.stdout)'` + .stdout("deepest_final\n") + .runAsTest("pathological deep nesting with long chains"); + }); }); describe("redirects", async function igodf() { @@ -1190,14 +1316,17 @@ describe("deno_task", () => { const expected = [ '\\x1b[B', '\\x0D' - ] + ].join("") let i = 0 + let buf = "" const writer = Bun.stdout.writer(); process.stdin.on("data", async chunk => { const input = chunk.toString(); - expect(input).toEqual(expected[i++]) - writer.write(input) - await writer.flush() + buf += input; + if (buf === expected) { + writer.write(buf); + await writer.flush(); + } }); `; @@ -1211,7 +1340,7 @@ describe("deno_task", () => { }); describe("if_clause", () => { - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` # The name of the package we're interested in package_name=react @@ -2070,7 +2199,7 @@ describe("subshell", () => { } }`; - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` mkdir sharp-test cd sharp-test echo ${sharppkgjson} > package.json @@ -2083,16 +2212,16 @@ describe("subshell", () => { .env(bunEnv) .runAsTest("sharp"); - TestBuilder.command/* sh */ `( ( ( ( echo HI! ) ) ) )`.stdout("HI!\n").runAsTest("multiple levels"); - TestBuilder.command/* sh */ `( + TestBuilder.command /* sh */ `( ( ( ( echo HI! ) ) ) )`.stdout("HI!\n").runAsTest("multiple levels"); + TestBuilder.command /* sh */ `( echo HELLO! ; echo HELLO AGAIN! )` .stdout("HELLO!\nHELLO AGAIN!\n") .runAsTest("multiline"); - TestBuilder.command/* sh */ `(exit 42)`.exitCode(42).runAsTest("exit code"); - TestBuilder.command/* sh */ `(exit 42); echo hi`.exitCode(0).stdout("hi\n").runAsTest("exit code 2"); - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ `(exit 42)`.exitCode(42).runAsTest("exit code"); + TestBuilder.command /* sh */ `(exit 42); echo hi`.exitCode(0).stdout("hi\n").runAsTest("exit code 2"); + TestBuilder.command /* sh */ ` VAR1=VALUE1 VAR2=VALUE2 VAR3=VALUE3 @@ -2108,7 +2237,7 @@ describe("subshell", () => { .stdout("VALUE1 VALUE2 VALUE3\nyou cant see me my time is now\nVALUE1 VALUE2 VALUE3\n") .runAsTest("copy of environment"); - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` mkdir foo ( echo $PWD @@ -2138,7 +2267,7 @@ describe("subshell", () => { TestBuilder.command`\(echo hi \)`.stderr("bun: command not found: (echo\n").exitCode(1).runAsTest("escaped subshell"); TestBuilder.command`echo \\\(hi\\\)`.stdout("\\(hi\\)\n").runAsTest("escaped subshell 2"); - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` mkdir dir ( cd dir @@ -2150,7 +2279,7 @@ describe("subshell", () => { .stdout(`$TEMP_DIR${sep}dir\n$TEMP_DIR\n`) .runAsTest("pipeline in subshell"); - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` mkdir dir (pwd) | cat (cd dir; pwd) | cat @@ -2160,7 +2289,7 @@ describe("subshell", () => { .stdout(`$TEMP_DIR\n$TEMP_DIR${sep}dir\n$TEMP_DIR\n`) .runAsTest("subshell in pipeline"); - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` mkdir dir (pwd) | cat (cd dir; pwd) | cat @@ -2170,7 +2299,7 @@ describe("subshell", () => { .stdout(`$TEMP_DIR\n$TEMP_DIR${sep}dir\n$TEMP_DIR\n`) .runAsTest("subshell in pipeline"); - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` mkdir foo ( ( (cd foo ; pwd) | cat) ) | ( ( (cat) ) | cat ) @@ -2179,7 
+2308,7 @@ describe("subshell", () => { .stdout(`$TEMP_DIR${sep}foo\n`) .runAsTest("imbricated subshells and pipelines"); - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` echo (echo) ` .error("Unexpected token: `(`") @@ -2187,7 +2316,7 @@ describe("subshell", () => { describe("ported", () => { // test_oE 'effect of subshell' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` a=1 # (a=2; echo $a; exit; echo not reached) # NOTE: We actually implemented exit wrong so changing this for now until we fix it @@ -2198,14 +2327,14 @@ describe("subshell", () => { .runAsTest("effect of subshell"); // test_x -e 23 'exit status of subshell' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` (true; exit 23) ` .exitCode(23) .runAsTest("exit status of subshell"); // test_oE 'redirection on subshell' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` (echo 1; echo 2; echo 3; echo 4) >sub_out # (tail -n 2) { .runAsTest("redirection on subshell"); // test_oE 'subshell ending with semicolon' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` (echo foo;) ` .stdout("foo\n") .runAsTest("subshell ending with semicolon"); // test_oE 'subshell ending with asynchronous list' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` mkfifo fifo1 (echo foo >fifo1&) cat fifo1 @@ -2232,7 +2361,7 @@ cat fifo1 .runAsTest("subshell ending with asynchronous list"); // test_oE 'newlines in subshell' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` ( echo foo ) @@ -2241,7 +2370,7 @@ echo foo .runAsTest("newlines in subshell"); // test_oE 'effect of brace grouping' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` a=1 { a=2; echo $a; exit; echo not reached; } echo $a @@ -2251,7 +2380,7 @@ echo $a .runAsTest("effect of brace grouping"); // test_x -e 29 'exit status of brace grouping' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` { true; sh -c 'exit 29'; } ` .exitCode(29) @@ -2259,7 +2388,7 @@ echo $a .runAsTest("exit status of brace grouping"); // test_oE 'redirection on brace grouping' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` { echo 1; echo 2; echo 3; echo 4; } >brace_out { tail -n 2; } fifo1& } cat fifo1 @@ -2286,7 +2415,7 @@ cat fifo1 .runAsTest("brace grouping ending with asynchronous list"); // test_oE 'newlines in brace grouping' - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` { echo foo } diff --git a/test/js/bun/shell/commands/cp.test.ts b/test/js/bun/shell/commands/cp.test.ts index 500eb11bc7..80a28d2c7e 100644 --- a/test/js/bun/shell/commands/cp.test.ts +++ b/test/js/bun/shell/commands/cp.test.ts @@ -61,7 +61,7 @@ describe.if(!builtinDisabled("cp"))("bunshell cp", async () => { .runAsTest("dir -> ? fails without -R"); describe("EBUSY windows", () => { - TestBuilder.command/* sh */ ` + TestBuilder.command /* sh */ ` echo hi! 
> hello.txt mkdir somedir cp ${{ raw: Array(50).fill("hello.txt").join(" ") }} somedir diff --git a/test/js/bun/shell/commands/echo.test.ts b/test/js/bun/shell/commands/echo.test.ts new file mode 100644 index 0000000000..050d0bf3b6 --- /dev/null +++ b/test/js/bun/shell/commands/echo.test.ts @@ -0,0 +1,77 @@ +import { describe } from "bun:test"; +import { createTestBuilder } from "../test_builder"; +const TestBuilder = createTestBuilder(import.meta.path); + +describe("echo", async () => { + TestBuilder.command`echo`.exitCode(0).stdout("\n").stderr("").runAsTest("no arguments outputs newline"); + + TestBuilder.command`echo hello`.exitCode(0).stdout("hello\n").stderr("").runAsTest("single argument"); + + TestBuilder.command`echo hello world`.exitCode(0).stdout("hello world\n").stderr("").runAsTest("multiple arguments"); + + TestBuilder.command`echo "hello world"`.exitCode(0).stdout("hello world\n").stderr("").runAsTest("quoted argument"); + + TestBuilder.command`echo hello world` + .exitCode(0) + .stdout("hello world\n") + .stderr("") + .runAsTest("multiple spaces collapsed"); + + TestBuilder.command`echo ""`.exitCode(0).stdout("\n").stderr("").runAsTest("empty string"); + + TestBuilder.command`echo one two three four` + .exitCode(0) + .stdout("one two three four\n") + .stderr("") + .runAsTest("many arguments"); +}); + +describe("echo -n flag", async () => { + TestBuilder.command`echo -n`.exitCode(0).stdout("").stderr("").runAsTest("no arguments with -n flag"); + + TestBuilder.command`echo -n hello`.exitCode(0).stdout("hello").stderr("").runAsTest("single argument with -n flag"); + + TestBuilder.command`echo -n hello world` + .exitCode(0) + .stdout("hello world") + .stderr("") + .runAsTest("multiple arguments with -n flag"); + + TestBuilder.command`echo -n "hello world"` + .exitCode(0) + .stdout("hello world") + .stderr("") + .runAsTest("quoted argument with -n flag"); + + TestBuilder.command`echo -n ""`.exitCode(0).stdout("").stderr("").runAsTest("empty string with -n flag"); + + TestBuilder.command`echo -n one two three` + .exitCode(0) + .stdout("one two three") + .stderr("") + .runAsTest("many arguments with -n flag"); +}); + +describe("echo error handling", async () => { + TestBuilder.command`echo -x`.exitCode(0).stdout("-x\n").runAsTest("invalid flag"); + + TestBuilder.command`echo -abc`.exitCode(0).stdout("-abc\n").runAsTest("invalid multi-char flag"); + + TestBuilder.command`echo --invalid`.exitCode(0).stdout("--invalid\n").runAsTest("invalid long flag"); +}); + +describe("echo special cases", async () => { + TestBuilder.command`echo -n -n hello` + .exitCode(0) + .stdout("-n hello") + .stderr("") + .runAsTest("-n flag with -n as argument"); + + TestBuilder.command`echo -- -n hello` + .exitCode(0) + .stdout("-- -n hello\n") + .stderr("") + .runAsTest("double dash treated as argument"); + + TestBuilder.command`echo "\n"`.exitCode(0).stdout("\\n\n").stderr("").runAsTest("literal backslash n"); +}); diff --git a/test/js/bun/shell/commands/ls.test.ts b/test/js/bun/shell/commands/ls.test.ts new file mode 100644 index 0000000000..7b985b0f96 --- /dev/null +++ b/test/js/bun/shell/commands/ls.test.ts @@ -0,0 +1,362 @@ +import { $ } from "bun"; +import { beforeAll, describe, expect, setDefaultTimeout, test } from "bun:test"; +import { isPosix, tempDirWithFiles } from "harness"; +import { createTestBuilder } from "../util"; +const TestBuilder = createTestBuilder(import.meta.path); + +const fileExists = async (path: string): Promise => + $`ls -d ${path}`.then(o => o.stdout.toString() === 
`${path}\n`); + +$.nothrow(); + +beforeAll(() => { + setDefaultTimeout(1000 * 60 * 5); +}); + +const BUN = process.argv0; +const DEV_NULL = process.platform === "win32" ? "NUL" : "/dev/null"; + +let node_modules_tempdir: string; +let allNodeModuleFiles: string[] = []; + +let tempdir: string; +let allFiles: string[] = []; + +const sortedLsOutput = (s: string) => + s + .split("\n") + .map(s => s.trim().replaceAll("\\", "/")) + .filter( + s => + s.length > 0 && + // GNU coreutils prints out the current directory like: + // + // ``` + // .: + // a b c + // ``` + // + // We probably should match this + s !== ".:", + ) + .sort(); + +describe("bunshell ls", () => { + beforeAll(async () => { + node_modules_tempdir = tempDirWithFiles("ls-node_modules", {}); + tempdir = tempDirWithFiles("ls", {}); + await $`echo ${packagejson()} > package.json; ${BUN} install &> ${DEV_NULL}` + .quiet() + .throws(true) + .cwd(node_modules_tempdir); + await $`touch a b c; mkdir foo; touch foo/a foo/b foo/c`.quiet().throws(true).cwd(tempdir); + + allNodeModuleFiles = isPosix + ? await Bun.$`ls -RA .` + .quiet() + .throws(true) + .cwd(node_modules_tempdir) + .text() + .then(s => sortedLsOutput(s)) + : []; + + allFiles = ["./foo:", "a", "a", "b", "b", "c", "c", "foo"]; + }); + + describe("recursive", () => { + test.if(isPosix)("node_modules", async () => { + const s = await Bun.$`ls -RA .`.quiet().throws(true).cwd(node_modules_tempdir).text(); + const lines = sortedLsOutput(s); + expect(lines).toEqual(allNodeModuleFiles); + }); + + test("basic", async () => { + const s = await Bun.$`ls -RA .`.quiet().throws(true).cwd(tempdir).text(); + const lines = sortedLsOutput(s); + expect(lines).toEqual(allFiles); + }); + }); + + describe("basic flags", () => { + test("no arguments (current directory)", async () => { + await TestBuilder.command`ls` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).toEqual(["a", "b", "c", "foo"].sort())) + .run(); + }); + + test("-a flag shows all files including . and ..", async () => { + const tempdir = tempDirWithFiles("ls-show-all", {}); + await $`touch .hidden regular; mkdir .hidden-dir`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls -a` + .setTempdir(tempdir) + .stdout(s => { + expect(sortedLsOutput(s)).toContain("."); + expect(sortedLsOutput(s)).toContain(".."); + expect(sortedLsOutput(s)).toContain(".hidden"); + expect(sortedLsOutput(s)).toContain(".hidden-dir"); + }) + .run(); + }); + + test("-A flag shows almost all (excludes . 
and ..)", async () => { + const tempdir = tempDirWithFiles("ls-almost-all", {}); + await $`touch .hidden regular`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls -A` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).not.toContain(".")) + .stdout(s => expect(sortedLsOutput(s)).not.toContain("..")) + .stdout(s => expect(sortedLsOutput(s)).toContain(".hidden")) + .stdout(s => expect(sortedLsOutput(s)).toContain("regular")) + .run(); + }); + + test("-d flag lists directories themselves", async () => { + await TestBuilder.command`ls -d foo`.setTempdir(tempdir).stdout("foo\n").run(); + }); + + // test("-1 flag lists one file per line", async () => { + // await TestBuilder.command`ls -1` + // .setTempdir(tempdir) + // .stdout(s => expect(s.split("\n").filter(l => l.trim())).toEqual(["a", "b", "c", "foo"])) + // .run(); + // }); + }); + + describe("multiple arguments", () => { + test("multiple files", async () => { + await TestBuilder.command`ls a b c` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).toEqual(["a", "b", "c"])) + .run(); + }); + + test("multiple directories", async () => { + const tempdir = tempDirWithFiles("ls-multi-dirs", {}); + await $`mkdir dir1 dir2; touch dir1/file1 dir2/file2`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls dir1 dir2` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).toEqual(["dir1:", "dir2:", "file1", "file2"])) + .run(); + }); + + test("mixed files and directories", async () => { + await TestBuilder.command`ls a foo` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).toEqual(["a", "foo:", "a", "b", "c"].sort())) + .run(); + }); + }); + + describe("edge cases", () => { + test("empty directory", async () => { + const tempdir = tempDirWithFiles("ls-empty", {}); + await $`mkdir empty`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls empty`.setTempdir(tempdir).stdout("").run(); + }); + + test("directory with only hidden files using -a", async () => { + const tempdir = tempDirWithFiles("ls-hidden-only-a", {}); + await $`mkdir hidden-only; touch hidden-only/.hidden1 hidden-only/.hidden2`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls -a hidden-only` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).toEqual([".", "..", ".hidden1", ".hidden2"])) + .run(); + }); + + test("very long filename", async () => { + const tempdir = tempDirWithFiles("ls-long-name", {}); + const longName = "a".repeat(100); + await $`touch ${longName}`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).toContain(longName)) + .run(); + }); + + test("filename with spaces", async () => { + const tempdir = tempDirWithFiles("ls-spaces", {}); + await $`touch "file with spaces"`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).toContain("file with spaces")) + .run(); + }); + + test.if(isPosix)("filename with special characters", async () => { + const tempdir = tempDirWithFiles("ls-special", {}); + await $`touch "file-with-!@#$%^&*()"`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).toContain("file-with-!@#$%^&*()")) + .run(); + }); + }); + + describe("flag combinations", () => { + test("-Ra flag (recursive + show all)", async () => { + const tempdir = tempDirWithFiles("ls-ra", {}); + await $`mkdir sub; touch .hidden 
sub/.hidden-sub`.quiet().throws(true).cwd(tempdir);
+      await TestBuilder.command`ls -Ra`
+        .setTempdir(tempdir)
+        .stdout(s => expect(sortedLsOutput(s)).toContain(".hidden"))
+        .stdout(s => expect(sortedLsOutput(s)).toContain(".hidden-sub"))
+        .run();
+    });
+
+    test("-RA flag (recursive + almost all)", async () => {
+      const tempdir = tempDirWithFiles("ls-ra-caps", {});
+      await $`mkdir sub; touch .hidden sub/.hidden-sub`.quiet().throws(true).cwd(tempdir);
+      await TestBuilder.command`ls -RA`
+        .setTempdir(tempdir)
+        .stdout(s => expect(sortedLsOutput(s)).toContain(".hidden"))
+        .stdout(s => expect(sortedLsOutput(s)).toContain(".hidden-sub"))
+        .stdout(s => expect(sortedLsOutput(s)).not.toContain("."))
+        .run();
+    });
+
+    test("-d with multiple directories", async () => {
+      const tempdir = tempDirWithFiles("ls-d-multi", {});
+      await $`mkdir dir1 dir2`.quiet().throws(true).cwd(tempdir);
+      await TestBuilder.command`ls -d dir1 dir2`
+        .setTempdir(tempdir)
+        .stdout(s => expect(sortedLsOutput(s)).toEqual(["dir1", "dir2"]))
+        .run();
+    });
+  });
+
+  describe("errors", () => {
+    TestBuilder.command`ls lskdjflksdjf`
+      .stderr("ls: lskdjflksdjf: No such file or directory\n")
+      .exitCode(1)
+      .runAsTest("ls lskdjflksdjf");
+
+    test("multiple non-existent files", async () => {
+      await TestBuilder.command`ls nonexistent1 nonexistent2`
+        .exitCode(1)
+        .stderr(s => {
+          expect(s).toContain("nonexistent1: No such file or directory");
+          expect(s).toContain("nonexistent2: No such file or directory");
+        })
+        .ensureTempDir()
+        .run();
+    });
+
+    test("mixed existent and non-existent files", async () => {
+      await TestBuilder.command`ls a nonexistent`
+        .setTempdir(tempdir)
+        .exitCode(1)
+        .stdout(s => expect(sortedLsOutput(s)).toContain("a"))
+        .stderr(s => expect(s).toContain("nonexistent: No such file or directory"))
+        .run();
+    });
+
+    test("invalid flag", async () => {
+      await TestBuilder.command`ls -z`
+        .exitCode(1)
+        .stderr(s => expect(s).toContain("illegal option"))
+        .run();
+    });
+
+    test("invalid combined flags", async () => {
+      await TestBuilder.command`ls -az`
+        .exitCode(1)
+        .stderr(s => expect(s).toContain("illegal option"))
+        .run();
+    });
+
+    test.if(isPosix)("permission denied directory", async () => {
+      const tempdir = tempDirWithFiles("ls-permission", {});
+      await $`mkdir restricted; chmod 000 restricted`.quiet().throws(true).cwd(tempdir);
+      await TestBuilder.command`ls restricted`
+        .setTempdir(tempdir)
+        .exitCode(1)
+        .stderr(s => expect(s).toContain("Permission denied"))
+        .run();
+      await $`chmod 755 restricted`.quiet().throws(true).cwd(tempdir); // cleanup
+    });
+
+    test.if(isPosix)("permission denied directory recursive", async () => {
+      const tempdir = tempDirWithFiles("ls-permission-recursive", {});
+      // Create 3-level deep directory structure with 3+ items per level
+      await $`mkdir -p level1/level2/level3;
+      touch level1/file1 level1/file2 level1/file3;
+      touch level1/level2/file4 level1/level2/file5 level1/level2/file6;
+      touch level1/level2/level3/file7 level1/level2/level3/file8 level1/level2/level3/file9;
+      chmod 000 level1/level2`
+        .quiet()
+        .throws(true)
+        .cwd(tempdir);
+
+      await TestBuilder.command`ls -R level1`
+        .setTempdir(tempdir)
+        .exitCode(1)
+        .stdout(s => expect(sortedLsOutput(s)).toContain("file1"))
+        .stdout(s => expect(sortedLsOutput(s)).toContain("file2"))
+        .stdout(s => expect(sortedLsOutput(s)).toContain("file3"))
+        .stderr(s => expect(s).toContain("Permission denied"))
+        .run();
+
+      await $`chmod 755 level1/level2`.quiet().throws(true).cwd(tempdir); // 
cleanup + }); + + test.if(isPosix)("broken symlink file", async () => { + const tempdir = tempDirWithFiles("ls-broken-symlink", {}); + await $`touch will-remove; ln -s will-remove broken-link; rm will-remove`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls broken-link` + .exitCode(1) + .stderr("ls: broken-link: No such file or directory\n") + .setTempdir(tempdir) + .run(); + }); + + test.if(isPosix)("broken symlink directory", async () => { + const tempdir = tempDirWithFiles("ls-broken-symlink", {}); + await $`mkdir will-remove; ln -s will-remove broken-link; rm -rf will-remove`.quiet().throws(true).cwd(tempdir); + await TestBuilder.command`ls broken-link` + .exitCode(1) + .stderr("ls: broken-link: No such file or directory\n") + .setTempdir(tempdir) + .run(); + }); + + test.if(isPosix)("broken symlink directory recursive", async () => { + const tempdir = tempDirWithFiles("ls-broken-symlink", {}); + console.log("TEMPDIR", tempdir); + await $`mkdir foo; cd foo; touch a b c; mkdir will-remove; ln -s will-remove broken-link; rm -rf will-remove` + .quiet() + .throws(true) + .cwd(tempdir); + await TestBuilder.command`ls -RA .` + .setTempdir(tempdir) + .stdout(s => expect(sortedLsOutput(s)).toEqual(["./foo:", "a", "b", "broken-link", "c", "foo"])) + .run(); + }); + }); +}); + +function packagejson() { + return `{ + "name": "dummy", + "dependencies": { + "@biomejs/biome": "^1.5.3", + "@vscode/debugadapter": "^1.61.0", + "esbuild": "^0.17.15", + "eslint": "^8.20.0", + "eslint-config-prettier": "^8.5.0", + "mitata": "^0.1.3", + "peechy": "0.4.34", + "prettier": "3.2.2", + "react": "next", + "react-dom": "next", + "source-map-js": "^1.0.2", + "typescript": "^5.0.2" + }, + "devDependencies": { + "@types/react": "^18.0.25", + "@typescript-eslint/eslint-plugin": "^5.31.0", + "@typescript-eslint/parser": "^5.31.0" + }, + "version": "0.0.0" +}`; +} diff --git a/test/js/bun/shell/file-io.test.ts b/test/js/bun/shell/file-io.test.ts new file mode 100644 index 0000000000..6c8f8e87e3 --- /dev/null +++ b/test/js/bun/shell/file-io.test.ts @@ -0,0 +1,143 @@ +import { describe } from "bun:test"; +import { createTestBuilder } from "./test_builder"; +const TestBuilder = createTestBuilder(import.meta.path); + +describe("IOWriter file output redirection", () => { + describe("basic file redirection", () => { + TestBuilder.command`echo "hello world" > output.txt` + .exitCode(0) + .fileEquals("output.txt", "hello world\n") + .runAsTest("simple echo to file"); + + TestBuilder.command`echo -n "" > empty.txt` + .exitCode(0) + .fileEquals("empty.txt", "") + .runAsTest("empty output to file"); + + TestBuilder.command`echo "" > zero.txt` + .exitCode(0) + .fileEquals("zero.txt", "\n") + .runAsTest("zero-length write should trigger onIOWriterChunk callback"); + }); + + describe("drainBufferedData edge cases", () => { + TestBuilder.command`echo -n ${"x".repeat(1024 * 10)} > large.txt` + .exitCode(0) + .fileEquals("large.txt", "x".repeat(1024 * 10)) + .runAsTest("large single write"); + + TestBuilder.command`mkdir -p subdir && echo "test" > subdir/file.txt` + .exitCode(0) + .fileEquals("subdir/file.txt", "test\n") + .runAsTest("write to subdirectory"); + }); + + describe("file system error conditions", () => { + TestBuilder.command`echo "should fail" > /dev/null/invalid/path` + .exitCode(1) + .stderr_contains("directory: /dev/null/invalid/path") + .runAsTest("write to invalid path should fail"); + + TestBuilder.command`echo "should fail" > /nonexistent/file.txt` + .exitCode(1) + .stderr_contains("No such file or 
directory") + .runAsTest("write to non-existent directory should fail"); + }); + + describe("special file types", () => { + TestBuilder.command`echo "disappear" > /dev/null`.exitCode(0).stdout("").runAsTest("write to /dev/null"); + }); + + describe("writer queue and bump behavior", () => { + TestBuilder.command`echo "single" > single_writer.txt` + .exitCode(0) + .fileEquals("single_writer.txt", "single\n") + .runAsTest("single writer completion and cleanup"); + + TestBuilder.command`echo "robust test" > robust.txt` + .exitCode(0) + .fileEquals("robust.txt", "robust test\n") + .runAsTest("writer marked as dead during write"); + + TestBuilder.command`echo "captured content" > capture.txt` + .exitCode(0) + .fileEquals("capture.txt", "captured content\n") + .stdout("") + .runAsTest("bytelist capture during file write"); + }); + + describe("error handling and unreachable paths", () => { + TestBuilder.command`echo -n ${"A".repeat(2 * 1024)} > atomic.txt` + .exitCode(0) + .fileEquals("atomic.txt", "A".repeat(2 * 1024)) + .runAsTest("attempt to trigger partial write panic"); + + TestBuilder.command`echo "synchronous" > sync_write.txt` + .exitCode(0) + .fileEquals("sync_write.txt", "synchronous\n") + .runAsTest("EAGAIN should never occur for files"); + + TestBuilder.command`echo "error test" > nonexistent_dir/file.txt` + .exitCode(1) + .stderr_contains("No such file or directory") + .runAsTest("write error propagation"); + }); + + describe("file permissions and creation", () => { + TestBuilder.command`echo "new file" > new_file.txt` + .exitCode(0) + .fileEquals("new_file.txt", "new file\n") + .runAsTest("file creation with default permissions"); + + TestBuilder.command`echo "original" > overwrite.txt && echo "short" > overwrite.txt` + .exitCode(0) + .fileEquals("overwrite.txt", "short\n") + .runAsTest("overwrite existing file"); + + TestBuilder.command`echo "line1" > append.txt && echo "line2" >> append.txt && echo "line3" >> append.txt` + .exitCode(0) + .fileEquals("append.txt", "line1\nline2\nline3\n") + .runAsTest("append to existing file"); + }); + + // describe("concurrent operations", () => { + // TestBuilder.command`echo "content 0" > concurrent_0.txt & echo "content 1" > concurrent_1.txt & echo "content 2" > concurrent_2.txt & wait` + // .exitCode(0) + // .fileEquals("concurrent_0.txt", "content 0\n") + // .fileEquals("concurrent_1.txt", "content 1\n") + // .fileEquals("concurrent_2.txt", "content 2\n") + // .runAsTest("concurrent writes to different files"); + + // TestBuilder.command`echo "iteration 0" > rapid.txt && echo "iteration 1" > rapid.txt && echo "iteration 2" > rapid.txt` + // .exitCode(0) + // .fileEquals("rapid.txt", "iteration 2\n") + // .runAsTest("rapid sequential writes to same file"); + // }); + + describe("additional TestBuilder integration", () => { + TestBuilder.command`echo "builder test" > output.txt` + .exitCode(0) + .fileEquals("output.txt", "builder test\n") + .runAsTest("basic file output"); + + TestBuilder.command`printf "no newline" > no_newline.txt` + .exitCode(0) + .fileEquals("no_newline.txt", "no newline") + .runAsTest("output without trailing newline"); + + TestBuilder.command`echo "first" > multi.txt && echo "second" >> multi.txt` + .exitCode(0) + .fileEquals("multi.txt", "first\nsecond\n") + .runAsTest("write then append"); + + TestBuilder.command`echo "test with spaces in filename" > "file with spaces.txt"` + .exitCode(0) + .fileEquals("file with spaces.txt", "test with spaces in filename\n") + .runAsTest("write to file with spaces in name"); + + 
TestBuilder.command`echo "pipe test" | cat > pipe_output.txt` + .exitCode(0) + .fileEquals("pipe_output.txt", "pipe test\n") + .runAsTest("pipe with file redirection"); + }); +}); diff --git a/test/js/bun/shell/leak.test.ts b/test/js/bun/shell/leak.test.ts index b63c3822eb..bfcb6f24ff 100644 --- a/test/js/bun/shell/leak.test.ts +++ b/test/js/bun/shell/leak.test.ts @@ -1,7 +1,7 @@ import { $ } from "bun"; import { heapStats } from "bun:jsc"; import { describe, expect, test } from "bun:test"; -import { bunEnv, tempDirWithFiles } from "harness"; +import { bunEnv, isPosix, tempDirWithFiles } from "harness"; import { appendFileSync, closeSync, openSync, writeFileSync } from "node:fs"; import { devNull, tmpdir } from "os"; import { join } from "path"; @@ -113,6 +113,7 @@ describe("fd leak", () => { prev = val; prevprev = val; } else { + // console.error('Prev', prev, 'Val', val, 'Diff', Math.abs(prev - val), 'Threshold', threshold); if (!(Math.abs(prev - val) < threshold)) process.exit(1); } } @@ -125,7 +126,7 @@ describe("fd leak", () => { const { stdout, stderr, exitCode } = Bun.spawnSync([process.argv0, "--smol", "test", tempfile], { env: bunEnv, }); - // console.log('STDOUT:', stdout.toString(), '\n\nSTDERR:', stderr.toString()); + // console.log("STDOUT:", stdout.toString(), "\n\nSTDERR:", stderr.toString()); if (exitCode != 0) { console.log("\n\nSTDERR:", stderr.toString()); } @@ -139,8 +140,8 @@ describe("fd leak", () => { }); // Use text of this file so its big enough to cause a leak - memLeakTest("ArrayBuffer", () => TestBuilder.command`cat ${import.meta.filename} > ${new ArrayBuffer(1 << 20)}`, 100); - memLeakTest("Buffer", () => TestBuilder.command`cat ${import.meta.filename} > ${Buffer.alloc(1 << 20)}`, 100); + memLeakTest("ArrayBuffer", () => TestBuilder.command`cat ${import.meta.filename} > ${new ArrayBuffer(128)}`, 100); + memLeakTest("Buffer", () => TestBuilder.command`cat ${import.meta.filename} > ${Buffer.alloc(128)}`, 100); memLeakTest( "Blob_something", () => @@ -169,6 +170,209 @@ describe("fd leak", () => { ); memLeakTest("String", () => TestBuilder.command`echo ${Array(4096).fill("a").join("")}`.stdout(() => {}), 100); + function memLeakTestProtect( + name: string, + className: string, + constructStmt: string, + builder: string, + posixOnly: boolean = false, + runs: number = 5, + ) { + const runTheTest = !posixOnly ? true : isPosix; + test.if(runTheTest)( + `memleak_protect_${name}`, + async () => { + const tempfile = join(tmpdir(), "script.ts"); + + const filepath = import.meta.dirname; + const testcode = await Bun.file(join(filepath, "./test_builder.ts")).text(); + + writeFileSync(tempfile, testcode); + + const impl = /* ts */ ` + import { heapStats } from "bun:jsc"; + const TestBuilder = createTestBuilder(import.meta.path); + + Bun.gc(true); + const startValue = heapStats().protectedObjectTypeCounts.${className} ?? 0; + for (let i = 0; i < ${runs}; i++) { + await (async function() { + let val = ${constructStmt} + await ${builder} + })() + Bun.gc(true); + + let value = heapStats().protectedObjectTypeCounts.${className} ?? 
0; + + if (value > startValue) { + console.error('Leaked ${className} objects') + process.exit(1); + } + } + `; + + appendFileSync(tempfile, impl); + + // console.log("THE CODE", readFileSync(tempfile, "utf-8")); + + const { stdout, stderr, exitCode } = Bun.spawnSync([process.argv0, "--smol", "test", tempfile], { + env: bunEnv, + }); + // console.log("STDOUT:", stdout.toString(), "\n\nSTDERR:", stderr.toString()); + if (exitCode != 0) { + console.log("\n\nSTDERR:", stderr.toString()); + } + expect(exitCode).toBe(0); + }, + 100_000, + ); + } + + memLeakTestProtect( + "ArrayBuffer", + "ArrayBuffer", + "new ArrayBuffer(64)", + "TestBuilder.command`cat ${import.meta.filename} > ${val}`", + ); + memLeakTestProtect( + "Buffer", + "Buffer", + "Buffer.alloc(64)", + "TestBuilder.command`cat ${import.meta.filename} > ${val}`", + ); + memLeakTestProtect( + "ArrayBuffer_builtin", + "ArrayBuffer", + "new ArrayBuffer(64)", + "TestBuilder.command`echo ${import.meta.filename} > ${val}`", + ); + memLeakTestProtect( + "Buffer_builtin", + "Buffer", + "Buffer.alloc(64)", + "TestBuilder.command`echo ${import.meta.filename} > ${val}`", + ); + + memLeakTestProtect( + "Uint8Array", + "Uint8Array", + "new Uint8Array(64)", + "TestBuilder.command`cat ${import.meta.filename} > ${val}`", + ); + memLeakTestProtect( + "Uint8Array_builtin", + "Uint8Array", + "new Uint8Array(64)", + "TestBuilder.command`echo ${import.meta.filename} > ${val}`", + ); + + memLeakTestProtect( + "DataView", + "DataView", + "new DataView(new ArrayBuffer(64))", + "TestBuilder.command`cat ${import.meta.filename} > ${val}`", + ); + memLeakTestProtect( + "DataView_builtin", + "DataView", + "new DataView(new ArrayBuffer(64))", + "TestBuilder.command`echo ${import.meta.filename} > ${val}`", + ); + + memLeakTestProtect( + "String_large_input", + "String", + "Array(4096).fill('test').join('')", + "TestBuilder.command`echo ${val}`", + ); + memLeakTestProtect( + "String_pipeline", + "String", + "Array(1024).fill('data').join('')", + "TestBuilder.command`echo ${val} | cat`", + ); + + // Complex nested pipelines + memLeakTestProtect( + "ArrayBuffer_nested_pipeline", + "ArrayBuffer", + "new ArrayBuffer(256)", + "TestBuilder.command`echo ${val} | head -n 10 | tail -n 5 | wc -l`", + true, + ); + memLeakTestProtect( + "Buffer_triple_pipeline", + "Buffer", + "Buffer.alloc(256)", + "TestBuilder.command`echo ${val} | cat | grep -v nonexistent | wc -c`", + true, + ); + memLeakTestProtect( + "String_complex_pipeline", + "String", + "Array(512).fill('pipeline').join('\\n')", + "TestBuilder.command`echo ${val} | sort | uniq | head -n 3`", + true, + ); + + // Subshells with JS objects + memLeakTestProtect( + "ArrayBuffer_subshell", + "ArrayBuffer", + "new ArrayBuffer(128)", + "TestBuilder.command`echo $(echo ${val} | wc -c)`", + true, + ); + memLeakTestProtect( + "Buffer_nested_subshell", + "Buffer", + "Buffer.alloc(128)", + "TestBuilder.command`echo $(echo ${val} | head -c 10) done`", + true, + ); + memLeakTestProtect( + "String_subshell_pipeline", + "String", + "Array(256).fill('sub').join('')", + "TestBuilder.command`echo start $(echo ${val} | wc -c | cat) end`", + true, + ); + + // Mixed builtin and subprocess commands + memLeakTestProtect( + "ArrayBuffer_mixed_commands", + "ArrayBuffer", + "new ArrayBuffer(192)", + "TestBuilder.command`mkdir -p tmp && echo ${val} > tmp/test.txt && cat tmp/test.txt && rm -rf tmp`", + ); + memLeakTestProtect( + "Buffer_builtin_external_mix", + "Buffer", + "Buffer.alloc(192)", + "TestBuilder.command`echo ${val} | ${bunExe()} -e 
'process.stdin.on(\"data\", d => process.stdout.write(d))' | head -c 50`", + ); + memLeakTestProtect( + "String_cd_operations", + "String", + "Array(128).fill('dir').join('')", + "TestBuilder.command`mkdir -p testdir && cd testdir && echo ${val} > file.txt && cd .. && cat testdir/file.txt && rm -rf testdir`", + ); + + // Conditional execution + memLeakTestProtect( + "ArrayBuffer_conditional", + "ArrayBuffer", + "new ArrayBuffer(64)", + "TestBuilder.command`echo ${val} && echo success || echo failure`", + ); + memLeakTestProtect( + "Buffer_test_conditional", + "Buffer", + "Buffer.alloc(64)", + "TestBuilder.command`test -n ${val} && echo 'has content' || echo 'empty'`", + true, + ); + describe("#11816", async () => { function doit(builtin: boolean) { test(builtin ? "builtin" : "external", async () => { diff --git a/test/js/bun/shell/yield.test.ts b/test/js/bun/shell/yield.test.ts new file mode 100644 index 0000000000..37f0fad75b --- /dev/null +++ b/test/js/bun/shell/yield.test.ts @@ -0,0 +1,11 @@ +import { describe } from "bun:test"; +import { createTestBuilder } from "./test_builder"; +const TestBuilder = createTestBuilder(import.meta.path); + +describe("yield", async () => { + const array = Array(10000).fill("a"); + TestBuilder.command`echo -n ${array} > myfile.txt` + .exitCode(0) + .fileEquals("myfile.txt", array.join(" ")) + .runAsTest("doesn't stackoverflow"); +}); diff --git a/test/js/bun/spawn/readablestream-helpers.test.ts b/test/js/bun/spawn/readablestream-helpers.test.ts new file mode 100644 index 0000000000..f5d0e55547 --- /dev/null +++ b/test/js/bun/spawn/readablestream-helpers.test.ts @@ -0,0 +1,198 @@ +import { describe, expect, test } from "bun:test"; +import { bunEnv, bunExe } from "harness"; + +describe("ReadableStream conversion methods", () => { + test("Bun.spawn() process.stdout.text() should capture process output", async () => { + // Spawn a process that outputs some text + await using process = Bun.spawn([bunExe(), "-e", "console.log('Hello from Bun spawn! 🚀')"], { + env: bunEnv, + }); + + // Convert the process stdout to text using .text() + const result = await process.stdout.text(); + await process.exited; + + expect(result).toBe("Hello from Bun spawn! 🚀\n"); + expect(process.exitCode).toBe(0); + }); + + test("Bun.spawn() process.stdout.text() should capture process output (after exited)", async () => { + // Spawn a process that outputs some text + await using process = Bun.spawn([bunExe(), "-e", "console.log('Hello from Bun spawn! 🚀')"], { + env: bunEnv, + }); + + await process.exited; + + // Convert the process stdout to text using .text() + const result = await process.stdout.text(); + + expect(result).toBe("Hello from Bun spawn! 🚀\n"); + expect(process.exitCode).toBe(0); + }); + + test("Bun.spawn() process.stdout.text() should convert stream to text", async () => { + // Spawn a process that outputs text + const text = "Hello, this is a test stream! 🌊 测试"; + await using process = Bun.spawn([bunExe(), "-e", `console.log("${text}")`], { + env: bunEnv, + }); + + // Convert the process stdout to text using .text() + const result = await process.stdout.text(); + await process.exited; + + expect(result.trim()).toBe(text); + expect(process.exitCode).toBe(0); + }); + + test("Bun.spawn() process.stdout.text() should convert stream to text (after exited)", async () => { + // Spawn a process that outputs text + const text = "Hello, this is a test stream! 
🌊 测试"; + await using process = Bun.spawn([bunExe(), "-e", `console.log("${text}")`], { + env: bunEnv, + }); + + await process.exited; + + // Convert the process stdout to text using .text() + const result = await process.stdout.text(); + + expect(result.trim()).toBe(text); + expect(process.exitCode).toBe(0); + }); + + test("Bun.spawn() process.stdout.json() should convert stream to JSON", async () => { + // Spawn a process that outputs JSON data + const jsonData = { message: "Hello from JSON stream! 🎯", count: 42, active: true, emoji: "🌟" }; + await using process = Bun.spawn([bunExe(), "-e", `console.log('${JSON.stringify(jsonData)}')`], { + env: bunEnv, + }); + + // Convert the process stdout to JSON using .json() + const result = await process.stdout.json(); + await process.exited; + + expect(result).toEqual(jsonData); + expect(process.exitCode).toBe(0); + }); + + test("Bun.spawn() process.stdout.json() should convert stream to JSON (after exited)", async () => { + // Spawn a process that outputs JSON data + const jsonData = { message: "Hello from JSON stream! 🎯", count: 42, active: true, emoji: "🌟" }; + await using process = Bun.spawn([bunExe(), "-e", `console.log('${JSON.stringify(jsonData)}')`], { + env: bunEnv, + }); + + await process.exited; + + // Convert the process stdout to JSON using .json() + const result = await process.stdout.json(); + + expect(result).toEqual(jsonData); + expect(process.exitCode).toBe(0); + }); + + test("Bun.spawn() process.stdout.json() should throw on invalid JSON", async () => { + // Spawn a process that outputs invalid JSON + const invalidJson = "{ invalid json content }"; + await using process = Bun.spawn([bunExe(), "-e", `console.log('${invalidJson}')`], { + env: bunEnv, + }); + + // Attempt to convert the process stdout to JSON using .json() + // check that it doesn't throw synchronously. + const result = process.stdout.json(); + expect(result).toBeInstanceOf(Promise); + + expect(async () => await result).toThrowErrorMatchingInlineSnapshot(`"JSON Parse error: Expected '}'"`); + await process.exited; + + expect(process.exitCode).toBe(0); + }); + + test("Bun.spawn() process.stdout.json() should throw on invalid JSON (after exited)", async () => { + // Spawn a process that outputs invalid JSON + const invalidJson = "{ invalid json content }"; + await using process = Bun.spawn([bunExe(), "-e", `console.log('${invalidJson}')`], { + env: bunEnv, + }); + + await process.exited; + + // Attempt to convert the process stdout to JSON using .json() + + const result = process.stdout.json(); + // Check it doesn't throw synchronously. + expect(result).toBeInstanceOf(Promise); + + // TODO: why is the error message different here?? 
+ expect(async () => await result).toThrowErrorMatchingInlineSnapshot(`"Failed to parse JSON"`); + + expect(process.exitCode).toBe(0); + }); + + test("Bun.spawn() process.stdout.blob() should convert stream to Blob", async () => { + // Generate random binary data + const randomBytes = new Uint8Array(256); + crypto.getRandomValues(randomBytes); + const binaryData = Buffer.from(randomBytes); + + await using process = Bun.spawn( + [bunExe(), "-e", `process.stdout.write(Buffer.from([${Array.from(binaryData)}]))`], + { + env: bunEnv, + }, + ); + + // Convert the process stdout to Blob using .blob() + const result = await process.stdout.blob(); + await process.exited; + + // Compare the Blob directly with the original binary data + expect(await result.bytes()).toEqual(new Uint8Array(binaryData)); + expect(process.exitCode).toBe(0); + }); + + test("Bun.spawn() process.stdout.bytes() should convert stream to Uint8Array", async () => { + // Generate random binary data + const randomBytes = new Uint8Array(128); + crypto.getRandomValues(randomBytes); + const binaryData = Buffer.from(randomBytes); + + await using process = Bun.spawn( + [bunExe(), "-e", `process.stdout.write(Buffer.from([${Array.from(binaryData)}]))`], + { + env: bunEnv, + }, + ); + + // Convert the process stdout to Uint8Array using .bytes() + const result = await process.stdout.bytes(); + await process.exited; + + // Compare the Uint8Array directly with the original binary data + expect(result).toEqual(new Uint8Array(binaryData)); + expect(process.exitCode).toBe(0); + expect(result).toBeInstanceOf(Uint8Array); + }); + + for (const method of ["text", "json", "bytes", "blob"] as const) { + describe(`ReadableStream.prototype.${method}() should throw when called with wrong this value`, () => { + for (const thisValue of [null, undefined, "not a stream", {}, []]) { + test(String(thisValue), () => { + // Test that calling .text() with wrong this value throws an error + // @ts-ignore + const fn = ReadableStream.prototype[method]; + expect(() => { + fn.call(thisValue); + }).toThrowError( + expect.objectContaining({ + code: "ERR_INVALID_THIS", + }), + ); + }); + } + }); + } +}); diff --git a/test/js/bun/spawn/spawn-stdin-readable-stream-edge-cases.test.ts b/test/js/bun/spawn/spawn-stdin-readable-stream-edge-cases.test.ts new file mode 100644 index 0000000000..ccc398d657 --- /dev/null +++ b/test/js/bun/spawn/spawn-stdin-readable-stream-edge-cases.test.ts @@ -0,0 +1,479 @@ +/** + * Edge case tests for spawn with ReadableStream stdin. + * + * **IMPORTANT**: Many of these tests use `await` in ReadableStream constructors + * (e.g., `await Bun.sleep(0)`, `await 42`) to prevent Bun from optimizing + * the ReadableStream into a Blob. When a ReadableStream is synchronous and + * contains only string/buffer data, Bun may normalize it to a Blob for + * performance reasons. The `await` ensures the stream remains truly streaming + * and tests the actual ReadableStream code paths in spawn. 
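+ *
+ * A rough sketch of the distinction this implies (illustrative only, not one
+ * of the tests below):
+ *
+ *   // Synchronous, string-only stream: may be normalized to a Blob internally.
+ *   new ReadableStream({ start(c) { c.enqueue("data"); c.close(); } });
+ *
+ *   // An `await` keeps it on the real streaming path these tests exercise.
+ *   new ReadableStream({ async pull(c) { await Bun.sleep(0); c.enqueue("data"); c.close(); } });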
+ */ + +import { spawn } from "bun"; +import { describe, expect, test } from "bun:test"; +import { bunEnv, bunExe } from "harness"; + +describe("spawn stdin ReadableStream edge cases", () => { + test("ReadableStream with exception in pull", async () => { + let pullCount = 0; + const stream = new ReadableStream({ + pull(controller) { + pullCount++; + if (pullCount === 1) { + controller.enqueue("chunk 1\n"); + } else if (pullCount === 2) { + controller.enqueue("chunk 2\n"); + throw new Error("Pull error"); + } + }, + }); + + const proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + // Should receive data before the exception + expect(text).toContain("chunk 1\n"); + expect(text).toContain("chunk 2\n"); + }); + + test("ReadableStream writing after process closed", async () => { + let writeAttempts = 0; + let errorOccurred = false; + + const stream = new ReadableStream({ + async pull(controller) { + writeAttempts++; + if (writeAttempts <= 10) { + await Bun.sleep(100); + try { + controller.enqueue(`attempt ${writeAttempts}\n`); + } catch (e) { + errorOccurred = true; + throw e; + } + } else { + controller.close(); + } + }, + }); + + // Use a command that exits quickly after reading one line + const proc = spawn({ + cmd: [ + bunExe(), + "-e", + `const readline = require('readline'); + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false + }); + rl.on('line', (line) => { + console.log(line); + process.exit(0); + });`, + ], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + await proc.exited; + + // Give time for more pull attempts + await Bun.sleep(500); + + // The stream should have attempted multiple writes but only the first succeeded + expect(writeAttempts).toBeGreaterThanOrEqual(1); + expect(text).toBe("attempt 1\n"); + }); + + test("ReadableStream with mixed types", async () => { + const stream = new ReadableStream({ + start(controller) { + // String + controller.enqueue("text "); + // Uint8Array + controller.enqueue(new TextEncoder().encode("binary ")); + // ArrayBuffer + const buffer = new ArrayBuffer(5); + const view = new Uint8Array(buffer); + view.set([100, 97, 116, 97, 32]); // "data " + controller.enqueue(buffer); + // Another string + controller.enqueue("end"); + controller.close(); + }, + }); + + const proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text).toBe("text binary data end"); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with process consuming data slowly", async () => { + const chunks: string[] = []; + for (let i = 0; i < 10; i++) { + chunks.push(`chunk ${i}\n`); + } + + let currentChunk = 0; + const stream = new ReadableStream({ + pull(controller) { + if (currentChunk < chunks.length) { + controller.enqueue(chunks[currentChunk]); + currentChunk++; + } else { + controller.close(); + } + }, + }); + + // Use a script that reads slowly + const proc = spawn({ + cmd: [ + bunExe(), + "-e", + ` + const readline = require('readline'); + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false + }); + + rl.on('line', async (line) => { + await Bun.sleep(10); + console.log(line); + }); + `, + ], + stdin: stream, 
+ stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + const lines = text.trim().split("\n"); + expect(lines.length).toBe(10); + for (let i = 0; i < 10; i++) { + expect(lines[i]).toBe(`chunk ${i}`); + } + expect(await proc.exited).toBe(0); + }); + + test.todo("ReadableStream with cancel callback verification", async () => { + let cancelReason: any = null; + let cancelCalled = false; + + const stream = new ReadableStream({ + start(controller) { + // Start sending data + let count = 0; + const interval = setInterval(() => { + count++; + try { + controller.enqueue(`data ${count}\n`); + } catch (e) { + clearInterval(interval); + } + }, 50); + + // Store interval for cleanup + (controller as any).interval = interval; + }, + cancel(reason) { + cancelCalled = true; + cancelReason = reason; + // Clean up interval if exists + if ((this as any).interval) { + clearInterval((this as any).interval); + } + }, + }); + + // Kill the process after some data + const proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + // Wait a bit then kill + await Bun.sleep(150); + proc.kill(); + + try { + await proc.exited; + } catch (e) { + // Expected - process was killed + } + + // Give time for cancel to be called + await Bun.sleep(50); + + expect(cancelCalled).toBe(true); + }); + + test("ReadableStream with high frequency small chunks", async () => { + const totalChunks = 1000; + let sentChunks = 0; + + const stream = new ReadableStream({ + pull(controller) { + // Send multiple small chunks per pull + for (let i = 0; i < 10 && sentChunks < totalChunks; i++) { + controller.enqueue(`${sentChunks}\n`); + sentChunks++; + } + + if (sentChunks >= totalChunks) { + controller.close(); + } + }, + }); + + const proc = spawn({ + cmd: [ + bunExe(), + "-e", + `let count = 0; + const readline = require('readline'); + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false + }); + rl.on('line', () => count++); + rl.on('close', () => console.log(count));`, + ], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(parseInt(text.trim())).toBe(totalChunks); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with several pulls", async () => { + let pullCount = 0; + + const stream = new ReadableStream({ + pull(controller) { + pullCount++; + if (pullCount <= 5) { + // Enqueue data larger than high water mark + controller.enqueue(Buffer.alloc(1024, "x")); + } else { + controller.close(); + } + }, + }); + + const proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text).toBe("x".repeat(1024 * 5)); + expect(await proc.exited).toBe(0); + + // TODO: this is not quite right. 
But it's still good to have
+    expect(pullCount).toBe(6);
+  });
+
+  test("ReadableStream reuse prevention", async () => {
+    const stream = new ReadableStream({
+      start(controller) {
+        controller.enqueue("test data");
+        controller.close();
+      },
+    });
+
+    // First use
+    const proc1 = spawn({
+      cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"],
+      stdin: stream,
+      stdout: "pipe",
+      env: bunEnv,
+    });
+
+    const text1 = await new Response(proc1.stdout).text();
+    expect(text1).toBe("test data");
+    expect(await proc1.exited).toBe(0);
+
+    // Second use should fail
+    expect(() => {
+      spawn({
+        cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"],
+        stdin: stream,
+        env: bunEnv,
+      });
+    }).toThrow();
+  });
+
+  test("ReadableStream with byte stream", async () => {
+    const data = new Uint8Array(256);
+    for (let i = 0; i < 256; i++) {
+      data[i] = i;
+    }
+
+    const stream = new ReadableStream({
+      type: "bytes",
+      start(controller) {
+        // Enqueue as byte chunks
+        controller.enqueue(data.slice(0, 128));
+        controller.enqueue(data.slice(128, 256));
+        controller.close();
+      },
+    });
+
+    const proc = spawn({
+      cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"],
+      stdin: stream,
+      stdout: "pipe",
+      env: bunEnv,
+    });
+
+    const buffer = await new Response(proc.stdout).arrayBuffer();
+    const result = new Uint8Array(buffer);
+    expect(result).toEqual(data);
+    expect(await proc.exited).toBe(0);
+  });
+
+  test("ReadableStream with stdin and other pipes", async () => {
+    const stream = new ReadableStream({
+      start(controller) {
+        controller.enqueue("stdin data");
+        controller.close();
+      },
+    });
+
+    // Create a script that also writes to stdout and stderr
+    const script = `
+      process.stdin.on('data', (data) => {
+        process.stdout.write('stdout: ' + data);
+        process.stderr.write('stderr: ' + data);
+      });
+    `;
+
+    const proc = spawn({
+      cmd: [bunExe(), "-e", script],
+      stdin: stream,
+      stdout: "pipe",
+      stderr: "pipe",
+      env: bunEnv,
+    });
+
+    const [stdout, stderr] = await Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]);
+
+    expect(stdout).toBe("stdout: stdin data");
+    expect(stderr).toBe("stderr: stdin data");
+    expect(await proc.exited).toBe(0);
+  });
+
+  test("ReadableStream with very long single chunk", async () => {
+    // Create a chunk larger than typical pipe buffer (64KB on most systems)
+    const size = 256 * 1024; // 256KB
+    const chunk = "a".repeat(size);
+
+    const stream = new ReadableStream({
+      start(controller) {
+        controller.enqueue(chunk);
+        controller.close();
+      },
+    });
+
+    const proc = spawn({
+      cmd: [
+        bunExe(),
+        "-e",
+        `let count = 0;
+        process.stdin.on('data', (chunk) => count += chunk.length);
+        process.stdin.on('end', () => console.log(count));`,
+      ],
+      stdin: stream,
+      stdout: "pipe",
+      env: bunEnv,
+    });
+
+    const text = await new Response(proc.stdout).text();
+    expect(parseInt(text.trim())).toBe(size);
+    expect(await proc.exited).toBe(0);
+  });
+
+  test("ReadableStream with alternating data types", async () => {
+    const stream = new ReadableStream({
+      async pull(controller) {
+        await Bun.sleep(0);
+
+        // Alternate between strings and Uint8Arrays
+        controller.enqueue("string1 ");
+        controller.enqueue(new TextEncoder().encode("binary1 "));
+        controller.enqueue("string2 ");
+        controller.enqueue(new TextEncoder().encode("binary2"));
+        controller.close();
+      },
+    });
+
+    await using proc = spawn({
+      cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"],
+      stdin: stream,
+      stdout: "pipe",
+      env: bunEnv,
+    });
+
+    const text = await new 
Response(proc.stdout).text(); + expect(text).toBe("string1 binary1 string2 binary2"); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with spawn options variations", async () => { + // Test with different spawn configurations + const configs = [ + { stdout: "pipe", stderr: "ignore" }, + { stdout: "pipe", stderr: "pipe" }, + { stdout: "pipe", stderr: "inherit" }, + ]; + + for (const config of configs) { + const stream = new ReadableStream({ + async pull(controller) { + await Bun.sleep(0); + controller.enqueue("test input"); + controller.close(); + }, + }); + + const proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + ...config, + env: bunEnv, + }); + + const stdout = await new Response(proc.stdout).text(); + expect(stdout).toBe("test input"); + expect(await proc.exited).toBe(0); + } + }); +}); diff --git a/test/js/bun/spawn/spawn-stdin-readable-stream-integration.test.ts b/test/js/bun/spawn/spawn-stdin-readable-stream-integration.test.ts new file mode 100644 index 0000000000..3b8edf996c --- /dev/null +++ b/test/js/bun/spawn/spawn-stdin-readable-stream-integration.test.ts @@ -0,0 +1,181 @@ +import { spawn } from "bun"; +import { describe, expect, test } from "bun:test"; +import { bunEnv, bunExe } from "harness"; + +describe("spawn stdin ReadableStream integration", () => { + test("example from documentation", async () => { + const stream = new ReadableStream({ + async pull(controller) { + await Bun.sleep(1); + controller.enqueue("some data from a stream"); + controller.close(); + }, + }); + + const proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + console.log(text); // "some data from a stream" + expect(text).toBe("some data from a stream"); + }); + + test("piping HTTP response to process", async () => { + using server = Bun.serve({ + port: 0, + async fetch(req) { + return new Response(async function* () { + yield "Line 1\n"; + yield "Line 2\n"; + yield "Line 3\n"; + }); + }, + }); + + // Count lines using Bun subprocess + const proc = spawn({ + cmd: [ + bunExe(), + "-e", + /*js*/ ` + let count = 0; + const readline = require('readline'); + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false + }); + rl.on('line', () => count++); + rl.on('close', () => console.log(count));`, + ], + stdin: await fetch(server.url), + stdout: "pipe", + env: bunEnv, + }); + const output = await new Response(proc.stdout).text(); + expect(parseInt(output.trim())).toBe(3); + }); + + test("transforming data before passing to process", async () => { + // Original data stream + const dataStream = new ReadableStream({ + async pull(controller) { + await Bun.sleep(1); + controller.enqueue("hello world"); + controller.enqueue("\n"); + controller.enqueue("foo bar"); + controller.close(); + }, + }); + + // Transform to uppercase + const upperCaseTransform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + }, + }); + + // Pipe through transform then to process + const transformedStream = dataStream.pipeThrough(upperCaseTransform); + + const proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: transformedStream, + stdout: "pipe", + env: bunEnv, + }); + + const result = await new Response(proc.stdout).text(); + expect(result).toBe("HELLO WORLD\nFOO BAR"); + }); + + test("streaming large 
file through process", async () => { + // Simulate streaming a large file in chunks + const chunkSize = 1024; + const numChunks = 100; + let currentChunk = 0; + + const fileStream = new ReadableStream({ + pull(controller) { + if (currentChunk < numChunks) { + // Simulate file chunk + controller.enqueue(`Chunk ${currentChunk}: ${"x".repeat(chunkSize - 20)}\n`); + currentChunk++; + } else { + controller.close(); + } + }, + }); + + // Process the stream (just echo it for cross-platform compatibility) + const proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: fileStream, + stdout: "pipe", + env: bunEnv, + }); + + const result = await new Response(proc.stdout).text(); + const lines = result.trim().split("\n"); + expect(lines.length).toBe(numChunks); + expect(lines[0]).toStartWith("Chunk 0:"); + expect(lines[99]).toStartWith("Chunk 99:"); + }); + + test("real-time data processing", async () => { + let dataPoints = 0; + const maxDataPoints = 5; + + // Simulate real-time data stream + const dataStream = new ReadableStream({ + async pull(controller) { + if (dataPoints < maxDataPoints) { + const timestamp = Date.now(); + const value = Math.random() * 100; + controller.enqueue(`${timestamp},${value.toFixed(2)}\n`); + dataPoints++; + + // Simulate real-time delay + await Bun.sleep(10); + } else { + controller.close(); + } + }, + }); + + // Process the CSV data using Bun + const proc = spawn({ + cmd: [ + bunExe(), + "-e", + `let sum = 0, count = 0; + const readline = require('readline'); + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false + }); + rl.on('line', (line) => { + const [_, value] = line.split(','); + sum += parseFloat(value); + count++; + }); + rl.on('close', () => console.log(sum / count));`, + ], + stdin: dataStream, + stdout: "pipe", + env: bunEnv, + }); + + const avgStr = await new Response(proc.stdout).text(); + const avg = parseFloat(avgStr.trim()); + + // Average should be between 0 and 100 + expect(avg).toBeGreaterThanOrEqual(0); + expect(avg).toBeLessThanOrEqual(100); + }); +}); diff --git a/test/js/bun/spawn/spawn-stdin-readable-stream-sync.test.ts b/test/js/bun/spawn/spawn-stdin-readable-stream-sync.test.ts new file mode 100644 index 0000000000..8f8089b2dc --- /dev/null +++ b/test/js/bun/spawn/spawn-stdin-readable-stream-sync.test.ts @@ -0,0 +1,23 @@ +import { spawnSync } from "bun"; +import { describe, expect, test } from "bun:test"; +import { bunExe } from "harness"; + +describe("spawnSync with ReadableStream stdin", () => { + test("spawnSync should throw", () => { + const stream = new ReadableStream({ + async start(controller) { + await 42; + controller.enqueue("test data"); + controller.close(); + }, + }); + + expect(() => + spawnSync({ + cmd: [bunExe()], + stdin: stream, + stdout: "pipe", + }), + ).toThrowErrorMatchingInlineSnapshot(`"'stdin' ReadableStream cannot be used in sync mode"`); + }); +}); diff --git a/test/js/bun/spawn/spawn-stdin-readable-stream.test.ts b/test/js/bun/spawn/spawn-stdin-readable-stream.test.ts new file mode 100644 index 0000000000..49d50dc9cf --- /dev/null +++ b/test/js/bun/spawn/spawn-stdin-readable-stream.test.ts @@ -0,0 +1,590 @@ +import { spawn } from "bun"; +import { describe, expect, mock, test } from "bun:test"; +import { bunEnv, bunExe, expectMaxObjectTypeCount, isASAN, isCI } from "harness"; + +describe("spawn stdin ReadableStream", () => { + test("basic ReadableStream as stdin", async () => { + const stream = new ReadableStream({ + start(controller) 
{ + controller.enqueue("hello from stream"); + controller.close(); + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text).toBe("hello from stream"); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with multiple chunks", async () => { + const chunks = ["chunk1\n", "chunk2\n", "chunk3\n"]; + const stream = new ReadableStream({ + start(controller) { + for (const chunk of chunks) { + controller.enqueue(chunk); + } + controller.close(); + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text).toBe(chunks.join("")); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with Uint8Array chunks", async () => { + const encoder = new TextEncoder(); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(encoder.encode("binary ")); + controller.enqueue(encoder.encode("data ")); + controller.enqueue(encoder.encode("stream")); + controller.close(); + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text).toBe("binary data stream"); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with delays between chunks", async () => { + const stream = new ReadableStream({ + async start(controller) { + controller.enqueue("first\n"); + await Bun.sleep(50); + controller.enqueue("second\n"); + await Bun.sleep(50); + controller.enqueue("third\n"); + controller.close(); + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text).toBe("first\nsecond\nthird\n"); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with pull method", async () => { + let pullCount = 0; + const stream = new ReadableStream({ + pull(controller) { + pullCount++; + if (pullCount <= 3) { + controller.enqueue(`pull ${pullCount}\n`); + } else { + controller.close(); + } + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text).toBe("pull 1\npull 2\npull 3\n"); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with async pull and delays", async () => { + let pullCount = 0; + const stream = new ReadableStream({ + async pull(controller) { + pullCount++; + if (pullCount <= 3) { + await Bun.sleep(30); + controller.enqueue(`async pull ${pullCount}\n`); + } else { + controller.close(); + } + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text).toBe("async pull 1\nasync pull 2\nasync pull 3\n"); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with large data", async () => { + const largeData = "x".repeat(1024 * 1024); // 1MB + const stream = new ReadableStream({ + start(controller) { + 
controller.enqueue(largeData); + controller.close(); + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text).toBe(largeData); + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with very large chunked data", async () => { + const chunkSize = 64 * 1024; // 64KB chunks + const numChunks = 16; // 1MB total + let pushedChunks = 0; + + const stream = new ReadableStream({ + pull(controller) { + if (pushedChunks < numChunks) { + controller.enqueue("x".repeat(chunkSize)); + pushedChunks++; + } else { + controller.close(); + } + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + expect(text.length).toBe(chunkSize * numChunks); + expect(text).toBe("x".repeat(chunkSize * numChunks)); + expect(await proc.exited).toBe(0); + }); + + test.todo("ReadableStream cancellation when process exits early", async () => { + let cancelled = false; + let chunksEnqueued = 0; + + const stream = new ReadableStream({ + async pull(controller) { + // Keep enqueueing data slowly + await Bun.sleep(50); + chunksEnqueued++; + controller.enqueue(`chunk ${chunksEnqueued}\n`); + }, + cancel(_reason) { + cancelled = true; + }, + }); + + await using proc = spawn({ + cmd: [ + bunExe(), + "-e", + `const readline = require('readline'); + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false + }); + let lines = 0; + rl.on('line', (line) => { + console.log(line); + lines++; + if (lines >= 2) process.exit(0); + });`, + ], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + await proc.exited; + + // Give some time for cancellation to happen + await Bun.sleep(100); + + expect(cancelled).toBe(true); + expect(chunksEnqueued).toBeGreaterThanOrEqual(2); + // head -n 2 should only output 2 lines + expect(text.trim().split("\n").length).toBe(2); + }); + + test("ReadableStream error handling", async () => { + const stream = new ReadableStream({ + async start(controller) { + controller.enqueue("before error\n"); + // Give time for the data to be consumed + await Bun.sleep(10); + controller.error(new Error("Stream error")); + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + // Process should receive data before the error + expect(text).toBe("before error\n"); + + // Process should exit normally (the stream error happens after data is sent) + expect(await proc.exited).toBe(0); + }); + + test("ReadableStream with process that exits immediately", async () => { + const stream = new ReadableStream({ + start(controller) { + // Enqueue a lot of data + for (let i = 0; i < 1000; i++) { + controller.enqueue(`line ${i}\n`); + } + controller.close(); + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.exit(0)"], // exits immediately + stdin: stream, + env: bunEnv, + }); + + expect(await proc.exited).toBe(0); + + // Give time for any pending operations + await Bun.sleep(50); + + // The stream might be cancelled since the process exits before reading + // This is implementation-dependent behavior + }); + + 
test("ReadableStream with process that fails", async () => { + const stream = new ReadableStream({ + async pull(controller) { + await Bun.sleep(0); + controller.enqueue("data for failing process\n"); + controller.close(); + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.exit(1)"], + stdin: stream, + env: bunEnv, + }); + + expect(await proc.exited).toBe(1); + }); + + test("already disturbed ReadableStream throws error", async () => { + const stream = new ReadableStream({ + async pull(controller) { + await Bun.sleep(0); + controller.enqueue("data"); + controller.close(); + }, + }); + + // Disturb the stream by reading from it + const reader = stream.getReader(); + await reader.read(); + reader.releaseLock(); + + expect(() => { + const proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + env: bunEnv, + }); + }).toThrow("'stdin' ReadableStream has already been used"); + }); + + test("ReadableStream with abort signal calls cancel", async () => { + const controller = new AbortController(); + const cancel = mock(); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue("data before abort\n"); + }, + async pull(controller) { + // Keep the stream open + // but don't block the event loop. + await Bun.sleep(1); + controller.enqueue("more data\n"); + }, + cancel, + }); + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + signal: controller.signal, + env: bunEnv, + }); + + // Give it some time to start + await Bun.sleep(10); + + // Abort the process + controller.abort(); + + try { + await proc.exited; + } catch (e) { + // Process was aborted + } + + // The process should have been killed + expect(proc.killed).toBe(true); + expect(cancel).toHaveBeenCalledTimes(1); + }); + + test("ReadableStream with backpressure", async () => { + let pullCalls = 0; + const maxChunks = 5; + + const stream = new ReadableStream({ + async pull(controller) { + pullCalls++; + if (pullCalls <= maxChunks) { + // Add async to prevent optimization to blob + await Bun.sleep(0); + controller.enqueue(`chunk ${pullCalls}\n`); + } else { + controller.close(); + } + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + env: bunEnv, + }); + + const text = await new Response(proc.stdout).text(); + await proc.exited; + + // The pull method should have been called multiple times + expect(pullCalls).toBeGreaterThan(1); + expect(pullCalls).toBeLessThanOrEqual(maxChunks + 1); // +1 for the close pull + expect(text).toContain("chunk 1\n"); + expect(text).toContain(`chunk ${maxChunks}\n`); + }); + + test("ReadableStream with multiple processes", async () => { + const stream1 = new ReadableStream({ + start(controller) { + controller.enqueue("stream1 data"); + controller.close(); + }, + }); + + const stream2 = new ReadableStream({ + start(controller) { + controller.enqueue("stream2 data"); + controller.close(); + }, + }); + + await using proc1 = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream1, + stdout: "pipe", + env: bunEnv, + }); + + await using proc2 = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream2, + stdout: "pipe", + env: bunEnv, + }); + + const [text1, text2] = await Promise.all([new Response(proc1.stdout).text(), new Response(proc2.stdout).text()]); + + expect(text1).toBe("stream1 data"); + 
+    expect(text2).toBe("stream2 data");
+    expect(await proc1.exited).toBe(0);
+    expect(await proc2.exited).toBe(0);
+  });
+
+  test("ReadableStream with empty stream", async () => {
+    const stream = new ReadableStream({
+      start(controller) {
+        // Close immediately without enqueueing anything
+        controller.close();
+      },
+    });
+
+    await using proc = spawn({
+      cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"],
+      stdin: stream,
+      stdout: "pipe",
+      env: bunEnv,
+    });
+
+    const text = await new Response(proc.stdout).text();
+    expect(text).toBe("");
+    expect(await proc.exited).toBe(0);
+  });
+
+  test("ReadableStream with null bytes", async () => {
+    const stream = new ReadableStream({
+      start(controller) {
+        controller.enqueue(new Uint8Array([72, 101, 108, 108, 111, 0, 87, 111, 114, 108, 100])); // "Hello\0World"
+        controller.close();
+      },
+    });
+
+    await using proc = spawn({
+      cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"],
+      stdin: stream,
+      stdout: "pipe",
+      env: bunEnv,
+    });
+
+    const buffer = await new Response(proc.stdout).arrayBuffer();
+    const bytes = new Uint8Array(buffer);
+    expect(bytes).toEqual(new Uint8Array([72, 101, 108, 108, 111, 0, 87, 111, 114, 108, 100]));
+    expect(await proc.exited).toBe(0);
+  });
+
+  test("ReadableStream with transform stream", async () => {
+    // Create a transform stream that uppercases text
+    const upperCaseTransform = new TransformStream({
+      transform(chunk, controller) {
+        controller.enqueue(chunk.toUpperCase());
+      },
+    });
+
+    const originalStream = new ReadableStream({
+      start(controller) {
+        controller.enqueue("hello ");
+        controller.enqueue("world");
+        controller.close();
+      },
+    });
+
+    const transformedStream = originalStream.pipeThrough(upperCaseTransform);
+
+    await using proc = spawn({
+      cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"],
+      stdin: transformedStream,
+      stdout: "pipe",
+      env: bunEnv,
+    });
+
+    const text = await new Response(proc.stdout).text();
+    expect(text).toBe("HELLO WORLD");
+    expect(await proc.exited).toBe(0);
+  });
+
+  test("ReadableStream with tee", async () => {
+    const originalStream = new ReadableStream({
+      start(controller) {
+        controller.enqueue("shared data");
+        controller.close();
+      },
+    });
+
+    const [stream1, stream2] = originalStream.tee();
+
+    // Use the first branch for the process
+    await using proc = spawn({
+      cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"],
+      stdin: stream1,
+      stdout: "pipe",
+      env: bunEnv,
+    });
+
+    // Read from the second branch independently
+    const text2 = await new Response(stream2).text();
+
+    const text1 = await new Response(proc.stdout).text();
+    expect(text1).toBe("shared data");
+    expect(text2).toBe("shared data");
+    expect(await proc.exited).toBe(0);
+  });
+
+  test("ReadableStream object type count", async () => {
+    const iterations =
+      isASAN && isCI
+        ? // With ASAN, the entire process gets killed, including the test runner in CI, likely due to an OOM or file-descriptor exhaustion.
+ 10 + : 50; + + async function main() { + async function iterate(i: number) { + const stream = new ReadableStream({ + async pull(controller) { + await Bun.sleep(0); + controller.enqueue(`iteration ${i}`); + controller.close(); + }, + }); + + await using proc = spawn({ + cmd: [bunExe(), "-e", "process.stdin.pipe(process.stdout)"], + stdin: stream, + stdout: "pipe", + stderr: "inherit", + env: bunEnv, + }); + + await Promise.all([new Response(proc.stdout).text(), proc.exited]); + } + + const promises = Array.from({ length: iterations }, (_, i) => iterate(i)); + await Promise.all(promises); + } + + await main(); + + await Bun.sleep(1); + Bun.gc(true); + await Bun.sleep(1); + + // Check that we're not leaking objects + await expectMaxObjectTypeCount(expect, "ReadableStream", 10); + await expectMaxObjectTypeCount(expect, "Subprocess", 5); + }); +}); diff --git a/test/js/bun/spawn/spawn.test.ts b/test/js/bun/spawn/spawn.test.ts index de00e904f0..0ea0a68b3a 100644 --- a/test/js/bun/spawn/spawn.test.ts +++ b/test/js/bun/spawn/spawn.test.ts @@ -5,6 +5,7 @@ import { bunEnv, bunExe, getMaxFD, + isBroken, isMacOS, isPosix, isWindows, @@ -492,7 +493,7 @@ for (let [gcTick, label] of [ expect(output).toBe(expected); }); - it("after exit", async () => { + it.skipIf(isWindows && isBroken && callback === huge)("after exit", async () => { const process = callback(); await process.exited; const output = await readableStreamToText(process.stdout); diff --git a/test/js/bun/symbols.test.ts b/test/js/bun/symbols.test.ts index 72cc44b73a..621871b6d6 100644 --- a/test/js/bun/symbols.test.ts +++ b/test/js/bun/symbols.test.ts @@ -11,7 +11,7 @@ if (process.platform === "linux") { throw new Error("objdump executable not found. Please install it."); } - const output = await $`${objdump} -T ${BUN_EXE} | grep GLIBC_`.text(); + const output = await $`${objdump} -T ${BUN_EXE} | grep GLIBC_`.nothrow().text(); const lines = output.split("\n"); const errors = []; for (const line of lines) { diff --git a/test/js/bun/test/__snapshots__/test-interop.js.snap b/test/js/bun/test/__snapshots__/test-interop.js.snap index 39cfad68b8..9ab001e8fb 100644 --- a/test/js/bun/test/__snapshots__/test-interop.js.snap +++ b/test/js/bun/test/__snapshots__/test-interop.js.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`expect() toMatchSnapshot to return undefined 1`] = `"abc"`; diff --git a/test/js/bun/test/__snapshots__/test-test.test.ts.snap b/test/js/bun/test/__snapshots__/test-test.test.ts.snap index d07cc41e5a..4749566917 100644 --- a/test/js/bun/test/__snapshots__/test-test.test.ts.snap +++ b/test/js/bun/test/__snapshots__/test-test.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`unhandled errors between tests are reported in beforeAll 1`] = ` " diff --git a/test/js/bun/test/expect.test.js b/test/js/bun/test/expect.test.js index 148285d263..53b5cf60ca 100644 --- a/test/js/bun/test/expect.test.js +++ b/test/js/bun/test/expect.test.js @@ -4157,13 +4157,13 @@ describe("expect()", () => { expect(expect.objectContaining({ first: { second: {} } })).not.toEqual({ first: { second: {}, third: {} }, }); - expect( + (expect( expect.objectContaining({ answer: 42, foo: { bar: "baz", foobar: "qux" }, }), ).not.toEqual({ foo: { bar: "baz" } }), - expect(expect.objectContaining({ [foo]: "foo" })).not.toEqual({ [bar]: "bar" }); + expect(expect.objectContaining({ [foo]: "foo" 
})).not.toEqual({ [bar]: "bar" })); }); test("ObjectContaining matches defined properties", () => { diff --git a/test/js/bun/test/snapshot-tests/__snapshots__/bun-snapshots.test.ts.snap b/test/js/bun/test/snapshot-tests/__snapshots__/bun-snapshots.test.ts.snap index 449a99d727..5b7b1e57cc 100644 --- a/test/js/bun/test/snapshot-tests/__snapshots__/bun-snapshots.test.ts.snap +++ b/test/js/bun/test/snapshot-tests/__snapshots__/bun-snapshots.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`toMatchSnapshot errors should throw if arguments are in the wrong order: right spot 1`] = ` { diff --git a/test/js/bun/test/snapshot-tests/__snapshots__/existing-snapshots.test.ts.snap b/test/js/bun/test/snapshot-tests/__snapshots__/existing-snapshots.test.ts.snap index 7c0d55c60f..86f185de3b 100644 --- a/test/js/bun/test/snapshot-tests/__snapshots__/existing-snapshots.test.ts.snap +++ b/test/js/bun/test/snapshot-tests/__snapshots__/existing-snapshots.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`it will work with an existing snapshot file made with bun 1`] = ` { diff --git a/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/more.test.ts.snap b/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/more.test.ts.snap index bee96183b3..f4907cae49 100644 --- a/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/more.test.ts.snap +++ b/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/more.test.ts.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Jest Snapshot v1, https://bun.sh/docs/test/snapshots exports[`d0 d1 t1 1`] = `"hello\`snapshot\\"`; diff --git a/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/moremore.test.ts.snap b/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/moremore.test.ts.snap index ab8c168fae..a47ab54cec 100644 --- a/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/moremore.test.ts.snap +++ b/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/moremore.test.ts.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Jest Snapshot v1, https://bun.sh/docs/test/snapshots exports[`test snapshots with Boolean and Number 1`] = `1`; diff --git a/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/snapshot.test.ts.snap b/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/snapshot.test.ts.snap index 34d5ce7c32..d649371c77 100644 --- a/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/snapshot.test.ts.snap +++ b/test/js/bun/test/snapshot-tests/snapshots/__snapshots__/snapshot.test.ts.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Jest Snapshot v1, https://bun.sh/docs/test/snapshots exports[`most types 1`] = `3`; @@ -370,35 +370,35 @@ exports[`most types: testing 7 3`] = `8`; exports[`most types: undefined 1`] = `undefined`; exports[`snapshots dollars 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"$"\`; " `; exports[`snapshots backslash 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"\\\\"\`; " `; exports[`snapshots dollars curly 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"\\\${}"\`; " `; exports[`snapshots dollars curly 2 1`] = ` -"// Bun Snapshot v1, 
https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"\\\${"\`; " `; exports[`snapshots stuff 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \` "æ™ @@ -409,7 +409,7 @@ exports[\`abc 1\`] = \` `; exports[`snapshots stuff 2 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \` "æ™ @@ -420,28 +420,28 @@ exports[\`abc 1\`] = \` `; exports[`snapshots regexp 1 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`/\\\${1..}/\`; " `; exports[`snapshots regexp 2 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`/\\\${2..}/\`; " `; exports[`snapshots string 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"abc"\`; " `; exports[`snapshots string with newline 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \` "qwerty @@ -451,35 +451,35 @@ ioup" `; exports[`snapshots null byte 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"1 \\x00"\`; " `; exports[`snapshots null byte 2 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"2 \\x00"\`; " `; exports[`snapshots backticks 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"This is \\\`wrong\\\`"\`; " `; exports[`snapshots unicode 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"😊abc\\\`\\\${def} �, � "\`; " `; exports[`snapshots jest newline oddity 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \` " @@ -489,14 +489,14 @@ exports[\`abc 1\`] = \` `; exports[`snapshots grow file for new snapshot 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"hello"\`; " `; exports[`snapshots grow file for new snapshot 2`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"hello"\`; @@ -505,7 +505,7 @@ exports[\`def 1\`] = \`"hello"\`; `; exports[`snapshots grow file for new snapshot 3`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"goodbye"\`; @@ -514,35 +514,35 @@ exports[\`def 1\`] = \`"hello"\`; `; exports[`snapshots backtick in test name 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`\\\` 1\`] = \`"abc"\`; " `; exports[`snapshots dollars curly in test name 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`\\\${} 1\`] = \`"abc"\`; " `; exports[`snapshots #15283 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`Should work 1\`] = \`"This is \\\`wrong\\\`"\`; " `; exports[`snapshots #15283 
unicode 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`Should work 1\`] = \`"😊This is \\\`wrong\\\`"\`; " `; exports[`snapshots replaces file that fails to parse when update flag is used 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`t1 1\`] = \`"abc def ghi jkl"\`; @@ -553,7 +553,7 @@ exports[\`t3 1\`] = \`"abc def ghi"\`; `; exports[`snapshots property matchers 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \` { @@ -582,7 +582,7 @@ exports[`inline snapshots #15283 1`] = ` `; exports[`snapshots unicode surrogate halves 1`] = ` -"// Bun Snapshot v1, https://goo.gl/fbAQLP +"// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[\`abc 1\`] = \`"😊abc\\\`\\\${def} �, � "\`; " diff --git a/test/js/bun/test/snapshot-tests/snapshots/more-snapshots/__snapshots__/different-directory.test.ts.snap b/test/js/bun/test/snapshot-tests/snapshots/more-snapshots/__snapshots__/different-directory.test.ts.snap index f45127b386..6b43eec53e 100644 --- a/test/js/bun/test/snapshot-tests/snapshots/more-snapshots/__snapshots__/different-directory.test.ts.snap +++ b/test/js/bun/test/snapshot-tests/snapshots/more-snapshots/__snapshots__/different-directory.test.ts.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Jest Snapshot v1, https://bun.sh/docs/test/snapshots exports[`snapshots in different directory 1`] = ` "12 diff --git a/test/js/bun/test/snapshot-tests/snapshots/snapshot.test.ts b/test/js/bun/test/snapshot-tests/snapshots/snapshot.test.ts index debc737ef4..ae2276109c 100644 --- a/test/js/bun/test/snapshot-tests/snapshots/snapshot.test.ts +++ b/test/js/bun/test/snapshot-tests/snapshots/snapshot.test.ts @@ -311,7 +311,7 @@ for (const inlineSnapshot of [false, true]) { { forceUpdate: true }, ); expect(await t.getSnapshotContents()).toBe( - '// Bun Snapshot v1, https://goo.gl/fbAQLP\n\nexports[`t1 1`] = `"abc def ghi jkl"`;\n\nexports[`t2 1`] = `"abc\\`def"`;\n\nexports[`t3 1`] = `"abc def ghi"`;\n', + '// Bun Snapshot v1, https://bun.sh/docs/test/snapshots\n\nexports[`t1 1`] = `"abc def ghi jkl"`;\n\nexports[`t2 1`] = `"abc\\`def"`;\n\nexports[`t3 1`] = `"abc def ghi"`;\n', ); }); diff --git a/test/js/bun/test/test-error-code-done-callback.test.ts b/test/js/bun/test/test-error-code-done-callback.test.ts index bfdc62d945..0d4e9eab0e 100644 --- a/test/js/bun/test/test-error-code-done-callback.test.ts +++ b/test/js/bun/test/test-error-code-done-callback.test.ts @@ -134,7 +134,7 @@ test("verify we print error messages passed to done callbacks", () => { 0 pass 9 fail - Ran 9 tests across 1 files. + Ran 9 tests across 1 file. 
" `); }); diff --git a/test/js/bun/test/test-only.test.ts b/test/js/bun/test/test-only.test.ts index 5a45cc484a..cae48a490b 100644 --- a/test/js/bun/test/test-only.test.ts +++ b/test/js/bun/test/test-only.test.ts @@ -9,7 +9,7 @@ test.each(["./only-fixture-1.ts", "./only-fixture-2.ts", "./only-fixture-3.ts"]) expect(result.stderr.toString()).toContain(" 1 pass\n"); expect(result.stderr.toString()).toContain(" 0 fail\n"); - expect(result.stderr.toString()).toContain("Ran 1 tests across 1 files"); + expect(result.stderr.toString()).toContain("Ran 1 test across 1 file"); }, ); diff --git a/test/js/bun/test/test-test.test.ts b/test/js/bun/test/test-test.test.ts index a24e036ef5..76668873d2 100644 --- a/test/js/bun/test/test-test.test.ts +++ b/test/js/bun/test/test-test.test.ts @@ -297,11 +297,27 @@ it("should return non-zero exit code for invalid syntax", async () => { stderr: "pipe", env: bunEnv, }); - const err = await new Response(stderr).text(); - expect(err).toContain("error: Unexpected end of file"); - expect(err).toContain(" 0 pass"); - expect(err).toContain(" 1 fail"); - expect(err).toContain("Ran 1 tests across 1 files"); + const err = (await new Response(stderr).text()).replaceAll("\\", "/"); + expect(err.replaceAll(test_dir.replaceAll("\\", "/"), "").replaceAll(/\[(.*)\ms\]/g, "[xx ms]")) + .toMatchInlineSnapshot(` + " + bad.test.js: + + # Unhandled error between tests + ------------------------------- + 1 | !!! + ^ + error: Unexpected end of file + at /bad.test.js:1:3 + ------------------------------- + + + 0 pass + 1 fail + 1 error + Ran 1 test across 1 file. [xx ms] + " + `); expect(stdout).toBeDefined(); expect(await new Response(stdout).text()).toBe(`bun test ${Bun.version_with_sha}\n`); expect(await exited).toBe(1); @@ -732,7 +748,7 @@ test("my-test", () => { expect(output).toContain("1 error"); } - expect(output).toContain("Ran 1 tests across 1 files"); + expect(output).toContain("Ran 1 test across 1 file"); }); } }); diff --git a/test/js/bun/util/__snapshots__/inspect-error.test.js.snap b/test/js/bun/util/__snapshots__/inspect-error.test.js.snap index eff7103964..c60a55e2b9 100644 --- a/test/js/bun/util/__snapshots__/inspect-error.test.js.snap +++ b/test/js/bun/util/__snapshots__/inspect-error.test.js.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`error.cause 1`] = ` "1 | import { expect, test } from "bun:test"; diff --git a/test/js/bun/util/__snapshots__/reportError.test.ts.snap b/test/js/bun/util/__snapshots__/reportError.test.ts.snap index bc70b514db..661ab1af60 100644 --- a/test/js/bun/util/__snapshots__/reportError.test.ts.snap +++ b/test/js/bun/util/__snapshots__/reportError.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`reportError 1`] = ` "1 | reportError(new Error("reportError Test!")); diff --git a/test/js/bun/util/fuzzy-wuzzy.test.ts b/test/js/bun/util/fuzzy-wuzzy.test.ts index dcd78bc921..e233c7f3db 100644 --- a/test/js/bun/util/fuzzy-wuzzy.test.ts +++ b/test/js/bun/util/fuzzy-wuzzy.test.ts @@ -210,11 +210,11 @@ function callAllMethods(object) { object?.on?.("error", () => {}); } const returnValue = wrap(Reflect.apply(object?.[methodName], object, [])); - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); calls++; } catch (e) { const returnValue = wrap(Reflect.apply(object.constructor?.[methodName], object?.constructor, [])); - 
Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); calls++; } } catch (e) { @@ -244,7 +244,7 @@ function callAllMethods(object) { if (returnValue?.then) { continue; } - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); calls++; } catch (e) {} } @@ -261,17 +261,17 @@ function constructAllConstructors(object) { try { try { const returnValue = Reflect.construct(object, [], method); - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); constructs++; } catch (e) { const returnValue = Reflect.construct(object?.constructor, [], method); - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); constructs++; } } catch (e) { try { const returnValue = Reflect.construct(object?.prototype?.constructor, [], method); - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); constructs++; } catch (e) { Error.captureStackTrace(e); @@ -293,7 +293,7 @@ function constructAllConstructors(object) { continue; } - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); seen.add(returnValue); constructs++; } catch (e) {} @@ -312,21 +312,21 @@ function constructAllConstructorsWithSubclassing(object) { // Create a subclass of the constructor class Subclass extends object {} const returnValue = Reflect.construct(object, [], Subclass); - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); subclasses++; } catch (e) { try { // Try with the constructor property class Subclass extends object?.constructor {} const returnValue = Reflect.construct(object?.constructor, [], Subclass); - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); subclasses++; } catch (e) { // Fallback to a more generic approach const Subclass = function () {}; Object.setPrototypeOf(Subclass.prototype, object); const returnValue = Reflect.construct(object, [], Subclass); - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); subclasses++; } } @@ -335,7 +335,7 @@ function constructAllConstructorsWithSubclassing(object) { // Try with prototype constructor class Subclass extends object?.prototype?.constructor {} const returnValue = Reflect.construct(object?.prototype?.constructor, [], Subclass); - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); subclasses++; } catch (e) { Error.captureStackTrace(e); @@ -360,7 +360,7 @@ function constructAllConstructorsWithSubclassing(object) { continue; } - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); seen.add(returnValue); subclasses++; } catch (e) { @@ -372,7 +372,7 @@ function constructAllConstructorsWithSubclassing(object) { continue; } - Bun.inspect?.(returnValue), queue.push(returnValue); + (Bun.inspect?.(returnValue), queue.push(returnValue)); seen.add(returnValue); subclasses++; } diff --git a/test/js/bun/util/inspect-error-leak.test.js b/test/js/bun/util/inspect-error-leak.test.js index 6634edf6e0..aab13c8d37 100644 --- a/test/js/bun/util/inspect-error-leak.test.js +++ b/test/js/bun/util/inspect-error-leak.test.js @@ -20,5 +20,5 @@ test("Printing errors does not leak", () => { const 
after = Math.floor(process.memoryUsage.rss() / 1024); const diff = ((after - baseline) / 1024) | 0; console.log(`RSS increased by ${diff} MB`); - expect(diff, `RSS grew by ${diff} MB after ${perBatch * repeat} iterations`).toBeLessThan(isASAN ? 16 : 10); + expect(diff, `RSS grew by ${diff} MB after ${perBatch * repeat} iterations`).toBeLessThan(isASAN ? 20 : 10); }, 10_000); diff --git a/test/js/bun/util/randomUUIDv5.test.ts b/test/js/bun/util/randomUUIDv5.test.ts new file mode 100644 index 0000000000..769a983997 --- /dev/null +++ b/test/js/bun/util/randomUUIDv5.test.ts @@ -0,0 +1,382 @@ +import { describe, expect, test } from "bun:test"; +import * as uuid from "uuid"; + +describe("randomUUIDv5", () => { + const dnsNamespace = "6ba7b810-9dad-11d1-80b4-00c04fd430c8"; + const urlNamespace = "6ba7b811-9dad-11d1-80b4-00c04fd430c8"; + + test("basic functionality", () => { + const result = Bun.randomUUIDv5("www.example.com", dnsNamespace); + expect(result).toBeTypeOf("string"); + expect(result).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/); + + // Check that it's version 5 + expect(result[14]).toBe("5"); + }); + + test("deterministic output", () => { + const uuid1 = Bun.randomUUIDv5("www.example.com", dnsNamespace); + const uuid2 = Bun.randomUUIDv5("www.example.com", dnsNamespace); + + // Should always generate the same UUID for the same namespace + name + expect(uuid1).toBe(uuid2); + }); + + test("compatibility with uuid library", () => { + const name = "www.example.com"; + const bunUuid = Bun.randomUUIDv5(name, dnsNamespace); + const uuidLibUuid = uuid.v5(name, dnsNamespace); + + expect(bunUuid).toBe(uuidLibUuid); + }); + + test("predefined namespace strings", () => { + // Test with predefined namespace strings + const uuid1 = Bun.randomUUIDv5("www.example.com", "dns"); + const uuid2 = Bun.randomUUIDv5("www.example.com", dnsNamespace); + + expect(uuid1).toBe(uuid2); + + const uuid3 = Bun.randomUUIDv5("http://example.com", "url"); + const uuid4 = Bun.randomUUIDv5("http://example.com", urlNamespace); + + expect(uuid3).toBe(uuid4); + }); + + test("empty name", () => { + const result = Bun.randomUUIDv5("", dnsNamespace); + expect(result).toBeTypeOf("string"); + expect(result[14]).toBe("5"); + }); + + test("long name", () => { + const longName = "a".repeat(1000); + const result = Bun.randomUUIDv5(longName, dnsNamespace); + expect(result).toBeTypeOf("string"); + expect(result[14]).toBe("5"); + }); + + test("unicode name", () => { + const unicodeName = "测试.example.com"; + const result = Bun.randomUUIDv5(unicodeName, dnsNamespace); + expect(result).toBeTypeOf("string"); + expect(result[14]).toBe("5"); + + // Should be deterministic + const uuid2 = Bun.randomUUIDv5(unicodeName, dnsNamespace); + expect(result).toBe(uuid2); + }); + + test("name as ArrayBuffer", () => { + const nameString = "test"; + const nameBuffer = new TextEncoder().encode(nameString); + + const uuid1 = Bun.randomUUIDv5(nameString, dnsNamespace); + const uuid2 = Bun.randomUUIDv5(nameBuffer, dnsNamespace); + + expect(uuid1).toBe(uuid2); + }); + + test("name as TypedArray", () => { + const nameString = "test"; + const nameArray = new Uint8Array(new TextEncoder().encode(nameString)); + + const uuid1 = Bun.randomUUIDv5(nameString, dnsNamespace); + const uuid2 = Bun.randomUUIDv5(nameArray, dnsNamespace); + + expect(uuid1).toBe(uuid2); + }); + + test("error handling - invalid namespace", () => { + expect(() => { + Bun.randomUUIDv5("test", "invalid-uuid"); + }).toThrow(); + }); + + test("error handling - wrong 
namespace buffer size", () => { + const wrongSizeBuffer = new Uint8Array(15); // Should be 16 bytes + expect(() => { + Bun.randomUUIDv5("test", wrongSizeBuffer); + }).toThrow(); + }); + + test("error handling - invalid encoding", () => { + expect(() => { + // @ts-expect-error - testing invalid encoding + Bun.randomUUIDv5("test", dnsNamespace, "invalid"); + }).toThrow(); + }); + + test("variant bits are correct", () => { + const result = Bun.randomUUIDv5("test", dnsNamespace); + const bytes = result.replace(/-/g, ""); + + // Extract the variant byte (17th hex character, index 16) + const variantByte = parseInt(bytes.substr(16, 2), 16); + + // Variant bits should be 10xxxxxx (0x80-0xBF) + expect(variantByte & 0xc0).toBe(0x80); + }); + + test("version bits are correct", () => { + const result = Bun.randomUUIDv5("test", dnsNamespace); + const bytes = result.replace(/-/g, ""); + + // Extract the version byte (13th hex character, index 12) + const versionByte = parseInt(bytes.substr(12, 2), 16); + + // Version bits should be 0101xxxx (0x50-0x5F) + expect(versionByte & 0xf0).toBe(0x50); + }); + + test("case insensitive namespace strings", () => { + const uuid1 = Bun.randomUUIDv5("test", "DNS"); + const uuid2 = Bun.randomUUIDv5("test", "dns"); + const uuid3 = Bun.randomUUIDv5("test", "Dns"); + + expect(uuid1).toBe(uuid2); + expect(uuid2).toBe(uuid3); + }); + + test("all predefined namespaces", () => { + const name = "test"; + + const dnsUuid = Bun.randomUUIDv5(name, "dns"); + const urlUuid = Bun.randomUUIDv5(name, "url"); + const oidUuid = Bun.randomUUIDv5(name, "oid"); + const x500Uuid = Bun.randomUUIDv5(name, "x500"); + + // All should be different + expect(dnsUuid).not.toBe(urlUuid); + expect(urlUuid).not.toBe(oidUuid); + expect(oidUuid).not.toBe(x500Uuid); + + // All should be valid UUIDs + [dnsUuid, urlUuid, oidUuid, x500Uuid].forEach(result => { + expect(result).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/); + expect(result[14]).toBe("5"); + }); + }); + + test("different namespaces produce different UUIDs", () => { + const uuid1 = Bun.randomUUIDv5("www.example.com", dnsNamespace); + const uuid2 = Bun.randomUUIDv5("www.example.com", urlNamespace); + + expect(uuid1).not.toBe(uuid2); + expect(uuid.v5("www.example.com", dnsNamespace)).toBe(uuid1); + expect(uuid.v5("www.example.com", urlNamespace)).toBe(uuid2); + }); + + test("different names produce different UUIDs", () => { + const uuid1 = Bun.randomUUIDv5("www.example.com", dnsNamespace); + const uuid2 = Bun.randomUUIDv5("api.example.com", dnsNamespace); + + expect(uuid1).not.toBe(uuid2); + }); + + test("hex encoding (default)", () => { + const result = Bun.randomUUIDv5("test", dnsNamespace); + expect(result).toMatch(/^[0-9a-f-]+$/); + expect(result.length).toBe(36); // Standard UUID string length + }); + + test("buffer encoding", () => { + const result = Bun.randomUUIDv5("test", dnsNamespace, "buffer"); + expect(result).toBeInstanceOf(Uint8Array); + expect(result.byteLength).toBe(16); + }); + + test("base64 encoding", () => { + const result = Bun.randomUUIDv5("test", dnsNamespace, "base64"); + expect(result).toBeTypeOf("string"); + expect(result).toMatch(/^[A-Za-z0-9+/=]+$/); + }); + + test("base64url encoding", () => { + const result = Bun.randomUUIDv5("test", dnsNamespace, "base64url"); + expect(result).toBeTypeOf("string"); + expect(result).toMatch(/^[A-Za-z0-9_-]+$/); + }); + + test("namespace as Buffer", () => { + // Convert UUID string to buffer + const nsBytes = new Uint8Array(16); + const nsString = 
dnsNamespace.replace(/-/g, ""); + for (let i = 0; i < 16; i++) { + nsBytes[i] = parseInt(nsString.substr(i * 2, 2), 16); + } + + const uuid1 = Bun.randomUUIDv5("test", dnsNamespace); + const uuid2 = Bun.randomUUIDv5("test", nsBytes); + + expect(uuid1).toBe(uuid2); + }); + + test("name as Buffer", () => { + const nameBuffer = new TextEncoder().encode("test"); + const uuid1 = Bun.randomUUIDv5("test", dnsNamespace); + const uuid2 = Bun.randomUUIDv5(nameBuffer, dnsNamespace); + + expect(uuid1).toBe(uuid2); + }); + + // Ported v5 tests from uuid library test suite + test("v5 - hello.example.com with DNS namespace", () => { + expect(Bun.randomUUIDv5("hello.example.com", dnsNamespace)).toBe("fdda765f-fc57-5604-a269-52a7df8164ec"); + }); + + test("v5 - http://example.com/hello with URL namespace", () => { + expect(Bun.randomUUIDv5("http://example.com/hello", urlNamespace)).toBe("3bbcee75-cecc-5b56-8031-b6641c1ed1f1"); + }); + + test("v5 - hello with custom namespace", () => { + expect(Bun.randomUUIDv5("hello", "0f5abcd1-c194-47f3-905b-2df7263a084b")).toBe( + "90123e1c-7512-523e-bb28-76fab9f2f73d", + ); + }); + + test("v5 namespace.toUpperCase", () => { + expect(Bun.randomUUIDv5("hello.example.com", dnsNamespace.toUpperCase())).toBe( + "fdda765f-fc57-5604-a269-52a7df8164ec", + ); + expect(Bun.randomUUIDv5("http://example.com/hello", urlNamespace.toUpperCase())).toBe( + "3bbcee75-cecc-5b56-8031-b6641c1ed1f1", + ); + expect(Bun.randomUUIDv5("hello", "0f5abcd1-c194-47f3-905b-2df7263a084b".toUpperCase())).toBe( + "90123e1c-7512-523e-bb28-76fab9f2f73d", + ); + }); + + test("v5 namespace string validation", () => { + expect(() => { + Bun.randomUUIDv5("hello.example.com", "zyxwvuts-rqpo-nmlk-jihg-fedcba000000"); + }).toThrow(); + + expect(() => { + Bun.randomUUIDv5("hello.example.com", "invalid uuid value"); + }).toThrow(); + + expect(Bun.randomUUIDv5("hello.example.com", "00000000-0000-0000-0000-000000000000")).toBeTypeOf("string"); + }); + + test("v5 namespace buffer validation", () => { + expect(() => { + Bun.randomUUIDv5("hello.example.com", new Uint8Array(15)); + }).toThrow(); + + expect(() => { + Bun.randomUUIDv5("hello.example.com", new Uint8Array(17)); + }).toThrow(); + + expect(Bun.randomUUIDv5("hello.example.com", new Uint8Array(16).fill(0))).toBeTypeOf("string"); + }); + + test("v5 fill buffer", () => { + const expectedUuid = Buffer.from([ + 0xfd, 0xda, 0x76, 0x5f, 0xfc, 0x57, 0x56, 0x04, 0xa2, 0x69, 0x52, 0xa7, 0xdf, 0x81, 0x64, 0xec, + ]); + + const result = Bun.randomUUIDv5("hello.example.com", dnsNamespace, "buffer"); + expect(result.toString("hex")).toEqual(expectedUuid.toString("hex")); + }); + + test("v5 undefined/null", () => { + // @ts-expect-error testing invalid input + expect(() => Bun.randomUUIDv5()).toThrow(); + // @ts-expect-error testing invalid input + expect(() => Bun.randomUUIDv5("hello")).toThrow(); + // @ts-expect-error testing invalid input + expect(() => Bun.randomUUIDv5("hello.example.com", undefined)).toThrow(); + // @ts-expect-error testing invalid input + expect(() => Bun.randomUUIDv5("hello.example.com", null)).toThrow(); + }); + + test("RFC 4122 test vectors", () => { + // These should be deterministic + const uuid1 = Bun.randomUUIDv5("http://www.example.com/", dnsNamespace); + const uuid2 = Bun.randomUUIDv5("http://www.example.com/", urlNamespace); + + // Both should be valid version 5 UUIDs + expect(uuid1).toEqual("b50f73c9-e407-5ea4-8540-70886e8aa2cd"); + expect(uuid2).toEqual("fcde3c85-2270-590f-9e7c-ee003d65e0e2"); + }); + + test("error cases", () => { + // 
Missing namespace + // @ts-expect-error + expect(() => Bun.randomUUIDv5()).toThrow(); + + // Missing name + // @ts-expect-error + expect(() => Bun.randomUUIDv5(dnsNamespace)).toThrow(); + + // Invalid namespace format + expect(() => Bun.randomUUIDv5("test", "invalid-uuid")).toThrow(); + + // Invalid encoding + // @ts-expect-error + expect(() => Bun.randomUUIDv5("test", dnsNamespace, "invalid")).toThrow(); + + // Namespace buffer wrong size + expect(() => Bun.randomUUIDv5("test", new Uint8Array(10))).toThrow(); + }); + + test("long names", () => { + const longName = "a".repeat(10000); + const result = Bun.randomUUIDv5(longName, dnsNamespace); + expect(result).toBeTypeOf("string"); + expect(result[14]).toBe("5"); + }); + + test("unicode names", () => { + const unicodeName = "测试🌟"; + const result = Bun.randomUUIDv5(unicodeName, dnsNamespace); + expect(result).toBeTypeOf("string"); + expect(result[14]).toBe("5"); + + // Should be deterministic + const uuid2 = Bun.randomUUIDv5(unicodeName, dnsNamespace); + expect(result).toBe(uuid2); + + expect(uuid.v5(unicodeName, dnsNamespace)).toBe(result); + }); + + test("variant bits are set correctly", () => { + const result = Bun.randomUUIDv5("test", dnsNamespace, "buffer"); + + // Check variant bits (bits 6-7 of clock_seq_hi_and_reserved should be 10) + const variantByte = result[8]; + const variantBits = (variantByte & 0xc0) >> 6; + expect(variantBits).toBe(2); // Binary 10 + + expect(uuid.v5("test", dnsNamespace).replace(/-/g, "")).toEqual(result.toString("hex")); + }); + + test("url namespace", () => { + const result = Bun.randomUUIDv5("test", "6ba7b811-9dad-11d1-80b4-00c04fd430c8"); + expect(result).toBeTypeOf("string"); + expect(result).toEqual("da5b8893-d6ca-5c1c-9a9c-91f40a2a3649"); + + expect(uuid.v5("test", urlNamespace)).toEqual(result); + }); + + test("dns namespace", () => { + const result = Bun.randomUUIDv5("test", "dns"); + expect(result).toBeTypeOf("string"); + expect(result[14]).toBe("5"); + expect(result).toEqual(uuid.v5("test", uuid.v5.DNS)); + }); + + test("consistent across multiple calls", () => { + const results: string[] = []; + for (let i = 0; i < 100; i++) { + results.push(Bun.randomUUIDv5("consistency-test", dnsNamespace)); + } + + // All results should be identical + const first = results[0]; + for (const result of results) { + expect(result).toBe(first); + } + }); +}); diff --git a/test/js/node/async_hooks/AsyncLocalStorage-tracking.test.ts b/test/js/node/async_hooks/AsyncLocalStorage-tracking.test.ts new file mode 100644 index 0000000000..7e77f3141e --- /dev/null +++ b/test/js/node/async_hooks/AsyncLocalStorage-tracking.test.ts @@ -0,0 +1,40 @@ +import { Glob } from "bun"; +import { describe, test } from "bun:test"; +import { bunEnv, bunExe, isASAN, isBroken, isLinux, nodeExe } from "harness"; +import { basename, join } from "path"; + +describe("AsyncLocalStorage passes context to callbacks", () => { + let files = [...new Glob(join(import.meta.dir, "async-context", "async-context-*.js")).scanSync()]; + + let todos = ["async-context-worker_threads-message.js"]; + if (isASAN && isBroken && isLinux) { + todos.push("async-context-dns-resolveTxt.js"); + } + + files = files.filter(file => !todos.includes(basename(file))); + + for (const filepath of files) { + const file = basename(filepath).replaceAll("async-context-", "").replaceAll(".js", ""); + test(file, async () => { + async function run(exe) { + const { exited } = Bun.spawn({ + cmd: [exe, filepath], + stdout: "inherit", + stderr: "inherit", + env: bunEnv, + }); + + if (await 
exited) { + throw new Error(`${basename(exe)} failed in ${filepath}`); + } + } + + await Promise.all([run(bunExe()), run(nodeExe())]); + }); + } + + for (const filepath of todos) { + const file = basename(filepath).replaceAll("async-context-", "").replaceAll(".js", ""); + test.todo(file); + } +}); diff --git a/test/js/node/async_hooks/async-context/async-context-async-iterator.js b/test/js/node/async_hooks/async-context/async-context-async-iterator.js new file mode 100644 index 0000000000..fe60d2d9e8 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-async-iterator.js @@ -0,0 +1,25 @@ +const { AsyncLocalStorage } = require("async_hooks"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +// Create an async generator +async function* asyncGenerator() { + yield 1; + yield 2; + yield 3; +} + +asyncLocalStorage.run({ test: "async.iterator" }, async () => { + try { + for await (const value of asyncGenerator()) { + if (asyncLocalStorage.getStore()?.test !== "async.iterator") { + console.error("FAIL: async iterator lost context at value", value); + process.exit(1); + } + } + process.exit(0); + } catch (err) { + console.error("ERROR:", err); + process.exit(1); + } +}); diff --git a/test/js/node/async_hooks/async-context/async-context-child_process-exec.js b/test/js/node/async_hooks/async-context/async-context-child_process-exec.js new file mode 100644 index 0000000000..9ff5acabeb --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-child_process-exec.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const { exec } = require("child_process"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "child_process.exec" }, () => { + exec("echo test", (error, stdout, stderr) => { + if (asyncLocalStorage.getStore()?.test !== "child_process.exec") { + console.error("FAIL: child_process.exec callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-child_process-execFile.js b/test/js/node/async_hooks/async-context/async-context-child_process-execFile.js new file mode 100644 index 0000000000..c3fe10e868 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-child_process-execFile.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const { execFile } = require("child_process"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "child_process.execFile" }, () => { + execFile("echo", ["test"], (error, stdout, stderr) => { + if (asyncLocalStorage.getStore()?.test !== "child_process.execFile") { + console.error("FAIL: child_process.execFile callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-child_process-spawn-events.js b/test/js/node/async_hooks/async-context/async-context-child_process-spawn-events.js new file mode 100644 index 0000000000..dd7b9ae218 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-child_process-spawn-events.js @@ -0,0 +1,35 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const { spawn } = require("child_process"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +asyncLocalStorage.run({ test: "child_process.spawn" }, () => { + const child = spawn("bun", ["-e", "Bun.sleepSync(100)"]); + + child.on("spawn", () => { + if (asyncLocalStorage.getStore()?.test !== 
"child_process.spawn") { + console.error("FAIL: spawn event lost context"); + failed = true; + } + }); + + child.stdout.on("data", data => { + if (asyncLocalStorage.getStore()?.test !== "child_process.spawn") { + console.error("FAIL: spawn stdout data event lost context"); + failed = true; + } + }); + + child.on("close", code => { + if (asyncLocalStorage.getStore()?.test !== "child_process.spawn") { + console.error("FAIL: spawn close event lost context"); + failed = true; + } + process.exit(failed ? 1 : 0); + }); + + child.on("error", () => { + process.exit(1); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-cipher.js b/test/js/node/async_hooks/async-context/async-context-crypto-cipher.js new file mode 100644 index 0000000000..ec6608dd83 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-cipher.js @@ -0,0 +1,31 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "crypto.cipher" }, () => { + const algorithm = "aes-256-cbc"; + const key = crypto.randomBytes(32); + const iv = crypto.randomBytes(16); + + const cipher = crypto.createCipheriv(algorithm, key, iv); + + cipher.on("readable", () => { + if (asyncLocalStorage.getStore()?.test !== "crypto.cipher") { + console.error("FAIL: crypto cipher readable event lost context"); + process.exit(1); + } + cipher.read(); + }); + + cipher.on("end", () => { + if (asyncLocalStorage.getStore()?.test !== "crypto.cipher") { + console.error("FAIL: crypto cipher end event lost context"); + process.exit(1); + } + process.exit(0); + }); + + cipher.write("test data"); + cipher.end(); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-generateKey.js b/test/js/node/async_hooks/async-context/async-context-crypto-generateKey.js new file mode 100644 index 0000000000..a9327bda68 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-generateKey.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "crypto.generateKey" }, () => { + crypto.generateKey("hmac", { length: 64 }, (err, key) => { + if (asyncLocalStorage.getStore()?.test !== "crypto.generateKey") { + console.error("FAIL: crypto.generateKey callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-generateKeyPair.js b/test/js/node/async_hooks/async-context/async-context-crypto-generateKeyPair.js new file mode 100644 index 0000000000..81df0fa59f --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-generateKeyPair.js @@ -0,0 +1,20 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "crypto.generateKeyPair" }, () => { + crypto.generateKeyPair( + "rsa", + { + modulusLength: 512, // Small for faster test + }, + (err, publicKey, privateKey) => { + if (asyncLocalStorage.getStore()?.test !== "crypto.generateKeyPair") { + console.error("FAIL: crypto.generateKeyPair callback lost context"); + process.exit(1); + } + process.exit(0); + }, + ); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-hash.js b/test/js/node/async_hooks/async-context/async-context-crypto-hash.js 
new file mode 100644 index 0000000000..74ed4f18d7 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-hash.js @@ -0,0 +1,19 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "crypto.hash" }, () => { + const hash = crypto.createHash("sha256"); + hash.update("test data"); + + // Test with callback style if available + setImmediate(() => { + if (asyncLocalStorage.getStore()?.test !== "crypto.hash") { + console.error("FAIL: crypto hash operation lost context"); + process.exit(1); + } + const digest = hash.digest("hex"); + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-pbkdf2.js b/test/js/node/async_hooks/async-context/async-context-crypto-pbkdf2.js new file mode 100644 index 0000000000..e308e73db8 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-pbkdf2.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "crypto.pbkdf2" }, () => { + crypto.pbkdf2("password", "salt", 100, 32, "sha256", (err, derivedKey) => { + if (asyncLocalStorage.getStore()?.test !== "crypto.pbkdf2") { + console.error("FAIL: crypto.pbkdf2 callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-randomBytes.js b/test/js/node/async_hooks/async-context/async-context-crypto-randomBytes.js new file mode 100644 index 0000000000..7e52553ce8 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-randomBytes.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "crypto.randomBytes" }, () => { + crypto.randomBytes(16, (err, buf) => { + if (asyncLocalStorage.getStore()?.test !== "crypto.randomBytes") { + console.error("FAIL: crypto.randomBytes callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-randomFill.js b/test/js/node/async_hooks/async-context/async-context-crypto-randomFill.js new file mode 100644 index 0000000000..2e69af78b2 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-randomFill.js @@ -0,0 +1,15 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "crypto.randomFill" }, () => { + const buffer = Buffer.alloc(16); + crypto.randomFill(buffer, (err, buf) => { + if (asyncLocalStorage.getStore()?.test !== "crypto.randomFill") { + console.error("FAIL: crypto.randomFill callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-randomInt.js b/test/js/node/async_hooks/async-context/async-context-crypto-randomInt.js new file mode 100644 index 0000000000..2053ad7ece --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-randomInt.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: 
"crypto.randomInt" }, () => { + crypto.randomInt(100, (err, n) => { + if (asyncLocalStorage.getStore()?.test !== "crypto.randomInt") { + console.error("FAIL: crypto.randomInt callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-randomUUID.js b/test/js/node/async_hooks/async-context/async-context-crypto-randomUUID.js new file mode 100644 index 0000000000..7985977a2d --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-randomUUID.js @@ -0,0 +1,17 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +// Note: crypto.randomUUID is synchronous in Node.js +// Testing if async wrapper maintains context +asyncLocalStorage.run({ test: "crypto.randomUUID" }, () => { + setImmediate(() => { + const uuid = crypto.randomUUID(); + if (asyncLocalStorage.getStore()?.test !== "crypto.randomUUID") { + console.error("FAIL: crypto.randomUUID async wrapper lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-scrypt.js b/test/js/node/async_hooks/async-context/async-context-crypto-scrypt.js new file mode 100644 index 0000000000..efa13aba37 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-scrypt.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "crypto.scrypt" }, () => { + crypto.scrypt("password", "salt", 32, (err, derivedKey) => { + if (asyncLocalStorage.getStore()?.test !== "crypto.scrypt") { + console.error("FAIL: crypto.scrypt callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-crypto-sign-verify.js b/test/js/node/async_hooks/async-context/async-context-crypto-sign-verify.js new file mode 100644 index 0000000000..b538b3d269 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-crypto-sign-verify.js @@ -0,0 +1,34 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const crypto = require("crypto"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +// First generate a key pair synchronously +const { publicKey, privateKey } = crypto.generateKeyPairSync("rsa", { + modulusLength: 512, +}); + +asyncLocalStorage.run({ test: "crypto.sign/verify" }, () => { + // Test sign with stream + const sign = crypto.createSign("SHA256"); + sign.write("test data"); + sign.end(); + + const signature = sign.sign(privateKey); + + // Test verify with stream + const verify = crypto.createVerify("SHA256"); + verify.write("test data"); + verify.end(); + + setImmediate(() => { + if (asyncLocalStorage.getStore()?.test !== "crypto.sign/verify") { + console.error("FAIL: crypto sign/verify lost context"); + failed = true; + } + + const isValid = verify.verify(publicKey, signature); + process.exit(failed ? 
1 : 0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-dgram-events.js b/test/js/node/async_hooks/async-context/async-context-dgram-events.js new file mode 100644 index 0000000000..f66aa3ff4e --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-dgram-events.js @@ -0,0 +1,39 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const dgram = require("dgram"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +asyncLocalStorage.run({ test: "dgram.events" }, () => { + const server = dgram.createSocket("udp4"); + const client = dgram.createSocket("udp4"); + + server.on("message", (msg, rinfo) => { + if (asyncLocalStorage.getStore()?.test !== "dgram.events") { + console.error("FAIL: dgram message event lost context"); + failed = true; + } + server.close(); + client.close(); + }); + + server.on("listening", () => { + if (asyncLocalStorage.getStore()?.test !== "dgram.events") { + console.error("FAIL: dgram listening event lost context"); + failed = true; + } + + const port = server.address().port; + client.send("test", port, "localhost"); + }); + + server.on("close", () => { + if (asyncLocalStorage.getStore()?.test !== "dgram.events") { + console.error("FAIL: dgram close event lost context"); + failed = true; + } + process.exit(failed ? 1 : 0); + }); + + server.bind(0); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-dgram-send.js b/test/js/node/async_hooks/async-context/async-context-dgram-send.js new file mode 100644 index 0000000000..ac72d16b1e --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-dgram-send.js @@ -0,0 +1,25 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const dgram = require("dgram"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "dgram.send" }, () => { + const server = dgram.createSocket("udp4"); + const client = dgram.createSocket("udp4"); + + server.on("message", () => { + server.close(); + client.close(); + }); + + server.bind(0, () => { + const port = server.address().port; + client.send("test", port, "localhost", err => { + if (asyncLocalStorage.getStore()?.test !== "dgram.send") { + console.error("FAIL: dgram.send callback lost context"); + process.exit(1); + } + setTimeout(() => process.exit(0), 100); + }); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-dns-lookup.js b/test/js/node/async_hooks/async-context/async-context-dns-lookup.js new file mode 100644 index 0000000000..e4d6df2f03 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-dns-lookup.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const dns = require("dns"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "dns.lookup" }, () => { + dns.lookup("localhost", (err, address, family) => { + if (asyncLocalStorage.getStore()?.test !== "dns.lookup") { + console.error("FAIL: dns.lookup callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-dns-resolve4.js b/test/js/node/async_hooks/async-context/async-context-dns-resolve4.js new file mode 100644 index 0000000000..af901f606e --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-dns-resolve4.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const dns = require("dns"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ 
test: "dns.resolve4" }, () => { + dns.resolve4("localhost", (err, addresses) => { + if (asyncLocalStorage.getStore()?.test !== "dns.resolve4") { + console.error("FAIL: dns.resolve4 callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-dns-resolveCname.js b/test/js/node/async_hooks/async-context/async-context-dns-resolveCname.js new file mode 100644 index 0000000000..bf33f21d88 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-dns-resolveCname.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const dns = require("dns"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "dns.resolveCname" }, () => { + dns.resolveCname("www.example.com", (err, addresses) => { + if (asyncLocalStorage.getStore()?.test !== "dns.resolveCname") { + console.error("FAIL: dns.resolveCname callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-dns-resolveMx.js b/test/js/node/async_hooks/async-context/async-context-dns-resolveMx.js new file mode 100644 index 0000000000..51d739f33f --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-dns-resolveMx.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const dns = require("dns"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "dns.resolveMx" }, () => { + dns.resolveMx("example.com", (err, addresses) => { + if (asyncLocalStorage.getStore()?.test !== "dns.resolveMx") { + console.error("FAIL: dns.resolveMx callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-dns-resolveTxt.js b/test/js/node/async_hooks/async-context/async-context-dns-resolveTxt.js new file mode 100644 index 0000000000..b29368a0ab --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-dns-resolveTxt.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const dns = require("dns"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "dns.resolveTxt" }, () => { + dns.resolveTxt("google.com", (err, records) => { + if (asyncLocalStorage.getStore()?.test !== "dns.resolveTxt") { + console.error("FAIL: dns.resolveTxt callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-dns-reverse.js b/test/js/node/async_hooks/async-context/async-context-dns-reverse.js new file mode 100644 index 0000000000..f3ce75f8fe --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-dns-reverse.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const dns = require("dns"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "dns.reverse" }, () => { + dns.reverse("8.8.8.8", (err, hostnames) => { + if (asyncLocalStorage.getStore()?.test !== "dns.reverse") { + console.error("FAIL: dns.reverse callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-events-emitter.js b/test/js/node/async_hooks/async-context/async-context-events-emitter.js new file mode 100644 index 0000000000..e518d6e2d4 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-events-emitter.js @@ -0,0 +1,43 @@ 
+const { AsyncLocalStorage } = require("async_hooks"); +const { EventEmitter } = require("events"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +asyncLocalStorage.run({ test: "EventEmitter" }, () => { + const emitter = new EventEmitter(); + + // Test regular event + emitter.on("test", () => { + if (asyncLocalStorage.getStore()?.test !== "EventEmitter") { + console.error("FAIL: EventEmitter listener lost context"); + failed = true; + } + }); + + // Test once event + emitter.once("once-test", () => { + if (asyncLocalStorage.getStore()?.test !== "EventEmitter") { + console.error("FAIL: EventEmitter once listener lost context"); + failed = true; + } + }); + + // Test async event handler + emitter.on("async-test", async () => { + await new Promise(resolve => setImmediate(resolve)); + if (asyncLocalStorage.getStore()?.test !== "EventEmitter") { + console.error("FAIL: EventEmitter async listener lost context"); + failed = true; + } + }); + + // Emit events + emitter.emit("test"); + emitter.emit("once-test"); + emitter.emit("async-test"); + + setTimeout(() => { + process.exit(failed ? 1 : 0); + }, 100); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-events-on-async.js b/test/js/node/async_hooks/async-context/async-context-events-on-async.js new file mode 100644 index 0000000000..9e716b1895 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-events-on-async.js @@ -0,0 +1,32 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const { EventEmitter, on } = require("events"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "events.on" }, async () => { + const emitter = new EventEmitter(); + + // Start async iterator in background + (async () => { + try { + for await (const [value] of on(emitter, "data")) { + if (asyncLocalStorage.getStore()?.test !== "events.on") { + console.error("FAIL: events.on async iterator lost context"); + process.exit(1); + } + if (value === "end") break; + } + process.exit(0); + } catch (err) { + console.error("ERROR:", err); + process.exit(1); + } + })(); + + // Emit events after a delay + setTimeout(() => { + emitter.emit("data", "test1"); + emitter.emit("data", "test2"); + emitter.emit("data", "end"); + }, 10); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-access.js b/test/js/node/async_hooks/async-context/async-context-fs-access.js new file mode 100644 index 0000000000..3d2381910e --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-access.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "fs.access" }, () => { + fs.access(__filename, fs.constants.R_OK, err => { + if (asyncLocalStorage.getStore()?.test !== "fs.access") { + console.error("FAIL: fs.access callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-appendFile.js b/test/js/node/async_hooks/async-context/async-context-fs-appendFile.js new file mode 100644 index 0000000000..e14c65670a --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-appendFile.js @@ -0,0 +1,20 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("fstest"), 
"appendfile-test-" + Date.now() + ".txt"); + +asyncLocalStorage.run({ test: "fs.appendFile" }, () => { + fs.appendFile(testFile, "test data", err => { + if (asyncLocalStorage.getStore()?.test !== "fs.appendFile") { + console.error("FAIL: fs.appendFile callback lost context"); + try { + fs.unlinkSync(testFile); + } catch {} + process.exit(1); + } + fs.unlinkSync(testFile); + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-chmod.js b/test/js/node/async_hooks/async-context/async-context-fs-chmod.js new file mode 100644 index 0000000000..cfcaa0ebdf --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-chmod.js @@ -0,0 +1,20 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("fstest"), "chmod-test-" + Date.now() + ".txt"); + +fs.writeFileSync(testFile, "test"); + +asyncLocalStorage.run({ test: "fs.chmod" }, () => { + fs.chmod(testFile, 0o644, err => { + if (asyncLocalStorage.getStore()?.test !== "fs.chmod") { + console.error("FAIL: fs.chmod callback lost context"); + fs.unlinkSync(testFile); + process.exit(1); + } + fs.unlinkSync(testFile); + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-copyFile.js b/test/js/node/async_hooks/async-context/async-context-fs-copyFile.js new file mode 100644 index 0000000000..a9ca9899fa --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-copyFile.js @@ -0,0 +1,27 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const dir = fs.mkdtempSync("copy-test-"); +const srcFile = path.join(dir, "copy-src-" + Date.now() + ".txt"); +const destFile = path.join(dir, "copy-dest-" + Date.now() + ".txt"); + +fs.writeFileSync(srcFile, "test data"); + +asyncLocalStorage.run({ test: "fs.copyFile" }, () => { + fs.copyFile(srcFile, destFile, err => { + if (asyncLocalStorage.getStore()?.test !== "fs.copyFile") { + console.error("FAIL: fs.copyFile callback lost context"); + try { + fs.unlinkSync(srcFile); + fs.unlinkSync(destFile); + } catch {} + process.exit(1); + } + fs.unlinkSync(srcFile); + fs.unlinkSync(destFile); + fs.rmdirSync(dir); + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-createReadStream.js b/test/js/node/async_hooks/async-context/async-context-fs-createReadStream.js new file mode 100644 index 0000000000..6edcdb0d97 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-createReadStream.js @@ -0,0 +1,37 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("fstest"), "readstream-test-" + Date.now() + ".txt"); +let failed = false; + +// Create test file +fs.writeFileSync(testFile, "test data for read stream"); + +asyncLocalStorage.run({ test: "fs.createReadStream" }, () => { + const stream = fs.createReadStream(testFile); + + stream.on("data", chunk => { + if (asyncLocalStorage.getStore()?.test !== "fs.createReadStream") { + console.error("FAIL: fs.createReadStream data event lost context"); + failed = true; + } + }); + + stream.on("end", () => { + if (asyncLocalStorage.getStore()?.test !== "fs.createReadStream") { + 
console.error("FAIL: fs.createReadStream end event lost context"); + failed = true; + } + }); + + stream.on("close", () => { + if (asyncLocalStorage.getStore()?.test !== "fs.createReadStream") { + console.error("FAIL: fs.createReadStream close event lost context"); + failed = true; + } + fs.unlinkSync(testFile); + process.exit(failed ? 1 : 0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-createWriteStream.js b/test/js/node/async_hooks/async-context/async-context-fs-createWriteStream.js new file mode 100644 index 0000000000..eb81a37156 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-createWriteStream.js @@ -0,0 +1,30 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("fstest"), "writestream-test-" + Date.now() + ".txt"); +let failed = false; + +asyncLocalStorage.run({ test: "fs.createWriteStream" }, () => { + const stream = fs.createWriteStream(testFile); + + stream.on("finish", () => { + if (asyncLocalStorage.getStore()?.test !== "fs.createWriteStream") { + console.error("FAIL: fs.createWriteStream finish event lost context"); + failed = true; + } + }); + + stream.on("close", () => { + if (asyncLocalStorage.getStore()?.test !== "fs.createWriteStream") { + console.error("FAIL: fs.createWriteStream close event lost context"); + failed = true; + } + fs.unlinkSync(testFile); + process.exit(failed ? 1 : 0); + }); + + stream.write("test data"); + stream.end(); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-fstat.js b/test/js/node/async_hooks/async-context/async-context-fs-fstat.js new file mode 100644 index 0000000000..8fdeec3ff3 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-fstat.js @@ -0,0 +1,30 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("fstest"), "fstat-test-" + Date.now() + ".txt"); + +fs.writeFileSync(testFile, "test"); + +asyncLocalStorage.run({ test: "fs.fstat" }, () => { + fs.open(testFile, "r", (err, fd) => { + if (err) { + console.error("ERROR:", err); + process.exit(1); + } + + fs.fstat(fd, (err, stats) => { + if (asyncLocalStorage.getStore()?.test !== "fs.fstat") { + console.error("FAIL: fs.fstat callback lost context"); + fs.closeSync(fd); + fs.unlinkSync(testFile); + process.exit(1); + } + + fs.closeSync(fd); + fs.unlinkSync(testFile); + process.exit(0); + }); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-lstat.js b/test/js/node/async_hooks/async-context/async-context-fs-lstat.js new file mode 100644 index 0000000000..fb194d909c --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-lstat.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "fs.lstat" }, () => { + fs.lstat(__filename, (err, stats) => { + if (asyncLocalStorage.getStore()?.test !== "fs.lstat") { + console.error("FAIL: fs.lstat callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-mkdir.js b/test/js/node/async_hooks/async-context/async-context-fs-mkdir.js new file mode 100644 index 
0000000000..28f548cf10 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-mkdir.js @@ -0,0 +1,22 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const dir = fs.mkdtempSync("mkdir-test-"); +const testDir = path.join(dir, "mkdir-test-" + Date.now()); + +asyncLocalStorage.run({ test: "fs.mkdir" }, () => { + fs.mkdir(testDir, err => { + if (asyncLocalStorage.getStore()?.test !== "fs.mkdir") { + console.error("FAIL: fs.mkdir callback lost context"); + try { + fs.rmdirSync(testDir); + } catch {} + process.exit(1); + } + fs.rmdirSync(testDir); + fs.rmdirSync(dir); + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-mkdtemp.js b/test/js/node/async_hooks/async-context/async-context-fs-mkdtemp.js new file mode 100644 index 0000000000..4e74138b61 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-mkdtemp.js @@ -0,0 +1,20 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "fs.mkdtemp" }, () => { + const dir = fs.mkdtempSync("test-"); + fs.mkdtemp(path.join(dir, "test-"), (err, directory) => { + if (asyncLocalStorage.getStore()?.test !== "fs.mkdtemp") { + console.error("FAIL: fs.mkdtemp callback lost context"); + try { + fs.rmdirSync(directory); + } catch {} + process.exit(1); + } + fs.rmdirSync(directory); + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-open.js b/test/js/node/async_hooks/async-context/async-context-fs-open.js new file mode 100644 index 0000000000..1d0c6dd072 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-open.js @@ -0,0 +1,35 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("fstest"), "open-test-" + Date.now() + ".txt"); + +fs.writeFileSync(testFile, "test"); + +asyncLocalStorage.run({ test: "fs.open" }, () => { + fs.open(testFile, "r", (err, fd) => { + if (err) { + console.error("ERROR:", err); + process.exit(1); + } + + if (asyncLocalStorage.getStore()?.test !== "fs.open") { + console.error("FAIL: fs.open callback lost context"); + fs.closeSync(fd); + fs.unlinkSync(testFile); + process.exit(1); + } + + // Test fs.close + fs.close(fd, err => { + if (asyncLocalStorage.getStore()?.test !== "fs.open") { + console.error("FAIL: fs.close callback lost context"); + fs.unlinkSync(testFile); + process.exit(1); + } + fs.unlinkSync(testFile); + process.exit(0); + }); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-promises.js b/test/js/node/async_hooks/async-context/async-context-fs-promises.js new file mode 100644 index 0000000000..be3e8cce7e --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-promises.js @@ -0,0 +1,28 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs").promises; +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(require("fs").mkdtempSync("fstest"), "promises-test-" + Date.now() + ".txt"); + +asyncLocalStorage.run({ test: "fs.promises" }, async () => { + try { + await fs.writeFile(testFile, "test"); + if 
(asyncLocalStorage.getStore()?.test !== "fs.promises") { + console.error("FAIL: fs.promises.writeFile lost context"); + process.exit(1); + } + + await fs.readFile(testFile); + if (asyncLocalStorage.getStore()?.test !== "fs.promises") { + console.error("FAIL: fs.promises.readFile lost context"); + process.exit(1); + } + + await fs.unlink(testFile); + process.exit(0); + } catch (err) { + console.error("ERROR:", err); + process.exit(1); + } +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-read.js b/test/js/node/async_hooks/async-context/async-context-fs-read.js new file mode 100644 index 0000000000..a1251bb09c --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-read.js @@ -0,0 +1,31 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("fstest"), "read-test-" + Date.now() + ".txt"); + +fs.writeFileSync(testFile, "test data for read"); + +asyncLocalStorage.run({ test: "fs.read" }, () => { + fs.open(testFile, "r", (err, fd) => { + if (err) { + console.error("ERROR:", err); + process.exit(1); + } + + const buffer = Buffer.alloc(10); + fs.read(fd, buffer, 0, 10, 0, (err, bytesRead, buffer) => { + if (asyncLocalStorage.getStore()?.test !== "fs.read") { + console.error("FAIL: fs.read callback lost context"); + fs.closeSync(fd); + fs.unlinkSync(testFile); + process.exit(1); + } + + fs.closeSync(fd); + fs.unlinkSync(testFile); + process.exit(0); + }); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-readdir.js b/test/js/node/async_hooks/async-context/async-context-fs-readdir.js new file mode 100644 index 0000000000..83c6fc3616 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-readdir.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "fs.readdir" }, () => { + fs.readdir("/tmp", (err, files) => { + if (asyncLocalStorage.getStore()?.test !== "fs.readdir") { + console.error("FAIL: fs.readdir callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-realpath.js b/test/js/node/async_hooks/async-context/async-context-fs-realpath.js new file mode 100644 index 0000000000..bf0fced6ca --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-realpath.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "fs.realpath" }, () => { + fs.realpath("/tmp", (err, resolvedPath) => { + if (asyncLocalStorage.getStore()?.test !== "fs.realpath") { + console.error("FAIL: fs.realpath callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-rename.js b/test/js/node/async_hooks/async-context/async-context-fs-rename.js new file mode 100644 index 0000000000..81141e8a55 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-rename.js @@ -0,0 +1,28 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const dir = fs.mkdtempSync("rename-test-"); +const oldPath = 
path.join(dir, "rename-old-" + Date.now() + ".txt"); +const newPath = path.join(dir, "rename-new-" + Date.now() + ".txt"); + +fs.writeFileSync(oldPath, "test"); + +asyncLocalStorage.run({ test: "fs.rename" }, () => { + fs.rename(oldPath, newPath, err => { + if (asyncLocalStorage.getStore()?.test !== "fs.rename") { + console.error("FAIL: fs.rename callback lost context"); + try { + fs.unlinkSync(oldPath); + } catch {} + try { + fs.unlinkSync(newPath); + } catch {} + process.exit(1); + } + fs.unlinkSync(newPath); + fs.rmdirSync(dir); + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-rmdir.js b/test/js/node/async_hooks/async-context/async-context-fs-rmdir.js new file mode 100644 index 0000000000..b60d86cea1 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-rmdir.js @@ -0,0 +1,20 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const dir = fs.mkdtempSync("rmdir-test-"); +const testDir = path.join(dir, "rmdir-test-" + Date.now()); + +fs.mkdirSync(testDir); + +asyncLocalStorage.run({ test: "fs.rmdir" }, () => { + fs.rmdir(testDir, err => { + if (asyncLocalStorage.getStore()?.test !== "fs.rmdir") { + console.error("FAIL: fs.rmdir callback lost context"); + process.exit(1); + } + fs.rmdirSync(dir); + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-stat.js b/test/js/node/async_hooks/async-context/async-context-fs-stat.js new file mode 100644 index 0000000000..4ca3e9a2fb --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-stat.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "fs.stat" }, () => { + fs.stat(__filename, (err, stats) => { + if (asyncLocalStorage.getStore()?.test !== "fs.stat") { + console.error("FAIL: fs.stat callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-truncate.js b/test/js/node/async_hooks/async-context/async-context-fs-truncate.js new file mode 100644 index 0000000000..47feb2aa7f --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-truncate.js @@ -0,0 +1,22 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("fstest"), "truncate-test-" + Date.now() + ".txt"); + +fs.writeFileSync(testFile, "test data for truncation"); + +asyncLocalStorage.run({ test: "fs.truncate" }, () => { + fs.truncate(testFile, 5, err => { + if (asyncLocalStorage.getStore()?.test !== "fs.truncate") { + console.error("FAIL: fs.truncate callback lost context"); + try { + fs.unlinkSync(testFile); + } catch {} + process.exit(1); + } + fs.unlinkSync(testFile); + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-unlink.js b/test/js/node/async_hooks/async-context/async-context-fs-unlink.js new file mode 100644 index 0000000000..34749e8fbb --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-unlink.js @@ -0,0 +1,18 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new 
AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("unlink-test"), "unlink-test-" + Date.now() + ".txt"); + +fs.writeFileSync(testFile, "test"); + +asyncLocalStorage.run({ test: "fs.unlink" }, () => { + fs.unlink(testFile, err => { + if (asyncLocalStorage.getStore()?.test !== "fs.unlink") { + console.error("FAIL: fs.unlink callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-watch.js b/test/js/node/async_hooks/async-context/async-context-fs-watch.js new file mode 100644 index 0000000000..561e0defeb --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-watch.js @@ -0,0 +1,40 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("watch-test"), "watch-test-" + Date.now() + ".txt"); + +asyncLocalStorage.run({ test: "fs.watch" }, () => { + fs.writeFileSync(testFile, "initial"); + + const watcher = fs.watch(testFile, (eventType, filename) => { + if (asyncLocalStorage.getStore()?.test !== "fs.watch") { + console.error("FAIL: fs.watch callback lost context"); + watcher.close(); + try { + fs.unlinkSync(testFile); + } catch {} + process.exit(1); + } + watcher.close(); + try { + fs.unlinkSync(testFile); + } catch {} + process.exit(0); + }); + + // Trigger the watch event + setTimeout(() => { + fs.writeFileSync(testFile, "modified"); + }, 100); + + // Timeout safety + setTimeout(() => { + watcher.close(); + try { + fs.unlinkSync(testFile); + } catch {} + process.exit(0); + }, 5000); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-fs-watchFile.js b/test/js/node/async_hooks/async-context/async-context-fs-watchFile.js new file mode 100644 index 0000000000..caa5127ef1 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-fs-watchFile.js @@ -0,0 +1,40 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const fs = require("fs"); +const path = require("path"); + +const asyncLocalStorage = new AsyncLocalStorage(); +const testFile = path.join(fs.mkdtempSync("watchfile-test"), "watchfile-test-" + Date.now() + ".txt"); + +asyncLocalStorage.run({ test: "fs.watchFile" }, () => { + fs.writeFileSync(testFile, "initial"); + + fs.watchFile(testFile, { interval: 50 }, (curr, prev) => { + if (asyncLocalStorage.getStore()?.test !== "fs.watchFile") { + console.error("FAIL: fs.watchFile callback lost context"); + fs.unwatchFile(testFile); + try { + fs.unlinkSync(testFile); + } catch {} + process.exit(1); + } + fs.unwatchFile(testFile); + try { + fs.unlinkSync(testFile); + } catch {} + process.exit(0); + }); + + // Trigger the watch event + setTimeout(() => { + fs.writeFileSync(testFile, "modified"); + }, 100); + + // Timeout safety + setTimeout(() => { + fs.unwatchFile(testFile); + try { + fs.unlinkSync(testFile); + } catch {} + process.exit(0); + }, 5000); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-http-clientrequest.js b/test/js/node/async_hooks/async-context/async-context-http-clientrequest.js new file mode 100644 index 0000000000..505bdc45ed --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-http-clientrequest.js @@ -0,0 +1,48 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const http = require("http"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +const server = http.createServer((req, res) => { 
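+  // Minimal origin server; the client below exercises the ClientRequest "response", "finish", and "close" events.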
+ res.writeHead(200); + res.end("ok"); +}); + +server.listen(0, () => { + const port = server.address().port; + + asyncLocalStorage.run({ test: "http.ClientRequest" }, () => { + const req = http.request({ + port, + method: "POST", + }); + + req.on("response", res => { + if (asyncLocalStorage.getStore()?.test !== "http.ClientRequest") { + console.error("FAIL: ClientRequest response event lost context"); + failed = true; + } + res.resume(); + }); + + req.on("finish", () => { + if (asyncLocalStorage.getStore()?.test !== "http.ClientRequest") { + console.error("FAIL: ClientRequest finish event lost context"); + failed = true; + } + }); + + req.on("close", () => { + if (asyncLocalStorage.getStore()?.test !== "http.ClientRequest") { + console.error("FAIL: ClientRequest close event lost context"); + failed = true; + } + server.close(); + process.exit(failed ? 1 : 0); + }); + + req.write("test data"); + req.end(); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-http-request.js b/test/js/node/async_hooks/async-context/async-context-http-request.js new file mode 100644 index 0000000000..e4c7940eef --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-http-request.js @@ -0,0 +1,51 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const http = require("http"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +const server = http.createServer((req, res) => { + res.end("ok"); +}); + +server.listen(0, () => { + const port = server.address().port; + + asyncLocalStorage.run({ test: "http.request" }, () => { + const req = http.request( + { + port, + method: "GET", + }, + res => { + if (asyncLocalStorage.getStore()?.test !== "http.request") { + console.error("FAIL: http.request response callback lost context"); + failed = true; + } + + res.on("data", chunk => { + if (asyncLocalStorage.getStore()?.test !== "http.request") { + console.error("FAIL: http response data event lost context"); + failed = true; + } + }); + + res.on("end", () => { + if (asyncLocalStorage.getStore()?.test !== "http.request") { + console.error("FAIL: http response end event lost context"); + failed = true; + } + server.close(); + process.exit(failed ? 
1 : 0); + }); + }, + ); + + req.on("error", () => { + server.close(); + process.exit(1); + }); + + req.end(); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-https-request.js b/test/js/node/async_hooks/async-context/async-context-https-request.js new file mode 100644 index 0000000000..4fdc6791a0 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-https-request.js @@ -0,0 +1,25 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const https = require("https"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "https.request" }, () => { + const req = https.request("https://httpbin.org/get", res => { + if (asyncLocalStorage.getStore()?.test !== "https.request") { + console.error("FAIL: https.request response callback lost context"); + process.exit(1); + } + + res.on("data", () => {}); + res.on("end", () => { + process.exit(0); + }); + }); + + req.on("error", () => { + // Skip test if network is unavailable + process.exit(0); + }); + + req.end(); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-net-connect.js b/test/js/node/async_hooks/async-context/async-context-net-connect.js new file mode 100644 index 0000000000..e21b95f1f7 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-net-connect.js @@ -0,0 +1,30 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const net = require("net"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +const server = net.createServer(); +server.listen(0, () => { + const port = server.address().port; + + asyncLocalStorage.run({ test: "net.connect" }, () => { + const client = net.connect(port, () => { + if (asyncLocalStorage.getStore()?.test !== "net.connect") { + console.error("FAIL: net.connect callback lost context"); + failed = true; + } + client.end(); + }); + + client.on("close", () => { + if (asyncLocalStorage.getStore()?.test !== "net.connect") { + console.error("FAIL: net socket close event lost context"); + failed = true; + } + server.close(() => { + process.exit(failed ? 1 : 0); + }); + }); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-net-server.js b/test/js/node/async_hooks/async-context/async-context-net-server.js new file mode 100644 index 0000000000..beaa9ba2ba --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-net-server.js @@ -0,0 +1,37 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const net = require("net"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +asyncLocalStorage.run({ test: "net.Server" }, () => { + const server = net.createServer(); + + server.on("connection", socket => { + if (asyncLocalStorage.getStore()?.test !== "net.Server") { + console.error("FAIL: net.Server connection event lost context"); + failed = true; + } + socket.end(); + }); + + server.on("listening", () => { + if (asyncLocalStorage.getStore()?.test !== "net.Server") { + console.error("FAIL: net.Server listening event lost context"); + failed = true; + } + + // Connect to trigger connection event + const client = net.connect(server.address().port); + client.on("close", () => { + // Give time for server connection event to fire + setTimeout(() => { + server.close(() => { + process.exit(failed ? 
1 : 0); + }); + }, 50); + }); + }); + + server.listen(0); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-net-socket-write.js b/test/js/node/async_hooks/async-context/async-context-net-socket-write.js new file mode 100644 index 0000000000..a9964a08ec --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-net-socket-write.js @@ -0,0 +1,42 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const net = require("net"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +const server = net.createServer(socket => { + socket.on("data", () => { + socket.end(); + }); +}); + +server.listen(0, () => { + const port = server.address().port; + + asyncLocalStorage.run({ test: "net.Socket.write" }, () => { + const client = net.connect(port); + + client.on("connect", () => { + // Test write callback + client.write("test data", err => { + if (asyncLocalStorage.getStore()?.test !== "net.Socket.write") { + console.error("FAIL: net.Socket write callback lost context"); + failed = true; + } + }); + + // Test end callback + client.end("final data", err => { + if (asyncLocalStorage.getStore()?.test !== "net.Socket.write") { + console.error("FAIL: net.Socket end callback lost context"); + failed = true; + } + }); + }); + + client.on("close", () => { + server.close(); + process.exit(failed ? 1 : 0); + }); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-process-nextTick.js b/test/js/node/async_hooks/async-context/async-context-process-nextTick.js new file mode 100644 index 0000000000..ab2524eb5a --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-process-nextTick.js @@ -0,0 +1,21 @@ +const { AsyncLocalStorage } = require("async_hooks"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "process.nextTick" }, () => { + process.nextTick(() => { + if (asyncLocalStorage.getStore()?.test !== "process.nextTick") { + console.error("FAIL: process.nextTick callback lost context"); + process.exit(1); + } + + // Test nested nextTick + process.nextTick(() => { + if (asyncLocalStorage.getStore()?.test !== "process.nextTick") { + console.error("FAIL: nested process.nextTick callback lost context"); + process.exit(1); + } + process.exit(0); + }); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-queueMicrotask.js b/test/js/node/async_hooks/async-context/async-context-queueMicrotask.js new file mode 100644 index 0000000000..4f82d66503 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-queueMicrotask.js @@ -0,0 +1,13 @@ +const { AsyncLocalStorage } = require("async_hooks"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "queueMicrotask" }, () => { + queueMicrotask(() => { + if (asyncLocalStorage.getStore()?.test !== "queueMicrotask") { + console.error("FAIL: queueMicrotask callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-readline-interface.js b/test/js/node/async_hooks/async-context/async-context-readline-interface.js new file mode 100644 index 0000000000..a8a4be084f --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-readline-interface.js @@ -0,0 +1,37 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const readline = require("readline"); +const { Readable } = require("stream"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + 
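+// The readline Interface's "line" and "close" events should observe the store entered below.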
+asyncLocalStorage.run({ test: "readline" }, () => { + const input = new Readable({ + read() {}, + }); + + const rl = readline.createInterface({ + input, + output: process.stdout, + terminal: false, + }); + + rl.on("line", line => { + if (asyncLocalStorage.getStore()?.test !== "readline") { + console.error("FAIL: readline line event lost context"); + failed = true; + } + }); + + rl.on("close", () => { + if (asyncLocalStorage.getStore()?.test !== "readline") { + console.error("FAIL: readline close event lost context"); + failed = true; + } + process.exit(failed ? 1 : 0); + }); + + // Send data and close + input.push("test line\n"); + input.push(null); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-stream-async-iterator.js b/test/js/node/async_hooks/async-context/async-context-stream-async-iterator.js new file mode 100644 index 0000000000..1b896c7c88 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-stream-async-iterator.js @@ -0,0 +1,28 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const { Readable } = require("stream"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "stream.async.iterator" }, async () => { + const readable = new Readable({ + read() { + this.push("a"); + this.push("b"); + this.push("c"); + this.push(null); + }, + }); + + try { + for await (const chunk of readable) { + if (asyncLocalStorage.getStore()?.test !== "stream.async.iterator") { + console.error("FAIL: stream async iterator lost context"); + process.exit(1); + } + } + process.exit(0); + } catch (err) { + console.error("ERROR:", err); + process.exit(1); + } +}); diff --git a/test/js/node/async_hooks/async-context/async-context-stream-readable.js b/test/js/node/async_hooks/async-context/async-context-stream-readable.js new file mode 100644 index 0000000000..43e80abec8 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-stream-readable.js @@ -0,0 +1,36 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const { Readable } = require("stream"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +asyncLocalStorage.run({ test: "stream.Readable" }, () => { + const readable = new Readable({ + read() { + this.push("test"); + this.push(null); + }, + }); + + readable.on("data", chunk => { + if (asyncLocalStorage.getStore()?.test !== "stream.Readable") { + console.error("FAIL: Readable stream data event lost context"); + failed = true; + } + }); + + readable.on("end", () => { + if (asyncLocalStorage.getStore()?.test !== "stream.Readable") { + console.error("FAIL: Readable stream end event lost context"); + failed = true; + } + process.exit(failed ? 
1 : 0); + }); + + readable.on("close", () => { + if (asyncLocalStorage.getStore()?.test !== "stream.Readable") { + console.error("FAIL: Readable stream close event lost context"); + failed = true; + } + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-stream-transform.js b/test/js/node/async_hooks/async-context/async-context-stream-transform.js new file mode 100644 index 0000000000..49f9ecc0be --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-stream-transform.js @@ -0,0 +1,35 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const { Transform } = require("stream"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +asyncLocalStorage.run({ test: "stream.Transform" }, () => { + const transform = new Transform({ + transform(chunk, encoding, callback) { + if (asyncLocalStorage.getStore()?.test !== "stream.Transform") { + console.error("FAIL: Transform stream transform method lost context"); + failed = true; + } + callback(null, chunk); + }, + }); + + transform.on("data", chunk => { + if (asyncLocalStorage.getStore()?.test !== "stream.Transform") { + console.error("FAIL: Transform stream data event lost context"); + failed = true; + } + }); + + transform.on("end", () => { + if (asyncLocalStorage.getStore()?.test !== "stream.Transform") { + console.error("FAIL: Transform stream end event lost context"); + failed = true; + } + process.exit(failed ? 1 : 0); + }); + + transform.write("test"); + transform.end(); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-stream-writable.js b/test/js/node/async_hooks/async-context/async-context-stream-writable.js new file mode 100644 index 0000000000..baa494dc03 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-stream-writable.js @@ -0,0 +1,28 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const { Writable } = require("stream"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +asyncLocalStorage.run({ test: "stream.Writable" }, () => { + const writable = new Writable({ + write(chunk, encoding, callback) { + if (asyncLocalStorage.getStore()?.test !== "stream.Writable") { + console.error("FAIL: Writable stream write method lost context"); + failed = true; + } + callback(); + }, + }); + + writable.on("finish", () => { + if (asyncLocalStorage.getStore()?.test !== "stream.Writable") { + console.error("FAIL: Writable stream finish event lost context"); + failed = true; + } + process.exit(failed ? 
1 : 0); + }); + + writable.write("test"); + writable.end(); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-timers-promises.js b/test/js/node/async_hooks/async-context/async-context-timers-promises.js new file mode 100644 index 0000000000..3c3f5755c6 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-timers-promises.js @@ -0,0 +1,27 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const timers = require("timers/promises"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "timers.promises" }, async () => { + try { + // Test setTimeout promise + await timers.setTimeout(10); + if (asyncLocalStorage.getStore()?.test !== "timers.promises") { + console.error("FAIL: timers.promises.setTimeout lost context"); + process.exit(1); + } + + // Test setImmediate promise + await timers.setImmediate(); + if (asyncLocalStorage.getStore()?.test !== "timers.promises") { + console.error("FAIL: timers.promises.setImmediate lost context"); + process.exit(1); + } + + process.exit(0); + } catch (err) { + console.error("ERROR:", err); + process.exit(1); + } +}); diff --git a/test/js/node/async_hooks/async-context/async-context-timers-ref-unref.js b/test/js/node/async_hooks/async-context/async-context-timers-ref-unref.js new file mode 100644 index 0000000000..31cc46582c --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-timers-ref-unref.js @@ -0,0 +1,17 @@ +const { AsyncLocalStorage } = require("async_hooks"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "timers.ref.unref" }, () => { + const timeout = setTimeout(() => { + if (asyncLocalStorage.getStore()?.test !== "timers.ref.unref") { + console.error("FAIL: setTimeout with ref/unref lost context"); + process.exit(1); + } + process.exit(0); + }, 10); + + // Test ref/unref operations + timeout.unref(); + timeout.ref(); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-timers-setInterval.js b/test/js/node/async_hooks/async-context/async-context-timers-setInterval.js new file mode 100644 index 0000000000..89c252b171 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-timers-setInterval.js @@ -0,0 +1,19 @@ +const { AsyncLocalStorage } = require("async_hooks"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let count = 0; + +asyncLocalStorage.run({ test: "setInterval" }, () => { + const interval = setInterval(() => { + if (asyncLocalStorage.getStore()?.test !== "setInterval") { + console.error("FAIL: setInterval callback lost context"); + clearInterval(interval); + process.exit(1); + } + count++; + if (count >= 2) { + clearInterval(interval); + process.exit(0); + } + }, 10); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-tls-connect.js b/test/js/node/async_hooks/async-context/async-context-tls-connect.js new file mode 100644 index 0000000000..0a3b31c990 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-tls-connect.js @@ -0,0 +1,28 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const tls = require("tls"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "tls.connect" }, () => { + const socket = tls.connect( + 443, + "example.com", + { + rejectUnauthorized: true, + }, + () => { + if (asyncLocalStorage.getStore()?.test !== "tls.connect") { + console.error("FAIL: tls.connect callback lost context"); + socket.destroy(); + process.exit(1); + } + socket.destroy(); + 
process.exit(0); + }, + ); + + socket.on("error", () => { + // Skip test if network is unavailable + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-util-promisify-custom.js b/test/js/node/async_hooks/async-context/async-context-util-promisify-custom.js new file mode 100644 index 0000000000..218f70aa69 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-util-promisify-custom.js @@ -0,0 +1,27 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const util = require("util"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +// Custom callback function +function customAsync(value, callback) { + setTimeout(() => { + callback(null, value * 2); + }, 10); +} + +const customPromise = util.promisify(customAsync); + +asyncLocalStorage.run({ test: "util.promisify.custom" }, async () => { + try { + const result = await customPromise(21); + if (asyncLocalStorage.getStore()?.test !== "util.promisify.custom") { + console.error("FAIL: util.promisify with custom function lost context"); + process.exit(1); + } + process.exit(0); + } catch (err) { + console.error("ERROR:", err); + process.exit(1); + } +}); diff --git a/test/js/node/async_hooks/async-context/async-context-util-promisify.js b/test/js/node/async_hooks/async-context/async-context-util-promisify.js new file mode 100644 index 0000000000..665059cf2b --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-util-promisify.js @@ -0,0 +1,22 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const util = require("util"); +const fs = require("fs"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +// Test util.promisify with a built-in callback function +const readFilePromise = util.promisify(fs.readFile); + +asyncLocalStorage.run({ test: "util.promisify" }, async () => { + try { + await readFilePromise(__filename, "utf8"); + if (asyncLocalStorage.getStore()?.test !== "util.promisify") { + console.error("FAIL: util.promisify lost context"); + process.exit(1); + } + process.exit(0); + } catch (err) { + console.error("ERROR:", err); + process.exit(1); + } +}); diff --git a/test/js/node/async_hooks/async-context/async-context-vm-runInNewContext.js b/test/js/node/async_hooks/async-context/async-context-vm-runInNewContext.js new file mode 100644 index 0000000000..9d7b4bc0a1 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-vm-runInNewContext.js @@ -0,0 +1,23 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const vm = require("vm"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "vm.runInNewContext" }, () => { + const code = ` + setImmediate(() => { + if (asyncLocalStorage.getStore()?.test !== 'vm.runInNewContext') { + console.error('FAIL: vm.runInNewContext callback lost context'); + process.exit(1); + } + process.exit(0); + }); + `; + + vm.runInNewContext(code, { + asyncLocalStorage, + setImmediate, + console, + process, + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-worker_threads-message.js b/test/js/node/async_hooks/async-context/async-context-worker_threads-message.js new file mode 100644 index 0000000000..d92e02bab5 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-worker_threads-message.js @@ -0,0 +1,32 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const { Worker, isMainThread, parentPort } = require("worker_threads"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +if (isMainThread) { + 
asyncLocalStorage.run({ test: "worker_threads" }, () => { + const worker = new Worker(__filename); + + worker.on("message", msg => { + if (asyncLocalStorage.getStore()?.test !== "worker_threads") { + console.error("FAIL: worker message event lost context"); + process.exit(1); + } + worker.terminate(); + }); + + worker.on("exit", () => { + if (asyncLocalStorage.getStore()?.test !== "worker_threads") { + console.error("FAIL: worker exit event lost context"); + process.exit(1); + } + process.exit(0); + }); + + worker.postMessage("test"); + }); +} else { + parentPort.on("message", msg => { + parentPort.postMessage("response"); + }); +} diff --git a/test/js/node/async_hooks/async-context/async-context-zlib-brotliCompress.js b/test/js/node/async_hooks/async-context/async-context-zlib-brotliCompress.js new file mode 100644 index 0000000000..798f67d86b --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-zlib-brotliCompress.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const zlib = require("zlib"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "zlib.brotliCompress" }, () => { + zlib.brotliCompress("test data", (err, compressed) => { + if (asyncLocalStorage.getStore()?.test !== "zlib.brotliCompress") { + console.error("FAIL: zlib.brotliCompress callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-zlib-brotliDecompress.js b/test/js/node/async_hooks/async-context/async-context-zlib-brotliDecompress.js new file mode 100644 index 0000000000..f467565b02 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-zlib-brotliDecompress.js @@ -0,0 +1,17 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const zlib = require("zlib"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "zlib.brotliDecompress" }, () => { + // First compress data + const compressed = zlib.brotliCompressSync("test data"); + + zlib.brotliDecompress(compressed, (err, decompressed) => { + if (asyncLocalStorage.getStore()?.test !== "zlib.brotliDecompress") { + console.error("FAIL: zlib.brotliDecompress callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-zlib-createGzip.js b/test/js/node/async_hooks/async-context/async-context-zlib-createGzip.js new file mode 100644 index 0000000000..6a0d80b029 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-zlib-createGzip.js @@ -0,0 +1,34 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const zlib = require("zlib"); + +const asyncLocalStorage = new AsyncLocalStorage(); +let failed = false; + +asyncLocalStorage.run({ test: "zlib.createGzip" }, () => { + const gzip = zlib.createGzip(); + + gzip.on("data", chunk => { + if (asyncLocalStorage.getStore()?.test !== "zlib.createGzip") { + console.error("FAIL: zlib.createGzip data event lost context"); + failed = true; + } + }); + + gzip.on("end", () => { + if (asyncLocalStorage.getStore()?.test !== "zlib.createGzip") { + console.error("FAIL: zlib.createGzip end event lost context"); + failed = true; + } + process.exit(failed ? 
1 : 0); + }); + + gzip.on("finish", () => { + if (asyncLocalStorage.getStore()?.test !== "zlib.createGzip") { + console.error("FAIL: zlib.createGzip finish event lost context"); + failed = true; + } + }); + + gzip.write("test data"); + gzip.end(); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-zlib-deflate.js b/test/js/node/async_hooks/async-context/async-context-zlib-deflate.js new file mode 100644 index 0000000000..15ea6e70b8 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-zlib-deflate.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const zlib = require("zlib"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "zlib.deflate" }, () => { + zlib.deflate("test data", (err, compressed) => { + if (asyncLocalStorage.getStore()?.test !== "zlib.deflate") { + console.error("FAIL: zlib.deflate callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-zlib-gunzip.js b/test/js/node/async_hooks/async-context/async-context-zlib-gunzip.js new file mode 100644 index 0000000000..5c3ada6191 --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-zlib-gunzip.js @@ -0,0 +1,17 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const zlib = require("zlib"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "zlib.gunzip" }, () => { + // First compress data + const compressed = zlib.gzipSync("test data"); + + zlib.gunzip(compressed, (err, decompressed) => { + if (asyncLocalStorage.getStore()?.test !== "zlib.gunzip") { + console.error("FAIL: zlib.gunzip callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-zlib-gzip.js b/test/js/node/async_hooks/async-context/async-context-zlib-gzip.js new file mode 100644 index 0000000000..20decb021f --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-zlib-gzip.js @@ -0,0 +1,14 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const zlib = require("zlib"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "zlib.gzip" }, () => { + zlib.gzip("test data", (err, compressed) => { + if (asyncLocalStorage.getStore()?.test !== "zlib.gzip") { + console.error("FAIL: zlib.gzip callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/async_hooks/async-context/async-context-zlib-inflate.js b/test/js/node/async_hooks/async-context/async-context-zlib-inflate.js new file mode 100644 index 0000000000..1a41995d2d --- /dev/null +++ b/test/js/node/async_hooks/async-context/async-context-zlib-inflate.js @@ -0,0 +1,17 @@ +const { AsyncLocalStorage } = require("async_hooks"); +const zlib = require("zlib"); + +const asyncLocalStorage = new AsyncLocalStorage(); + +asyncLocalStorage.run({ test: "zlib.inflate" }, () => { + // First compress data + const compressed = zlib.deflateSync("test data"); + + zlib.inflate(compressed, (err, decompressed) => { + if (asyncLocalStorage.getStore()?.test !== "zlib.inflate") { + console.error("FAIL: zlib.inflate callback lost context"); + process.exit(1); + } + process.exit(0); + }); +}); diff --git a/test/js/node/fs/abort-signal-leak-read-write-file-fixture.ts b/test/js/node/fs/abort-signal-leak-read-write-file-fixture.ts index 891591e408..dbfb71c7c8 100644 --- 
a/test/js/node/fs/abort-signal-leak-read-write-file-fixture.ts +++ b/test/js/node/fs/abort-signal-leak-read-write-file-fixture.ts @@ -33,6 +33,6 @@ if (numAbortSignalObjects > 10) { } const rss = (process.memoryUsage().rss / 1024 / 1024) | 0; -if (rss > 170) { -  throw new Error(`Memory leak detected: ${rss} MB, expected < 170 MB`); +if (rss > 200) { +  throw new Error(`Memory leak detected: ${rss} MB, expected < 200 MB`); } diff --git a/test/js/node/fs/glob.test.ts b/test/js/node/fs/glob.test.ts index 180f79edb7..0383efae19 100644 --- a/test/js/node/fs/glob.test.ts +++ b/test/js/node/fs/glob.test.ts @@ -14,6 +14,10 @@ beforeAll(() => { "bar.txt": "bar", "baz.js": "baz", }, + "folder.test": { + "file.txt": "content", + "another-folder": {}, + }, }); }); @@ -61,6 +65,11 @@ describe("fs.glob", () => { expect(() => fs.glob("*.txt", { cwd: tmp }, undefined)).toThrow(TypeError); }); }); + + it("matches directories", () => { + const paths = fs.globSync("*.test", { cwd: tmp }); + expect(paths).toContain("folder.test"); + }); }); // describe("fs.globSync", () => { @@ -102,12 +111,29 @@ describe("fs.globSync", () => { expect(fs.globSync("a/*", { cwd: tmp, exclude })).toStrictEqual(expected); }); + it("works without providing options", () => { + const oldProcessCwd = process.cwd; + try { + process.cwd = () => tmp; + + const paths = fs.globSync("*.txt"); + expect(paths).toContain("foo.txt"); + } finally { + process.cwd = oldProcessCwd; + } + }); + describe("invalid arguments", () => { // TODO: GlobSet it("does not support arrays of patterns yet", () => { expect(() => fs.globSync(["*.txt"])).toThrow(TypeError); }); }); + + it("matches directories", () => { + const paths = fs.globSync("*.test", { cwd: tmp }); + expect(paths).toContain("folder.test"); + }); }); // describe("fs.promises.glob", () => { @@ -129,4 +155,34 @@ describe("fs.promises.glob", () => { expect(path).toMatch(/\.txt$/); } }); + + it("works without providing options", async () => { + const oldProcessCwd = process.cwd; + try { + process.cwd = () => tmp; + + const iter = fs.promises.glob("*.txt"); + expect(iter[Symbol.asyncIterator]).toBeDefined(); + + const paths = []; + for await (const path of iter) { + paths.push(path); + } + + expect(paths).toContain("foo.txt"); + } finally { + process.cwd = oldProcessCwd; + } + }); + + it("matches directories", async () => { + const iter = fs.promises.glob("*.test", { cwd: tmp }); + expect(iter[Symbol.asyncIterator]).toBeDefined(); + let count = 0; + for await (const path of iter) { + expect(path).toBe("folder.test"); + count++; + } + expect(count).toBe(1); + }); }); // diff --git a/test/js/node/http/node-http.test.ts b/test/js/node/http/node-http.test.ts index c5795abdab..d023f81c31 100644 --- a/test/js/node/http/node-http.test.ts +++ b/test/js/node/http/node-http.test.ts @@ -2631,6 +2631,35 @@ test("client side flushHeaders should work", async () => { expect(headers.foo).toEqual("bar"); }); +test("flushHeaders should not drop request body", async () => { + const { promise, resolve } = Promise.withResolvers(); + await using server = http.createServer((req, res) => { + let body = ""; + req.setEncoding("utf8"); + req.on("data", chunk => (body += chunk)); + req.on("end", () => { + resolve(body); + res.end(); + }); + }); + + await once(server.listen(0), "listening"); + const address = server.address() as AddressInfo; + const req = http.request({ + method: "POST", + host: "127.0.0.1", + port: address.port, + headers: { "content-type": "text/plain" }, + }); + + req.flushHeaders(); + req.write("bun"); + req.end("rocks"); + + const body = await promise; + 
expect(body).toBe("bunrocks"); +}); + test("server.listening should work", async () => { const server = http.createServer(); await once(server.listen(0), "listening"); diff --git a/test/js/node/http2/node-http2.test.js b/test/js/node/http2/node-http2.test.js index b218cb22ac..4b01bc1177 100644 --- a/test/js/node/http2/node-http2.test.js +++ b/test/js/node/http2/node-http2.test.js @@ -214,6 +214,31 @@ for (const nodeExecutable of [nodeExe(), bunExe()]) { expect(parsed.url).toBe(`${HTTPS_SERVER}/get`); } }); + it("http2 should receive remoteSettings when receiving default settings frame", async () => { + const { promise, resolve, reject } = Promise.withResolvers(); + const session = http2.connect(HTTPS_SERVER, TLS_OPTIONS); + + session.once("remoteSettings", resolve); + session.once("close", () => { + reject(new Error("Failed to receive remoteSettings")); + }); + try { + const settings = await promise; + expect(settings).toBeDefined(); + expect(settings).toEqual({ + headerTableSize: 4096, + enablePush: false, + maxConcurrentStreams: 4294967295, + initialWindowSize: 65535, + maxFrameSize: 16384, + maxHeaderListSize: 65535, + maxHeaderSize: 65535, + enableConnectProtocol: false, + }); + } finally { + session.close(); + } + }); it("should be able to mutiplex POST requests", async () => { const results = await doMultiplexHttp2Request(HTTPS_SERVER, [ { headers: { ":path": "/post", ":method": "POST" }, payload: JSON.stringify({ "request": 1 }) }, diff --git a/test/js/node/module/module-sourcemap.test.js b/test/js/node/module/module-sourcemap.test.js new file mode 100644 index 0000000000..dc847c01b2 --- /dev/null +++ b/test/js/node/module/module-sourcemap.test.js @@ -0,0 +1,27 @@ +const { test, expect } = require("bun:test"); + +test("SourceMap is available from node:module", () => { + const module = require("node:module"); + expect(module.SourceMap).toBeDefined(); + expect(typeof module.SourceMap).toBe("function"); +}); + +test("SourceMap from require('module') works", () => { + const module = require("module"); + expect(module.SourceMap).toBeDefined(); + expect(typeof module.SourceMap).toBe("function"); +}); + +test("Can create SourceMap instance from node:module", () => { + const { SourceMap } = require("node:module"); + const payload = { + version: 3, + sources: ["test.js"], + names: [], + mappings: "AAAA", + }; + + const sourceMap = new SourceMap(payload); + expect(sourceMap).toBeInstanceOf(SourceMap); + expect(sourceMap.payload).toBe(payload); +}); diff --git a/test/js/node/module/sourcemap.test.js b/test/js/node/module/sourcemap.test.js new file mode 100644 index 0000000000..d9ff675cdf --- /dev/null +++ b/test/js/node/module/sourcemap.test.js @@ -0,0 +1,177 @@ +const { test, expect } = require("bun:test"); +const { SourceMap } = require("node:module"); + +test("SourceMap class exists", () => { + expect(SourceMap).toBeDefined(); + expect(typeof SourceMap).toBe("function"); + expect(SourceMap.name).toBe("SourceMap"); +}); + +test("SourceMap constructor requires payload", () => { + expect(() => { + new SourceMap(); + }).toThrowErrorMatchingInlineSnapshot(`"The "payload" argument must be of type object. Received undefined"`); +}); + +test("SourceMap payload must be an object", () => { + expect(() => { + new SourceMap("not an object"); + }).toThrowErrorMatchingInlineSnapshot( + `"The "payload" argument must be of type object. 
Received type string ('not an object')"`, + ); +}); + +test("SourceMap instance has expected methods", () => { + const sourceMap = new SourceMap({ + version: 3, + sources: ["test.js"], + mappings: "AAAA", + }); + + expect(typeof sourceMap.findOrigin).toBe("function"); + expect(typeof sourceMap.findEntry).toBe("function"); + expect(sourceMap.findOrigin.length).toBe(2); + expect(sourceMap.findEntry.length).toBe(2); +}); + +test("SourceMap payload getter", () => { + const payload = { + version: 3, + sources: ["test.js"], + mappings: "AAAA", + }; + const sourceMap = new SourceMap(payload); + + expect(sourceMap.payload).toBe(payload); +}); + +test("SourceMap lineLengths getter", () => { + const payload = { + version: 3, + sources: ["test.js"], + mappings: "AAAA", + }; + const lineLengths = [10, 20, 30]; + const sourceMap = new SourceMap(payload, { lineLengths }); + + expect(sourceMap.lineLengths).toBe(lineLengths); +}); + +test("SourceMap lineLengths undefined when not provided", () => { + const sourceMap = new SourceMap({ + version: 3, + sources: ["test.js"], + mappings: "AAAA", + }); + + expect(sourceMap.lineLengths).toBeUndefined(); +}); +test("SourceMap findEntry returns mapping data", () => { + const sourceMap = new SourceMap({ + version: 3, + sources: ["test.js"], + mappings: "AAAA", + }); + const result = sourceMap.findEntry(0, 0); + + expect(result).toMatchInlineSnapshot(` + { + "generatedColumn": 0, + "generatedLine": 0, + "name": undefined, + "originalColumn": 0, + "originalLine": 0, + "originalSource": "test.js", + } + `); +}); + +test("SourceMap findOrigin returns origin data", () => { + const sourceMap = new SourceMap({ + version: 3, + sources: ["test.js"], + mappings: "AAAA", + }); + const result = sourceMap.findOrigin(0, 0); + + expect(result).toMatchInlineSnapshot(` + { + "column": 0, + "fileName": "test.js", + "line": 0, + "name": undefined, + } + `); +}); + +test("SourceMap with names returns name property correctly", () => { + const sourceMap = new SourceMap({ + version: 3, + sources: ["test.js"], + names: ["myFunction", "myVariable"], + mappings: "AAAAA,CAACC", // Both segments reference names + }); + + const result = sourceMap.findEntry(0, 0); + const resultWithName = sourceMap.findEntry(0, 6); + expect(result).toMatchInlineSnapshot(` + { + "generatedColumn": 0, + "generatedLine": 0, + "name": "myFunction", + "originalColumn": 0, + "originalLine": 0, + "originalSource": "test.js", + } + `); + expect(resultWithName).toMatchInlineSnapshot(` + { + "generatedColumn": 1, + "generatedLine": 0, + "name": "myVariable", + "originalColumn": 1, + "originalLine": 0, + "originalSource": "test.js", + } + `); +}); + +test("SourceMap without names has undefined name property", () => { + const sourceMap = new SourceMap({ + version: 3, + sources: ["test.js"], + mappings: "AAAA", + }); + + const result = sourceMap.findEntry(0, 0); + expect(result).toMatchInlineSnapshot(` + { + "generatedColumn": 0, + "generatedLine": 0, + "name": undefined, + "originalColumn": 0, + "originalLine": 0, + "originalSource": "test.js", + } + `); +}); + +test("SourceMap with invalid name index has undefined name property", () => { + const sourceMap = new SourceMap({ + version: 3, + sources: ["test.js"], + mappings: "AAAAA,CAACC", // Both segments reference names + }); + + const result = sourceMap.findEntry(0, 0); + expect(result).toMatchInlineSnapshot(` + { + "generatedColumn": 0, + "generatedLine": 0, + "name": undefined, + "originalColumn": 0, + "originalLine": 0, + "originalSource": "test.js", + } + `); +}); 
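The two new test files above pin down Bun's `node:module` SourceMap surface. As a quick reference for reviewers, here is a minimal standalone sketch of the API those tests exercise; the payload is illustrative (not taken from the test files), and `"AAAA"` encodes a single segment mapping generated line 0, column 0 back to the start of the first source:

```js
// Minimal sketch of the node:module SourceMap API covered by the tests above.
// The payload below is an illustrative one-segment map, not a test fixture.
const { SourceMap } = require("node:module");

const map = new SourceMap({
  version: 3,
  sources: ["original.js"],
  names: [],
  mappings: "AAAA", // generated (0, 0) -> original.js (0, 0)
});

// findEntry takes zero-indexed generated line/column offsets and returns the
// decoded segment, matching the inline snapshots in sourcemap.test.js.
console.log(map.findEntry(0, 0));
// { generatedLine: 0, generatedColumn: 0, originalSource: "original.js",
//   originalLine: 0, originalColumn: 0, name: undefined }

// findOrigin resolves a generated position to its original call site and,
// per the snapshots above, returns { fileName, line, column, name }.
console.log(map.findOrigin(0, 0));
```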
diff --git a/test/js/node/process/process.test.js b/test/js/node/process/process.test.js index 2eb79daaa8..d8ef7c338a 100644 --- a/test/js/node/process/process.test.js +++ b/test/js/node/process/process.test.js @@ -2,7 +2,7 @@ import { spawnSync, which } from "bun"; import { describe, expect, it } from "bun:test"; import { familySync } from "detect-libc"; import { existsSync, readFileSync, writeFileSync } from "fs"; -import { bunEnv, bunExe, isWindows, tmpdirSync } from "harness"; +import { bunEnv, bunExe, isMacOS, isMusl, isWindows, tmpdirSync } from "harness"; import { basename, join, resolve } from "path"; expect.extend({ @@ -269,7 +269,7 @@ it("process.umask()", () => { const generated_versions_list = join(import.meta.dir, "../../../../src/generated_versions_list.zig"); const versions = existsSync(generated_versions_list); -(versions ? it : it.skip)("process.versions", () => { +it.skipIf(!versions)("process.versions", () => { // Generate a list of all the versions in the versions object // example: // pub const boringssl = "b275c5ce1c88bc06f5a967026d3c0ce1df2be815"; @@ -305,14 +305,9 @@ const versions = existsSync(generated_versions_list); }); it("process.config", () => { - expect(process.config).toEqual({ - variables: { - enable_lto: false, - node_module_version: expect.any(Number), - v8_enable_i8n_support: 1, - }, - target_defaults: {}, - }); + expect(process.config.variables.clang).toBeNumber(); + expect(process.config.variables.host_arch).toBeDefined(); + expect(process.config.variables.target_arch).toBeDefined(); }); it("process.execArgv", () => { @@ -1131,3 +1126,24 @@ it.each(["stdin", "stdout", "stderr"])("%s stream accessor should handle excepti 1, ]).toRunInlineFixture(); }); + +it("process.versions", () => { + expect(process.versions.node).toEqual("24.3.0"); + expect(process.versions.v8).toEqual("13.6.233.10-node.18"); + expect(process.versions.napi).toEqual("10"); + expect(process.versions.modules).toEqual("137"); +}); + +it.todoIf(isMacOS || isMusl)("should be the node version on the host that we expect", async () => { + const subprocess = Bun.spawn({ + cmd: ["node", "--version"], + stdout: "pipe", + stdin: "inherit", + stderr: "pipe", + env: bunEnv, + }); + + let [out, exited] = await Promise.all([new Response(subprocess.stdout).text(), subprocess.exited]); + expect(out.trim()).toEqual(isWindows ? "v24.3.0" : "v24.4.0"); // TODO: this *should* be v24.3.0 but scripts/bootstrap.sh needs to be enhanced to do so + expect(exited).toBe(0); +}); diff --git a/test/js/node/test/parallel/needs-test/README.md b/test/js/node/test/parallel/needs-test/README.md deleted file mode 100644 index 821ae16ee3..0000000000 --- a/test/js/node/test/parallel/needs-test/README.md +++ /dev/null @@ -1,8 +0,0 @@ -A good deal of parallel test cases can be run directly via `bun `. -However, some newer cases use `node:test`. - -Files in this directory need to be run with `bun test `. The -`node:test` module is shimmed via a require cache hack in -`test/js/node/harness.js` to use `bun:test`. Note that our test runner -(`scripts/runner.node.mjs`) checks for `needs-test` in the names of test files, -so don't rename this folder without updating that code. 
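The hard-coded assertions in the process.test.js hunks above are the observable contract of this version bump. A hedged spot-check script for a locally built bun, with the expected values copied from those assertions, could look like:

```js
// Spot-check sketch: verify the self-reported Node.js compatibility versions
// asserted in process.test.js above. Values are this PR's; bump on upgrades.
const expected = {
  node: "24.3.0",
  v8: "13.6.233.10-node.18",
  napi: "10",
  modules: "137",
};

for (const [key, want] of Object.entries(expected)) {
  const got = process.versions[key];
  if (got !== want) {
    throw new Error(`process.versions.${key}: expected ${want}, got ${got}`);
  }
}
console.log("self-reported compatibility versions match this PR");
```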
diff --git a/test/js/node/test/parallel/test-assert-async.js b/test/js/node/test/parallel/test-assert-async.js index 28e8c72bd0..bcf3d55622 100644 --- a/test/js/node/test/parallel/test-assert-async.js +++ b/test/js/node/test/parallel/test-assert-async.js @@ -137,10 +137,7 @@ promises.push(assert.rejects( assert.strictEqual(err.code, 'ERR_ASSERTION'); assert.strictEqual(err.actual, actual); assert.strictEqual(err.operator, 'rejects'); - if (typeof Bun === "undefined") { - // disabled on bun because it is missing async stack traces - assert.match(err.stack, /rejects/); - } + assert.match(err.stack, /rejects/); return true; }; const err = new Error(); diff --git a/test/js/node/test/parallel/test-buffer-bytelength.js b/test/js/node/test/parallel/test-buffer-bytelength.js index 95d54d425b..1013469181 100644 --- a/test/js/node/test/parallel/test-buffer-bytelength.js +++ b/test/js/node/test/parallel/test-buffer-bytelength.js @@ -2,7 +2,7 @@ const common = require('../common'); const assert = require('assert'); -const SlowBuffer = require('buffer').SlowBuffer; +const { Buffer } = require('buffer'); const vm = require('vm'); [ @@ -24,7 +24,6 @@ const vm = require('vm'); }); assert(ArrayBuffer.isView(new Buffer(10))); -assert(ArrayBuffer.isView(new SlowBuffer(10))); assert(ArrayBuffer.isView(Buffer.alloc(10))); assert(ArrayBuffer.isView(Buffer.allocUnsafe(10))); assert(ArrayBuffer.isView(Buffer.allocUnsafeSlow(10))); diff --git a/test/js/node/test/parallel/test-buffer-failed-alloc-typed-arrays.js b/test/js/node/test/parallel/test-buffer-failed-alloc-typed-arrays.js index 699475ad0a..ac519d2bf7 100644 --- a/test/js/node/test/parallel/test-buffer-failed-alloc-typed-arrays.js +++ b/test/js/node/test/parallel/test-buffer-failed-alloc-typed-arrays.js @@ -2,7 +2,7 @@ require('../common'); const assert = require('assert'); -const SlowBuffer = require('buffer').SlowBuffer; +const { Buffer } = require('buffer'); // Test failed or zero-sized Buffer allocations not affecting typed arrays. // This test exists because of a regression that occurred. 
Because Buffer @@ -15,7 +15,6 @@ const zeroArray = new Uint32Array(10).fill(0); const sizes = [1e20, 0, 0.1, -1, 'a', undefined, null, NaN]; const allocators = [ Buffer, - SlowBuffer, Buffer.alloc, Buffer.allocUnsafe, Buffer.allocUnsafeSlow, diff --git a/test/js/node/test/parallel/test-buffer-inspect.js b/test/js/node/test/parallel/test-buffer-inspect.js index 1e8212e876..0dd15e5a97 100644 --- a/test/js/node/test/parallel/test-buffer-inspect.js +++ b/test/js/node/test/parallel/test-buffer-inspect.js @@ -30,7 +30,7 @@ buffer.INSPECT_MAX_BYTES = 2; let b = Buffer.allocUnsafe(4); b.fill('1234'); -let s = buffer.SlowBuffer(4); +let s = Buffer.allocUnsafeSlow(4); s.fill('1234'); let expected = ''; @@ -41,7 +41,7 @@ assert.strictEqual(util.inspect(s), expected); b = Buffer.allocUnsafe(2); b.fill('12'); -s = buffer.SlowBuffer(2); +s = Buffer.allocUnsafeSlow(2); s.fill('12'); expected = ''; diff --git a/test/js/node/test/parallel/test-buffer-no-negative-allocation.js b/test/js/node/test/parallel/test-buffer-no-negative-allocation.js index 055e2d5dc6..e48d9c7545 100644 --- a/test/js/node/test/parallel/test-buffer-no-negative-allocation.js +++ b/test/js/node/test/parallel/test-buffer-no-negative-allocation.js @@ -2,7 +2,6 @@ require('../common'); const assert = require('assert'); -const { SlowBuffer } = require('buffer'); const msg = { code: 'ERR_OUT_OF_RANGE', @@ -30,8 +29,3 @@ assert.throws(() => Buffer.allocUnsafeSlow(-Buffer.poolSize), msg); assert.throws(() => Buffer.allocUnsafeSlow(-100), msg); assert.throws(() => Buffer.allocUnsafeSlow(-1), msg); assert.throws(() => Buffer.allocUnsafeSlow(NaN), msg); - -assert.throws(() => SlowBuffer(-Buffer.poolSize), msg); -assert.throws(() => SlowBuffer(-100), msg); -assert.throws(() => SlowBuffer(-1), msg); -assert.throws(() => SlowBuffer(NaN), msg); diff --git a/test/js/node/test/parallel/test-buffer-over-max-length.js b/test/js/node/test/parallel/test-buffer-over-max-length.js index f29d6b62d4..5ee07b14d2 100644 --- a/test/js/node/test/parallel/test-buffer-over-max-length.js +++ b/test/js/node/test/parallel/test-buffer-over-max-length.js @@ -4,7 +4,6 @@ require('../common'); const assert = require('assert'); const buffer = require('buffer'); -const SlowBuffer = buffer.SlowBuffer; const kMaxLength = buffer.kMaxLength; const bufferMaxSizeMsg = { @@ -13,7 +12,6 @@ const bufferMaxSizeMsg = { }; assert.throws(() => Buffer(kMaxLength + 1), bufferMaxSizeMsg); -assert.throws(() => SlowBuffer(kMaxLength + 1), bufferMaxSizeMsg); assert.throws(() => Buffer.alloc(kMaxLength + 1), bufferMaxSizeMsg); assert.throws(() => Buffer.allocUnsafe(kMaxLength + 1), bufferMaxSizeMsg); assert.throws(() => Buffer.allocUnsafeSlow(kMaxLength + 1), bufferMaxSizeMsg); diff --git a/test/js/node/test/parallel/test-buffer-slow.js b/test/js/node/test/parallel/test-buffer-slow.js index 07138d5db0..bf104abf4f 100644 --- a/test/js/node/test/parallel/test-buffer-slow.js +++ b/test/js/node/test/parallel/test-buffer-slow.js @@ -2,13 +2,12 @@ require('../common'); const assert = require('assert'); -const buffer = require('buffer'); -const SlowBuffer = buffer.SlowBuffer; +const { Buffer, kMaxLength } = require('buffer'); const ones = [1, 1, 1, 1]; // Should create a Buffer -let sb = SlowBuffer(4); +let sb = Buffer.allocUnsafeSlow(4); assert(sb instanceof Buffer); assert.strictEqual(sb.length, 4); sb.fill(1); @@ -20,7 +19,7 @@ for (const [key, value] of sb.entries()) { assert.strictEqual(sb.buffer.byteLength, 4); // Should work without new -sb = SlowBuffer(4); +sb = Buffer.allocUnsafeSlow(4); 
assert(sb instanceof Buffer); assert.strictEqual(sb.length, 4); sb.fill(1); @@ -29,7 +28,7 @@ for (const [key, value] of sb.entries()) { } // Should work with edge cases -assert.strictEqual(SlowBuffer(0).length, 0); +assert.strictEqual(Buffer.allocUnsafeSlow(0).length, 0); // Should throw with invalid length type const bufferInvalidTypeMsg = { @@ -37,17 +36,17 @@ const bufferInvalidTypeMsg = { name: 'TypeError', message: /^The "size" argument must be of type number/, }; -assert.throws(() => SlowBuffer(), bufferInvalidTypeMsg); -assert.throws(() => SlowBuffer({}), bufferInvalidTypeMsg); -assert.throws(() => SlowBuffer('6'), bufferInvalidTypeMsg); -assert.throws(() => SlowBuffer(true), bufferInvalidTypeMsg); +assert.throws(() => Buffer.allocUnsafeSlow(), bufferInvalidTypeMsg); +assert.throws(() => Buffer.allocUnsafeSlow({}), bufferInvalidTypeMsg); +assert.throws(() => Buffer.allocUnsafeSlow('6'), bufferInvalidTypeMsg); +assert.throws(() => Buffer.allocUnsafeSlow(true), bufferInvalidTypeMsg); // Should throw with invalid length value const bufferMaxSizeMsg = { code: 'ERR_OUT_OF_RANGE', name: 'RangeError', }; -assert.throws(() => SlowBuffer(NaN), bufferMaxSizeMsg); -assert.throws(() => SlowBuffer(Infinity), bufferMaxSizeMsg); -assert.throws(() => SlowBuffer(-1), bufferMaxSizeMsg); -assert.throws(() => SlowBuffer(buffer.kMaxLength + 1), bufferMaxSizeMsg); +assert.throws(() => Buffer.allocUnsafeSlow(NaN), bufferMaxSizeMsg); +assert.throws(() => Buffer.allocUnsafeSlow(Infinity), bufferMaxSizeMsg); +assert.throws(() => Buffer.allocUnsafeSlow(-1), bufferMaxSizeMsg); +assert.throws(() => Buffer.allocUnsafeSlow(kMaxLength + 1), bufferMaxSizeMsg); diff --git a/test/js/node/test/parallel/test-buffer-tostring-rangeerror.js b/test/js/node/test/parallel/test-buffer-tostring-rangeerror.js index 0ebea759b5..b34f4c6126 100644 --- a/test/js/node/test/parallel/test-buffer-tostring-rangeerror.js +++ b/test/js/node/test/parallel/test-buffer-tostring-rangeerror.js @@ -1,13 +1,18 @@ 'use strict'; -require('../common'); + +const common = require('../common'); // This test ensures that Node.js throws an Error when trying to convert a // large buffer into a string. // Regression test for https://github.com/nodejs/node/issues/649. +if (!common.enoughTestMem) { + common.skip('skipped due to memory requirements'); +} + const assert = require('assert'); const { - SlowBuffer, + Buffer, constants: { MAX_STRING_LENGTH, }, @@ -18,8 +23,21 @@ const message = { code: 'ERR_STRING_TOO_LONG', name: 'Error', }; -assert.throws(() => Buffer(len).toString('utf8'), message); -assert.throws(() => SlowBuffer(len).toString('utf8'), message); -assert.throws(() => Buffer.alloc(len).toString('utf8'), message); -assert.throws(() => Buffer.allocUnsafe(len).toString('utf8'), message); -assert.throws(() => Buffer.allocUnsafeSlow(len).toString('utf8'), message); + +function test(getBuffer) { + let buf; + try { + buf = getBuffer(); + } catch (e) { + // If the buffer allocation fails, we skip the test. 
+ if (e.code === 'ERR_MEMORY_ALLOCATION_FAILED' || /Array buffer allocation failed/.test(e.message)) { + return; + } + } + assert.throws(() => { buf.toString('utf8'); }, message); +} + +test(() => Buffer(len)); +test(() => Buffer.alloc(len)); +test(() => Buffer.allocUnsafe(len)); +test(() => Buffer.allocUnsafeSlow(len)); diff --git a/test/js/node/test/parallel/test-buffer-zero-fill-cli.js b/test/js/node/test/parallel/test-buffer-zero-fill-cli.js index 4299f81039..663911b718 100644 --- a/test/js/node/test/parallel/test-buffer-zero-fill-cli.js +++ b/test/js/node/test/parallel/test-buffer-zero-fill-cli.js @@ -1,11 +1,11 @@ 'use strict'; // Flags: --zero-fill-buffers -// when using --zero-fill-buffers, every Buffer and SlowBuffer +// when using --zero-fill-buffers, every Buffer // instance must be zero filled upon creation require('../common'); -const SlowBuffer = require('buffer').SlowBuffer; +const { Buffer } = require('buffer'); const assert = require('assert'); function isZeroFilled(buf) { @@ -22,9 +22,8 @@ for (let i = 0; i < 50; i++) { const bufs = [ Buffer.alloc(20), Buffer.allocUnsafe(20), - SlowBuffer(20), + Buffer.allocUnsafeSlow(20), Buffer(20), - new SlowBuffer(20), ]; for (const buf of bufs) { assert(isZeroFilled(buf)); diff --git a/test/js/node/test/parallel/test-child-process-exec-encoding.js b/test/js/node/test/parallel/test-child-process-exec-encoding.js index 0c3178e3f2..059e300e4e 100644 --- a/test/js/node/test/parallel/test-child-process-exec-encoding.js +++ b/test/js/node/test/parallel/test-child-process-exec-encoding.js @@ -13,7 +13,8 @@ if (process.argv[2] === 'child') { const expectedStdout = `${stdoutData}\n`; const expectedStderr = `${stderrData}\n`; function run(options, callback) { - const cmd = `"${process.execPath}" "${__filename}" child`; + const [cmd, opts] = common.escapePOSIXShell`"${process.execPath}" "${__filename}" child`; + options = { ...options, env: { ...opts?.env, ...options.env } }; cp.exec(cmd, options, common.mustSucceed((stdout, stderr) => { callback(stdout, stderr); diff --git a/test/js/node/test/parallel/test-child-process-exec-std-encoding.js b/test/js/node/test/parallel/test-child-process-exec-std-encoding.js index 0818731672..ed5050e14d 100644 --- a/test/js/node/test/parallel/test-child-process-exec-std-encoding.js +++ b/test/js/node/test/parallel/test-child-process-exec-std-encoding.js @@ -12,8 +12,7 @@ if (process.argv[2] === 'child') { console.log(stdoutData); console.error(stderrData); } else { - const cmd = `"${process.execPath}" "${__filename}" child`; - const child = cp.exec(cmd, common.mustSucceed((stdout, stderr) => { + const child = cp.exec(...common.escapePOSIXShell`"${process.execPath}" "${__filename}" child`, common.mustSucceed((stdout, stderr) => { assert.strictEqual(stdout, expectedStdout); assert.strictEqual(stderr, expectedStderr); })); diff --git a/test/js/node/test/parallel/test-child-process-exec-timeout-expire.js b/test/js/node/test/parallel/test-child-process-exec-timeout-expire.js index 6b62d131cb..0fd4f85cbc 100644 --- a/test/js/node/test/parallel/test-child-process-exec-timeout-expire.js +++ b/test/js/node/test/parallel/test-child-process-exec-timeout-expire.js @@ -18,9 +18,10 @@ if (process.argv[2] === 'child') { return; } -const cmd = `"${process.execPath}" "${__filename}" child`; +const [cmd, opts] = common.escapePOSIXShell`"${process.execPath}" "${__filename}" child`; cp.exec(cmd, { + ...opts, timeout: kExpiringParentTimer, }, common.mustCall((err, stdout, stderr) => { console.log('[stdout]', stdout.trim()); diff 
--git a/test/js/node/test/parallel/test-child-process-exec-timeout-kill.js b/test/js/node/test/parallel/test-child-process-exec-timeout-kill.js index 845fd1eaec..4938cea8a2 100644 --- a/test/js/node/test/parallel/test-child-process-exec-timeout-kill.js +++ b/test/js/node/test/parallel/test-child-process-exec-timeout-kill.js @@ -18,10 +18,11 @@ if (process.argv[2] === 'child') { return; } -const cmd = `"${process.execPath}" "${__filename}" child`; +const [cmd, opts] = common.escapePOSIXShell`"${process.execPath}" "${__filename}" child`; // Test with a different kill signal. cp.exec(cmd, { + ...opts, timeout: kExpiringParentTimer, killSignal: 'SIGKILL' }, common.mustCall((err, stdout, stderr) => { diff --git a/test/js/node/test/parallel/test-child-process-exec-timeout-not-expired.js b/test/js/node/test/parallel/test-child-process-exec-timeout-not-expired.js index 7c8dd3661a..0d0c0f47f9 100644 --- a/test/js/node/test/parallel/test-child-process-exec-timeout-not-expired.js +++ b/test/js/node/test/parallel/test-child-process-exec-timeout-not-expired.js @@ -22,13 +22,14 @@ if (process.argv[2] === 'child') { return; } -const cmd = `"${process.execPath}" "${__filename}" child`; +const [cmd, opts] = common.escapePOSIXShell`"${process.execPath}" "${__filename}" child`; cp.exec(cmd, { - timeout: kTimeoutNotSupposedToExpire + ...opts, + timeout: kTimeoutNotSupposedToExpire, }, common.mustSucceed((stdout, stderr) => { - assert.strict(stdout.trim().includes('child stdout')); - assert.strict(stderr.trim().includes('child stderr')); + assert.strictEqual(stdout.trim(), 'child stdout'); + assert.strictEqual(stderr.trim(), 'child stderr'); })); cleanupStaleProcess(__filename); diff --git a/test/js/node/test/parallel/test-child-process-fork-exec-argv.js b/test/js/node/test/parallel/test-child-process-fork-exec-argv.js new file mode 100644 index 0000000000..f1e3bbbc58 --- /dev/null +++ b/test/js/node/test/parallel/test-child-process-fork-exec-argv.js @@ -0,0 +1,49 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const child_process = require('child_process'); +const spawn = child_process.spawn; +const fork = child_process.fork; + +if (process.argv[2] === 'fork') { + process.stdout.write(JSON.stringify(process.execArgv), function() { + process.exit(); + }); +} else if (process.argv[2] === 'child') { + fork(__filename, ['fork']); +} else { + const execArgv = ['--stack-size=256']; + const args = [__filename, 'child', 'arg0']; + + const child = spawn(process.execPath, execArgv.concat(args)); + let out = ''; + + child.stdout.on('data', function(chunk) { + out += chunk; + }); + + child.on('exit', common.mustCall(function() { + assert.deepStrictEqual(JSON.parse(out), execArgv); + })); +} diff --git a/test/js/node/test/parallel/test-child-process-fork.js b/test/js/node/test/parallel/test-child-process-fork.js index a357f4fbc1..ee9dd3fc9f 100644 --- a/test/js/node/test/parallel/test-child-process-fork.js +++ b/test/js/node/test/parallel/test-child-process-fork.js @@ -18,7 +18,6 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -// Flags: --no-warnings 'use strict'; const common = require('../common'); const assert = require('assert'); @@ -29,7 +28,6 @@ const debug = require('util').debuglog('test'); const n = fork(fixtures.path('child-process-spawn-node.js'), args); -assert.strictEqual(n.channel, n._channel); assert.deepStrictEqual(args, ['foo', 'bar']); n.on('message', (m) => { diff --git a/test/js/node/test/parallel/test-child-process-prototype-tampering.mjs b/test/js/node/test/parallel/test-child-process-prototype-tampering.mjs deleted file mode 100644 index d94c4bdbc6..0000000000 --- a/test/js/node/test/parallel/test-child-process-prototype-tampering.mjs +++ /dev/null @@ -1,91 +0,0 @@ -import * as common from '../common/index.mjs'; -import * as fixtures from '../common/fixtures.mjs'; -import { EOL } from 'node:os'; -import { strictEqual, notStrictEqual, throws } from 'node:assert'; -import cp from 'node:child_process'; - -// TODO(LiviaMedeiros): test on different platforms -if (!common.isLinux) - common.skip(); - -const expectedCWD = process.cwd(); -const expectedUID = process.getuid(); - -for (const tamperedCwd of ['', '/tmp', '/not/existing/malicious/path', 42n]) { - Object.prototype.cwd = tamperedCwd; - - cp.exec('pwd', common.mustSucceed((out) => { - strictEqual(`${out}`, `${expectedCWD}${EOL}`); - })); - strictEqual(`${cp.execSync('pwd')}`, `${expectedCWD}${EOL}`); - cp.execFile('pwd', common.mustSucceed((out) => { - strictEqual(`${out}`, `${expectedCWD}${EOL}`); - })); - strictEqual(`${cp.execFileSync('pwd')}`, `${expectedCWD}${EOL}`); - cp.spawn('pwd').stdout.on('data', common.mustCall((out) => { - strictEqual(`${out}`, `${expectedCWD}${EOL}`); - })); - strictEqual(`${cp.spawnSync('pwd').stdout}`, `${expectedCWD}${EOL}`); - - delete Object.prototype.cwd; -} - -for (const tamperedUID of [0, 1, 999, 1000, 0n, 'gwak']) { - Object.prototype.uid = tamperedUID; - - cp.exec('id -u', common.mustSucceed((out) => { - strictEqual(`${out}`, `${expectedUID}${EOL}`); - })); - strictEqual(`${cp.execSync('id -u')}`, `${expectedUID}${EOL}`); - cp.execFile('id', ['-u'], common.mustSucceed((out) => { - strictEqual(`${out}`, `${expectedUID}${EOL}`); - })); - strictEqual(`${cp.execFileSync('id', ['-u'])}`, `${expectedUID}${EOL}`); - cp.spawn('id', ['-u']).stdout.on('data', 
common.mustCall((out) => { - strictEqual(`${out}`, `${expectedUID}${EOL}`); - })); - strictEqual(`${cp.spawnSync('id', ['-u']).stdout}`, `${expectedUID}${EOL}`); - - delete Object.prototype.uid; -} - -{ - Object.prototype.execPath = '/not/existing/malicious/path'; - - // Does not throw ENOENT - cp.fork(fixtures.path('empty.js')); - - delete Object.prototype.execPath; -} - -for (const shellCommandArgument of ['-L && echo "tampered"']) { - Object.prototype.shell = true; - const cmd = 'pwd'; - let cmdExitCode = ''; - - const program = cp.spawn(cmd, [shellCommandArgument], { cwd: expectedCWD }); - program.stderr.on('data', common.mustCall()); - program.stdout.on('data', common.mustNotCall()); - - program.on('exit', common.mustCall((code) => { - notStrictEqual(code, 0); - })); - - cp.execFile(cmd, [shellCommandArgument], { cwd: expectedCWD }, - common.mustCall((err) => { - notStrictEqual(err.code, 0); - }) - ); - - throws(() => { - cp.execFileSync(cmd, [shellCommandArgument], { cwd: expectedCWD }); - }, (e) => { - notStrictEqual(e.status, 0); - return true; - }); - - cmdExitCode = cp.spawnSync(cmd, [shellCommandArgument], { cwd: expectedCWD }).status; - notStrictEqual(cmdExitCode, 0); - - delete Object.prototype.shell; -} diff --git a/test/js/node/test/parallel/test-child-process-reject-null-bytes.js b/test/js/node/test/parallel/test-child-process-reject-null-bytes.js index db0db64fd8..b5239cdddc 100644 --- a/test/js/node/test/parallel/test-child-process-reject-null-bytes.js +++ b/test/js/node/test/parallel/test-child-process-reject-null-bytes.js @@ -288,9 +288,7 @@ throws(() => fork(__filename, { execPath: 'BBB\0XXX' }), { }); // Tests for the 'options.execArgv' argument -if(typeof Bun === 'undefined') { // This test is disabled in bun because bun does not support execArgv. - throws(() => fork(__filename, { execArgv: ['AAA', 'BBB\0XXX', 'CCC'] }), { - code: 'ERR_INVALID_ARG_VALUE', - name: 'TypeError', - }); -} +throws(() => fork(__filename, { execArgv: ['AAA', 'BBB\0XXX', 'CCC'] }), { + code: 'ERR_INVALID_ARG_VALUE', + name: 'TypeError', +}); diff --git a/test/js/node/test/parallel/test-child-process-spawnsync-input.js b/test/js/node/test/parallel/test-child-process-spawnsync-input.js index 4b4549ff55..62ae476ae1 100644 --- a/test/js/node/test/parallel/test-child-process-spawnsync-input.js +++ b/test/js/node/test/parallel/test-child-process-spawnsync-input.js @@ -48,9 +48,7 @@ function checkSpawnSyncRet(ret) { function verifyBufOutput(ret) { checkSpawnSyncRet(ret); - assert.deepStrictEqual(ret.stdout.toString('utf8'), msgOutBuf.toString('utf8')); assert.deepStrictEqual(ret.stdout, msgOutBuf); - assert.deepStrictEqual(ret.stderr.toString('utf8'), msgErrBuf.toString('utf8')); assert.deepStrictEqual(ret.stderr, msgErrBuf); } diff --git a/test/js/node/test/parallel/test-child-process-stdout-flush-exit.js b/test/js/node/test/parallel/test-child-process-stdout-flush-exit.js index 3c5f00d9bb..90f746c39e 100644 --- a/test/js/node/test/parallel/test-child-process-stdout-flush-exit.js +++ b/test/js/node/test/parallel/test-child-process-stdout-flush-exit.js @@ -25,9 +25,14 @@ const assert = require('assert'); // If child process output to console and exit // The console.log statements here are part of the test. +// Note: This test verifies specific behavior that is *not* guaranteed +// by Node.js's API contract. See https://nodejs.org/api/process.html#processexitcode. 
+// We are still generally interested in knowing when this test breaks, +// since applications may rely on the implicit behavior of stdout having +// a buffer size up to which they can write data synchronously. if (process.argv[2] === 'child') { console.log('hello'); - for (let i = 0; i < 200; i++) { + for (let i = 0; i < 100; i++) { console.log('filler'); } console.log('goodbye'); diff --git a/test/js/node/test/parallel/test-cluster-eaddrinuse.js b/test/js/node/test/parallel/test-cluster-eaddrinuse.js index db97029d22..f74d4ab7ec 100644 --- a/test/js/node/test/parallel/test-cluster-eaddrinuse.js +++ b/test/js/node/test/parallel/test-cluster-eaddrinuse.js @@ -59,4 +59,4 @@ if (id === 'undefined') { })); } else { assert(0); // Bad argument. -} \ No newline at end of file +} diff --git a/test/js/node/test/parallel/test-cluster-setup-primary.js b/test/js/node/test/parallel/test-cluster-setup-primary.js index efba017fd7..32bd83fb5a 100644 --- a/test/js/node/test/parallel/test-cluster-setup-primary.js +++ b/test/js/node/test/parallel/test-cluster-setup-primary.js @@ -50,8 +50,7 @@ if (cluster.isWorker) { checks.setupEvent = true; settings = cluster.settings; - if (settings && - settings.args && settings.args[0] === 'custom argument' && + if (settings?.args && settings.args[0] === 'custom argument' && settings.silent === true && settings.exec === process.argv[1]) { checks.settingsObject = true; diff --git a/test/js/node/test/parallel/test-cluster-worker-exit.js b/test/js/node/test/parallel/test-cluster-worker-exit.js index 09e2a83701..1503333b6b 100644 --- a/test/js/node/test/parallel/test-cluster-worker-exit.js +++ b/test/js/node/test/parallel/test-cluster-worker-exit.js @@ -124,7 +124,7 @@ function checkResults(expected_results, results) { const expected = expected_results[k]; assert.strictEqual( - actual, expected && expected.length ? expected[0] : expected, + actual, expected?.length ? 
expected[0] : expected, `${expected[1] || ''} [expected: ${expected[0]} / actual: ${actual}]`); } } diff --git a/test/js/node/test/parallel/test-cluster-worker-kill-signal.js b/test/js/node/test/parallel/test-cluster-worker-kill-signal.js deleted file mode 100644 index 53e3739eba..0000000000 --- a/test/js/node/test/parallel/test-cluster-worker-kill-signal.js +++ /dev/null @@ -1,49 +0,0 @@ -'use strict'; -// test-cluster-worker-kill-signal.js -// verifies that when we're killing a worker using Worker.prototype.kill -// and the worker's process was killed with the given signal (SIGKILL) - - -const common = require('../common'); -const assert = require('assert'); -const cluster = require('cluster'); - -if (cluster.isWorker) { - // Make the worker run something - const http = require('http'); - const server = http.Server(() => { }); - - server.once('listening', common.mustCall()); - server.listen(0, '127.0.0.1'); - -} else if (cluster.isMaster) { - const KILL_SIGNAL = 'SIGKILL'; - - // Start worker - const worker = cluster.fork(); - - // When the worker is up and running, kill it - worker.once('listening', common.mustCall(() => { - worker.kill(KILL_SIGNAL); - })); - - // Check worker events and properties - worker.on('disconnect', common.mustCall(() => { - assert.strictEqual(worker.exitedAfterDisconnect, false); - assert.strictEqual(worker.state, 'disconnected'); - }, 1)); - - // Check that the worker died - worker.once('exit', common.mustCall((exitCode, signalCode) => { - const isWorkerProcessStillAlive = common.isAlive(worker.process.pid); - const numOfRunningWorkers = Object.keys(cluster.workers).length; - - assert.strictEqual(exitCode, null); - assert.strictEqual(signalCode, KILL_SIGNAL); - assert.strictEqual(isWorkerProcessStillAlive, false); - assert.strictEqual(numOfRunningWorkers, 0); - }, 1)); - - // Check if the cluster was killed as well - cluster.on('exit', common.mustCall(1)); -} diff --git a/test/js/node/test/parallel/test-cluster-worker-kill.js b/test/js/node/test/parallel/test-cluster-worker-kill.js index 7307a93e1b..07ab46304f 100644 --- a/test/js/node/test/parallel/test-cluster-worker-kill.js +++ b/test/js/node/test/parallel/test-cluster-worker-kill.js @@ -111,7 +111,7 @@ function checkResults(expected_results, results) { const expected = expected_results[k]; assert.strictEqual( - actual, expected && expected.length ? expected[0] : expected, + actual, expected?.length ? expected[0] : expected, `${expected[1] || ''} [expected: ${expected[0]} / actual: ${actual}]`); } } diff --git a/test/js/node/test/parallel/test-console-assign-undefined.js b/test/js/node/test/parallel/test-console-assign-undefined.js index 1021307b3c..7f5b0e0472 100644 --- a/test/js/node/test/parallel/test-console-assign-undefined.js +++ b/test/js/node/test/parallel/test-console-assign-undefined.js @@ -1,28 +1,28 @@ 'use strict'; -// Patch global.console before importing modules that may modify the console +// Patch globalThis.console before importing modules that may modify the console // object. -const tmp = global.console; -global.console = 42; +const tmp = globalThis.console; +globalThis.console = 42; require('../common'); const assert = require('assert'); // Originally the console had a getter. Test twice to verify it had no side // effect. 
-assert.strictEqual(global.console, 42); -assert.strictEqual(global.console, 42); +assert.strictEqual(globalThis.console, 42); +assert.strictEqual(globalThis.console, 42); assert.throws( () => console.log('foo'), { name: 'TypeError' } ); -global.console = 1; -assert.strictEqual(global.console, 1); +globalThis.console = 1; +assert.strictEqual(globalThis.console, 1); assert.strictEqual(console, 1); // Reset the console -global.console = tmp; +globalThis.console = tmp; console.log('foo'); diff --git a/test/js/node/test/parallel/test-console-instance.js b/test/js/node/test/parallel/test-console-instance.js index 9821afeabd..0364a6213b 100644 --- a/test/js/node/test/parallel/test-console-instance.js +++ b/test/js/node/test/parallel/test-console-instance.js @@ -36,9 +36,9 @@ process.stdout.write = process.stderr.write = common.mustNotCall(); // Make sure that the "Console" function exists. assert.strictEqual(typeof Console, 'function'); -assert.strictEqual(requiredConsole, global.console); +assert.strictEqual(requiredConsole, globalThis.console); // Make sure the custom instanceof of Console works -assert.ok(global.console instanceof Console); +assert.ok(globalThis.console instanceof Console); assert.ok(!({} instanceof Console)); // Make sure that the Console constructor throws @@ -140,6 +140,8 @@ out.write = err.write = (d) => {}; }); }, { + message: 'The "options.inspectOptions" property must be of type object.' + + common.invalidArgTypeHelper(inspectOptions), code: 'ERR_INVALID_ARG_TYPE' } ); diff --git a/test/js/node/test/parallel/test-console-self-assign.js b/test/js/node/test/parallel/test-console-self-assign.js index 53c54ab9a3..46f9bc93d4 100644 --- a/test/js/node/test/parallel/test-console-self-assign.js +++ b/test/js/node/test/parallel/test-console-self-assign.js @@ -3,4 +3,4 @@ require('../common'); // Assigning to itself should not throw. -global.console = global.console; // eslint-disable-line no-self-assign +globalThis.console = globalThis.console; // eslint-disable-line no-self-assign diff --git a/test/js/node/test/parallel/test-crypto-keygen-missing-oid.js b/test/js/node/test/parallel/test-crypto-keygen-missing-oid.js index f7fefe1384..1e4f309292 100644 --- a/test/js/node/test/parallel/test-crypto-keygen-missing-oid.js +++ b/test/js/node/test/parallel/test-crypto-keygen-missing-oid.js @@ -11,6 +11,8 @@ const { getCurves, } = require('crypto'); +const { hasOpenSSL3 } = require('../common/crypto'); + // This test creates EC key pairs on curves without associated OIDs. // Specifying a key encoding should not crash. { @@ -20,7 +22,7 @@ const { continue; const expectedErrorCode = - common.hasOpenSSL3 ? 'ERR_OSSL_MISSING_OID' : 'ERR_OSSL_EC_MISSING_OID'; + hasOpenSSL3 ? 
'ERR_OSSL_MISSING_OID' : 'ERR_OSSL_EC_MISSING_OID'; const params = { namedCurve, publicKeyEncoding: { diff --git a/test/js/node/test/parallel/test-crypto-no-algorithm.js b/test/js/node/test/parallel/test-crypto-no-algorithm.js index 37db38cf61..bb5b81e119 100644 --- a/test/js/node/test/parallel/test-crypto-no-algorithm.js +++ b/test/js/node/test/parallel/test-crypto-no-algorithm.js @@ -4,13 +4,16 @@ const common = require('../common'); if (!common.hasCrypto) common.skip('missing crypto'); -if (!common.hasOpenSSL3) +const { hasOpenSSL3 } = require('../common/crypto'); + +if (!hasOpenSSL3) common.skip('this test requires OpenSSL 3.x'); const assert = require('node:assert/strict'); const crypto = require('node:crypto'); +const { isMainThread } = require('worker_threads'); -if (common.isMainThread) { +if (isMainThread) { // TODO(richardlau): Decide if `crypto.setFips` should error if the // provider named "fips" is not available. crypto.setFips(1); diff --git a/test/js/node/test/parallel/test-crypto-oneshot-hash.js b/test/js/node/test/parallel/test-crypto-oneshot-hash.js index b0340a4189..861aded5dd 100644 --- a/test/js/node/test/parallel/test-crypto-oneshot-hash.js +++ b/test/js/node/test/parallel/test-crypto-oneshot-hash.js @@ -36,7 +36,6 @@ for (const method of methods) { if (method.startsWith('shake') && hasOpenSSL(3, 4)) continue; for (const outputEncoding of ['buffer', 'hex', 'base64', undefined]) { - if (method === 'SHA512-224') continue; const oldDigest = crypto.createHash(method).update(input).digest(outputEncoding || 'hex'); const digestFromBuffer = crypto.hash(method, input, outputEncoding); assert.deepStrictEqual(digestFromBuffer, oldDigest, diff --git a/test/js/node/test/parallel/test-crypto-padding-aes256.js b/test/js/node/test/parallel/test-crypto-padding-aes256.js index 4e25976f28..14d853bdfd 100644 --- a/test/js/node/test/parallel/test-crypto-padding-aes256.js +++ b/test/js/node/test/parallel/test-crypto-padding-aes256.js @@ -19,10 +19,6 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
-/* -Skipped test -https://github.com/electron/electron/blob/bf1d377e083380b4849c5f42aacf3762176eac07/script/node-disabled-tests.json#L25 - 'use strict'; const common = require('../common'); if (!common.hasCrypto) @@ -62,5 +58,3 @@ plaintext = '0123456789abcdef0123456789abcde'; // not a multiple encrypted = encrypt(plaintext, true); decrypted = decrypt(encrypted, true); assert.strictEqual(decrypted, plaintext); - -*/ \ No newline at end of file diff --git a/test/js/node/test/parallel/test-crypto-pbkdf2.js b/test/js/node/test/parallel/test-crypto-pbkdf2.js index e293fed04b..efd8d6eaf0 100644 --- a/test/js/node/test/parallel/test-crypto-pbkdf2.js +++ b/test/js/node/test/parallel/test-crypto-pbkdf2.js @@ -220,7 +220,7 @@ assert.throws( } ); -if (!common.openSSLIsBoringSSL) { +if (!hasOpenSSL3) { const kNotPBKDF2Supported = ['shake128', 'shake256']; crypto.getHashes() .filter((hash) => !kNotPBKDF2Supported.includes(hash)) diff --git a/test/js/node/test/parallel/test-crypto-publicDecrypt-fails-first-time.js b/test/js/node/test/parallel/test-crypto-publicDecrypt-fails-first-time.js index a60b87dbf6..1d64e08920 100644 --- a/test/js/node/test/parallel/test-crypto-publicDecrypt-fails-first-time.js +++ b/test/js/node/test/parallel/test-crypto-publicDecrypt-fails-first-time.js @@ -3,11 +3,15 @@ const common = require('../common'); // Test for https://github.com/nodejs/node/issues/40814 -if (!common.hasCrypto) +if (!common.hasCrypto) { common.skip('missing crypto'); +} -if (!common.hasOpenSSL3) +const { hasOpenSSL3 } = require('../common/crypto'); + +if (!hasOpenSSL3) { common.skip('only openssl3'); // https://github.com/nodejs/node/pull/42793#issuecomment-1107491901 +} const assert = require('assert'); const crypto = require('crypto'); diff --git a/test/js/node/test/parallel/test-crypto-worker-thread.js b/test/js/node/test/parallel/test-crypto-worker-thread.js index 0aebf5384e..d9030d5cfc 100644 --- a/test/js/node/test/parallel/test-crypto-worker-thread.js +++ b/test/js/node/test/parallel/test-crypto-worker-thread.js @@ -31,9 +31,4 @@ if (isMainThread) { } else { console.log(workerData); assert.notDeepStrictEqual(workerData, {}); - if (workerData instanceof CryptoKey) { - assert.deepStrictEqual(structuredClone(workerData), workerData); - } else { - assert(workerData.equals(structuredClone(workerData))); - } } diff --git a/test/js/node/test/parallel/test-debugger-exec.js b/test/js/node/test/parallel/test-debugger-exec.js index 51bc749734..536e0128ea 100644 --- a/test/js/node/test/parallel/test-debugger-exec.js +++ b/test/js/node/test/parallel/test-debugger-exec.js @@ -60,6 +60,11 @@ async function waitInitialBreak() { /\[ 'undefined', 'function' \]/, 'non-paused exec can see global but not module-scope values' ); + + // Ref: https://github.com/nodejs/node/issues/46808 + await cli.waitForPrompt(); + await cli.command('exec { a: 1 }'); + assert.match(cli.output, /\{ a: 1 \}/); } finally { await cli.quit(); } diff --git a/test/js/node/test/parallel/test-dgram-cluster-close-during-bind.js b/test/js/node/test/parallel/test-dgram-cluster-close-during-bind.js index 169a9f985b..065ff094f1 100644 --- a/test/js/node/test/parallel/test-dgram-cluster-close-during-bind.js +++ b/test/js/node/test/parallel/test-dgram-cluster-close-during-bind.js @@ -35,9 +35,4 @@ if (cluster.isPrimary) { }); socket.bind(common.mustNotCall('Socket should not bind.')); - - setTimeout(() => { - console.error("Timed out"); - process.exit(1); - }, 5000).unref(); } diff --git a/test/js/node/test/parallel/test-dgram-connect.js 
b/test/js/node/test/parallel/test-dgram-connect.js index 25a7afda62..30e817f344 100644 --- a/test/js/node/test/parallel/test-dgram-connect.js +++ b/test/js/node/test/parallel/test-dgram-connect.js @@ -14,7 +14,7 @@ client.connect(PORT, common.mustCall(() => { client.connect(PORT, common.mustNotCall()); }, { name: 'Error', - message: /Socket is connected|Already connected/, + message: 'Already connected', code: 'ERR_SOCKET_DGRAM_IS_CONNECTED' }); @@ -23,7 +23,7 @@ client.connect(PORT, common.mustCall(() => { client.disconnect(); }, { name: 'Error', - message: /(Socket is n|N)ot connected/, + message: 'Not connected', code: 'ERR_SOCKET_DGRAM_NOT_CONNECTED' }); @@ -31,7 +31,7 @@ client.connect(PORT, common.mustCall(() => { client.remoteAddress(); }, { name: 'Error', - message: /(Socket is n|N)ot connected/, + message: 'Not connected', code: 'ERR_SOCKET_DGRAM_NOT_CONNECTED' }); @@ -43,7 +43,7 @@ assert.throws(() => { client.connect(PORT); }, { name: 'Error', - message: /Socket is connected|Already connected/, + message: 'Already connected', code: 'ERR_SOCKET_DGRAM_IS_CONNECTED' }); @@ -51,7 +51,7 @@ assert.throws(() => { client.disconnect(); }, { name: 'Error', - message: /(Socket is n|N)ot connected/, + message: 'Not connected', code: 'ERR_SOCKET_DGRAM_NOT_CONNECTED' }); @@ -60,7 +60,7 @@ assert.throws(() => { client.connect(port); }, { name: 'RangeError', - message: /(Port|"Port") should be > 0 and < 65536/, + message: /^Port should be > 0 and < 65536/, code: 'ERR_SOCKET_BAD_PORT' }); }); diff --git a/test/js/node/test/parallel/test-dgram-custom-lookup.js b/test/js/node/test/parallel/test-dgram-custom-lookup.js index f17dc7e2ad..4f80451c52 100644 --- a/test/js/node/test/parallel/test-dgram-custom-lookup.js +++ b/test/js/node/test/parallel/test-dgram-custom-lookup.js @@ -41,7 +41,8 @@ const dns = require('dns'); }, { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError', - message: /The "lookup" argument must be of type function/ + message: 'The "lookup" argument must be of type function.' + + common.invalidArgTypeHelper(value) }); }); } diff --git a/test/js/node/test/parallel/test-dgram-reuseport.js b/test/js/node/test/parallel/test-dgram-reuseport.js index c9b6f7964a..e5fd696581 100644 --- a/test/js/node/test/parallel/test-dgram-reuseport.js +++ b/test/js/node/test/parallel/test-dgram-reuseport.js @@ -12,8 +12,6 @@ function test() { socket2.close(); })); })); - socket1.on('error', common.mustNotCall()); - socket2.on('error', common.mustNotCall()); } checkSupportReusePort().then(test, () => { diff --git a/test/js/node/test/parallel/test-dgram-sendto.js b/test/js/node/test/parallel/test-dgram-sendto.js index ab45a0fc34..967a22383f 100644 --- a/test/js/node/test/parallel/test-dgram-sendto.js +++ b/test/js/node/test/parallel/test-dgram-sendto.js @@ -12,22 +12,26 @@ const errObj = { }; assert.throws(() => socket.sendto(), errObj); -errObj.message = /The "length" argument must be of type number\. Received type string \(["']offset["']\)$/; +errObj.message = 'The "length" argument must be of type number. Received ' + + "type string ('offset')"; assert.throws( () => socket.sendto('buffer', 1, 'offset', 'port', 'address', 'cb'), errObj); -errObj.message = /The "offset" argument must be of type number. Received type string \(["']offset["']\)$/; +errObj.message = 'The "offset" argument must be of type number. Received ' + + "type string ('offset')"; assert.throws( () => socket.sendto('buffer', 'offset', 1, 'port', 'address', 'cb'), errObj); -errObj.message = /The "address" argument must be of type string. 
Received (type boolean \(false\)|false)$/; +errObj.message = 'The "address" argument must be of type string. Received ' + + 'type boolean (false)'; assert.throws( () => socket.sendto('buffer', 1, 1, 10, false, 'cb'), errObj); -errObj.message = /The "port" argument must be of type number. Received (type boolean \(false\)|false)$/; +errObj.message = 'The "port" argument must be of type number. Received ' + + 'type boolean (false)'; assert.throws( () => socket.sendto('buffer', 1, 1, false, 'address', 'cb'), errObj); diff --git a/test/js/node/test/parallel/test-dgram-setBroadcast.js b/test/js/node/test/parallel/test-dgram-setBroadcast.js index 01c1e85786..f4ce7ff06a 100644 --- a/test/js/node/test/parallel/test-dgram-setBroadcast.js +++ b/test/js/node/test/parallel/test-dgram-setBroadcast.js @@ -21,9 +21,5 @@ const dgram = require('dgram'); socket.setBroadcast(true); socket.setBroadcast(false); socket.close(); - - assert.throws(() => { - socket.setBroadcast(true); - }, /^Error: setBroadcast EBADF$/); })); } diff --git a/test/js/node/test/parallel/test-dgram-udp6-link-local-address.js b/test/js/node/test/parallel/test-dgram-udp6-link-local-address.js index 882c35a33d..5c090acc6b 100644 --- a/test/js/node/test/parallel/test-dgram-udp6-link-local-address.js +++ b/test/js/node/test/parallel/test-dgram-udp6-link-local-address.js @@ -10,33 +10,14 @@ const os = require('os'); const { isWindows } = common; function linklocal() { - const candidates = []; - for (const [ifname, entries] of Object.entries(os.networkInterfaces())) { for (const { address, family, scopeid } of entries) { - if (family === 'IPv6' && address.startsWith('fe80:') && !ifname.match(/tailscale/i)) { - candidates.push({ address, ifname, scopeid }); + if (family === 'IPv6' && address.startsWith('fe80:')) { + return { address, ifname, scopeid }; } } } - - // Prefer en0 - for (const candidate of candidates) { - if (candidate.ifname === "en0") { - return candidate; - } - } - - // Prefer non-loopback interfaces - for (const candidate of candidates) { - if (!candidate.ifname.startsWith("lo")) { - return candidate; - } - } - - return candidates[0]; } - const iface = linklocal(); if (!iface) diff --git a/test/js/node/test/parallel/test-dns-get-server.js b/test/js/node/test/parallel/test-dns-get-server.js index 3ce6a45ac7..4fa983c243 100644 --- a/test/js/node/test/parallel/test-dns-get-server.js +++ b/test/js/node/test/parallel/test-dns-get-server.js @@ -6,6 +6,6 @@ const { Resolver } = require('dns'); const resolver = new Resolver(); assert(resolver.getServers().length > 0); - +// return undefined resolver._handle.getServers = common.mustCall(); assert.strictEqual(resolver.getServers().length, 0); diff --git a/test/js/node/test/parallel/test-dns-resolveany-bad-ancount.js b/test/js/node/test/parallel/test-dns-resolveany-bad-ancount.js index 88369a87f8..f3dd8131f0 100644 --- a/test/js/node/test/parallel/test-dns-resolveany-bad-ancount.js +++ b/test/js/node/test/parallel/test-dns-resolveany-bad-ancount.js @@ -10,10 +10,9 @@ const server = dgram.createSocket('udp4'); const resolver = new dns.Resolver({ timeout: 100, tries: 1 }); const resolverPromises = new dnsPromises.Resolver({ timeout: 100, tries: 1 }); -server.on('message', common.mustCallAtLeast((msg, { address, port }) => { +server.on('message', common.mustCall((msg, { address, port }) => { const parsed = dnstools.parseDNSPacket(msg); const domain = parsed.questions[0].domain; - assert.strictEqual(domain, 'example.org'); const buf = dnstools.writeDNSPacket({ diff --git 
a/test/js/node/test/parallel/test-domain-crypto.js b/test/js/node/test/parallel/test-domain-crypto.js index e0a470bd9d..47eb33f70a 100644 --- a/test/js/node/test/parallel/test-domain-crypto.js +++ b/test/js/node/test/parallel/test-domain-crypto.js @@ -31,7 +31,7 @@ const crypto = require('crypto'); // Pollution of global is intentional as part of test. common.allowGlobals(require('domain')); // See https://github.com/nodejs/node/commit/d1eff9ab -global.domain = require('domain'); +globalThis.domain = require('domain'); // Should not throw a 'TypeError: undefined is not a function' exception crypto.randomBytes(8); diff --git a/test/js/node/test/parallel/test-dsa-fips-invalid-key.js b/test/js/node/test/parallel/test-dsa-fips-invalid-key.js index 05cc1d143a..3df51bfbed 100644 --- a/test/js/node/test/parallel/test-dsa-fips-invalid-key.js +++ b/test/js/node/test/parallel/test-dsa-fips-invalid-key.js @@ -1,12 +1,18 @@ 'use strict'; const common = require('../common'); -const fixtures = require('../common/fixtures'); -if (!common.hasFipsCrypto) +if (!common.hasCrypto) { + common.skip('no crypto'); +} + +const fixtures = require('../common/fixtures'); +const crypto = require('crypto'); + +if (!crypto.getFips()) { common.skip('node compiled without FIPS OpenSSL.'); +} const assert = require('assert'); -const crypto = require('crypto'); const input = 'hello'; diff --git a/test/js/node/test/parallel/test-eslint-documented-errors.js b/test/js/node/test/parallel/test-eslint-documented-errors.js index 03131306d7..1759c786fd 100644 --- a/test/js/node/test/parallel/test-eslint-documented-errors.js +++ b/test/js/node/test/parallel/test-eslint-documented-errors.js @@ -27,11 +27,6 @@ new RuleTester().run('documented-errors', rule, { message: `"${invalidCode}" is not documented in doc/api/errors.md`, line: 2 }, - { - message: - `doc/api/errors.md does not have an anchor for "${invalidCode}"`, - line: 2 - }, ] }, ] diff --git a/test/js/node/test/parallel/test-eslint-duplicate-requires.js b/test/js/node/test/parallel/test-eslint-duplicate-requires.js index f2a11b37ca..36c43d9d16 100644 --- a/test/js/node/test/parallel/test-eslint-duplicate-requires.js +++ b/test/js/node/test/parallel/test-eslint-duplicate-requires.js @@ -17,7 +17,7 @@ new RuleTester({ }).run('no-duplicate-requires', rule, { valid: [ { - code: 'require("a"); require("b"); (function() { require("a"); });', + code: '(function() { require("a"); }); (function() { require("a"); });', }, { code: 'require(a); require(a);', diff --git a/test/js/node/test/parallel/test-eslint-prefer-primordials.js b/test/js/node/test/parallel/test-eslint-prefer-primordials.js index 61c84cbadd..b6633c08e5 100644 --- a/test/js/node/test/parallel/test-eslint-prefer-primordials.js +++ b/test/js/node/test/parallel/test-eslint-prefer-primordials.js @@ -177,6 +177,22 @@ new RuleTester({ options: [{ name: 'Symbol' }], errors: [{ message: /const { SymbolIterator } = primordials/ }] }, + { + code: ` + const { SymbolAsyncDispose } = primordials; + const a = { [SymbolAsyncDispose] () {} } + `, + options: [{ name: 'Symbol', polyfilled: ['asyncDispose', 'dispose'] }], + errors: [{ message: /const { SymbolAsyncDispose } = require\("internal\/util"\)/ }] + }, + { + code: ` + const { SymbolDispose } = primordials; + const a = { [SymbolDispose] () {} } + `, + options: [{ name: 'Symbol', polyfilled: ['asyncDispose', 'dispose'] }], + errors: [{ message: /const { SymbolDispose } = require\("internal\/util"\)/ }] + }, { code: ` const { ObjectDefineProperty, Symbol } = primordials; diff --git 
a/test/js/node/test/parallel/test-eslint-require-common-first.js b/test/js/node/test/parallel/test-eslint-require-common-first.js
index ef19f95b97..d7980cebed 100644
--- a/test/js/node/test/parallel/test-eslint-require-common-first.js
+++ b/test/js/node/test/parallel/test-eslint-require-common-first.js
@@ -20,6 +20,12 @@ new RuleTester({
       code: 'require("common")\n' +
             'require("assert")'
     },
+    {
+      code: 'import "../../../../common/index.mjs";',
+      languageOptions: {
+        sourceType: 'module',
+      },
+    },
   ],
   invalid: [
     {
diff --git a/test/js/node/test/parallel/test-event-emitter-invalid-listener.js b/test/js/node/test/parallel/test-event-emitter-invalid-listener.js
index f05766c72e..1abd84e1ac 100644
--- a/test/js/node/test/parallel/test-event-emitter-invalid-listener.js
+++ b/test/js/node/test/parallel/test-event-emitter-invalid-listener.js
@@ -16,5 +16,5 @@ for (const method of eventsMethods) {
     name: 'TypeError',
     message: 'The "listener" argument must be of type function. ' +
              'Received null',
-  });
+  }, `event.${method}('foo', null) should throw the proper error`);
 }
diff --git a/test/js/node/test/parallel/test-event-emitter-listeners.js b/test/js/node/test/parallel/test-event-emitter-listeners.js
index eb1da829c9..4a08ad34c2 100644
--- a/test/js/node/test/parallel/test-event-emitter-listeners.js
+++ b/test/js/node/test/parallel/test-event-emitter-listeners.js
@@ -86,6 +86,11 @@ function listener4() {
   assert.deepStrictEqual(ee.listeners('foo'), []);
 }
 
+{
+  const ee = new events.EventEmitter();
+  assert.deepStrictEqual(ee.listeners(), []);
+}
+
 {
   class TestStream extends events.EventEmitter {}
   const s = new TestStream();
diff --git a/test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-null.js b/test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-null.js
index 673b42336e..81cfc96f43 100644
--- a/test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-null.js
+++ b/test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-null.js
@@ -15,7 +15,8 @@ process.on('warning', common.mustCall((warning) => {
   assert.strictEqual(warning.emitter, e);
   assert.strictEqual(warning.count, 2);
   assert.strictEqual(warning.type, null);
-  assert.ok(warning.message.includes('2 null listeners added to [EventEmitter]. MaxListeners is 1.'));
+  assert.ok(warning.message.includes(
+    '2 null listeners added to [EventEmitter]. MaxListeners is 1.'));
 }));
 
 e.on(null, () => {});
diff --git a/test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-symbol.js b/test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-symbol.js
index e654b7697c..212f9fb1b2 100644
--- a/test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-symbol.js
+++ b/test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-symbol.js
@@ -17,7 +17,8 @@ process.on('warning', common.mustCall((warning) => {
   assert.strictEqual(warning.emitter, e);
   assert.strictEqual(warning.count, 2);
   assert.strictEqual(warning.type, symbol);
-  assert.ok(warning.message.includes('2 Symbol(symbol) listeners added to [EventEmitter]. MaxListeners is 1.'));
+  assert.ok(warning.message.includes(
+    '2 Symbol(symbol) listeners added to [EventEmitter]. MaxListeners is 1.'));
 }));
 
 e.on(symbol, () => {});
diff --git a/test/js/node/test/parallel/test-event-emitter-max-listeners-warning.js b/test/js/node/test/parallel/test-event-emitter-max-listeners-warning.js
index 31bd8d0712..fc23355349 100644
--- a/test/js/node/test/parallel/test-event-emitter-max-listeners-warning.js
+++ b/test/js/node/test/parallel/test-event-emitter-max-listeners-warning.js
@@ -22,7 +22,8 @@ process.on('warning', common.mustCall((warning) => {
   assert.strictEqual(warning.emitter, e);
   assert.strictEqual(warning.count, 2);
   assert.strictEqual(warning.type, 'event-type');
-  assert.ok(warning.message.includes('2 event-type listeners added to [FakeInput]. MaxListeners is 1.'));
+  assert.ok(warning.message.includes(
+    '2 event-type listeners added to [FakeInput]. MaxListeners is 1.'));
 }));
 
 e.on('event-type', () => {});
diff --git a/test/js/node/test/parallel/test-fs-append-file-sync.js b/test/js/node/test/parallel/test-fs-append-file-sync.js
index a3969b1a4a..f32b458535 100644
--- a/test/js/node/test/parallel/test-fs-append-file-sync.js
+++ b/test/js/node/test/parallel/test-fs-append-file-sync.js
@@ -70,7 +70,6 @@ fs.writeFileSync(filename4, currentFileData, common.mustNotMutateObjectDeep({ mo
 [
   true, false, 0, 1, Infinity, () => {}, {}, [], undefined, null,
 ].forEach((value) => {
-  console.log(value);
   assert.throws(
     () => fs.appendFileSync(filename4, value, common.mustNotMutateObjectDeep({ mode: m })),
     { message: /data/, code: 'ERR_INVALID_ARG_TYPE' }
diff --git a/test/js/node/test/parallel/test-fs-close-errors.js b/test/js/node/test/parallel/test-fs-close-errors.js
index 0c48d39cd9..112b93739e 100644
--- a/test/js/node/test/parallel/test-fs-close-errors.js
+++ b/test/js/node/test/parallel/test-fs-close-errors.js
@@ -11,8 +11,8 @@ const fs = require('fs');
   const errObj = {
     code: 'ERR_INVALID_ARG_TYPE',
     name: 'TypeError',
-    // message: 'The "fd" argument must be of type number.' +
-    //          common.invalidArgTypeHelper(input)
+    message: 'The "fd" argument must be of type number.' +
+             common.invalidArgTypeHelper(input)
   };
   assert.throws(() => fs.close(input), errObj);
   assert.throws(() => fs.closeSync(input), errObj);
diff --git a/test/js/node/test/parallel/test-fs-long-path.js b/test/js/node/test/parallel/test-fs-long-path.js
index a544cffd2e..11724a88dc 100644
--- a/test/js/node/test/parallel/test-fs-long-path.js
+++ b/test/js/node/test/parallel/test-fs-long-path.js
@@ -41,14 +41,12 @@ console.log({
   fullPathLength: fullPath.length
 });
 
-console.log(1);
 fs.writeFile(fullPath, 'ok', common.mustSucceed(() => {
-  console.log(2);
   fs.stat(fullPath, common.mustSucceed());
 
   // Tests https://github.com/nodejs/node/issues/39721
-  // fs.realpath.native(fullPath, common.mustSucceed());
+  fs.realpath.native(fullPath, common.mustSucceed());
 
   // Tests https://github.com/nodejs/node/issues/51031
-  // fs.promises.realpath(fullPath).then(common.mustCall(), common.mustNotCall());
+  fs.promises.realpath(fullPath).then(common.mustCall(), common.mustNotCall());
 }));
diff --git a/test/js/node/test/parallel/test-fs-open.js b/test/js/node/test/parallel/test-fs-open.js
index 0855e521f7..56157b0183 100644
--- a/test/js/node/test/parallel/test-fs-open.js
+++ b/test/js/node/test/parallel/test-fs-open.js
@@ -49,8 +49,8 @@ fs.open(__filename, 'r', 0, common.mustSucceed());
 fs.open(__filename, 'r', null, common.mustSucceed());
 
 async function promise() {
-  await fs.promises.open(__filename);
-  await fs.promises.open(__filename, 'r');
+  await (await fs.promises.open(__filename)).close();
+  await (await fs.promises.open(__filename, 'r')).close();
 }
 
 promise().then(common.mustCall()).catch(common.mustNotCall());
diff --git a/test/js/node/test/parallel/test-fs-promises-file-handle-read.js b/test/js/node/test/parallel/test-fs-promises-file-handle-read.js
index 2e9534c398..423f1778bf 100644
--- a/test/js/node/test/parallel/test-fs-promises-file-handle-read.js
+++ b/test/js/node/test/parallel/test-fs-promises-file-handle-read.js
@@ -54,18 +54,26 @@ async function validateLargeRead(options) {
   // from the current position in the file.
   const filePath = fixtures.path('x.txt');
   const fileHandle = await open(filePath, 'r');
-  const pos = 0xffffffff + 1; // max-uint32 + 1
-  const readHandle =
-    await read(fileHandle, Buffer.alloc(1), 0, 1, pos, options);
+  try {
+    const pos = 0xffffffff + 1; // max-uint32 + 1
+    const readHandle =
+      await read(fileHandle, Buffer.alloc(1), 0, 1, pos, options);
 
-  assert.strictEqual(readHandle.bytesRead, 0);
+    assert.strictEqual(readHandle.bytesRead, 0);
+  } finally {
+    await fileHandle.close();
+  }
 }
 
 async function validateReadNoParams() {
   const filePath = fixtures.path('x.txt');
   const fileHandle = await open(filePath, 'r');
   // Should not throw
-  await fileHandle.read();
+  try {
+    await fileHandle.read();
+  } finally {
+    await fileHandle.close();
+  }
 }
 
 // Validates that the zero position is respected after the position has been
@@ -75,15 +83,19 @@ async function validateReadWithPositionZero() {
   const opts = { useConf: true };
   const filePath = fixtures.path('x.txt');
   const fileHandle = await open(filePath, 'r');
-  const expectedSequence = ['x', 'y', 'z'];
+  try {
+    const expectedSequence = ['x', 'y', 'z'];
 
-  for (let i = 0; i < expectedSequence.length * 2; i++) {
-    const len = 1;
-    const pos = i % 3;
-    const buf = Buffer.alloc(len);
-    const { bytesRead } = await read(fileHandle, buf, 0, len, pos, opts);
-    assert.strictEqual(bytesRead, len);
-    assert.strictEqual(buf.toString(), expectedSequence[pos]);
+    for (let i = 0; i < expectedSequence.length * 2; i++) {
+      const len = 1;
+      const pos = i % 3;
+      const buf = Buffer.alloc(len);
+      const { bytesRead } = await read(fileHandle, buf, 0, len, pos, opts);
+      assert.strictEqual(bytesRead, len);
+      assert.strictEqual(buf.toString(), expectedSequence[pos]);
+    }
+  } finally {
+    await fileHandle.close();
   }
 }
 
@@ -92,24 +104,32 @@ async function validateReadLength(len) {
   const opts = { useConf: true };
   const filePath = fixtures.path('x.txt');
   const fileHandle = await open(filePath, 'r');
-  const { bytesRead } = await read(fileHandle, buf, 0, len, 0, opts);
-  assert.strictEqual(bytesRead, len);
+  try {
+    const { bytesRead } = await read(fileHandle, buf, 0, len, 0, opts);
+    assert.strictEqual(bytesRead, len);
+  } finally {
+    await fileHandle.close();
+  }
 }
 
 async function validateReadWithNoOptions(byte) {
   const buf = Buffer.alloc(byte);
   const filePath = fixtures.path('x.txt');
   const fileHandle = await open(filePath, 'r');
-  let response = await fileHandle.read(buf);
-  assert.strictEqual(response.bytesRead, byte);
-  response = await read(fileHandle, buf, 0, undefined, 0);
-  assert.strictEqual(response.bytesRead, byte);
-  response = await read(fileHandle, buf, 0, null, 0);
-  assert.strictEqual(response.bytesRead, byte);
-  response = await read(fileHandle, buf, 0, undefined, 0, { useConf: true });
-  assert.strictEqual(response.bytesRead, byte);
-  response = await read(fileHandle, buf, 0, null, 0, { useConf: true });
-  assert.strictEqual(response.bytesRead, byte);
+  try {
+    let response = await fileHandle.read(buf);
+    assert.strictEqual(response.bytesRead, byte);
+    response = await read(fileHandle, buf, 0, undefined, 0);
+    assert.strictEqual(response.bytesRead, byte);
+    response = await read(fileHandle, buf, 0, null, 0);
+    assert.strictEqual(response.bytesRead, byte);
+    response = await read(fileHandle, buf, 0, undefined, 0, { useConf: true });
+    assert.strictEqual(response.bytesRead, byte);
+    response = await read(fileHandle, buf, 0, null, 0, { useConf: true });
+    assert.strictEqual(response.bytesRead, byte);
+  } finally {
+    await fileHandle.close();
+  }
 }
 
 (async function() {
diff --git a/test/js/node/test/parallel/test-fs-read-promises-optional-params.js b/test/js/node/test/parallel/test-fs-read-promises-optional-params.js
index 07bb6657e4..f9007a69ba 100644
--- a/test/js/node/test/parallel/test-fs-read-promises-optional-params.js
+++ b/test/js/node/test/parallel/test-fs-read-promises-optional-params.js
@@ -17,11 +17,11 @@ read(fd, common.mustNotMutateObjectDeep({}))
     assert.strictEqual(bytesRead, expected.byteLength);
     assert.deepStrictEqual(defaultBufferAsync.byteLength, buffer.byteLength);
   })
-  .then(common.mustCall()).catch(console.error);
+  .then(common.mustCall());
 
 read(fd, bufferAsOption, common.mustNotMutateObjectDeep({ position: 0 }))
   .then(function({ bytesRead, buffer }) {
     assert.strictEqual(bytesRead, expected.byteLength);
     assert.deepStrictEqual(bufferAsOption.byteLength, buffer.byteLength);
   })
-  .then(common.mustCall()).catch(console.error);
+  .then(common.mustCall());
diff --git a/test/js/node/test/parallel/test-fs-read-stream-patch-open.js b/test/js/node/test/parallel/test-fs-read-stream-patch-open.js
index 3c5250e9bf..fbca4f578d 100644
--- a/test/js/node/test/parallel/test-fs-read-stream-patch-open.js
+++ b/test/js/node/test/parallel/test-fs-read-stream-patch-open.js
@@ -2,16 +2,5 @@
 const common = require('../common');
 const fs = require('fs');
 
-// common.expectWarning(
-//   'DeprecationWarning',
-//   'ReadStream.prototype.open() is deprecated', 'DEP0135');
-const s = fs.createReadStream('asd')
-  // We don't care about errors in this test.
-  .on('error', () => {});
-s.open();
-
-process.nextTick(() => {
-  // Allow overriding open().
-  fs.ReadStream.prototype.open = common.mustCall();
-  fs.createReadStream('asd');
-});
+fs.ReadStream.prototype.open = common.mustCall();
+fs.createReadStream('asd');
diff --git a/test/js/node/test/parallel/test-fs-readSync-optional-params.js b/test/js/node/test/parallel/test-fs-readSync-optional-params.js
index f39e8bc469..7fc1abfd91 100644
--- a/test/js/node/test/parallel/test-fs-readSync-optional-params.js
+++ b/test/js/node/test/parallel/test-fs-readSync-optional-params.js
@@ -12,7 +12,6 @@ function runTest(defaultBuffer, options, errorCode = false) {
   let fd;
   try {
     fd = fs.openSync(filepath, 'r');
-    console.log({ options, errorCode });
     if (errorCode) {
       assert.throws(
         () => fs.readSync(fd, defaultBuffer, options),
diff --git a/test/js/node/test/parallel/test-fs-realpath.js b/test/js/node/test/parallel/test-fs-realpath.js
index f1fba3e0a5..69237e3974 100644
--- a/test/js/node/test/parallel/test-fs-realpath.js
+++ b/test/js/node/test/parallel/test-fs-realpath.js
@@ -23,12 +23,11 @@ const common = require('../common');
 const fixtures = require('../common/fixtures');
 const tmpdir = require('../common/tmpdir');
+const { isMainThread } = require('worker_threads');
 
-if (!common.isMainThread)
+if (!isMainThread) {
   common.skip('process.chdir is not available in Workers');
-
-if (common.isWindows && process.env.CI)
-  common.skip('Bun CI windows runners have a bug; verified works locally in admin shell or with symlinks enabled.');
+}
 
 const assert = require('assert');
 const fs = require('fs');
diff --git a/test/js/node/test/parallel/test-fs-stat-bigint.js b/test/js/node/test/parallel/test-fs-stat-bigint.js
index b9fde22881..0a2bea92e5 100644
--- a/test/js/node/test/parallel/test-fs-stat-bigint.js
+++ b/test/js/node/test/parallel/test-fs-stat-bigint.js
@@ -18,7 +18,6 @@ function getFilename() {
   return filename;
 }
 
-
 function verifyStats(bigintStats, numStats, allowableDelta) {
   // allowableDelta: It's possible that the file stats are updated between the
   // two stat() calls so allow for a small difference.
diff --git a/test/js/node/test/parallel/test-fs-stat-date.mjs b/test/js/node/test/parallel/test-fs-stat-date.mjs
index 9ccab9c6e6..5f85bff273 100644
--- a/test/js/node/test/parallel/test-fs-stat-date.mjs
+++ b/test/js/node/test/parallel/test-fs-stat-date.mjs
@@ -45,9 +45,7 @@ function closeEnough(actual, expected, margin) {
 async function runTest(atime, mtime, margin = 0) {
   margin += Number.EPSILON;
   try {
-    const atimeDate = new Date(atime);
-    const mtimeDate = new Date(mtime);
-    await fsPromises.utimes(filepath, atimeDate, mtimeDate);
+    await fsPromises.utimes(filepath, new Date(atime), new Date(mtime));
   } catch (e) {
     if (ignoredErrors.has(e.code)) return;
     throw e;
diff --git a/test/js/node/test/parallel/test-fs-stream-fs-options.js b/test/js/node/test/parallel/test-fs-stream-fs-options.js
index a8251db0e6..4e4d17391e 100644
--- a/test/js/node/test/parallel/test-fs-stream-fs-options.js
+++ b/test/js/node/test/parallel/test-fs-stream-fs-options.js
@@ -27,8 +27,8 @@ const originalFs = { fs };
       () => fs.createWriteStream(file, opts),
       {
         code: 'ERR_INVALID_ARG_TYPE',
         name: 'TypeError',
-        // message: `The "options.fs.${fn}" property must be of type function. ` +
-        //          'Received null'
+        message: `The "options.fs.${fn}" property must be of type function. ` +
+                 'Received null'
       },
       `createWriteStream options.fs.${fn} should throw if isn't a function`
     );
@@ -45,8 +45,8 @@ const originalFs = { fs };
       () => fs.createWriteStream(file, opts),
       {
         code: 'ERR_INVALID_ARG_TYPE',
         name: 'TypeError',
-        // message: 'The "options.fs.writev" property must be of type function. ' +
-        //          'Received type string (\'not a fn\')'
+        message: 'The "options.fs.writev" property must be of type function. ' +
+                 'Received type string (\'not a fn\')'
       },
       'createWriteStream options.fs.writev should throw if isn\'t a function'
     );
@@ -63,8 +63,8 @@ const originalFs = { fs };
       () => fs.createReadStream(file, opts),
       {
         code: 'ERR_INVALID_ARG_TYPE',
         name: 'TypeError',
-        // message: `The "options.fs.${fn}" property must be of type function. ` +
-        //          'Received null'
+        message: `The "options.fs.${fn}" property must be of type function. ` +
+                 'Received null'
       },
       `createReadStream options.fs.${fn} should throw if isn't a function`
     );
diff --git a/test/js/node/test/parallel/test-fs-truncate-sync.js b/test/js/node/test/parallel/test-fs-truncate-sync.js
index 66250cf438..c529fa6f39 100644
--- a/test/js/node/test/parallel/test-fs-truncate-sync.js
+++ b/test/js/node/test/parallel/test-fs-truncate-sync.js
@@ -12,10 +12,7 @@ const filename = path.resolve(tmp, 'truncate-sync-file.txt');
 
 fs.writeFileSync(filename, 'hello world', 'utf8');
 
-const fd = fs.openSync(filename, 'r+');
+fs.truncateSync(filename, 5);
+assert(fs.readFileSync(filename).equals(Buffer.from('hello')));
 
-fs.truncateSync(fd, 5);
-assert(fs.readFileSync(fd).equals(Buffer.from('hello')));
-
-fs.closeSync(fd);
 fs.unlinkSync(filename);
diff --git a/test/js/node/test/parallel/test-fs-watch-stop-async.js b/test/js/node/test/parallel/test-fs-watch-stop-async.js
new file mode 100644
index 0000000000..64995730b6
--- /dev/null
+++ b/test/js/node/test/parallel/test-fs-watch-stop-async.js
@@ -0,0 +1,19 @@
+'use strict';
+const common = require('../common');
+const assert = require('assert');
+const fs = require('fs');
+
+const watch = fs.watchFile(__filename, () => {});
+let triggered;
+const listener = common.mustCall(() => {
+  triggered = true;
+});
+
+triggered = false;
+watch.once('stop', listener); // Should trigger.
+watch.stop();
+assert.strictEqual(triggered, false);
+setImmediate(() => {
+  assert.strictEqual(triggered, true);
+  watch.removeListener('stop', listener);
+});
\ No newline at end of file
diff --git a/test/js/node/test/parallel/test-fs-watchfile.js b/test/js/node/test/parallel/test-fs-watchfile.js
new file mode 100644
index 0000000000..6fabedd67e
--- /dev/null
+++ b/test/js/node/test/parallel/test-fs-watchfile.js
@@ -0,0 +1,112 @@
+'use strict';
+const common = require('../common');
+
+const assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+
+const tmpdir = require('../common/tmpdir');
+
+// Basic usage tests.
+assert.throws(
+  () => {
+    fs.watchFile('./some-file');
+  },
+  {
+    code: 'ERR_INVALID_ARG_TYPE',
+    name: 'TypeError'
+  });
+
+assert.throws(
+  () => {
+    fs.watchFile('./another-file', {}, 'bad listener');
+  },
+  {
+    code: 'ERR_INVALID_ARG_TYPE',
+    name: 'TypeError'
+  });
+
+assert.throws(() => {
+  fs.watchFile(new Object(), common.mustNotCall());
+}, { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError' });
+
+const enoentFile = tmpdir.resolve('non-existent-file');
+const expectedStatObject = new fs.Stats(
+  0,                              // dev
+  0,                              // mode
+  0,                              // nlink
+  0,                              // uid
+  0,                              // gid
+  0,                              // rdev
+  0,                              // blksize
+  0,                              // ino
+  0,                              // size
+  0,                              // blocks
+  Date.UTC(1970, 0, 1, 0, 0, 0),  // atime
+  Date.UTC(1970, 0, 1, 0, 0, 0),  // mtime
+  Date.UTC(1970, 0, 1, 0, 0, 0),  // ctime
+  Date.UTC(1970, 0, 1, 0, 0, 0)   // birthtime
+);
+
+tmpdir.refresh();
+
+// If the file initially didn't exist, and gets created at a later point of
+// time, the callback should be invoked again with proper values in stat object
+let fileExists = false;
+
+const watcher =
+  fs.watchFile(enoentFile, { interval: 0 }, common.mustCall((curr, prev) => {
+    if (!fileExists) {
+      // If the file does not exist, all the fields should be zero and the date
+      // fields should be UNIX EPOCH time
+      assert.deepStrictEqual(curr, expectedStatObject);
+      assert.deepStrictEqual(prev, expectedStatObject);
+      // Create the file now, so that the callback will be invoked once the
+      // event loop notices it.
+      fs.closeSync(fs.openSync(enoentFile, 'w'));
+      fileExists = true;
+    } else {
+      // If the ino (inode) value is greater than zero, it means that the file
+      // is present in the filesystem and it has a valid inode number.
+      assert(curr.ino > 0);
+      // As the file just got created, the previous ino value should be less
+      // than or equal to zero (non-existent file).
+      assert(prev.ino <= 0);
+      // Stop watching the file
+      fs.unwatchFile(enoentFile);
+      watcher.stop(); // Stopping a stopped watcher should be a noop
+    }
+  }, 2));
+
+// 'stop' should only be emitted once - stopping a stopped watcher should
+// not trigger a 'stop' event.
+watcher.on('stop', common.mustCall());
+
+// Watch events should callback with a filename on supported systems.
+// Omitting AIX. It works but not reliably.
+if (common.isLinux || common.isMacOS || common.isWindows) {
+  const dir = tmpdir.resolve('watch');
+
+  function doWatch() {
+    const handle = fs.watch(dir, common.mustCall(function(eventType, filename) {
+      clearInterval(interval);
+      handle.close();
+      assert.strictEqual(filename, 'foo.txt');
+    }));
+
+    const interval = setInterval(() => {
+      fs.writeFile(path.join(dir, 'foo.txt'), 'foo', common.mustCall((err) => {
+        if (err) assert.fail(err);
+      }));
+    }, 1);
+  }
+
+  fs.mkdir(dir, common.mustSucceed(() => {
+    if (common.isMacOS) {
+      // On macOS delay watcher start to avoid leaking previous events.
+      // Refs: https://github.com/libuv/libuv/pull/4503
+      setTimeout(doWatch, common.platformTimeout(100));
+    } else {
+      doWatch();
+    }
+  }));
+}
\ No newline at end of file
diff --git a/test/js/node/test/parallel/test-fs-whatwg-url.js b/test/js/node/test/parallel/test-fs-whatwg-url.js
index 7401ed7e76..2d5664cd12 100644
--- a/test/js/node/test/parallel/test-fs-whatwg-url.js
+++ b/test/js/node/test/parallel/test-fs-whatwg-url.js
@@ -5,6 +5,8 @@ const fixtures = require('../common/fixtures');
 const assert = require('assert');
 const fs = require('fs');
 const tmpdir = require('../common/tmpdir');
+const { isMainThread } = require('worker_threads');
+
 tmpdir.refresh();
 
 const url = fixtures.fileURL('a.js');
@@ -86,7 +88,7 @@ if (common.isWindows) {
 // Test that strings are interpreted as paths and not as URL
 // Can't use process.chdir in Workers
 // Please avoid testing fs.rmdir('file:') or using it as cleanup
-if (common.isMainThread && !common.isWindows) {
+if (isMainThread && !common.isWindows) {
   const oldCwd = process.cwd();
   process.chdir(tmpdir.path);
 
diff --git a/test/js/node/test/parallel/test-fs-write-file-sync.js b/test/js/node/test/parallel/test-fs-write-file-sync.js
index 4ead91530b..e5fbe32eab 100644
--- a/test/js/node/test/parallel/test-fs-write-file-sync.js
+++ b/test/js/node/test/parallel/test-fs-write-file-sync.js
@@ -21,9 +21,11 @@
 'use strict';
 const common = require('../common');
+const { isMainThread } = require('worker_threads');
 
-if (!common.isMainThread)
+if (!isMainThread) {
   common.skip('Setting process.umask is not supported in Workers');
+}
 
 const assert = require('assert');
 const fs = require('fs');
diff --git a/test/js/node/test/parallel/test-fs-write-reuse-callback.js b/test/js/node/test/parallel/test-fs-write-reuse-callback.js
index 82c772ab34..c80902e541 100644
--- a/test/js/node/test/parallel/test-fs-write-reuse-callback.js
+++ b/test/js/node/test/parallel/test-fs-write-reuse-callback.js
@@ -20,7 +20,7 @@ let done = 0;
 
 const ondone = common.mustSucceed(() => {
   if (++done < writes) {
-    if (done % 25 === 0) global.gc();
+    if (done % 25 === 0) globalThis.gc();
     setImmediate(write);
   } else {
     assert.strictEqual(
diff --git a/test/js/node/test/parallel/test-fs-write-stream-patch-open.js b/test/js/node/test/parallel/test-fs-write-stream-patch-open.js
index e07a308123..88c4db469d 100644
--- a/test/js/node/test/parallel/test-fs-write-stream-patch-open.js
+++ b/test/js/node/test/parallel/test-fs-write-stream-patch-open.js
@@ -22,15 +22,6 @@ if (process.argv[2] !== 'child') {
 }
 
 // Child
-
-// common.expectWarning(
-//   'DeprecationWarning',
-//   'WriteStream.prototype.open() is deprecated', 'DEP0135');
-const s = fs.createWriteStream(`${tmpdir.path}/out`);
-s.open();
-
-process.nextTick(() => {
-  // Allow overriding open().
-  fs.WriteStream.prototype.open = common.mustCall();
-  fs.createWriteStream('asd');
-});
+// Allow overriding open().
+fs.WriteStream.prototype.open = common.mustCall();
+fs.createWriteStream('asd');
diff --git a/test/js/node/test/parallel/test-http-chunk-problem.js b/test/js/node/test/parallel/test-http-chunk-problem.js
index b6b30dbee5..90c54b8f5c 100644
--- a/test/js/node/test/parallel/test-http-chunk-problem.js
+++ b/test/js/node/test/parallel/test-http-chunk-problem.js
@@ -1,9 +1,12 @@
 'use strict';
 // http://groups.google.com/group/nodejs/browse_thread/thread/f66cd3c960406919
 const common = require('../common');
-if (!common.hasCrypto)
-  common.skip('missing crypto');
+if (!common.hasCrypto) {
+  common.skip('missing crypto');
+}
+
+const fs = require('fs');
 
 const assert = require('assert');
 
 if (process.argv[2] === 'request') {
@@ -23,15 +26,11 @@ if (process.argv[2] === 'request') {
 if (process.argv[2] === 'shasum') {
   const crypto = require('crypto');
   const shasum = crypto.createHash('sha1');
-  let total = 0;
   process.stdin.on('data', (d) => {
-    console.warn("Chunk: " + d.length);
-    total += d.length;
     shasum.update(d);
   });
 
   process.stdin.on('close', () => {
-    console.warn("Total:", total);
     process.stdout.write(shasum.digest('hex'));
   });
 
@@ -64,9 +63,8 @@ function executeRequest(cb) {
     { env },
     (err, stdout, stderr) => {
       if (stderr.trim() !== '') {
-        console.error(stderr);
+        console.log(stderr);
       }
-      console.log(stdout.toString());
       assert.ifError(err);
       assert.strictEqual(stdout.slice(0, 40),
                          '8c206a1a87599f532ce68675536f0b1546900d7a');
@@ -78,7 +76,11 @@ function executeRequest(cb) {
 
 tmpdir.refresh();
 
-common.createZeroFilledFile(filename);
+
+// Create a zero-filled file.
+const fd = fs.openSync(filename, 'w');
+fs.ftruncateSync(fd, 10 * 1024 * 1024);
+fs.closeSync(fd);
 
 server = http.createServer(function(req, res) {
   res.writeHead(200);
diff --git a/test/js/node/test/parallel/test-http-content-length-mismatch.js b/test/js/node/test/parallel/test-http-content-length-mismatch.js
index 540acbe759..2d4714694d 100644
--- a/test/js/node/test/parallel/test-http-content-length-mismatch.js
+++ b/test/js/node/test/parallel/test-http-content-length-mismatch.js
@@ -78,3 +78,23 @@ function shouldThrowOnFewerBytes() {
 shouldThrowOnMoreBytes();
 shouldNotThrow();
 shouldThrowOnFewerBytes();
+
+
+{
+  const server = http.createServer(common.mustCall((req, res) => {
+    res.strictContentLength = true;
+    // Pass content-length as string
+    res.setHeader('content-length', '5');
+    res.end('12345');
+  }));
+
+
+  server.listen(0, common.mustCall(() => {
+    http.get({ port: server.address().port }, common.mustCall((res) => {
+      res.resume().on('end', common.mustCall(() => {
+        assert.strictEqual(res.statusCode, 200);
+        server.close();
+      }));
+    }));
+  }));
+}
diff --git a/test/js/node/test/parallel/test-http-dummy-characters-smuggling.js b/test/js/node/test/parallel/test-http-dummy-characters-smuggling.js
index 3bfead0f13..ac6f8560e7 100644
--- a/test/js/node/test/parallel/test-http-dummy-characters-smuggling.js
+++ b/test/js/node/test/parallel/test-http-dummy-characters-smuggling.js
@@ -43,6 +43,7 @@ const assert = require('assert');
 {
   const server = http.createServer((request, response) => {
     // Since chunk parsing failed, none of this should be called
+    request.on('data', common.mustNotCall());
     request.on('end', common.mustNotCall());
   });
 
diff --git a/test/js/node/test/parallel/test-http-header-obstext.js b/test/js/node/test/parallel/test-http-header-obstext.js
index 88c39cbb44..23aea246d7 100644
--- a/test/js/node/test/parallel/test-http-header-obstext.js
+++ b/test/js/node/test/parallel/test-http-header-obstext.js
@@ -18,4 +18,4 @@ server.listen(0, () => {
     assert.strictEqual(res.statusCode, 200);
     server.close();
   }));
-});
\ No newline at end of file
+});
diff --git a/test/js/node/test/parallel/test-http-invalid-te.js b/test/js/node/test/parallel/test-http-invalid-te.js
index 5e7fb75e15..5651e94186 100644
--- a/test/js/node/test/parallel/test-http-invalid-te.js
+++ b/test/js/node/test/parallel/test-http-invalid-te.js
@@ -35,6 +35,6 @@ server.listen(0, common.mustCall(() => {
   const client = net.connect(
     server.address().port,
     common.mustCall(() => {
-      client.write(REQUEST_BB.replace(/\n/g, '\r\n'));
+      client.end(REQUEST_BB.replace(/\n/g, '\r\n'));
     }));
 }));
diff --git a/test/js/node/test/parallel/test-http-invalidheaderfield.js b/test/js/node/test/parallel/test-http-invalidheaderfield.js
index 77cc39eb51..01315ba690 100644
--- a/test/js/node/test/parallel/test-http-invalidheaderfield.js
+++ b/test/js/node/test/parallel/test-http-invalidheaderfield.js
@@ -15,7 +15,7 @@ const server = http.createServer(function(req, res) {
   }, TypeError);
   res.end('');
 });
-server.listen(0, "127.0.0.1", function() {
+server.listen(0, function() {
 
   http.get({ port: this.address().port }, function() {
     ee.emit('done');
diff --git a/test/js/node/test/parallel/test-http-keep-alive-pipeline-max-requests.js b/test/js/node/test/parallel/test-http-keep-alive-pipeline-max-requests.js
index 7528a8a7fb..6a07eb2638 100644
--- a/test/js/node/test/parallel/test-http-keep-alive-pipeline-max-requests.js
+++ b/test/js/node/test/parallel/test-http-keep-alive-pipeline-max-requests.js
@@ -68,6 +68,7 @@ server.listen(0, common.mustCall((res) => {
       buffer += data;
       const responseParts = buffer.trim().split('\r\n\r\n');
+
       if (responseParts.length === 8) {
         assertResponse(responseParts[0], responseParts[1]);
         assertResponse(responseParts[2], responseParts[3]);
diff --git a/test/js/node/test/parallel/test-http-request-methods.js b/test/js/node/test/parallel/test-http-request-methods.js
index 2f0da7432a..3532d45c63 100644
--- a/test/js/node/test/parallel/test-http-request-methods.js
+++ b/test/js/node/test/parallel/test-http-request-methods.js
@@ -48,6 +48,7 @@ const http = require('http');
       });
 
       c.on('data', function(chunk) {
+        console.log(chunk);
         server_response += chunk;
       });
 
diff --git a/test/js/node/test/parallel/test-http-server-capture-rejections.js b/test/js/node/test/parallel/test-http-server-capture-rejections.js
index ea647811e4..b11618a615 100644
--- a/test/js/node/test/parallel/test-http-server-capture-rejections.js
+++ b/test/js/node/test/parallel/test-http-server-capture-rejections.js
@@ -100,7 +100,6 @@ events.captureRejections = true;
   });
 
   req.end();
-
   req.on('error', common.mustCall((err) => {
     assert.strictEqual(err.code, 'ECONNRESET');
     server.close();
diff --git a/test/js/node/test/parallel/test-http-server-connections-checking-leak.js b/test/js/node/test/parallel/test-http-server-connections-checking-leak.js
index 282c9a569f..38dca83102 100644
--- a/test/js/node/test/parallel/test-http-server-connections-checking-leak.js
+++ b/test/js/node/test/parallel/test-http-server-connections-checking-leak.js
@@ -20,5 +20,5 @@ for (let i = 0; i < max; i++) {
 }
 
 setImmediate(() => {
-  global.gc();
+  globalThis.gc();
 });
diff --git a/test/js/node/test/parallel/test-http-wget.js b/test/js/node/test/parallel/test-http-wget.js
index 0abe850d3f..2ce6f6f698 100644
--- a/test/js/node/test/parallel/test-http-wget.js
+++ b/test/js/node/test/parallel/test-http-wget.js
@@ -56,7 +56,6 @@ server.on('listening', common.mustCall(() => {
 
   c.on('connect', () => {
     c.write('GET / HTTP/1.0\r\n' +
-            'Host: localhost\r\n' +
            'Connection: Keep-Alive\r\n\r\n');
   });
 
diff --git a/test/js/node/test/parallel/test-http2-altsvc.js b/test/js/node/test/parallel/test-http2-altsvc.js
index 4e5b8f6424..c5abfc3326 100644
--- a/test/js/node/test/parallel/test-http2-altsvc.js
+++ b/test/js/node/test/parallel/test-http2-altsvc.js
@@ -102,7 +102,7 @@ server.on('session', common.mustCall((session) => {
 }));
 
 server.listen(0, common.mustCall(() => {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = http2.connect(`http://localhost:${server.address().port}`);
 
   const countdown = new Countdown(4, () => {
     client.close();
diff --git a/test/js/node/test/parallel/test-http2-cancel-while-client-reading.js b/test/js/node/test/parallel/test-http2-cancel-while-client-reading.js
index 2011a6e2b2..189e128e66 100644
--- a/test/js/node/test/parallel/test-http2-cancel-while-client-reading.js
+++ b/test/js/node/test/parallel/test-http2-cancel-while-client-reading.js
@@ -24,11 +24,10 @@ server.on('stream', common.mustCall(function(stream) {
 }));
 
 server.listen(0, function() {
-  const client = http2.connect(`https://127.0.0.1:${server.address().port}`,
+  const client = http2.connect(`https://localhost:${server.address().port}`,
                                { rejectUnauthorized: false }
   );
 
   client_stream = client.request({ ':method': 'POST' });
-
   client_stream.on('close', common.mustCall(() => {
     client.close();
     server.close();
diff --git a/test/js/node/test/parallel/test-http2-client-port-80.js b/test/js/node/test/parallel/test-http2-client-port-80.js
index 016abc69e3..a286dbf6a7 100644
--- a/test/js/node/test/parallel/test-http2-client-port-80.js
+++ b/test/js/node/test/parallel/test-http2-client-port-80.js
@@ -22,4 +22,4 @@ const client = http2.connect('http://localhost:80');
 // mustNotCall.
 client.on('error', () => {});
 
-client.close();
\ No newline at end of file
+client.close();
diff --git a/test/js/node/test/parallel/test-http2-client-request-options-errors.js b/test/js/node/test/parallel/test-http2-client-request-options-errors.js
index 229a5e9130..48e76c7a2f 100644
--- a/test/js/node/test/parallel/test-http2-client-request-options-errors.js
+++ b/test/js/node/test/parallel/test-http2-client-request-options-errors.js
@@ -11,7 +11,6 @@ const http2 = require('http2');
 
 const optionsToTest = {
   endStream: 'boolean',
-  weight: 'number',
   parent: 'number',
   exclusive: 'boolean',
   silent: 'boolean'
@@ -54,4 +53,4 @@ server.listen(0, common.mustCall(() => {
     server.close();
     client.close();
   });
-}));
\ No newline at end of file
+}));
diff --git a/test/js/node/test/parallel/test-http2-client-stream-destroy-before-connect.js b/test/js/node/test/parallel/test-http2-client-stream-destroy-before-connect.js
index 5d059b7acd..087b06d01b 100644
--- a/test/js/node/test/parallel/test-http2-client-stream-destroy-before-connect.js
+++ b/test/js/node/test/parallel/test-http2-client-stream-destroy-before-connect.js
@@ -43,6 +43,7 @@ server.listen(0, common.mustCall(() => {
   }));
 
   req.on('close', common.mustCall(() => {
+    assert.strictEqual(req.rstCode, NGHTTP2_INTERNAL_ERROR);
     assert.strictEqual(req.rstCode, NGHTTP2_INTERNAL_ERROR);
     countdown.dec();
   }));
diff --git a/test/js/node/test/parallel/test-http2-client-upload.js b/test/js/node/test/parallel/test-http2-client-upload.js
index 14f2c5d403..d073cd94e6 100644
--- a/test/js/node/test/parallel/test-http2-client-upload.js
+++ b/test/js/node/test/parallel/test-http2-client-upload.js
@@ -42,7 +42,7 @@ fs.readFile(loc, common.mustSucceed((data) => {
 }));
 
 server.listen(0, common.mustCall(() => {
-  client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  client = http2.connect(`http://localhost:${server.address().port}`);
 
   const req = client.request({ ':method': 'POST' });
   req.on('response', common.mustCall());
diff --git a/test/js/node/test/parallel/test-http2-close-while-writing.js b/test/js/node/test/parallel/test-http2-close-while-writing.js
index 4d27aab8e2..d8537c31b0 100644
--- a/test/js/node/test/parallel/test-http2-close-while-writing.js
+++ b/test/js/node/test/parallel/test-http2-close-while-writing.js
@@ -32,7 +32,7 @@ server.on('session', common.mustCall(function(session) {
 }));
 
 server.listen(0, function() {
-  const client = http2.connect(`https://127.0.0.1:${server.address().port}`, {
+  const client = http2.connect(`https://localhost:${server.address().port}`, {
     ca,
     maxSessionMemory: 1000
   });
diff --git a/test/js/node/test/parallel/test-http2-compat-client-upload-reject.js b/test/js/node/test/parallel/test-http2-compat-client-upload-reject.js
index 82ce936e55..2378ef27df 100644
--- a/test/js/node/test/parallel/test-http2-compat-client-upload-reject.js
+++ b/test/js/node/test/parallel/test-http2-compat-client-upload-reject.js
@@ -24,7 +24,7 @@ fs.readFile(loc, common.mustSucceed((data) => {
   server.on('close', common.mustCall());
   server.listen(0, common.mustCall(() => {
-    const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+    const client = http2.connect(`http://localhost:${server.address().port}`);
     client.on('close', common.mustCall());
 
     const req = client.request({ ':method': 'POST' });
diff --git a/test/js/node/test/parallel/test-http2-compat-serverrequest-end.js b/test/js/node/test/parallel/test-http2-compat-serverrequest-end.js
index d914c522b3..cee5fa47ad 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverrequest-end.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverrequest-end.js
@@ -15,7 +15,6 @@ server.listen(0, common.mustCall(function() {
   server.once('request', common.mustCall(function(request, response) {
     assert.strictEqual(request.complete, false);
     request.on('data', () => {});
-
     request.on('end', common.mustCall(() => {
       assert.strictEqual(request.complete, true);
       response.on('finish', common.mustCall(function() {
diff --git a/test/js/node/test/parallel/test-http2-compat-serverrequest-host.js b/test/js/node/test/parallel/test-http2-compat-serverrequest-host.js
index d6702f237c..e5593deb1e 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverrequest-host.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverrequest-host.js
@@ -19,7 +19,7 @@ server.listen(0, common.mustCall(function() {
       ':path': '/foobar',
       ':method': 'GET',
       ':scheme': 'http',
-      'host': `127.0.0.1:${port}`
+      'host': `localhost:${port}`
     };
 
     assert.strictEqual(request.authority, expected.host);
@@ -35,6 +35,7 @@ server.listen(0, common.mustCall(function() {
       assert.notStrictEqual(position, -1);
       assert.strictEqual(rawHeaders[position + 1], value);
     }
+
     assert(!Object.hasOwn(headers, ':authority'));
     assert(!Object.hasOwn(rawHeaders, ':authority'));
 
@@ -44,13 +45,13 @@ server.listen(0, common.mustCall(function() {
     response.end();
   }));
 
-  const url = `http://127.0.0.1:${port}`;
+  const url = `http://localhost:${port}`;
   const client = h2.connect(url, common.mustCall(function() {
     const headers = {
       ':path': '/foobar',
       ':method': 'GET',
       ':scheme': 'http',
-      'host': `127.0.0.1:${port}`
+      'host': `localhost:${port}`
     };
     const request = client.request(headers);
     request.on('end', common.mustCall(function() {
diff --git a/test/js/node/test/parallel/test-http2-compat-serverrequest-settimeout.js b/test/js/node/test/parallel/test-http2-compat-serverrequest-settimeout.js
index cee2a58192..44abf29caf 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverrequest-settimeout.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverrequest-settimeout.js
@@ -26,12 +26,12 @@ server.on('request', (req, res) => {
 
 server.listen(0, common.mustCall(() => {
   const port = server.address().port;
-  const client = http2.connect(`http://127.0.0.1:${port}`);
+  const client = http2.connect(`http://localhost:${port}`);
   const req = client.request({
     ':path': '/',
     ':method': 'GET',
     ':scheme': 'http',
-    ':authority': `127.0.0.1:${port}`
+    ':authority': `localhost:${port}`
   });
   req.on('end', common.mustCall(() => {
     client.close();
diff --git a/test/js/node/test/parallel/test-http2-compat-serverrequest.js b/test/js/node/test/parallel/test-http2-compat-serverrequest.js
index 2d1b87c882..d92da61d94 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverrequest.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverrequest.js
@@ -29,7 +29,7 @@ server.listen(0, common.mustCall(function() {
 
     response.on('finish', common.mustCall(function() {
       process.nextTick(() => {
-        // assert.ok(request.socket);
+        assert.ok(request.socket);
         server.close();
       });
     }));
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-destroy.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-destroy.js
index c20f0103df..1154b69df4 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-destroy.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-destroy.js
@@ -34,7 +34,7 @@ const server = http2.createServer(common.mustCall((req, res) => {
 }, 3));
 
 server.listen(0, common.mustCall(() => {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = http2.connect(`http://localhost:${server.address().port}`);
 
   const countdown = new Countdown(3, () => {
     server.close();
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-end.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-end.js
index 45e29048be..52d4c603e9 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-end.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-end.js
@@ -159,13 +159,13 @@ const {
 }));
 server.listen(0, mustCall(() => {
   const { port } = server.address();
-  const url = `http://127.0.0.1:${port}`;
+  const url = `http://localhost:${port}`;
   const client = connect(url, mustCall(() => {
     const headers = {
       ':path': '/',
       ':method': 'HEAD',
       ':scheme': 'http',
-      ':authority': `127.0.0.1:${port}`
+      ':authority': `localhost:${port}`
     };
     const request = client.request(headers);
     request.on('response', mustCall((headers, flags) => {
@@ -193,13 +193,13 @@ const {
 }));
 server.listen(0, mustCall(() => {
   const { port } = server.address();
-  const url = `http://127.0.0.1:${port}`;
+  const url = `http://localhost:${port}`;
   const client = connect(url, mustCall(() => {
     const headers = {
       ':path': '/',
       ':method': 'HEAD',
       ':scheme': 'http',
-      ':authority': `127.0.0.1:${port}`
+      ':authority': `localhost:${port}`
     };
     const request = client.request(headers);
     request.on('data', mustNotCall());
@@ -224,13 +224,13 @@ const {
 }));
 server.listen(0, mustCall(() => {
   const { port } = server.address();
-  const url = `http://127.0.0.1:${port}`;
+  const url = `http://localhost:${port}`;
   const client = connect(url, mustCall(() => {
     const headers = {
       ':path': '/',
       ':method': 'HEAD',
       ':scheme': 'http',
-      ':authority': `127.0.0.1:${port}`
+      ':authority': `localhost:${port}`
     };
     const request = client.request(headers);
     request.on('response', mustCall((headers, flags) => {
@@ -259,13 +259,13 @@ const {
 }));
 server.listen(0, mustCall(() => {
   const { port } = server.address();
-  const url = `http://127.0.0.1:${port}`;
+  const url = `http://localhost:${port}`;
   const client = connect(url, mustCall(() => {
     const headers = {
       ':path': '/',
       ':method': 'HEAD',
       ':scheme': 'http',
-      ':authority': `127.0.0.1:${port}`
+      ':authority': `localhost:${port}`
     };
     const request = client.request(headers);
     request.on('response', mustCall((headers, flags) => {
@@ -299,13 +299,13 @@ const {
 }));
 server.listen(0, mustCall(() => {
   const { port } = server.address();
-  const url = `http://127.0.0.1:${port}`;
+  const url = `http://localhost:${port}`;
   const client = connect(url, mustCall(() => {
     const headers = {
       ':path': '/',
       ':method': 'HEAD',
       ':scheme': 'http',
-      ':authority': `127.0.0.1:${port}`
+      ':authority': `localhost:${port}`
     };
     const request = client.request(headers);
     request.on('response', mustCall((headers, flags) => {
@@ -332,13 +332,13 @@ const {
 }));
 server.listen(0, mustCall(() => {
   const { port } = server.address();
-  const url = `http://127.0.0.1:${port}`;
+  const url = `http://localhost:${port}`;
   const client = connect(url, mustCall(() => {
     const headers = {
       ':path': '/',
       ':method': 'HEAD',
       ':scheme': 'http',
-      ':authority': `127.0.0.1:${port}`
+      ':authority': `localhost:${port}`
     };
     const request = client.request(headers);
     request.on('response', mustCall((headers, flags) => {
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-flushheaders.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-flushheaders.js
index 74bf8861fc..7760bf8c7d 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-flushheaders.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-flushheaders.js
@@ -14,10 +14,8 @@ const server = h2.createServer();
 server.listen(0, common.mustCall(function() {
   const port = server.address().port;
   server.once('request', common.mustCall(function(request, response) {
-    serverResponse = response;
     assert.strictEqual(response.headersSent, false);
     assert.strictEqual(response._header, false); // Alias for headersSent
-
     response.flushHeaders();
     assert.strictEqual(response.headersSent, true);
     assert.strictEqual(response._header, true);
@@ -35,7 +33,7 @@ server.listen(0, common.mustCall(function() {
         response.flushHeaders(); // Idempotent
       });
     }));
-
+    serverResponse = response;
   }));
 
   const url = `http://localhost:${port}`;
@@ -58,4 +56,4 @@ server.listen(0, common.mustCall(function() {
     request.end();
     request.resume();
   }));
-}));
\ No newline at end of file
+}));
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-headers-after-destroy.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-headers-after-destroy.js
index 9a76f2eded..fc97a70f42 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-headers-after-destroy.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-headers-after-destroy.js
@@ -10,7 +10,7 @@ const h2 = require('http2');
 // any errors if the stream was destroyed before headers were sent
 const server = h2.createServer();
 
-server.listen(0, "127.0.0.1", common.mustCall(function() {
+server.listen(0, common.mustCall(function() {
   const port = server.address().port;
   server.once('request', common.mustCall(function(request, response) {
     response.on('finish', common.mustCall(() => {
@@ -26,23 +26,22 @@ server.listen(0, common.mustCall(function() {
       });
     }));
 
-    response.destroy();
   }));
 
-  const url = `http://127.0.0.1:${port}`;
+  const url = `http://localhost:${port}`;
   const client = h2.connect(url, common.mustCall(function() {
     const headers = {
       ':path': '/',
       ':method': 'GET',
       ':scheme': 'http',
-      ':authority': `127.0.0.1:${port}`
+      ':authority': `localhost:${port}`
     };
     const request = client.request(headers);
     request.on('end', common.mustCall(function() {
       client.close();
     }));
-    request.end("hello");
+    request.end();
     request.resume();
   }));
 }));
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-headers.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-headers.js
index 0687df4208..95423fd09d 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-headers.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-headers.js
@@ -105,7 +105,7 @@ server.listen(0, common.mustCall(function() {
     {
       code: 'ERR_INVALID_HTTP_TOKEN',
       name: 'TypeError',
-      // message: 'Header name must be a valid HTTP token [""]'
+      message: 'Header name must be a valid HTTP token [""]'
     }
   );
 
@@ -136,6 +136,7 @@ server.listen(0, common.mustCall(function() {
     () => response.setHeader(real, expectedValue),
     {
       code: 'ERR_HTTP2_HEADERS_SENT',
+      name: 'Error',
      message: 'Response has already been initiated.'
     }
   );
@@ -143,6 +144,7 @@ server.listen(0, common.mustCall(function() {
     () => response.removeHeader(real, expectedValue),
     {
       code: 'ERR_HTTP2_HEADERS_SENT',
+      name: 'Error',
       message: 'Response has already been initiated.'
     }
   );
@@ -152,6 +154,7 @@ server.listen(0, common.mustCall(function() {
     () => response.setHeader(real, expectedValue),
     {
       code: 'ERR_HTTP2_HEADERS_SENT',
+      name: 'Error',
       message: 'Response has already been initiated.'
     }
   );
@@ -159,6 +162,7 @@ server.listen(0, common.mustCall(function() {
     () => response.removeHeader(real, expectedValue),
     {
       code: 'ERR_HTTP2_HEADERS_SENT',
+      name: 'Error',
       message: 'Response has already been initiated.'
     }
   );
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-settimeout.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-settimeout.js
index 10d84173ee..e24621ad09 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-settimeout.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-settimeout.js
@@ -24,12 +24,12 @@ server.on('request', (req, res) => {
 
 server.listen(0, common.mustCall(() => {
   const port = server.address().port;
-  const client = http2.connect(`http://127.0.0.1:${port}`);
+  const client = http2.connect(`http://localhost:${port}`);
   const req = client.request({
     ':path': '/',
     ':method': 'GET',
     ':scheme': 'http',
-    ':authority': `127.0.0.1:${port}`
+    ':authority': `localhost:${port}`
   });
   req.on('end', common.mustCall(() => {
     client.close();
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property-set.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property-set.js
index 87e1724028..778600775e 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property-set.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property-set.js
@@ -24,7 +24,6 @@ server.listen(0, common.mustCall(function() {
       response.statusMessage = 'test';
       response.statusMessage = 'test'; // only warn once
       assert.strictEqual(response.statusMessage, ''); // no change
-      server.close();
     }));
     response.end();
   }));
@@ -44,6 +43,9 @@ server.listen(0, common.mustCall(function() {
     request.on('end', common.mustCall(function() {
       client.close();
     }));
+    request.on('close', common.mustCall(function() {
+      server.close();
+    }));
     request.end();
     request.resume();
   }));
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property.js
index 8a083cf3ba..eaffcc11cd 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property.js
@@ -23,7 +23,6 @@ server.listen(0, common.mustCall(function() {
     response.on('finish', common.mustCall(function() {
       assert.strictEqual(response.statusMessage, '');
       assert.strictEqual(response.statusMessage, ''); // only warn once
-      server.close();
    }));
     response.end();
   }));
@@ -43,6 +42,9 @@ server.listen(0, common.mustCall(function() {
     request.on('end', common.mustCall(function() {
       client.close();
     }));
+    request.on('close', common.mustCall(function() {
+      server.close();
+    }));
     request.end();
     request.resume();
   }));
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-trailers.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-trailers.js
index d8c53afff6..4cfbae0bda 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-trailers.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-trailers.js
@@ -20,6 +20,7 @@ server.listen(0, common.mustCall(() => {
     {
       code: 'ERR_INVALID_HTTP_TOKEN',
       name: 'TypeError',
+      message: 'Header name must be a valid HTTP token [""]'
     }
   );
   assert.throws(
@@ -52,6 +53,7 @@ server.listen(0, common.mustCall(() => {
     {
       code: 'ERR_INVALID_HTTP_TOKEN',
       name: 'TypeError',
+      message: 'Header name must be a valid HTTP token [""]'
     }
   );
 
diff --git a/test/js/node/test/parallel/test-http2-compat-serverresponse-write.js b/test/js/node/test/parallel/test-http2-compat-serverresponse-write.js
index e9518a1b74..64b37e8a13 100644
--- a/test/js/node/test/parallel/test-http2-compat-serverresponse-write.js
+++ b/test/js/node/test/parallel/test-http2-compat-serverresponse-write.js
@@ -14,7 +14,7 @@ const assert = require('assert');
   const server = createServer();
   server.listen(0, mustCall(() => {
     const port = server.address().port;
-    const url = `http://127.0.0.1:${port}`;
+    const url = `http://localhost:${port}`;
     const client = connect(url, mustCall(() => {
       const request = client.request();
       request.resume();
@@ -51,7 +51,7 @@
   const server = createServer();
   server.listen(0, mustCall(() => {
     const port = server.address().port;
-    const url = `http://127.0.0.1:${port}`;
+    const url = `http://localhost:${port}`;
     const client = connect(url, mustCall(() => {
       const request = client.request();
       request.resume();
diff --git a/test/js/node/test/parallel/test-http2-compat-socket-destroy-delayed.js b/test/js/node/test/parallel/test-http2-compat-socket-destroy-delayed.js
index 1edb85d0df..62405047d8 100644
--- a/test/js/node/test/parallel/test-http2-compat-socket-destroy-delayed.js
+++ b/test/js/node/test/parallel/test-http2-compat-socket-destroy-delayed.js
@@ -24,7 +24,7 @@ const app = http2.createServer(mustCall((req, res) => {
 }));
 
 app.listen(0, mustCall(() => {
-  const session = http2.connect(`http://127.0.0.1:${app.address().port}`);
+  const session = http2.connect(`http://localhost:${app.address().port}`);
   const request = session.request({
     [HTTP2_HEADER_PATH]: '/',
     [HTTP2_HEADER_METHOD]: 'get'
diff --git a/test/js/node/test/parallel/test-http2-compat-write-head-after-close.js b/test/js/node/test/parallel/test-http2-compat-write-head-after-close.js
index 6ae55a312c..541973f5db 100644
--- a/test/js/node/test/parallel/test-http2-compat-write-head-after-close.js
+++ b/test/js/node/test/parallel/test-http2-compat-write-head-after-close.js
@@ -10,9 +10,9 @@ const server = h2.createServer((req, res) => {
   res.writeHead(200, { 'content-type': 'text/plain' });
 });
 
-server.listen(0, "127.0.0.1", common.mustCall(() => {
+server.listen(0, common.mustCall(() => {
   const port = server.address().port;
-  const client = h2.connect(`http://127.0.0.1:${port}`);
+  const client = h2.connect(`http://localhost:${port}`);
   const req = client.request({ ':path': '/' });
   req.on('response', common.mustNotCall('head after close should not be sent'));
   req.on('end', common.mustCall(() => {
diff --git a/test/js/node/test/parallel/test-http2-destroy-after-write.js b/test/js/node/test/parallel/test-http2-destroy-after-write.js
index 780a5e1330..399df015b8 100644
--- a/test/js/node/test/parallel/test-http2-destroy-after-write.js
+++ b/test/js/node/test/parallel/test-http2-destroy-after-write.js
@@ -23,7 +23,7 @@ server.on('session', common.mustCall(function(session) {
 }));
 
 server.listen(0, function() {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = http2.connect(`http://localhost:${server.address().port}`);
   const stream = client.request({ ':method': 'POST' });
   stream.on('response', common.mustCall(function(headers) {
     assert.strictEqual(headers[':status'], 200);
diff --git a/test/js/node/test/parallel/test-http2-endafterheaders.js b/test/js/node/test/parallel/test-http2-endafterheaders.js
index b2f83e7df3..438caf3be4 100644
--- a/test/js/node/test/parallel/test-http2-endafterheaders.js
+++ b/test/js/node/test/parallel/test-http2-endafterheaders.js
@@ -21,7 +21,7 @@ const countdown = new Countdown(2, () => server.close());
 
 server.listen(0, common.mustCall(() => {
   {
-    const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+    const client = http2.connect(`http://localhost:${server.address().port}`);
     const req = client.request();
     req.resume();
 
@@ -34,7 +34,7 @@ server.listen(0, common.mustCall(() => {
     }));
   }
   {
-    const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+    const client = http2.connect(`http://localhost:${server.address().port}`);
     const req = client.request({ ':method': 'POST' });
     req.resume();
 
diff --git a/test/js/node/test/parallel/test-http2-graceful-close.js b/test/js/node/test/parallel/test-http2-graceful-close.js
index 25e2e22702..6e518080f6 100644
--- a/test/js/node/test/parallel/test-http2-graceful-close.js
+++ b/test/js/node/test/parallel/test-http2-graceful-close.js
@@ -31,6 +31,7 @@ server.on('stream', common.mustCall((stream, headers) => {
     for (let i = 0; i < 16; i++) {
       stream.write(chunk);
     }
+    // Stream end should happen after data is written
     stream.end();
   });
 
@@ -44,7 +45,7 @@ server.on('stream', common.mustCall((stream, headers) => {
 // Start the server
 server.listen(0, common.mustCall(() => {
   // Create client and request
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = http2.connect(`http://localhost:${server.address().port}`);
   const req = client.request({ ':path': '/' });
 
   // Track received data
diff --git a/test/js/node/test/parallel/test-http2-invalidheaderfield.js b/test/js/node/test/parallel/test-http2-invalidheaderfield.js
index 167bbac4ee..d6799160f3 100644
--- a/test/js/node/test/parallel/test-http2-invalidheaderfield.js
+++ b/test/js/node/test/parallel/test-http2-invalidheaderfield.js
@@ -8,8 +8,8 @@ if (!common.hasCrypto) {
   common.skip('missing crypto');
 }
 // Capitalized headers
 const http2 = require('http2');
-const { throws, strictEqual } = require('assert');
-const { once } = require('events');
+const { throws } = require('assert');
+
 {
   const server = http2.createServer(common.mustCall((req, res) => {
@@ -28,8 +28,8 @@ const { throws } = require('assert');
     res.end();
   }));
 
-  server.listen(0, "127.0.0.1", common.mustCall(() => {
-    const session = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  server.listen(0, common.mustCall(() => {
+    const session = http2.connect(`http://localhost:${server.address().port}`);
     session.request({ 'test_': 123, 'TEST': 123 })
       .on('end', common.mustCall(() => {
         session.close();
@@ -40,53 +40,42 @@ const { throws } = require('assert');
 
 {
   const server = http2.createServer();
-  server.listen(0, "127.0.0.1", common.mustCall(async () => {
-    const session = http2.connect(`http://127.0.0.1:${server.address().port}`);
-    await once(session, 'connect');
-    session.on('error', common.mustCall((e) => {
-
-      strictEqual(e.code, 'ERR_INVALID_HTTP_TOKEN');
-      session.close()
-      server.close();
-    }));
+  server.listen(0, common.mustCall(() => {
+    const session = http2.connect(`http://localhost:${server.address().port}`);
     throws(() => {
       session.request({ 't est': 123 });
     }, { code: 'ERR_INVALID_HTTP_TOKEN' });
+    session.close();
+    server.close();
   }));
 }
-
 {
   const server = http2.createServer();
-  server.listen(0, "127.0.0.1", common.mustCall(async () => {
-    const session = http2.connect(`http://127.0.0.1:${server.address().port}`);
-    await once(session, 'connect');
-    session.on('error', common.mustCall((e) => {
-      strictEqual(e.code, 'ERR_INVALID_HTTP_TOKEN');
-      session.close();
-      server.close();
-    }));
+  server.listen(0, common.mustCall(() => {
+    const session = http2.connect(`http://localhost:${server.address().port}`);
     throws(() => {
       session.request({ ' test': 123 });
    }, { code: 'ERR_INVALID_HTTP_TOKEN' });
+    session.close();
+    server.close();
   }));
 }
 
 {
   const server = http2.createServer();
-  server.listen(0, "127.0.0.1", common.mustCall(async () => {
-    const session4 = http2.connect(`http://127.0.0.1:${server.address().port}`);
-    await once(session4, 'connect');
+  server.listen(0, common.mustCall(() => {
+    const session = http2.connect(`http://localhost:${server.address().port}`);
     throws(() => {
-      session4.request({ ':test': 123 });
+      session.request({ ':test': 123 });
    }, { code: 'ERR_HTTP2_INVALID_PSEUDOHEADER' });
-    session4.close();
+    session.close();
     server.close();
   }));
 }
diff --git a/test/js/node/test/parallel/test-http2-invalidheaderfields-client.js b/test/js/node/test/parallel/test-http2-invalidheaderfields-client.js
index 02f3cbca6e..cfca6c30b2 100644
--- a/test/js/node/test/parallel/test-http2-invalidheaderfields-client.js
+++ b/test/js/node/test/parallel/test-http2-invalidheaderfields-client.js
@@ -3,23 +3,19 @@ const common = require('../common');
 if (!common.hasCrypto) { common.skip('missing crypto'); }
 const assert = require('assert');
 const http2 = require('http2');
-const { once } = require('events');
+
 const server1 = http2.createServer();
-server1.listen(0, "127.0.0.1", common.mustCall(async () => {
-  const session = http2.connect(`http://127.0.0.1:${server1.address().port}`);
-  await once(session, 'connect');
+server1.listen(0, common.mustCall(() => {
+  const session = http2.connect(`http://localhost:${server1.address().port}`);
   // Check for req headers
   assert.throws(() => {
     session.request({ 'no underscore': 123 });
   }, {
     code: 'ERR_INVALID_HTTP_TOKEN'
   });
-  session.on('error', common.mustCall((e) => {
-    assert.strictEqual(e.code, 'ERR_INVALID_HTTP_TOKEN');
-    session.close();
-    server1.close();
-  }));
+  session.close();
+  server1.close();
 }));
 
 const server2 = http2.createServer(common.mustCall((req, res) => {
@@ -32,8 +28,8 @@ const server2 = http2.createServer(common.mustCall((req, res) => {
   res.end();
 }));
 
-server2.listen(0, "127.0.0.1", common.mustCall(() => {
-  const session = http2.connect(`http://127.0.0.1:${server2.address().port}`);
+server2.listen(0, common.mustCall(() => {
+  const session = http2.connect(`http://localhost:${server2.address().port}`);
   const req = session.request();
   req.on('end', common.mustCall(() => {
     session.close();
@@ -53,8 +49,8 @@ const server3 = http2.createServer(common.mustCall((req, res) => {
   res.end();
 }));
 
-server3.listen(0, "127.0.0.1", common.mustCall(() => {
-  const session = http2.connect(`http://127.0.0.1:${server3.address().port}`);
+server3.listen(0, common.mustCall(() => {
+  const session = http2.connect(`http://localhost:${server3.address().port}`);
   const req = session.request();
   req.on('end', common.mustCall(() => {
     server3.close();
diff --git a/test/js/node/test/parallel/test-http2-large-write-multiple-requests.js b/test/js/node/test/parallel/test-http2-large-write-multiple-requests.js
index e2c0a0bfb7..bcbb1434cb 100644
--- a/test/js/node/test/parallel/test-http2-large-write-multiple-requests.js
+++ b/test/js/node/test/parallel/test-http2-large-write-multiple-requests.js
@@ -26,8 +26,8 @@ server.on('stream', (stream, headers) => {
   console.log('server sends content', ++streamCount);
 });
 
-server.listen(0, '127.0.0.1', common.mustCall(() => {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}/`);
+server.listen(0, common.mustCall(() => {
+  const client = http2.connect(`http://localhost:${server.address().port}/`);
 
   let endCount = 0;
   let finished = 0;
diff --git a/test/js/node/test/parallel/test-http2-misbehaving-flow-control.js b/test/js/node/test/parallel/test-http2-misbehaving-flow-control.js
index 0f1671f654..6774be2237 100644
--- a/test/js/node/test/parallel/test-http2-misbehaving-flow-control.js
+++ b/test/js/node/test/parallel/test-http2-misbehaving-flow-control.js
@@ -73,7 +73,7 @@ server.on('stream', (stream) => {
   }));
   stream.on('close', common.mustCall(() => {
     server.close(common.mustCall());
-    client.end();
+    client.destroy();
   }));
   stream.resume();
   stream.respond();
diff --git a/test/js/node/test/parallel/test-http2-no-more-streams.js b/test/js/node/test/parallel/test-http2-no-more-streams.js
index 584447c527..26ec5ab8ad 100644
--- a/test/js/node/test/parallel/test-http2-no-more-streams.js
+++ b/test/js/node/test/parallel/test-http2-no-more-streams.js
@@ -15,11 +15,12 @@ server.on('stream', (stream) => {
 });
 
 server.listen(0, common.mustCall(() => {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = http2.connect(`http://localhost:${server.address().port}`);
   const nextID = 2 ** 31 - 1;
 
   client.on('connect', () => {
     client.setNextStreamID(nextID);
+
     assert.strictEqual(client.state.nextStreamID, nextID);
 
     const countdown = new Countdown(2, () => {
diff --git a/test/js/node/test/parallel/test-http2-pipe.js b/test/js/node/test/parallel/test-http2-pipe.js
index 11cd3c84d6..ebd89e23d8 100644
--- a/test/js/node/test/parallel/test-http2-pipe.js
+++ b/test/js/node/test/parallel/test-http2-pipe.js
@@ -28,8 +28,8 @@ server.on('stream', common.mustCall((stream) => {
   stream.end();
 }));
 
-server.listen(0, "127.0.0.1", common.mustCall(() => {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+server.listen(0, common.mustCall(() => {
+  const client = http2.connect(`http://localhost:${server.address().port}`);
 
   const req = client.request({ ':method': 'POST' });
 
diff --git a/test/js/node/test/parallel/test-http2-premature-close.js b/test/js/node/test/parallel/test-http2-premature-close.js
index d2582de3ab..df30c42918 100644
--- a/test/js/node/test/parallel/test-http2-premature-close.js
+++ b/test/js/node/test/parallel/test-http2-premature-close.js
@@ -82,7 +82,6 @@ server.on(
 
 server.listen(
   0,
-  "127.0.0.1",
   common.mustCall(async () => {
     await requestAndClose(server);
   }),
diff --git a/test/js/node/test/parallel/test-http2-respond-file-fd-invalid.js b/test/js/node/test/parallel/test-http2-respond-file-fd-invalid.js
index 58e5125394..0a4fbcf7a6 100644
--- a/test/js/node/test/parallel/test-http2-respond-file-fd-invalid.js
+++ b/test/js/node/test/parallel/test-http2-respond-file-fd-invalid.js
@@ -34,7 +34,7 @@ server.on('stream', (stream) => {
 });
 
 server.listen(0, () => {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = http2.connect(`http://localhost:${server.address().port}`);
   const req = client.request();
 
   req.on('response', common.mustCall());
diff --git a/test/js/node/test/parallel/test-http2-respond-file-fd-range.js b/test/js/node/test/parallel/test-http2-respond-file-fd-range.js
index 55a0cd132d..2dd73e0001 100644
--- a/test/js/node/test/parallel/test-http2-respond-file-fd-range.js
+++ b/test/js/node/test/parallel/test-http2-respond-file-fd-range.js
@@ -50,7 +50,7 @@ server.on('stream', (stream, headers) => {
 server.on('close', common.mustCall(() => fs.closeSync(fd)));
 
 server.listen(0, () => {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = http2.connect(`http://localhost:${server.address().port}`);
 
   const countdown = new Countdown(2, () => {
     client.close();
diff --git a/test/js/node/test/parallel/test-http2-respond-file-range.js b/test/js/node/test/parallel/test-http2-respond-file-range.js
index 3f1c54fc8e..4e6a607451 100644
--- a/test/js/node/test/parallel/test-http2-respond-file-range.js
+++ b/test/js/node/test/parallel/test-http2-respond-file-range.js
@@ -17,6 +17,7 @@ const {
 const fname = fixtures.path('printA.js');
 const data = fs.readFileSync(fname);
 const stat = fs.statSync(fname);
+
 const server = http2.createServer();
 server.on('stream', (stream) => {
   stream.respondWithFile(fname, {
diff --git a/test/js/node/test/parallel/test-http2-sent-headers.js b/test/js/node/test/parallel/test-http2-sent-headers.js
index 3580a98a1a..6a492cf13c 100644
--- a/test/js/node/test/parallel/test-http2-sent-headers.js
+++ b/test/js/node/test/parallel/test-http2-sent-headers.js
@@ -25,8 +25,8 @@ server.on('stream', common.mustCall((stream) => {
   });
 }));
 
-server.listen(0, "127.0.0.1", common.mustCall(async () => {
-  const client = h2.connect(`http://127.0.0.1:${server.address().port}`);
+server.listen(0, common.mustCall(() => {
+  const client = h2.connect(`http://localhost:${server.address().port}`);
   const req = client.request();
 
   req.on('headers', common.mustCall((headers, flags) => {
@@ -36,7 +36,7 @@ server.listen(0, common.mustCall(() => {
 
   assert.strictEqual(req.sentHeaders[':method'], 'GET');
   assert.strictEqual(req.sentHeaders[':authority'],
-                     `127.0.0.1:${server.address().port}`);
+                     `localhost:${server.address().port}`);
   assert.strictEqual(req.sentHeaders[':scheme'], 'http');
   assert.strictEqual(req.sentHeaders[':path'], '/');
   req.resume();
diff --git a/test/js/node/test/parallel/test-http2-server-close-idle-connection.js b/test/js/node/test/parallel/test-http2-server-close-idle-connection.js
index 8b031fea8f..56a94f2786 100644
--- a/test/js/node/test/parallel/test-http2-server-close-idle-connection.js
+++ b/test/js/node/test/parallel/test-http2-server-close-idle-connection.js
@@ -28,7 +28,7 @@ server.on('session', common.mustCall((session) => {
 // Start the server
 server.listen(0, common.mustCall(() => {
   // Create client and initial request
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = http2.connect(`http://localhost:${server.address().port}`);
 
   // This will ensure that server closed the idle connection
   client.on('close', common.mustCall());
diff --git a/test/js/node/test/parallel/test-http2-server-session-destroy.js b/test/js/node/test/parallel/test-http2-server-session-destroy.js
index c1c11832c4..afa3dd3398 100644
--- a/test/js/node/test/parallel/test-http2-server-session-destroy.js
+++ b/test/js/node/test/parallel/test-http2-server-session-destroy.js
@@ -6,9 +6,8 @@ if (!common.hasCrypto)
 const h2 = require('http2');
 
 const server = h2.createServer();
-server.listen(0, "127.0.0.1", common.mustCall(() => {
+server.listen(0, common.localhostIPv4, common.mustCall(() => {
   const afterConnect = common.mustCall((session) => {
-
     session.request({ ':method': 'POST' }).end(common.mustCall(() => {
       session.destroy();
       server.close();
@@ -16,6 +15,6 @@ server.listen(0, common.localhostIPv4, common.mustCall(() => {
   });
 
   const port = server.address().port;
-  const host = "127.0.0.1";
+  const host = common.localhostIPv4;
   h2.connect(`http://${host}:${port}`, afterConnect);
 }));
diff --git a/test/js/node/test/parallel/test-http2-server-setLocalWindowSize.js b/test/js/node/test/parallel/test-http2-server-setLocalWindowSize.js
index b1e7046648..8fcb9b9d0d 100644
--- a/test/js/node/test/parallel/test-http2-server-setLocalWindowSize.js
+++ b/test/js/node/test/parallel/test-http2-server-setLocalWindowSize.js
@@ -26,7 +26,7 @@ server.on('session', common.mustCall((session) => {
 }));
 
 server.listen(0, common.mustCall(() => {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = http2.connect(`http://localhost:${server.address().port}`);
   const req = client.request();
   req.resume();
 
diff --git a/test/js/node/test/parallel/test-http2-session-stream-state.js b/test/js/node/test/parallel/test-http2-session-stream-state.js
index cb6a575bc5..612feb8cf1 100644
--- a/test/js/node/test/parallel/test-http2-session-stream-state.js
+++ b/test/js/node/test/parallel/test-http2-session-stream-state.js
@@ -50,7 +50,7 @@
 server.listen(0);
 
 server.on('listening', common.mustCall(() => {
-  const client = h2.connect(`http://127.0.0.1:${server.address().port}`);
+  const client = h2.connect(`http://localhost:${server.address().port}`);
 
   const headers = { ':path': '/' };
 
diff --git a/test/js/node/test/parallel/test-http2-single-headers.js b/test/js/node/test/parallel/test-http2-single-headers.js
index d72ddcc800..36ad8c3b41 100644
--- a/test/js/node/test/parallel/test-http2-single-headers.js
+++ b/test/js/node/test/parallel/test-http2-single-headers.js
@@ -5,7 +5,7 @@ if (!common.hasCrypto)
   common.skip('missing crypto');
 
 const assert = require('assert');
 const http2 = require('http2');
-const { once } = require('events');
+
 const server = http2.createServer();
 
 // Each of these headers must appear only once
@@ -24,11 +24,10 @@ const singles = [
 
 server.on('stream', common.mustNotCall());
 
-server.listen(0, "127.0.0.1", common.mustCall(async () => {
-  const client = http2.connect(`http://127.0.0.1:${server.address().port}`);
-  await once(client, 'connect');
+server.listen(0, common.mustCall(() => {
+  const client = http2.connect(`http://localhost:${server.address().port}`);
+
   for (const i of singles) {
-
     assert.throws(
       () => client.request({ [i]: 'abc', [i.toUpperCase()]: 'xyz' }),
       {
diff --git a/test/js/node/test/parallel/test-http2-stream-destroy-event-order.js b/test/js/node/test/parallel/test-http2-stream-destroy-event-order.js
index b25ccdc015..8fcbbabe3c 100644
--- a/test/js/node/test/parallel/test-http2-stream-destroy-event-order.js
+++ b/test/js/node/test/parallel/test-http2-stream-destroy-event-order.js
@@ -19,7 +19,7 @@ server.on('stream', common.mustCall((stream) => {
   req.close(2);
 }));
 server.listen(0, common.mustCall(() => {
-  client = http2.connect(`http://127.0.0.1:${server.address().port}`);
+  client = http2.connect(`http://localhost:${server.address().port}`);
  req = 
client.request(); req.resume(); req.on('error', common.mustCall(() => { diff --git a/test/js/node/test/parallel/test-http2-too-many-headers.js b/test/js/node/test/parallel/test-http2-too-many-headers.js index 8aa8f4435e..f77e7679d7 100644 --- a/test/js/node/test/parallel/test-http2-too-many-headers.js +++ b/test/js/node/test/parallel/test-http2-too-many-headers.js @@ -17,7 +17,7 @@ const server = http2.createServer({ maxHeaderListPairs: 0 }); server.on('stream', common.mustNotCall()); server.listen(0, common.mustCall(() => { - const client = http2.connect(`http://127.0.0.1:${server.address().port}`); + const client = http2.connect(`http://localhost:${server.address().port}`); const req = client.request({ foo: 'bar' }); req.on('error', common.expectsError({ diff --git a/test/js/node/test/parallel/test-https-agent-session-eviction.js b/test/js/node/test/parallel/test-https-agent-session-eviction.js index da56007105..a3fd362b5c 100644 --- a/test/js/node/test/parallel/test-https-agent-session-eviction.js +++ b/test/js/node/test/parallel/test-https-agent-session-eviction.js @@ -2,10 +2,13 @@ 'use strict'; const common = require('../common'); -const { readKey } = require('../common/fixtures'); -if (!common.hasCrypto) +if (!common.hasCrypto) { common.skip('missing crypto'); +} + +const { readKey } = require('../common/fixtures'); +const { hasOpenSSL } = require('../common/crypto'); const https = require('https'); const { SSL_OP_NO_TICKET } = require('crypto').constants; @@ -14,9 +17,12 @@ const options = { key: readKey('agent1-key.pem'), cert: readKey('agent1-cert.pem'), secureOptions: SSL_OP_NO_TICKET, - ciphers: 'RSA@SECLEVEL=0' }; +if (!process.features.openssl_is_boringssl) { + options.ciphers = 'RSA@SECLEVEL=0'; +} + // Create TLS1.2 server https.createServer(options, function(req, res) { res.writeHead(200, { 'Connection': 'close' }); @@ -56,7 +62,7 @@ function faultyServer(port) { function second(server, session) { const req = https.request({ port: server.address().port, - ciphers: (common.hasOpenSSL31 ? 'DEFAULT:@SECLEVEL=0' : 'DEFAULT'), + ciphers: (hasOpenSSL(3, 1) ? 
'DEFAULT:@SECLEVEL=0' : 'DEFAULT'), rejectUnauthorized: false }, function(res) { res.resume(); diff --git a/test/js/node/test/parallel/test-https-close.js b/test/js/node/test/parallel/test-https-close.js index 93a8f02f55..29104bf2f9 100644 --- a/test/js/node/test/parallel/test-https-close.js +++ b/test/js/node/test/parallel/test-https-close.js @@ -51,4 +51,4 @@ server.listen(0, () => { setImmediate(shutdown); }); req.end(); -}); \ No newline at end of file +}); diff --git a/test/js/node/test/parallel/test-https-server-close-destroy-timeout.js b/test/js/node/test/parallel/test-https-server-close-destroy-timeout.js index 904edeae48..e876721f61 100644 --- a/test/js/node/test/parallel/test-https-server-close-destroy-timeout.js +++ b/test/js/node/test/parallel/test-https-server-close-destroy-timeout.js @@ -21,4 +21,4 @@ server.listen(0, common.mustCall(function() { server.close(common.mustCall(() => { assert(server[kConnectionsCheckingInterval]._destroyed); })); -})); \ No newline at end of file +})); diff --git a/test/js/node/test/parallel/test-https-server-headers-timeout.js b/test/js/node/test/parallel/test-https-server-headers-timeout.js index 755336053e..45457e3942 100644 --- a/test/js/node/test/parallel/test-https-server-headers-timeout.js +++ b/test/js/node/test/parallel/test-https-server-headers-timeout.js @@ -18,4 +18,4 @@ const server = createServer(options); assert.strictEqual(server.headersTimeout, 60000); const headersTimeout = common.platformTimeout(1000); server.headersTimeout = headersTimeout; -assert.strictEqual(server.headersTimeout, headersTimeout); \ No newline at end of file +assert.strictEqual(server.headersTimeout, headersTimeout); diff --git a/test/js/node/test/parallel/test-https-server-request-timeout.js b/test/js/node/test/parallel/test-https-server-request-timeout.js index e9224973a7..00bac8ea39 100644 --- a/test/js/node/test/parallel/test-https-server-request-timeout.js +++ b/test/js/node/test/parallel/test-https-server-request-timeout.js @@ -18,4 +18,4 @@ const server = createServer(options); assert.strictEqual(server.requestTimeout, 300000); const requestTimeout = common.platformTimeout(1000); server.requestTimeout = requestTimeout; -assert.strictEqual(server.requestTimeout, requestTimeout); \ No newline at end of file +assert.strictEqual(server.requestTimeout, requestTimeout); diff --git a/test/js/node/test/parallel/test-icu-transcode.js b/test/js/node/test/parallel/test-icu-transcode.js index e9aced128e..875d954b6c 100644 --- a/test/js/node/test/parallel/test-icu-transcode.js +++ b/test/js/node/test/parallel/test-icu-transcode.js @@ -86,5 +86,5 @@ assert.deepStrictEqual( // Test that it doesn't crash { - buffer.transcode(new buffer.SlowBuffer(1), 'utf16le', 'ucs2'); + buffer.transcode(new buffer.Buffer.allocUnsafeSlow(1), 'utf16le', 'ucs2'); } diff --git a/test/js/node/test/parallel/test-mime-api.js b/test/js/node/test/parallel/test-mime-api.js index af9b87e887..30272e5aa6 100644 --- a/test/js/node/test/parallel/test-mime-api.js +++ b/test/js/node/test/parallel/test-mime-api.js @@ -88,26 +88,6 @@ const params = mime.params; params.set('charset', 'utf-8'); assert.strictEqual(params.has('charset'), true); assert.strictEqual(params.get('charset'), 'utf-8'); -{ - // these tests are added by bun - assert.strictEqual(params.get("CHARSET"), null); // case sensitive - const mime2 = new MIMEType('text/javascript;CHARSET=UTF-8;abc=;def;ghi'); - assert.strictEqual(mime2.params.get("CHARSET"), null); - assert.strictEqual(mime2.params.get("charset"), "UTF-8"); // converted 
to lowercase on parsing - assert.strictEqual(mime2.params.has("CHARSET"), false); - assert.strictEqual(mime2.params.has("charset"), true); - assert.strictEqual(mime2.params.has("abc"), false); - assert.strictEqual(mime2.params.has("def"), false); - assert.strictEqual(mime2.params.has("ghi"), false); - assert.strictEqual(mime2.params.get("abc"), null); - assert.strictEqual(mime2.params.get("def"), null); - assert.strictEqual(mime2.params.get("ghi"), null); - mime2.params.set("CHARSET", "UTF-8"); - assert.strictEqual(mime2.params.get("CHARSET"), "UTF-8"); // not converted to lowercase on set - assert.strictEqual(mime2.params.has("CHARSET"), true); - assert.strictEqual(mime2.params.get("charset"), "UTF-8"); - assert.strictEqual(mime2.params.has("charset"), true); -} assert.deepStrictEqual([...params], [['charset', 'utf-8']]); assert.strictEqual( JSON.stringify(mime), diff --git a/test/js/node/test/parallel/test-module-loading-error.js b/test/js/node/test/parallel/test-module-loading-error.js index d56e696942..3496a4104d 100644 --- a/test/js/node/test/parallel/test-module-loading-error.js +++ b/test/js/node/test/parallel/test-module-loading-error.js @@ -28,7 +28,7 @@ const errorMessagesByPlatform = { win32: ['is not a valid Win32 application'], linux: ['file too short', 'Exec format error'], sunos: ['unknown file type', 'not an ELF file'], - darwin: ['file too short', 'not a mach-o file'], + darwin: ['file too short', 'not a mach-o file', 'not valid mach-o file'], aix: ['Cannot load module', 'Cannot run a file that does not have a valid format.', 'Exec format error'], diff --git a/test/js/node/test/parallel/test-module-relative-lookup.js b/test/js/node/test/parallel/test-module-relative-lookup.js index 1bd505392c..76af2b3b30 100644 --- a/test/js/node/test/parallel/test-module-relative-lookup.js +++ b/test/js/node/test/parallel/test-module-relative-lookup.js @@ -2,7 +2,7 @@ const common = require('../common'); const assert = require('assert'); -const _module = require('module'); // Avoid collision with global.module +const _module = require('module'); // Avoid collision with globalThis.module // Current directory gets highest priority for local modules function testFirstInPath(moduleName, isLocalModule) { @@ -15,7 +15,7 @@ function testFirstInPath(moduleName, isLocalModule) { assertFunction(paths[0], '.'); paths = _module._resolveLookupPaths(moduleName, null); - assertFunction(paths && paths[0], '.'); + assertFunction(paths?.[0], '.'); } testFirstInPath('./lodash', true); diff --git a/test/js/node/test/parallel/test-net-access-byteswritten.js b/test/js/node/test/parallel/test-net-access-byteswritten.js index 6d77dc5473..da63d68f6c 100644 --- a/test/js/node/test/parallel/test-net-access-byteswritten.js +++ b/test/js/node/test/parallel/test-net-access-byteswritten.js @@ -9,10 +9,13 @@ const net = require('net'); const tls = require('tls'); const tty = require('tty'); -// Check that the bytesWritten getter doesn't crash if object isn't constructed. +// Check that the bytesWritten getter doesn't crash if object isn't +// constructed. 
assert.strictEqual(net.Socket.prototype.bytesWritten, undefined); -assert.strictEqual(Object.getPrototypeOf(tls.TLSSocket).prototype.bytesWritten, undefined); +assert.strictEqual(Object.getPrototypeOf(tls.TLSSocket).prototype.bytesWritten, + undefined); assert.strictEqual(tls.TLSSocket.prototype.bytesWritten, undefined); -assert.strictEqual(Object.getPrototypeOf(tty.ReadStream).prototype.bytesWritten, undefined); +assert.strictEqual(Object.getPrototypeOf(tty.ReadStream).prototype.bytesWritten, + undefined); assert.strictEqual(tty.ReadStream.prototype.bytesWritten, undefined); assert.strictEqual(tty.WriteStream.prototype.bytesWritten, undefined); diff --git a/test/js/node/test/parallel/test-net-after-close.js b/test/js/node/test/parallel/test-net-after-close.js index 38ea3b96aa..413e8f7599 100644 --- a/test/js/node/test/parallel/test-net-after-close.js +++ b/test/js/node/test/parallel/test-net-after-close.js @@ -30,14 +30,13 @@ const server = net.createServer(common.mustCall((s) => { })); server.listen(0, common.mustCall(() => { - console.log('SEVER: got listen'); const c = net.createConnection(server.address().port); c.on('close', common.mustCall(() => { - console.log('CONN: got close'); /* eslint-disable no-unused-expressions */ console.error('connection closed'); assert.strictEqual(c._handle, null); - // Calling functions / accessing properties of a closed socket should not throw. + // Calling functions / accessing properties of a closed socket should not + // throw. c.setNoDelay(); c.setKeepAlive(); c.bufferSize; diff --git a/test/js/node/test/parallel/test-net-better-error-messages-path.js b/test/js/node/test/parallel/test-net-better-error-messages-path.js new file mode 100644 index 0000000000..93a5e38fac --- /dev/null +++ b/test/js/node/test/parallel/test-net-better-error-messages-path.js @@ -0,0 +1,22 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const net = require('net'); + +{ + const fp = '/tmp/fadagagsdfgsdf'; + const c = net.connect(fp); + + c.on('connect', common.mustNotCall()); + c.on('error', common.expectsError({ + code: 'ENOENT', + message: `connect ENOENT ${fp}` + })); +} + +{ + assert.throws( + () => net.createConnection({ path: {} }), + { code: 'ERR_INVALID_ARG_TYPE' } + ); +} diff --git a/test/js/node/test/parallel/test-net-better-error-messages-port-hostname.js b/test/js/node/test/parallel/test-net-better-error-messages-port-hostname.js index c9582f9ae7..3cc9e58987 100644 --- a/test/js/node/test/parallel/test-net-better-error-messages-port-hostname.js +++ b/test/js/node/test/parallel/test-net-better-error-messages-port-hostname.js @@ -9,7 +9,10 @@ const assert = require('assert'); const net = require('net'); const { addresses } = require('../common/internet'); -const { errorLookupMock, mockedErrorCode } = require('../common/dns'); +const { + errorLookupMock, + mockedErrorCode +} = require('../common/dns'); // Using port 0 as hostname used is already invalid. 
const c = net.createConnection({ diff --git a/test/js/node/test/parallel/test-net-connect-abort-controller.js b/test/js/node/test/parallel/test-net-connect-abort-controller.js index 5432bbabae..9c259cc3fc 100644 --- a/test/js/node/test/parallel/test-net-connect-abort-controller.js +++ b/test/js/node/test/parallel/test-net-connect-abort-controller.js @@ -24,7 +24,6 @@ server.listen(0, common.mustCall(async () => { assert.fail(`close ${testName} should have thrown`); } catch (err) { assert.strictEqual(err.name, 'AbortError'); - assert.strictEqual(err.toString(), 'AbortError: The operation was aborted.'); } }; diff --git a/test/js/node/test/parallel/test-net-connect-custom-lookup-non-string-address.mjs b/test/js/node/test/parallel/test-net-connect-custom-lookup-non-string-address.mjs new file mode 100644 index 0000000000..d81232cb24 --- /dev/null +++ b/test/js/node/test/parallel/test-net-connect-custom-lookup-non-string-address.mjs @@ -0,0 +1,44 @@ +import * as common from '../common/index.mjs'; +import net from 'node:net'; +import { describe, it } from 'node:test'; + +const brokenCustomLookup = (_hostname, options, callback) => { + // Incorrectly return an array of IPs instead of a string. + callback(null, ['127.0.0.1'], options.family); +}; + +describe('when family is ipv4', () => { + it('socket emits an error when lookup does not return a string', (t, done) => { + const options = { + host: 'example.com', + port: 80, + lookup: brokenCustomLookup, + family: 4 + }; + + const socket = net.connect(options, common.mustNotCall()); + socket.on('error', (err) => { + t.assert.strictEqual(err.code, 'ERR_INVALID_IP_ADDRESS'); + + done(); + }); + }); +}); + +describe('when family is ipv6', () => { + it('socket emits an error when lookup does not return a string', (t, done) => { + const options = { + host: 'example.com', + port: 80, + lookup: brokenCustomLookup, + family: 6 + }; + + const socket = net.connect(options, common.mustNotCall()); + socket.on('error', (err) => { + t.assert.strictEqual(err.code, 'ERR_INVALID_IP_ADDRESS'); + + done(); + }); + }); +}); diff --git a/test/js/node/test/parallel/test-net-connect-keepalive.js b/test/js/node/test/parallel/test-net-connect-keepalive.js new file mode 100644 index 0000000000..514a057655 --- /dev/null +++ b/test/js/node/test/parallel/test-net-connect-keepalive.js @@ -0,0 +1,55 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const net = require('net'); + +const truthyValues = [true, 1, 'true', {}, []]; +const delays = [[123, 0], [456123, 456], [-123000, 0], [undefined, 0]]; +const falseyValues = [false, 0, '']; + +const genSetKeepAlive = (desiredEnable, desiredDelay) => (enable, delay) => { + assert.strictEqual(enable, desiredEnable); + assert.strictEqual(delay, desiredDelay); +}; + +for (const value of truthyValues) { + for (const delay of delays) { + const server = net.createServer(); + + server.listen(0, common.mustCall(function() { + const port = server.address().port; + + const client = net.connect( + { port, keepAlive: value, keepAliveInitialDelay: delay[0] }, + common.mustCall(() => client.end()) + ); + + client._handle.setKeepAlive = common.mustCall( + genSetKeepAlive(true, delay[1]) + ); + + client.on('end', common.mustCall(function() { + server.close(); + })); + })); + } +} + +for (const value of falseyValues) { + const server = net.createServer(); + + server.listen(0, common.mustCall(function() { + const port = server.address().port; + + const client = net.connect( + { port, keepAlive: value }, + 
common.mustCall(() => client.end()) + ); + + client._handle.setKeepAlive = common.mustNotCall(); + + client.on('end', common.mustCall(function() { + server.close(); + })); + })); +} diff --git a/test/js/node/test/parallel/test-net-connect-no-arg.js b/test/js/node/test/parallel/test-net-connect-no-arg.js index 78eb633e73..c795ef7f6e 100644 --- a/test/js/node/test/parallel/test-net-connect-no-arg.js +++ b/test/js/node/test/parallel/test-net-connect-no-arg.js @@ -5,31 +5,31 @@ const assert = require('assert'); const net = require('net'); // Tests that net.connect() called without arguments throws ERR_MISSING_ARGS. -const message = 'The "options", "port", or "path" argument must be specified'; + assert.throws(() => { net.connect(); }, { code: 'ERR_MISSING_ARGS', - message, + message: 'The "options" or "port" or "path" argument must be specified', }); assert.throws(() => { new net.Socket().connect(); }, { code: 'ERR_MISSING_ARGS', - message, + message: 'The "options" or "port" or "path" argument must be specified', }); assert.throws(() => { net.connect({}); }, { code: 'ERR_MISSING_ARGS', - message, + message: 'The "options" or "port" or "path" argument must be specified', }); assert.throws(() => { new net.Socket().connect({}); }, { code: 'ERR_MISSING_ARGS', - message, + message: 'The "options" or "port" or "path" argument must be specified', }); diff --git a/test/js/node/test/parallel/test-net-connect-options-invalid.js b/test/js/node/test/parallel/test-net-connect-options-invalid.js index 05a5654630..53ce89cac9 100644 --- a/test/js/node/test/parallel/test-net-connect-options-invalid.js +++ b/test/js/node/test/parallel/test-net-connect-options-invalid.js @@ -25,3 +25,16 @@ const net = require('net'); }); }); } + +{ + assert.throws(() => { + net.createConnection({ + host: ['192.168.0.1'], + port: 8080, + }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "options.host" property must be of type string. 
Received an instance of Array', + }); +} diff --git a/test/js/node/test/parallel/test-net-dns-lookup.js b/test/js/node/test/parallel/test-net-dns-lookup.js index 3bd6bd45ce..8ef0382ae1 100644 --- a/test/js/node/test/parallel/test-net-dns-lookup.js +++ b/test/js/node/test/parallel/test-net-dns-lookup.js @@ -30,8 +30,8 @@ const server = net.createServer(function(client) { }); server.listen(0, common.mustCall(function() { - const socket = net.connect(this.address().port, 'localhost'); - socket.on('lookup', common.mustCallAtLeast(function(err, ip, type, host) { + net.connect(this.address().port, 'localhost') + .on('lookup', common.mustCallAtLeast(function(err, ip, type, host) { assert.strictEqual(err, null); assert.match(ip, /^(127\.0\.0\.1|::1)$/); assert.match(type.toString(), /^(4|6)$/); diff --git a/test/js/node/test/parallel/test-net-listen-close-server.js b/test/js/node/test/parallel/test-net-listen-close-server.js index 99d7111eba..c4421fb225 100644 --- a/test/js/node/test/parallel/test-net-listen-close-server.js +++ b/test/js/node/test/parallel/test-net-listen-close-server.js @@ -23,8 +23,7 @@ const common = require('../common'); const net = require('net'); -const server = net.createServer(function(socket) { -}); +const server = net.createServer(common.mustNotCall()); server.listen(0, common.mustNotCall()); server.on('error', common.mustNotCall()); server.close(); diff --git a/test/js/node/test/parallel/test-net-listen-error.js b/test/js/node/test/parallel/test-net-listen-error.js index 05ca799d3e..44a2bd6982 100644 --- a/test/js/node/test/parallel/test-net-listen-error.js +++ b/test/js/node/test/parallel/test-net-listen-error.js @@ -23,7 +23,6 @@ const common = require('../common'); const net = require('net'); -const server = net.createServer(function(socket) { -}); +const server = net.createServer(common.mustNotCall()); server.listen(1, '1.1.1.1', common.mustNotCall()); // EACCES or EADDRNOTAVAIL server.on('error', common.mustCall()); diff --git a/test/js/node/test/parallel/test-net-listen-fd0.js b/test/js/node/test/parallel/test-net-listen-fd0.js new file mode 100644 index 0000000000..c9ba56b5ae --- /dev/null +++ b/test/js/node/test/parallel/test-net-listen-fd0.js @@ -0,0 +1,33 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const net = require('net'); + +// This should fail with an async EINVAL error, not throw an exception +net.createServer(common.mustNotCall()) + .listen({ fd: 0 }) + .on('error', common.mustCall(function(e) { + assert(e instanceof Error); + assert(['EINVAL', 'ENOTSOCK'].includes(e.code)); + })); diff --git a/test/js/node/test/parallel/test-net-listen-invalid-port.js b/test/js/node/test/parallel/test-net-listen-invalid-port.js index 119be60193..8448780366 100644 --- a/test/js/node/test/parallel/test-net-listen-invalid-port.js +++ b/test/js/node/test/parallel/test-net-listen-invalid-port.js @@ -40,4 +40,4 @@ assert.throws(() => { }, { code: 'ERR_SOCKET_BAD_PORT', name: 'RangeError' -}); \ No newline at end of file +}); diff --git a/test/js/node/test/parallel/test-net-remote-address-port.js b/test/js/node/test/parallel/test-net-remote-address-port.js index 8cbf661b55..615f22979c 100644 --- a/test/js/node/test/parallel/test-net-remote-address-port.js +++ b/test/js/node/test/parallel/test-net-remote-address-port.js @@ -34,8 +34,10 @@ const remoteAddrCandidates = [ common.localhostIPv4, const remoteFamilyCandidates = ['IPv4', 'IPv6']; const server = net.createServer(common.mustCall(function(socket) { - assert.ok(remoteAddrCandidates.includes(socket.remoteAddress), `Invalid remoteAddress: ${socket.remoteAddress}`); - assert.ok(remoteFamilyCandidates.includes(socket.remoteFamily), `Invalid remoteFamily: ${socket.remoteFamily}`); + assert.ok(remoteAddrCandidates.includes(socket.remoteAddress), + `Invalid remoteAddress: ${socket.remoteAddress}`); + assert.ok(remoteFamilyCandidates.includes(socket.remoteFamily), + `Invalid remoteFamily: ${socket.remoteFamily}`); assert.ok(socket.remotePort); assert.notStrictEqual(socket.remotePort, this.address().port); socket.on('end', function() { @@ -60,26 +62,22 @@ server.listen(0, function() { assert.strictEqual(client2.remotePort, undefined); client.on('connect', function() { - console.log(1, !!client._handle, client.remoteAddress, client.remoteFamily, client.remotePort); assert.ok(remoteAddrCandidates.includes(client.remoteAddress)); assert.ok(remoteFamilyCandidates.includes(client.remoteFamily)); assert.strictEqual(client.remotePort, server.address().port); client.end(); }); client.on('close', function() { - console.log(2, !!client._handle, client.remoteAddress, client.remoteFamily); assert.ok(remoteAddrCandidates.includes(client.remoteAddress)); assert.ok(remoteFamilyCandidates.includes(client.remoteFamily)); }); client2.on('connect', function() { - console.log(3, !!client2._handle, client2.remoteAddress, client2.remoteFamily, client2.remotePort); assert.ok(remoteAddrCandidates.includes(client2.remoteAddress)); assert.ok(remoteFamilyCandidates.includes(client2.remoteFamily)); assert.strictEqual(client2.remotePort, server.address().port); client2.end(); }); client2.on('close', function() { - console.log(4, !!client2._handle, client2.remoteAddress, client2.remoteFamily); assert.ok(remoteAddrCandidates.includes(client2.remoteAddress)); assert.ok(remoteFamilyCandidates.includes(client2.remoteFamily)); }); diff --git a/test/js/node/test/parallel/test-net-server-blocklist.js b/test/js/node/test/parallel/test-net-server-blocklist.js index 66bb948e82..8f310bd625 100644 --- a/test/js/node/test/parallel/test-net-server-blocklist.js +++ b/test/js/node/test/parallel/test-net-server-blocklist.js @@ -4,13 +4,12 @@ const net = require('net'); const blockList = new net.BlockList(); 
blockList.addAddress(common.localhostIPv4); -console.log('common.localhostIPv4',common.localhostIPv4) const server = net.createServer({ blockList }, common.mustNotCall()); server.listen(0, common.localhostIPv4, common.mustCall(() => { const adddress = server.address(); const socket = net.connect({ - // localAddress: common.localhostIPv4, + localAddress: common.localhostIPv4, host: adddress.address, port: adddress.port }); diff --git a/test/js/node/test/parallel/test-net-server-capture-rejection.js b/test/js/node/test/parallel/test-net-server-capture-rejection.js new file mode 100644 index 0000000000..b1564ec268 --- /dev/null +++ b/test/js/node/test/parallel/test-net-server-capture-rejection.js @@ -0,0 +1,27 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const events = require('events'); +const { createServer, connect } = require('net'); + +events.captureRejections = true; + +const server = createServer(common.mustCall(async (sock) => { + server.close(); + + const _err = new Error('kaboom'); + sock.on('error', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + throw _err; +})); + +server.listen(0, common.mustCall(() => { + const sock = connect( + server.address().port, + server.address().host + ); + + sock.on('close', common.mustCall()); +})); diff --git a/test/js/node/test/parallel/test-net-server-max-connections.js b/test/js/node/test/parallel/test-net-server-max-connections.js index f0cbea1bc5..ea9a8d29e9 100644 --- a/test/js/node/test/parallel/test-net-server-max-connections.js +++ b/test/js/node/test/parallel/test-net-server-max-connections.js @@ -55,7 +55,7 @@ function makeConnection(index) { } c.on('close', function() { - // console.error(`closed ${index}`); + console.error(`closed ${index}`); closes++; if (closes < N / 2) { diff --git a/test/js/node/test/parallel/test-net-server-pause-on-connect.js b/test/js/node/test/parallel/test-net-server-pause-on-connect.js index f6079a40b6..59c39e8816 100644 --- a/test/js/node/test/parallel/test-net-server-pause-on-connect.js +++ b/test/js/node/test/parallel/test-net-server-pause-on-connect.js @@ -1,3 +1,24 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ 'use strict'; const common = require('../common'); const assert = require('assert'); @@ -6,11 +27,13 @@ const msg = 'test'; let stopped = true; let server1Sock; + const server1ConnHandler = (socket) => { socket.on('data', function(data) { if (stopped) { assert.fail('data event should not have happened yet'); } + assert.strictEqual(data.toString(), msg); socket.end(); server1.close(); diff --git a/test/js/node/test/parallel/test-net-socket-constructor.js b/test/js/node/test/parallel/test-net-socket-constructor.js index 3755d6e9d6..47010aa3b3 100644 --- a/test/js/node/test/parallel/test-net-socket-constructor.js +++ b/test/js/node/test/parallel/test-net-socket-constructor.js @@ -62,4 +62,4 @@ if (cluster.isPrimary) { test({ fd: 5 }, true, true); test({ fd: 6, readable: true, writable: true }, true, true); process.disconnect(); -} \ No newline at end of file +} diff --git a/test/js/node/test/parallel/test-net-socket-write-after-close.js b/test/js/node/test/parallel/test-net-socket-write-after-close.js new file mode 100644 index 0000000000..207f735fff --- /dev/null +++ b/test/js/node/test/parallel/test-net-socket-write-after-close.js @@ -0,0 +1,42 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const net = require('net'); + +{ + const server = net.createServer(); + + server.listen(common.mustCall(() => { + const port = server.address().port; + const client = net.connect({ port }, common.mustCall(() => { + client.on('error', common.mustCall((err) => { + server.close(); + assert.strictEqual(err.constructor, Error); + assert.strictEqual(err.message, 'write EBADF'); + })); + client._handle.close(); + client.write('foo'); + })); + })); +} + +{ + const server = net.createServer(); + + server.listen(common.mustCall(() => { + const port = server.address().port; + const client = net.connect({ port }, common.mustCall(() => { + client.on('error', common.expectsError({ + code: 'ERR_SOCKET_CLOSED', + message: 'Socket is closed', + name: 'Error' + })); + + server.close(); + + client._handle.close(); + client._handle = null; + client.write('foo'); + })); + })); +} diff --git a/test/js/node/test/parallel/test-net-timeout-no-handle.js b/test/js/node/test/parallel/test-net-timeout-no-handle.js index 57dd2c94ba..b6baf891af 100644 --- a/test/js/node/test/parallel/test-net-timeout-no-handle.js +++ b/test/js/node/test/parallel/test-net-timeout-no-handle.js @@ -5,7 +5,7 @@ const net = require('net'); const assert = require('assert'); const socket = new net.Socket(); -socket.setTimeout(common.platformTimeout(1200)); +socket.setTimeout(common.platformTimeout(50)); socket.on('timeout', common.mustCall(() => { assert.strictEqual(socket._handle, null); @@ -14,4 +14,4 @@ socket.on('timeout', common.mustCall(() => { socket.on('connect', common.mustNotCall()); // Since the timeout is unrefed, the code will exit without this -setTimeout(() => {}, common.platformTimeout(2500)); +setTimeout(() => {}, common.platformTimeout(200)); diff --git a/test/js/node/test/parallel/test-net-write-fully-async-buffer.js b/test/js/node/test/parallel/test-net-write-fully-async-buffer.js index 4dfb905d23..93074c3c49 100644 --- a/test/js/node/test/parallel/test-net-write-fully-async-buffer.js +++ b/test/js/node/test/parallel/test-net-write-fully-async-buffer.js @@ -23,7 +23,7 @@ const server = net.createServer(common.mustCall(function(conn) { } while (conn.write(Buffer.from(data))); - globalThis.gc({ type: 'major' }); + globalThis.gc({ type: 'minor' }); // The buffer allocated above should still be 
alive. } diff --git a/test/js/node/test/parallel/test-net-write-fully-async-hex-string.js b/test/js/node/test/parallel/test-net-write-fully-async-hex-string.js index c1ebe7e68b..2719ad6b5b 100644 --- a/test/js/node/test/parallel/test-net-write-fully-async-hex-string.js +++ b/test/js/node/test/parallel/test-net-write-fully-async-hex-string.js @@ -21,7 +21,7 @@ const server = net.createServer(common.mustCall(function(conn) { } while (conn.write(data, 'hex')); - globalThis.gc({ type: 'major' }); + globalThis.gc({ type: 'minor' }); // The buffer allocated inside the .write() call should still be alive. } diff --git a/test/js/node/test/parallel/test-os-homedir-no-envvar.js b/test/js/node/test/parallel/test-os-homedir-no-envvar.js index 75d439b2ed..2f9b1b47a7 100644 --- a/test/js/node/test/parallel/test-os-homedir-no-envvar.js +++ b/test/js/node/test/parallel/test-os-homedir-no-envvar.js @@ -23,8 +23,7 @@ if (process.argv[2] === 'child') { delete process.env.HOME; const child = cp.spawnSync(process.execPath, [__filename, 'child'], { - env: process.env, - stdio: 'inherit', + env: process.env }); assert.strictEqual(child.status, 0); diff --git a/test/js/node/test/parallel/test-pipe-head.js b/test/js/node/test/parallel/test-pipe-head.js index 1e79249c29..f0b66a9d43 100644 --- a/test/js/node/test/parallel/test-pipe-head.js +++ b/test/js/node/test/parallel/test-pipe-head.js @@ -5,12 +5,13 @@ const assert = require('assert'); const exec = require('child_process').exec; -const nodePath = process.argv[0]; const script = fixtures.path('print-10-lines.js'); -const cmd = `"${nodePath}" "${script}" | head -2`; +const cmd = `"${common.isWindows ? process.execPath : '$NODE'}" "${common.isWindows ? script : '$FILE'}" | head -2`; -exec(cmd, common.mustSucceed((stdout, stderr) => { +exec(cmd, { + env: common.isWindows ? 
process.env : { ...process.env, NODE: process.execPath, FILE: script }, +}, common.mustSucceed((stdout, stderr) => { const lines = stdout.split('\n'); assert.strictEqual(lines.length, 3); })); diff --git a/test/js/node/test/parallel/test-preload-self-referential.js b/test/js/node/test/parallel/test-preload-self-referential.js index 2624527deb..6868133297 100644 --- a/test/js/node/test/parallel/test-preload-self-referential.js +++ b/test/js/node/test/parallel/test-preload-self-referential.js @@ -4,17 +4,19 @@ const common = require('../common'); const fixtures = require('../common/fixtures'); const assert = require('assert'); const { exec } = require('child_process'); +const { isMainThread } = require('worker_threads'); const nodeBinary = process.argv[0]; -if (!common.isMainThread) +if (!isMainThread) { common.skip('process.chdir is not available in Workers'); +} const selfRefModule = fixtures.path('self_ref_module'); const fixtureA = fixtures.path('printA.js'); -exec(`"${nodeBinary}" -r self_ref "${fixtureA}"`, { cwd: selfRefModule }, - (err, stdout, stderr) => { - assert.ifError(err); +const [cmd, opts] = common.escapePOSIXShell`"${nodeBinary}" -r self_ref "${fixtureA}"`; +exec(cmd, { ...opts, cwd: selfRefModule }, + common.mustSucceed((stdout, stderr) => { assert.strictEqual(stdout, 'A\n'); - }); + })); diff --git a/test/js/node/test/parallel/test-process-abort.js b/test/js/node/test/parallel/test-process-abort.js index 665e1399a3..34353befb0 100644 --- a/test/js/node/test/parallel/test-process-abort.js +++ b/test/js/node/test/parallel/test-process-abort.js @@ -2,9 +2,11 @@ const common = require('../common'); const assert = require('assert'); +const { isMainThread } = require('worker_threads'); -if (!common.isMainThread) +if (!isMainThread) { common.skip('process.abort() is not available in Workers'); +} // Check that our built-in methods do not have a prototype/constructor behaviour // if they don't need to. This could be tested for any of our C++ methods. diff --git a/test/js/node/test/parallel/test-process-beforeexit-throw-exit.js b/test/js/node/test/parallel/test-process-beforeexit-throw-exit.js index 6e9d764be9..c967d3a627 100644 --- a/test/js/node/test/parallel/test-process-beforeexit-throw-exit.js +++ b/test/js/node/test/parallel/test-process-beforeexit-throw-exit.js @@ -1,6 +1,10 @@ 'use strict'; const common = require('../common'); -common.skipIfWorker(); +const { isMainThread } = require('worker_threads'); + +if (!isMainThread) { + common.skip('This test only works on a main thread'); +} // Test that 'exit' is emitted if 'beforeExit' throws. 
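Reviewer's note (not part of the patch): the hunks around here repeat one mechanical migration — the removed `common.isMainThread` check and `common.skipIfWorker()` helper are replaced by `isMainThread` from `node:worker_threads` plus an explicit `common.skip(...)`. A minimal sketch of the resulting pattern, assembled from the diff lines above:

```js
'use strict';
const common = require('../common');
const { isMainThread } = require('worker_threads');

// Guard main-thread-only behaviour (process.chdir, signal handling, umask, ...)
// with the worker_threads export instead of the removed common.isMainThread.
if (!isMainThread) {
  common.skip('process.chdir is not available in Workers');
}
```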
diff --git a/test/js/node/test/parallel/test-process-chdir-errormessage.js b/test/js/node/test/parallel/test-process-chdir-errormessage.js index 0ed368287b..727a13f6f6 100644 --- a/test/js/node/test/parallel/test-process-chdir-errormessage.js +++ b/test/js/node/test/parallel/test-process-chdir-errormessage.js @@ -1,8 +1,11 @@ 'use strict'; const common = require('../common'); -if (!common.isMainThread) +const { isMainThread } = require('worker_threads'); + +if (!isMainThread) { common.skip('process.chdir is not available in Workers'); +} const assert = require('assert'); assert.throws( diff --git a/test/js/node/test/parallel/test-process-chdir.js b/test/js/node/test/parallel/test-process-chdir.js index ee59df853b..42d2a60c8e 100644 --- a/test/js/node/test/parallel/test-process-chdir.js +++ b/test/js/node/test/parallel/test-process-chdir.js @@ -4,9 +4,11 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); +const { isMainThread } = require('worker_threads'); -if (!common.isMainThread) +if (!isMainThread) { common.skip('process.chdir is not available in Workers'); +} const tmpdir = require('../common/tmpdir'); diff --git a/test/js/node/test/parallel/test-process-exit-handler.js b/test/js/node/test/parallel/test-process-exit-handler.js index d74e320fe6..2546aa60a5 100644 --- a/test/js/node/test/parallel/test-process-exit-handler.js +++ b/test/js/node/test/parallel/test-process-exit-handler.js @@ -1,8 +1,10 @@ 'use strict'; const common = require('../common'); +const { isMainThread } = require('worker_threads'); -if (!common.isMainThread) +if (!isMainThread) { common.skip('execArgv does not affect Workers'); +} // This test ensures that no asynchronous operations are performed in the 'exit' // handler. 
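Reviewer's note (not part of the patch): the `common.escapePOSIXShell` tagged template adopted in the `test-preload-self-referential.js` hunk above (and in the `test-stdio-closed.js` and `test-stdout-to-file.js` hunks below) returns a `[command, options]` pair; judging by these usages, interpolated values are routed through environment variables carried in the options rather than spliced into the shell string, which sidesteps quoting bugs. A hedged usage sketch — the helper lives in `test/common`, and its exact semantics are defined there, not here:

```js
'use strict';
const common = require('../common');
const { exec } = require('child_process');

// escapePOSIXShell is a template tag: it yields [cmd, opts], where opts
// (when present) supplies env vars standing in for the interpolated values.
const [cmd, opts] = common.escapePOSIXShell`"${process.execPath}" -p "40 + 2"`;
exec(cmd, opts, common.mustSucceed((stdout) => {
  console.log(stdout.trim()); // expected: '42'
}));
```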
diff --git a/test/js/node/test/parallel/test-process-features.js b/test/js/node/test/parallel/test-process-features.js index 3b4677c561..e4365b656d 100644 --- a/test/js/node/test/parallel/test-process-features.js +++ b/test/js/node/test/parallel/test-process-features.js @@ -3,20 +3,24 @@ require('../common'); const assert = require('assert'); -const keys = new Set(Object.keys(process.features)); +const actualKeys = new Set(Object.keys(process.features)); +const expectedKeys = new Map([ + ['inspector', ['boolean']], + ['debug', ['boolean']], + ['uv', ['boolean']], + ['ipv6', ['boolean']], + ['openssl_is_boringssl', ['boolean']], + ['tls_alpn', ['boolean']], + ['tls_sni', ['boolean']], + ['tls_ocsp', ['boolean']], + ['tls', ['boolean']], + ['cached_builtins', ['boolean']], + ['require_module', ['boolean']], + ['typescript', ['boolean', 'string']], +]); -assert.deepStrictEqual(keys, new Set([ - 'inspector', - 'debug', - 'uv', - 'ipv6', - 'tls_alpn', - 'tls_sni', - 'tls_ocsp', - 'tls', - 'cached_builtins', -])); +assert.deepStrictEqual(actualKeys, new Set(expectedKeys.keys())); -for (const key of keys) { - assert.strictEqual(typeof process.features[key], 'boolean'); +for (const [key, expected] of expectedKeys) { + assert.ok(expected.includes(typeof process.features[key]), `typeof process.features.${key} is not one of [${expected.join(', ')}]`); } diff --git a/test/js/node/test/parallel/test-process-release.js b/test/js/node/test/parallel/test-process-release.js index 98a089a8f9..ae4a02aa85 100644 --- a/test/js/node/test/parallel/test-process-release.js +++ b/test/js/node/test/parallel/test-process-release.js @@ -27,6 +27,8 @@ if (versionParts[0] === '4' && versionParts[1] >= 2) { assert.strictEqual(process.release.lts, 'Hydrogen'); } else if (versionParts[0] === '20' && versionParts[1] >= 9) { assert.strictEqual(process.release.lts, 'Iron'); +} else if (versionParts[0] === '22' && versionParts[1] >= 11) { + assert.strictEqual(process.release.lts, 'Jod'); } else { assert.strictEqual(process.release.lts, undefined); } diff --git a/test/js/node/test/parallel/test-process-umask-mask.js b/test/js/node/test/parallel/test-process-umask-mask.js index d599379761..f0a67b8f14 100644 --- a/test/js/node/test/parallel/test-process-umask-mask.js +++ b/test/js/node/test/parallel/test-process-umask-mask.js @@ -5,8 +5,9 @@ const common = require('../common'); const assert = require('assert'); +const { isMainThread } = require('worker_threads'); -if (!common.isMainThread) +if (!isMainThread) common.skip('Setting process.umask is not supported in Workers'); let mask; diff --git a/test/js/node/test/parallel/test-process-umask.js b/test/js/node/test/parallel/test-process-umask.js index e90955f394..594f75ebeb 100644 --- a/test/js/node/test/parallel/test-process-umask.js +++ b/test/js/node/test/parallel/test-process-umask.js @@ -22,8 +22,9 @@ 'use strict'; const common = require('../common'); const assert = require('assert'); +const { isMainThread } = require('worker_threads'); -if (!common.isMainThread) { +if (!isMainThread) { assert.strictEqual(typeof process.umask(), 'number'); assert.throws(() => { process.umask('0664'); diff --git a/test/js/node/test/parallel/test-readline-interface-no-trailing-newline.js b/test/js/node/test/parallel/test-readline-interface-no-trailing-newline.js index b3392db861..398b85838c 100644 --- a/test/js/node/test/parallel/test-readline-interface-no-trailing-newline.js +++ b/test/js/node/test/parallel/test-readline-interface-no-trailing-newline.js @@ -3,7 +3,9 @@ const common = 
require('../common'); const ArrayStream = require('../common/arraystream'); const assert = require('assert'); -common.skipIfDumbTerminal(); +if (process.env.TERM === 'dumb') { + common.skip('skipping - dumb terminal'); +} const readline = require('readline'); const rli = new readline.Interface({ diff --git a/test/js/node/test/parallel/test-readline-interface-recursive-writes.js b/test/js/node/test/parallel/test-readline-interface-recursive-writes.js index 3a0aee5be9..ea3df1968d 100644 --- a/test/js/node/test/parallel/test-readline-interface-recursive-writes.js +++ b/test/js/node/test/parallel/test-readline-interface-recursive-writes.js @@ -3,7 +3,9 @@ const common = require('../common'); const ArrayStream = require('../common/arraystream'); const assert = require('assert'); -common.skipIfDumbTerminal(); +if (process.env.TERM === 'dumb') { + common.skip('skipping - dumb terminal'); +} const readline = require('readline'); const rli = new readline.Interface({ diff --git a/test/js/node/test/parallel/test-readline-position.js b/test/js/node/test/parallel/test-readline-position.js index 3603a42ece..ac2fe43b37 100644 --- a/test/js/node/test/parallel/test-readline-position.js +++ b/test/js/node/test/parallel/test-readline-position.js @@ -7,7 +7,9 @@ const assert = require('assert'); const ctrlU = { ctrl: true, name: 'u' }; -common.skipIfDumbTerminal(); +if (process.env.TERM === 'dumb') { + common.skip('skipping - dumb terminal'); +} { const input = new PassThrough(); diff --git a/test/js/node/test/parallel/test-readline-undefined-columns.js b/test/js/node/test/parallel/test-readline-undefined-columns.js index 25bafe957f..d7000a16dd 100644 --- a/test/js/node/test/parallel/test-readline-undefined-columns.js +++ b/test/js/node/test/parallel/test-readline-undefined-columns.js @@ -5,7 +5,9 @@ const assert = require('assert'); const PassThrough = require('stream').PassThrough; const readline = require('readline'); -common.skipIfDumbTerminal(); +if (process.env.TERM === 'dumb') { + common.skip('skipping - dumb terminal'); +} // Checks that tab completion still works // when output column size is undefined diff --git a/test/js/node/test/parallel/test-readline.js b/test/js/node/test/parallel/test-readline.js index 77799fc14c..0cf5779429 100644 --- a/test/js/node/test/parallel/test-readline.js +++ b/test/js/node/test/parallel/test-readline.js @@ -4,7 +4,9 @@ const { PassThrough } = require('stream'); const readline = require('readline'); const assert = require('assert'); -common.skipIfDumbTerminal(); +if (process.env.TERM === 'dumb') { + common.skip('skipping - dumb terminal'); +} { const input = new PassThrough(); diff --git a/test/js/node/test/parallel/test-repl-preview-without-inspector.js b/test/js/node/test/parallel/test-repl-preview-without-inspector.js index 8905d21483..67090a928f 100644 --- a/test/js/node/test/parallel/test-repl-preview-without-inspector.js +++ b/test/js/node/test/parallel/test-repl-preview-without-inspector.js @@ -65,7 +65,7 @@ function runAndWait(cmds, repl) { return promise; } -const repl = REPLServer({ +const repl = new REPLServer({ prompt: PROMPT, stream: new REPLStream(), ignoreUndefined: true, diff --git a/test/js/node/test/parallel/test-require-enoent-dir.js b/test/js/node/test/parallel/test-require-enoent-dir.js index e4db66f985..2e60042585 100644 --- a/test/js/node/test/parallel/test-require-enoent-dir.js +++ b/test/js/node/test/parallel/test-require-enoent-dir.js @@ -13,19 +13,15 @@ fs.writeFileSync(fooPath, ''); const dirPath = tmpdir.resolve('delete_me'); 
fs.mkdirSync(dirPath, { - recursive: true, + recursive: true }); const barPath = path.join(dirPath, 'bar.cjs'); -fs.writeFileSync( - barPath, - ` +fs.writeFileSync(barPath, ` module.exports = () => require('../foo.cjs').call() -` -); +`); const foo = require(fooPath); -console.log('fooPath', fooPath, foo); const unique = Symbol('unique'); foo.call = common.mustCall(() => unique); const bar = require(barPath); diff --git a/test/js/node/test/parallel/test-require-symlink.js b/test/js/node/test/parallel/test-require-symlink.js index 0c4477023b..9ca543e8d6 100644 --- a/test/js/node/test/parallel/test-require-symlink.js +++ b/test/js/node/test/parallel/test-require-symlink.js @@ -2,10 +2,14 @@ 'use strict'; const common = require('../common'); -if (!common.canCreateSymLink()) +if (!common.canCreateSymLink()) { common.skip('insufficient privileges'); -if (!common.isMainThread) +} +const { isMainThread } = require('worker_threads'); + +if (!isMainThread) { common.skip('process.chdir is not available in Workers'); +} const assert = require('assert'); const { spawn } = require('child_process'); diff --git a/test/js/node/test/parallel/test-signal-args.js b/test/js/node/test/parallel/test-signal-args.js index 7b72ed6dcb..28a077ecc1 100644 --- a/test/js/node/test/parallel/test-signal-args.js +++ b/test/js/node/test/parallel/test-signal-args.js @@ -3,10 +3,15 @@ const common = require('../common'); const assert = require('assert'); -if (common.isWindows) +if (common.isWindows) { common.skip('Sending signals with process.kill is not supported on Windows'); -if (!common.isMainThread) +} + +const { isMainThread } = require('worker_threads'); + +if (!isMainThread) { common.skip('No signal handling available in Workers'); +} process.once('SIGINT', common.mustCall((signal) => { assert.strictEqual(signal, 'SIGINT'); diff --git a/test/js/node/test/parallel/test-signal-handler.js b/test/js/node/test/parallel/test-signal-handler.js index 68c9758637..b84d2063a2 100644 --- a/test/js/node/test/parallel/test-signal-handler.js +++ b/test/js/node/test/parallel/test-signal-handler.js @@ -23,19 +23,21 @@ const common = require('../common'); -if (common.isWindows) +if (common.isWindows) { common.skip('SIGUSR1 and SIGHUP signals are not supported'); -if (!common.isMainThread) +} + +const { isMainThread } = require('worker_threads'); + +if (!isMainThread) { common.skip('Signal handling in Workers is not supported'); +} console.log(`process.pid: ${process.pid}`); -// On Bun in Linux, SIGUSR1 is reserved for the GC. -// So we need to use a different signal. -const SIGNAL = process.platform === 'linux' ? 
'SIGUSR2' : 'SIGUSR1'; +process.on('SIGUSR1', common.mustCall()); -process.on(SIGNAL, common.mustCall()); -process.on(SIGNAL, common.mustCall(function() { +process.on('SIGUSR1', common.mustCall(function() { setTimeout(function() { console.log('End.'); process.exit(0); @@ -47,7 +49,7 @@ setInterval(function() { console.log(`running process...${++i}`); if (i === 5) { - process.kill(process.pid, SIGNAL); + process.kill(process.pid, 'SIGUSR1'); } }, 1); diff --git a/test/js/node/test/parallel/test-stdio-closed.js b/test/js/node/test/parallel/test-stdio-closed.js index 45f6d0832f..3d2cb88a47 100644 --- a/test/js/node/test/parallel/test-stdio-closed.js +++ b/test/js/node/test/parallel/test-stdio-closed.js @@ -20,7 +20,6 @@ if (common.isWindows) { proc.on('exit', common.mustCall(function(exitCode) { assert.strictEqual(exitCode, 0); })); - proc.stderr.pipe(process.stderr); return; } @@ -30,8 +29,8 @@ if (process.argv[2] === 'child') { } // Run the script in a shell but close stdout and stderr. -const cmd = `"${process.execPath}" "${__filename}" child 1>&- 2>&-`; -const proc = spawn('/bin/sh', ['-c', cmd], { stdio: 'inherit' }); +const [cmd, opts] = common.escapePOSIXShell`"${process.execPath}" "${__filename}" child 1>&- 2>&-`; +const proc = spawn('/bin/sh', ['-c', cmd], { ...opts, stdio: 'inherit' }); proc.on('exit', common.mustCall(function(exitCode) { assert.strictEqual(exitCode, 0); diff --git a/test/js/node/test/parallel/test-stdio-pipe-access.js b/test/js/node/test/parallel/test-stdio-pipe-access.js index ac0e22c399..6bf6b107c6 100644 --- a/test/js/node/test/parallel/test-stdio-pipe-access.js +++ b/test/js/node/test/parallel/test-stdio-pipe-access.js @@ -1,7 +1,10 @@ 'use strict'; const common = require('../common'); -if (!common.isMainThread) +const { isMainThread } = require('worker_threads'); + +if (!isMainThread) { common.skip("Workers don't have process-like stdio"); +} // Test if Node handles accessing process.stdin if it is a redirected // pipe without deadlocking diff --git a/test/js/node/test/parallel/test-stdout-to-file.js b/test/js/node/test/parallel/test-stdout-to-file.js index 9114f22443..761c26f820 100644 --- a/test/js/node/test/parallel/test-stdout-to-file.js +++ b/test/js/node/test/parallel/test-stdout-to-file.js @@ -13,9 +13,6 @@ const tmpFile = tmpdir.resolve('stdout.txt'); tmpdir.refresh(); function test(size, useBuffer, cb) { - const cmd = `"${process.argv[0]}" "${ - useBuffer ? scriptBuffer : scriptString}" ${size} > "${tmpFile}"`; - try { fs.unlinkSync(tmpFile); } catch { @@ -24,7 +21,9 @@ function test(size, useBuffer, cb) { console.log(`${size} chars to ${tmpFile}...`); - childProcess.exec(cmd, common.mustSucceed(() => { + childProcess.exec(...common.escapePOSIXShell`"${ + process.execPath}" "${useBuffer ? 
scriptBuffer : scriptString}" ${size} > "${tmpFile + }"`, common.mustSucceed(() => { console.log('done!'); const stat = fs.statSync(tmpFile); diff --git a/test/js/node/test/parallel/test-stream-big-packet.js b/test/js/node/test/parallel/test-stream-big-packet.js index fdbe3cd211..daa486c0a7 100644 --- a/test/js/node/test/parallel/test-stream-big-packet.js +++ b/test/js/node/test/parallel/test-stream-big-packet.js @@ -28,10 +28,8 @@ let passed = false; class TestStream extends stream.Transform { _transform(chunk, encoding, done) { - if (!passed) { - // Char 'a' only exists in the last write - passed = chunk.toString().includes('a'); - } + // Char 'a' only exists in the last write + passed ||= chunk.toString().includes('a'); done(); } } diff --git a/test/js/node/test/parallel/test-stream-filter.js b/test/js/node/test/parallel/test-stream-filter.js index e7711012bb..173e4f47e2 100644 --- a/test/js/node/test/parallel/test-stream-filter.js +++ b/test/js/node/test/parallel/test-stream-filter.js @@ -156,7 +156,9 @@ const { setTimeout } = require('timers/promises'); { // Error cases assert.throws(() => Readable.from([1]).filter(1), /ERR_INVALID_ARG_TYPE/); - assert.throws(() => Readable.from([1]).filter((x) => x, { concurrency: 'Foo' }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).filter((x) => x, { + concurrency: 'Foo' + }), /ERR_OUT_OF_RANGE/); assert.throws(() => Readable.from([1]).filter((x) => x, 1), /ERR_INVALID_ARG_TYPE/); } { diff --git a/test/js/node/test/parallel/test-stream-flatMap.js b/test/js/node/test/parallel/test-stream-flatMap.js index 9295b8a0f8..0e55119f7a 100644 --- a/test/js/node/test/parallel/test-stream-flatMap.js +++ b/test/js/node/test/parallel/test-stream-flatMap.js @@ -110,7 +110,9 @@ function oneTo5() { { // Error cases assert.throws(() => Readable.from([1]).flatMap(1), /ERR_INVALID_ARG_TYPE/); - assert.throws(() => Readable.from([1]).flatMap((x) => x, { concurrency: 'Foo' }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).flatMap((x) => x, { + concurrency: 'Foo' + }), /ERR_OUT_OF_RANGE/); assert.throws(() => Readable.from([1]).flatMap((x) => x, 1), /ERR_INVALID_ARG_TYPE/); assert.throws(() => Readable.from([1]).flatMap((x) => x, { signal: true }), /ERR_INVALID_ARG_TYPE/); } diff --git a/test/js/node/test/parallel/test-stream-readable-unshift.js b/test/js/node/test/parallel/test-stream-readable-unshift.js index e39a9abf36..cccc834fc1 100644 --- a/test/js/node/test/parallel/test-stream-readable-unshift.js +++ b/test/js/node/test/parallel/test-stream-readable-unshift.js @@ -156,9 +156,9 @@ const { Readable } = require('stream'); // Remove the 'readable' listener before unshifting stream.removeListener('readable', onRead); stream.unshift('a'); - stream.on('data', common.mustCall((chunk) => { - // console.log(chunk.length); - }, 50)); + stream.on('data', (chunk) => { + console.log(chunk.length); + }); break; } } diff --git a/test/js/node/test/parallel/test-stream-reduce.js b/test/js/node/test/parallel/test-stream-reduce.js index 4cee2b5d71..42c734305f 100644 --- a/test/js/node/test/parallel/test-stream-reduce.js +++ b/test/js/node/test/parallel/test-stream-reduce.js @@ -61,8 +61,8 @@ function sum(p, c) { throw new Error('boom'); } return c; - }, 0) - , /boom/).then(common.mustCall()); + }, 0), + /boom/).then(common.mustCall()); } { diff --git a/test/js/node/test/parallel/test-stream-toArray.js b/test/js/node/test/parallel/test-stream-toArray.js index 690b3c4b08..5c86410ed7 100644 --- a/test/js/node/test/parallel/test-stream-toArray.js +++ 
b/test/js/node/test/parallel/test-stream-toArray.js @@ -1,7 +1,9 @@ 'use strict'; const common = require('../common'); -const { Readable } = require('stream'); +const { + Readable, +} = require('stream'); const assert = require('assert'); { diff --git a/test/js/node/test/parallel/test-stream-typedarray.js b/test/js/node/test/parallel/test-stream-typedarray.js index a374989276..ae5846da09 100644 --- a/test/js/node/test/parallel/test-stream-typedarray.js +++ b/test/js/node/test/parallel/test-stream-typedarray.js @@ -63,7 +63,7 @@ const views = common.getArrayBufferViews(buffer); assert.strictEqual(chunk.encoding, 'buffer'); res += chunk.chunk; } - assert.strictEqual(res, 'ABCD'.repeat(9)); + assert.strictEqual(res, 'ABCD'.repeat(views.length)); }), }); diff --git a/test/js/node/test/parallel/test-string-decoder-fuzz.js b/test/js/node/test/parallel/test-string-decoder-fuzz.js index 542876e96e..3a6108e8fc 100644 --- a/test/js/node/test/parallel/test-string-decoder-fuzz.js +++ b/test/js/node/test/parallel/test-string-decoder-fuzz.js @@ -44,6 +44,5 @@ function runSingleFuzzTest() { } const start = Date.now(); -// Run this for 1 second -while (Date.now() - start < 1000) +while (Date.now() - start < 100) runSingleFuzzTest(); diff --git a/test/js/node/test/parallel/test-timers-api-refs.js b/test/js/node/test/parallel/test-timers-api-refs.js index 3c55a05ac4..a6a5419631 100644 --- a/test/js/node/test/parallel/test-timers-api-refs.js +++ b/test/js/node/test/parallel/test-timers-api-refs.js @@ -4,12 +4,12 @@ const timers = require('timers'); // Delete global APIs to make sure they're not relied on by the internal timers // code -delete global.setTimeout; -delete global.clearTimeout; -delete global.setInterval; -delete global.clearInterval; -delete global.setImmediate; -delete global.clearImmediate; +delete globalThis.setTimeout; +delete globalThis.clearTimeout; +delete globalThis.setInterval; +delete globalThis.clearInterval; +delete globalThis.setImmediate; +delete globalThis.clearImmediate; const timeoutCallback = () => { timers.clearTimeout(timeout); }; const timeout = timers.setTimeout(common.mustCall(timeoutCallback), 1); diff --git a/test/js/node/test/parallel/test-timers-invalid-clear.js b/test/js/node/test/parallel/test-timers-invalid-clear.js new file mode 100644 index 0000000000..b7d046794c --- /dev/null +++ b/test/js/node/test/parallel/test-timers-invalid-clear.js @@ -0,0 +1,13 @@ +'use strict'; + +const common = require('../common'); + +// clearImmediate should be a noop if anything other than an Immediate +// is passed to it. + +const t = setTimeout(common.mustCall()); + +clearImmediate(t); + +setTimeout(common.mustCall()); +setTimeout(common.mustCall()); diff --git a/test/js/node/test/parallel/test-timers-process-tampering.js b/test/js/node/test/parallel/test-timers-process-tampering.js index 766cc9f356..8632e7c96f 100644 --- a/test/js/node/test/parallel/test-timers-process-tampering.js +++ b/test/js/node/test/parallel/test-timers-process-tampering.js @@ -3,6 +3,6 @@ 'use strict'; const common = require('../common'); -global.process = {}; // Boom! -common.allowGlobals(global.process); +globalThis.process = {}; // Boom! 
+common.allowGlobals(globalThis.process); setImmediate(common.mustCall()); diff --git a/test/js/node/test/parallel/test-tls-add-context.js b/test/js/node/test/parallel/test-tls-add-context.js index 8d02866ce5..0929fb4046 100644 --- a/test/js/node/test/parallel/test-tls-add-context.js +++ b/test/js/node/test/parallel/test-tls-add-context.js @@ -22,16 +22,17 @@ const serverOptions = { let connections = 0; -const server = tls.createServer(serverOptions, (c) => { +const server = tls.createServer(serverOptions, common.mustCall((c) => { if (++connections === 3) { server.close(); } + console.log(c.servername,c.authorized); if (c.servername === 'unknowncontext') { assert.strictEqual(c.authorized, false); return; } assert.strictEqual(c.authorized, true); -}); +}, 3)); const secureContext = { key: loadPEM('agent1-key'), @@ -73,3 +74,5 @@ server.listen(0, common.mustCall(() => { client3.end(); })); })); + +setTimeout(()=>process.exit(0),1000).unref(); diff --git a/test/js/node/test/parallel/test-tls-cert-regression.js b/test/js/node/test/parallel/test-tls-cert-regression.js index 478402772e..5dab234013 100644 --- a/test/js/node/test/parallel/test-tls-cert-regression.js +++ b/test/js/node/test/parallel/test-tls-cert-regression.js @@ -21,52 +21,31 @@ 'use strict'; const common = require('../common'); +const fixtures = require('../common/fixtures'); +const assert = require('assert'); if (!common.hasCrypto) common.skip('missing crypto'); const tls = require('tls'); -const cert = -`-----BEGIN CERTIFICATE----- -MIIDNDCCAp2gAwIBAgIJAJvXLQpGPpm7MA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV -BAYTAkdCMRAwDgYDVQQIEwdHd3luZWRkMREwDwYDVQQHEwhXYXVuZmF3cjEUMBIG -A1UEChMLQWNrbmFjayBMdGQxEjAQBgNVBAsTCVRlc3QgQ2VydDESMBAGA1UEAxMJ -bG9jYWxob3N0MB4XDTA5MTEwMjE5MzMwNVoXDTEwMTEwMjE5MzMwNVowcDELMAkG -A1UEBhMCR0IxEDAOBgNVBAgTB0d3eW5lZGQxETAPBgNVBAcTCFdhdW5mYXdyMRQw -EgYDVQQKEwtBY2tuYWNrIEx0ZDESMBAGA1UECxMJVGVzdCBDZXJ0MRIwEAYDVQQD -Ewlsb2NhbGhvc3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANdym7nGe2yw -6LlJfJrQtC5TmKOGrSXiyolYCbGOy4xZI4KD31d3097jhlQFJyF+10gwkE62DuJe -fLvBZDUsvLe1R8bzlVhZnBVn+3QJyUIWQAL+DsRj8P3KoD7k363QN5dIaA1GOAg2 -vZcPy1HCUsvOgvDXGRUCZqNLAyt+h/cpAgMBAAGjgdUwgdIwHQYDVR0OBBYEFK4s -VBV4shKUj3UX/fvSJnFaaPBjMIGiBgNVHSMEgZowgZeAFK4sVBV4shKUj3UX/fvS -JnFaaPBjoXSkcjBwMQswCQYDVQQGEwJHQjEQMA4GA1UECBMHR3d5bmVkZDERMA8G -A1UEBxMIV2F1bmZhd3IxFDASBgNVBAoTC0Fja25hY2sgTHRkMRIwEAYDVQQLEwlU -ZXN0IENlcnQxEjAQBgNVBAMTCWxvY2FsaG9zdIIJAJvXLQpGPpm7MAwGA1UdEwQF -MAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAFxR7BA1mUlsYqPiogtxSIfLzHWh+s0bJ -SBuhNrHes4U8QxS8+x/KWjd/81gzsf9J1C2VzTlFaydAgigz3SkQYgs+TMnFkT2o -9jqoJrcdf4WpZ2DQXUALaZgwNzPumMUSx8Ac5gO+BY/RHyP6fCodYvdNwyKslnI3 -US7eCSHZsVo= ------END CERTIFICATE-----`; +let key = fixtures.readKey('rsa_private.pem'); +let cert = fixtures.readKey('rsa_cert.crt'); -const key = -`-----BEGIN RSA PRIVATE KEY----- -MIICXgIBAAKBgQDXcpu5xntssOi5SXya0LQuU5ijhq0l4sqJWAmxjsuMWSOCg99X -d9Pe44ZUBSchftdIMJBOtg7iXny7wWQ1LLy3tUfG85VYWZwVZ/t0CclCFkAC/g7E -Y/D9yqA+5N+t0DeXSGgNRjgINr2XD8tRwlLLzoLw1xkVAmajSwMrfof3KQIDAQAB -AoGBAIBHR/tT93ce2mJAJAXV0AJpWc+7x2pwX2FpXtQujnlxNZhnRlrBCRCD7h4m -t0bVS/86kyGaesBDvAbavfx/N5keYzzmmSp5Ht8IPqKPydGWdigk4x90yWvktai7 -dWuRKF94FXr0GUuBONb/dfHdp4KBtzN7oIF9WydYGGXA9ZmBAkEA8/k01bfwQZIu -AgcdNEM94Zcug1gSspXtUu8exNQX4+PNVbadghZb1+OnUO4d3gvWfqvAnaXD3KV6 -N4OtUhQQ0QJBAOIRbKMfaymQ9yE3CQQxYfKmEhHXWARXVwuYqIFqjmhSjSXx0l/P -7mSHz1I9uDvxkJev8sQgu1TKIyTOdqPH1tkCQQDPa6H1yYoj1Un0Q2Qa2Mg1kTjk -Re6vkjPQ/KcmJEOjZjtekgFbZfLzmwLXFXqjG2FjFFaQMSxR3QYJSJQEYjbhAkEA -sy7OZcjcXnjZeEkv61Pc57/7qIp/6Aj2JGnefZ1gvI1Z9Q5kCa88rA/9Iplq8pA4 
-ZBKAoDW1ZbJGAsFmxc/6mQJAdPilhci0qFN86IGmf+ZBnwsDflIwHKDaVofti4wQ -sPWhSOb9VQjMXekI4Y2l8fqAVTS2Fn6+8jkVKxXBywSVCw== ------END RSA PRIVATE KEY-----`; +// This test validates that we accept certificates and keys which +// do not end with a newline. If a newline exists at the end +// of the key or cert being used, remove it. +let i = 0; +while (key[key.length - 1 - i] === 0x0a) i++; +if (i !== 0) key = key.slice(0, key.length - i); + +i = 0; +while (cert[cert.length - 1 - i] === 0x0a) i++; +if (i !== 0) cert = cert.slice(0, cert.length - i); function test(cert, key, cb) { + assert.notStrictEqual(cert.at(-1), 0x0a); + assert.notStrictEqual(key.at(-1), 0x0a); const server = tls.createServer({ cert, key diff --git a/test/js/node/test/parallel/test-tls-client-verify.js b/test/js/node/test/parallel/test-tls-client-verify.js index a8de1078bf..5471279433 100644 --- a/test/js/node/test/parallel/test-tls-client-verify.js +++ b/test/js/node/test/parallel/test-tls-client-verify.js @@ -73,13 +73,9 @@ function testServers(index, servers, clientOptions, cb) { const ok = serverOptions.ok; - if (serverOptions.key) { - serverOptions.key = loadPEM(serverOptions.key); - } + serverOptions.key &&= loadPEM(serverOptions.key); - if (serverOptions.cert) { - serverOptions.cert = loadPEM(serverOptions.cert); - } + serverOptions.cert &&= loadPEM(serverOptions.cert); const server = tls.createServer(serverOptions, common.mustCall(function(s) { s.end('hello world\n'); diff --git a/test/js/node/test/parallel/test-tls-friendly-error-message.js b/test/js/node/test/parallel/test-tls-friendly-error-message.js index 4ae9d3f3f9..84471da487 100644 --- a/test/js/node/test/parallel/test-tls-friendly-error-message.js +++ b/test/js/node/test/parallel/test-tls-friendly-error-message.js @@ -40,6 +40,5 @@ tls.createServer({ key, cert }).on('connection', common.mustCall(function() { const options = { port: this.address().port, rejectUnauthorized: true }; tls.connect(options).on('error', common.mustCall(function(err) { assert.strictEqual(err.code, 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'); - assert.strictEqual(err.message, 'unable to verify the first certificate'); })); })); diff --git a/test/js/node/test/parallel/test-tls-handshake-error.js b/test/js/node/test/parallel/test-tls-handshake-error.js index c57026f6fd..5547964780 100644 --- a/test/js/node/test/parallel/test-tls-handshake-error.js +++ b/test/js/node/test/parallel/test-tls-handshake-error.js @@ -23,4 +23,4 @@ const server = tls.createServer({ }, /no cipher match/i); server.close(); -})); \ No newline at end of file +})); diff --git a/test/js/node/test/parallel/test-tls-junk-closes-server.js b/test/js/node/test/parallel/test-tls-junk-closes-server.js index 08c2d39c68..7ec087c0e4 100644 --- a/test/js/node/test/parallel/test-tls-junk-closes-server.js +++ b/test/js/node/test/parallel/test-tls-junk-closes-server.js @@ -38,6 +38,7 @@ const server = tls.createServer(options, common.mustNotCall()); server.listen(0, common.mustCall(function() { const c = net.createConnection(this.address().port); + console.log(server.requestCert, server.rejectUnauthorized); c.on('data', function() { // We must consume all data sent by the server.
Otherwise the diff --git a/test/js/node/test/parallel/test-tls-keyengine-invalid-arg-type.js b/test/js/node/test/parallel/test-tls-keyengine-invalid-arg-type.js index 748ea3a39c..72fe526daf 100644 --- a/test/js/node/test/parallel/test-tls-keyengine-invalid-arg-type.js +++ b/test/js/node/test/parallel/test-tls-keyengine-invalid-arg-type.js @@ -21,4 +21,4 @@ assert.throws( privateKeyIdentifier: 0 }); }, { code: 'ERR_INVALID_ARG_TYPE', - message: / Received type number \(0\)$/ }); \ No newline at end of file + message: / Received type number \(0\)$/ }); diff --git a/test/js/node/test/parallel/test-tls-on-empty-socket.js b/test/js/node/test/parallel/test-tls-on-empty-socket.js index e58a147b72..87d51a81bb 100644 --- a/test/js/node/test/parallel/test-tls-on-empty-socket.js +++ b/test/js/node/test/parallel/test-tls-on-empty-socket.js @@ -22,15 +22,15 @@ const server = tls.createServer({ const s = tls.connect({ socket: socket, rejectUnauthorized: false - }, common.mustCall(function() { - s.on('data', common.mustCall(function(chunk) { + }, function() { + s.on('data', function(chunk) { out += chunk; - })); - s.on('end', common.mustCall(function() { + }); + s.on('end', function() { s.destroy(); server.close(); - })); - })); + }); + }); socket.connect(this.address().port); }); diff --git a/test/js/node/test/parallel/test-tls-options-boolean-check.js b/test/js/node/test/parallel/test-tls-options-boolean-check.js index 9a23d1db8a..83fa4b4eb1 100644 --- a/test/js/node/test/parallel/test-tls-options-boolean-check.js +++ b/test/js/node/test/parallel/test-tls-options-boolean-check.js @@ -164,4 +164,4 @@ const caArrDataView = toDataView(caCert); [0, 0, 0], ].forEach(([key, cert, ca]) => { tls.createSecureContext({ key, cert, ca }); -}); \ No newline at end of file +}); diff --git a/test/js/node/test/parallel/test-tls-set-ciphers.js b/test/js/node/test/parallel/test-tls-set-ciphers.js index 313c5e2389..1e63e9376e 100644 --- a/test/js/node/test/parallel/test-tls-set-ciphers.js +++ b/test/js/node/test/parallel/test-tls-set-ciphers.js @@ -1,7 +1,17 @@ 'use strict'; const common = require('../common'); -if (!common.hasOpenSSL3) +if (!common.hasCrypto) { common.skip('missing crypto, or OpenSSL version lower than 3'); +} + +const { + hasOpenSSL, + hasOpenSSL3, +} = require('../common/crypto'); + +if (!hasOpenSSL3) { + common.skip('missing crypto, or OpenSSL version lower than 3'); +} const fixtures = require('../common/fixtures'); const { inspect } = require('util'); @@ -79,6 +89,11 @@ function test(cciphers, sciphers, cipher, cerr, serr, options) { const U = undefined; +let expectedTLSAlertError = 'ERR_SSL_SSLV3_ALERT_HANDSHAKE_FAILURE'; +if (hasOpenSSL(3, 2)) { + expectedTLSAlertError = 'ERR_SSL_SSL/TLS_ALERT_HANDSHAKE_FAILURE'; +} + // Have shared ciphers. test(U, 'AES256-SHA', 'AES256-SHA'); test('AES256-SHA', U, 'AES256-SHA'); @@ -89,13 +104,13 @@ test('TLS_AES_256_GCM_SHA384:!TLS_CHACHA20_POLY1305_SHA256', U, 'TLS_AES_256_GCM // Do not have shared ciphers. 
test('TLS_AES_256_GCM_SHA384', 'TLS_CHACHA20_POLY1305_SHA256', - U, 'ERR_SSL_SSLV3_ALERT_HANDSHAKE_FAILURE', 'ERR_SSL_NO_SHARED_CIPHER'); + U, expectedTLSAlertError, 'ERR_SSL_NO_SHARED_CIPHER'); -test('AES128-SHA', 'AES256-SHA', U, 'ERR_SSL_SSLV3_ALERT_HANDSHAKE_FAILURE', +test('AES256-SHA', 'AES256-SHA256', U, expectedTLSAlertError, 'ERR_SSL_NO_SHARED_CIPHER'); -test('AES128-SHA:TLS_AES_256_GCM_SHA384', - 'TLS_CHACHA20_POLY1305_SHA256:AES256-SHA', - U, 'ERR_SSL_SSLV3_ALERT_HANDSHAKE_FAILURE', 'ERR_SSL_NO_SHARED_CIPHER'); +test('AES256-SHA:TLS_AES_256_GCM_SHA384', + 'TLS_CHACHA20_POLY1305_SHA256:AES256-SHA256', + U, expectedTLSAlertError, 'ERR_SSL_NO_SHARED_CIPHER'); // Cipher order ignored, TLS1.3 chosen before TLS1.2. test('AES256-SHA:TLS_AES_256_GCM_SHA384', U, 'TLS_AES_256_GCM_SHA384'); @@ -110,11 +125,15 @@ test(U, 'AES256-SHA', 'TLS_AES_256_GCM_SHA384', U, U, { maxVersion: 'TLSv1.3' }) // TLS_AES_128_CCM_8_SHA256 & TLS_AES_128_CCM_SHA256 are not enabled by // default, but work. -test('TLS_AES_128_CCM_8_SHA256', U, - U, 'ERR_SSL_SSLV3_ALERT_HANDSHAKE_FAILURE', 'ERR_SSL_NO_SHARED_CIPHER'); +// However, for OpenSSL32 AES_128 is not enabled due to the +// default security level +if (!hasOpenSSL(3, 2)) { + test('TLS_AES_128_CCM_8_SHA256', U, + U, 'ERR_SSL_SSLV3_ALERT_HANDSHAKE_FAILURE', 'ERR_SSL_NO_SHARED_CIPHER'); -test('TLS_AES_128_CCM_8_SHA256', 'TLS_AES_128_CCM_8_SHA256', - 'TLS_AES_128_CCM_8_SHA256'); + test('TLS_AES_128_CCM_8_SHA256', 'TLS_AES_128_CCM_8_SHA256', + 'TLS_AES_128_CCM_8_SHA256'); +} // Invalid cipher values test(9, 'AES256-SHA', U, 'ERR_INVALID_ARG_TYPE', U); diff --git a/test/js/node/test/parallel/test-tls-transport-destroy-after-own-gc.js b/test/js/node/test/parallel/test-tls-transport-destroy-after-own-gc.js index 17c494ca0b..bcac2c6ebd 100644 --- a/test/js/node/test/parallel/test-tls-transport-destroy-after-own-gc.js +++ b/test/js/node/test/parallel/test-tls-transport-destroy-after-own-gc.js @@ -19,11 +19,11 @@ let clientTLSHandle = clientTLS._handle; // eslint-disable-line no-unused-vars setImmediate(() => { clientTLS = null; - global.gc(); + globalThis.gc(); clientTLSHandle = null; - global.gc(); + globalThis.gc(); setImmediate(() => { clientSide = null; - global.gc(); + globalThis.gc(); }); }); diff --git a/test/js/node/test/parallel/test-tls-write-error.js b/test/js/node/test/parallel/test-tls-write-error.js index b06f2fa2c5..8a8d820a09 100644 --- a/test/js/node/test/parallel/test-tls-write-error.js +++ b/test/js/node/test/parallel/test-tls-write-error.js @@ -17,9 +17,12 @@ const server_cert = fixtures.readKey('agent1-cert.pem'); const opts = { key: server_key, cert: server_cert, - ciphers: 'ALL@SECLEVEL=0' }; +if (!process.features.openssl_is_boringssl) { + opts.ciphers = 'ALL@SECLEVEL=0'; +} + const server = https.createServer(opts, (req, res) => { res.write('hello'); }).listen(0, common.mustCall(() => { diff --git a/test/js/node/test/parallel/test-unhandled-exception-with-worker-inuse.js b/test/js/node/test/parallel/test-unhandled-exception-with-worker-inuse.js index a3e823ca70..47c220245d 100644 --- a/test/js/node/test/parallel/test-unhandled-exception-with-worker-inuse.js +++ b/test/js/node/test/parallel/test-unhandled-exception-with-worker-inuse.js @@ -10,7 +10,7 @@ const common = require('../common'); // // The root cause of this issue is that before PerIsolateMessageListener() // is invoked by v8, v8 preserves the JS vm state, although it should -// switch to EXTERNEL. https://bugs.chromium.org/p/v8/issues/detail?id=13464 +// switch to EXTERNAL. 
https://bugs.chromium.org/p/v8/issues/detail?id=13464 // // Therefore, this commit can be considered as an workaround of the v8 bug, // but we also find it not useful to call SetIdle() when terminating. diff --git a/test/js/node/test/parallel/test-util-inherits.js b/test/js/node/test/parallel/test-util-inherits.js index 2ff8a84446..1729b1734d 100644 --- a/test/js/node/test/parallel/test-util-inherits.js +++ b/test/js/node/test/parallel/test-util-inherits.js @@ -88,6 +88,8 @@ assert.throws(() => { }, { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError', + message: 'The "superCtor.prototype" property must be of type object. ' + + 'Received undefined' }); assert.throws(() => { @@ -95,6 +97,8 @@ assert.throws(() => { }, { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError', + message: 'The "superCtor" argument must be of type function. ' + + 'Received null' }); assert.throws(() => { @@ -102,4 +106,5 @@ assert.throws(() => { }, { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError', + message: 'The "ctor" argument must be of type function. Received null' }); diff --git a/test/js/node/test/parallel/test-v8-deserialize-buffer.js b/test/js/node/test/parallel/test-v8-deserialize-buffer.js index f05631a72a..8626cf14a9 100644 --- a/test/js/node/test/parallel/test-v8-deserialize-buffer.js +++ b/test/js/node/test/parallel/test-v8-deserialize-buffer.js @@ -5,3 +5,7 @@ const v8 = require('v8'); process.on('warning', common.mustNotCall()); v8.deserialize(v8.serialize(Buffer.alloc(0))); +v8.deserialize(v8.serialize({ a: new Int32Array(1024) })); +v8.deserialize(v8.serialize({ b: new Int16Array(8192) })); +v8.deserialize(v8.serialize({ c: new Uint32Array(1024) })); +v8.deserialize(v8.serialize({ d: new Uint16Array(8192) })); diff --git a/test/js/node/test/parallel/test-vm-context-dont-contextify.js b/test/js/node/test/parallel/test-vm-context-dont-contextify.js new file mode 100644 index 0000000000..d75fc1438d --- /dev/null +++ b/test/js/node/test/parallel/test-vm-context-dont-contextify.js @@ -0,0 +1,185 @@ +'use strict'; + +// Check vm.constants.DONT_CONTEXTIFY works. + +const common = require('../common'); + +const assert = require('assert'); +const vm = require('vm'); +const fixtures = require('../common/fixtures'); + +{ + // Check identity of the returned object. + const context = vm.createContext(vm.constants.DONT_CONTEXTIFY); + // The globalThis in the new context should be reference equal to the returned object. + assert.strictEqual(vm.runInContext('globalThis', context), context); + assert(vm.isContext(context)); + assert.strictEqual(typeof context.Array, 'function'); // Can access builtins directly. + assert.deepStrictEqual(Object.keys(context), []); // Properties on the global proxy are not enumerable +} + +{ + // Check that vm.createContext can return the original context if re-passed. + const context = vm.createContext(vm.constants.DONT_CONTEXTIFY); + const context2 = new vm.createContext(context); + assert.strictEqual(context, context2); +} + +{ + // Check that the context is vanilla and that Script.runInContext works. + const context = vm.createContext(vm.constants.DONT_CONTEXTIFY); + const result = + new vm.Script('globalThis.hey = 1; Object.freeze(globalThis); globalThis.process') + .runInContext(context); + assert.strictEqual(globalThis.hey, undefined); // Should not leak into current context. + assert.strictEqual(result, undefined); // Vanilla context has no Node.js globals +} + +{ + // Check Script.runInNewContext works. 
+ const result = + new vm.Script('globalThis.hey = 1; Object.freeze(globalThis); globalThis.process') + .runInNewContext(vm.constants.DONT_CONTEXTIFY); + assert.strictEqual(globalThis.hey, undefined); // Should not leak into current context. + assert.strictEqual(result, undefined); // Vanilla context has no Node.js globals +} + +{ + // Check that vm.runInNewContext() works + const result = vm.runInNewContext( + 'globalThis.hey = 1; Object.freeze(globalThis); globalThis.process', + vm.constants.DONT_CONTEXTIFY); + assert.strictEqual(globalThis.hey, undefined); // Should not leak into current context. + assert.strictEqual(result, undefined); // Vanilla context has no Node.js globals +} + +{ + // Check that the global object of vanilla contexts works as expected. + const context = vm.createContext(vm.constants.DONT_CONTEXTIFY); + + // Check mutation via globalThis. + vm.runInContext('globalThis.foo = 1;', context); + assert.strictEqual(globalThis.foo, undefined); // Should not pollute the current context. + assert.strictEqual(context.foo, 1); + assert.strictEqual(vm.runInContext('globalThis.foo', context), 1); + assert.strictEqual(vm.runInContext('foo', context), 1); + + // Check mutation from outside. + context.foo = 2; + assert.strictEqual(context.foo, 2); + assert.strictEqual(vm.runInContext('globalThis.foo', context), 2); + assert.strictEqual(vm.runInContext('foo', context), 2); + + // Check contextual mutation. + vm.runInContext('bar = 1;', context); + assert.strictEqual(globalThis.bar, undefined); // Should not pollute the current context. + assert.strictEqual(context.bar, 1); + assert.strictEqual(vm.runInContext('globalThis.bar', context), 1); + assert.strictEqual(vm.runInContext('bar', context), 1); + + // Check adding new property from outside. + context.baz = 1; + assert.strictEqual(context.baz, 1); + assert.strictEqual(vm.runInContext('globalThis.baz', context), 1); + assert.strictEqual(vm.runInContext('baz', context), 1); + + // Check mutation via Object.defineProperty(). + vm.runInContext('Object.defineProperty(globalThis, "qux", {' + + 'enumerable: false, configurable: false, get() { return 1; } })', context); + assert.strictEqual(globalThis.qux, undefined); // Should not pollute the current context. + assert.strictEqual(context.qux, 1); + assert.strictEqual(vm.runInContext('qux', context), 1); + const desc = Object.getOwnPropertyDescriptor(context, 'qux'); + assert.strictEqual(desc.enumerable, false); + assert.strictEqual(desc.configurable, false); + assert.strictEqual(typeof desc.get, 'function'); + assert.throws(() => { context.qux = 1; }, { name: 'TypeError' }); + assert.throws(() => { Object.defineProperty(context, 'qux', { value: 1 }); }, { name: 'TypeError' }); + // Setting a value without a setter fails silently. + assert.strictEqual(vm.runInContext('qux = 2; qux', context), 1); + assert.throws(() => { + vm.runInContext('Object.defineProperty(globalThis, "qux", { value: 1 });'); + }, { name: 'TypeError' }); +} + +function checkFrozen(context) { + // Check mutation via globalThis. + vm.runInContext('globalThis.foo = 1', context); // Invoking setters on a frozen object fails silently. + assert.strictEqual(context.foo, undefined); + assert.strictEqual(vm.runInContext('globalThis.foo', context), undefined); + assert.throws(() => { + vm.runInContext('foo', context); // It should not be looked up contextually. + }, { + name: 'ReferenceError' + }); + + // Check mutation from outside.
+ assert.throws(() => { + context.foo = 2; + }, { name: 'TypeError' }); + assert.strictEqual(context.foo, undefined); + assert.strictEqual(vm.runInContext('globalThis.foo', context), undefined); + assert.throws(() => { + vm.runInContext('foo', context); // It should not be looked up contextually. + }, { + name: 'ReferenceError' + }); + + // Check contextual mutation. + vm.runInContext('bar = 1', context); // Invoking setters on a frozen object fails silently. + assert.strictEqual(context.bar, undefined); + assert.strictEqual(vm.runInContext('globalThis.bar', context), undefined); + assert.throws(() => { + vm.runInContext('bar', context); // It should not be looked up contextually. + }, { + name: 'ReferenceError' + }); + + // Check mutation via Object.defineProperty(). + assert.throws(() => { + vm.runInContext('Object.defineProperty(globalThis, "qux", {' + + 'enumerable: false, configurable: false, get() { return 1; } })', context); + }, { + name: 'TypeError' + }); + assert.strictEqual(context.qux, undefined); + assert.strictEqual(vm.runInContext('globalThis.qux', context), undefined); + assert.strictEqual(Object.getOwnPropertyDescriptor(context, 'qux'), undefined); + assert.throws(() => { Object.defineProperty(context, 'qux', { value: 1 }); }, { name: 'TypeError' }); + assert.throws(() => { + vm.runInContext('qux', context); + }, { + name: 'ReferenceError' + }); +} + +{ + // Check freezing the vanilla context's global object from within the context. + const context = vm.createContext(vm.constants.DONT_CONTEXTIFY); + // Only vanilla contexts' globals can be frozen. Contextified global objects cannot be frozen + // due to the presence of interceptors. + vm.runInContext('Object.freeze(globalThis)', context); + checkFrozen(context); +} + +{ + // Check freezing the vanilla context's global object from outside the context. + const context = vm.createContext(vm.constants.DONT_CONTEXTIFY); + Object.freeze(context); + checkFrozen(context); +} + +// Check importModuleDynamically works. +(async function() { + { + const moduleUrl = fixtures.fileURL('es-modules', 'message.mjs'); + const namespace = await import(moduleUrl.href); + // Check dynamic import works + const context = vm.createContext(vm.constants.DONT_CONTEXTIFY); + const script = new vm.Script(`import(${JSON.stringify(moduleUrl)})`, { + importModuleDynamically: vm.constants.USE_MAIN_CONTEXT_DEFAULT_LOADER, + }); + const promise = script.runInContext(context); + assert.strictEqual(await promise, namespace); + } +})().catch(common.mustNotCall()); diff --git a/test/js/node/test/parallel/test-vm-create-and-run-in-context.js b/test/js/node/test/parallel/test-vm-create-and-run-in-context.js index bd746cf2df..314ab95257 100644 --- a/test/js/node/test/parallel/test-vm-create-and-run-in-context.js +++ b/test/js/node/test/parallel/test-vm-create-and-run-in-context.js @@ -45,6 +45,6 @@ assert.strictEqual(context.thing, 'lala'); // Run in contextified sandbox without referencing the context const sandbox = { x: 1 }; vm.createContext(sandbox); -global.gc(); +globalThis.gc(); vm.runInContext('x = 2', sandbox); // Should not crash.
diff --git a/test/js/node/test/parallel/test-vm-cross-context.js b/test/js/node/test/parallel/test-vm-cross-context.js index b7cf1309d3..abdfde32a8 100644 --- a/test/js/node/test/parallel/test-vm-cross-context.js +++ b/test/js/node/test/parallel/test-vm-cross-context.js @@ -23,7 +23,7 @@ require('../common'); const vm = require('vm'); -const ctx = vm.createContext(global); +const ctx = vm.createContext(globalThis); // Should not throw. vm.runInContext('!function() { var x = console.log; }()', ctx); diff --git a/test/js/node/test/parallel/test-vm-global-get-own.js b/test/js/node/test/parallel/test-vm-global-get-own.js index 246fcbf866..de5e0a9619 100644 --- a/test/js/node/test/parallel/test-vm-global-get-own.js +++ b/test/js/node/test/parallel/test-vm-global-get-own.js @@ -9,7 +9,7 @@ const vm = require('vm'); // Related to: // - https://github.com/nodejs/node/issues/45983 -const global = vm.runInContext('this', vm.createContext()); +const contextGlobal = vm.runInContext('this', vm.createContext()); function runAssertions(data, property, viaDefine, value1, value2, value3) { // Define the property for the first time @@ -35,20 +35,20 @@ function runAssertionsOnSandbox(builder) { } // Assertions on: define property -runAssertions(global, 'toto', true, 1, 2, 3); -runAssertions(global, Symbol.for('toto'), true, 1, 2, 3); -runAssertions(global, 'tutu', true, fun1, fun2, fun3); -runAssertions(global, Symbol.for('tutu'), true, fun1, fun2, fun3); -runAssertions(global, 'tyty', true, fun1, 2, 3); -runAssertions(global, Symbol.for('tyty'), true, fun1, 2, 3); +runAssertions(contextGlobal, 'toto', true, 1, 2, 3); +runAssertions(contextGlobal, Symbol.for('toto'), true, 1, 2, 3); +runAssertions(contextGlobal, 'tutu', true, fun1, fun2, fun3); +runAssertions(contextGlobal, Symbol.for('tutu'), true, fun1, fun2, fun3); +runAssertions(contextGlobal, 'tyty', true, fun1, 2, 3); +runAssertions(contextGlobal, Symbol.for('tyty'), true, fun1, 2, 3); // Assertions on: direct assignment -runAssertions(global, 'titi', false, 1, 2, 3); -runAssertions(global, Symbol.for('titi'), false, 1, 2, 3); -runAssertions(global, 'tata', false, fun1, fun2, fun3); -runAssertions(global, Symbol.for('tata'), false, fun1, fun2, fun3); -runAssertions(global, 'tztz', false, fun1, 2, 3); -runAssertions(global, Symbol.for('tztz'), false, fun1, 2, 3); +runAssertions(contextGlobal, 'titi', false, 1, 2, 3); +runAssertions(contextGlobal, Symbol.for('titi'), false, 1, 2, 3); +runAssertions(contextGlobal, 'tata', false, fun1, fun2, fun3); +runAssertions(contextGlobal, Symbol.for('tata'), false, fun1, fun2, fun3); +runAssertions(contextGlobal, 'tztz', false, fun1, 2, 3); +runAssertions(contextGlobal, Symbol.for('tztz'), false, fun1, 2, 3); // Assertions on: define property from sandbox runAssertionsOnSandbox( diff --git a/test/js/node/test/parallel/test-vm-inherited_properties.js b/test/js/node/test/parallel/test-vm-inherited_properties.js index 92cd64a6df..0a1d06cbdf 100644 --- a/test/js/node/test/parallel/test-vm-inherited_properties.js +++ b/test/js/node/test/parallel/test-vm-inherited_properties.js @@ -20,7 +20,6 @@ let result = vm.runInContext('Object.hasOwnProperty(this, "propBase");', assert.strictEqual(result, false); - // Ref: https://github.com/nodejs/node/issues/5350 base = { __proto__: null }; base.x = 1; diff --git a/test/js/node/test/parallel/test-vm-is-context.js b/test/js/node/test/parallel/test-vm-is-context.js index 911c4acb5c..02dc7a596d 100644 --- a/test/js/node/test/parallel/test-vm-is-context.js +++ 
b/test/js/node/test/parallel/test-vm-is-context.js @@ -43,4 +43,4 @@ assert.strictEqual(vm.isContext(vm.createContext([])), true); const sandbox = { foo: 'bar' }; vm.createContext(sandbox); -assert.strictEqual(vm.isContext(sandbox), true); \ No newline at end of file +assert.strictEqual(vm.isContext(sandbox), true); diff --git a/test/js/node/test/parallel/test-vm-module-dynamic-import.js b/test/js/node/test/parallel/test-vm-module-dynamic-import.js new file mode 100644 index 0000000000..bd542ca920 --- /dev/null +++ b/test/js/node/test/parallel/test-vm-module-dynamic-import.js @@ -0,0 +1,117 @@ +'use strict'; + +// Flags: --experimental-vm-modules + +const common = require('../common'); + +const assert = require('assert'); +const { Script, SourceTextModule } = require('vm'); + +async function testNoCallback() { + const m = new SourceTextModule(` + globalThis.importResult = import("foo"); + globalThis.importResult.catch(() => {}); + `); + await m.link(common.mustNotCall()); + await m.evaluate(); + let threw = false; + try { + await globalThis.importResult; + } catch (err) { + threw = true; + assert.strictEqual(err.code, 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING'); + } + delete globalThis.importResult; + assert(threw); +} + +async function test() { + const foo = new SourceTextModule('export const a = 1;'); + await foo.link(common.mustNotCall()); + await foo.evaluate(); + + { + const s = new Script('import("foo")', { + importModuleDynamically: common.mustCall((specifier, wrap) => { + assert.strictEqual(specifier, 'foo'); + assert.strictEqual(wrap, s); + return foo; + }), + }); + + const result = s.runInThisContext(); + assert.strictEqual(await result, foo.namespace); + } + + { + const m = new SourceTextModule('globalThis.fooResult = import("foo")', { + importModuleDynamically: common.mustCall((specifier, wrap) => { + assert.strictEqual(specifier, 'foo'); + assert.strictEqual(wrap, m); + return foo; + }), + }); + await m.link(common.mustNotCall()); + await m.evaluate(); + assert.strictEqual(await globalThis.fooResult, foo.namespace); + delete globalThis.fooResult; + } + + { + const s = new Script('import("foo", { with: { key: "value" } })', { + importModuleDynamically: common.mustCall((specifier, wrap, attributes) => { + assert.strictEqual(specifier, 'foo'); + assert.strictEqual(wrap, s); + assert.deepStrictEqual(attributes, { __proto__: null, key: 'value' }); + return foo; + }), + }); + + const result = s.runInThisContext(); + assert.strictEqual(await result, foo.namespace); + } +} + +async function testInvalid() { + const m = new SourceTextModule('globalThis.fooResult = import("foo")', { + importModuleDynamically: common.mustCall((specifier, wrap) => { + return 5; + }), + }); + await m.link(common.mustNotCall()); + await m.evaluate(); + await globalThis.fooResult.catch(common.mustCall((e) => { + assert.strictEqual(e.code, 'ERR_VM_MODULE_NOT_MODULE'); + })); + delete globalThis.fooResult; + + const s = new Script('import("bar")', { + importModuleDynamically: common.mustCall((specifier, wrap) => { + return undefined; + }), + }); + let threw = false; + try { + await s.runInThisContext(); + } catch (e) { + threw = true; + assert.strictEqual(e.code, 'ERR_VM_MODULE_NOT_MODULE'); + } + assert(threw); +} + +async function testInvalidimportModuleDynamically() { + assert.throws( + () => new Script( + 'import("foo")', + { importModuleDynamically: false }), + { code: 'ERR_INVALID_ARG_TYPE' } + ); +} + +(async function() { + await testNoCallback(); + await test(); + await testInvalid(); + await 
testInvalidimportModuleDynamically(); +}()).then(common.mustCall()); diff --git a/test/js/node/test/parallel/test-vm-module-dynamic-namespace.js b/test/js/node/test/parallel/test-vm-module-dynamic-namespace.js new file mode 100644 index 0000000000..84937cd78d --- /dev/null +++ b/test/js/node/test/parallel/test-vm-module-dynamic-namespace.js @@ -0,0 +1,26 @@ +'use strict'; + +// Flags: --experimental-vm-modules + +const common = require('../common'); + +const assert = require('assert'); + +const { types } = require('util'); +const { SourceTextModule } = require('vm'); + +(async () => { + const m = new SourceTextModule('globalThis.importResult = import("");', { + importModuleDynamically: common.mustCall(async (specifier, wrap) => { + const m = new SourceTextModule(''); + await m.link(() => 0); + await m.evaluate(); + return m.namespace; + }), + }); + await m.link(() => 0); + await m.evaluate(); + const ns = await globalThis.importResult; + delete globalThis.importResult; + assert.ok(types.isModuleNamespaceObject(ns)); +})().then(common.mustCall()); diff --git a/test/js/node/test/parallel/test-vm-module-referrer-realm.mjs b/test/js/node/test/parallel/test-vm-module-referrer-realm.mjs new file mode 100644 index 0000000000..3957f147d8 --- /dev/null +++ b/test/js/node/test/parallel/test-vm-module-referrer-realm.mjs @@ -0,0 +1,70 @@ +// Flags: --experimental-vm-modules +import * as common from '../common/index.mjs'; +import assert from 'node:assert'; +import { Script, SourceTextModule, createContext } from 'node:vm'; + +async function test() { + const foo = new SourceTextModule('export const a = 1;'); + await foo.link(common.mustNotCall()); + await foo.evaluate(); + + const ctx = createContext({}, { + importModuleDynamically: common.mustCall((specifier, wrap) => { + assert.strictEqual(specifier, 'foo'); + assert.strictEqual(wrap, ctx); + return foo; + }, 2), + }); + { + const s = new Script('Promise.resolve("import(\'foo\')").then(eval)', { + importModuleDynamically: common.mustNotCall(), + }); + + const result = s.runInContext(ctx); + assert.strictEqual(await result, foo.namespace); + } + + { + const m = new SourceTextModule('globalThis.fooResult = Promise.resolve("import(\'foo\')").then(eval)', { + context: ctx, + importModuleDynamically: common.mustNotCall(), + }); + await m.link(common.mustNotCall()); + await m.evaluate(); + assert.strictEqual(await ctx.fooResult, foo.namespace); + delete ctx.fooResult; + } +} + +async function testMissing() { + const ctx = createContext({}); + { + const s = new Script('Promise.resolve("import(\'foo\')").then(eval)', { + importModuleDynamically: common.mustNotCall(), + }); + + const result = s.runInContext(ctx); + await assert.rejects(result, { + code: 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING', + }); + } + + { + const m = new SourceTextModule('globalThis.fooResult = Promise.resolve("import(\'foo\')").then(eval)', { + context: ctx, + importModuleDynamically: common.mustNotCall(), + }); + await m.link(common.mustNotCall()); + await m.evaluate(); + + await assert.rejects(ctx.fooResult, { + code: 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING', + }); + delete ctx.fooResult; + } +} + +await Promise.all([ + test(), + testMissing(), +]).then(common.mustCall()); diff --git a/test/js/node/test/parallel/test-vm-new-script-this-context.js b/test/js/node/test/parallel/test-vm-new-script-this-context.js index 18f39f9086..30b220e3d4 100644 --- a/test/js/node/test/parallel/test-vm-new-script-this-context.js +++ b/test/js/node/test/parallel/test-vm-new-script-this-context.js @@ 
-35,34 +35,34 @@ assert.throws(() => { script.runInThisContext(script); }, /^Error: test$/); -global.hello = 5; +globalThis.hello = 5; script = new Script('hello = 2'); script.runInThisContext(script); -assert.strictEqual(global.hello, 2); +assert.strictEqual(globalThis.hello, 2); // Pass values -global.code = 'foo = 1;' + +globalThis.code = 'foo = 1;' + 'bar = 2;' + 'if (typeof baz !== "undefined") throw new Error("test fail");'; -global.foo = 2; -global.obj = { foo: 0, baz: 3 }; -script = new Script(global.code); +globalThis.foo = 2; +globalThis.obj = { foo: 0, baz: 3 }; +script = new Script(globalThis.code); script.runInThisContext(script); -assert.strictEqual(global.obj.foo, 0); -assert.strictEqual(global.bar, 2); -assert.strictEqual(global.foo, 1); +assert.strictEqual(globalThis.obj.foo, 0); +assert.strictEqual(globalThis.bar, 2); +assert.strictEqual(globalThis.foo, 1); // Call a function -global.f = function() { global.foo = 100; }; +globalThis.f = function() { globalThis.foo = 100; }; script = new Script('f()'); script.runInThisContext(script); -assert.strictEqual(global.foo, 100); +assert.strictEqual(globalThis.foo, 100); common.allowGlobals( - global.hello, - global.code, - global.foo, - global.obj, - global.f + globalThis.hello, + globalThis.code, + globalThis.foo, + globalThis.obj, + globalThis.f ); diff --git a/test/js/node/test/parallel/test-vm-no-dynamic-import-callback.js b/test/js/node/test/parallel/test-vm-no-dynamic-import-callback.js new file mode 100644 index 0000000000..35b553d587 --- /dev/null +++ b/test/js/node/test/parallel/test-vm-no-dynamic-import-callback.js @@ -0,0 +1,20 @@ +'use strict'; + +const common = require('../common'); +const { Script, compileFunction } = require('vm'); +const assert = require('assert'); + +assert.rejects(async () => { + const script = new Script('import("fs")'); + const imported = script.runInThisContext(); + await imported; +}, { + code: 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING' +}).then(common.mustCall()); + +assert.rejects(async () => { + const imported = compileFunction('return import("fs")')(); + await imported; +}, { + code: 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING' +}).then(common.mustCall()); diff --git a/test/js/node/test/parallel/test-vm-options-validation.js b/test/js/node/test/parallel/test-vm-options-validation.js index d719a984e1..1d02f8eb90 100644 --- a/test/js/node/test/parallel/test-vm-options-validation.js +++ b/test/js/node/test/parallel/test-vm-options-validation.js @@ -78,17 +78,17 @@ assert.throws(() => { }, errCheck); } - [/*null,*/ 'bad', 42].forEach((value) => { + [null, 'bad', 42].forEach((value) => { assertErrors(value, invalidArgType); }); - // [{}, [1], 'bad', null].forEach((value) => { - // assertErrors({ timeout: value }, invalidArgType); - // }); - // [-1, 0, NaN].forEach((value) => { - // assertErrors({ timeout: value }, outOfRange); - // }); - // [{}, [1], 'bad', 1, null].forEach((value) => { - // assertErrors({ displayErrors: value }, invalidArgType); - // assertErrors({ breakOnSigint: value }, invalidArgType); - // }); -} \ No newline at end of file + [{}, [1], 'bad', null].forEach((value) => { + assertErrors({ timeout: value }, invalidArgType); + }); + [-1, 0, NaN].forEach((value) => { + assertErrors({ timeout: value }, outOfRange); + }); + [{}, [1], 'bad', 1, null].forEach((value) => { + assertErrors({ displayErrors: value }, invalidArgType); + assertErrors({ breakOnSigint: value }, invalidArgType); + }); +} diff --git a/test/js/node/test/parallel/test-vm-static-this.js 
b/test/js/node/test/parallel/test-vm-static-this.js index e9382d6c3b..f47c0b5d0d 100644 --- a/test/js/node/test/parallel/test-vm-static-this.js +++ b/test/js/node/test/parallel/test-vm-static-this.js @@ -33,9 +33,9 @@ assert.throws(function() { vm.runInThisContext('throw new Error(\'test\');'); }, /test/); -global.hello = 5; +globalThis.hello = 5; vm.runInThisContext('hello = 2'); -assert.strictEqual(global.hello, 2); +assert.strictEqual(globalThis.hello, 2); // pass values @@ -43,23 +43,23 @@ const code = 'foo = 1;' + 'bar = 2;' + 'if (typeof baz !== \'undefined\')' + 'throw new Error(\'test fail\');'; -global.foo = 2; -global.obj = { foo: 0, baz: 3 }; +globalThis.foo = 2; +globalThis.obj = { foo: 0, baz: 3 }; /* eslint-disable no-unused-vars */ const baz = vm.runInThisContext(code); /* eslint-enable no-unused-vars */ -assert.strictEqual(global.obj.foo, 0); -assert.strictEqual(global.bar, 2); -assert.strictEqual(global.foo, 1); +assert.strictEqual(globalThis.obj.foo, 0); +assert.strictEqual(globalThis.bar, 2); +assert.strictEqual(globalThis.foo, 1); // call a function -global.f = function() { global.foo = 100; }; +globalThis.f = function() { globalThis.foo = 100; }; vm.runInThisContext('f()'); -assert.strictEqual(global.foo, 100); +assert.strictEqual(globalThis.foo, 100); common.allowGlobals( - global.hello, - global.foo, - global.obj, - global.f + globalThis.hello, + globalThis.foo, + globalThis.obj, + globalThis.f ); diff --git a/test/js/node/test/parallel/test-websocket.js b/test/js/node/test/parallel/test-websocket.js index c595ec12bf..4a047d20e6 100644 --- a/test/js/node/test/parallel/test-websocket.js +++ b/test/js/node/test/parallel/test-websocket.js @@ -4,3 +4,4 @@ require('../common'); const assert = require('assert'); assert.strictEqual(typeof WebSocket, 'function'); +assert.strictEqual(typeof CloseEvent, 'function'); diff --git a/test/js/node/test/parallel/test-whatwg-url-custom-inspect.js b/test/js/node/test/parallel/test-whatwg-url-custom-inspect.js index 946c097eac..addd759b4f 100644 --- a/test/js/node/test/parallel/test-whatwg-url-custom-inspect.js +++ b/test/js/node/test/parallel/test-whatwg-url-custom-inspect.js @@ -45,7 +45,7 @@ assert.strictEqual( search: '?que=ry', searchParams: URLSearchParams { 'que' => 'ry' }, hash: '#hash', - [Symbol(context)]: URLContext { + Symbol(context): URLContext { href: 'https://username:password@host.name:8080/path/name/?que=ry#hash', protocol_end: 6, username_end: 16, diff --git a/test/js/node/test/parallel/test-worker-terminate-null-handler.js b/test/js/node/test/parallel/test-worker-terminate-null-handler.js index 9db2e38b5c..e546e66265 100644 --- a/test/js/node/test/parallel/test-worker-terminate-null-handler.js +++ b/test/js/node/test/parallel/test-worker-terminate-null-handler.js @@ -15,9 +15,7 @@ process.once('beforeExit', common.mustCall(() => worker.ref())); worker.on('exit', common.mustCall(() => { worker.terminate().then((res) => assert.strictEqual(res, undefined)); - worker.terminate(() => null).then( - (res) => assert.strictEqual(res, undefined) - ); + })); worker.unref(); diff --git a/test/js/node/test/parallel/test-zlib-const.js b/test/js/node/test/parallel/test-zlib-const.js index 342c8c712a..5b9a127f0e 100644 --- a/test/js/node/test/parallel/test-zlib-const.js +++ b/test/js/node/test/parallel/test-zlib-const.js @@ -1,4 +1,4 @@ -/* eslint-disable strict */ +'use strict'; require('../common'); const assert = require('assert'); @@ -9,27 +9,17 @@ assert.strictEqual(zlib.constants.Z_OK, 0, 'Expected Z_OK to be 0;', `got 
${zlib.constants.Z_OK}`, ].join(' ')); -zlib.constants.Z_OK = 1; -assert.strictEqual(zlib.constants.Z_OK, 0, - [ - 'Z_OK should be immutable.', - `Expected to get 0, got ${zlib.constants.Z_OK}`, - ].join(' ')); + +assert.throws(() => { zlib.constants.Z_OK = 1; }, + TypeError, 'zlib.constants.Z_OK should be immutable'); assert.strictEqual(zlib.codes.Z_OK, 0, `Expected Z_OK to be 0; got ${zlib.codes.Z_OK}`); -zlib.codes.Z_OK = 1; -assert.strictEqual(zlib.codes.Z_OK, 0, - [ - 'Z_OK should be immutable.', - `Expected to get 0, got ${zlib.codes.Z_OK}`, - ].join(' ')); -zlib.codes = { Z_OK: 1 }; -assert.strictEqual(zlib.codes.Z_OK, 0, - [ - 'Z_OK should be immutable.', - `Expected to get 0, got ${zlib.codes.Z_OK}`, - ].join(' ')); +assert.throws(() => { zlib.codes.Z_OK = 1; }, + TypeError, 'zlib.codes.Z_OK should be immutable'); + +assert.throws(() => { zlib.codes = { Z_OK: 1 }; }, + TypeError, 'zlib.codes should be immutable'); assert.ok(Object.isFrozen(zlib.codes), [ diff --git a/test/js/node/test/parallel/test-zlib-dictionary.js b/test/js/node/test/parallel/test-zlib-dictionary.js index 47eaaa62d0..49a01d5a03 100644 --- a/test/js/node/test/parallel/test-zlib-dictionary.js +++ b/test/js/node/test/parallel/test-zlib-dictionary.js @@ -172,4 +172,4 @@ for (const dict of [spdyDict, ...common.getBufferSources(spdyDict)]) { deflateResetDictionaryTest(dict); rawDictionaryTest(dict); deflateRawResetDictionaryTest(dict); -} \ No newline at end of file +} diff --git a/test/js/node/test/parallel/test-zlib-flush-flags.js b/test/js/node/test/parallel/test-zlib-flush-flags.js index f156c81847..3d8e609adb 100644 --- a/test/js/node/test/parallel/test-zlib-flush-flags.js +++ b/test/js/node/test/parallel/test-zlib-flush-flags.js @@ -1,5 +1,5 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const zlib = require('zlib'); @@ -10,7 +10,8 @@ assert.throws( { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError', - message: 'The "options.flush" property must be of type number.' + common.invalidArgTypeHelper('foobar') + message: 'The "options.flush" property must be of type number. ' + + "Received type string ('foobar')" } ); @@ -31,7 +32,8 @@ assert.throws( { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError', - message: 'The "options.finishFlush" property must be of type number.' + common.invalidArgTypeHelper('foobar') + message: 'The "options.finishFlush" property must be of type number. ' + + "Received type string ('foobar')" } ); diff --git a/test/js/node/test/parallel/test-zlib-invalid-input-memory.js b/test/js/node/test/parallel/test-zlib-invalid-input-memory.js index c4dbe4c081..ac718395da 100644 --- a/test/js/node/test/parallel/test-zlib-invalid-input-memory.js +++ b/test/js/node/test/parallel/test-zlib-invalid-input-memory.js @@ -17,7 +17,7 @@ const ongc = common.mustCall(); strm.once('error', common.mustCall((err) => { assert(err); setImmediate(() => { - global.gc(); + globalThis.gc(); // Keep the event loop alive for seeing the async_hooks destroy hook // we use for GC tracking... // TODO(addaleax): This should maybe not be necessary? 
@@ -25,4 +25,4 @@ const ongc = common.mustCall(); }); })); onGC(strm, { ongc }); -} \ No newline at end of file +} diff --git a/test/js/node/test/parallel/test-zlib-random-byte-pipes.js b/test/js/node/test/parallel/test-zlib-random-byte-pipes.js index 6a7d7c505e..382c70c09d 100644 --- a/test/js/node/test/parallel/test-zlib-random-byte-pipes.js +++ b/test/js/node/test/parallel/test-zlib-random-byte-pipes.js @@ -41,17 +41,17 @@ class RandomReadStream extends Stream { this._processing = false; this._hasher = crypto.createHash('sha1'); - opt = opt || {}; + opt ||= {}; // base block size. - opt.block = opt.block || 256 * 1024; + opt.block ||= 256 * 1024; // Total number of bytes to emit - opt.total = opt.total || 256 * 1024 * 1024; + opt.total ||= 256 * 1024 * 1024; this._remaining = opt.total; // How variable to make the block sizes - opt.jitter = opt.jitter || 1024; + opt.jitter ||= 1024; this._opt = opt; diff --git a/test/js/node/test/sequential/test-vm-timeout-rethrow.js b/test/js/node/test/sequential/test-vm-timeout-rethrow.js new file mode 100644 index 0000000000..d4682fe975 --- /dev/null +++ b/test/js/node/test/sequential/test-vm-timeout-rethrow.js @@ -0,0 +1,44 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; +require('../common'); +const assert = require('assert'); +const vm = require('vm'); +const spawn = require('child_process').spawn; + +if (process.argv[2] === 'child') { + const code = 'while(true);'; + + const ctx = vm.createContext(); + + vm.runInContext(code, ctx, { timeout: 1 }); +} else { + const proc = spawn(process.execPath, process.argv.slice(1).concat('child')); + let err = ''; + proc.stderr.on('data', function(data) { + err += data; + }); + + process.on('exit', function() { + assert.match(err, /Script execution timed out after 1ms/); + }); +} diff --git a/test/js/node/timers/node-timers.test.ts b/test/js/node/timers/node-timers.test.ts index 0660a8fc80..d0c7d3d5e7 100644 --- a/test/js/node/timers/node-timers.test.ts +++ b/test/js/node/timers/node-timers.test.ts @@ -163,17 +163,8 @@ describe("clear", () => { const interval1 = setInterval(() => { throw new Error("interval not cleared"); }, 1); - // TODO: this may become wrong once https://github.com/nodejs/node/pull/57069 is merged - const timeout2 = setTimeout(() => { - throw new Error("timeout not cleared"); - }, 1); - const interval2 = setInterval(() => { - throw new Error("interval not cleared"); - }, 1); clearInterval(timeout1); clearTimeout(interval1); - clearImmediate(timeout2); - clearImmediate(interval2); }); it("interval/timeout do not affect immediates", async () => { diff --git a/test/js/node/vm/__snapshots__/vm-sourceUrl.test.ts.snap b/test/js/node/vm/__snapshots__/vm-sourceUrl.test.ts.snap index 82c5b1b753..d176c37e73 100644 --- a/test/js/node/vm/__snapshots__/vm-sourceUrl.test.ts.snap +++ b/test/js/node/vm/__snapshots__/vm-sourceUrl.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`can get sourceURL from eval inside node:vm 1`] = ` "evalmachine.:2 diff --git a/test/js/node/watch/fs.watchFile.test.ts b/test/js/node/watch/fs.watchFile.test.ts index ec7166259c..dc84364d92 100644 --- a/test/js/node/watch/fs.watchFile.test.ts +++ b/test/js/node/watch/fs.watchFile.test.ts @@ -1,4 +1,4 @@ -import { tempDirWithFiles } from "harness"; +import { isWindows, tempDirWithFiles } from "harness"; import fs from "node:fs"; import path from "path"; @@ -113,6 +113,18 @@ describe("fs.watchFile", () => { expect(typeof entries[0][0].mtimeMs === "bigint").toBe(true); }); + test.if(isWindows)("does not fire on atime-only update", async () => { + let called = false; + const file = path.join(testDir, "watch.txt"); + fs.watchFile(file, { interval: 50 }, () => { + called = true; + }); + fs.readFileSync(file); + await Bun.sleep(100); + fs.unwatchFile(file); + expect(called).toBe(false); + }); + test("StatWatcherScheduler stress test (1000 watchers with random times)", async () => { const EventEmitter = require("events"); let defaultMaxListeners = EventEmitter.defaultMaxListeners; diff --git a/test/js/node/zlib/leak.test.ts b/test/js/node/zlib/leak.test.ts index 7468cdb3fe..4150d833f1 100644 --- a/test/js/node/zlib/leak.test.ts +++ b/test/js/node/zlib/leak.test.ts @@ -6,7 +6,7 @@ import zlib from "node:zlib"; const input = Buffer.alloc(50000); for (let i = 0; i < input.length; i++) input[i] = Math.random(); -const upper = 1024 * 1024 * (isASAN ? 15 : 10); +const upper = 1024 * 1024 * (isASAN ? 
20 : 10); describe("zlib compression does not leak memory", () => { beforeAll(() => { diff --git a/test/js/sql/sql.test.ts b/test/js/sql/sql.test.ts index 4a15541e20..ba5b4f022b 100644 --- a/test/js/sql/sql.test.ts +++ b/test/js/sql/sql.test.ts @@ -2318,21 +2318,31 @@ if (isDockerEnabled()) { // ] // }) - // t('connect_timeout', { timeout: 20 }, async() => { - // const connect_timeout = 0.2 - // const server = net.createServer() - // server.listen() - // const sql = postgres({ port: server.address().port, host: '127.0.0.1', connect_timeout }) - // const start = Date.now() - // let end - // await sql`select 1`.catch((e) => { - // if (e.code !== 'CONNECT_TIMEOUT') - // throw e - // end = Date.now() - // }) - // server.close() - // return [connect_timeout, Math.floor((end - start) / 100) / 10] - // }) + test.each(["connect_timeout", "connectTimeout", "connectionTimeout", "connection_timeout"] as const)( + "connection timeout key %p throws", + async key => { + const server = net.createServer().listen(); + + const port = (server.address() as import("node:net").AddressInfo).port; + + const sql = postgres({ port, host: "127.0.0.1", [key]: 0.2 }); + + try { + await sql`select 1`; + throw new Error("should not reach"); + } catch (e) { + expect(e).toBeInstanceOf(Error); + expect(e.code).toBe("ERR_POSTGRES_CONNECTION_TIMEOUT"); + expect(e.message).toMatch(/Connection timed out after 200ms/); + } finally { + sql.close(); + server.close(); + } + }, + { + timeout: 1000, + }, + ); // t('connect_timeout throws proper error', async() => [ // 'CONNECT_TIMEOUT', diff --git a/test/js/third_party/grpc-js/fixtures/tonic-server/Cargo.toml b/test/js/third_party/grpc-js/fixtures/tonic-server/Cargo.toml new file mode 100644 index 0000000000..7f65079bb7 --- /dev/null +++ b/test/js/third_party/grpc-js/fixtures/tonic-server/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "server" +version = "0.1.0" +edition = "2021" + +[dependencies] +tonic = { version = "0.13.1", features = ["transport", "prost"] } +tokio = { version = "1", features = ["macros", "rt-multi-thread"] } +tokio-stream = "0.1.11" +prost = "0.13" +[build-dependencies] +tonic-build = "0.13.1" diff --git a/test/js/third_party/grpc-js/fixtures/tonic-server/build.rs b/test/js/third_party/grpc-js/fixtures/tonic-server/build.rs new file mode 100644 index 0000000000..fb515867e5 --- /dev/null +++ b/test/js/third_party/grpc-js/fixtures/tonic-server/build.rs @@ -0,0 +1,3 @@ +fn main() { + tonic_build::compile_protos("proto/helloworld.proto").unwrap(); +} diff --git a/test/js/third_party/grpc-js/fixtures/tonic-server/proto/helloworld.proto b/test/js/third_party/grpc-js/fixtures/tonic-server/proto/helloworld.proto new file mode 100644 index 0000000000..dc709267e4 --- /dev/null +++ b/test/js/third_party/grpc-js/fixtures/tonic-server/proto/helloworld.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package helloworld; + +service Greeter { + rpc SayHello (HelloRequest) returns (HelloReply); +} + +message HelloRequest { + string name = 1; +} + +message HelloReply { + string message = 1; +} \ No newline at end of file diff --git a/test/js/third_party/grpc-js/fixtures/tonic-server/src/main.rs b/test/js/third_party/grpc-js/fixtures/tonic-server/src/main.rs new file mode 100644 index 0000000000..271ca32d5b --- /dev/null +++ b/test/js/third_party/grpc-js/fixtures/tonic-server/src/main.rs @@ -0,0 +1,40 @@ +use tonic::{transport::Server, Request, Response, Status}; +use helloworld::greeter_server::{Greeter, GreeterServer}; +use helloworld::{HelloRequest, HelloReply}; +use 
tokio::net::TcpListener; + +pub mod helloworld { + tonic::include_proto!("helloworld"); +} + +#[derive(Default)] +pub struct MyGreeter {} + +#[tonic::async_trait] +impl Greeter for MyGreeter { + async fn say_hello( + &self, + request: Request<HelloRequest>, + ) -> Result<Response<HelloReply>, Status> { + let reply = HelloReply { + message: request.into_inner().name, + }; + Ok(Response::new(reply)) + } +} + +#[tokio::main] +async fn main() -> Result<(), Box<dyn std::error::Error>> { + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); + let addr = listener.local_addr().unwrap(); + + let greeter = MyGreeter::default(); + let mut server: Server = Server::builder(); + println!("Listening on {}", addr); + server.add_service(GreeterServer::new(greeter)) + .serve_with_incoming(tokio_stream::wrappers::TcpListenerStream::new(listener)) + .await?; + + + Ok(()) +} diff --git a/test/js/third_party/grpc-js/test-tonic.test.ts b/test/js/third_party/grpc-js/test-tonic.test.ts new file mode 100644 index 0000000000..ca390576bc --- /dev/null +++ b/test/js/third_party/grpc-js/test-tonic.test.ts @@ -0,0 +1,126 @@ +import grpc from "@grpc/grpc-js"; +import protoLoader from "@grpc/proto-loader"; +import { afterAll, beforeAll, describe, expect, test } from "bun:test"; +import { rmSync } from "fs"; +import { chmod, cp, mkdir } from "fs/promises"; +import { tmpdirSync } from "harness"; +import path, { join } from "path"; +import unzipper from "unzipper"; + +const protoVersion = "31.0"; + +const releases = { + "win32_x86_32": `https://github.com/protocolbuffers/protobuf/releases/download/v${protoVersion}/protoc-${protoVersion}-win32.zip`, + "win32_x86_64": `https://github.com/protocolbuffers/protobuf/releases/download/v${protoVersion}/protoc-${protoVersion}-win64.zip`, + "linux_x86_32": `https://github.com/protocolbuffers/protobuf/releases/download/v${protoVersion}/protoc-${protoVersion}-linux-x86_32.zip`, + "linux_x86_64": `https://github.com/protocolbuffers/protobuf/releases/download/v${protoVersion}/protoc-${protoVersion}-linux-x86_64.zip`, + "darwin_x86_64": `https://github.com/protocolbuffers/protobuf/releases/download/v${protoVersion}/protoc-${protoVersion}-osx-x86_64.zip`, + "darwin_arm64": `https://github.com/protocolbuffers/protobuf/releases/download/v${protoVersion}/protoc-${protoVersion}-osx-aarch_64.zip`, +}; + +const platform = process.platform; +const arch = process.arch === "arm64" ? "arm64" : process.arch === "x64" ? "x86_64" : "x86_32"; +const release = platform + "_" + arch; +const binPath = join("bin", platform === "win32" ?
"protoc.exe" : "protoc"); + +// Load proto +const packageDefinition = protoLoader.loadSync(join(import.meta.dir, "fixtures/tonic-server/proto/helloworld.proto"), { + keepCase: true, + longs: String, + enums: String, + defaults: true, + oneofs: true, +}); + +type Server = { address: string; kill: () => Promise }; + +const cargoBin = Bun.which("cargo") as string; +async function startServer(): Promise { + const tmpDir = tmpdirSync(); + await cp(join(import.meta.dir, "fixtures/tonic-server"), tmpDir, { recursive: true }); + const protocZip = await unzipper.Open.buffer(await fetch(releases[release]).then(res => res.bytes())); + + const protocPath = join(tmpDir, "protoc"); + await mkdir(protocPath, { recursive: true }); + await protocZip.extract({ path: protocPath }); + const protocExec = join(protocPath, binPath); + await chmod(protocExec, 0o755); + + const server = Bun.spawn([cargoBin, "run", "--quiet", path.join(tmpDir, "server")], { + cwd: tmpDir, + env: { + PROTOC: protocExec, + PATH: process.env.PATH, + CARGO_HOME: process.env.CARGO_HOME, + RUSTUP_HOME: process.env.RUSTUP_HOME, + }, + stdout: "pipe", + stdin: "ignore", + stderr: "inherit", + }); + + { + const { promise, reject, resolve } = Promise.withResolvers(); + const reader = server.stdout.getReader(); + const decoder = new TextDecoder(); + async function killServer() { + try { + server.kill(); + await server.exited; + rmSync(tmpDir, { recursive: true, force: true }); + } catch {} + } + while (true) { + const { done, value } = await reader.read(); + if (done) { + break; + } + const text = decoder.decode(value); + if (text.includes("Listening on")) { + const [_, address] = text.split("Listening on "); + resolve({ + address: address?.trim(), + kill: killServer, + }); + break; + } else { + await killServer(); + reject(new Error("Server not started")); + break; + } + } + return await promise; + } +} + +describe.skipIf(!cargoBin || !releases[release])("test tonic server", () => { + let server: Server; + + beforeAll(async () => { + server = await startServer(); + }); + + afterAll(() => { + server.kill(); + }); + + test("flow control should work in both directions", async () => { + const hello_proto = grpc.loadPackageDefinition(packageDefinition).helloworld; + + // Create client + const client = new hello_proto.Greeter(server.address, grpc.credentials.createInsecure()); + const payload = Buffer.alloc(1024 * 1024, "bun").toString(); + for (let i = 0; i < 20; i++) { + const { promise, reject, resolve } = Promise.withResolvers(); + // Call SayHello + client.SayHello({ name: payload }, (err, response) => { + if (err) reject(err); + else resolve(response.message); + }); + const result = await promise; + expect(result.length).toBe(payload.length); + expect(result).toBe(payload); + } + await client.close(); + }, 20_000); // debug can take some time +}); diff --git a/test/js/third_party/next-auth/next-auth.test.ts b/test/js/third_party/next-auth/next-auth.test.ts index 25397559c6..9ba6204e06 100644 --- a/test/js/third_party/next-auth/next-auth.test.ts +++ b/test/js/third_party/next-auth/next-auth.test.ts @@ -4,10 +4,11 @@ import { bunEnv, bunRun, runBunInstall, tmpdirSync } from "harness"; import { join } from "path"; describe("next-auth", () => { it("should be able to call server action multiple times using auth middleware #18977", async () => { - const testDir = tmpdirSync("next-auth"); + const testDir = tmpdirSync("next-auth-" + Date.now()); cpSync(join(import.meta.dir, "fixture"), testDir, { recursive: true, + force: true, filter: src => { if 
(src.includes("node_modules")) { return false; @@ -21,6 +22,7 @@ describe("next-auth", () => { await runBunInstall(bunEnv, testDir, { savesLockfile: false }); + console.log(testDir); const result = bunRun(join(testDir, "server.js"), { AUTH_SECRET: "I7Jiq12TSMlPlAzyVAT+HxYX7OQb/TTqIbfTTpr1rg8=", }); @@ -28,5 +30,5 @@ describe("next-auth", () => { expect(result.stdout).toBeDefined(); const lines = result.stdout?.split("\n") ?? []; expect(lines[lines.length - 1]).toMatch(/request sent/); - }, 30_000); + }, 90_000); }); diff --git a/test/js/third_party/rollup-v4/__snapshots__/rollup-v4.test.ts.snap b/test/js/third_party/rollup-v4/__snapshots__/rollup-v4.test.ts.snap index ef4df2b8df..390f4c3fa1 100644 --- a/test/js/third_party/rollup-v4/__snapshots__/rollup-v4.test.ts.snap +++ b/test/js/third_party/rollup-v4/__snapshots__/rollup-v4.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`it works 1`] = ` { diff --git a/test/js/web/console/__snapshots__/console-log.test.ts.snap b/test/js/web/console/__snapshots__/console-log.test.ts.snap index 45f884b607..a29a10db51 100644 --- a/test/js/web/console/__snapshots__/console-log.test.ts.snap +++ b/test/js/web/console/__snapshots__/console-log.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`console.group: console-group-error 1`] = ` "Warning log diff --git a/test/js/web/console/console-log-utf16.fixture.js b/test/js/web/console/console-log-utf16.fixture.js new file mode 100644 index 0000000000..5a4b2a99ff --- /dev/null +++ b/test/js/web/console/console-log-utf16.fixture.js @@ -0,0 +1,2 @@ +const text = Array(10000).fill("肉醬意大利粉").join("\n"); +console.log(text); diff --git a/test/js/web/console/console-log-utf16.test.ts b/test/js/web/console/console-log-utf16.test.ts new file mode 100644 index 0000000000..cf1d42f0a6 --- /dev/null +++ b/test/js/web/console/console-log-utf16.test.ts @@ -0,0 +1,22 @@ +import { expect, it } from "bun:test"; +import { bunEnv, bunExe } from "harness"; +import { join } from "node:path"; + +it("works with large utf-16 strings", async () => { + const filepath = join(import.meta.dir, "console-log-utf16.fixture.js").replaceAll("\\", "/"); + const proc = Bun.spawn({ + cmd: [bunExe(), filepath], + env: { ...bunEnv }, + stdio: ["inherit", "pipe", "pipe"], + }); + + const exitCode = await proc.exited; + const stdout = await new Response(proc.stdout).text(); + const stderr = await new Response(proc.stderr).text(); + expect(stderr).toBeEmpty(); + expect(exitCode).toBe(0); + + const expected = Array(10000).fill("肉醬意大利粉").join("\n"); + // Add the \n because `console.log` adds a newline + expect(stdout).toBe(expected + "\n"); +}); diff --git a/test/js/web/web-globals.test.js b/test/js/web/web-globals.test.js index 1270c0a810..551578d925 100644 --- a/test/js/web/web-globals.test.js +++ b/test/js/web/web-globals.test.js @@ -30,6 +30,7 @@ test("exists", () => { expect(typeof PerformanceResourceTiming !== "undefined").toBe(true); expect(typeof PerformanceServerTiming !== "undefined").toBe(true); expect(typeof PerformanceTiming !== "undefined").toBe(true); + expect(typeof Math.sumPrecise !== "undefined").toBe(true); }); const globalSetters = [ diff --git a/test/js/web/websocket/__snapshots__/error-event.test.ts.snap b/test/js/web/websocket/__snapshots__/error-event.test.ts.snap index 62c8d485e2..a8a8127ee1 100644 --- 
a/test/js/web/websocket/__snapshots__/error-event.test.ts.snap +++ b/test/js/web/websocket/__snapshots__/error-event.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`WebSocket error event snapshot: Snapshot snapshot 1`] = `ErrorEvent { type: "error", diff --git a/test/js/web/websocket/autobahn.test.ts b/test/js/web/websocket/autobahn.test.ts index 480908076b..bff979bdef 100644 --- a/test/js/web/websocket/autobahn.test.ts +++ b/test/js/web/websocket/autobahn.test.ts @@ -21,72 +21,78 @@ function isDockerEnabled(): boolean { } } -if (isDockerEnabled()) { - describe("autobahn", async () => { - const url = "ws://localhost:9002"; - const agent = encodeURIComponent("bun/1.0.0"); - let docker: child_process.ChildProcessWithoutNullStreams | null = null; - const { promise, resolve } = Promise.withResolvers(); - // we can exclude cases by adding them to the exclude-cases array - // "exclude-cases": [ - // "9.*" - // ], - const CWD = tempDirWithFiles("autobahn", { - "fuzzingserver.json": `{ +let docker: child_process.ChildProcess | null = null; +let url: string = ""; +const agent = encodeURIComponent("bun/1.0.0"); +async function load() { + if (process.env.BUN_AUTOBAHN_URL) { + url = process.env.BUN_AUTOBAHN_URL; + return true; + } + url = "ws://localhost:9002"; + + const { promise, resolve } = Promise.withResolvers(); + // we can exclude cases by adding them to the exclude-cases array + // "exclude-cases": [ + // "9.*" + // ], + const CWD = tempDirWithFiles("autobahn", { + "fuzzingserver.json": `{ "url": "ws://127.0.0.1:9002", "outdir": "./", "cases": ["*"], "exclude-agent-cases": {} }`, - "index.json": "{}", - }); + "index.json": "{}", + }); - docker = child_process.spawn( - dockerCLI, - [ - "run", - "-t", - "--rm", - "-v", - `${CWD}:/config`, - "-v", - `${CWD}:/reports`, - "-p", - "9002:9002", - "--name", - "fuzzingserver", - "crossbario/autobahn-testsuite", - ], - { - cwd: CWD, - stdout: "pipe", - stderr: "pipe", - }, - ) as child_process.ChildProcessWithoutNullStreams; + docker = child_process.spawn( + dockerCLI, + [ + "run", + "-t", + "--rm", + "-v", + `${CWD}:/config`, + "-v", + `${CWD}:/reports`, + "-p", + "9002:9002", + "--platform", + "linux/amd64", + "--name", + "fuzzingserver", + "crossbario/autobahn-testsuite", + ], + { + cwd: CWD, + stdio: ["ignore", "pipe", "pipe"], + }, + ); - let out = ""; - let pending = true; - docker.stdout.on("data", data => { - out += data; - if (pending) { - if (out.indexOf("Autobahn WebSocket") !== -1) { - pending = false; - resolve(true); - } - } - }); - - docker.on("close", code => { - if (pending) { + let out = ""; + let pending = true; + docker.stdout?.on("data", data => { + out += data; + if (pending) { + if (out.indexOf("Autobahn WebSocket") !== -1) { pending = false; - resolve(false); + resolve(true); } - }); - const cases = await promise; - if (!cases) { - throw new Error("Autobahn WebSocket not detected"); } + }); + docker.on("close", () => { + if (pending) { + pending = false; + resolve(false); + } + }); + return await promise; +} + +if (isDockerEnabled() && (await load())) { + describe("autobahn", async () => { function getCaseStatus(testID: number) { return new Promise((resolve, reject) => { const socket = new WebSocket(`${url}/getCaseStatus?case=${testID}&agent=${agent}`); @@ -108,7 +114,7 @@ if (isDockerEnabled()) { socket.addEventListener("message", event => { count = parseInt(event.data as string, 10); }); - socket.addEventListener("close", event => { + 
socket.addEventListener("close", () => { if (!count) { reject("No test count received"); } @@ -139,7 +145,7 @@ if (isDockerEnabled()) { socket.addEventListener("message", event => { socket.send(event.data); }); - socket.addEventListener("close", event => { + socket.addEventListener("close", () => { resolve(undefined); }); socket.addEventListener("error", event => { @@ -154,12 +160,11 @@ if (isDockerEnabled()) { }); for (let i = 1; i <= count; i++) { const info = (await getCaseInfo(i)) as { id: string; description: string }; - const test = parseInt(info.id.split(".")[0]) > 10 ? it.todo : it; - // tests > 10 are compression tests, which are not supported yet - test(`Running test case ${info.id}: ${info.description}`, async () => { + + it(`Running test case ${info.id}: ${info.description}`, async () => { await runTestCase(i); const result = (await getCaseStatus(i)) as { behavior: string }; - expect(["OK", "INFORMATIONAL", "NON-STRICT"]).toContain(result.behavior); + expect(result.behavior).toBeOneOf(["OK", "INFORMATIONAL", "NON-STRICT"]); }); } diff --git a/test/js/web/websocket/websocket-permessage-deflate-edge-cases.test.ts b/test/js/web/websocket/websocket-permessage-deflate-edge-cases.test.ts new file mode 100644 index 0000000000..934f2f0e3b --- /dev/null +++ b/test/js/web/websocket/websocket-permessage-deflate-edge-cases.test.ts @@ -0,0 +1,200 @@ +import { serve } from "bun"; +import { expect, test } from "bun:test"; + +// Test compressed continuation frames +test("WebSocket client handles compressed continuation frames correctly", async () => { + using server = serve({ + port: 0, + fetch(req, server) { + if (server.upgrade(req)) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Send a message that should be compressed + const largeMessage = "A".repeat(100000); // 100KB of A's + const result = ws.send(largeMessage, true); + if (result <= 0) { + throw new Error(`Failed to send large message, result: ${result}`); + } + }, + message(ws, message) { + // Echo back + const result = ws.send(message, true); + if (result <= 0) { + throw new Error(`Failed to echo message, result: ${result}`); + } + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + const { promise: openPromise, resolve: resolveOpen, reject: rejectOpen } = Promise.withResolvers(); + client.onopen = () => resolveOpen(); + client.onerror = error => rejectOpen(error); + client.onclose = event => { + if (!event.wasClean) { + rejectOpen(new Error(`WebSocket closed: code=${event.code}, reason=${event.reason}`)); + } + }; + + await openPromise; + expect(client.extensions).toContain("permessage-deflate"); + + const { promise: messagePromise, resolve: resolveMessage } = Promise.withResolvers(); + client.onmessage = event => resolveMessage(event.data); + + const receivedMessage = await messagePromise; + expect(receivedMessage).toBe("A".repeat(100000)); + + client.close(); + server.stop(); +}); + +// Test small message compression threshold +test("WebSocket client doesn't compress small messages", async () => { + let serverReceivedCompressed = false; + + using server = serve({ + port: 0, + fetch(req, server) { + if (server.upgrade(req)) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Track if messages are compressed by checking frame headers + }, + message(ws, message) { + // Small messages should not be compressed (< 860 bytes) + const 
result = ws.send("OK", true); + if (result <= 0) { + throw new Error(`Failed to send OK response, result: ${result}`); + } + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + const { promise: openPromise, resolve: resolveOpen, reject: rejectOpen } = Promise.withResolvers(); + client.onopen = () => resolveOpen(); + client.onerror = error => rejectOpen(error); + + await openPromise; + + const { promise: messagePromise, resolve: resolveMessage } = Promise.withResolvers(); + client.onmessage = event => resolveMessage(event.data); + + // Send a small message (should not be compressed) + client.send("Hello"); + + const receivedMessage = await messagePromise; + expect(receivedMessage).toBe("OK"); + + client.close(); + server.stop(); +}); + +// Test message size limits +test("WebSocket client rejects messages exceeding size limit", async () => { + // This test would require a custom server that sends extremely large compressed data + // For now, we'll test that normal large messages work + using server = serve({ + port: 0, + fetch(req, server) { + if (server.upgrade(req)) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Send a 1MB message (under the 100MB limit) with more realistic data + // Using varied content to avoid triggering compression bomb detection + const size = 1 * 1024 * 1024; + const pattern = "The quick brown fox jumps over the lazy dog. "; + const buffer = Buffer.alloc(size); + buffer.fill(pattern); + const result = ws.send(buffer, true); + if (result <= 0) { + throw new Error(`Failed to send large buffer, result: ${result}`); + } + }, + message(ws, message) {}, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + const { promise: openPromise, resolve: resolveOpen, reject: rejectOpen } = Promise.withResolvers(); + client.onopen = () => resolveOpen(); + client.onerror = error => rejectOpen(error); + + const { promise: messagePromise, resolve: resolveMessage } = Promise.withResolvers(); + client.onmessage = event => resolveMessage(event.data); + + await openPromise; + const receivedMessage = await messagePromise; + expect(receivedMessage.length).toBe(1 * 1024 * 1024); + + client.close(); + server.stop(); +}); + +// Test compression error handling +test("WebSocket client handles compression errors gracefully", async () => { + using server = serve({ + port: 0, + fetch(req, server) { + if (server.upgrade(req)) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Send a message + const result = ws.send("Test message", true); + if (result <= 0) { + throw new Error(`Failed to send test message, result: ${result}`); + } + }, + message(ws, message) { + // Echo back with compression + const result = ws.send(message, true); + if (result <= 0) { + throw new Error(`Failed to echo message in compression test, result: ${result}`); + } + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + const { promise: openPromise, resolve: resolveOpen, reject: rejectOpen } = Promise.withResolvers(); + client.onopen = () => resolveOpen(); + client.onerror = error => rejectOpen(error); + + const { promise: messagePromise, resolve: resolveMessage } = Promise.withResolvers(); + client.onmessage = event => resolveMessage(event.data); + + await openPromise; + const receivedMessage = await messagePromise; + expect(receivedMessage).toBe("Test message"); + + // 
Send a message to test compression + client.send(Buffer.alloc(1000, "A").toString()); // Should be compressed + + client.close(); + server.stop(); +}); diff --git a/test/js/web/websocket/websocket-permessage-deflate-simple.test.ts b/test/js/web/websocket/websocket-permessage-deflate-simple.test.ts new file mode 100644 index 0000000000..473b54f7a4 --- /dev/null +++ b/test/js/web/websocket/websocket-permessage-deflate-simple.test.ts @@ -0,0 +1,100 @@ +import { serve } from "bun"; +import { expect, test } from "bun:test"; + +// Simple test to verify basic permessage-deflate functionality +test("WebSocket client basic permessage-deflate support", async () => { + using server = serve({ + port: 0, + fetch(req, server) { + // Upgrade to WebSocket with permessage-deflate + if (server.upgrade(req)) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + console.log("Server: WebSocket opened"); + }, + message(ws, message) { + // Echo the message back + ws.send(typeof message === "string" ? message : message.toString(), true); + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + await new Promise((resolve, reject) => { + client.onopen = () => { + console.log("Client connected"); + console.log("Client extensions:", client.extensions); + resolve(); + }; + client.onerror = reject; + }); + + // Verify that extensions property contains permessage-deflate + expect(client.extensions).toContain("permessage-deflate"); + + // Test sending and receiving a message + const testMessage = "Hello, WebSocket with compression!"; + + const messagePromise = new Promise(resolve => { + client.onmessage = event => { + resolve(event.data); + }; + }); + + client.send(testMessage); + + const receivedMessage = await messagePromise; + expect(receivedMessage).toBe(testMessage); + + client.close(); + server.stop(); +}); + +// Test that compression actually works for large messages +test("WebSocket permessage-deflate compresses large messages", async () => { + using server = serve({ + port: 0, + fetch(req, server) { + if (server.upgrade(req)) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Send a large repetitive message that should compress well + const largeMessage = "A".repeat(10000); + ws.send(largeMessage, true); + }, + message(ws, message) { + // Not used in this test + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + const messagePromise = new Promise(resolve => { + client.onmessage = event => { + resolve(event.data); + }; + }); + + await new Promise((resolve, reject) => { + client.onopen = () => resolve(); + client.onerror = reject; + }); + + const receivedMessage = await messagePromise; + expect(receivedMessage).toBe("A".repeat(10000)); + + client.close(); + server.stop(); +}); diff --git a/test/js/web/websocket/websocket-permessage-deflate.test.ts b/test/js/web/websocket/websocket-permessage-deflate.test.ts new file mode 100644 index 0000000000..be4d618cf9 --- /dev/null +++ b/test/js/web/websocket/websocket-permessage-deflate.test.ts @@ -0,0 +1,250 @@ +import { serve } from "bun"; +import { expect, test } from "bun:test"; + +test("WebSocket client negotiates permessage-deflate", async () => { + let serverReceivedExtensions = ""; + let serverReceivedMessage = ""; + + using server = serve({ + port: 0, + fetch(req, server) { + if (server.upgrade(req)) { + return; + } + return new Response("Not 
found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Store the headers from the upgrade request + // For now we'll check the extensions after connection + }, + message(ws, message) { + serverReceivedMessage = typeof message === "string" ? message : message.toString(); + // Echo back the message + ws.send(message, true); + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + await new Promise((resolve, reject) => { + client.onopen = resolve; + client.onerror = reject; + }); + + // Check that the client negotiated the extension + // Since we can't easily access request headers in Bun's server, we'll check client.extensions + expect(client.extensions).toContain("permessage-deflate"); + + // Test sending and receiving compressed messages + const testMessage = "Hello, this is a test message that should be compressed!".repeat(10); + + const messagePromise = new Promise(resolve => { + client.onmessage = event => { + resolve(event.data); + }; + }); + + client.send(testMessage); + + const receivedMessage = await messagePromise; + expect(receivedMessage).toBe(testMessage); + + client.close(); + server.stop(); +}); + +test("WebSocket client handles compressed text messages", async () => { + const messages: string[] = []; + + using server = serve({ + port: 0, + fetch(req, server) { + if (server.upgrade(req)) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Send various text messages + ws.send("Short message", true); + ws.send("A".repeat(1000), true); // Repetitive message that compresses well + ws.send("Random text with unicode: 你好世界 🌍", true); + }, + message(ws, message) { + // Required by the type but not used in this test + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + client.onmessage = event => { + messages.push(event.data); + }; + + await new Promise(resolve => { + client.onopen = resolve; + }); + + await new Promise(resolve => setTimeout(resolve, 100)); + + expect(messages).toHaveLength(3); + expect(messages[0]).toBe("Short message"); + expect(messages[1]).toBe("A".repeat(1000)); + expect(messages[2]).toBe("Random text with unicode: 你好世界 🌍"); + + client.close(); + server.stop(); +}); + +test("WebSocket client handles compressed binary messages", async () => { + const messages: ArrayBuffer[] = []; + + using server = serve({ + port: 0, + fetch(req, server) { + if ( + server.upgrade(req, { + headers: { + "Sec-WebSocket-Extensions": "permessage-deflate", + }, + }) + ) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Send binary data + const buffer1 = new Uint8Array([1, 2, 3, 4, 5]); + const buffer2 = new Uint8Array(1000).fill(0xff); // Repetitive binary data + + ws.send(buffer1); + ws.send(buffer2); + }, + message(ws, message) { + // Required by the type but not used in this test + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + client.binaryType = "arraybuffer"; + client.onmessage = event => { + messages.push(event.data); + }; + + await new Promise(resolve => { + client.onopen = resolve; + }); + + await new Promise(resolve => setTimeout(resolve, 100)); + + expect(messages).toHaveLength(2); + expect(new Uint8Array(messages[0])).toEqual(new Uint8Array([1, 2, 3, 4, 5])); + expect(new Uint8Array(messages[1]).every(b => b === 0xff)).toBe(true); + expect(messages[1].byteLength).toBe(1000); + + 
client.close(); + server.stop(); +}); + +test("WebSocket client handles fragmented compressed messages", async () => { + using server = serve({ + port: 0, + fetch(req, server) { + if (server.upgrade(req)) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Send a large message + const largeMessage = "X".repeat(100000); // 100KB message + ws.send(largeMessage, true); + }, + message(ws, message) { + // Required by the type but not used in this test + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + const messagePromise = new Promise(resolve => { + client.onmessage = event => { + resolve(event.data); + }; + }); + + await new Promise(resolve => { + client.onopen = resolve; + }); + + const receivedMessage = await messagePromise; + expect(receivedMessage).toBe("X".repeat(100000)); + + client.close(); + server.stop(); +}); + +test("WebSocket client handles context takeover options", async () => { + const messages: string[] = []; + + using server = serve({ + port: 0, + fetch(req, server) { + if (server.upgrade(req)) { + return; + } + return new Response("Not found", { status: 404 }); + }, + websocket: { + perMessageDeflate: true, + open(ws) { + // Send multiple messages - with no context takeover, each should be compressed independently + ws.send("Message 1: AAAAAAAAAA", true); + ws.send("Message 2: AAAAAAAAAA", true); + ws.send("Message 3: BBBBBBBBBB", true); + }, + message(ws, message) { + // Required by the type but not used in this test + }, + }, + }); + + const client = new WebSocket(`ws://localhost:${server.port}`); + + client.onmessage = event => { + messages.push(event.data); + }; + + await new Promise(resolve => { + client.onopen = resolve; + }); + + await new Promise(resolve => setTimeout(resolve, 100)); + + expect(messages).toHaveLength(3); + expect(messages[0]).toBe("Message 1: AAAAAAAAAA"); + expect(messages[1]).toBe("Message 2: AAAAAAAAAA"); + expect(messages[2]).toBe("Message 3: BBBBBBBBBB"); + + client.close(); + server.stop(); +}); + +test.skip("WebSocket client rejects compressed control frames", async () => { + // This test would require a custom server that sends invalid compressed control frames + // Skip for now as it requires low-level WebSocket frame manipulation +}); diff --git a/test/js/web/websocket/websocket.test.js b/test/js/web/websocket/websocket.test.js index d1d46607e5..4fbcd25f3e 100644 --- a/test/js/web/websocket/websocket.test.js +++ b/test/js/web/websocket/websocket.test.js @@ -143,7 +143,7 @@ describe("WebSocket", () => { }; return promise; } - const url = `wss://127.0.0.1:${server.address.port}`; + const url = server.url.href; { // by default rejectUnauthorized is true const client = new WebSocket(url); diff --git a/test/napi/napi-app/bun.lock b/test/napi/napi-app/bun.lock index ac82ef2d88..605f6d4777 100644 --- a/test/napi/napi-app/bun.lock +++ b/test/napi/napi-app/bun.lock @@ -3,30 +3,28 @@ "workspaces": { "": { "name": "napi-buffer-bug", - "dependencies": { - "node-api-headers": "1.5.0", - }, "devDependencies": { "node-addon-api": "^8.0.0", - "node-gyp": "^10.1.0", + "node-api-headers": "1.5.0", + "node-gyp": "^11.2.0", }, }, }, "packages": { "@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, 
"sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], - "@npmcli/agent": ["@npmcli/agent@2.2.0", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.1" } }, "sha512-2yThA1Es98orMkpSLVqlDZAMPK3jHJhifP2gnNUdk1754uZ8yI5c+ulCoVG+WlntQA6MzhrURMXjSd9Z7dJ2/Q=="], + "@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="], - "@npmcli/fs": ["@npmcli/fs@3.1.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-7kZUAaLscfgbwBQRbvdMYaZOWyMEcPTH/tJjnyAWJ/dvvs9Ef+CERx/qJb9GExJpl1qipaDGn7KqHnFGGixd0w=="], + "@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="], + + "@npmcli/fs": ["@npmcli/fs@4.0.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q=="], "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], - "abbrev": ["abbrev@2.0.0", "", {}, "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ=="], + "abbrev": ["abbrev@3.0.1", "", {}, "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="], "agent-base": ["agent-base@7.1.0", "", { "dependencies": { "debug": "^4.3.4" } }, "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg=="], - "aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="], - "ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="], "ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], @@ -35,11 +33,9 @@ "brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - "cacache": ["cacache@18.0.0", "", { "dependencies": { "@npmcli/fs": "^3.1.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^4.0.0", "ssri": "^10.0.0", "tar": "^6.1.11", "unique-filename": "^3.0.0" } }, "sha512-I7mVOPl3PUCeRub1U8YoGz2Lqv9WOBpobZ8RyWFXmReuILz+3OAyTa5oH3QPdtKZD7N0Yk00aLfzn0qvp8dZ1w=="], + "cacache": ["cacache@19.0.1", "", { "dependencies": { "@npmcli/fs": "^4.0.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^7.0.2", "ssri": "^12.0.0", "tar": "^7.4.3", "unique-filename": "^4.0.0" } }, "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ=="], - "chownr": 
["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="], - - "clean-stack": ["clean-stack@2.2.0", "", {}, "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A=="], + "chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], @@ -61,9 +57,11 @@ "exponential-backoff": ["exponential-backoff@3.1.1", "", {}, "sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw=="], + "fdir": ["fdir@6.4.6", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w=="], + "foreground-child": ["foreground-child@3.1.1", "", { "dependencies": { "cross-spawn": "^7.0.0", "signal-exit": "^4.0.1" } }, "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg=="], - "fs-minipass": ["fs-minipass@2.1.0", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg=="], + "fs-minipass": ["fs-minipass@3.0.3", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw=="], "glob": ["glob@10.3.10", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^2.3.5", "minimatch": "^9.0.1", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", "path-scurry": "^1.10.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g=="], @@ -79,29 +77,27 @@ "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], - "indent-string": ["indent-string@4.0.0", "", {}, "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg=="], - - "ip": ["ip@2.0.0", "", {}, "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ=="], + "ip-address": ["ip-address@9.0.5", "", { "dependencies": { "jsbn": "1.1.0", "sprintf-js": "^1.1.3" } }, "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g=="], "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="], - "is-lambda": ["is-lambda@1.0.1", "", {}, "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ=="], - "isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], "jackspeak": ["jackspeak@2.3.6", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ=="], + "jsbn": ["jsbn@1.1.0", "", {}, "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A=="], + "lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, 
"sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="], - "make-fetch-happen": ["make-fetch-happen@13.0.0", "", { "dependencies": { "@npmcli/agent": "^2.0.0", "cacache": "^18.0.0", "http-cache-semantics": "^4.1.1", "is-lambda": "^1.0.1", "minipass": "^7.0.2", "minipass-fetch": "^3.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", "promise-retry": "^2.0.1", "ssri": "^10.0.0" } }, "sha512-7ThobcL8brtGo9CavByQrQi+23aIfgYU++wg4B87AIS8Rb2ZBt/MEaDqzA00Xwv/jUjAjYkLHjVolYuTLKda2A=="], + "make-fetch-happen": ["make-fetch-happen@14.0.3", "", { "dependencies": { "@npmcli/agent": "^3.0.0", "cacache": "^19.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "ssri": "^12.0.0" } }, "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ=="], "minimatch": ["minimatch@9.0.3", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg=="], - "minipass": ["minipass@7.0.4", "", {}, "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ=="], + "minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="], - "minipass-collect": ["minipass-collect@1.0.2", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA=="], + "minipass-collect": ["minipass-collect@2.0.1", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw=="], - "minipass-fetch": ["minipass-fetch@3.0.4", "", { "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^2.1.2" }, "optionalDependencies": { "encoding": "^0.1.13" } }, "sha512-jHAqnA728uUpIaFm7NWsCnqKT6UqZz7GcI/bDpPATuwYyKwJwW0remxSCxUlKiEty+eopHGa3oc8WxgQ1FFJqg=="], + "minipass-fetch": ["minipass-fetch@4.0.1", "", { "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^3.0.1" }, "optionalDependencies": { "encoding": "^0.1.13" } }, "sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ=="], "minipass-flush": ["minipass-flush@1.0.5", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw=="], @@ -109,29 +105,31 @@ "minipass-sized": ["minipass-sized@1.0.3", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g=="], - "minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="], + "minizlib": ["minizlib@3.0.2", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA=="], - "mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="], + "mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, 
"sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], "ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], - "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], + "negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], "node-addon-api": ["node-addon-api@8.0.0", "", {}, "sha512-ipO7rsHEBqa9STO5C5T10fj732ml+5kLN1cAG8/jdHd56ldQeGj3Q7+scUS+VHK/qy1zLEwC4wMK5+yM0btPvw=="], "node-api-headers": ["node-api-headers@1.5.0", "", {}, "sha512-Yi/FgnN8IU/Cd6KeLxyHkylBUvDTsSScT0Tna2zTrz8klmc8qF2ppj6Q1LHsmOueJWhigQwR4cO2p0XBGW5IaQ=="], - "node-gyp": ["node-gyp@10.1.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "glob": "^10.3.10", "graceful-fs": "^4.2.6", "make-fetch-happen": "^13.0.0", "nopt": "^7.0.0", "proc-log": "^3.0.0", "semver": "^7.3.5", "tar": "^6.1.2", "which": "^4.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-B4J5M1cABxPc5PwfjhbV5hoy2DP9p8lFXASnEN6hugXOa61416tnTZ29x9sSwAd0o99XNIcpvDDy1swAExsVKA=="], + "node-gyp": ["node-gyp@11.2.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", "make-fetch-happen": "^14.0.3", "nopt": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "tar": "^7.4.3", "tinyglobby": "^0.2.12", "which": "^5.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-T0S1zqskVUSxcsSTkAsLc7xCycrRYmtDHadDinzocrThjyQCn5kMlEBSj6H4qDbgsIOSLmmlRIeb0lZXj+UArA=="], - "nopt": ["nopt@7.2.0", "", { "dependencies": { "abbrev": "^2.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-CVDtwCdhYIvnAzFoJ6NJ6dX3oga9/HyciQDnG1vQDjSLMeKLJ4A93ZqYKDrgYSr1FBY5/hMYC+2VCi24pgpkGA=="], + "nopt": ["nopt@8.1.0", "", { "dependencies": { "abbrev": "^3.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A=="], - "p-map": ["p-map@4.0.0", "", { "dependencies": { "aggregate-error": "^3.0.0" } }, "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ=="], + "p-map": ["p-map@7.0.3", "", {}, "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA=="], "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], "path-scurry": ["path-scurry@1.10.1", "", { "dependencies": { "lru-cache": "^9.1.1 || ^10.0.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ=="], - "proc-log": ["proc-log@3.0.0", "", {}, "sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A=="], + "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], + + "proc-log": ["proc-log@5.0.0", "", {}, "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ=="], "promise-retry": ["promise-retry@2.0.1", "", { "dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" } }, "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g=="], @@ -149,11 +147,13 @@ "smart-buffer": ["smart-buffer@4.2.0", "", {}, 
"sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg=="], - "socks": ["socks@2.7.1", "", { "dependencies": { "ip": "^2.0.0", "smart-buffer": "^4.2.0" } }, "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ=="], + "socks": ["socks@2.8.5", "", { "dependencies": { "ip-address": "^9.0.5", "smart-buffer": "^4.2.0" } }, "sha512-iF+tNDQla22geJdTyJB1wM/qrX9DMRwWrciEPwWLPRWAUEM8sQiyxgckLxWT1f7+9VabJS0jTGGr4QgBuvi6Ww=="], - "socks-proxy-agent": ["socks-proxy-agent@8.0.2", "", { "dependencies": { "agent-base": "^7.0.2", "debug": "^4.3.4", "socks": "^2.7.1" } }, "sha512-8zuqoLv1aP/66PHF5TqwJ7Czm3Yv32urJQHrVyhD7mmA6d61Zv8cIXQYPTWwmg6qlupnPvs/QKDmfa4P/qct2g=="], + "socks-proxy-agent": ["socks-proxy-agent@8.0.5", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "^4.3.4", "socks": "^2.8.3" } }, "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw=="], - "ssri": ["ssri@10.0.5", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-bSf16tAFkGeRlUNDjXu8FzaMQt6g2HZJrun7mtMbIPOddxt3GLMSz5VWUWcqTJUPfLEaDIepGxv+bYQW49596A=="], + "sprintf-js": ["sprintf-js@1.1.3", "", {}, "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA=="], + + "ssri": ["ssri@12.0.0", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ=="], "string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], @@ -163,31 +163,33 @@ "strip-ansi-cjs": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - "tar": ["tar@6.2.0", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ=="], + "tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], - "unique-filename": ["unique-filename@3.0.0", "", { "dependencies": { "unique-slug": "^4.0.0" } }, "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g=="], + "tinyglobby": ["tinyglobby@0.2.14", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ=="], - "unique-slug": ["unique-slug@4.0.0", "", { "dependencies": { "imurmurhash": "^0.1.4" } }, "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ=="], + "unique-filename": ["unique-filename@4.0.0", "", { "dependencies": { "unique-slug": "^5.0.0" } }, "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ=="], - "which": ["which@4.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg=="], + "unique-slug": 
["unique-slug@5.0.0", "", { "dependencies": { "imurmurhash": "^0.1.4" } }, "sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg=="], + + "which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], "wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], "wrap-ansi-cjs": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], - "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], "@npmcli/agent/lru-cache": ["lru-cache@10.0.2", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-Yj9mA8fPiVgOUpByoTZO5pNrcl5Yk37FcSHsUINpAsaBIEZIuqcCclDZJCVxqQShDsmYX8QG63svJiTbOATZwg=="], - "cacache/fs-minipass": ["fs-minipass@3.0.3", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw=="], - "cacache/lru-cache": ["lru-cache@10.0.2", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-Yj9mA8fPiVgOUpByoTZO5pNrcl5Yk37FcSHsUINpAsaBIEZIuqcCclDZJCVxqQShDsmYX8QG63svJiTbOATZwg=="], "cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], - "fs-minipass/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], + "fs-minipass/minipass": ["minipass@7.0.4", "", {}, "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ=="], - "minipass-collect/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], + "glob/minipass": ["minipass@7.0.4", "", {}, "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ=="], + + "lru-cache/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], "minipass-flush/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], @@ -195,18 +197,18 @@ "minipass-sized/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - "minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - "path-scurry/lru-cache": ["lru-cache@10.0.2", "", { "dependencies": { "semver": "^7.3.5" } }, 
"sha512-Yj9mA8fPiVgOUpByoTZO5pNrcl5Yk37FcSHsUINpAsaBIEZIuqcCclDZJCVxqQShDsmYX8QG63svJiTbOATZwg=="], + "path-scurry/minipass": ["minipass@7.0.4", "", {}, "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ=="], + + "socks-proxy-agent/agent-base": ["agent-base@7.1.3", "", {}, "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw=="], + "string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], "string-width-cjs/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], "strip-ansi-cjs/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - "tar/minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="], - "wrap-ansi-cjs/ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], "wrap-ansi-cjs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], @@ -215,6 +217,12 @@ "cross-spawn/which/isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + "minipass-flush/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + + "minipass-pipeline/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + + "minipass-sized/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "string-width-cjs/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], "wrap-ansi-cjs/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], diff --git a/test/napi/napi-app/class_test.cpp b/test/napi/napi-app/class_test.cpp index 57dd62ea89..deffdda2a4 100644 --- a/test/napi/napi-app/class_test.cpp +++ b/test/napi/napi-app/class_test.cpp @@ -164,8 +164,53 @@ static napi_value get_class_with_constructor(const Napi::CallbackInfo &info) { return napi_class; } +static napi_value test_constructor_with_no_prototype(const Napi::CallbackInfo &info) { + // This test verifies that Reflect.construct with a newTarget that has no prototype + // property doesn't crash. This was a bug where jsDynamicCast was called on a JSValue + // of 0 when the prototype property didn't exist. 
+ + napi_env env = info.Env(); + + // Get the NapiClass constructor + napi_value napi_class = get_class_with_constructor(info); + + // Create a newTarget object with no prototype property + napi_value new_target; + NODE_API_CALL(env, napi_create_object(env, &new_target)); + + // Call Reflect.construct(NapiClass, [], newTarget) + napi_value global; + NODE_API_CALL(env, napi_get_global(env, &global)); + + napi_value reflect; + NODE_API_CALL(env, napi_get_named_property(env, global, "Reflect", &reflect)); + + napi_value construct_fn; + NODE_API_CALL(env, napi_get_named_property(env, reflect, "construct", &construct_fn)); + + napi_value empty_array; + NODE_API_CALL(env, napi_create_array_with_length(env, 0, &empty_array)); + + napi_value args[3] = { napi_class, empty_array, new_target }; + napi_value result; + + // This should not crash - previously it would crash when trying to access + // the prototype property of newTarget + napi_status status = napi_call_function(env, reflect, construct_fn, 3, args, &result); + + if (status == napi_ok) { + return Napi::String::New(env, "success - no crash"); + } else { + // If there was an error, return it + const napi_extended_error_info* error_info; + napi_get_last_error_info(env, &error_info); + return Napi::String::New(env, error_info->error_message ? error_info->error_message : "error"); + } +} + void register_class_test(Napi::Env env, Napi::Object exports) { REGISTER_FUNCTION(env, exports, get_class_with_constructor); + REGISTER_FUNCTION(env, exports, test_constructor_with_no_prototype); } } // namespace napitests diff --git a/test/napi/napi-app/get_string_tests.cpp b/test/napi/napi-app/get_string_tests.cpp index 6fb7d8e96c..5020191612 100644 --- a/test/napi/napi-app/get_string_tests.cpp +++ b/test/napi/napi-app/get_string_tests.cpp @@ -19,6 +19,10 @@ test_get_value_string_any_encoding(const Napi::CallbackInfo &info) { std::array buf; napi_value string = info[0]; +#ifndef _WIN32 + BlockingStdoutScope stdout_scope; +#endif + size_t full_length; NODE_API_CALL(env, get_value_string_fn(env, string, nullptr, 0, &full_length)); diff --git a/test/napi/napi-app/module.js b/test/napi/napi-app/module.js index 1488404b70..9b6002ab02 100644 --- a/test/napi/napi-app/module.js +++ b/test/napi/napi-app/module.js @@ -399,6 +399,51 @@ nativeTests.test_reflect_construct_napi_class = () => { console.log("reflect constructed data =", instance.getData?.()); }; +nativeTests.test_reflect_construct_no_prototype_crash = () => { + // This test verifies the fix for jsDynamicCast being called on JSValue(0) + // when a NAPI class constructor is called via Reflect.construct with a + // newTarget that has no prototype property. 
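+ // + // Three newTarget shapes are exercised below: a constructor whose prototype + // property was deleted, a plain constructor as a control, and a Proxy whose + // "prototype" trap returns undefined.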
+ + const NapiClass = nativeTests.get_class_with_constructor(); + + // Test 1: Constructor function with deleted prototype property + // This case should work without crashing + function ConstructorWithoutPrototype() {} + delete ConstructorWithoutPrototype.prototype; + + try { + const instance1 = Reflect.construct(NapiClass, [], ConstructorWithoutPrototype); + console.log("constructor without prototype: success - no crash"); + } catch (e) { + console.log("constructor without prototype error:", e.message); + } + + // Test 2: Regular constructor (control test) + // This should always work + function NormalConstructor() {} + + try { + const instance2 = Reflect.construct(NapiClass, [], NormalConstructor); + console.log("normal constructor: success - no crash"); + } catch (e) { + console.log("normal constructor error:", e.message); + } + + // Test 3: Reflect.construct with Proxy newTarget (prototype returns undefined) + function ProxyObject() {} + + const proxyTarget = new Proxy(ProxyObject, { + get(target, prop) { + if (prop === "prototype") { + return undefined; + } + return target[prop]; + }, + }); + const instance3 = Reflect.construct(NapiClass, [], proxyTarget); + console.log("✓ Success - no crash!"); +}; + nativeTests.test_napi_wrap = () => { const values = [ {}, diff --git a/test/napi/napi-app/package.json b/test/napi/napi-app/package.json index ebc48bd9e2..c3a3ff5a6d 100644 --- a/test/napi/napi-app/package.json +++ b/test/napi/napi-app/package.json @@ -3,12 +3,12 @@ "version": "1.0.0", "gypfile": true, "scripts": { - "install": "node-gyp rebuild --debug", - "build": "node-gyp rebuild --debug", + "install": "node-gyp rebuild --debug -j max", + "build": "node-gyp rebuild --debug -j max", "clean": "node-gyp clean" }, "devDependencies": { - "node-gyp": "^10.1.0", + "node-gyp": "^11.2.0", "node-addon-api": "^8.0.0", "node-api-headers": "1.5.0" } diff --git a/test/napi/napi-app/standalone_tests.cpp b/test/napi/napi-app/standalone_tests.cpp index 8795218b9b..5984927787 100644 --- a/test/napi/napi-app/standalone_tests.cpp +++ b/test/napi/napi-app/standalone_tests.cpp @@ -109,6 +109,9 @@ test_napi_get_value_string_utf8_with_buffer(const Napi::CallbackInfo &info) { NODE_API_CALL(env, napi_get_value_string_utf8(env, string_js, buf, len, &copied)); +#ifndef _WIN32 + BlockingStdoutScope stdout_scope; +#endif std::cout << "Chars to copy: " << len << std::endl; std::cout << "Copied chars: " << copied << std::endl; @@ -118,6 +121,7 @@ test_napi_get_value_string_utf8_with_buffer(const Napi::CallbackInfo &info) { } std::cout << std::endl; std::cout << "Value str: " << buf << std::endl; + return ok(env); } @@ -163,15 +167,23 @@ test_napi_handle_scope_bigint(const Napi::CallbackInfo &info) { auto *small_ints = new napi_value[num_small_ints]; - for (size_t i = 0; i < num_small_ints; i++) { - std::array<uint64_t, small_int_size> words; - words.fill(i + 1); - NODE_API_CALL(env, napi_create_bigint_words(env, 0, small_int_size, - words.data(), &small_ints[i])); + for (size_t i = 0, small_int_index = 1; i < num_small_ints; + i++, small_int_index++) { + uint64_t words[small_int_size]; + for (size_t j = 0; j < small_int_size; j++) { + words[j] = small_int_index; + } + + NODE_API_CALL(env, napi_create_bigint_words(env, 0, small_int_size, words, + &small_ints[i])); } run_gc(info); +#ifndef _WIN32 + BlockingStdoutScope stdout_scope; +#endif + for (size_t j = 0; j < num_small_ints; j++) { std::array<uint64_t, small_int_size> words; int sign; @@ -370,7 +382,8 @@ static napi_value test_napi_throw_with_nullptr(const Napi::CallbackInfo &info) { bool is_exception_pending; 
NODE_API_CALL(env, napi_is_exception_pending(env, &is_exception_pending)); - printf("napi_is_exception_pending -> %s\n", is_exception_pending ? "true" : "false"); + printf("napi_is_exception_pending -> %s\n", + is_exception_pending ? "true" : "false"); return ok(env); } @@ -382,6 +395,10 @@ static napi_value test_extended_error_messages(const Napi::CallbackInfo &info) { napi_env env = info.Env(); const napi_extended_error_info *error; +#ifndef _WIN32 + BlockingStdoutScope stdout_scope; +#endif + // this function is implemented in C++ // error because the result pointer is null printf("erroneous napi_create_double returned code %d\n", @@ -432,6 +449,11 @@ static napi_value bigint_to_i64(const Napi::CallbackInfo &info) { napi_env env = info.Env(); + +#ifndef _WIN32 + BlockingStdoutScope stdout_scope; +#endif + // start at 1 is intentional, since argument 0 is the callback to run GC // passed to every function // perform test on all arguments @@ -460,6 +482,10 @@ static napi_value bigint_to_u64(const Napi::CallbackInfo &info) { napi_env env = info.Env(); +#ifndef _WIN32 + BlockingStdoutScope stdout_scope; +#endif + // start at 1 is intentional, since argument 0 is the callback to run GC // passed to every function // perform test on all arguments @@ -489,6 +515,10 @@ static napi_value bigint_to_64_null(const Napi::CallbackInfo &info) { napi_env env = info.Env(); +#ifndef _WIN32 + BlockingStdoutScope stdout_scope; +#endif + napi_value bigint; NODE_API_CALL(env, napi_create_bigint_int64(env, 5, &bigint)); diff --git a/test/napi/napi-app/utils.h b/test/napi/napi-app/utils.h index 92e158e6b7..0711999a32 100644 --- a/test/napi/napi-app/utils.h +++ b/test/napi/napi-app/utils.h @@ -2,6 +2,33 @@ #include "napi_with_version.h" #include +#ifndef _WIN32 +#include <fcntl.h> +#include <stdio.h> + +// Node.js makes stdout non-blocking. +// This messes up printf when you spam it quickly enough.
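+// BlockingStdoutScope is a small RAII guard: the constructor clears +// O_NONBLOCK on fd 1 and switches stdout to full (8 KiB) buffering so a +// burst of printf/std::cout output goes out in complete blocking writes; +// the destructor flushes, restores the saved fd flags, and puts stdout +// back into line-buffered mode.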
+class BlockingStdoutScope { +public: + BlockingStdoutScope() { + original = fcntl(1, F_GETFL); + fcntl(1, F_SETFL, original & ~O_NONBLOCK); + setvbuf(stdout, nullptr, _IOFBF, 8192); + fflush(stdout); + } + + ~BlockingStdoutScope() { + fflush(stdout); + fcntl(1, F_SETFL, original); + setvbuf(stdout, nullptr, _IOLBF, 0); + } + +private: + int original; +}; + +#endif + // e.g NODE_API_CALL(env, napi_create_int32(env, 5, &my_napi_integer)) #define NODE_API_CALL(env, call) NODE_API_CALL_CUSTOM_RETURN(env, NULL, call) diff --git a/test/napi/napi.test.ts b/test/napi/napi.test.ts index c5c2169f37..bd7dc82586 100644 --- a/test/napi/napi.test.ts +++ b/test/napi/napi.test.ts @@ -1,12 +1,13 @@ -import { spawnSync } from "bun"; +import { spawn, spawnSync } from "bun"; import { beforeAll, describe, expect, it } from "bun:test"; import { readdirSync } from "fs"; -import { bunEnv, bunExe, tempDirWithFiles } from "harness"; +import { bunEnv, bunExe, isCI, isMacOS, isMusl, tempDirWithFiles } from "harness"; import { join } from "path"; describe("napi", () => { beforeAll(() => { // build gyp + console.time("Building node-gyp"); const install = spawnSync({ cmd: [bunExe(), "install", "--verbose"], cwd: join(__dirname, "napi-app"), @@ -19,6 +20,7 @@ describe("napi", () => { console.error("build failed, bailing out!"); process.exit(1); } + console.timeEnd("Building node-gyp"); }); describe.each(["esm", "cjs"])("bundle .node files to %s via", format => { @@ -148,88 +150,89 @@ describe("napi", () => { }); describe("issue_7685", () => { - it("works", () => { + it("works", async () => { const args = [...Array(20).keys()]; - checkSameOutput("test_issue_7685", args); + await checkSameOutput("test_issue_7685", args); }); }); describe("issue_11949", () => { - it("napi_call_threadsafe_function should accept null", () => { - const result = checkSameOutput("test_issue_11949", []); + it("napi_call_threadsafe_function should accept null", async () => { + const result = await checkSameOutput("test_issue_11949", []); expect(result).toStartWith("data = 1234, context = 42"); }); }); describe("napi_get_value_string_utf8 with buffer", () => { // see https://github.com/oven-sh/bun/issues/6949 - it("copies one char", () => { - const result = checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 2]); + it("copies one char", async () => { + const result = await checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 2]); expect(result).toEndWith("str: a"); }); - it("copies null terminator", () => { - const result = checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 1]); + it("copies null terminator", async () => { + const result = await checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 1]); expect(result).toEndWith("str:"); }); - it("copies zero char", () => { - const result = checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 0]); + it("copies zero char", async () => { + const result = await checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 0]); expect(result).toEndWith("str: *****************************"); }); - it("copies more than given len", () => { - const result = checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 25]); + it("copies more than given len", async () => { + const result = await checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 25]); expect(result).toEndWith("str: abcdef"); }); - it("copies auto len", () => { - const result = 
checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 424242]); - expect(result).toEndWith("str:"); + // TODO: once we upgrade the Node version on macOS and musl to Node v24.3.0, remove this TODO + it.todoIf(isCI && (isMacOS || isMusl))("copies auto len", async () => { + const result = await checkSameOutput("test_napi_get_value_string_utf8_with_buffer", ["abcdef", 424242]); + expect(result).toEndWith("str: abcdef"); }); }); describe("napi_get_value_string_*", () => { - it("behaves like node on edge cases", () => { - checkSameOutput("test_get_value_string", []); + it("behaves like node on edge cases", async () => { + await checkSameOutput("test_get_value_string", []); }); }); it("#1288", async () => { - const result = checkSameOutput("self", []); + const result = await checkSameOutput("self", []); expect(result).toBe("hello world!"); }); describe("handle_scope", () => { - it("keeps strings alive", () => { - checkSameOutput("test_napi_handle_scope_string", []); + it("keeps strings alive", async () => { + await checkSameOutput("test_napi_handle_scope_string", []); }); - it("keeps bigints alive", () => { - checkSameOutput("test_napi_handle_scope_bigint", []); + it("keeps bigints alive", async () => { + await checkSameOutput("test_napi_handle_scope_bigint", []); }, 10000); - it("keeps the parent handle scope alive", () => { - checkSameOutput("test_napi_handle_scope_nesting", []); + it("keeps the parent handle scope alive", async () => { + await checkSameOutput("test_napi_handle_scope_nesting", []); }); - it("exists when calling a napi constructor", () => { - checkSameOutput("test_napi_class_constructor_handle_scope", []); + it("exists when calling a napi constructor", async () => { + await checkSameOutput("test_napi_class_constructor_handle_scope", []); }); - it("exists while calling a napi_async_complete_callback", () => { - checkSameOutput("create_promise", [false]); + it("exists while calling a napi_async_complete_callback", async () => { + await checkSameOutput("create_promise", [false]); }); - it("keeps arguments moved off the stack alive", () => { - checkSameOutput("test_napi_handle_scope_many_args", ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]); + it("keeps arguments moved off the stack alive", async () => { + await checkSameOutput("test_napi_handle_scope_many_args", ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]); }); }); describe("escapable_handle_scope", () => { - it("keeps the escaped value alive in the outer scope", () => { - checkSameOutput("test_napi_escapable_handle_scope", []); + it("keeps the escaped value alive in the outer scope", async () => { + await checkSameOutput("test_napi_escapable_handle_scope", []); }); }); describe("napi_delete_property", () => { - it("returns a valid boolean", () => { - checkSameOutput( + it("returns a valid boolean", async () => { + await checkSameOutput( "test_napi_delete_property", // generate a string representing an array around an IIFE which main.js will eval // we do this as the napi_delete_property test needs an object with an own non-configurable @@ -251,68 +254,94 @@ describe("napi", () => { }); describe("napi_ref", () => { - it("can recover the value from a weak ref", () => { - checkSameOutput("test_napi_ref", []); + it("can recover the value from a weak ref", async () => { + await checkSameOutput("test_napi_ref", []); }); - it("allows creating a handle scope in the finalizer", () => { - checkSameOutput("test_napi_handle_scope_finalizer", []); + it("allows creating a handle scope in the finalizer", async 
() => { + await checkSameOutput("test_napi_handle_scope_finalizer", []); }); }); describe("napi_async_work", () => { - it("null checks execute callbacks", () => { - const output = checkSameOutput("test_napi_async_work_execute_null_check", []); + it("null checks execute callbacks", async () => { + const output = await checkSameOutput("test_napi_async_work_execute_null_check", []); expect(output).toContain("success!"); expect(output).not.toContain("failure!"); }); - it("null checks complete callbacks after scheduling", () => { - checkSameOutput("test_napi_async_work_complete_null_check", []); + it("null checks complete callbacks after scheduling", async () => { + // This test verifies that async work can be created with a null complete callback. + // The output order can vary due to thread scheduling on Linux, so we normalize + // the output lines before comparing. + const [nodeResult, bunResult] = await Promise.all([ + runOn("node", "test_napi_async_work_complete_null_check", []), + runOn(bunExe(), "test_napi_async_work_complete_null_check", []), + ]); + + // Filter out debug logs and normalize + const cleanBunResult = bunResult.replaceAll(/^\[\w+\].+$/gm, "").trim(); + + // Both should contain these two lines, but order may vary + const expectedLines = ["execute called!", "resolved to undefined"]; + + const nodeLines = nodeResult + .trim() + .split("\n") + .filter(line => line) + .sort(); + const bunLines = cleanBunResult + .split("\n") + .filter(line => line) + .sort(); + + expect(bunLines).toEqual(nodeLines); + expect(bunLines).toEqual(expectedLines.sort()); }); - it("works with cancelation", () => { - const output = checkSameOutput("test_napi_async_work_cancel", [], { "UV_THREADPOOL_SIZE": "2" }); + it("works with cancelation", async () => { + const output = await checkSameOutput("test_napi_async_work_cancel", [], { "UV_THREADPOOL_SIZE": "2" }); expect(output).toContain("success!"); expect(output).not.toContain("failure!"); }); }); describe("napi_threadsafe_function", () => { - it("keeps the event loop alive without async_work", () => { - const result = checkSameOutput("test_promise_with_threadsafe_function", []); + it("keeps the event loop alive without async_work", async () => { + const result = await checkSameOutput("test_promise_with_threadsafe_function", []); expect(result).toContain("tsfn_callback"); expect(result).toContain("resolved to 1234"); expect(result).toContain("tsfn_finalize_callback"); }); - it("does not hang on finalize", () => { - const result = checkSameOutput("test_napi_threadsafe_function_does_not_hang_after_finalize", []); + it("does not hang on finalize", async () => { + const result = await checkSameOutput("test_napi_threadsafe_function_does_not_hang_after_finalize", []); expect(result).toBe("success!"); }); }); describe("exception handling", () => { - it("can check for a pending error and catch the right value", () => { - checkSameOutput("test_get_exception", [5]); - checkSameOutput("test_get_exception", [{ foo: "bar" }]); + it("can check for a pending error and catch the right value", async () => { + await checkSameOutput("test_get_exception", [5]); + await checkSameOutput("test_get_exception", [{ foo: "bar" }]); }); - it("can throw an exception from an async_complete_callback", () => { - checkSameOutput("create_promise", [true]); + it("can throw an exception from an async_complete_callback", async () => { + const count = 10; + await Promise.all(Array.from({ length: count }, () => checkSameOutput("create_promise", [true]))); }); }); describe("napi_run_script", 
() => { - it("evaluates a basic expression", () => { - checkSameOutput("test_napi_run_script", ["5 * (1 + 2)"]); + it("evaluates a basic expression", async () => { + await checkSameOutput("test_napi_run_script", ["5 * (1 + 2)"]); }); - it("provides the right this value", () => { - checkSameOutput("test_napi_run_script", ["this === global"]); + it("provides the right this value", async () => { + await checkSameOutput("test_napi_run_script", ["this === global"]); }); - it("propagates exceptions", () => { - checkSameOutput("test_napi_run_script", ["(()=>{ throw new TypeError('oops'); })()"]); + it("propagates exceptions", async () => { + await checkSameOutput("test_napi_run_script", ["(()=>{ throw new TypeError('oops'); })()"]); }); - it("cannot see locals from around its invocation", () => { + it("cannot see locals from around its invocation", async () => { // variable should_not_exist is declared on main.js:18, but it should not be in scope for the eval'd code - // this doesn't use checkSameOutput because V8 and JSC use different error messages for a missing variable - let bunResult = runOn(bunExe(), "test_napi_run_script", ["shouldNotExist"]); + // this doesn't use checkSameOutput because V8 and JSC use different error messages for a missing variable + let bunResult = await runOn(bunExe(), "test_napi_run_script", ["shouldNotExist"]); // remove all debug logs bunResult = bunResult.replaceAll(/^\[\w+\].+$/gm, "").trim(); expect(bunResult).toBe( @@ -322,100 +351,104 @@ describe("napi", () => { }); describe("napi_get_named_property", () => { - it("handles edge cases", () => { - checkSameOutput("test_get_property", []); + it("handles edge cases", async () => { + await checkSameOutput("test_get_property", []); }); }); describe("napi_set_named_property", () => { - it("handles edge cases", () => { - checkSameOutput("test_set_property", []); + it("handles edge cases", async () => { + await checkSameOutput("test_set_property", []); }); }); describe("napi_value <=> integer conversion", () => { - it("works", () => { - checkSameOutput("test_number_integer_conversions_from_js", []); - checkSameOutput("test_number_integer_conversions", []); + it("works", async () => { + await checkSameOutput("test_number_integer_conversions_from_js", []); + await checkSameOutput("test_number_integer_conversions", []); }); }); describe("arrays", () => { describe("napi_create_array_with_length", () => { - it("creates an array with empty slots", () => { - checkSameOutput("test_create_array_with_length", []); + it("creates an array with empty slots", async () => { + await checkSameOutput("test_create_array_with_length", []); }); }); }); describe("napi_throw functions", () => { - it("has the right code and message", () => { - checkSameOutput("test_throw_functions_exhaustive", []); + it("has the right code and message", async () => { + await checkSameOutput("test_throw_functions_exhaustive", []); }); - it("does not throw with nullptr", () => { - checkSameOutput("test_napi_throw_with_nullptr", []); + it("does not throw with nullptr", async () => { + await checkSameOutput("test_napi_throw_with_nullptr", []); }); }); describe("napi_create_error functions", () => { - it("has the right code and message", () => { - checkSameOutput("test_create_error_functions_exhaustive", []); + it("has the right code and message", async () => { + await checkSameOutput("test_create_error_functions_exhaustive", []); }); }); describe("napi_type_tag_object", () => { - it("works", () => { - checkSameOutput("test_type_tag", []); + it("works", async 
() => { + await checkSameOutput("test_type_tag", []); }); }); // TODO(@190n) test allocating in a finalizer from a napi module with the right version describe("napi_wrap", () => { - it("accepts the right kinds of values", () => { - checkSameOutput("test_napi_wrap", []); + it("accepts the right kinds of values", async () => { + await checkSameOutput("test_napi_wrap", []); }); - it("is shared between addons", () => { - checkSameOutput("test_napi_wrap_cross_addon", []); + it("is shared between addons", async () => { + await checkSameOutput("test_napi_wrap_cross_addon", []); }); - it("does not follow prototypes", () => { - checkSameOutput("test_napi_wrap_prototype", []); + it("does not follow prototypes", async () => { + await checkSameOutput("test_napi_wrap_prototype", []); }); - it("does not consider proxies", () => { - checkSameOutput("test_napi_wrap_proxy", []); + it("does not consider proxies", async () => { + await checkSameOutput("test_napi_wrap_proxy", []); }); - it("can remove a wrap", () => { - checkSameOutput("test_napi_remove_wrap", []); + it("can remove a wrap", async () => { + await checkSameOutput("test_napi_remove_wrap", []); }); - it("has the right lifetime", () => { - checkSameOutput("test_wrap_lifetime_without_ref", []); - checkSameOutput("test_wrap_lifetime_with_weak_ref", []); - checkSameOutput("test_wrap_lifetime_with_strong_ref", []); - checkSameOutput("test_remove_wrap_lifetime_with_weak_ref", []); - checkSameOutput("test_remove_wrap_lifetime_with_strong_ref", []); + it("has the right lifetime", async () => { + await checkSameOutput("test_wrap_lifetime_without_ref", []); + await checkSameOutput("test_wrap_lifetime_with_weak_ref", []); + await checkSameOutput("test_wrap_lifetime_with_strong_ref", []); + await checkSameOutput("test_remove_wrap_lifetime_with_weak_ref", []); + await checkSameOutput("test_remove_wrap_lifetime_with_strong_ref", []); // check that napi finalizers also run at VM exit, even if they didn't get run by GC - checkSameOutput("test_ref_deleted_in_cleanup", []); + await checkSameOutput("test_ref_deleted_in_cleanup", []); // check that calling napi_delete_ref in the ref's finalizer is not use-after-free - checkSameOutput("test_ref_deleted_in_async_finalize", []); + await checkSameOutput("test_ref_deleted_in_async_finalize", []); }); }); describe("napi_define_class", () => { - it("handles edge cases in the constructor", () => { - checkSameOutput("test_napi_class", []); - checkSameOutput("test_subclass_napi_class", []); - checkSameOutput("test_napi_class_non_constructor_call", []); - checkSameOutput("test_reflect_construct_napi_class", []); + it("handles edge cases in the constructor", async () => { + await checkSameOutput("test_napi_class", []); + await checkSameOutput("test_subclass_napi_class", []); + await checkSameOutput("test_napi_class_non_constructor_call", []); + await checkSameOutput("test_reflect_construct_napi_class", []); + }); + + it("does not crash with Reflect.construct when newTarget has no prototype", async () => { + await checkSameOutput("test_reflect_construct_no_prototype_crash", []); }); }); describe("bigint conversion to int64/uint64", () => { - it("works", () => { + it("works", async () => { const tests = [-1n, 0n, 1n]; for (const power of [63, 64, 65]) { for (const sign of [-1, 1]) { @@ -425,26 +458,26 @@ describe("napi", () => { } const testsString = "[" + tests.map(bigint => bigint.toString() + "n").join(",") + "]"; - checkSameOutput("bigint_to_i64", testsString); - checkSameOutput("bigint_to_u64", testsString); + await 
checkSameOutput("bigint_to_i64", testsString); + await checkSameOutput("bigint_to_u64", testsString); }); - it("returns the right error code", () => { + it("returns the right error code", async () => { const badTypes = '[null, undefined, 5, "123", "abc"]'; - checkSameOutput("bigint_to_i64", badTypes); - checkSameOutput("bigint_to_u64", badTypes); - checkSameOutput("bigint_to_64_null", []); + await checkSameOutput("bigint_to_i64", badTypes); + await checkSameOutput("bigint_to_u64", badTypes); + await checkSameOutput("bigint_to_64_null", []); }); }); describe("create_bigint_words", () => { - it("works", () => { - checkSameOutput("test_create_bigint_words", []); + it("works", async () => { + await checkSameOutput("test_create_bigint_words", []); }); }); describe("napi_get_last_error_info", () => { - it("returns information from the most recent call", () => { - checkSameOutput("test_extended_error_messages", []); + it("returns information from the most recent call", async () => { + await checkSameOutput("test_extended_error_messages", []); }); }); @@ -460,10 +493,10 @@ describe("napi", () => { ["[1, 2, 3]", false], ["'hello'", false], ]; - it("returns consistent values with node.js", () => { + it("returns consistent values with node.js", async () => { for (const [value, expected] of tests) { // main.js does eval then spread so to pass a single value we need to wrap in an array - const output = checkSameOutput(`test_is_${kind}`, "[" + value + "]"); + const output = await checkSameOutput(`test_is_${kind}`, "[" + value + "]"); expect(output).toBe(`napi_is_${kind} -> ${expected.toString()}`); } }); @@ -476,26 +509,33 @@ describe("napi", () => { ])("works when the module register function returns %s", (returnKind, expected) => { expect(require(`./napi-app/build/Debug/${returnKind}_addon.node`)).toEqual(expected); }); - it("works when the module register function throws", () => { + it("works when the module register function throws", async () => { expect(() => require("./napi-app/build/Debug/throw_addon.node")).toThrow(new Error("oops!")); }); }); -function checkSameOutput(test: string, args: any[] | string, envArgs: Record = {}) { - const nodeResult = runOn("node", test, args, envArgs).trim(); - let bunResult = runOn(bunExe(), test, args, envArgs); +async function checkSameOutput(test: string, args: any[] | string, envArgs: Record = {}) { + let [nodeResult, bunResult] = await Promise.all([ + runOn("node", test, args, envArgs), + runOn(bunExe(), test, args, envArgs), + ]); + nodeResult = nodeResult.trim(); // remove all debug logs - bunResult = bunResult.replaceAll(/^\[\w+\].+$/gm, "").trim(); + bunResult = bunResult + .replaceAll(/^\[\w+\].+$/gm, "") + // TODO: we don't seem to print ProxyObject in this case. + .replaceAll("function ProxyObject()", "function ()") + .trim(); expect(bunResult).toEqual(nodeResult); return nodeResult; } -function runOn(executable: string, test: string, args: any[] | string, envArgs: Record = {}) { +async function runOn(executable: string, test: string, args: any[] | string, envArgs: Record = {}) { // when the inspector runs (can be due to VSCode extension), there is // a bug that in debug modes the console logs extra stuff const { BUN_INSPECT_CONNECT_TO: _, ...rest } = bunEnv; const env = { ...rest, ...envArgs }; - const exec = spawnSync({ + const exec = spawn({ cmd: [ executable, "--expose-gc", @@ -504,11 +544,19 @@ function runOn(executable: string, test: string, args: any[] | string, envArgs: typeof args == "string" ? 
args : JSON.stringify(args), ], env, + stdout: "pipe", + stderr: "pipe", + stdin: "inherit", }); - const errs = exec.stderr.toString(); + const [stdout, stderr, result] = await Promise.all([ + new Response(exec.stdout).text(), + new Response(exec.stderr).text(), + exec.exited, + ]); + const errs = stderr.toString(); if (errs !== "") { throw new Error(errs); } - expect(exec.success).toBeTrue(); - return exec.stdout.toString(); + expect(result).toBe(0); + return stdout; } diff --git a/test/napi/node-napi.test.ts b/test/napi/node-napi.test.ts index 5f043e98ef..7311247646 100644 --- a/test/napi/node-napi.test.ts +++ b/test/napi/node-napi.test.ts @@ -1,7 +1,6 @@ import { Glob, spawn, spawnSync } from "bun"; -import { beforeAll, describe, expect, it } from "bun:test"; +import { describe, expect, it } from "bun:test"; import { bunEnv, bunExe, isBroken, isCI, isIntelMacOS, isMusl, isWindows } from "harness"; -import os from "node:os"; import { dirname, join } from "path"; const jsNativeApiRoot = join(__dirname, "node-napi-tests", "test", "js-native-api"); @@ -88,67 +87,51 @@ for (const t of failingNodeApiTests) { } } -beforeAll(async () => { - const directories = jsNativeApiTests - .filter(t => !failingJsNativeApiTests.includes(t)) - .map(t => join(jsNativeApiRoot, t)) - .concat(nodeApiTests.filter(t => !failingNodeApiTests.includes(t)).map(t => join(nodeApiRoot, t))) - .map(t => dirname(t)); - const uniqueDirectories = Array.from(new Set(directories)); +const directories = jsNativeApiTests + .filter(t => !failingJsNativeApiTests.includes(t)) + .map(t => join(jsNativeApiRoot, t)) + .concat(nodeApiTests.filter(t => !failingNodeApiTests.includes(t)).map(t => join(nodeApiRoot, t))) + .map(t => dirname(t)); +const uniqueDirectories = Array.from(new Set(directories)); - async function buildOne(dir: string) { - const child = spawn({ - cmd: [bunExe(), "x", "node-gyp", "rebuild", "--debug"], - cwd: dir, - stderr: "pipe", - stdout: "ignore", - stdin: "inherit", - env: { - ...bunEnv, - npm_config_target: "v23.2.0", - // on linux CI, node-gyp will default to g++ and the version installed there is very old, - // so we make it use clang instead - ...(process.platform == "linux" && isCI - ? { "CC": "/usr/lib/llvm-19/bin/clang", CXX: "/usr/lib/llvm-19/bin/clang++" } - : {}), - }, +describe("build", () => { + for (const dir of uniqueDirectories) { + it(`${dir.slice(import.meta.dir.length + 1)}`, async () => { + const child = spawn({ + cmd: [bunExe(), "x", "node-gyp@11", "rebuild", "--debug", "-j", "max"], + cwd: dir, + stderr: "pipe", + stdout: "ignore", + stdin: "inherit", + env: { + ...bunEnv, + npm_config_target: "v24.3.0", + CXXFLAGS: (bunEnv.CXXFLAGS ?? "") + (process.platform == "win32" ? " -std=c++20" : " -std=gnu++20"), + // on linux CI, node-gyp will default to g++ and the version installed there is very old, + // so we make it use clang instead + ...(process.platform == "linux" && isCI + ? 
{ "CC": "/usr/lib/llvm-19/bin/clang", CXX: "/usr/lib/llvm-19/bin/clang++" } + : {}), + }, + }); + await child.exited; + if (child.exitCode !== 0) { + const stderr = await new Response(child.stderr).text(); + console.error(`node-gyp rebuild in ${dir} failed:\n${stderr}`); + console.error("bailing out!"); + process.exit(1); + } }); - await child.exited; - if (child.exitCode !== 0) { - const stderr = await new Response(child.stderr).text(); - console.error(`node-gyp rebuild in ${dir} failed:\n${stderr}`); - console.error("bailing out!"); - process.exit(1); - } } +}); - async function worker() { - while (uniqueDirectories.length > 0) { - const dir = uniqueDirectories.pop(); - await buildOne(dir!); - } - } - - const parallelism = Math.min(8, os.cpus().length, 1 /* TODO(@heimskr): remove */); - const jobs: Promise[] = []; - for (let i = 0; i < parallelism; i++) { - jobs.push(worker()); - } - - await Promise.all(jobs); -}, 600000); - -describe.each([ - ["js-native-api", jsNativeApiTests, jsNativeApiRoot, failingJsNativeApiTests], - ["node-api", nodeApiTests, nodeApiRoot, failingNodeApiTests], -])("%s tests", (_name, tests, root, failing) => { - describe.each(tests)("%s", test => { - it.skipIf(failing.includes(test))( - "passes", - () => { +describe("js-native-api tests", () => { + for (const test of jsNativeApiTests) { + describe.skipIf(failingJsNativeApiTests.includes(test))(`${test}`, () => { + it("passes", () => { const result = spawnSync({ cmd: [bunExe(), "run", test], - cwd: root, + cwd: jsNativeApiRoot, stderr: "inherit", stdout: "ignore", stdin: "inherit", @@ -156,8 +139,26 @@ describe.each([ }); expect(result.success).toBeTrue(); expect(result.exitCode).toBe(0); - }, - 60000, // timeout - ); - }); + }, 60_000); + }); + } +}); + +describe("node-api tests", () => { + for (const test of nodeApiTests) { + describe.skipIf(failingNodeApiTests.includes(test))(`${test}`, () => { + it("passes", () => { + const result = spawnSync({ + cmd: [bunExe(), "run", test], + cwd: nodeApiRoot, + stderr: "inherit", + stdout: "ignore", + stdin: "inherit", + env: bunEnv, + }); + expect(result.success).toBeTrue(); + expect(result.exitCode).toBe(0); + }, 60_000); + }); + } }); diff --git a/test/no-validate-exceptions.txt b/test/no-validate-exceptions.txt new file mode 100644 index 0000000000..6275da6dc6 --- /dev/null +++ b/test/no-validate-exceptions.txt @@ -0,0 +1,2081 @@ +# List of tests for which we do NOT set validateExceptionChecks=1 when running in ASan CI +test/bake/dev-and-prod.test.ts +test/bake/dev/plugins.test.ts +test/bake/dev/vfile.test.ts +test/bake/dev/ssg-pages-router.test.ts +test/bake/framework-router.test.ts +test/bundler/bun-build-api.test.ts +test/bundler/bundler_banner.test.ts +test/bundler/bundler_browser.test.ts +test/bundler/bundler_bun.test.ts +test/bundler/bundler_cjs2esm.test.ts +test/bundler/bundler_comments.test.ts +test/bundler/bundler_compile.test.ts +test/bundler/bundler_decorator_metadata.test.ts +test/bundler/bundler_defer.test.ts +test/bundler/bundler_drop.test.ts +test/bundler/bundler_env.test.ts +test/bundler/bundler_footer.test.ts +test/bundler/bundler_html.test.ts +test/bundler/bundler_html_server.test.ts +test/bundler/bundler_jsx.test.ts +test/bundler/bundler_minify.test.ts +test/bundler/bundler_naming.test.ts +test/bundler/bundler_plugin.test.ts +test/bundler/bundler_regressions.test.ts +test/bundler/bundler_splitting.test.ts +test/bundler/bundler_string.test.ts +test/bundler/css/css-modules.test.ts +test/bundler/css/wpt/background-computed.test.ts 
+test/bundler/css/wpt/color-computed-rgb.test.ts +test/bundler/css/wpt/color-computed.test.ts +test/bundler/css/wpt/relative_color_out_of_gamut.test.ts +test/bundler/esbuild/css.test.ts +test/bundler/esbuild/dce.test.ts +test/bundler/esbuild/extra.test.ts +test/bundler/esbuild/importstar.test.ts +test/cli/install/isolated-install.test.ts +test/bundler/esbuild/importstar_ts.test.ts +test/bundler/esbuild/loader.test.ts +test/bundler/esbuild/lower.test.ts +test/bundler/esbuild/packagejson.test.ts +test/bundler/esbuild/splitting.test.ts +test/bundler/esbuild/ts.test.ts +test/bundler/esbuild/tsconfig.test.ts +test/bundler/html-import-manifest.test.ts +test/bundler/transpiler/bun-pragma.test.ts +test/bundler/transpiler/jsx-production.test.ts +test/bundler/transpiler/macro-test.test.ts +test/bundler/transpiler/runtime-transpiler.test.ts +test/bundler/transpiler/transpiler.test.js +test/cli/init/init.test.ts +test/cli/install/bun-add.test.ts +test/cli/install/bun-audit.test.ts +test/cli/install/bun-create.test.ts +test/cli/install/bun-info.test.ts +test/cli/install/bun-install-dep.test.ts +test/cli/install/bun-install-lifecycle-scripts.test.ts +test/cli/install/bun-install-patch.test.ts +test/cli/install/bun-install-registry.test.ts +test/cli/install/bun-install-retry.test.ts +test/cli/install/bun-link.test.ts +test/cli/install/bun-lock.test.ts +test/cli/install/bun-lockb.test.ts +test/cli/install/bun-pack.test.ts +test/cli/install/bun-patch.test.ts +test/cli/install/bun-pm.test.ts +test/cli/install/bun-pm-version.test.ts +test/cli/install/bun-publish.test.ts +test/cli/install/bun-remove.test.ts +test/cli/install/bun-update.test.ts +test/cli/install/bun-upgrade.test.ts +test/cli/install/bun-workspaces.test.ts +test/cli/install/bunx.test.ts +test/cli/install/catalogs.test.ts +test/cli/install/npmrc.test.ts +test/cli/install/overrides.test.ts +test/cli/run/env.test.ts +test/cli/run/esm-defineProperty.test.ts +test/cli/run/garbage-env.test.ts +test/cli/run/jsx-namespaced-attributes.test.ts +test/cli/run/log-test.test.ts +test/cli/run/require-and-import-trailing.test.ts +test/cli/run/run-autoinstall.test.ts +test/cli/run/run-eval.test.ts +test/cli/run/self-reference.test.ts +test/cli/run/shell-keepalive.test.ts +test/cli/test/bun-test.test.ts +test/cli/watch/watch.test.ts +test/config/bunfig/preload.test.ts +test/integration/bun-types/bun-types.test.ts +test/integration/bun-types/fixture/serve-types.test.ts +test/integration/esbuild/esbuild.test.ts +test/integration/jsdom/jsdom.test.ts +test/integration/mysql2/mysql2.test.ts +test/integration/nest/nest_metadata.test.ts +test/integration/sass/sass.test.ts +test/integration/sharp/sharp.test.ts +test/integration/svelte/client-side.test.ts +test/integration/typegraphql/src/typegraphql.test.ts +test/internal/bindgen.test.ts +test/js/bun/bun-object/deep-match.spec.ts +test/js/bun/bun-object/write.spec.ts +test/js/bun/console/bun-inspect-table.test.ts +test/js/bun/console/console-iterator.test.ts +test/js/bun/console/console-table.test.ts +test/js/bun/cookie/cookie-expires-validation.test.ts +test/js/bun/cookie/cookie-map.test.ts +test/js/bun/cookie/cookie.test.ts +test/js/bun/crypto/cipheriv-decipheriv.test.ts +test/js/bun/crypto/wpt-webcrypto.generateKey.test.ts +test/js/bun/dns/resolve-dns.test.ts +test/js/bun/glob/scan.test.ts +test/js/bun/globals.test.js +test/js/bun/http/async-iterator-stream.test.ts +test/js/bun/http/bun-connect-x509.test.ts +test/js/bun/http/bun-serve-args.test.ts +test/js/bun/http/bun-serve-cookies.test.ts 
+test/js/bun/http/bun-serve-file.test.ts +test/js/bun/http/bun-serve-headers.test.ts +test/js/bun/http/bun-serve-html-entry.test.ts +test/js/bun/http/bun-serve-html-manifest.test.ts +test/js/bun/http/bun-serve-html.test.ts +test/js/bun/http/bun-serve-routes.test.ts +test/js/bun/http/bun-serve-static.test.ts +test/js/bun/http/bun-server.test.ts +test/js/bun/http/decodeURIComponentSIMD.test.ts +test/js/bun/http/fetch-file-upload.test.ts +test/js/bun/http/hspec.test.ts +test/js/bun/http/http-server-chunking.test.ts +test/js/bun/http/http-spec.ts +test/js/bun/http/leaks-test.test.ts +test/js/bun/http/proxy.test.ts +test/js/bun/http/serve-body-leak.test.ts +test/js/bun/http/serve-listen.test.ts +test/js/bun/http/serve.test.ts +test/js/bun/import-attributes/import-attributes.test.ts +test/js/bun/ini/ini.test.ts +test/js/bun/io/bun-write.test.js +test/js/bun/jsc/bun-jsc.test.ts +test/js/bun/jsc/domjit.test.ts +test/js/bun/net/socket.test.ts +test/js/bun/net/tcp-server.test.ts +test/js/bun/net/tcp.spec.ts +test/js/bun/net/tcp.test.ts +test/js/bun/patch/patch.test.ts +test/js/bun/perf_hooks/histogram.test.ts +test/js/bun/plugin/plugins.test.ts +test/js/bun/resolve/build-error.test.ts +test/js/bun/resolve/bun-lock.test.ts +test/js/bun/resolve/esModule-annotation.test.js +test/js/bun/resolve/import-custom-condition.test.ts +test/js/bun/resolve/import-empty.test.js +test/js/bun/resolve/import-meta-resolve.test.mjs +test/js/bun/resolve/import-meta.test.js +test/js/bun/resolve/jsonc.test.ts +test/js/bun/resolve/require.test.ts +test/js/bun/resolve/toml/toml.test.js +test/js/bun/s3/s3-insecure.test.ts +test/js/bun/s3/s3-list-objects.test.ts +test/js/bun/s3/s3-storage-class.test.ts +test/js/bun/s3/s3.test.ts +test/js/bun/shell/bunshell-default.test.ts +test/js/bun/shell/bunshell-file.test.ts +test/js/bun/shell/bunshell-instance.test.ts +test/js/bun/shell/commands/basename.test.ts +test/js/bun/shell/commands/dirname.test.ts +test/js/bun/shell/commands/echo.test.ts +test/js/bun/shell/commands/exit.test.ts +test/js/bun/shell/commands/false.test.ts +test/js/bun/shell/commands/ls.test.ts +test/js/bun/shell/commands/mv.test.ts +test/js/bun/shell/commands/rm.test.ts +test/js/bun/shell/commands/seq.test.ts +test/js/bun/shell/commands/true.test.ts +test/js/bun/shell/commands/which.test.ts +test/js/bun/shell/commands/yes.test.ts +test/js/bun/shell/env.positionals.test.ts +test/js/bun/shell/exec.test.ts +test/js/bun/shell/file-io.test.ts +test/js/bun/shell/lazy.test.ts +test/js/bun/shell/leak.test.ts +test/js/bun/shell/lex.test.ts +test/js/bun/shell/shell-hang.test.ts +test/js/bun/shell/shell-load.test.ts +test/js/bun/shell/shelloutput.test.ts +test/js/bun/shell/throw.test.ts +test/js/bun/shell/yield.test.ts +test/js/bun/spawn/bun-ipc-inherit.test.ts +test/js/bun/spawn/job-object-bug.test.ts +test/js/bun/spawn/spawn-empty-arrayBufferOrBlob.test.ts +test/js/bun/spawn/spawn-path.test.ts +test/js/bun/spawn/spawn-stdin-destroy.test.ts +test/js/bun/spawn/spawn-stdin-readable-stream-edge-cases.test.ts +test/js/bun/spawn/spawn-stdin-readable-stream-integration.test.ts +test/js/bun/spawn/spawn-stdin-readable-stream-sync.test.ts +test/js/bun/spawn/spawn-stdin-readable-stream.test.ts +test/js/bun/spawn/spawn-stream-serve.test.ts +test/js/bun/spawn/spawn-streaming-stdout.test.ts +test/js/node/module/sourcemap.test.js +test/js/bun/spawn/spawn-stress.test.ts +test/js/bun/spawn/spawn.ipc.bun-node.test.ts +test/js/bun/spawn/spawn.ipc.node-bun.test.ts +test/js/bun/spawn/spawn.ipc.test.ts 
+test/js/bun/spawn/spawn_waiter_thread.test.ts +test/js/bun/sqlite/sql-timezone.test.js +test/js/bun/stream/direct-readable-stream.test.tsx +test/js/bun/symbols.test.ts +test/js/bun/test/done-async.test.ts +test/js/bun/test/expect-assertions.test.ts +test/js/bun/test/jest-extended.test.js +test/js/bun/test/mock-fn.test.js +test/js/bun/test/mock/6874/A.test.ts +test/js/bun/test/mock/6874/B.test.ts +test/js/bun/test/mock/6879/6879.test.ts +test/js/bun/test/mock/mock-module.test.ts +test/js/bun/test/snapshot-tests/bun-snapshots.test.ts +test/js/bun/test/snapshot-tests/existing-snapshots.test.ts +test/js/bun/test/snapshot-tests/new-snapshot.test.ts +test/js/bun/test/snapshot-tests/snapshots/more.test.ts +test/js/bun/test/snapshot-tests/snapshots/moremore.test.ts +test/js/bun/test/snapshot-tests/snapshots/snapshot.test.ts +test/js/bun/test/stack.test.ts +test/js/bun/test/test-failing.test.ts +test/js/bun/test/test-only.test.ts +test/js/bun/test/test-test.test.ts +test/js/bun/udp/dgram.test.ts +test/js/bun/udp/udp_socket.test.ts +test/js/bun/util/BunObject.test.ts +test/js/bun/util/arraybuffersink.test.ts +test/js/bun/util/bun-cryptohasher.test.ts +test/js/bun/util/bun-file-exists.test.js +test/js/bun/util/bun-file.test.ts +test/js/bun/util/bun-isMainThread.test.js +test/js/bun/util/concat.test.js +test/js/bun/util/cookie.test.js +test/js/bun/util/error-gc-test.test.js +test/js/bun/util/escapeHTML.test.js +test/js/bun/util/fileUrl.test.js +test/js/bun/util/filesink.test.ts +test/js/bun/util/filesystem_router.test.ts +test/js/bun/util/fuzzy-wuzzy.test.ts +test/js/bun/util/hash.test.js +test/js/bun/util/heap-snapshot.test.ts +test/js/bun/util/index-of-line.test.ts +test/js/bun/util/inspect-error.test.js +test/js/bun/util/inspect.test.js +test/js/bun/util/mmap.test.js +test/js/bun/util/password.test.ts +test/js/bun/util/randomUUIDv5.test.ts +test/js/bun/util/readablestreamtoarraybuffer.test.ts +test/js/bun/util/stringWidth.test.ts +test/js/bun/util/text-loader.test.ts +test/js/bun/util/unsafe.test.js +test/js/bun/util/v8-heap-snapshot.test.ts +test/js/bun/util/which.test.ts +test/js/bun/util/zstd.test.ts +test/js/bun/websocket/websocket-server.test.ts +test/js/deno/abort/abort-controller.test.ts +test/js/deno/crypto/random.test.ts +test/js/deno/crypto/webcrypto.test.ts +test/js/deno/encoding/encoding.test.ts +test/js/deno/event/custom-event.test.ts +test/js/deno/event/event-target.test.ts +test/js/deno/event/event.test.ts +test/js/deno/fetch/blob.test.ts +test/js/deno/fetch/body.test.ts +test/js/deno/fetch/headers.test.ts +test/js/deno/fetch/request.test.ts +test/js/deno/fetch/response.test.ts +test/js/deno/performance/performance.test.ts +test/js/deno/url/url.test.ts +test/js/deno/url/urlsearchparams.test.ts +test/js/deno/v8/error.test.ts +test/js/first_party/undici/undici.test.ts +test/js/junit-reporter/junit.test.js +test/js/node/assert/assert-promise.test.ts +test/js/node/assert/assert.spec.ts +test/js/node/async_hooks/AsyncLocalStorage.test.ts +test/js/node/async_hooks/AsyncLocalStorage-tracking.test.ts +test/js/node/buffer-concat.test.ts +test/js/node/buffer.test.js +test/js/node/child_process/child-process-exec.test.ts +test/js/node/child_process/child-process-stdio.test.js +test/js/node/child_process/child_process-node.test.js +test/js/node/child_process/child_process_ipc.test.js +test/js/node/child_process/child_process_ipc_large_disconnect.test.js +test/js/node/child_process/child_process_send_cb.test.js +test/js/node/cluster.test.ts +test/js/node/cluster/test-docs-http-server.ts 
+test/js/node/cluster/test-worker-no-exit-http.ts +test/js/node/crypto/crypto-hmac-algorithm.test.ts +test/js/node/crypto/crypto-oneshot.test.ts +test/js/node/crypto/crypto-random.test.ts +test/js/node/crypto/crypto-rsa.test.js +test/js/node/crypto/crypto.hmac.test.ts +test/js/node/crypto/crypto.key-objects.test.ts +test/js/node/crypto/crypto.test.ts +test/js/node/crypto/ecdh.test.ts +test/js/node/crypto/node-crypto.test.js +test/js/node/crypto/pbkdf2.test.ts +test/js/node/diagnostics_channel/diagnostics_channel.test.ts +test/js/node/dns/dns-lookup-keepalive.test.ts +test/js/node/dns/node-dns.test.js +test/js/node/events/event-emitter.test.ts +test/js/node/fs/cp.test.ts +test/js/node/fs/dir.test.ts +test/js/node/fs/fs-leak.test.js +test/js/node/fs/fs-mkdir.test.ts +test/js/node/fs/fs-oom.test.ts +test/js/node/fs/fs-promises-writeFile-async-iterator.test.ts +test/js/node/fs/fs-stats-truncate.test.ts +test/js/node/fs/fs.test.ts +test/js/node/fs/glob.test.ts +test/js/node/fs/promises.test.js +test/js/node/http/client-timeout-error.test.ts +test/js/node/http/node-fetch.test.js +test/js/node/http/node-http-backpressure.test.ts +test/js/node/http/node-http-parser.test.ts +test/js/node/http/node-http-primoridals.test.ts +test/js/node/http/node-http-transfer-encoding.test.ts +test/js/node/http/node-http.test.ts +test/js/node/http/numeric-header.test.ts +test/js/node/module/node-module-module.test.js +test/js/node/module/require-extensions.test.ts +test/js/node/net/double-connect.test.ts +test/js/node/net/node-net-allowHalfOpen.test.js +test/js/node/net/node-net-server.test.ts +test/js/node/net/node-net.test.ts +test/js/node/net/server.spec.ts +test/js/node/no-addons.test.ts +test/js/node/os/os.test.js +test/js/node/path/browserify.test.js +test/js/node/path/matches-glob.test.ts +test/js/node/path/parse-format.test.js +test/js/node/path/to-namespaced-path.test.js +test/js/node/process/call-constructor.test.js +test/js/node/process/process-args.test.js +test/js/node/process/process-nexttick.test.js +test/js/node/process/process-stdin.test.ts +test/js/node/process/process-stdio.test.ts +test/js/node/process/process.test.js +test/js/node/promise/reject-tostring.test.ts +test/js/node/readline/pause_stdin_should_exit.test.ts +test/js/node/readline/readline.node.test.ts +test/js/node/readline/readline_never_unrefs.test.ts +test/js/node/readline/readline_promises.node.test.ts +test/js/node/readline/stdin_fell_asleep.test.ts +test/js/node/stream/node-stream-uint8array.test.ts +test/js/node/stream/node-stream.test.js +test/js/node/string-module.test.js +test/js/node/string_decoder/string-decoder.test.js +test/js/node/stubs.test.js +test/js/node/test/parallel/test-abortsignal-any.mjs +test/js/node/test/parallel/test-assert-async.js +test/js/node/test/parallel/test-assert-builtins-not-read-from-filesystem.js +test/js/node/test/parallel/test-assert-calltracker-calls.js +test/js/node/test/parallel/test-assert-calltracker-getCalls.js +test/js/node/test/parallel/test-assert-calltracker-report.js +test/js/node/test/parallel/test-assert-calltracker-verify.js +test/js/node/test/parallel/test-assert-checktag.js +test/js/node/test/parallel/test-assert-deep-with-error.js +test/js/node/test/parallel/test-assert-esm-cjs-message-verify.js +test/js/node/test/parallel/test-assert-fail-deprecation.js +test/js/node/test/parallel/test-assert-if-error.js +test/js/node/test/parallel/test-assert-strict-exists.js +test/js/node/test/parallel/test-assert.js +test/js/node/test/parallel/test-async-hooks-asyncresource-constructor.js 
+test/js/node/test/parallel/test-async-hooks-constructor.js +test/js/node/test/parallel/test-async-hooks-recursive-stack-runInAsyncScope.js +test/js/node/test/parallel/test-async-hooks-run-in-async-scope-caught-exception.js +test/js/node/test/parallel/test-async-hooks-run-in-async-scope-this-arg.js +test/js/node/test/parallel/test-async-hooks-vm-gc.js +test/js/node/test/parallel/test-async-hooks-worker-asyncfn-terminate-1.js +test/js/node/test/parallel/test-async-hooks-worker-asyncfn-terminate-2.js +test/js/node/test/parallel/test-async-hooks-worker-asyncfn-terminate-3.js +test/js/node/test/parallel/test-async-hooks-worker-asyncfn-terminate-4.js +test/js/node/test/parallel/test-async-local-storage-bind.js +test/js/node/test/parallel/test-async-local-storage-contexts.js +test/js/node/test/parallel/test-async-local-storage-deep-stack.js +test/js/node/test/parallel/test-async-local-storage-enter-with.js +test/js/node/test/parallel/test-async-local-storage-exit-does-not-leak.js +test/js/node/test/parallel/test-async-local-storage-http-multiclients.js +test/js/node/test/parallel/test-async-local-storage-snapshot.js +test/js/node/test/parallel/test-async-wrap-constructor.js +test/js/node/test/parallel/test-atomics-wake.js +test/js/node/test/parallel/test-bad-unicode.js +test/js/node/test/parallel/test-beforeexit-event-exit.js +test/js/node/test/parallel/test-binding-constants.js +test/js/node/test/parallel/test-blob-createobjecturl.js +test/js/node/test/parallel/test-blocklist-clone.js +test/js/node/test/parallel/test-blocklist.js +test/js/node/test/parallel/test-broadcastchannel-custom-inspect.js +test/js/node/test/parallel/test-btoa-atob.js +test/js/node/test/parallel/test-c-ares.js +test/js/node/test/parallel/test-child-process-advanced-serialization-largebuffer.js +test/js/node/test/parallel/test-child-process-advanced-serialization.js +test/js/node/test/parallel/test-child-process-can-write-to-stdout.js +test/js/node/test/parallel/test-child-process-constructor.js +test/js/node/test/parallel/test-child-process-cwd.js +test/js/node/test/parallel/test-child-process-default-options.js +test/js/node/test/parallel/test-child-process-destroy.js +test/js/node/test/parallel/test-child-process-detached.js +test/js/node/test/parallel/test-child-process-dgram-reuseport.js +test/js/node/test/parallel/test-child-process-disconnect.js +test/js/node/test/parallel/test-child-process-double-pipe.js +test/js/node/test/parallel/test-child-process-emfile.js +test/js/node/test/parallel/test-child-process-env.js +test/js/node/test/parallel/test-child-process-exec-abortcontroller-promisified.js +test/js/node/test/parallel/test-child-process-exec-any-shells-windows.js +test/js/node/test/parallel/test-child-process-exec-cwd.js +test/js/node/test/parallel/test-child-process-exec-encoding.js +test/js/node/test/parallel/test-child-process-exec-env.js +test/js/node/test/parallel/test-child-process-exec-error.js +test/js/node/test/parallel/test-child-process-exec-maxbuf.js +test/js/node/test/parallel/test-child-process-exec-std-encoding.js +test/js/node/test/parallel/test-child-process-exec-stdout-stderr-data-string.js +test/js/node/test/parallel/test-child-process-exec-timeout-expire.js +test/js/node/test/parallel/test-child-process-exec-timeout-kill.js +test/js/node/test/parallel/test-child-process-exec-timeout-not-expired.js +test/js/node/test/parallel/test-child-process-execFile-promisified-abortController.js +test/js/node/test/parallel/test-child-process-execfile-maxbuf.js 
+test/js/node/test/parallel/test-child-process-execfile.js +test/js/node/test/parallel/test-child-process-execfilesync-maxbuf.js +test/js/node/test/parallel/test-child-process-execsync-maxbuf.js +test/js/node/test/parallel/test-child-process-exit-code.js +test/js/node/test/parallel/test-child-process-flush-stdio.js +test/js/node/test/parallel/test-child-process-fork-abort-signal.js +test/js/node/test/parallel/test-child-process-fork-and-spawn.js +test/js/node/test/parallel/test-child-process-fork-args.js +test/js/node/test/parallel/test-child-process-fork-close.js +test/js/node/test/parallel/test-child-process-fork-closed-channel-segfault.js +test/js/node/test/parallel/test-child-process-fork-detached.js +test/js/node/test/parallel/test-child-process-fork-exec-argv.js +test/js/node/test/parallel/test-child-process-fork-exec-path.js +test/js/node/test/parallel/test-child-process-fork-no-shell.js +test/js/node/test/parallel/test-child-process-fork-ref.js +test/js/node/test/parallel/test-child-process-fork-ref2.js +test/js/node/test/parallel/test-child-process-fork-stdio-string-variant.js +test/js/node/test/parallel/test-child-process-fork-timeout-kill-signal.js +test/js/node/test/parallel/test-child-process-fork-url.mjs +test/js/node/test/parallel/test-child-process-fork.js +test/js/node/test/parallel/test-child-process-fork3.js +test/js/node/test/parallel/test-child-process-ipc-next-tick.js +test/js/node/test/parallel/test-child-process-ipc.js +test/js/node/test/parallel/test-child-process-kill.js +test/js/node/test/parallel/test-child-process-net-reuseport.js +test/js/node/test/parallel/test-child-process-no-deprecation.js +test/js/node/test/parallel/test-child-process-promisified.js +test/js/node/test/parallel/test-child-process-prototype-tampering.mjs +test/js/node/test/parallel/test-child-process-reject-null-bytes.js +test/js/node/test/parallel/test-child-process-send-after-close.js +test/js/node/test/parallel/test-child-process-send-cb.js +test/js/node/test/parallel/test-child-process-send-type-error.js +test/js/node/test/parallel/test-child-process-send-utf8.js +test/js/node/test/parallel/test-child-process-set-blocking.js +test/js/node/test/parallel/test-child-process-silent.js +test/js/node/test/parallel/test-child-process-spawn-args.js +test/js/node/test/parallel/test-child-process-spawn-argv0.js +test/js/node/test/parallel/test-child-process-spawn-controller.js +test/js/node/test/parallel/test-child-process-spawn-error.js +test/js/node/test/parallel/test-child-process-spawn-event.js +test/js/node/test/parallel/test-child-process-spawn-shell.js +test/js/node/test/parallel/test-child-process-spawn-timeout-kill-signal.js +test/js/node/test/parallel/test-child-process-spawn-typeerror.js +test/js/node/test/parallel/test-child-process-spawnsync-args.js +test/js/node/test/parallel/test-child-process-spawnsync-env.js +test/js/node/test/parallel/test-child-process-spawnsync-input.js +test/js/node/test/parallel/test-child-process-spawnsync-kill-signal.js +test/js/node/test/parallel/test-child-process-spawnsync-maxbuf.js +test/js/node/test/parallel/test-child-process-spawnsync-shell.js +test/js/node/test/parallel/test-child-process-spawnsync-timeout.js +test/js/node/test/parallel/test-child-process-spawnsync-validation-errors.js +test/js/node/test/parallel/test-child-process-spawnsync.js +test/js/node/test/parallel/test-child-process-stdin-ipc.js +test/js/node/test/parallel/test-child-process-stdin.js +test/js/node/test/parallel/test-child-process-stdio-big-write-end.js 
+test/js/node/test/parallel/test-child-process-stdio-inherit.js +test/js/node/test/parallel/test-child-process-stdio-overlapped.js +test/js/node/test/parallel/test-child-process-stdio.js +test/js/node/test/parallel/test-child-process-stdout-flush-exit.js +test/js/node/test/parallel/test-child-process-stdout-flush.js +test/js/node/test/parallel/test-child-process-stdout-ipc.js +test/js/node/test/parallel/test-cli-eval-event.js +test/js/node/test/parallel/test-cli-options-precedence.js +test/js/node/test/parallel/test-client-request-destroy.js +test/js/node/test/parallel/test-common-countdown.js +test/js/node/test/parallel/test-common-expect-warning.js +test/js/node/test/parallel/test-common-must-not-call.js +test/js/node/test/parallel/test-config-json-schema.js +test/js/node/test/parallel/test-console-assign-undefined.js +test/js/node/test/parallel/test-console-async-write-error.js +test/js/node/test/parallel/test-console-group.js +test/js/node/test/parallel/test-console-instance.js +test/js/node/test/parallel/test-console-issue-43095.js +test/js/node/test/parallel/test-console-log-stdio-broken-dest.js +test/js/node/test/parallel/test-console-log-throw-primitive.js +test/js/node/test/parallel/test-console-methods.js +test/js/node/test/parallel/test-console-no-swallow-stack-overflow.js +test/js/node/test/parallel/test-console-not-call-toString.js +test/js/node/test/parallel/test-console-self-assign.js +test/js/node/test/parallel/test-console-sync-write-error.js +test/js/node/test/parallel/test-console-tty-colors.js +test/js/node/test/parallel/test-console-with-frozen-intrinsics.js +test/js/node/test/parallel/test-coverage-with-inspector-disabled.js +test/js/node/test/parallel/test-crypto-async-sign-verify.js +test/js/node/test/parallel/test-crypto-certificate.js +test/js/node/test/parallel/test-crypto-cipheriv-decipheriv.js +test/js/node/test/parallel/test-crypto-classes.js +test/js/node/test/parallel/test-crypto-dh-constructor.js +test/js/node/test/parallel/test-crypto-dh-curves.js +test/js/node/test/parallel/test-crypto-dh-errors.js +test/js/node/test/parallel/test-crypto-dh-generate-keys.js +test/js/node/test/parallel/test-crypto-dh-leak.js +test/js/node/test/parallel/test-crypto-dh-odd-key.js +test/js/node/test/parallel/test-crypto-dh-padding.js +test/js/node/test/parallel/test-crypto-dh-shared.js +test/js/node/test/parallel/test-crypto-dh.js +test/js/node/test/parallel/test-crypto-domain.js +test/js/node/test/parallel/test-crypto-ecdh-convert-key.js +test/js/node/test/parallel/test-crypto-encoding-validation-error.js +test/js/node/test/parallel/test-crypto-from-binary.js +test/js/node/test/parallel/test-crypto-gcm-explicit-short-tag.js +test/js/node/test/parallel/test-crypto-gcm-implicit-short-tag.js +test/js/node/test/parallel/test-crypto-getcipherinfo.js +test/js/node/test/parallel/test-crypto-hash-stream-pipe.js +test/js/node/test/parallel/test-crypto-hash.js +test/js/node/test/parallel/test-crypto-hkdf.js +test/js/node/test/parallel/test-crypto-hmac.js +test/js/node/test/parallel/test-crypto-key-objects.js +test/js/node/test/parallel/test-crypto-keygen-async-dsa-key-object.js +test/js/node/test/parallel/test-crypto-keygen-async-dsa.js +test/js/node/test/parallel/test-crypto-keygen-async-elliptic-curve-jwk-ec.js +test/js/node/test/parallel/test-crypto-keygen-async-elliptic-curve-jwk-rsa.js +test/js/node/test/parallel/test-crypto-keygen-async-elliptic-curve-jwk.js +test/js/node/test/parallel/test-crypto-keygen-async-encrypted-private-key-der.js 
+test/js/node/test/parallel/test-crypto-keygen-async-encrypted-private-key.js +test/js/node/test/parallel/test-crypto-keygen-async-explicit-elliptic-curve-encrypted-p256.js +test/js/node/test/parallel/test-crypto-keygen-async-explicit-elliptic-curve-encrypted.js.js +test/js/node/test/parallel/test-crypto-keygen-async-explicit-elliptic-curve.js +test/js/node/test/parallel/test-crypto-keygen-async-named-elliptic-curve-encrypted-p256.js +test/js/node/test/parallel/test-crypto-keygen-async-named-elliptic-curve-encrypted.js +test/js/node/test/parallel/test-crypto-keygen-async-named-elliptic-curve.js +test/js/node/test/parallel/test-crypto-keygen-async-rsa.js +test/js/node/test/parallel/test-crypto-keygen-bit-length.js +test/js/node/test/parallel/test-crypto-keygen-duplicate-deprecated-option.js +test/js/node/test/parallel/test-crypto-keygen-eddsa.js +test/js/node/test/parallel/test-crypto-keygen-empty-passphrase-no-error.js +test/js/node/test/parallel/test-crypto-keygen-empty-passphrase-no-prompt.js +test/js/node/test/parallel/test-crypto-keygen-invalid-parameter-encoding-dsa.js +test/js/node/test/parallel/test-crypto-keygen-invalid-parameter-encoding-ec.js +test/js/node/test/parallel/test-crypto-keygen-key-object-without-encoding.js +test/js/node/test/parallel/test-crypto-keygen-key-objects.js +test/js/node/test/parallel/test-crypto-keygen-missing-oid.js +test/js/node/test/parallel/test-crypto-keygen-non-standard-public-exponent.js +test/js/node/test/parallel/test-crypto-keygen-promisify.js +test/js/node/test/parallel/test-crypto-keygen-rfc8017-9-1.js +test/js/node/test/parallel/test-crypto-keygen-rfc8017-a-2-3.js +test/js/node/test/parallel/test-crypto-keygen-rsa-pss.js +test/js/node/test/parallel/test-crypto-keygen-sync.js +test/js/node/test/parallel/test-crypto-lazy-transform-writable.js +test/js/node/test/parallel/test-crypto-no-algorithm.js +test/js/node/test/parallel/test-crypto-oaep-zero-length.js +test/js/node/test/parallel/test-crypto-oneshot-hash.js +test/js/node/test/parallel/test-crypto-op-during-process-exit.js +test/js/node/test/parallel/test-crypto-padding.js +test/js/node/test/parallel/test-crypto-padding-aes256.js +test/js/node/test/parallel/test-crypto-pbkdf2.js +test/js/node/test/parallel/test-crypto-prime.js +test/js/node/test/parallel/test-crypto-private-decrypt-gh32240.js +test/js/node/test/parallel/test-crypto-psychic-signatures.js +test/js/node/test/parallel/test-crypto-publicDecrypt-fails-first-time.js +test/js/node/test/parallel/test-crypto-random.js +test/js/node/test/parallel/test-crypto-randomfillsync-regression.js +test/js/node/test/parallel/test-crypto-randomuuid.js +test/js/node/test/parallel/test-crypto-scrypt.js +test/js/node/test/parallel/test-crypto-secret-keygen.js +test/js/node/test/parallel/test-crypto-sign-verify.js +test/js/node/test/parallel/test-crypto-stream.js +test/js/node/test/parallel/test-crypto-subtle-zero-length.js +test/js/node/test/parallel/test-crypto-update-encoding.js +test/js/node/test/parallel/test-crypto-verify-failure.js +test/js/node/test/parallel/test-crypto-webcrypto-aes-decrypt-tag-too-small.js +test/js/node/test/parallel/test-crypto-worker-thread.js +test/js/node/test/parallel/test-crypto-x509.js +test/js/node/test/parallel/test-datetime-change-notify.js +test/js/node/test/parallel/test-debug-process.js +test/js/node/test/parallel/test-debugger-backtrace.js +test/js/node/test/parallel/test-debugger-exec.js +test/js/node/test/parallel/test-debugger-invalid-json.mjs +test/js/node/test/parallel/test-debugger-low-level.js 
+test/js/node/test/parallel/test-debugger-preserve-breaks.js
+test/js/node/test/parallel/test-debugger-repeat-last.js
+test/js/node/test/parallel/test-debugger-restart-message.js
+test/js/node/test/parallel/test-delayed-require.js
+test/js/node/test/parallel/test-destroy-socket-in-lookup.js
+test/js/node/test/parallel/test-dgram-abort-closed.js
+test/js/node/test/parallel/test-dgram-address.js
+test/js/node/test/parallel/test-dgram-async-dispose.mjs
+test/js/node/test/parallel/test-dgram-bind-default-address.js
+test/js/node/test/parallel/test-dgram-bind-error-repeat.js
+test/js/node/test/parallel/test-dgram-bind-socket-close-before-lookup.js
+test/js/node/test/parallel/test-dgram-bind.js
+test/js/node/test/parallel/test-dgram-bytes-length.js
+test/js/node/test/parallel/test-dgram-close-during-bind.js
+test/js/node/test/parallel/test-dgram-close-in-listening.js
+test/js/node/test/parallel/test-dgram-close-is-not-callback.js
+test/js/node/test/parallel/test-dgram-close-signal.js
+test/js/node/test/parallel/test-dgram-close.js
+test/js/node/test/parallel/test-dgram-cluster-close-during-bind.js
+test/js/node/test/parallel/test-dgram-cluster-close-in-listening.js
+test/js/node/test/parallel/test-dgram-connect-send-callback-buffer-length.js
+test/js/node/test/parallel/test-dgram-connect-send-callback-buffer.js
+test/js/node/test/parallel/test-dgram-connect-send-callback-multi-buffer.js
+test/js/node/test/parallel/test-dgram-connect-send-default-host.js
+test/js/node/test/parallel/test-dgram-connect-send-empty-array.js
+test/js/node/test/parallel/test-dgram-connect-send-empty-buffer.js
+test/js/node/test/parallel/test-dgram-connect-send-empty-packet.js
+test/js/node/test/parallel/test-dgram-connect-send-multi-buffer-copy.js
+test/js/node/test/parallel/test-dgram-connect-send-multi-string-array.js
+test/js/node/test/parallel/test-dgram-connect.js
+test/js/node/test/parallel/test-dgram-custom-lookup.js
+test/js/node/test/parallel/test-dgram-deprecation-error.js
+test/js/node/test/parallel/test-dgram-error-message-address.js
+test/js/node/test/parallel/test-dgram-implicit-bind.js
+test/js/node/test/parallel/test-dgram-ipv6only.js
+test/js/node/test/parallel/test-dgram-listen-after-bind.js
+test/js/node/test/parallel/test-dgram-membership.js
+test/js/node/test/parallel/test-dgram-msgsize.js
+test/js/node/test/parallel/test-dgram-multicast-loopback.js
+test/js/node/test/parallel/test-dgram-multicast-set-interface.js
+test/js/node/test/parallel/test-dgram-multicast-setTTL.js
+test/js/node/test/parallel/test-dgram-oob-buffer.js
+test/js/node/test/parallel/test-dgram-recv-error.js
+test/js/node/test/parallel/test-dgram-ref.js
+test/js/node/test/parallel/test-dgram-reuseport.js
+test/js/node/test/parallel/test-dgram-send-address-types.js
+test/js/node/test/parallel/test-dgram-send-bad-arguments.js
+test/js/node/test/parallel/test-dgram-send-callback-buffer-empty-address.js
+test/js/node/test/parallel/test-dgram-send-callback-buffer-length-empty-address.js
+test/js/node/test/parallel/test-dgram-send-callback-buffer-length.js
+test/js/node/test/parallel/test-dgram-send-callback-buffer.js
+test/js/node/test/parallel/test-dgram-send-callback-multi-buffer-empty-address.js
+test/js/node/test/parallel/test-dgram-send-callback-multi-buffer.js
+test/js/node/test/parallel/test-dgram-send-callback-recursive.js
+test/js/node/test/parallel/test-dgram-send-cb-quelches-error.js
+test/js/node/test/parallel/test-dgram-send-default-host.js
+test/js/node/test/parallel/test-dgram-send-empty-array.js
+test/js/node/test/parallel/test-dgram-send-empty-buffer.js
+test/js/node/test/parallel/test-dgram-send-empty-packet.js
+test/js/node/test/parallel/test-dgram-send-error.js
+test/js/node/test/parallel/test-dgram-send-invalid-msg-type.js
+test/js/node/test/parallel/test-dgram-send-multi-buffer-copy.js
+test/js/node/test/parallel/test-dgram-send-multi-string-array.js
+test/js/node/test/parallel/test-dgram-sendto.js
+test/js/node/test/parallel/test-dgram-setBroadcast.js
+test/js/node/test/parallel/test-dgram-setTTL.js
+test/js/node/test/parallel/test-dgram-udp4.js
+test/js/node/test/parallel/test-dgram-udp6-link-local-address.js
+test/js/node/test/parallel/test-dgram-udp6-send-default-host.js
+test/js/node/test/parallel/test-dgram-unref-in-cluster.js
+test/js/node/test/parallel/test-dgram-unref.js
+test/js/node/test/parallel/test-diagnostics-channel-bind-store.js
+test/js/node/test/parallel/test-diagnostics-channel-has-subscribers.js
+test/js/node/test/parallel/test-diagnostics-channel-object-channel-pub-sub.js
+test/js/node/test/parallel/test-diagnostics-channel-pub-sub.js
+test/js/node/test/parallel/test-diagnostics-channel-safe-subscriber-errors.js
+test/js/node/test/parallel/test-diagnostics-channel-symbol-named.js
+test/js/node/test/parallel/test-diagnostics-channel-sync-unsubscribe.js
+test/js/node/test/parallel/test-diagnostics-channel-tracing-channel-callback-error.js
+test/js/node/test/parallel/test-diagnostics-channel-tracing-channel-callback-run-stores.js
+test/js/node/test/parallel/test-diagnostics-channel-tracing-channel-callback.js
+test/js/node/test/parallel/test-diagnostics-channel-tracing-channel-promise-error.js
+test/js/node/test/parallel/test-diagnostics-channel-tracing-channel-promise-run-stores.js
+test/js/node/test/parallel/test-diagnostics-channel-tracing-channel-promise.js
+test/js/node/test/parallel/test-diagnostics-channel-tracing-channel-sync-error.js
+test/js/node/test/parallel/test-diagnostics-channel-tracing-channel-sync-run-stores.js
+test/js/node/test/parallel/test-diagnostics-channel-tracing-channel-sync.js
+test/js/node/test/parallel/test-diagnostics-channel-udp.js
+test/js/node/test/parallel/test-domain-crypto.js
+test/js/node/test/parallel/test-domain-ee-error-listener.js
+test/js/node/test/parallel/test-domain-nested-throw.js
+test/js/node/test/parallel/test-domain-vm-promise-isolation.js
+test/js/node/test/parallel/test-domexception-cause.js
+test/js/node/test/parallel/test-dsa-fips-invalid-key.js
+test/js/node/test/parallel/test-emit-after-uncaught-exception.js
+test/js/node/test/parallel/test-error-prepare-stack-trace.js
+test/js/node/test/parallel/test-eslint-alphabetize-errors.js
+test/js/node/test/parallel/test-eslint-alphabetize-primordials.js
+test/js/node/test/parallel/test-eslint-async-iife-no-unused-result.js
+test/js/node/test/parallel/test-eslint-avoid-prototype-pollution.js
+test/js/node/test/parallel/test-eslint-crypto-check.js
+test/js/node/test/parallel/test-eslint-documented-deprecation-codes.js
+test/js/node/test/parallel/test-eslint-documented-errors.js
+test/js/node/test/parallel/test-eslint-duplicate-requires.js
+test/js/node/test/parallel/test-eslint-eslint-check.js
+test/js/node/test/parallel/test-eslint-inspector-check.js
+test/js/node/test/parallel/test-eslint-lowercase-name-for-primitive.js
+test/js/node/test/parallel/test-eslint-no-array-destructuring.js
+test/js/node/test/parallel/test-eslint-no-unescaped-regexp-dot.js
+test/js/node/test/parallel/test-eslint-non-ascii-character.js
+test/js/node/test/parallel/test-eslint-prefer-assert-iferror.js
+test/js/node/test/parallel/test-eslint-prefer-assert-methods.js
+test/js/node/test/parallel/test-eslint-prefer-common-mustnotcall.js
+test/js/node/test/parallel/test-eslint-prefer-common-mustsucceed.js
+test/js/node/test/parallel/test-eslint-prefer-optional-chaining.js
+test/js/node/test/parallel/test-eslint-prefer-primordials.js
+test/js/node/test/parallel/test-eslint-prefer-proto.js
+test/js/node/test/parallel/test-eslint-prefer-util-format-errors.js
+test/js/node/test/parallel/test-eslint-require-common-first.js
+test/js/node/test/parallel/test-eslint-required-modules.js
+test/js/node/test/parallel/test-eval-strict-referenceerror.js
+test/js/node/test/parallel/test-eval.js
+test/js/node/test/parallel/test-event-capture-rejections.js
+test/js/node/test/parallel/test-event-emitter-add-listeners.js
+test/js/node/test/parallel/test-event-emitter-check-listener-leaks.js
+test/js/node/test/parallel/test-event-emitter-emit-context.js
+test/js/node/test/parallel/test-event-emitter-error-monitor.js
+test/js/node/test/parallel/test-event-emitter-errors.js
+test/js/node/test/parallel/test-event-emitter-get-max-listeners.js
+test/js/node/test/parallel/test-event-emitter-invalid-listener.js
+test/js/node/test/parallel/test-event-emitter-listener-count.js
+test/js/node/test/parallel/test-event-emitter-listeners-side-effects.js
+test/js/node/test/parallel/test-event-emitter-listeners.js
+test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-null.js
+test/js/node/test/parallel/test-event-emitter-max-listeners-warning-for-symbol.js
+test/js/node/test/parallel/test-event-emitter-max-listeners-warning.js
+test/js/node/test/parallel/test-event-emitter-max-listeners.js
+test/js/node/test/parallel/test-event-emitter-method-names.js
+test/js/node/test/parallel/test-event-emitter-modify-in-emit.js
+test/js/node/test/parallel/test-event-emitter-no-error-provided-to-error-event.js
+test/js/node/test/parallel/test-event-emitter-num-args.js
+test/js/node/test/parallel/test-event-emitter-once.js
+test/js/node/test/parallel/test-event-emitter-prepend.js
+test/js/node/test/parallel/test-event-emitter-remove-all-listeners.js
+test/js/node/test/parallel/test-event-emitter-remove-listeners.js
+test/js/node/test/parallel/test-event-emitter-set-max-listeners-side-effects.js
+test/js/node/test/parallel/test-event-emitter-special-event-names.js
+test/js/node/test/parallel/test-event-emitter-subclass.js
+test/js/node/test/parallel/test-event-emitter-symbols.js
+test/js/node/test/parallel/test-event-target.js
+test/js/node/test/parallel/test-events-add-abort-listener.mjs
+test/js/node/test/parallel/test-events-customevent.js
+test/js/node/test/parallel/test-events-getmaxlisteners.js
+test/js/node/test/parallel/test-events-list.js
+test/js/node/test/parallel/test-events-listener-count-with-listener.js
+test/js/node/test/parallel/test-events-on-async-iterator.js
+test/js/node/test/parallel/test-events-once.js
+test/js/node/test/parallel/test-events-static-geteventlisteners.js
+test/js/node/test/parallel/test-events-uncaught-exception-stack.js
+test/js/node/test/parallel/test-eventsource-disabled.js
+test/js/node/test/parallel/test-eventtarget-once-twice.js
+test/js/node/test/parallel/test-eventtarget.js
+test/js/node/test/parallel/test-exception-handler.js
+test/js/node/test/parallel/test-exception-handler2.js
+test/js/node/test/parallel/test-fetch.mjs
+test/js/node/test/parallel/test-global-domexception.js
+test/js/node/test/parallel/test-global-encoder.js
+test/js/node/test/parallel/test-global-webcrypto.js
+test/js/node/test/parallel/test-handle-wrap-close-abort.js
+test/js/node/test/parallel/test-http-1.0-keep-alive.js
+test/js/node/test/parallel/test-http-abort-before-end.js
+test/js/node/test/parallel/test-http-abort-stream-end.js
+test/js/node/test/parallel/test-http-aborted.js
+test/js/node/test/parallel/test-http-agent-false.js
+test/js/node/test/parallel/test-http-agent-getname.js
+test/js/node/test/parallel/test-http-agent-keepalive-delay.js
+test/js/node/test/parallel/test-http-agent-no-protocol.js
+test/js/node/test/parallel/test-http-agent-null.js
+test/js/node/test/parallel/test-http-agent-remove.js
+test/js/node/test/parallel/test-http-agent-uninitialized-with-handle.js
+test/js/node/test/parallel/test-http-agent-uninitialized.js
+test/js/node/test/parallel/test-http-allow-content-length-304.js
+test/js/node/test/parallel/test-http-allow-req-after-204-res.js
+test/js/node/test/parallel/test-http-autoselectfamily.js
+test/js/node/test/parallel/test-http-bind-twice.js
+test/js/node/test/parallel/test-http-blank-header.js
+test/js/node/test/parallel/test-http-buffer-sanity.js
+test/js/node/test/parallel/test-http-byteswritten.js
+test/js/node/test/parallel/test-http-catch-uncaughtexception.js
+test/js/node/test/parallel/test-http-chunk-problem.js
+test/js/node/test/parallel/test-http-chunked-smuggling.js
+test/js/node/test/parallel/test-http-chunked.js
+test/js/node/test/parallel/test-http-client-abort-event.js
+test/js/node/test/parallel/test-http-client-abort-response-event.js
+test/js/node/test/parallel/test-http-client-abort.js
+test/js/node/test/parallel/test-http-client-abort2.js
+test/js/node/test/parallel/test-http-client-agent-abort-close-event.js
+test/js/node/test/parallel/test-http-client-check-http-token.js
+test/js/node/test/parallel/test-http-client-close-with-default-agent.js
+test/js/node/test/parallel/test-http-client-defaults.js
+test/js/node/test/parallel/test-http-client-encoding.js
+test/js/node/test/parallel/test-http-client-get-url.js
+test/js/node/test/parallel/test-http-client-headers-host-array.js
+test/js/node/test/parallel/test-http-client-input-function.js
+test/js/node/test/parallel/test-http-client-insecure-http-parser-error.js
+test/js/node/test/parallel/test-http-client-invalid-path.js
+test/js/node/test/parallel/test-http-client-keep-alive-hint.js
+test/js/node/test/parallel/test-http-client-keep-alive-release-before-finish.js
+test/js/node/test/parallel/test-http-client-pipe-end.js
+test/js/node/test/parallel/test-http-client-race-2.js
+test/js/node/test/parallel/test-http-client-race.js
+test/js/node/test/parallel/test-http-client-read-in-error.js
+test/js/node/test/parallel/test-http-client-reject-unexpected-agent.js
+test/js/node/test/parallel/test-http-client-req-error-dont-double-fire.js
+test/js/node/test/parallel/test-http-client-request-options.js
+test/js/node/test/parallel/test-http-client-res-destroyed.js
+test/js/node/test/parallel/test-http-client-timeout-agent.js
+test/js/node/test/parallel/test-http-client-timeout-connect-listener.js
+test/js/node/test/parallel/test-http-client-timeout-event.js
+test/js/node/test/parallel/test-http-client-timeout-option.js
+test/js/node/test/parallel/test-http-client-timeout.js
+test/js/node/test/parallel/test-http-client-unescaped-path.js
+test/js/node/test/parallel/test-http-client-upload-buf.js
+test/js/node/test/parallel/test-http-client-upload.js
+test/js/node/test/parallel/test-http-client-with-create-connection.js
+test/js/node/test/parallel/test-http-common.js
+test/js/node/test/parallel/test-http-conn-reset.js
+test/js/node/test/parallel/test-http-content-length-mismatch.js
+test/js/node/test/parallel/test-http-contentLength0.js
+test/js/node/test/parallel/test-http-date-header.js
+test/js/node/test/parallel/test-http-decoded-auth.js
+test/js/node/test/parallel/test-http-default-encoding.js
+test/js/node/test/parallel/test-http-dns-error.js
+test/js/node/test/parallel/test-http-double-content-length.js
+test/js/node/test/parallel/test-http-dummy-characters-smuggling.js
+test/js/node/test/parallel/test-http-early-hints-invalid-argument.js
+test/js/node/test/parallel/test-http-end-throw-socket-handling.js
+test/js/node/test/parallel/test-http-eof-on-connect.js
+test/js/node/test/parallel/test-http-exceptions.js
+test/js/node/test/parallel/test-http-expect-continue.js
+test/js/node/test/parallel/test-http-expect-handling.js
+test/js/node/test/parallel/test-http-extra-response.js
+test/js/node/test/parallel/test-http-flush-headers.js
+test/js/node/test/parallel/test-http-flush-response-headers.js
+test/js/node/test/parallel/test-http-full-response.js
+test/js/node/test/parallel/test-http-get-pipeline-problem.js
+test/js/node/test/parallel/test-http-head-request.js
+test/js/node/test/parallel/test-http-head-response-has-no-body-end-implicit-headers.js
+test/js/node/test/parallel/test-http-head-response-has-no-body-end.js
+test/js/node/test/parallel/test-http-head-response-has-no-body.js
+test/js/node/test/parallel/test-http-head-throw-on-response-body-write.js
+test/js/node/test/parallel/test-http-header-obstext.js
+test/js/node/test/parallel/test-http-header-overflow.js
+test/js/node/test/parallel/test-http-header-owstext.js
+test/js/node/test/parallel/test-http-header-read.js
+test/js/node/test/parallel/test-http-header-validators.js
+test/js/node/test/parallel/test-http-hex-write.js
+test/js/node/test/parallel/test-http-highwatermark.js
+test/js/node/test/parallel/test-http-host-headers.js
+test/js/node/test/parallel/test-http-hostname-typechecking.js
+test/js/node/test/parallel/test-http-import-websocket.js
+test/js/node/test/parallel/test-http-incoming-message-destroy.js
+test/js/node/test/parallel/test-http-invalid-path-chars.js
+test/js/node/test/parallel/test-http-invalid-te.js
+test/js/node/test/parallel/test-http-invalid-urls.js
+test/js/node/test/parallel/test-http-invalidheaderfield.js
+test/js/node/test/parallel/test-http-invalidheaderfield2.js
+test/js/node/test/parallel/test-http-keep-alive-drop-requests.js
+test/js/node/test/parallel/test-http-keep-alive-pipeline-max-requests.js
+test/js/node/test/parallel/test-http-keep-alive-timeout-custom.js
+test/js/node/test/parallel/test-http-keep-alive-timeout-race-condition.js
+test/js/node/test/parallel/test-http-listening.js
+test/js/node/test/parallel/test-http-malformed-request.js
+test/js/node/test/parallel/test-http-many-ended-pipelines.js
+test/js/node/test/parallel/test-http-max-header-size.js
+test/js/node/test/parallel/test-http-methods.js
+test/js/node/test/parallel/test-http-missing-header-separator-cr.js
+test/js/node/test/parallel/test-http-missing-header-separator-lf.js
+test/js/node/test/parallel/test-http-no-content-length.js
+test/js/node/test/parallel/test-http-outgoing-buffer.js
+test/js/node/test/parallel/test-http-outgoing-destroy.js
+test/js/node/test/parallel/test-http-outgoing-end-multiple.js
+test/js/node/test/parallel/test-http-outgoing-end-types.js
+test/js/node/test/parallel/test-http-outgoing-finish-writable.js
+test/js/node/test/parallel/test-http-outgoing-finish.js
+test/js/node/test/parallel/test-http-outgoing-finished.js
+test/js/node/test/parallel/test-http-outgoing-first-chunk-singlebyte-encoding.js
+test/js/node/test/parallel/test-http-outgoing-internal-headernames-getter.js
+test/js/node/test/parallel/test-http-outgoing-internal-headernames-setter.js
+test/js/node/test/parallel/test-http-outgoing-internal-headers.js
+test/js/node/test/parallel/test-http-outgoing-message-write-callback.js
+test/js/node/test/parallel/test-http-outgoing-proto.js
+test/js/node/test/parallel/test-http-outgoing-settimeout.js
+test/js/node/test/parallel/test-http-outgoing-writableFinished.js
+test/js/node/test/parallel/test-http-outgoing-write-types.js
+test/js/node/test/parallel/test-http-parser-bad-ref.js
+test/js/node/test/parallel/test-http-parser-lazy-loaded.js
+test/js/node/test/parallel/test-http-parser.js
+test/js/node/test/parallel/test-http-pause-no-dump.js
+test/js/node/test/parallel/test-http-pause-resume-one-end.js
+test/js/node/test/parallel/test-http-pause.js
+test/js/node/test/parallel/test-http-pipe-fs.js
+test/js/node/test/parallel/test-http-pipeline-requests-connection-leak.js
+test/js/node/test/parallel/test-http-pipeline-socket-parser-typeerror.js
+test/js/node/test/parallel/test-http-proxy.js
+test/js/node/test/parallel/test-http-readable-data-event.js
+test/js/node/test/parallel/test-http-request-agent.js
+test/js/node/test/parallel/test-http-request-arguments.js
+test/js/node/test/parallel/test-http-request-end-twice.js
+test/js/node/test/parallel/test-http-request-end.js
+test/js/node/test/parallel/test-http-request-invalid-method-error.js
+test/js/node/test/parallel/test-http-request-large-payload.js
+test/js/node/test/parallel/test-http-request-method-delete-payload.js
+test/js/node/test/parallel/test-http-request-methods.js
+test/js/node/test/parallel/test-http-request-smuggling-content-length.js
+test/js/node/test/parallel/test-http-res-write-after-end.js
+test/js/node/test/parallel/test-http-res-write-end-dont-take-array.js
+test/js/node/test/parallel/test-http-response-add-header-after-sent.js
+test/js/node/test/parallel/test-http-response-close.js
+test/js/node/test/parallel/test-http-response-cork.js
+test/js/node/test/parallel/test-http-response-multi-content-length.js
+test/js/node/test/parallel/test-http-response-readable.js
+test/js/node/test/parallel/test-http-response-remove-header-after-sent.js
+test/js/node/test/parallel/test-http-response-setheaders.js
+test/js/node/test/parallel/test-http-response-splitting.js
+test/js/node/test/parallel/test-http-response-status-message.js
+test/js/node/test/parallel/test-http-response-statuscode.js
+test/js/node/test/parallel/test-http-response-writehead-returns-this.js
+test/js/node/test/parallel/test-http-server-async-dispose.js
+test/js/node/test/parallel/test-http-server-capture-rejections.js
+test/js/node/test/parallel/test-http-server-close-all.js
+test/js/node/test/parallel/test-http-server-close-destroy-timeout.js
+test/js/node/test/parallel/test-http-server-close-idle-wait-response.js
+test/js/node/test/parallel/test-http-server-de-chunked-trailer.js
+test/js/node/test/parallel/test-http-server-delete-parser.js
+test/js/node/test/parallel/test-http-server-destroy-socket-on-client-error.js
+test/js/node/test/parallel/test-http-server-keep-alive-defaults.js
+test/js/node/test/parallel/test-http-server-keep-alive-max-requests-null.js
+test/js/node/test/parallel/test-http-server-method.query.js
+test/js/node/test/parallel/test-http-server-multiheaders.js
+test/js/node/test/parallel/test-http-server-non-utf8-header.js
+test/js/node/test/parallel/test-http-server-options-incoming-message.js
+test/js/node/test/parallel/test-http-server-options-server-response.js
+test/js/node/test/parallel/test-http-server-reject-chunked-with-content-length.js
+test/js/node/test/parallel/test-http-server-stale-close.js
+test/js/node/test/parallel/test-http-server-timeouts-validation.js
+test/js/node/test/parallel/test-http-server-write-after-end.js
+test/js/node/test/parallel/test-http-server-write-end-after-end.js
+test/js/node/test/parallel/test-http-set-cookies.js
+test/js/node/test/parallel/test-http-set-header-chain.js
+test/js/node/test/parallel/test-http-set-max-idle-http-parser.js
+test/js/node/test/parallel/test-http-socket-error-listeners.js
+test/js/node/test/parallel/test-http-status-code.js
+test/js/node/test/parallel/test-http-status-message.js
+test/js/node/test/parallel/test-http-status-reason-invalid-chars.js
+test/js/node/test/parallel/test-http-timeout-client-warning.js
+test/js/node/test/parallel/test-http-timeout-overflow.js
+test/js/node/test/parallel/test-http-timeout.js
+test/js/node/test/parallel/test-http-uncaught-from-request-callback.js
+test/js/node/test/parallel/test-http-upgrade-reconsume-stream.js
+test/js/node/test/parallel/test-http-url.parse-auth-with-header-in-request.js
+test/js/node/test/parallel/test-http-url.parse-auth.js
+test/js/node/test/parallel/test-http-url.parse-basic.js
+test/js/node/test/parallel/test-http-url.parse-only-support-http-https-protocol.js
+test/js/node/test/parallel/test-http-url.parse-path.js
+test/js/node/test/parallel/test-http-url.parse-post.js
+test/js/node/test/parallel/test-http-url.parse-search.js
+test/js/node/test/parallel/test-http-wget.js
+test/js/node/test/parallel/test-http-write-callbacks.js
+test/js/node/test/parallel/test-http-write-empty-string.js
+test/js/node/test/parallel/test-http-write-head-2.js
+test/js/node/test/parallel/test-http-write-head.js
+test/js/node/test/parallel/test-http-zero-length-write.js
+test/js/node/test/parallel/test-http-zerolengthbuffer.js
+test/js/node/test/parallel/test-http2-altsvc.js
+test/js/node/test/parallel/test-http2-cancel-while-client-reading.js
+test/js/node/test/parallel/test-http2-clean-output.js
+test/js/node/test/parallel/test-http2-client-port-80.js
+test/js/node/test/parallel/test-http2-client-priority-before-connect.js
+test/js/node/test/parallel/test-http2-client-request-listeners-warning.js
+test/js/node/test/parallel/test-http2-client-request-options-errors.js
+test/js/node/test/parallel/test-http2-client-rststream-before-connect.js
+test/js/node/test/parallel/test-http2-client-setLocalWindowSize.js
+test/js/node/test/parallel/test-http2-client-setNextStreamID-errors.js
+test/js/node/test/parallel/test-http2-client-shutdown-before-connect.js
+test/js/node/test/parallel/test-http2-client-stream-destroy-before-connect.js
+test/js/node/test/parallel/test-http2-client-upload-reject.js
+test/js/node/test/parallel/test-http2-client-upload.js
+test/js/node/test/parallel/test-http2-client-write-before-connect.js
+test/js/node/test/parallel/test-http2-client-write-empty-string.js
+test/js/node/test/parallel/test-http2-close-while-writing.js
+test/js/node/test/parallel/test-http2-compat-aborted.js
+test/js/node/test/parallel/test-http2-compat-client-upload-reject.js
+test/js/node/test/parallel/test-http2-compat-errors.js
+test/js/node/test/parallel/test-http2-compat-expect-continue-check.js
+test/js/node/test/parallel/test-http2-compat-expect-continue.js
+test/js/node/test/parallel/test-http2-compat-expect-handling.js
+test/js/node/test/parallel/test-http2-compat-method-connect.js
+test/js/node/test/parallel/test-http2-compat-serverrequest-end.js
+test/js/node/test/parallel/test-http2-compat-serverrequest-headers.js
+test/js/node/test/parallel/test-http2-compat-serverrequest-host.js
+test/js/node/test/parallel/test-http2-compat-serverrequest-pause.js
+test/js/node/test/parallel/test-http2-compat-serverrequest-pipe.js
+test/js/node/test/parallel/test-http2-compat-serverrequest-settimeout.js
+test/js/node/test/parallel/test-http2-compat-serverrequest-trailers.js
+test/js/node/test/parallel/test-http2-compat-serverrequest.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-close.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-destroy.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-end-after-statuses-without-body.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-finished.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-flushheaders.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-headers-send-date.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-settimeout.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-statuscode.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property-set.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage-property.js
+test/js/node/test/parallel/test-http2-compat-serverresponse-write.js
+test/js/node/test/parallel/test-http2-compat-serverresponse.js
+test/js/node/test/parallel/test-http2-compat-socket-destroy-delayed.js
+test/js/node/test/parallel/test-http2-compat-write-early-hints-invalid-argument-type.js
+test/js/node/test/parallel/test-http2-compat-write-early-hints-invalid-argument-value.js
+test/js/node/test/parallel/test-http2-compat-write-early-hints.js
+test/js/node/test/parallel/test-http2-compat-write-head-after-close.js
+test/js/node/test/parallel/test-http2-compat-write-head-destroyed.js
+test/js/node/test/parallel/test-http2-connect-tls-with-delay.js
+test/js/node/test/parallel/test-http2-connect.js
+test/js/node/test/parallel/test-http2-cookies.js
+test/js/node/test/parallel/test-http2-create-client-connect.js
+test/js/node/test/parallel/test-http2-create-client-session.js
+test/js/node/test/parallel/test-http2-createsecureserver-options.js
+test/js/node/test/parallel/test-http2-createserver-options.js
+test/js/node/test/parallel/test-http2-createwritereq.js
+test/js/node/test/parallel/test-http2-date-header.js
+test/js/node/test/parallel/test-http2-destroy-after-write.js
+test/js/node/test/parallel/test-http2-dont-override.js
+test/js/node/test/parallel/test-http2-endafterheaders.js
+test/js/node/test/parallel/test-http2-error-order.js
+test/js/node/test/parallel/test-http2-forget-closed-streams.js
+test/js/node/test/parallel/test-http2-goaway-opaquedata.js
+test/js/node/test/parallel/test-http2-graceful-close.js
+test/js/node/test/parallel/test-http2-head-request.js
+test/js/node/test/parallel/test-http2-info-headers.js
+test/js/node/test/parallel/test-http2-invalidargtypes-errors.js
+test/js/node/test/parallel/test-http2-large-write-close.js
+test/js/node/test/parallel/test-http2-large-write-destroy.js
+test/js/node/test/parallel/test-http2-large-write-multiple-requests.js
+test/js/node/test/parallel/test-http2-large-writes-session-memory-leak.js
+test/js/node/test/parallel/test-http2-malformed-altsvc.js
+test/js/node/test/parallel/test-http2-many-writes-and-destroy.js
+test/js/node/test/parallel/test-http2-max-session-memory-leak.js
+test/js/node/test/parallel/test-http2-methods.js
+test/js/node/test/parallel/test-http2-misbehaving-flow-control-paused.js
+test/js/node/test/parallel/test-http2-misbehaving-flow-control.js
+test/js/node/test/parallel/test-http2-misused-pseudoheaders.js
+test/js/node/test/parallel/test-http2-multiheaders-raw.js
+test/js/node/test/parallel/test-http2-multiheaders.js
+test/js/node/test/parallel/test-http2-multiplex.js
+test/js/node/test/parallel/test-http2-multistream-destroy-on-read-tls.js
+test/js/node/test/parallel/test-http2-no-more-streams.js
+test/js/node/test/parallel/test-http2-no-wanttrailers-listener.js
+test/js/node/test/parallel/test-http2-options-max-headers-block-length.js
+test/js/node/test/parallel/test-http2-origin.js
+test/js/node/test/parallel/test-http2-pipe-named-pipe.js
+test/js/node/test/parallel/test-http2-pipe.js
+test/js/node/test/parallel/test-http2-premature-close.js
+test/js/node/test/parallel/test-http2-priority-cycle-.js
+test/js/node/test/parallel/test-http2-request-remove-connect-listener.js
+test/js/node/test/parallel/test-http2-request-response-proto.js
+test/js/node/test/parallel/test-http2-res-corked.js
+test/js/node/test/parallel/test-http2-respond-errors.js
+test/js/node/test/parallel/test-http2-respond-file-204.js
+test/js/node/test/parallel/test-http2-respond-file-304.js
+test/js/node/test/parallel/test-http2-respond-file-404.js
+test/js/node/test/parallel/test-http2-respond-file-compat.js
+test/js/node/test/parallel/test-http2-respond-file-error-dir.js
+test/js/node/test/parallel/test-http2-respond-file-error-pipe-offset.js
+test/js/node/test/parallel/test-http2-respond-file-errors.js
+test/js/node/test/parallel/test-http2-respond-file-fd-errors.js
+test/js/node/test/parallel/test-http2-respond-file-fd-invalid.js
+test/js/node/test/parallel/test-http2-respond-file-fd-range.js
+test/js/node/test/parallel/test-http2-respond-file-fd.js
+test/js/node/test/parallel/test-http2-respond-file-filehandle.js
+test/js/node/test/parallel/test-http2-respond-file-range.js
+test/js/node/test/parallel/test-http2-respond-file.js
+test/js/node/test/parallel/test-http2-respond-no-data.js
+test/js/node/test/parallel/test-http2-respond-with-file-connection-abort.js
+test/js/node/test/parallel/test-http2-sent-headers.js
+test/js/node/test/parallel/test-http2-serve-file.js
+test/js/node/test/parallel/test-http2-server-async-dispose.js
+test/js/node/test/parallel/test-http2-server-close-callback.js
+test/js/node/test/parallel/test-http2-server-close-idle-connection.js
+test/js/node/test/parallel/test-http2-server-errors.js
+test/js/node/test/parallel/test-http2-server-rst-before-respond.js
+test/js/node/test/parallel/test-http2-server-session-destroy.js
+test/js/node/test/parallel/test-http2-server-setLocalWindowSize.js
+test/js/node/test/parallel/test-http2-server-shutdown-options-errors.js
+test/js/node/test/parallel/test-http2-session-gc-while-write-scheduled.js
+test/js/node/test/parallel/test-http2-session-stream-state.js
+test/js/node/test/parallel/test-http2-session-timeout.js
+test/js/node/test/parallel/test-http2-single-headers.js
+test/js/node/test/parallel/test-http2-socket-proxy-handler-for-has.js
+test/js/node/test/parallel/test-http2-status-code.js
+test/js/node/test/parallel/test-http2-stream-destroy-event-order.js
+test/js/node/test/parallel/test-http2-timeouts.js
+test/js/node/test/parallel/test-http2-tls-disconnect.js
+test/js/node/test/parallel/test-http2-too-many-headers.js
+test/js/node/test/parallel/test-http2-trailers-after-session-close.js
+test/js/node/test/parallel/test-http2-trailers.js
+test/js/node/test/parallel/test-http2-unbound-socket-proxy.js
+test/js/node/test/parallel/test-http2-write-callbacks.js
+test/js/node/test/parallel/test-http2-zero-length-header.js
+test/js/node/test/parallel/test-http2-zero-length-write.js
+test/js/node/test/parallel/test-https-agent-constructor.js
+test/js/node/test/parallel/test-https-agent-session-eviction.js
+test/js/node/test/parallel/test-https-agent.js
+test/js/node/test/parallel/test-https-byteswritten.js
+test/js/node/test/parallel/test-https-client-get-url.js
+test/js/node/test/parallel/test-https-client-renegotiation-limit.js
+test/js/node/test/parallel/test-https-close.js
+test/js/node/test/parallel/test-https-connecting-to-http.js
+test/js/node/test/parallel/test-https-eof-for-eom.js
+test/js/node/test/parallel/test-https-foafssl.js
+test/js/node/test/parallel/test-https-localaddress-bind-error.js
+test/js/node/test/parallel/test-https-options-boolean-check.js
+test/js/node/test/parallel/test-https-selfsigned-no-keycertsign-no-crash.js
+test/js/node/test/parallel/test-https-server-async-dispose.js
+test/js/node/test/parallel/test-https-server-close-destroy-timeout.js
+test/js/node/test/parallel/test-https-server-headers-timeout.js
+test/js/node/test/parallel/test-https-server-request-timeout.js
+test/js/node/test/parallel/test-https-simple.js
+test/js/node/test/parallel/test-https-socket-options.js
+test/js/node/test/parallel/test-https-truncate.js
+test/js/node/test/parallel/test-https-unix-socket-self-signed.js
+test/js/node/test/parallel/test-icu-env.js
+test/js/node/test/parallel/test-icu-punycode.js
+test/js/node/test/parallel/test-icu-transcode.js
+test/js/node/test/parallel/test-inspect-support-for-node_options.js
+test/js/node/test/parallel/test-inspector-enabled.js
+test/js/node/test/parallel/test-inspector-has-inspector-false.js
+test/js/node/test/parallel/test-inspector-stops-no-file.js
+test/js/node/test/parallel/test-inspector-workers-flat-list.js
+test/js/node/test/parallel/test-instanceof.js
+test/js/node/test/parallel/test-internal-module-require.js
+test/js/node/test/parallel/test-internal-process-binding.js
+test/js/node/test/parallel/test-intl-v8BreakIterator.js
+test/js/node/test/parallel/test-kill-segfault-freebsd.js
+test/js/node/test/parallel/test-listen-fd-detached-inherit.js
+test/js/node/test/parallel/test-listen-fd-detached.js
+test/js/node/test/parallel/test-math-random.js
+test/js/node/test/parallel/test-memory-usage-emfile.js
+test/js/node/test/parallel/test-memory-usage.js
+test/js/node/test/parallel/test-messagechannel.js
+test/js/node/test/parallel/test-messageevent-brandcheck.js
+test/js/node/test/parallel/test-microtask-queue-integration.js
+test/js/node/test/parallel/test-microtask-queue-run-immediate.js
+test/js/node/test/parallel/test-microtask-queue-run.js
+test/js/node/test/parallel/test-mime-api.js
+test/js/node/test/parallel/test-mime-whatwg.js
+test/js/node/test/parallel/test-module-builtin.js
+test/js/node/test/parallel/test-module-cache.js
+test/js/node/test/parallel/test-module-children.js
+test/js/node/test/parallel/test-module-circular-dependency-warning.js
+test/js/node/test/parallel/test-module-circular-symlinks.js
+test/js/node/test/parallel/test-module-create-require.js
+test/js/node/test/parallel/test-module-globalpaths-nodepath.js
+test/js/node/test/parallel/test-module-loading-deprecated.js
+test/js/node/test/parallel/test-module-loading-error.js
+test/js/node/test/parallel/test-module-main-extension-lookup.js
+test/js/node/test/parallel/test-module-main-fail.js
+test/js/node/test/parallel/test-module-main-preserve-symlinks-fail.js
+test/js/node/test/parallel/test-module-multi-extensions.js
+test/js/node/test/parallel/test-module-nodemodulepaths.js
+test/js/node/test/parallel/test-module-parent-deprecation.js
+test/js/node/test/parallel/test-module-parent-setter-deprecation.js
+test/js/node/test/parallel/test-module-prototype-mutation.js
+test/js/node/test/parallel/test-module-readonly.js
+test/js/node/test/parallel/test-module-relative-lookup.js
+test/js/node/test/parallel/test-module-run-main-monkey-patch.js
+test/js/node/test/parallel/test-module-stat.js
+test/js/node/test/parallel/test-module-symlinked-peer-modules.js
+test/js/node/test/parallel/test-module-version.js
+test/js/node/test/parallel/test-module-wrap.js
+test/js/node/test/parallel/test-module-wrapper.js
+test/js/node/test/parallel/test-next-tick-doesnt-hang.js
+test/js/node/test/parallel/test-next-tick-domain.js
+test/js/node/test/parallel/test-next-tick-errors.js
+test/js/node/test/parallel/test-next-tick-fixed-queue-regression.js
+test/js/node/test/parallel/test-next-tick-intentional-starvation.js
+test/js/node/test/parallel/test-next-tick-ordering.js
+test/js/node/test/parallel/test-next-tick-ordering2.js
+test/js/node/test/parallel/test-next-tick-when-exiting.js
+test/js/node/test/parallel/test-next-tick.js
+test/js/node/test/parallel/test-no-addons-resolution-condition.js
+test/js/node/test/parallel/test-no-node-snapshot.js
+test/js/node/test/parallel/test-os-eol.js
+test/js/node/test/parallel/test-os-homedir-no-envvar.js
+test/js/node/test/parallel/test-os-process-priority.js
+test/js/node/test/parallel/test-os-userinfo-handles-getter-errors.js
+test/js/node/test/parallel/test-os.js
+test/js/node/test/parallel/test-outgoing-message-destroy.js
+test/js/node/test/parallel/test-outgoing-message-pipe.js
+test/js/node/test/parallel/test-parse-args.mjs
+test/js/node/test/parallel/test-path-basename.js
+test/js/node/test/parallel/test-path-dirname.js
+test/js/node/test/parallel/test-path-extname.js
+test/js/node/test/parallel/test-path-glob.js
+test/js/node/test/parallel/test-path-isabsolute.js
+test/js/node/test/parallel/test-path-join.js
+test/js/node/test/parallel/test-path-makelong.js
+test/js/node/test/parallel/test-path-normalize.js
+test/js/node/test/parallel/test-path-parse-format.js
+test/js/node/test/parallel/test-path-posix-exists.js
+test/js/node/test/parallel/test-path-posix-relative-on-windows.js
+test/js/node/test/parallel/test-path-relative.js
+test/js/node/test/parallel/test-path-resolve.js
+test/js/node/test/parallel/test-path-win32-exists.js
+test/js/node/test/parallel/test-path-zero-length-strings.js
+test/js/node/test/parallel/test-path.js
+test/js/node/test/parallel/test-perf-gc-crash.js
+test/js/node/test/parallel/test-performance-measure.js
+test/js/node/test/parallel/test-performanceobserver-gc.js
+test/js/node/test/parallel/test-permission-fs-supported.js
+test/js/node/test/parallel/test-pipe-abstract-socket-http.js
+test/js/node/test/parallel/test-pipe-address.js
+test/js/node/test/parallel/test-pipe-file-to-http.js
+test/js/node/test/parallel/test-pipe-head.js
+test/js/node/test/parallel/test-pipe-outgoing-message-data-emitted-after-ended.js
+test/js/node/test/parallel/test-pipe-return-val.js
+test/js/node/test/parallel/test-pipe-writev.js
+test/js/node/test/parallel/test-preload-print-process-argv.js
+test/js/node/test/parallel/test-preload-self-referential.js
+test/js/node/test/parallel/test-process-abort.js
+test/js/node/test/parallel/test-process-argv-0.js
+test/js/node/test/parallel/test-process-assert.js
+test/js/node/test/parallel/test-process-available-memory.js
+test/js/node/test/parallel/test-process-beforeexit-throw-exit.js
+test/js/node/test/parallel/test-process-beforeexit.js
+test/js/node/test/parallel/test-process-binding-util.js
+test/js/node/test/parallel/test-process-chdir-errormessage.js
+test/js/node/test/parallel/test-process-chdir.js
+test/js/node/test/parallel/test-process-config.js
+test/js/node/test/parallel/test-process-constants-noatime.js
+test/js/node/test/parallel/test-process-constrained-memory.js
+test/js/node/test/parallel/test-process-cpuUsage.js
+test/js/node/test/parallel/test-process-default.js
+test/js/node/test/parallel/test-process-dlopen-error-message-crash.js
+test/js/node/test/parallel/test-process-dlopen-undefined-exports.js
+test/js/node/test/parallel/test-process-domain-segfault.js
+test/js/node/test/parallel/test-process-emit.js
+test/js/node/test/parallel/test-process-emitwarning.js
+test/js/node/test/parallel/test-process-env-windows-error-reset.js
+test/js/node/test/parallel/test-process-euid-egid.js
+test/js/node/test/parallel/test-process-exception-capture-errors.js
+test/js/node/test/parallel/test-process-exception-capture-should-abort-on-uncaught.js
+test/js/node/test/parallel/test-process-exception-capture.js
+test/js/node/test/parallel/test-process-execpath.js
+test/js/node/test/parallel/test-process-exit-code-validation.js
+test/js/node/test/parallel/test-process-exit-from-before-exit.js
+test/js/node/test/parallel/test-process-exit-handler.js
+test/js/node/test/parallel/test-process-exit-recursive.js
+test/js/node/test/parallel/test-process-exit.js
+test/js/node/test/parallel/test-process-external-stdio-close-spawn.js
+test/js/node/test/parallel/test-process-external-stdio-close.js
+test/js/node/test/parallel/test-process-features.js
+test/js/node/test/parallel/test-process-getgroups.js
+test/js/node/test/parallel/test-process-hrtime-bigint.js
+test/js/node/test/parallel/test-process-hrtime.js
+test/js/node/test/parallel/test-process-kill-null.js
+test/js/node/test/parallel/test-process-kill-pid.js
+test/js/node/test/parallel/test-process-next-tick.js
+test/js/node/test/parallel/test-process-no-deprecation.js
+test/js/node/test/parallel/test-process-ppid.js
+test/js/node/test/parallel/test-process-really-exit.js
+test/js/node/test/parallel/test-process-release.js
+test/js/node/test/parallel/test-process-remove-all-signal-listeners.js
+test/js/node/test/parallel/test-process-setgroups.js
+test/js/node/test/parallel/test-process-setsourcemapsenabled.js
+test/js/node/test/parallel/test-process-title-cli.js
+test/js/node/test/parallel/test-process-uid-gid.js
+test/js/node/test/parallel/test-process-umask-mask.js
+test/js/node/test/parallel/test-process-umask.js
+test/js/node/test/parallel/test-process-uptime.js
+test/js/node/test/parallel/test-process-warning.js
+test/js/node/test/parallel/test-promise-handled-rejection-no-warning.js
+test/js/node/test/parallel/test-promise-unhandled-default.js
+test/js/node/test/parallel/test-promise-unhandled-error.js
+test/js/node/test/parallel/test-promise-unhandled-flag.js
+test/js/node/test/parallel/test-promise-unhandled-issue-43655.js
+test/js/node/test/parallel/test-promise-unhandled-silent-no-hook.js
+test/js/node/test/parallel/test-promise-unhandled-silent.js
+test/js/node/test/parallel/test-promise-unhandled-throw-handler.js
+test/js/node/test/parallel/test-promise-unhandled-throw.js
+test/js/node/test/parallel/test-promise-unhandled-warn-no-hook.js
+test/js/node/test/parallel/test-promises-unhandled-proxy-rejections.js
+test/js/node/test/parallel/test-promises-unhandled-rejections.js
+test/js/node/test/parallel/test-promises-unhandled-symbol-rejections.js
+test/js/node/test/parallel/test-promises-warning-on-unhandled-rejection.js
+test/js/node/test/parallel/test-punycode.js
+test/js/node/test/parallel/test-querystring-escape.js
+test/js/node/test/parallel/test-querystring-maxKeys-non-finite.js
+test/js/node/test/parallel/test-querystring-multichar-separator.js
+test/js/node/test/parallel/test-querystring.js
+test/js/node/test/parallel/test-queue-microtask.js
+test/js/node/test/parallel/test-quic-internal-endpoint-listen-defaults.js
+test/js/node/test/parallel/test-quic-internal-endpoint-options.js
+test/js/node/test/parallel/test-quic-internal-endpoint-stats-state.js
+test/js/node/test/parallel/test-quic-internal-setcallbacks.js
+test/js/node/test/parallel/test-readable-from-iterator-closing.js
+test/js/node/test/parallel/test-readable-from-web-enqueue-then-close.js
+test/js/node/test/parallel/test-readable-from.js
+test/js/node/test/parallel/test-readable-large-hwm.js
+test/js/node/test/parallel/test-readable-single-end.js
+test/js/node/test/parallel/test-readline-async-iterators-backpressure.js
+test/js/node/test/parallel/test-readline-async-iterators-destroy.js
+test/js/node/test/parallel/test-readline-async-iterators.js
+test/js/node/test/parallel/test-readline-carriage-return-between-chunks.js
+test/js/node/test/parallel/test-readline-csi.js
+test/js/node/test/parallel/test-readline-emit-keypress-events.js
+test/js/node/test/parallel/test-readline-input-onerror.js
+test/js/node/test/parallel/test-readline-interface-escapecodetimeout.js
+test/js/node/test/parallel/test-readline-interface-no-trailing-newline.js
+test/js/node/test/parallel/test-readline-interface-recursive-writes.js
+test/js/node/test/parallel/test-readline-keys.js
+test/js/node/test/parallel/test-readline-position.js
+test/js/node/test/parallel/test-readline-promises-csi.mjs
+test/js/node/test/parallel/test-readline-promises-tab-complete.js
+test/js/node/test/parallel/test-readline-reopen.js
+test/js/node/test/parallel/test-readline-set-raw-mode.js
+test/js/node/test/parallel/test-readline-tab-complete.js
+test/js/node/test/parallel/test-readline-undefined-columns.js
+test/js/node/test/parallel/test-readline.js
+test/js/node/test/parallel/test-ref-unref-return.js
+test/js/node/test/parallel/test-repl-clear-immediate-crash.js
+test/js/node/test/parallel/test-repl-close.js
+test/js/node/test/parallel/test-repl-dynamic-import.js
+test/js/node/test/parallel/test-repl-preview-without-inspector.js
+test/js/node/test/parallel/test-repl-syntax-error-handling.js
+test/js/node/test/parallel/test-require-cache.js
+test/js/node/test/parallel/test-require-delete-array-iterator.js
+test/js/node/test/parallel/test-require-dot.js
+test/js/node/test/parallel/test-require-empty-main.js
+test/js/node/test/parallel/test-require-enoent-dir.js
+test/js/node/test/parallel/test-require-exceptions.js
+test/js/node/test/parallel/test-require-extension-over-directory.js
+test/js/node/test/parallel/test-require-extensions-main.js
+test/js/node/test/parallel/test-require-extensions-same-filename-as-dir-trailing-slash.js
+test/js/node/test/parallel/test-require-invalid-main-no-exports.js
+test/js/node/test/parallel/test-require-invalid-package.js
+test/js/node/test/parallel/test-require-json.js
+test/js/node/test/parallel/test-require-long-path.js
+test/js/node/test/parallel/test-require-node-prefix.js
+test/js/node/test/parallel/test-require-nul.js
+test/js/node/test/parallel/test-require-process.js
+test/js/node/test/parallel/test-require-resolve.js
+test/js/node/test/parallel/test-require-symlink.js
+test/js/node/test/parallel/test-require-unicode.js
+test/js/node/test/parallel/test-resource-usage.js
+test/js/node/test/parallel/test-runner-filter-warning.js
+test/js/node/test/parallel/test-runner-subtest-after-hook.js
+test/js/node/test/parallel/test-set-process-debug-port.js
+test/js/node/test/parallel/test-shadow-realm-gc-module.js
+test/js/node/test/parallel/test-shadow-realm-module.js
+test/js/node/test/parallel/test-shadow-realm-preload-module.js
+test/js/node/test/parallel/test-shadow-realm-prepare-stack-trace.js
+test/js/node/test/parallel/test-shadow-realm.js
+test/js/node/test/parallel/test-sigint-infinite-loop.js
+test/js/node/test/parallel/test-signal-args.js
+test/js/node/test/parallel/test-signal-handler-remove-on-exit.js
+test/js/node/test/parallel/test-signal-handler.js
+test/js/node/test/parallel/test-signal-unregister.js
+test/js/node/test/parallel/test-socket-address.js
+test/js/node/test/parallel/test-socket-options-invalid.js
+test/js/node/test/parallel/test-socket-write-after-fin-error.js
+test/js/node/test/parallel/test-spawn-cmd-named-pipe.js
+test/js/node/test/parallel/test-stdin-child-proc.js
+test/js/node/test/parallel/test-stdin-from-file-spawn.js
+test/js/node/test/parallel/test-stdin-from-file.js
+test/js/node/test/parallel/test-stdin-hang.js
+test/js/node/test/parallel/test-stdin-pause-resume-sync.js
+test/js/node/test/parallel/test-stdin-pause-resume.js
+test/js/node/test/parallel/test-stdin-pipe-large.js
+test/js/node/test/parallel/test-stdin-pipe-resume.js
+test/js/node/test/parallel/test-stdin-resume-pause.js
+test/js/node/test/parallel/test-stdin-script-child-option.js
+test/js/node/test/parallel/test-stdin-script-child.js
+test/js/node/test/parallel/test-stdio-closed.js
+test/js/node/test/parallel/test-stdio-pipe-access.js
+test/js/node/test/parallel/test-stdio-pipe-stderr.js
+test/js/node/test/parallel/test-stdio-undestroy.js
+test/js/node/test/parallel/test-stdout-cannot-be-closed-child-process-pipe.js
+test/js/node/test/parallel/test-stdout-pipeline-destroy.js
+test/js/node/test/parallel/test-stdout-stderr-reading.js
+test/js/node/test/parallel/test-stdout-stderr-write.js
+test/js/node/test/parallel/test-stdout-to-file.js
+test/js/node/test/parallel/test-string-decoder-end.js
+test/js/node/test/parallel/test-string-decoder.js
+test/js/node/test/parallel/test-stringbytes-external.js
+test/js/node/test/parallel/test-sync-fileread.js
+test/js/node/test/parallel/test-sys.js
+test/js/node/test/parallel/test-timers-api-refs.js
+test/js/node/test/parallel/test-timers-args.js
+test/js/node/test/parallel/test-timers-clear-null-does-not-throw-error.js
+test/js/node/test/parallel/test-timers-clear-object-does-not-throw-error.js
+test/js/node/test/parallel/test-timers-clear-timeout-interval-equivalent.js
+test/js/node/test/parallel/test-timers-clearImmediate-als.js
+test/js/node/test/parallel/test-timers-clearImmediate.js
+test/js/node/test/parallel/test-timers-destroyed.js
+test/js/node/test/parallel/test-timers-dispose.js
+test/js/node/test/parallel/test-timers-immediate-promisified.js
+test/js/node/test/parallel/test-timers-immediate-queue-throw.js
+test/js/node/test/parallel/test-timers-immediate-queue.js
+test/js/node/test/parallel/test-timers-immediate-unref-nested-once.js
+test/js/node/test/parallel/test-timers-immediate-unref-simple.js
+test/js/node/test/parallel/test-timers-immediate-unref.js
+test/js/node/test/parallel/test-timers-immediate.js
+test/js/node/test/parallel/test-timers-interval-promisified.js
+test/js/node/test/parallel/test-timers-interval-throw.js
+test/js/node/test/parallel/test-timers-invalid-clear.js
+test/js/node/test/parallel/test-timers-linked-list.js
+test/js/node/test/parallel/test-timers-max-duration-warning.js
+test/js/node/test/parallel/test-timers-nan-duration-emit-once-per-process.js
+test/js/node/test/parallel/test-timers-nan-duration-warning-promises.js
+test/js/node/test/parallel/test-timers-nan-duration-warning.js
+test/js/node/test/parallel/test-timers-negative-duration-warning-emit-once-per-process.js
+test/js/node/test/parallel/test-timers-negative-duration-warning.js
+test/js/node/test/parallel/test-timers-nested.js
+test/js/node/test/parallel/test-timers-next-tick.js
+test/js/node/test/parallel/test-timers-non-integer-delay.js
+test/js/node/test/parallel/test-timers-not-emit-duration-zero.js
+test/js/node/test/parallel/test-timers-now.js
+test/js/node/test/parallel/test-timers-ordering.js
+test/js/node/test/parallel/test-timers-process-tampering.js
+test/js/node/test/parallel/test-timers-promises-scheduler.js
+test/js/node/test/parallel/test-timers-promises.js
+test/js/node/test/parallel/test-timers-refresh-in-callback.js
+test/js/node/test/parallel/test-timers-refresh.js
+test/js/node/test/parallel/test-timers-same-timeout-wrong-list-deleted.js
+test/js/node/test/parallel/test-timers-setimmediate-infinite-loop.js
+test/js/node/test/parallel/test-timers-socket-timeout-removes-other-socket-unref-timer.js
+test/js/node/test/parallel/test-timers-this.js
+test/js/node/test/parallel/test-timers-throw-when-cb-not-function.js
+test/js/node/test/parallel/test-timers-timeout-promisified.js
+test/js/node/test/parallel/test-timers-timeout-to-interval.js
+test/js/node/test/parallel/test-timers-timeout-with-non-integer.js
+test/js/node/test/parallel/test-timers-to-primitive.js
+test/js/node/test/parallel/test-timers-uncaught-exception.js
+test/js/node/test/parallel/test-timers-unenroll-unref-interval.js
+test/js/node/test/parallel/test-timers-unref-throw-then-ref.js
+test/js/node/test/parallel/test-timers-unref.js
+test/js/node/test/parallel/test-timers-unrefd-interval-still-fires.js
+test/js/node/test/parallel/test-timers-unrefed-in-beforeexit.js
+test/js/node/test/parallel/test-timers-unrefed-in-callback.js
+test/js/node/test/parallel/test-timers-user-call.js
+test/js/node/test/parallel/test-timers-zero-timeout.js
+test/js/node/test/parallel/test-timers.js
+test/js/node/test/parallel/test-tty-backwards-api.js
+test/js/node/test/parallel/test-tty-stdin-end.js
+test/js/node/test/parallel/test-tty-stdin-pipe.js
+test/js/node/test/parallel/test-tz-version.js
+test/js/node/test/parallel/test-unhandled-exception-with-worker-inuse.js
+test/js/node/test/parallel/test-url-canParse-whatwg.js
+test/js/node/test/parallel/test-url-domain-ascii-unicode.js
+test/js/node/test/parallel/test-url-format-invalid-input.js
+test/js/node/test/parallel/test-url-format-whatwg.js
+test/js/node/test/parallel/test-url-format.js
+test/js/node/test/parallel/test-url-parse-format.js
+test/js/node/test/parallel/test-url-parse-invalid-input.js
+test/js/node/test/parallel/test-url-parse-query.js
+test/js/node/test/parallel/test-url-relative.js
+test/js/node/test/parallel/test-url-revokeobjecturl.js
+test/js/node/test/parallel/test-url-urltooptions.js
+test/js/node/test/parallel/test-utf8-scripts.js
+test/js/node/test/parallel/test-util-callbackify.js
+test/js/node/test/parallel/test-util-deprecate-invalid-code.js
+test/js/node/test/parallel/test-util-deprecate.js
+test/js/node/test/parallel/test-util-inherits.js
+test/js/node/test/parallel/test-util-inspect-getters-accessing-this.js
+test/js/node/test/parallel/test-util-inspect-long-running.js
+test/js/node/test/parallel/test-util-inspect-proxy.js
+test/js/node/test/parallel/test-util-internal.js
+test/js/node/test/parallel/test-util-parse-env.js
+test/js/node/test/parallel/test-util-primordial-monkeypatching.js
+test/js/node/test/parallel/test-util-promisify-custom-names.mjs
+test/js/node/test/parallel/test-util-promisify.js
+test/js/node/test/parallel/test-util-sigint-watchdog.js
+test/js/node/test/parallel/test-util-sleep.js
+test/js/node/test/parallel/test-util-stripvtcontrolcharacters.js
+test/js/node/test/parallel/test-util-styletext.js
+test/js/node/test/parallel/test-util-text-decoder.js
+test/js/node/test/parallel/test-util-types-exists.js
+test/js/node/test/parallel/test-util-types.js
+test/js/node/test/parallel/test-util.js
+test/js/node/test/parallel/test-v8-deserialize-buffer.js
+test/js/node/test/parallel/test-v8-flag-pool-size-0.js
+test/js/node/test/parallel/test-v8-getheapsnapshot-twice.js
+test/js/node/test/parallel/test-v8-global-setter.js
+test/js/node/test/parallel/test-v8-serialize-leak.js
+test/js/node/test/parallel/test-vm-access-process-env.js
+test/js/node/test/parallel/test-vm-api-handles-getter-errors.js
+test/js/node/test/parallel/test-vm-attributes-property-not-on-sandbox.js
+test/js/node/test/parallel/test-vm-basic.js
+test/js/node/test/parallel/test-vm-cached-data.js
+test/js/node/test/parallel/test-vm-context-async-script.js
+test/js/node/test/parallel/test-vm-context-property-forwarding.js
+test/js/node/test/parallel/test-vm-context.js
+test/js/node/test/parallel/test-vm-create-and-run-in-context.js
+test/js/node/test/parallel/test-vm-create-context-accessors.js
+test/js/node/test/parallel/test-vm-create-context-arg.js
+test/js/node/test/parallel/test-vm-create-context-circular-reference.js
+test/js/node/test/parallel/test-vm-createcacheddata.js
+test/js/node/test/parallel/test-vm-cross-context.js
+test/js/node/test/parallel/test-vm-data-property-writable.js
+test/js/node/test/parallel/test-vm-deleting-property.js
+test/js/node/test/parallel/test-vm-function-declaration.js
+test/js/node/test/parallel/test-vm-function-redefinition.js
+test/js/node/test/parallel/test-vm-getters.js
+test/js/node/test/parallel/test-vm-global-assignment.js
+test/js/node/test/parallel/test-vm-global-configurable-properties.js
+test/js/node/test/parallel/test-vm-global-define-property.js
+test/js/node/test/parallel/test-vm-global-get-own.js
+test/js/node/test/parallel/test-vm-global-non-writable-properties.js
+test/js/node/test/parallel/test-vm-global-property-enumerator.js
+test/js/node/test/parallel/test-vm-global-property-interceptors.js
+test/js/node/test/parallel/test-vm-global-property-prototype.js
+test/js/node/test/parallel/test-vm-global-setter.js
+test/js/node/test/parallel/test-vm-harmony-symbols.js
+test/js/node/test/parallel/test-vm-indexed-properties.js
+test/js/node/test/parallel/test-vm-inherited_properties.js
+test/js/node/test/parallel/test-vm-is-context.js
+test/js/node/test/parallel/test-vm-low-stack-space.js
+test/js/node/test/parallel/test-vm-module-basic.js
+test/js/node/test/parallel/test-vm-module-cached-data.js
+test/js/node/test/parallel/test-vm-module-errors.js
+test/js/node/test/parallel/test-vm-module-import-meta.js
+test/js/node/test/parallel/test-vm-module-link.js
+test/js/node/test/parallel/test-vm-module-reevaluate.js
+test/js/node/test/parallel/test-vm-module-referrer-realm.mjs
+test/js/node/test/parallel/test-vm-module-synthetic.js
+test/js/node/test/parallel/test-vm-new-script-context.js
+test/js/node/test/parallel/test-vm-new-script-new-context.js
+test/js/node/test/parallel/test-vm-new-script-this-context.js
+test/js/node/test/parallel/test-vm-not-strict.js
+test/js/node/test/parallel/test-vm-options-validation.js
+test/js/node/test/parallel/test-vm-ownkeys.js
+test/js/node/test/parallel/test-vm-ownpropertynames.js
+test/js/node/test/parallel/test-vm-ownpropertysymbols.js
+test/js/node/test/parallel/test-vm-parse-abort-on-uncaught-exception.js
+test/js/node/test/parallel/test-vm-preserves-property.js
+test/js/node/test/parallel/test-vm-proxies.js
+test/js/node/test/parallel/test-vm-proxy-failure-CP.js
+test/js/node/test/parallel/test-vm-run-in-new-context.js
+test/js/node/test/parallel/test-vm-script-throw-in-tostring.js
+test/js/node/test/parallel/test-vm-set-property-proxy.js
+test/js/node/test/parallel/test-vm-set-proto-null-on-globalthis.js
+test/js/node/test/parallel/test-vm-sigint-existing-handler.js
+test/js/node/test/parallel/test-vm-sigint.js
+test/js/node/test/parallel/test-vm-static-this.js
+test/js/node/test/parallel/test-vm-strict-assign.js
+test/js/node/test/parallel/test-vm-strict-mode.js
+test/js/node/test/parallel/test-vm-symbols.js
+test/js/node/test/parallel/test-vm-syntax-error-message.js
+test/js/node/test/parallel/test-vm-syntax-error-stderr.js
+test/js/node/test/parallel/test-vm-timeout-escape-promise-module.js
+test/js/node/test/parallel/test-vm-timeout-escape-promise.js
+test/js/node/test/parallel/test-vm-timeout.js
+test/js/node/test/parallel/test-vm-util-lazy-properties.js
+test/js/node/test/parallel/test-warn-stream-wrap.js
+test/js/node/test/parallel/test-weakref.js
+test/js/node/test/parallel/test-webcrypto-cryptokey-workers.js
+test/js/node/test/parallel/test-webcrypto-derivekey.js
+test/js/node/test/parallel/test-webcrypto-digest.js
+test/js/node/test/parallel/test-webcrypto-encrypt-decrypt-aes.js
+test/js/node/test/parallel/test-webcrypto-encrypt-decrypt.js
+test/js/node/test/parallel/test-webcrypto-getRandomValues.js
+test/js/node/test/parallel/test-webcrypto-random.js
+test/js/node/test/parallel/test-webcrypto-sign-verify.js
+test/js/node/test/parallel/test-websocket.js
+test/js/node/test/parallel/test-webstream-string-tag.js
+test/js/node/test/parallel/test-whatwg-encoding-custom-api-basics.js
+test/js/node/test/parallel/test-whatwg-encoding-custom-fatal-streaming.js
+test/js/node/test/parallel/test-whatwg-encoding-custom-textdecoder-api-invalid-label.js
+test/js/node/test/parallel/test-whatwg-encoding-custom-textdecoder-fatal.js
+test/js/node/test/parallel/test-whatwg-encoding-custom-textdecoder-ignorebom.js
+test/js/node/test/parallel/test-whatwg-encoding-custom-textdecoder-invalid-arg.js +test/js/node/test/parallel/test-whatwg-encoding-custom-textdecoder-streaming.js +test/js/node/test/parallel/test-whatwg-encoding-custom-textdecoder-utf16-surrogates.js +test/js/node/test/parallel/test-whatwg-events-add-event-listener-options-passive.js +test/js/node/test/parallel/test-whatwg-events-add-event-listener-options-signal.js +test/js/node/test/parallel/test-whatwg-events-customevent.js +test/js/node/test/parallel/test-whatwg-events-event-constructors.js +test/js/node/test/parallel/test-whatwg-events-eventtarget-this-of-listener.js +test/js/node/test/parallel/test-whatwg-readablebytestream.js +test/js/node/test/parallel/test-whatwg-readablebytestreambyob.js +test/js/node/test/parallel/test-whatwg-readablestream.mjs +test/js/node/test/parallel/test-whatwg-url-canparse.js +test/js/node/test/parallel/test-whatwg-url-custom-deepequal.js +test/js/node/test/parallel/test-whatwg-url-custom-domainto.js +test/js/node/test/parallel/test-whatwg-url-custom-global.js +test/js/node/test/parallel/test-whatwg-url-custom-href-side-effect.js +test/js/node/test/parallel/test-whatwg-url-custom-inspect.js +test/js/node/test/parallel/test-whatwg-url-custom-parsing.js +test/js/node/test/parallel/test-whatwg-url-custom-properties.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-append.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-delete.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-entries.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-foreach.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-get.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-getall.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-has.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-keys.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-set.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-sort.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-stringifier.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams-values.js +test/js/node/test/parallel/test-whatwg-url-custom-searchparams.js +test/js/node/test/parallel/test-whatwg-url-custom-setters.js +test/js/node/test/parallel/test-whatwg-url-custom-tostringtag.js +test/js/node/test/parallel/test-whatwg-url-invalidthis.js +test/js/node/test/parallel/test-whatwg-url-override-hostname.js +test/js/node/test/parallel/test-whatwg-url-toascii.js +test/js/node/test/parallel/test-windows-abort-exitcode.js +test/js/node/test/parallel/test-windows-failed-heap-allocation.js +test/js/node/test/parallel/test-worker-abort-on-uncaught-exception.js +test/js/node/test/parallel/test-worker-arraybuffer-zerofill.js +test/js/node/test/parallel/test-worker-cjs-workerdata.js +test/js/node/test/parallel/test-worker-cleanexit-with-js.js +test/js/node/test/parallel/test-worker-cleanexit-with-moduleload.js +test/js/node/test/parallel/test-worker-console-listeners.js +test/js/node/test/parallel/test-worker-dns-terminate-during-query.js +test/js/node/test/parallel/test-worker-environmentdata.js +test/js/node/test/parallel/test-worker-esm-exit.js +test/js/node/test/parallel/test-worker-esm-missing-main.js +test/js/node/test/parallel/test-worker-esmodule.js +test/js/node/test/parallel/test-worker-event.js +test/js/node/test/parallel/test-worker-exit-event-error.js 
+test/js/node/test/parallel/test-worker-exit-from-uncaught-exception.js +test/js/node/test/parallel/test-worker-exit-heapsnapshot.js +test/js/node/test/parallel/test-worker-fs-stat-watcher.js +test/js/node/test/parallel/test-worker-heap-snapshot.js +test/js/node/test/parallel/test-worker-http2-generic-streams-terminate.js +test/js/node/test/parallel/test-worker-invalid-workerdata.js +test/js/node/test/parallel/test-worker-load-file-with-extension-other-than-js.js +test/js/node/test/parallel/test-worker-memory.js +test/js/node/test/parallel/test-worker-message-channel-sharedarraybuffer.js +test/js/node/test/parallel/test-worker-message-event.js +test/js/node/test/parallel/test-worker-message-port-constructor.js +test/js/node/test/parallel/test-worker-message-port-infinite-message-loop.js +test/js/node/test/parallel/test-worker-message-port-receive-message.js +test/js/node/test/parallel/test-worker-message-port-terminate-transfer-list.js +test/js/node/test/parallel/test-worker-message-port-transfer-duplicate.js +test/js/node/test/parallel/test-worker-message-port-transfer-terminate.js +test/js/node/test/parallel/test-worker-message-port-wasm-module.js +test/js/node/test/parallel/test-worker-message-port-wasm-threads.js +test/js/node/test/parallel/test-worker-mjs-workerdata.js +test/js/node/test/parallel/test-worker-nested-on-process-exit.js +test/js/node/test/parallel/test-worker-nested-uncaught.js +test/js/node/test/parallel/test-worker-no-sab.js +test/js/node/test/parallel/test-worker-non-fatal-uncaught-exception.js +test/js/node/test/parallel/test-worker-on-process-exit.js +test/js/node/test/parallel/test-worker-onmessage-not-a-function.js +test/js/node/test/parallel/test-worker-onmessage.js +test/js/node/test/parallel/test-worker-parent-port-ref.js +test/js/node/test/parallel/test-worker-process-argv.js +test/js/node/test/parallel/test-worker-ref-onexit.js +test/js/node/test/parallel/test-worker-ref.js +test/js/node/test/parallel/test-worker-relative-path-double-dot.js +test/js/node/test/parallel/test-worker-relative-path.js +test/js/node/test/parallel/test-worker-safe-getters.js +test/js/node/test/parallel/test-worker-sharedarraybuffer-from-worker-thread.js +test/js/node/test/parallel/test-worker-terminate-http2-respond-with-file.js +test/js/node/test/parallel/test-worker-terminate-nested.js +test/js/node/test/parallel/test-worker-terminate-null-handler.js +test/js/node/test/parallel/test-worker-terminate-timers.js +test/js/node/test/parallel/test-worker-type-check.js +test/js/node/test/parallel/test-worker-workerdata-sharedarraybuffer.js +test/js/node/test/parallel/test-worker.js +test/js/node/test/parallel/test-worker.mjs +test/js/node/test/sequential/test-buffer-creation-regression.js +test/js/node/test/sequential/test-child-process-emfile.js +test/js/node/test/sequential/test-child-process-execsync.js +test/js/node/test/sequential/test-child-process-exit.js +test/js/node/test/sequential/test-crypto-timing-safe-equal.js +test/js/node/test/sequential/test-debug-prompt.js +test/js/node/test/sequential/test-dgram-implicit-bind-failure.js +test/js/node/test/sequential/test-dgram-pingpong.js +test/js/node/test/sequential/test-fs-opendir-recursive.js +test/js/node/test/sequential/test-fs-readdir-recursive.js +test/js/node/test/sequential/test-fs-stat-sync-overflow.js +test/js/node/test/sequential/test-http-econnrefused.js +test/js/node/test/sequential/test-http-keep-alive-large-write.js +test/js/node/test/sequential/test-http-server-keep-alive-timeout-slow-server.js 
+test/js/node/test/sequential/test-http2-large-file.js +test/js/node/test/sequential/test-init.js +test/js/node/test/sequential/test-net-GH-5504.js +test/js/node/test/sequential/test-net-better-error-messages-port.js +test/js/node/test/sequential/test-net-connect-econnrefused.js +test/js/node/test/sequential/test-net-connect-handle-econnrefused.js +test/js/node/test/sequential/test-net-reconnect-error.js +test/js/node/test/sequential/test-net-response-size.js +test/js/node/test/sequential/test-net-server-address.js +test/js/node/test/sequential/test-net-server-bind.js +test/js/node/test/sequential/test-require-cache-without-stat.js +test/js/node/test/sequential/test-single-executable-application-assets-raw.js +test/js/node/test/sequential/test-single-executable-application-assets.js +test/js/node/test/sequential/test-single-executable-application-disable-experimental-sea-warning.js +test/js/node/test/sequential/test-single-executable-application-empty.js +test/js/node/test/sequential/test-single-executable-application-snapshot-and-code-cache.js +test/js/node/test/sequential/test-single-executable-application-snapshot-worker.js +test/js/node/test/sequential/test-single-executable-application-snapshot.js +test/js/node/test/sequential/test-single-executable-application-use-code-cache.js +test/js/node/test/sequential/test-single-executable-application.js +test/js/node/test/sequential/test-stream2-fs.js +test/js/node/test/sequential/test-timers-block-eventloop.js +test/js/node/test/sequential/test-timers-set-interval-excludes-callback-duration.js +test/js/node/test/sequential/test-tls-connect.js +test/js/node/test/sequential/test-tls-lookup.js +test/js/node/test/sequential/test-tls-psk-client.js +test/js/node/test/sequential/test-tls-securepair-client.js +test/js/node/test_runner/node-test.test.ts +test/js/node/timers.promises/timers.promises.test.ts +test/js/node/timers/node-timers.test.ts +test/js/node/tls/fetch-tls-cert.test.ts +test/js/node/tls/node-tls-cert.test.ts +test/js/node/tls/node-tls-connect.test.ts +test/js/node/tls/node-tls-context.test.ts +test/js/node/tls/node-tls-create-secure-context-args.test.ts +test/js/node/tls/node-tls-no-cipher-match-error.test.ts +test/js/node/tls/node-tls-server.test.ts +test/js/node/tls/node-tls-upgrade.test.ts +test/js/node/tls/renegotiation.test.ts +test/js/node/url/url-parse-format.test.js +test/js/node/url/url-parse-ipv6.test.ts +test/js/node/url/url-relative.test.js +test/js/node/url/url.test.ts +test/js/node/util/bun-inspect.test.ts +test/js/node/util/custom-inspect.test.js +test/js/node/util/mime-api.test.ts +test/js/node/util/node-inspect-tests/parallel/util-inspect.test.js +test/js/node/util/parse_args/default-args.test.mjs +test/js/node/util/test-util-types.test.js +test/js/node/util/util-callbackify.test.js +test/js/node/util/util-promisify.test.js +test/js/node/util/util.test.js +test/js/node/v8/capture-stack-trace.test.js +test/js/node/vm/happy-dom-vm-16277.test.ts +test/js/node/vm/sourcetextmodule-leak.test.ts +test/js/node/vm/vm-sourceUrl.test.ts +test/js/node/vm/vm.test.ts +test/js/node/watch/fs.watchFile.test.ts +test/js/node/worker_threads/15787.test.ts +test/js/node/zlib/zlib.kMaxLength.global.test.js +test/js/node/zlib/zlib.test.js +test/js/sql/local-sql.test.ts +test/js/sql/sql.test.ts +test/js/third_party/@azure/service-bus/azure-service-bus.test.ts +test/js/third_party/@duckdb/node-api/duckdb.test.ts +test/js/third_party/@fastify/websocket/fastity-test-websocket.test.js +test/js/third_party/@napi-rs/canvas/napi-rs-canvas.test.ts 
+test/js/third_party/body-parser/express-body-parser-test.test.ts +test/js/third_party/body-parser/express-bun-build-compile.test.ts +test/js/third_party/body-parser/express-memory-leak.test.ts +test/js/third_party/comlink/comlink.test.ts +test/js/third_party/duckdb/duckdb-basic-usage.test.ts +test/js/third_party/esbuild/esbuild-child_process.test.ts +test/js/third_party/express/app.router.test.ts +test/js/third_party/express/express.json.test.ts +test/js/third_party/express/express.test.ts +test/js/third_party/express/express.text.test.ts +test/js/third_party/express/res.json.test.ts +test/js/third_party/express/res.location.test.ts +test/js/third_party/express/res.redirect.test.ts +test/js/third_party/express/res.send.test.ts +test/js/third_party/express/res.sendFile.test.ts +test/js/third_party/grpc-js/test-call-credentials.test.ts +test/js/third_party/grpc-js/test-call-propagation.test.ts +test/js/third_party/grpc-js/test-certificate-provider.test.ts +test/js/third_party/grpc-js/test-channel-credentials.test.ts +test/js/third_party/grpc-js/test-channelz.test.ts +test/js/third_party/grpc-js/test-client.test.ts +test/js/third_party/grpc-js/test-confg-parsing.test.ts +test/js/third_party/grpc-js/test-deadline.test.ts +test/js/third_party/grpc-js/test-duration.test.ts +test/js/third_party/grpc-js/test-end-to-end.test.ts +test/js/third_party/grpc-js/test-global-subchannel-pool.test.ts +test/js/third_party/grpc-js/test-idle-timer.test.ts +test/js/third_party/grpc-js/test-local-subchannel-pool.test.ts +test/js/third_party/grpc-js/test-logging.test.ts +test/js/third_party/grpc-js/test-metadata.test.ts +test/js/third_party/grpc-js/test-outlier-detection.test.ts +test/js/third_party/grpc-js/test-pick-first.test.ts +test/js/third_party/grpc-js/test-prototype-pollution.test.ts +test/js/third_party/grpc-js/test-resolver.test.ts +test/js/third_party/grpc-js/test-retry-config.test.ts +test/js/third_party/grpc-js/test-retry.test.ts +test/js/third_party/grpc-js/test-server-credentials.test.ts +test/js/third_party/grpc-js/test-server-deadlines.test.ts +test/js/third_party/grpc-js/test-server-errors.test.ts +test/js/third_party/grpc-js/test-server-interceptors.test.ts +test/js/third_party/grpc-js/test-server.test.ts +test/js/third_party/grpc-js/test-status-builder.test.ts +test/js/third_party/grpc-js/test-uri-parser.test.ts +test/js/third_party/grpc-js/test-tonic.test.ts +test/js/third_party/http2-wrapper/http2-wrapper.test.ts +test/js/third_party/jsonwebtoken/async_sign.test.js +test/js/third_party/jsonwebtoken/buffer.test.js +test/js/third_party/jsonwebtoken/claim-aud.test.js +test/js/third_party/jsonwebtoken/claim-exp.test.js +test/js/third_party/jsonwebtoken/claim-iat.test.js +test/js/third_party/jsonwebtoken/claim-iss.test.js +test/js/third_party/jsonwebtoken/claim-jti.test.js +test/js/third_party/jsonwebtoken/claim-nbf.test.js +test/js/third_party/jsonwebtoken/claim-private.test.js +test/js/third_party/jsonwebtoken/claim-sub.test.js +test/js/third_party/jsonwebtoken/decoding.test.js +test/js/third_party/jsonwebtoken/encoding.test.js +test/js/third_party/jsonwebtoken/expires_format.test.js +test/js/third_party/jsonwebtoken/header-kid.test.js +test/js/third_party/jsonwebtoken/invalid_exp.test.js +test/js/third_party/jsonwebtoken/issue_147.test.js +test/js/third_party/jsonwebtoken/issue_304.test.js +test/js/third_party/jsonwebtoken/issue_70.test.js +test/js/third_party/jsonwebtoken/jwt.asymmetric_signing.test.js +test/js/third_party/jsonwebtoken/jwt.hs.test.js 
+test/js/third_party/jsonwebtoken/jwt.malicious.test.js +test/js/third_party/jsonwebtoken/noTimestamp.test.js +test/js/third_party/jsonwebtoken/non_object_values.test.js +test/js/third_party/jsonwebtoken/option-complete.test.js +test/js/third_party/jsonwebtoken/option-maxAge.test.js +test/js/third_party/jsonwebtoken/option-nonce.test.js +test/js/third_party/jsonwebtoken/rsa-public-key.test.js +test/js/third_party/jsonwebtoken/schema.test.js +test/js/third_party/jsonwebtoken/set_headers.test.js +test/js/third_party/jsonwebtoken/undefined_secretOrPublickey.test.js +test/js/third_party/jsonwebtoken/validateAsymmetricKey.test.js +test/js/third_party/jsonwebtoken/verify.test.js +test/js/third_party/jsonwebtoken/wrong_alg.test.js +test/js/third_party/mongodb/mongodb.test.ts +test/js/third_party/msw/msw.test.ts +test/js/third_party/nodemailer/nodemailer.test.ts +test/js/third_party/pg-gateway/pglite.test.ts +test/js/third_party/pg/pg.test.ts +test/js/third_party/pino/pino.test.js +test/js/third_party/postgres/postgres.test.ts +test/js/third_party/prisma/prisma.test.ts +test/js/third_party/prompts/prompts.test.ts +test/js/third_party/remix/remix.test.ts +test/js/third_party/resvg/bbox.test.js +test/js/third_party/rollup-v4/rollup-v4.test.ts +test/js/third_party/socket.io/socket.io-close.test.ts +test/js/third_party/socket.io/socket.io-connection-state-recovery.test.ts +test/js/third_party/socket.io/socket.io-handshake.test.ts +test/js/third_party/socket.io/socket.io-messaging-many.test.ts +test/js/third_party/socket.io/socket.io-middleware.test.ts +test/js/third_party/socket.io/socket.io-namespaces.test.ts +test/js/third_party/socket.io/socket.io-server-attachment.test.ts +test/js/third_party/socket.io/socket.io-socket-middleware.test.ts +test/js/third_party/socket.io/socket.io-socket-timeout.test.ts +test/js/third_party/socket.io/socket.io-utility-methods.test.ts +test/js/third_party/socket.io/socket.io.test.ts +test/js/third_party/solc/solc.test.ts +test/js/third_party/st/st.test.ts +test/js/third_party/stripe/stripe.test.ts +test/js/third_party/svelte/svelte.test.ts +test/js/web/abort/abort.test.ts +test/js/web/broadcastchannel/broadcast-channel.test.ts +test/js/web/console/console-timeLog.test.ts +test/js/web/crypto/web-crypto.test.ts +test/js/web/encoding/encode-bad-chunks.test.ts +test/js/web/encoding/text-decoder-stream.test.ts +test/js/web/encoding/text-decoder.test.js +test/js/web/encoding/text-encoder-stream.test.ts +test/js/web/encoding/text-encoder.test.js +test/js/web/fetch/abort-signal-leak.test.ts +test/js/web/fetch/blob-oom.test.ts +test/js/web/fetch/blob.test.ts +test/js/web/fetch/body-clone.test.ts +test/js/web/fetch/body-stream-excess.test.ts +test/js/web/fetch/body-stream.test.ts +test/js/web/fetch/body.test.ts +test/js/web/fetch/chunked-trailing.test.js +test/js/web/fetch/client-fetch.test.ts +test/js/web/fetch/content-length.test.js +test/js/web/fetch/cookies.test.ts +test/js/web/fetch/fetch-args.test.ts +test/js/web/fetch/fetch-gzip.test.ts +test/js/web/fetch/fetch-preconnect.test.ts +test/js/web/fetch/fetch-redirect.test.ts +test/js/web/fetch/fetch-tcp-stress.test.ts +test/js/web/fetch/fetch-url-after-redirect.test.ts +test/js/web/fetch/fetch.brotli.test.ts +test/js/web/fetch/fetch.stream.test.ts +test/js/web/fetch/fetch.test.ts +test/js/web/fetch/fetch.tls.test.ts +test/js/web/fetch/fetch.unix.test.ts +test/js/web/fetch/fetch_headers.test.js +test/js/web/fetch/headers.test.ts +test/js/web/fetch/headers.undici.test.ts +test/js/web/fetch/stream-fast-path.test.ts 
+test/js/web/fetch/utf8-bom.test.ts +test/js/web/html/FormData.test.ts +test/js/web/request/request-clone-leak.test.ts +test/js/web/request/request-subclass.test.ts +test/js/web/request/request.test.ts +test/js/web/streams/streams.test.js +test/js/web/timers/microtask.test.js +test/js/web/timers/setInterval.test.js +test/js/web/timers/setTimeout.test.js +test/js/web/websocket/websocket-client-short-read.test.ts +test/js/web/websocket/websocket-client.test.ts +test/js/web/websocket/websocket-permessage-deflate-edge-cases.test.ts +test/js/web/websocket/websocket-permessage-deflate-simple.test.ts +test/js/web/websocket/websocket-permessage-deflate.test.ts +test/js/web/websocket/websocket.test.js +test/js/web/workers/message-channel.test.ts +test/js/web/workers/message-event.test.ts +test/js/web/workers/structured-clone.test.ts +test/js/web/workers/worker.test.ts +test/js/web/workers/worker_blob.test.ts +test/js/workerd/html-rewriter.test.js +test/napi/node-napi.test.ts +test/napi/uv.test.ts +test/napi/uv_stub.test.ts +test/regression/issue/012040.test.ts +test/regression/issue/014187.test.ts +test/regression/issue/01466.test.ts +test/regression/issue/014865.test.ts +test/regression/issue/02368.test.ts +test/regression/issue/02499/02499.test.ts +test/regression/issue/04298/04298.test.ts +test/regression/issue/04947.test.js +test/regression/issue/06946/06946.test.ts +test/regression/issue/07001.test.ts +test/regression/issue/07261.test.ts +test/regression/issue/07827.test.ts +test/regression/issue/07917/7917.test.ts +test/regression/issue/08093.test.ts +test/regression/issue/08794.test.ts +test/regression/issue/09041.test.ts +test/regression/issue/09340.test.ts +test/regression/issue/09469.test.ts +test/regression/issue/09555.test.ts +test/regression/issue/09559.test.ts +test/regression/issue/09778.test.ts +test/regression/issue/10132.test.ts +test/regression/issue/10139.test.ts +test/regression/issue/10170.test.ts +test/regression/issue/11297/11297.test.ts +test/regression/issue/11664.test.ts +test/regression/issue/12910/12910.test.ts +test/regression/issue/14477/14477.test.ts +test/regression/issue/14515.test.tsx +test/regression/issue/14976/14976.test.ts +test/regression/issue/14982/14982.test.ts +test/regression/issue/16312.test.ts +test/regression/issue/16474.test.ts +test/regression/issue/17605.test.ts +test/regression/issue/17766.test.ts +test/regression/issue/18159/18159.test.ts +test/regression/issue/18239/18239.test.ts +test/regression/issue/18547.test.ts +test/regression/issue/18595.test.ts +test/regression/issue/19661.test.ts +test/regression/issue/20144/20144.test.ts +test/regression/issue/crypto-names.test.ts +test/v8/v8.test.ts +vendor/elysia/test/a.test.ts +vendor/elysia/test/adapter/web-standard/cookie-to-header.test.ts +vendor/elysia/test/adapter/web-standard/map-compact-response.test.ts +vendor/elysia/test/adapter/web-standard/map-early-response.test.ts +vendor/elysia/test/adapter/web-standard/map-response.test.ts +vendor/elysia/test/adapter/web-standard/set-cookie.test.ts +vendor/elysia/test/aot/analysis.test.ts +vendor/elysia/test/aot/generation.test.ts +vendor/elysia/test/aot/has-transform.test.ts +vendor/elysia/test/aot/has-type.test.ts +vendor/elysia/test/aot/response.test.ts +vendor/elysia/test/bun/router.test.ts +vendor/elysia/test/cookie/explicit.test.ts +vendor/elysia/test/cookie/implicit.test.ts +vendor/elysia/test/cookie/response.test.ts +vendor/elysia/test/cookie/signature.test.ts +vendor/elysia/test/core/as.test.ts +vendor/elysia/test/core/config.test.ts 
+vendor/elysia/test/core/context.test.ts +vendor/elysia/test/core/dynamic.test.ts +vendor/elysia/test/core/elysia.test.ts +vendor/elysia/test/core/formdata.test.ts +vendor/elysia/test/core/handle-error.test.ts +vendor/elysia/test/core/modules.test.ts +vendor/elysia/test/core/mount.test.ts +vendor/elysia/test/core/native-static.test.ts +vendor/elysia/test/core/normalize.test.ts +vendor/elysia/test/core/path.test.ts +vendor/elysia/test/core/redirect.test.ts +vendor/elysia/test/core/sanitize.test.ts +vendor/elysia/test/core/stop.test.ts +vendor/elysia/test/extends/decorators.test.ts +vendor/elysia/test/extends/error.test.ts +vendor/elysia/test/extends/models.test.ts +vendor/elysia/test/extends/store.test.ts +vendor/elysia/test/hoc/index.test.ts +vendor/elysia/test/lifecycle/after-handle.test.ts +vendor/elysia/test/lifecycle/before-handle.test.ts +vendor/elysia/test/lifecycle/derive.test.ts +vendor/elysia/test/lifecycle/error.test.ts +vendor/elysia/test/lifecycle/hook-types.test.ts +vendor/elysia/test/lifecycle/map-derive.test.ts +vendor/elysia/test/lifecycle/map-resolve.test.ts +vendor/elysia/test/lifecycle/map-response.test.ts +vendor/elysia/test/lifecycle/parser.test.ts +vendor/elysia/test/lifecycle/request.test.ts +vendor/elysia/test/lifecycle/resolve.test.ts +vendor/elysia/test/lifecycle/response.test.ts +vendor/elysia/test/lifecycle/transform.test.ts +vendor/elysia/test/macro/macro.test.ts +vendor/elysia/test/path/group.test.ts +vendor/elysia/test/path/guard.test.ts +vendor/elysia/test/path/path.test.ts +vendor/elysia/test/plugins/affix.test.ts +vendor/elysia/test/plugins/checksum.test.ts +vendor/elysia/test/plugins/error-propagation.test.ts +vendor/elysia/test/plugins/plugin.test.ts +vendor/elysia/test/production/index.test.ts +vendor/elysia/test/response/custom-response.test.ts +vendor/elysia/test/response/headers.test.ts +vendor/elysia/test/response/redirect.test.ts +vendor/elysia/test/response/static.test.ts +vendor/elysia/test/response/stream.test.ts +vendor/elysia/test/sucrose/query.test.ts +vendor/elysia/test/sucrose/sucrose.test.ts +vendor/elysia/test/tracer/aot.test.ts +vendor/elysia/test/tracer/detail.test.ts +vendor/elysia/test/tracer/timing.test.ts +vendor/elysia/test/tracer/trace.test.ts +vendor/elysia/test/type-system/array-string.test.ts +vendor/elysia/test/type-system/boolean-string.test.ts +vendor/elysia/test/type-system/coercion-number.test.ts +vendor/elysia/test/type-system/date.test.ts +vendor/elysia/test/type-system/form.test.ts +vendor/elysia/test/type-system/object-string.test.ts +vendor/elysia/test/type-system/string-format.test.ts +vendor/elysia/test/type-system/union-enum.test.ts +vendor/elysia/test/units/deduplicate-checksum.test.ts +vendor/elysia/test/units/has-ref.test.ts +vendor/elysia/test/units/has-transform.test.ts +vendor/elysia/test/units/merge-deep.test.ts +vendor/elysia/test/units/merge-object-schemas.test.ts +vendor/elysia/test/units/replace-schema-type.test.ts +vendor/elysia/test/validator/body.test.ts +vendor/elysia/test/validator/encode.test.ts +vendor/elysia/test/validator/exact-mirror.test.ts +vendor/elysia/test/validator/header.test.ts +vendor/elysia/test/validator/params.test.ts +vendor/elysia/test/validator/query.test.ts +vendor/elysia/test/validator/response.test.ts +vendor/elysia/test/validator/standalone.test.ts +vendor/elysia/test/validator/validator.test.ts +vendor/elysia/test/ws/aot.test.ts +vendor/elysia/test/ws/connection.test.ts +vendor/elysia/test/ws/destructuring.test.ts +vendor/elysia/test/ws/message.test.ts + +# List of tests 
that potentially throw inside of reifyStaticProperties +test/js/node/test/parallel/test-stream-iterator-helpers-test262-tests.mjs +test/js/node/test/parallel/test-stream-some-find-every.mjs +test/js/node/test/parallel/test-fs-stat-date.mjs +test/js/node/test/parallel/test-fs-readSync-position-validation.mjs +test/js/node/test/parallel/test-fs-read-promises-position-validation.mjs +test/js/node/test/parallel/test-fs-read-position-validation.mjs +test/js/node/test/parallel/test-net-server-async-dispose.mjs +test/js/node/test/parallel/test-net-connect-custom-lookup-non-string-address.mjs + +# uses node:test +test/js/node/test/parallel/test-fs-write-stream-flush.js +test/js/node/test/parallel/test-fs-write-file-flush.js +test/js/node/test/parallel/test-fs-operations-with-surrogate-pairs.js +test/js/node/test/parallel/test-fs-append-file-flush.js +test/js/node/test/parallel/test-file-write-stream5.js + +# trips asan on my macos test machine +test/js/node/test/parallel/test-fs-watch.js +test/js/node/test/parallel/test-fs-watch-recursive-watch-file.js + +# uses jsobjectref +test/js/node/test/parallel/test-fs-readdir-buffer.js +test/js/node/test/parallel/test-stream-finished.js +test/js/node/test/parallel/test-cluster-primary-error.js + +# needs https://github.com/oven-sh/WebKit/pull/94 +test/js/node/test/parallel/test-cluster-disconnect-idle-worker.js +test/js/node/test/parallel/test-cluster-disconnect-leak.js diff --git a/test/package.json b/test/package.json index 8d0ae2e945..afa24d6521 100644 --- a/test/package.json +++ b/test/package.json @@ -37,7 +37,7 @@ "commander": "12.1.0", "detect-libc": "2.0.3", "devalue": "5.1.1", - "duckdb": "1.1.3", + "duckdb": "1.3.1", "es-module-lexer": "1.3.0", "esbuild": "0.18.6", "express": "4.18.2", @@ -89,6 +89,8 @@ "typeorm": "0.3.20", "typescript": "5.0.2", "undici": "5.20.0", + "uuid": "11.1.0", + "unzipper": "0.12.3", "v8-heapsnapshot": "1.3.1", "verdaccio": "6.0.0", "vitest": "0.32.2", diff --git a/test/regression/issue/20753.test.js b/test/regression/issue/20753.test.js new file mode 100644 index 0000000000..df84b3e267 --- /dev/null +++ b/test/regression/issue/20753.test.js @@ -0,0 +1,116 @@ +import { describe, expect, test } from "bun:test"; +import { isWindows } from "harness"; +import { execFile } from "node:child_process"; +import { promisify } from "node:util"; + +const execFileAsync = promisify(execFile); + +describe.skipIf(isWindows /* accessing posix-specific paths */)("stdout should always be a string", () => { + test("execFile returns string stdout/stderr even when process fails to spawn", done => { + // Test case that would cause the issue: non-existent command + execFile("/does/not/exist", [], (err, stdout, stderr) => { + expect(err).toBeTruthy(); + expect(err.code).toBe("ENOENT"); + + // These should never be undefined - they should be strings by default + expect(stdout).toBeDefined(); + expect(stderr).toBeDefined(); + expect(typeof stdout).toBe("string"); + expect(typeof stderr).toBe("string"); + expect(stdout).toBe(""); + expect(stderr).toBe(""); + + // This is what claude-code was trying to do that failed + expect(() => stdout.trim()).not.toThrow(); + expect(() => stderr.trim()).not.toThrow(); + + done(); + }); + }); + + test("execFile returns string stdout/stderr for permission denied errors", done => { + // Another edge case: file exists but not executable + execFile("/etc/passwd", [], (err, stdout, stderr) => { + expect(err).toBeTruthy(); + expect(err.code).toBe("EACCES"); + + expect(stdout).toBeDefined(); + 
expect(stderr).toBeDefined(); + expect(typeof stdout).toBe("string"); + expect(typeof stderr).toBe("string"); + expect(stdout).toBe(""); + expect(stderr).toBe(""); + + done(); + }); + }); + + test("execFile returns Buffer stdout/stderr when encoding is 'buffer'", done => { + execFile("/does/not/exist", [], { encoding: "buffer" }, (err, stdout, stderr) => { + expect(err).toBeTruthy(); + expect(err.code).toBe("ENOENT"); + + expect(stdout).toBeDefined(); + expect(stderr).toBeDefined(); + expect(Buffer.isBuffer(stdout)).toBe(true); + expect(Buffer.isBuffer(stderr)).toBe(true); + expect(stdout.length).toBe(0); + expect(stderr.length).toBe(0); + + done(); + }); + }); + + test("execFile promisified version includes stdout/stderr in error object", async () => { + try { + await execFileAsync("/does/not/exist", []); + expect.unreachable("Should have thrown"); + } catch (err) { + expect(err.code).toBe("ENOENT"); + + // Promisified version attaches stdout/stderr to the error object + expect(err.stdout).toBeDefined(); + expect(err.stderr).toBeDefined(); + expect(typeof err.stdout).toBe("string"); + expect(typeof err.stderr).toBe("string"); + expect(err.stdout).toBe(""); + expect(err.stderr).toBe(""); + } + }); + + test("execFile returns stdout/stderr for process that exits with error code", done => { + execFile( + process.execPath, + ["-e", "console.log('output'); console.error('error'); process.exit(1)"], + (err, stdout, stderr) => { + expect(err).toBeTruthy(); + expect(err.code).toBe(1); + + expect(stdout).toBeDefined(); + expect(stderr).toBeDefined(); + expect(typeof stdout).toBe("string"); + expect(typeof stderr).toBe("string"); + expect(stdout).toBe("output\n"); + expect(stderr).toBe("error\n"); + + done(); + }, + ); + }); + + test("execFile handles fast-exiting processes correctly", done => { + // Process that exits immediately + execFile("true", [], (err, stdout, stderr) => { + expect(err).toBeNull(); + + expect(stdout).toBeDefined(); + expect(stderr).toBeDefined(); + expect(typeof stdout).toBe("string"); + expect(typeof stderr).toBe("string"); + expect(stdout).toBe(""); + expect(stderr).toBe(""); + + done(); + }); + }); +}); diff --git a/test/regression/issue/__snapshots__/03830.test.ts.snap b/test/regression/issue/__snapshots__/03830.test.ts.snap index da75c83eb3..7ecf1cfe71 100644 --- a/test/regression/issue/__snapshots__/03830.test.ts.snap +++ b/test/regression/issue/__snapshots__/03830.test.ts.snap @@ -1,4 +1,4 @@ -// Bun Snapshot v1, https://goo.gl/fbAQLP +// Bun Snapshot v1, https://bun.sh/docs/test/snapshots exports[`macros should not lead to seg faults under any given input 1`] = ` "2 | fn(\`©\${Number(0)}\`); diff --git a/test/v8/bad-modules/bun.lock b/test/v8/bad-modules/bun.lock index 8d6ae88dc8..0fa736a257 100644 --- a/test/v8/bad-modules/bun.lock +++ b/test/v8/bad-modules/bun.lock @@ -3,25 +3,25 @@ "workspaces": { "": { "devDependencies": { - "node-gyp": "~10.1.0", + "node-gyp": "^11.2.0", }, }, }, "packages": { "@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], - "@npmcli/agent": ["@npmcli/agent@2.2.2", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" } 
}, "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og=="], + "@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="], - "@npmcli/fs": ["@npmcli/fs@3.1.1", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg=="], + "@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="], + + "@npmcli/fs": ["@npmcli/fs@4.0.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q=="], "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], - "abbrev": ["abbrev@2.0.0", "", {}, "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ=="], + "abbrev": ["abbrev@3.0.1", "", {}, "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="], "agent-base": ["agent-base@7.1.1", "", { "dependencies": { "debug": "^4.3.4" } }, "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA=="], - "aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="], - "ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="], "ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], @@ -30,11 +30,9 @@ "brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - "cacache": ["cacache@18.0.4", "", { "dependencies": { "@npmcli/fs": "^3.1.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^4.0.0", "ssri": "^10.0.0", "tar": "^6.1.11", "unique-filename": "^3.0.0" } }, "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ=="], + "cacache": ["cacache@19.0.1", "", { "dependencies": { "@npmcli/fs": "^4.0.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^7.0.2", "ssri": "^12.0.0", "tar": "^7.4.3", "unique-filename": "^4.0.0" } }, "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ=="], - "chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="], - - "clean-stack": ["clean-stack@2.2.0", "", {}, "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A=="], + "chownr": ["chownr@3.0.0", "", 
{}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], @@ -56,9 +54,11 @@ "exponential-backoff": ["exponential-backoff@3.1.1", "", {}, "sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw=="], + "fdir": ["fdir@6.4.6", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w=="], + "foreground-child": ["foreground-child@3.3.0", "", { "dependencies": { "cross-spawn": "^7.0.0", "signal-exit": "^4.0.1" } }, "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg=="], - "fs-minipass": ["fs-minipass@2.1.0", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg=="], + "fs-minipass": ["fs-minipass@3.0.3", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw=="], "glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="], @@ -74,14 +74,10 @@ "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], - "indent-string": ["indent-string@4.0.0", "", {}, "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg=="], - "ip-address": ["ip-address@9.0.5", "", { "dependencies": { "jsbn": "1.1.0", "sprintf-js": "^1.1.3" } }, "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g=="], "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="], - "is-lambda": ["is-lambda@1.0.1", "", {}, "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ=="], - "isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], "jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], @@ -90,7 +86,7 @@ "lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - "make-fetch-happen": ["make-fetch-happen@13.0.1", "", { "dependencies": { "@npmcli/agent": "^2.0.0", "cacache": "^18.0.0", "http-cache-semantics": "^4.1.1", "is-lambda": "^1.0.1", "minipass": "^7.0.2", "minipass-fetch": "^3.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", "proc-log": "^4.2.0", "promise-retry": "^2.0.1", "ssri": "^10.0.0" } }, "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA=="], + "make-fetch-happen": 
["make-fetch-happen@14.0.3", "", { "dependencies": { "@npmcli/agent": "^3.0.0", "cacache": "^19.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "ssri": "^12.0.0" } }, "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ=="], "minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], @@ -98,7 +94,7 @@ "minipass-collect": ["minipass-collect@2.0.1", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw=="], - "minipass-fetch": ["minipass-fetch@3.0.5", "", { "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^2.1.2" }, "optionalDependencies": { "encoding": "^0.1.13" } }, "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg=="], + "minipass-fetch": ["minipass-fetch@4.0.1", "", { "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^3.0.1" }, "optionalDependencies": { "encoding": "^0.1.13" } }, "sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ=="], "minipass-flush": ["minipass-flush@1.0.5", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw=="], @@ -106,19 +102,19 @@ "minipass-sized": ["minipass-sized@1.0.3", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g=="], - "minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="], + "minizlib": ["minizlib@3.0.2", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA=="], - "mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="], + "mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], "ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], - "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], + "negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], - "node-gyp": ["node-gyp@10.1.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "glob": "^10.3.10", "graceful-fs": "^4.2.6", "make-fetch-happen": "^13.0.0", "nopt": "^7.0.0", "proc-log": "^3.0.0", "semver": "^7.3.5", "tar": "^6.1.2", "which": "^4.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-B4J5M1cABxPc5PwfjhbV5hoy2DP9p8lFXASnEN6hugXOa61416tnTZ29x9sSwAd0o99XNIcpvDDy1swAExsVKA=="], + "node-gyp": ["node-gyp@11.2.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", 
"graceful-fs": "^4.2.6", "make-fetch-happen": "^14.0.3", "nopt": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "tar": "^7.4.3", "tinyglobby": "^0.2.12", "which": "^5.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-T0S1zqskVUSxcsSTkAsLc7xCycrRYmtDHadDinzocrThjyQCn5kMlEBSj6H4qDbgsIOSLmmlRIeb0lZXj+UArA=="], - "nopt": ["nopt@7.2.1", "", { "dependencies": { "abbrev": "^2.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w=="], + "nopt": ["nopt@8.1.0", "", { "dependencies": { "abbrev": "^3.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A=="], - "p-map": ["p-map@4.0.0", "", { "dependencies": { "aggregate-error": "^3.0.0" } }, "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ=="], + "p-map": ["p-map@7.0.3", "", {}, "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA=="], "package-json-from-dist": ["package-json-from-dist@1.0.0", "", {}, "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw=="], @@ -126,7 +122,9 @@ "path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="], - "proc-log": ["proc-log@3.0.0", "", {}, "sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A=="], + "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], + + "proc-log": ["proc-log@5.0.0", "", {}, "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ=="], "promise-retry": ["promise-retry@2.0.1", "", { "dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" } }, "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g=="], @@ -150,7 +148,7 @@ "sprintf-js": ["sprintf-js@1.1.3", "", {}, "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA=="], - "ssri": ["ssri@10.0.6", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ=="], + "ssri": ["ssri@12.0.0", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ=="], "string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], @@ -160,44 +158,36 @@ "strip-ansi-cjs": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - "tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="], + "tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } 
}, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], - "unique-filename": ["unique-filename@3.0.0", "", { "dependencies": { "unique-slug": "^4.0.0" } }, "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g=="], + "tinyglobby": ["tinyglobby@0.2.14", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ=="], - "unique-slug": ["unique-slug@4.0.0", "", { "dependencies": { "imurmurhash": "^0.1.4" } }, "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ=="], + "unique-filename": ["unique-filename@4.0.0", "", { "dependencies": { "unique-slug": "^5.0.0" } }, "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ=="], - "which": ["which@4.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg=="], + "unique-slug": ["unique-slug@5.0.0", "", { "dependencies": { "imurmurhash": "^0.1.4" } }, "sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg=="], + + "which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], "wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], "wrap-ansi-cjs": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], - "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - - "cacache/fs-minipass": ["fs-minipass@3.0.3", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw=="], + "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], "cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], - "fs-minipass/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - - "make-fetch-happen/proc-log": ["proc-log@4.2.0", "", {}, "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA=="], - "minipass-flush/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], "minipass-pipeline/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], "minipass-sized/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, 
"sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - "minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - "string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], "string-width-cjs/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], "strip-ansi-cjs/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - "tar/minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="], - "wrap-ansi-cjs/ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], "wrap-ansi-cjs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], @@ -206,6 +196,12 @@ "cross-spawn/which/isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + "minipass-flush/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + + "minipass-pipeline/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + + "minipass-sized/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "string-width-cjs/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], "wrap-ansi-cjs/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], diff --git a/test/v8/bad-modules/mismatched_abi_version.cpp b/test/v8/bad-modules/mismatched_abi_version.cpp index 46f1a5f124..73b2e78c03 100644 --- a/test/v8/bad-modules/mismatched_abi_version.cpp +++ b/test/v8/bad-modules/mismatched_abi_version.cpp @@ -8,7 +8,7 @@ void init(v8::Local exports, v8::Local module, extern "C" { static node::node_module _module = { - // bun expects 127 + // bun expects 137 (Node.js 24.3.0) 42, // nm_version 0, // nm_flags nullptr, // nm_dso_handle diff --git a/test/v8/bad-modules/no_entrypoint.cpp b/test/v8/bad-modules/no_entrypoint.cpp index 710ddf2ef4..2ebb9ae7c4 100644 --- a/test/v8/bad-modules/no_entrypoint.cpp +++ b/test/v8/bad-modules/no_entrypoint.cpp @@ -2,7 +2,7 @@ extern "C" { static node::node_module _module = { - 127, // nm_version + 137, // nm_version (Node.js 24.3.0) 0, // nm_flags nullptr, // nm_dso_handle "no_entrypoint.cpp", // nm_filename diff --git a/test/v8/bad-modules/package.json b/test/v8/bad-modules/package.json index 3854193b8a..5f00636867 100644 --- a/test/v8/bad-modules/package.json +++ b/test/v8/bad-modules/package.json @@ -1,5 
+1,5 @@ { "devDependencies": { - "node-gyp": "~10.1.0" + "node-gyp": "~11.2.0" } } diff --git a/test/v8/v8-module/bun.lock b/test/v8/v8-module/bun.lock index 8d6ae88dc8..0fa736a257 100644 --- a/test/v8/v8-module/bun.lock +++ b/test/v8/v8-module/bun.lock @@ -3,25 +3,25 @@ "workspaces": { "": { "devDependencies": { - "node-gyp": "~10.1.0", + "node-gyp": "^11.2.0", }, }, }, "packages": { "@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], - "@npmcli/agent": ["@npmcli/agent@2.2.2", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" } }, "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og=="], + "@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="], - "@npmcli/fs": ["@npmcli/fs@3.1.1", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg=="], + "@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="], + + "@npmcli/fs": ["@npmcli/fs@4.0.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q=="], "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], - "abbrev": ["abbrev@2.0.0", "", {}, "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ=="], + "abbrev": ["abbrev@3.0.1", "", {}, "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="], "agent-base": ["agent-base@7.1.1", "", { "dependencies": { "debug": "^4.3.4" } }, "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA=="], - "aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="], - "ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="], "ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], @@ -30,11 +30,9 @@ "brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - "cacache": ["cacache@18.0.4", "", { "dependencies": { "@npmcli/fs": "^3.1.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", 
"minipass-pipeline": "^1.2.4", "p-map": "^4.0.0", "ssri": "^10.0.0", "tar": "^6.1.11", "unique-filename": "^3.0.0" } }, "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ=="], + "cacache": ["cacache@19.0.1", "", { "dependencies": { "@npmcli/fs": "^4.0.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^7.0.2", "ssri": "^12.0.0", "tar": "^7.4.3", "unique-filename": "^4.0.0" } }, "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ=="], - "chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="], - - "clean-stack": ["clean-stack@2.2.0", "", {}, "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A=="], + "chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], @@ -56,9 +54,11 @@ "exponential-backoff": ["exponential-backoff@3.1.1", "", {}, "sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw=="], + "fdir": ["fdir@6.4.6", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w=="], + "foreground-child": ["foreground-child@3.3.0", "", { "dependencies": { "cross-spawn": "^7.0.0", "signal-exit": "^4.0.1" } }, "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg=="], - "fs-minipass": ["fs-minipass@2.1.0", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg=="], + "fs-minipass": ["fs-minipass@3.0.3", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw=="], "glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="], @@ -74,14 +74,10 @@ "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], - "indent-string": ["indent-string@4.0.0", "", {}, "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg=="], - "ip-address": ["ip-address@9.0.5", "", { "dependencies": { "jsbn": "1.1.0", "sprintf-js": "^1.1.3" } }, "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g=="], "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="], - "is-lambda": ["is-lambda@1.0.1", "", {}, "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ=="], - "isexe": ["isexe@3.1.1", "", {}, 
"sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], "jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], @@ -90,7 +86,7 @@ "lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - "make-fetch-happen": ["make-fetch-happen@13.0.1", "", { "dependencies": { "@npmcli/agent": "^2.0.0", "cacache": "^18.0.0", "http-cache-semantics": "^4.1.1", "is-lambda": "^1.0.1", "minipass": "^7.0.2", "minipass-fetch": "^3.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", "proc-log": "^4.2.0", "promise-retry": "^2.0.1", "ssri": "^10.0.0" } }, "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA=="], + "make-fetch-happen": ["make-fetch-happen@14.0.3", "", { "dependencies": { "@npmcli/agent": "^3.0.0", "cacache": "^19.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "ssri": "^12.0.0" } }, "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ=="], "minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], @@ -98,7 +94,7 @@ "minipass-collect": ["minipass-collect@2.0.1", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw=="], - "minipass-fetch": ["minipass-fetch@3.0.5", "", { "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^2.1.2" }, "optionalDependencies": { "encoding": "^0.1.13" } }, "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg=="], + "minipass-fetch": ["minipass-fetch@4.0.1", "", { "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^3.0.1" }, "optionalDependencies": { "encoding": "^0.1.13" } }, "sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ=="], "minipass-flush": ["minipass-flush@1.0.5", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw=="], @@ -106,19 +102,19 @@ "minipass-sized": ["minipass-sized@1.0.3", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g=="], - "minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="], + "minizlib": ["minizlib@3.0.2", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA=="], - "mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="], + "mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, 
"sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], "ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="], - "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], + "negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], - "node-gyp": ["node-gyp@10.1.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "glob": "^10.3.10", "graceful-fs": "^4.2.6", "make-fetch-happen": "^13.0.0", "nopt": "^7.0.0", "proc-log": "^3.0.0", "semver": "^7.3.5", "tar": "^6.1.2", "which": "^4.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-B4J5M1cABxPc5PwfjhbV5hoy2DP9p8lFXASnEN6hugXOa61416tnTZ29x9sSwAd0o99XNIcpvDDy1swAExsVKA=="], + "node-gyp": ["node-gyp@11.2.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", "make-fetch-happen": "^14.0.3", "nopt": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "tar": "^7.4.3", "tinyglobby": "^0.2.12", "which": "^5.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-T0S1zqskVUSxcsSTkAsLc7xCycrRYmtDHadDinzocrThjyQCn5kMlEBSj6H4qDbgsIOSLmmlRIeb0lZXj+UArA=="], - "nopt": ["nopt@7.2.1", "", { "dependencies": { "abbrev": "^2.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w=="], + "nopt": ["nopt@8.1.0", "", { "dependencies": { "abbrev": "^3.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A=="], - "p-map": ["p-map@4.0.0", "", { "dependencies": { "aggregate-error": "^3.0.0" } }, "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ=="], + "p-map": ["p-map@7.0.3", "", {}, "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA=="], "package-json-from-dist": ["package-json-from-dist@1.0.0", "", {}, "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw=="], @@ -126,7 +122,9 @@ "path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="], - "proc-log": ["proc-log@3.0.0", "", {}, "sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A=="], + "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], + + "proc-log": ["proc-log@5.0.0", "", {}, "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ=="], "promise-retry": ["promise-retry@2.0.1", "", { "dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" } }, "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g=="], @@ -150,7 +148,7 @@ "sprintf-js": ["sprintf-js@1.1.3", "", {}, "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA=="], - "ssri": ["ssri@10.0.6", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ=="], + "ssri": ["ssri@12.0.0", "", { 
"dependencies": { "minipass": "^7.0.3" } }, "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ=="], "string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], @@ -160,44 +158,36 @@ "strip-ansi-cjs": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - "tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="], + "tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], - "unique-filename": ["unique-filename@3.0.0", "", { "dependencies": { "unique-slug": "^4.0.0" } }, "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g=="], + "tinyglobby": ["tinyglobby@0.2.14", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ=="], - "unique-slug": ["unique-slug@4.0.0", "", { "dependencies": { "imurmurhash": "^0.1.4" } }, "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ=="], + "unique-filename": ["unique-filename@4.0.0", "", { "dependencies": { "unique-slug": "^5.0.0" } }, "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ=="], - "which": ["which@4.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg=="], + "unique-slug": ["unique-slug@5.0.0", "", { "dependencies": { "imurmurhash": "^0.1.4" } }, "sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg=="], + + "which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], "wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], "wrap-ansi-cjs": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], - "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - - "cacache/fs-minipass": ["fs-minipass@3.0.3", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw=="], + "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], 
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], - "fs-minipass/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - - "make-fetch-happen/proc-log": ["proc-log@4.2.0", "", {}, "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA=="], - "minipass-flush/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], "minipass-pipeline/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], "minipass-sized/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - "minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - "string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], "string-width-cjs/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], "strip-ansi-cjs/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - "tar/minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="], - "wrap-ansi-cjs/ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], "wrap-ansi-cjs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], @@ -206,6 +196,12 @@ "cross-spawn/which/isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + "minipass-flush/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + + "minipass-pipeline/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + + "minipass-sized/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "string-width-cjs/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], "wrap-ansi-cjs/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], diff --git 
a/test/v8/v8-module/package.json b/test/v8/v8-module/package.json index 3854193b8a..5f00636867 100644 --- a/test/v8/v8-module/package.json +++ b/test/v8/v8-module/package.json @@ -1,5 +1,5 @@ { "devDependencies": { - "node-gyp": "~10.1.0" + "node-gyp": "~11.2.0" } } diff --git a/test/v8/v8.test.ts b/test/v8/v8.test.ts index 0cdcc276e1..a9e4c6232d 100644 --- a/test/v8/v8.test.ts +++ b/test/v8/v8.test.ts @@ -19,9 +19,13 @@ enum BuildMode { // test environment delete bunEnv.CC; delete bunEnv.CXX; + +// Node.js 24.3.0 requires C++20 +bunEnv.CXXFLAGS ??= ""; if (process.platform == "darwin") { - bunEnv.CXXFLAGS ??= ""; - bunEnv.CXXFLAGS += "-std=gnu++17"; + bunEnv.CXXFLAGS += " -std=gnu++20"; +} else { + bunEnv.CXXFLAGS += " -std=c++20"; } // https://github.com/isaacs/node-tar/blob/bef7b1e4ffab822681fea2a9b22187192ed14717/lib/get-write-flag.js // prevent node-tar from using UV_FS_O_FILEMAP @@ -62,8 +66,17 @@ async function build( const build = spawn({ cmd: runtime == Runtime.bun - ? [bunExe(), "x", "--bun", "node-gyp", "rebuild", buildMode == BuildMode.debug ? "--debug" : "--release"] - : [bunExe(), "x", "node-gyp", "rebuild", "--release"], // for node.js we don't bother with debug mode + ? [ + bunExe(), + "--bun", + "run", + "node-gyp", + "rebuild", + buildMode == BuildMode.debug ? "--debug" : "--release", + "-j", + "max", + ] + : [bunExe(), "run", "node-gyp", "rebuild", "--release", "-j", "max"], // for node.js we don't bother with debug mode cwd: tmpDir, env: bunEnv, stdin: "inherit", @@ -82,11 +95,12 @@ async function build( process.exit(1); } - return { - out, - err, - description: `build ${basename(srcDir)} with ${Runtime[runtime]} in ${BuildMode[buildMode]} mode`, - }; + const description = `build ${basename(srcDir)} with ${Runtime[runtime]} in ${BuildMode[buildMode]} mode`; + + console.log(description, "stdout:"); + console.log(out); + console.log(description, "stderr:"); + console.log(err); } describe.todoIf(isBroken && isMusl)("node:v8", () => { @@ -102,18 +116,10 @@ describe.todoIf(isBroken && isMusl)("node:v8", () => { await install(srcDir, directories.node, Runtime.node); await install(join(__dirname, "bad-modules"), directories.badModules, Runtime.node); - const results = await Promise.all([ - build(srcDir, directories.bunRelease, Runtime.bun, BuildMode.release), - build(srcDir, directories.bunDebug, Runtime.bun, BuildMode.debug), - build(srcDir, directories.node, Runtime.node, BuildMode.release), - build(join(__dirname, "bad-modules"), directories.badModules, Runtime.node, BuildMode.release), - ]); - for (const r of results) { - console.log(r.description, "stdout:"); - console.log(r.out); - console.log(r.description, "stderr:"); - console.log(r.err); - } + await build(srcDir, directories.bunRelease, Runtime.bun, BuildMode.release); + await build(srcDir, directories.bunDebug, Runtime.bun, BuildMode.debug); + await build(srcDir, directories.node, Runtime.node, BuildMode.release); + await build(join(__dirname, "bad-modules"), directories.badModules, Runtime.node, BuildMode.release); }); describe("module lifecycle", () => {