diff --git a/.vscode/launch.json b/.vscode/launch.json index 817b7533d3..872ff1e7e1 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -15,6 +15,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -31,6 +32,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--only", "${file}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "1", @@ -53,6 +55,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -69,6 +72,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "0", "BUN_DEBUG_jest": "1", @@ -85,6 +89,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--watch", "${file}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -101,6 +106,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--hot", "${file}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -117,6 +123,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -139,6 +146,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${file}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -162,6 +170,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", + "envFile": "${workspaceFolder}/.env", "env": { "FORCE_COLOR": "0", "BUN_DEBUG_QUIET_LOGS": "1", @@ -178,6 +187,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "0", @@ -197,6 +207,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "0", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", @@ -212,6 +223,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "--watch", "${fileBasename}"], "cwd": "${fileDirname}", + "envFile": "${workspaceFolder}/.env", "env": { // "BUN_DEBUG_DEBUGGER": "1", // "BUN_DEBUG_INTERNAL_DEBUGGER": "1", @@ -230,6 +242,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "--hot", "${fileBasename}"], "cwd": "${fileDirname}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", @@ -245,6 +258,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", + "envFile": "${workspaceFolder}/.env", "env": { "FORCE_COLOR": "0", "BUN_DEBUG_QUIET_LOGS": "1", @@ -267,6 
+281,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", + "envFile": "${workspaceFolder}/.env", "env": { "FORCE_COLOR": "0", "BUN_DEBUG_QUIET_LOGS": "1", @@ -290,6 +305,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -306,6 +322,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -322,6 +339,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -338,6 +356,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--watch", "${input:testName}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -354,6 +373,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "--hot", "${input:testName}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -370,6 +390,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -392,6 +413,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test", "${input:testName}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_jest": "1", @@ -415,6 +437,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["exec", "${input:testName}"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", @@ -431,6 +454,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", @@ -446,6 +470,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "0", @@ -461,6 +486,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["test"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", @@ -482,6 +508,7 @@ "program": "${workspaceFolder}/build/debug/bun-debug", "args": ["install"], "cwd": "${fileDirname}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", @@ -497,6 +524,7 @@ "program": "node", "args": ["test/runner.node.mjs"], "cwd": "${workspaceFolder}", + "envFile": "${workspaceFolder}/.env", "env": { "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7a08c2ac15..933c660a93 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,6 @@ Configuring a development environment for 
Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts. -If you are using Windows, please refer to [this guide](/docs/project/building-windows.md) +If you are using Windows, please refer to [this guide](https://bun.sh/docs/project/building-windows) ## Install Dependencies diff --git a/LATEST b/LATEST index 2c6bb72b8c..f1e15715db 100644 --- a/LATEST +++ b/LATEST @@ -1 +1 @@ -1.1.41 \ No newline at end of file +1.1.42 \ No newline at end of file diff --git a/bench/bun.lockb b/bench/bun.lockb index e77a3b406c..7ccc5f77c5 100755 Binary files a/bench/bun.lockb and b/bench/bun.lockb differ diff --git a/bench/package.json b/bench/package.json index a80d7566dc..d71efc00aa 100644 --- a/bench/package.json +++ b/bench/package.json @@ -13,7 +13,7 @@ "execa": "^8.0.1", "fast-glob": "3.3.1", "fdir": "^6.1.0", - "mitata": "^1.0.10", + "mitata": "^1.0.25", "react": "^18.3.1", "react-dom": "^18.3.1", "string-width": "7.1.0", diff --git a/bench/snippets/native-overhead.mjs b/bench/snippets/native-overhead.mjs index 32d459247e..43576b21d4 100644 --- a/bench/snippets/native-overhead.mjs +++ b/bench/snippets/native-overhead.mjs @@ -1,20 +1,14 @@ +import { noOpForTesting as noop } from "bun:internal-for-testing"; import { bench, run } from "../runner.mjs"; // These are no-op C++ functions that are exported to JS. -const lazy = globalThis[Symbol.for("Bun.lazy")]; -const noop = lazy("noop"); const fn = noop.function; -const regular = noop.functionRegular; const callback = noop.callback; bench("C++ callback into JS", () => { callback(() => {}); }); -bench("C++ fn regular", () => { - regular(); -}); - bench("C++ fn", () => { fn(); }); diff --git a/bench/snippets/node-zlib-brotli.mjs b/bench/snippets/node-zlib-brotli.mjs new file mode 100644 index 0000000000..01208d3ec9 --- /dev/null +++ b/bench/snippets/node-zlib-brotli.mjs @@ -0,0 +1,37 @@ +import { bench, run } from "../runner.mjs"; +import { brotliCompress, brotliDecompress, createBrotliCompress, createBrotliDecompress } from "node:zlib"; +import { promisify } from "node:util"; +import { pipeline } from "node:stream/promises"; +import { Readable } from "node:stream"; +import { readFileSync } from "node:fs"; + +const brotliCompressAsync = promisify(brotliCompress); +const brotliDecompressAsync = promisify(brotliDecompress); + +const testData = + process.argv.length > 2 + ? 
readFileSync(process.argv[2]) + : Buffer.alloc(1024 * 1024 * 16, "abcdefghijklmnopqrstuvwxyz"); +let compressed; + +bench("brotli compress", async () => { + compressed = await brotliCompressAsync(testData); +}); + +bench("brotli decompress", async () => { + await brotliDecompressAsync(compressed); +}); + +bench("brotli compress stream", async () => { + const source = Readable.from([testData]); + const compress = createBrotliCompress(); + await pipeline(source, compress); +}); + +bench("brotli decompress stream", async () => { + const source = Readable.from([compressed]); + const decompress = createBrotliDecompress(); + await pipeline(source, decompress); +}); + +await run(); diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index f583488ee5..af3aa504df 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION 30046aef5ec6590c74c6a696e4f01683f962a6a2) + set(WEBKIT_VERSION e1a802a2287edfe7f4046a9dd8307c8b59f5d816) endif() if(WEBKIT_LOCAL) diff --git a/completions/bun.bash b/completions/bun.bash index ccabb1d73b..eabdc343fb 100644 --- a/completions/bun.bash +++ b/completions/bun.bash @@ -87,7 +87,7 @@ _bun_completions() { GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --env-file --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js"; GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p"; - PACKAGE_OPTIONS[ADD_OPTIONS_LONG]="--development --optional"; + PACKAGE_OPTIONS[ADD_OPTIONS_LONG]="--development --optional --peer"; PACKAGE_OPTIONS[ADD_OPTIONS_SHORT]="-d"; PACKAGE_OPTIONS[REMOVE_OPTIONS_LONG]=""; PACKAGE_OPTIONS[REMOVE_OPTIONS_SHORT]=""; diff --git a/completions/bun.zsh b/completions/bun.zsh index 49264ec3f9..f885ac03ad 100644 --- a/completions/bun.zsh +++ b/completions/bun.zsh @@ -35,6 +35,7 @@ _bun_add_completion() { '-D[]' \ '--development[]' \ '--optional[Add dependency to "optionalDependencies]' \ + '--peer[Add dependency to "peerDependencies]' \ '--exact[Add the exact version instead of the ^range]' && ret=0 @@ -339,6 +340,7 @@ _bun_install_completion() { '--development[]' \ '-D[]' \ '--optional[Add dependency to "optionalDependencies]' \ + '--peer[Add dependency to "peerDependencies]' \ '--exact[Add the exact version instead of the ^range]' && ret=0 diff --git a/docs/api/s3.md b/docs/api/s3.md new file mode 100644 index 0000000000..4c88579a80 --- /dev/null +++ b/docs/api/s3.md @@ -0,0 +1,677 @@ +Production servers often read, upload, and write files to S3-compatible object storage services instead of the local filesystem. Historically, that means local filesystem APIs you use in development can't be used in production. When you use Bun, things are different. + +Bun provides fast, native bindings for interacting with S3-compatible object storage services. Bun's S3 API is designed to be simple and feel similar to fetch's `Response` and `Blob` APIs (like Bun's local filesystem APIs). 
```ts
import { s3, write, S3Client } from "bun";

// Bun.s3 reads environment variables for credentials
// file() returns a lazy reference to a file on S3
const metadata = s3.file("123.json");

// Download from S3 as JSON
const data = await metadata.json();

// Upload to S3
await write(metadata, JSON.stringify({ name: "John", age: 30 }));

// Presign a URL (synchronous - no network request needed)
const url = metadata.presign({
  acl: "public-read",
  expiresIn: 60 * 60 * 24, // 1 day
});

// Delete the file
await metadata.delete();
```

S3 is the [de facto standard](https://en.wikipedia.org/wiki/De_facto_standard) internet filesystem. Bun's S3 API works with S3-compatible storage services like:

- AWS S3
- Cloudflare R2
- DigitalOcean Spaces
- MinIO
- Backblaze B2
- ...and any other S3-compatible storage service

## Basic Usage

There are several ways to interact with Bun's S3 API.

### `Bun.S3Client` & `Bun.s3`

`Bun.s3` is equivalent to `new Bun.S3Client()`, relying on environment variables for credentials.

To explicitly set credentials, pass them to the `Bun.S3Client` constructor.

```ts
import { S3Client } from "bun";

const client = new S3Client({
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // sessionToken: "..."
  // acl: "public-read",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
  // endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
  // endpoint: "https://<region>.digitaloceanspaces.com", // DigitalOcean Spaces
  // endpoint: "http://localhost:9000", // MinIO
});

// Bun.s3 is a global singleton that is equivalent to `new Bun.S3Client()`
Bun.s3 = client;
```

### Working with S3 Files

The **`file`** method in `S3Client` returns a **lazy reference to a file on S3**.

```ts
// A lazy reference to a file on S3
const s3file: S3File = client.file("123.json");
```

Like `Bun.file(path)`, the `S3Client`'s `file` method is synchronous. It makes no network requests until you call a method that requires one.

### Reading files from S3

If you've used the `fetch` API, you're familiar with the `Response` and `Blob` APIs. `S3File` extends `Blob`. The same methods that work on `Blob` also work on `S3File`.

```ts
// Read an S3File as text
const text = await s3file.text();

// Read an S3File as JSON
const json = await s3file.json();

// Read an S3File as an ArrayBuffer
const buffer = await s3file.arrayBuffer();

// Get only the first 1024 bytes
const partial = await s3file.slice(0, 1024).text();

// Stream the file
const stream = s3file.stream();
for await (const chunk of stream) {
  console.log(chunk);
}
```

#### Memory optimization

Methods like `text()`, `json()`, `bytes()`, or `arrayBuffer()` avoid duplicating the string or bytes in memory when possible.

If the text happens to be ASCII, Bun directly transfers the string to JavaScriptCore (the engine) without transcoding and without duplicating the string in memory. When you use `.bytes()` or `.arrayBuffer()`, it will also avoid duplicating the bytes in memory.

These helper methods not only simplify the API, they also make it faster.

### Writing & uploading files to S3

Writing to S3 is just as simple.
```ts
// Write a string (replacing the file)
await s3file.write("Hello World!");

// Write a Buffer (replacing the file)
await s3file.write(Buffer.from("Hello World!"));

// Write a Response (replacing the file)
await s3file.write(new Response("Hello World!"));

// Write with content type
await s3file.write(JSON.stringify({ name: "John", age: 30 }), {
  type: "application/json",
});

// Write using a writer (streaming)
const writer = s3file.writer({ type: "application/json" });
writer.write("Hello");
writer.write(" World!");
await writer.end();

// Write using Bun.write
await Bun.write(s3file, "Hello World!");
```

### Working with large files (streams)

Bun automatically handles multipart uploads for large files and provides streaming capabilities. The same API that works for local files also works for S3 files.

```ts
// Write a large file
const bigFile = Buffer.alloc(10 * 1024 * 1024); // 10MB
const writer = s3file.writer({
  // Automatically retry on network errors up to 3 times
  retry: 3,

  // Queue up to 10 requests at a time
  queueSize: 10,

  // Upload in 5 MB chunks
  partSize: 5,
});
for (let i = 0; i < 10; i++) {
  await writer.write(bigFile);
}
await writer.end();
```

## Presigning URLs

When your production service needs to let users upload files to your server, it's often more reliable for the user to upload directly to S3 instead of your server acting as an intermediary.

To facilitate this, you can presign URLs for S3 files. This generates a URL with a signature that allows a user to securely upload that specific file to S3, without exposing your credentials or granting them unnecessary access to your bucket.

```ts
import { s3 } from "bun";

// Generate a presigned URL that expires in 1 hour
const url = s3.presign("my-file.txt", {
  expiresIn: 3600, // 1 hour
});
```

### Setting ACLs

To set an ACL (access control list) on a presigned URL, pass the `acl` option:

```ts
const url = s3file.presign({
  acl: "public-read",
  expiresIn: 3600,
});
```

You can pass any of the following ACLs:

| ACL                           | Explanation                                                          |
| ----------------------------- | ------------------------------------------------------------------- |
| `"public-read"`               | The object is readable by the public.                                |
| `"private"`                   | The object is readable only by the bucket owner.                     |
| `"public-read-write"`         | The object is readable and writable by the public.                   |
| `"authenticated-read"`        | The object is readable by the bucket owner and authenticated users.  |
| `"aws-exec-read"`             | The object is readable by the AWS account that made the request.     |
| `"bucket-owner-read"`         | The object is readable by the bucket owner.                          |
| `"bucket-owner-full-control"` | The object is readable and writable by the bucket owner.             |
| `"log-delivery-write"`        | The object is writable by AWS services used for log delivery.        |

### Expiring URLs

To set an expiration time for a presigned URL, pass the `expiresIn` option.

```ts
const url = s3file.presign({
  // Seconds
  expiresIn: 3600, // 1 hour

  // access control list
  acl: "public-read",

  // HTTP method
  method: "PUT",
});
```
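Putting the pieces together, a typical upload flow presigns a `PUT` URL on the server and lets the client send the bytes directly to S3, as described at the start of this section. A minimal sketch, where the key and local file name are placeholders:

```ts
import { s3 } from "bun";

// Server side: presign a PUT URL for a specific key (placeholder name)
const uploadUrl = s3.presign("uploads/avatar.png", {
  method: "PUT",
  expiresIn: 3600, // 1 hour
});

// Client side: upload directly to S3 with the presigned URL,
// bypassing your server entirely
await fetch(uploadUrl, {
  method: "PUT",
  body: await Bun.file("avatar.png").arrayBuffer(), // hypothetical local file
});
```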
### `method`

To set the HTTP method for a presigned URL, pass the `method` option.

```ts
const url = s3file.presign({
  method: "PUT",
  // method: "DELETE",
  // method: "GET",
  // method: "HEAD",
  // method: "POST",
  // method: "PUT",
});
```

### `new Response(S3File)`

To quickly redirect users to a presigned URL for an S3 file, pass an `S3File` instance to a `Response` object as the body.

```ts
const response = new Response(s3file);
console.log(response);
```

This will automatically redirect the user to the presigned URL for the S3 file, saving you the memory, time, and bandwidth cost of downloading the file to your server and sending it back to the user.

```ts
Response (0 KB) {
  ok: false,
  url: "",
  status: 302,
  statusText: "",
  headers: Headers {
    "location": "https://<account-id>.r2.cloudflarestorage.com/...",
  },
  redirected: true,
  bodyUsed: false
}
```

## Support for S3-Compatible Services

Bun's S3 implementation works with any S3-compatible storage service. Just specify the appropriate endpoint:

### Using Bun's S3Client with AWS S3

AWS S3 is the default. You can also pass a `region` option instead of an `endpoint` option for AWS S3.

```ts
import { S3Client } from "bun";

// AWS S3
const s3 = new S3Client({
  accessKeyId: "access-key",
  secretAccessKey: "secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
  // region: "us-east-1",
});
```

### Using Bun's S3Client with Google Cloud Storage

To use Bun's S3 client with [Google Cloud Storage](https://cloud.google.com/storage), set `endpoint` to `"https://storage.googleapis.com"` in the `S3Client` constructor.

```ts
import { S3Client } from "bun";

// Google Cloud Storage
const gcs = new S3Client({
  accessKeyId: "access-key",
  secretAccessKey: "secret-key",
  bucket: "my-bucket",
  endpoint: "https://storage.googleapis.com",
});
```

### Using Bun's S3Client with Cloudflare R2

To use Bun's S3 client with [Cloudflare R2](https://developers.cloudflare.com/r2/), set `endpoint` to the R2 endpoint in the `S3Client` constructor. The R2 endpoint includes your account ID.

```ts
import { S3Client } from "bun";

// Cloudflare R2
const r2 = new S3Client({
  accessKeyId: "access-key",
  secretAccessKey: "secret-key",
  bucket: "my-bucket",
  endpoint: "https://<account-id>.r2.cloudflarestorage.com",
});
```

### Using Bun's S3Client with DigitalOcean Spaces

To use Bun's S3 client with [DigitalOcean Spaces](https://www.digitalocean.com/products/spaces/), set `endpoint` to the DigitalOcean Spaces endpoint in the `S3Client` constructor.

```ts
import { S3Client } from "bun";

const spaces = new S3Client({
  accessKeyId: "access-key",
  secretAccessKey: "secret-key",
  bucket: "my-bucket",
  // region: "nyc3",
  endpoint: "https://<region>.digitaloceanspaces.com",
});
```

### Using Bun's S3Client with MinIO

To use Bun's S3 client with [MinIO](https://min.io/), set `endpoint` to the URL that MinIO is running on in the `S3Client` constructor.

```ts
import { S3Client } from "bun";

const minio = new S3Client({
  accessKeyId: "access-key",
  secretAccessKey: "secret-key",
  bucket: "my-bucket",

  // Make sure to use the correct endpoint URL
  // It might not be localhost in production!
  endpoint: "http://localhost:9000",
});
```

## Credentials

Credentials are one of the hardest parts of using S3, and we've tried to make it as easy as possible. By default, Bun reads the following environment variables for credentials.
| Option name       | Environment variable   |
| ----------------- | ---------------------- |
| `accessKeyId`     | `S3_ACCESS_KEY_ID`     |
| `secretAccessKey` | `S3_SECRET_ACCESS_KEY` |
| `region`          | `S3_REGION`            |
| `endpoint`        | `S3_ENDPOINT`          |
| `bucket`          | `S3_BUCKET`            |
| `sessionToken`    | `S3_SESSION_TOKEN`     |

If an `S3_*` environment variable is not set, Bun will also check for the corresponding `AWS_*` environment variable for each of the options above.

| Option name       | Fallback environment variable |
| ----------------- | ----------------------------- |
| `accessKeyId`     | `AWS_ACCESS_KEY_ID`           |
| `secretAccessKey` | `AWS_SECRET_ACCESS_KEY`       |
| `region`          | `AWS_REGION`                  |
| `endpoint`        | `AWS_ENDPOINT`                |
| `bucket`          | `AWS_BUCKET`                  |
| `sessionToken`    | `AWS_SESSION_TOKEN`           |

These environment variables are read from [`.env` files](/docs/runtime/env) or from the process environment at initialization time (`process.env` is not used for this).

These defaults are overridden by the options you pass to `s3(credentials)`, `new Bun.S3Client(credentials)`, or any of the methods that accept credentials. So if, for example, you use the same credentials for different buckets, you can set the credentials once in your `.env` file and then pass `bucket: "my-bucket"` to the `s3()` helper function without having to specify all the credentials again.

### `S3Client` objects

When you're not using environment variables, or are using multiple buckets, you can create an `S3Client` object to explicitly set credentials.

```ts
import { S3Client } from "bun";

const client = new S3Client({
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // sessionToken: "..."
  endpoint: "https://s3.us-east-1.amazonaws.com",
  // endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
  // endpoint: "http://localhost:9000", // MinIO
});

// A lazy reference to a file on S3
const file = client.file("my-file.txt");

// Write using a Response
await file.write(new Response("Hello World!"));

// Presign a URL
const url = file.presign({
  expiresIn: 60 * 60 * 24, // 1 day
  acl: "public-read",
});

// Delete the file
await file.delete();
```

### `S3Client.prototype.write`

To upload or write a file to S3, call `write` on the `S3Client` instance.

```ts
const client = new Bun.S3Client({
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  endpoint: "https://s3.us-east-1.amazonaws.com",
  bucket: "my-bucket",
});
await client.write("my-file.txt", "Hello World!");
await client.write("my-file.txt", new Response("Hello World!"));

// equivalent to
// await client.file("my-file.txt").write("Hello World!");
```

### `S3Client.prototype.delete`

To delete a file from S3, call `delete` on the `S3Client` instance.

```ts
const client = new Bun.S3Client({
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
});

await client.delete("my-file.txt");
// equivalent to
// await client.file("my-file.txt").delete();
```

### `S3Client.prototype.exists`

To check if a file exists in S3, call `exists` on the `S3Client` instance.

```ts
const client = new Bun.S3Client({
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
});

const exists = await client.exists("my-file.txt");
// equivalent to
// const exists = await client.file("my-file.txt").exists();
```
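As a quick sanity check of the client methods above, here is a minimal write/check/read/delete roundtrip. This is a sketch: the bucket and key are placeholder names, and credentials are assumed to come from the `S3_*` / `AWS_*` environment variables described earlier.

```ts
import { S3Client } from "bun";

// accessKeyId/secretAccessKey are resolved from S3_* / AWS_* env vars
const client = new S3Client({ bucket: "my-bucket" }); // placeholder bucket

// Upload, verify, read back, and clean up
await client.write("greeting.txt", "Hello World!");

if (await client.exists("greeting.txt")) {
  // file() returns a lazy S3File; text() performs the actual download
  const text = await client.file("greeting.txt").text();
  console.log(text); // "Hello World!"
}

await client.delete("greeting.txt");
```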
## `S3File`

`S3File` instances are created by calling the `file` method on an `S3Client` instance or the `s3()` helper function. Like `Bun.file()`, `S3File` instances are lazy. They don't refer to something that necessarily exists at the time of creation. That's why all the methods that don't involve network requests are fully synchronous.

```ts
interface S3File extends Blob {
  slice(start: number, end?: number): S3File;
  exists(): Promise<boolean>;
  unlink(): Promise<void>;
  presign(options: S3Options): string;
  text(): Promise<string>;
  json(): Promise<any>;
  bytes(): Promise<Uint8Array>;
  arrayBuffer(): Promise<ArrayBuffer>;
  stream(options: S3Options): ReadableStream;
  write(
    data:
      | string
      | Uint8Array
      | ArrayBuffer
      | Blob
      | ReadableStream
      | Response
      | Request,
    options?: BlobPropertyBag,
  ): Promise<number>;

  exists(options?: S3Options): Promise<boolean>;
  unlink(options?: S3Options): Promise<void>;
  delete(options?: S3Options): Promise<void>;
  presign(options?: S3Options): string;

  stat(options?: S3Options): Promise<S3Stats>;
  /**
   * Size is not synchronously available because it requires a network request.
   *
   * @deprecated Use `stat()` instead.
   */
  size: NaN;

  // ... more omitted for brevity
}
```

Like `Bun.file()`, `S3File` extends [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob), so all the methods that are available on `Blob` are also available on `S3File`. The same API for reading data from a local file is also available for reading data from S3.

| Method                       | Output           |
| ---------------------------- | ---------------- |
| `await s3File.text()`        | `string`         |
| `await s3File.bytes()`       | `Uint8Array`     |
| `await s3File.json()`        | `JSON`           |
| `await s3File.stream()`      | `ReadableStream` |
| `await s3File.arrayBuffer()` | `ArrayBuffer`    |

That means using `S3File` instances with `fetch()`, `Response`, and other web APIs that accept `Blob` instances just works.

### Partial reads with `slice`

To read a partial range of a file, you can use the `slice` method.

```ts
const partial = s3file.slice(0, 1024);

// Read the partial range as a Uint8Array
const bytes = await partial.bytes();

// Read the partial range as a string
const text = await partial.text();
```

Internally, this works by using the HTTP `Range` header to request only the bytes you want. This `slice` method is the same as [`Blob.prototype.slice`](https://developer.mozilla.org/en-US/docs/Web/API/Blob/slice).

### Deleting files from S3

To delete a file from S3, you can use the `delete` method.

```ts
await s3file.delete();
// await s3File.unlink();
```

`delete` is the same as `unlink`.

## Error codes

When Bun's S3 API throws an error, it will have a `code` property that matches one of the following values:

- `ERR_S3_MISSING_CREDENTIALS`
- `ERR_S3_INVALID_METHOD`
- `ERR_S3_INVALID_PATH`
- `ERR_S3_INVALID_ENDPOINT`
- `ERR_S3_INVALID_SIGNATURE`
- `ERR_S3_INVALID_SESSION_TOKEN`

When the S3 Object Storage service returns an error (that is, not Bun), it will be an `S3Error` instance (an `Error` instance with the name `"S3Error"`).
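A sketch of how you might branch on these errors; the key is a placeholder, and only the `code` and `name` properties documented above are relied on:

```ts
import { s3 } from "bun";

try {
  await s3.file("missing.txt").text();
} catch (err) {
  const e = err as { code?: string; name?: string; message?: string };
  if (e.code === "ERR_S3_MISSING_CREDENTIALS") {
    // Thrown by Bun itself before any request is made
    console.error("Set S3_ACCESS_KEY_ID and S3_SECRET_ACCESS_KEY");
  } else if (e.name === "S3Error") {
    // Returned by the S3 service (e.g. a missing key)
    console.error("S3 service error:", e.message);
  } else {
    throw err;
  }
}
```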
## `S3Client` static methods

The `S3Client` class provides several static methods for interacting with S3.

### `S3Client.presign` (static)

To generate a presigned URL for an S3 file, you can use the `S3Client.presign` static method.

```ts
import { S3Client } from "bun";

const credentials = {
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
  // endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};

const url = S3Client.presign("my-file.txt", {
  ...credentials,
  expiresIn: 3600,
});
```

This is equivalent to calling `new S3Client(credentials).presign("my-file.txt", { expiresIn: 3600 })`.

### `S3Client.exists` (static)

To check if an S3 file exists, you can use the `S3Client.exists` static method.

```ts
import { S3Client } from "bun";

const credentials = {
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
};

const exists = await S3Client.exists("my-file.txt", credentials);
```

The same method also works on `S3File` instances.

```ts
const s3file = Bun.s3("my-file.txt", {
  ...credentials,
});
const exists = await s3file.exists();
```

### `S3Client.stat` (static)

To get the size, etag, and other metadata of an S3 file, you can use the `S3Client.stat` static method.

```ts
import { S3Client } from "bun";

const credentials = {
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
};

const stat = await S3Client.stat("my-file.txt", credentials);
// {
//   size: 1024,
//   etag: "1234567890",
//   lastModified: new Date(),
// }
```

### `S3Client.delete` (static)

To delete an S3 file, you can use the `S3Client.delete` static method.

```ts
import { S3Client } from "bun";

const credentials = {
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  // endpoint: "https://s3.us-east-1.amazonaws.com",
};

await S3Client.delete("my-file.txt", credentials);
// equivalent to
// await new S3Client(credentials).delete("my-file.txt");

// S3Client.unlink is an alias of S3Client.delete
await S3Client.unlink("my-file.txt", credentials);
```

## s3:// protocol

To make it easier to use the same code for local files and S3 files, the `s3://` protocol is supported in `fetch` and `Bun.file()`.

```ts
const response = await fetch("s3://my-bucket/my-file.txt");
const file = Bun.file("s3://my-bucket/my-file.txt");
```

You can additionally pass `s3` options to the `fetch` and `Bun.file` functions.

```ts
const response = await fetch("s3://my-bucket/my-file.txt", {
  s3: {
    accessKeyId: "your-access-key",
    secretAccessKey: "your-secret-key",
    endpoint: "https://s3.us-east-1.amazonaws.com",
  },
  headers: {
    "x-amz-meta-foo": "bar",
  },
});
```

### UTF-8, UTF-16, and BOM (byte order mark)

Like `Response` and `Blob`, `S3File` assumes UTF-8 encoding by default.

When calling one of the `text()` or `json()` methods on an `S3File`:

- When a UTF-16 byte order mark (BOM) is detected, it will be treated as UTF-16. JavaScriptCore natively supports UTF-16, so it skips the UTF-8 transcoding process (and strips the BOM). This is mostly good, but it does mean that if you have invalid surrogate pairs in your UTF-16 string, they will be passed through to JavaScriptCore (the same as with source code).
- When a UTF-8 BOM is detected, it gets stripped before the string is passed to JavaScriptCore and invalid UTF-8 codepoints are replaced with the Unicode replacement character (`\uFFFD`).
- UTF-32 is not supported.
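Since `Bun.file()` understands `s3://` URLs and `text()` applies the BOM rules above, reading a remote text file looks the same as reading a local one. A minimal sketch, where the bucket and key are placeholders and credentials come from the environment:

```ts
// Works like a local file: lazy until read
const file = Bun.file("s3://my-bucket/notes.txt");

// If the object begins with a UTF-8 BOM, the BOM is stripped and any
// invalid UTF-8 is replaced with U+FFFD before the string reaches JS
const text = await file.text();
console.log(text);
```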
diff --git a/docs/bundler/plugins.md b/docs/bundler/plugins.md index 8e6b79c0e7..1831e8d6cf 100644 --- a/docs/bundler/plugins.md +++ b/docs/bundler/plugins.md @@ -69,7 +69,7 @@ await Bun.build({ ### Namespaces -`onLoad` and `onResolve` accept an optional `namespace` string. What is a namespaace? +`onLoad` and `onResolve` accept an optional `namespace` string. What is a namespace? Every module has a namespace. Namespaces are used to prefix the import in transpiled code; for instance, a loader with a `filter: /\.yaml$/` and `namespace: "yaml:"` will transform an import from `./myfile.yaml` into `yaml:./myfile.yaml`. @@ -239,7 +239,7 @@ One of the arguments passed to the `onLoad` callback is a `defer` function. This This allows you to delay execution of the `onLoad` callback until all other modules have been loaded. -This is useful for returning contens of a module that depends on other modules. +This is useful for returning contents of a module that depends on other modules. ##### Example: tracking and reporting unused exports diff --git a/docs/cli/add.md b/docs/cli/add.md index ff90730d73..ca9a8af46e 100644 --- a/docs/cli/add.md +++ b/docs/cli/add.md @@ -33,6 +33,14 @@ To add a package as an optional dependency (`"optionalDependencies"`): $ bun add --optional lodash ``` +## `--peer` + +To add a package as a peer dependency (`"peerDependencies"`): + +```bash +$ bun add --peer @types/bun +``` + ## `--exact` {% callout %} diff --git a/docs/guides/install/add-optional.md b/docs/guides/install/add-optional.md index 6ea2182f01..ad671aa0e0 100644 --- a/docs/guides/install/add-optional.md +++ b/docs/guides/install/add-optional.md @@ -2,7 +2,7 @@ name: Add an optional dependency --- -To add an npm package as a peer dependency, use the `--optional` flag. +To add an npm package as an optional dependency, use the `--optional` flag. ```sh $ bun add zod --optional diff --git a/docs/install/cache.md b/docs/install/cache.md index 6543ec87a4..f03f75e432 100644 --- a/docs/install/cache.md +++ b/docs/install/cache.md @@ -1,4 +1,4 @@ -All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached. +All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`, or the path defined by the environment variable `BUN_INSTALL_CACHE_DIR`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached. {% details summary="Configuring cache behavior (bunfig.toml)" %} diff --git a/docs/install/index.md b/docs/install/index.md index 8f412a05e9..0f04c92a12 100644 --- a/docs/install/index.md +++ b/docs/install/index.md @@ -62,12 +62,18 @@ To exclude dependency types from installing, use `--omit` with `dev`, `optional` $ bun install --omit=dev --omit=optional ``` -To perform a dry run (i.e. don't actually install anything): +To perform a dry run (i.e. 
don't actually install anything or update the lockfile):

```bash
$ bun install --dry-run
```

+To generate a lockfile without installing packages:
+
+```bash
+$ bun install --lockfile-only
+```
+
To modify logging verbosity:

```bash
@@ -137,6 +143,12 @@ To add a package as an optional dependency (`"optionalDependencies"`):
$ bun add --optional lodash
```

+To add a package as a peer dependency (`"peerDependencies"`):
+
+```bash
+$ bun add --peer @types/bun
+```
+
To install a package globally:

```bash
diff --git a/docs/install/lockfile.md b/docs/install/lockfile.md
index 01df57fe0f..72e11c8944 100644
--- a/docs/install/lockfile.md
+++ b/docs/install/lockfile.md
@@ -49,6 +49,18 @@ Packages, metadata for those packages, the hoisted install order, dependencies f
It uses linear arrays for all data. [Packages](https://github.com/oven-sh/bun/blob/be03fc273a487ac402f19ad897778d74b6d72963/src/install/install.zig#L1825) are referenced by an auto-incrementing integer ID or a hash of the package name. Strings longer than 8 characters are de-duplicated. Prior to saving on disk, the lockfile is garbage-collected & made deterministic by walking the package tree and cloning the packages in dependency order.

+#### Generate a lockfile without installing?
+
+To generate a lockfile without installing to `node_modules` you can use the `--lockfile-only` flag. The lockfile will always be saved to disk, even if it is up-to-date with the `package.json`(s) for your project.
+
+```bash
+$ bun install --lockfile-only
+```
+
+{% callout %}
+**Note** - using `--lockfile-only` will still populate the global install cache with registry metadata and git/tarball dependencies.
+{% endcallout %}
+
#### Can I opt out?

To install without creating a lockfile:
diff --git a/docs/install/workspaces.md b/docs/install/workspaces.md
index 64d2445132..fb25a0a7db 100644
--- a/docs/install/workspaces.md
+++ b/docs/install/workspaces.md
@@ -53,6 +53,16 @@ Each workspace has it's own `package.json`. When referencing other packages in t
}
```

+`bun install` will install dependencies for all workspaces in the monorepo, de-duplicating packages if possible. If you only want to install dependencies for specific workspaces, you can use the `--filter` flag.
+
+```bash
+# Install dependencies for all workspaces starting with `pkg-` except for `pkg-c`
+$ bun install --filter "pkg-*" --filter "!pkg-c"
+
+# Paths can also be used. This is equivalent to the command above.
+$ bun install --filter "./packages/pkg-*" --filter "!pkg-c" # or --filter "!./packages/pkg-c"
+```
+
Workspaces have a couple major benefits.

- **Code can be split into logical parts.** If one package relies on another, you can simply add it as a dependency in `package.json`. If package `b` depends on `a`, `bun install` will install your local `packages/a` directory into `node_modules` instead of downloading it from the npm registry.
diff --git a/docs/nav.ts b/docs/nav.ts index 900cfdcb24..124ff0febc 100644 --- a/docs/nav.ts +++ b/docs/nav.ts @@ -311,6 +311,9 @@ export default { page("api/streams", "Streams", { description: `Reading, writing, and manipulating streams of data in Bun.`, }), // "`Bun.serve`"), + page("api/s3", "S3 Object Storage", { + description: `Bun provides fast, native bindings for interacting with S3-compatible object storage services.`, + }), page("api/file-io", "File I/O", { description: `Read and write files fast with Bun's heavily optimized file system API.`, }), // "`Bun.write`"), diff --git a/docs/runtime/nodejs-apis.md b/docs/runtime/nodejs-apis.md index f51b4c4553..b0e2630b6a 100644 --- a/docs/runtime/nodejs-apis.md +++ b/docs/runtime/nodejs-apis.md @@ -53,7 +53,7 @@ Some methods are not optimized yet. ### [`node:events`](https://nodejs.org/api/events.html) -🟡 `events.addAbortListener` & `events.getMaxListeners` do not support (web api) `EventTarget` +🟢 Fully implemented. `EventEmitterAsyncResource` uses `AsyncResource` underneath. ### [`node:fs`](https://nodejs.org/api/fs.html) @@ -157,11 +157,11 @@ Some methods are not optimized yet. ### [`node:v8`](https://nodejs.org/api/v8.html) -🔴 `serialize` and `deserialize` use JavaScriptCore's wire format instead of V8's. Otherwise, not implemented. For profiling, use [`bun:jsc`](https://bun.sh/docs/project/benchmarking#bunjsc) instead. +🟡 `writeHeapSnapshot` and `getHeapSnapshot` are implemented. `serialize` and `deserialize` use JavaScriptCore's wire format instead of V8's. Other methods are not implemented. For profiling, use [`bun:jsc`](https://bun.sh/docs/project/benchmarking#bunjsc) instead. ### [`node:vm`](https://nodejs.org/api/vm.html) -🟡 Core functionality works, but experimental VM ES modules are not implemented, including `vm.Module`, `vm.SourceTextModule`, `vm.SyntheticModule`,`importModuleDynamically`, and `vm.measureMemory`. Options like `timeout`, `breakOnSigint`, `cachedData` are not implemented yet. There is a bug with `this` value for contextified options not having the correct prototype. +🟡 Core functionality works, but experimental VM ES modules are not implemented, including `vm.Module`, `vm.SourceTextModule`, `vm.SyntheticModule`,`importModuleDynamically`, and `vm.measureMemory`. Options like `timeout`, `breakOnSigint`, `cachedData` are not implemented yet. ### [`node:wasi`](https://nodejs.org/api/wasi.html) @@ -341,7 +341,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa ### [`process`](https://nodejs.org/api/process.html) -🟡 Missing `domain` `initgroups` `setegid` `seteuid` `setgid` `setgroups` `setuid` `allowedNodeEnvironmentFlags` `getActiveResourcesInfo` `setActiveResourcesInfo` `moduleLoadList` `setSourceMapsEnabled`. `process.binding` is partially implemented. +🟡 Missing `initgroups` `allowedNodeEnvironmentFlags` `getActiveResourcesInfo` `setActiveResourcesInfo` `moduleLoadList` `setSourceMapsEnabled`. `process.binding` is partially implemented. 
### [`queueMicrotask()`](https://developer.mozilla.org/en-US/docs/Web/API/queueMicrotask)

diff --git a/package.json b/package.json
index 56ad737a1d..b0cfe51737 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
 {
   "private": true,
   "name": "bun",
-  "version": "1.1.42",
+  "version": "1.1.43",
   "workspaces": [
     "./packages/bun-types"
   ],
diff --git a/packages/bun-types/bun.d.ts b/packages/bun-types/bun.d.ts
index 96e6cbf5ae..e4f66b59f9 100644
--- a/packages/bun-types/bun.d.ts
+++ b/packages/bun-types/bun.d.ts
@@ -17,6 +17,7 @@ declare module "bun" {
   import type { FFIFunctionCallableSymbol } from "bun:ffi";
   import type { Encoding as CryptoEncoding } from "crypto";
   import type { CipherNameAndProtocol, EphemeralKeyInfo, PeerCertificate } from "tls";
+  import type { Stats } from "node:fs";
   interface Env {
     NODE_ENV?: string;
     /**
@@ -1226,45 +1227,220 @@
      */
     unlink(): Promise<void>;
   }
-
-  interface S3FileOptions extends BlobPropertyBag {
+  interface NetworkSink extends FileSink {
     /**
-     * The bucket to use for the S3 client. by default will use the `S3_BUCKET` and `AWS_BUCKET` environment variable, or deduce as first part of the path.
+     * Write a chunk of data to the network.
+     *
+     * If the network is not writable yet, the data is buffered.
+     */
+    write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
+    /**
+     * Flush the internal buffer, committing the data to the network.
+     */
+    flush(): number | Promise<number>;
+    /**
+     * Finish the upload. This also flushes the internal buffer.
+     */
+    end(error?: Error): number | Promise<number>;
+
+    /**
+     * Get the stat of the file.
+     */
+    stat(): Promise<Stats>;
+  }
+
+  var S3Client: S3Client;
+
+  /**
+   * Creates a new S3File instance for working with a single file.
+   *
+   * @param path The path or key of the file
+   * @param options S3 configuration options
+   * @returns `S3File` instance for the specified path
+   *
+   * @example
+   * import { s3 } from "bun";
+   * const file = s3("my-file.txt", {
+   *   bucket: "my-bucket",
+   *   accessKeyId: "your-access-key",
+   *   secretAccessKey: "your-secret-key"
+   * });
+   *
+   * // Read the file
+   * const content = await file.text();
+   *
+   * @example
+   * // Using s3:// protocol
+   * const file = s3("s3://my-bucket/my-file.txt", {
+   *   accessKeyId: "your-access-key",
+   *   secretAccessKey: "your-secret-key"
+   * });
+   */
+  function s3(path: string | URL, options?: S3Options): S3File;
+
+  /**
+   * Configuration options for S3 operations
+   */
+  interface S3Options extends BlobPropertyBag {
+    /**
+     * The Access Control List (ACL) policy for the file.
+     * Controls who can access the file and what permissions they have.
+     *
+     * @example
+     * // Setting public read access
+     * const file = s3("public-file.txt", {
+     *   acl: "public-read",
+     *   bucket: "my-bucket"
+     * });
+     *
+     * @example
+     * // Using with presigned URLs
+     * const url = file.presign({
+     *   acl: "public-read",
+     *   expiresIn: 3600
+     * });
+     */
+    acl?:
+      | "private"
+      | "public-read"
+      | "public-read-write"
+      | "aws-exec-read"
+      | "authenticated-read"
+      | "bucket-owner-read"
+      | "bucket-owner-full-control"
+      | "log-delivery-write";
+
+    /**
+     * The S3 bucket name. Can be set via `S3_BUCKET` or `AWS_BUCKET` environment variables.
+     *
+     * @example
+     * // Using explicit bucket
+     * const file = s3("my-file.txt", { bucket: "my-bucket" });
+     *
+     * @example
+     * // Using environment variables
+     * // With S3_BUCKET=my-bucket in .env
+     * const file = s3("my-file.txt");
      */
     bucket?: string;
+
+    /**
-     * The region to use for the S3 client. By default, it will use the `S3_REGION` and `AWS_REGION` environment variable.
+     * The AWS region. Can be set via `S3_REGION` or `AWS_REGION` environment variables.
+     *
+     * @example
+     * const file = s3("my-file.txt", {
+     *   bucket: "my-bucket",
+     *   region: "us-west-2"
+     * });
      */
     region?: string;
+
     /**
-     * The access key ID to use for the S3 client. By default, it will use the `S3_ACCESS_KEY_ID` and `AWS_ACCESS_KEY_ID` environment variable.
+     * The access key ID for authentication.
+     * Can be set via `S3_ACCESS_KEY_ID` or `AWS_ACCESS_KEY_ID` environment variables.
      */
     accessKeyId?: string;
+
     /**
-     * The secret access key to use for the S3 client. By default, it will use the `S3_SECRET_ACCESS_KEY and `AWS_SECRET_ACCESS_KEY` environment variable.
+     * The secret access key for authentication.
+     * Can be set via `S3_SECRET_ACCESS_KEY` or `AWS_SECRET_ACCESS_KEY` environment variables.
      */
     secretAccessKey?: string;
     /**
-     * The endpoint to use for the S3 client. Defaults to `https://s3.{region}.amazonaws.com`, it will also use the `S3_ENDPOINT` and `AWS_ENDPOINT` environment variable.
+     * Optional session token for temporary credentials.
+     * Can be set via `S3_SESSION_TOKEN` or `AWS_SESSION_TOKEN` environment variables.
+     *
+     * @example
+     * // Using temporary credentials
+     * const file = s3("my-file.txt", {
+     *   accessKeyId: tempAccessKey,
+     *   secretAccessKey: tempSecretKey,
+     *   sessionToken: tempSessionToken
+     * });
+     */
+    sessionToken?: string;
+
+    /**
+     * The S3-compatible service endpoint URL.
+     * Can be set via `S3_ENDPOINT` or `AWS_ENDPOINT` environment variables.
+     *
+     * @example
+     * // AWS S3
+     * const file = s3("my-file.txt", {
+     *   endpoint: "https://s3.us-east-1.amazonaws.com"
+     * });
+     *
+     * @example
+     * // Cloudflare R2
+     * const file = s3("my-file.txt", {
+     *   endpoint: "https://<account-id>.r2.cloudflarestorage.com"
+     * });
+     *
+     * @example
+     * // DigitalOcean Spaces
+     * const file = s3("my-file.txt", {
+     *   endpoint: "https://<region>.digitaloceanspaces.com"
+     * });
+     *
+     * @example
+     * // MinIO (local development)
+     * const file = s3("my-file.txt", {
+     *   endpoint: "http://localhost:9000"
+     * });
      */
     endpoint?: string;
     /**
-     * The size of each part in MiB. Minimum and Default is 5 MiB and maximum is 5120 MiB.
+     * The size of each part in multipart uploads (in bytes).
+     * - Minimum: 5 MiB
+     * - Maximum: 5120 MiB
+     * - Default: 5 MiB
+     *
+     * @example
+     * // Configuring multipart uploads
+     * const file = s3("large-file.dat", {
+     *   partSize: 10 * 1024 * 1024, // 10 MiB parts
+     *   queueSize: 4 // Upload 4 parts in parallel
+     * });
+     *
+     * const writer = file.writer();
+     * // ... write large file in chunks
      */
     partSize?: number;
+
     /**
-     * The number of parts to upload in parallel. Default is 5 and maximum is 255. This can speed up the upload of large files but will also use more memory.
+     * Number of parts to upload in parallel for multipart uploads.
+     * - Default: 5
+     * - Maximum: 255
+     *
+     * Increasing this value can improve upload speeds for large files
+     * but will use more memory.
      */
     queueSize?: number;
+
     /**
-     * The number of times to retry the upload if it fails. Default is 3 and maximum is 255.
+     * Number of retry attempts for failed uploads.
+     * - Default: 3
+     * - Maximum: 255
+     *
+     * @example
+     * // Setting retry attempts
+     * const file = s3("my-file.txt", {
+     *   retry: 5 // Retry failed uploads up to 5 times
+     * });
      */
     retry?: number;
     /**
-     * The Content-Type of the file.
+     * Automatically set based on file extension when possible.
+     *
+     * @example
+     * // Setting explicit content type
+     * const file = s3("data.bin", {
+     *   type: "application/octet-stream"
+     * });
      */
     type?: string;
@@ -1274,144 +1450,534 @@ declare module "bun" {
     highWaterMark?: number;
   }
-  interface S3FilePresignOptions extends S3FileOptions {
+  /**
+   * Options for generating presigned URLs
+   */
+  interface S3FilePresignOptions extends S3Options {
     /**
-     * The number of seconds the presigned URL will be valid for. Defaults to 86400 (1 day).
+     * Number of seconds until the presigned URL expires.
+     * - Default: 86400 (1 day)
+     *
+     * @example
+     * // Short-lived URL
+     * const url = file.presign({
+     *   expiresIn: 3600 // 1 hour
+     * });
+     *
+     * @example
+     * // Long-lived public URL
+     * const url = file.presign({
+     *   expiresIn: 7 * 24 * 60 * 60, // 7 days
+     *   acl: "public-read"
+     * });
      */
     expiresIn?: number;
+
     /**
-     * The HTTP method to use for the presigned URL. Defaults to GET.
+     * The HTTP method allowed for the presigned URL.
+     *
+     * @example
+     * // GET URL for downloads
+     * const downloadUrl = file.presign({
+     *   method: "GET",
+     *   expiresIn: 3600
+     * });
+     *
+     * @example
+     * // PUT URL for uploads
+     * const uploadUrl = file.presign({
+     *   method: "PUT",
+     *   expiresIn: 3600,
+     *   type: "application/json"
+     * });
      */
-    method?: string;
+    method?: "GET" | "POST" | "PUT" | "DELETE" | "HEAD";
   }
-  interface S3File extends BunFile {
-    /**
-     * @param path - The path to the file. If bucket options is not provided or set in the path, it will be deduced from the path.
-     * @param options - The options to use for the S3 client.
-     */
-    new (path: string | URL, options?: S3FileOptions): S3File;
+  interface S3Stats {
+    size: number;
+    lastModified: Date;
+    etag: string;
+    type: string;
+  }
+
+  /**
+   * Represents a file in an S3-compatible storage service.
+   * Extends the Blob interface for compatibility with web APIs.
+   */
+  interface S3File extends Blob {
     /**
      * The size of the file in bytes.
+     * This is a Promise because it requires a network request to determine the size.
+     *
+     * @example
+     * // Getting file size
+     * const size = await file.size;
+     * console.log(`File size: ${size} bytes`);
+     *
+     * @example
+     * // Check if file is larger than 1MB
+     * if (await file.size > 1024 * 1024) {
+     *   console.log("Large file detected");
+     * }
      */
-    size: Promise<number>;
     /**
-     * Offset any operation on the file starting at `begin` and ending at `end`. `end` is relative to 0
+     * TODO: figure out how to get the typescript types to not error for this property.
+     */
+    // size: Promise<number>;
+
+    /**
+     * Creates a new S3File representing a slice of the original file.
+     * Uses HTTP Range headers for efficient partial downloads.
      *
-     * Similar to [`TypedArray.subarray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). Does not copy the file, open the file, or modify the file.
+     * @param begin - Starting byte offset
+     * @param end - Ending byte offset (exclusive)
+     * @param contentType - Optional MIME type for the slice
+     * @returns A new S3File representing the specified range
      *
-     * It will use [`range`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Range) to download only the bytes you need.
+ * @example + * // Reading file header + * const header = file.slice(0, 1024); + * const headerText = await header.text(); * - * @param begin - start offset in bytes - * @param end - absolute offset in bytes (relative to 0) - * @param contentType - MIME type for the new S3File + * @example + * // Reading with content type + * const jsonSlice = file.slice(1024, 2048, "application/json"); + * const data = await jsonSlice.json(); + * + * @example + * // Reading from offset to end + * const remainder = file.slice(1024); + * const content = await remainder.text(); */ slice(begin?: number, end?: number, contentType?: string): S3File; - - /** */ - /** - * Offset any operation on the file starting at `begin` - * - * Similar to [`TypedArray.subarray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). Does not copy the file, open the file, or modify the file. - * - * It will use [`range`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Range) to download only the bytes you need. - * - * @param begin - start offset in bytes - * @param contentType - MIME type for the new S3File - */ slice(begin?: number, contentType?: string): S3File; - - /** - * @param contentType - MIME type for the new S3File - */ slice(contentType?: string): S3File; /** - * Incremental writer to stream writes to S3, this is equivalent of using MultipartUpload and is suitable for large files. + * Creates a writable stream for uploading data. + * Suitable for large files as it uses multipart upload. + * + * @param options - Configuration for the upload + * @returns A NetworkSink for writing data + * + * @example + * // Basic streaming write + * const writer = file.writer({ + * type: "application/json" + * }); + * writer.write('{"hello": '); + * writer.write('"world"}'); + * await writer.end(); + * + * @example + * // Optimized large file upload + * const writer = file.writer({ + * partSize: 10 * 1024 * 1024, // 10MB parts + * queueSize: 4, // Upload 4 parts in parallel + * retry: 3 // Retry failed parts + * }); + * + * // Write large chunks of data efficiently + * for (const chunk of largeDataChunks) { + * await writer.write(chunk); + * } + * await writer.end(); + * + * @example + * // Error handling + * const writer = file.writer(); + * try { + * await writer.write(data); + * await writer.end(); + * } catch (err) { + * console.error('Upload failed:', err); + * // Writer will automatically abort multipart upload on error + * } */ - writer(options?: S3FileOptions): FileSink; + writer(options?: S3Options): NetworkSink; /** - * The readable stream of the file. + * Gets a readable stream of the file's content. + * Useful for processing large files without loading them entirely into memory. + * + * @returns A ReadableStream for the file content + * + * @example + * // Basic streaming read + * const stream = file.stream(); + * for await (const chunk of stream) { + * console.log('Received chunk:', chunk); + * } + * + * @example + * // Piping to response + * const stream = file.stream(); + * return new Response(stream, { + * headers: { 'Content-Type': file.type } + * }); + * + * @example + * // Processing large files + * const stream = file.stream(); + * const textDecoder = new TextDecoder(); + * for await (const chunk of stream) { + * const text = textDecoder.decode(chunk); + * // Process text chunk by chunk + * } */ readonly readable: ReadableStream; - - /** - * Get a readable stream of the file. 
-     */
    stream(): ReadableStream;
    /**
-     * The name or path of the file, as specified in the constructor.
+     * The name or path of the file in the bucket.
+     *
+     * @example
+     * const file = s3("folder/image.jpg");
+     * console.log(file.name); // "folder/image.jpg"
      */
    readonly name?: string;
    /**
-     * The bucket name of the file.
+     * The bucket name containing the file.
+     *
+     * @example
+     * const file = s3("s3://my-bucket/file.txt");
+     * console.log(file.bucket); // "my-bucket"
      */
    readonly bucket?: string;
    /**
-     * Does the file exist?
-     * It will use [`head`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD) to check if the file exists.
+     * Checks if the file exists in S3.
+     * Uses HTTP HEAD request to efficiently check existence without downloading.
+     *
+     * @returns Promise resolving to true if file exists, false otherwise
+     *
+     * @example
+     * // Basic existence check
+     * if (await file.exists()) {
+     *   console.log("File exists in S3");
+     * }
+     *
+     * @example
+     * // With error handling
+     * try {
+     *   const exists = await file.exists();
+     *   if (!exists) {
+     *     console.log("File not found");
+     *   }
+     * } catch (err) {
+     *   console.error("Error checking file:", err);
+     * }
      */
    exists(): Promise<boolean>;
    /**
-     * Uploads the data to S3. This is equivalent of using {@link S3File.upload} with a {@link S3File}.
-     * @param data - The data to write.
-     * @param options - The options to use for the S3 client.
+     * Uploads data to S3.
+     * Supports various input types and automatically handles large files.
+     *
+     * @param data - The data to upload
+     * @param options - Upload configuration options
+     * @returns Promise resolving to number of bytes written
+     *
+     * @example
+     * // Writing string data
+     * await file.write("Hello World", {
+     *   type: "text/plain"
+     * });
+     *
+     * @example
+     * // Writing JSON
+     * const data = { hello: "world" };
+     * await file.write(JSON.stringify(data), {
+     *   type: "application/json"
+     * });
+     *
+     * @example
+     * // Writing from Response
+     * const response = await fetch("https://example.com/data");
+     * await file.write(response);
+     *
+     * @example
+     * // Writing with ACL
+     * await file.write(data, {
+     *   acl: "public-read",
+     *   type: "application/octet-stream"
+     * });
      */
    write(
      data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File | Blob,
-      options?: S3FileOptions,
+      options?: S3Options,
    ): Promise<number>;
    /**
-     * Returns a presigned URL for the file.
-     * @param options - The options to use for the presigned URL.
+     * Generates a presigned URL for the file.
+     * Allows temporary access to the file without exposing credentials.
+     *
+     * @param options - Configuration for the presigned URL
+     * @returns Presigned URL string
+     *
+     * @example
+     * // Basic download URL
+     * const url = file.presign({
+     *   expiresIn: 3600 // 1 hour
+     * });
+     *
+     * @example
+     * // Upload URL with specific content type
+     * const uploadUrl = file.presign({
+     *   method: "PUT",
+     *   expiresIn: 3600,
+     *   type: "image/jpeg",
+     *   acl: "public-read"
+     * });
+     *
+     * @example
+     * // URL with custom permissions
+     * const url = file.presign({
+     *   method: "GET",
+     *   expiresIn: 7 * 24 * 60 * 60, // 7 days
+     *   acl: "public-read"
+     * });
      */
    presign(options?: S3FilePresignOptions): string;
    /**
     * Deletes the file from S3.
      */
    presign(options?: S3FilePresignOptions): string;

    /**
     * Deletes the file from S3.
+     *
+     * @returns Promise that resolves when deletion is complete
+     *
+     * @example
+     * // Basic deletion
+     * await file.delete();
+     *
+     * @example
+     * // With error handling
+     * try {
+     *   await file.delete();
+     *   console.log("File deleted successfully");
+     * } catch (err) {
+     *   console.error("Failed to delete file:", err);
+     * }
      */
-    unlink(): Promise<void>;
+    delete(): Promise<void>;
+
+    /**
+     * Alias for the delete() method.
+     * Provided for compatibility with Node.js fs API naming.
+     *
+     * @example
+     * await file.unlink();
+     */
+    unlink: S3File["delete"];
+
+    /**
+     * Get the stat of a file in an S3-compatible storage service.
+     *
+     * @returns Promise resolving to S3Stat
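+     *
+     * @example
+     * // Minimal sketch; assumes S3Stat exposes a numeric `size` field
+     * const info = await file.stat();
+     * console.log(`${info.size} bytes`);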
+     */
+    stat(): Promise<S3Stat>;
  }

-  namespace S3File {
+  /**
+   * A configured S3 bucket instance for managing files.
+   * The instance is callable to create S3File instances and provides methods
+   * for common operations.
+   *
+   * @example
+   * // Basic bucket setup
+   * const bucket = new S3Client({
+   *   bucket: "my-bucket",
+   *   accessKeyId: "key",
+   *   secretAccessKey: "secret"
+   * });
+   *
+   * // Get file instance
+   * const file = bucket("image.jpg");
+   *
+   * // Common operations
+   * await bucket.write("data.json", JSON.stringify({hello: "world"}));
+   * const url = bucket.presign("file.pdf");
+   * await bucket.unlink("old.txt");
+   */
+  type S3Client = {
    /**
-     * Uploads the data to S3.
-     * @param data - The data to write.
-     * @param options - The options to use for the S3 client.
+     * Create a new instance of an S3 bucket so that credentials can be managed
+     * from a single instance instead of being passed to every method.
+     *
+     * @param options The default options to use for the S3 client. Can be
+     * overridden by passing options to the methods.
+     *
+     * ## Keep S3 credentials in a single instance
+     *
+     * @example
+     * const bucket = new Bun.S3Client({
+     *   accessKeyId: "your-access-key",
+     *   secretAccessKey: "your-secret-key",
+     *   bucket: "my-bucket",
+     *   endpoint: "https://s3.us-east-1.amazonaws.com",
+     *   sessionToken: "your-session-token",
+     * });
+     *
+     * // S3Client is callable, so you can do this:
+     * const file = bucket.file("my-file.txt");
+     *
+     * // or this:
+     * await file.write("Hello Bun!");
+     * await file.text();
+     *
+     * // To delete the file:
+     * await bucket.delete("my-file.txt");
+     *
+     * // To write a file without returning the instance:
+     * await bucket.write("my-file.txt", "Hello Bun!");
+     *
      */
-    function upload(
-      path: string | S3File,
-      data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File,
-      options?: S3FileOptions,
+    new (options?: S3Options): S3Client;
+
+    /**
+     * Creates an S3File instance for the given path.
+     *
+     * @example
+     * const file = bucket.file("image.jpg");
+     * await file.write(imageData);
+     * const configFile = bucket("config.json", {
+     *   type: "application/json",
+     *   acl: "private"
+     * });
+     */
+    file(path: string, options?: S3Options): S3File;
+
+    /**
+     * Writes data directly to a path in the bucket.
+     * Supports strings, buffers, streams, and web API types.
+     *
+     * @example
+     * // Write string
+     * await bucket.write("hello.txt", "Hello World");
+     *
+     * // Write JSON with type
+     * await bucket.write(
+     *   "data.json",
+     *   JSON.stringify({hello: "world"}),
+     *   {type: "application/json"}
+     * );
+     *
+     * // Write from fetch
+     * const res = await fetch("https://example.com/data");
+     * await bucket.write("data.bin", res);
+     *
+     * // Write with ACL
+     * await bucket.write("public.html", html, {
+     *   acl: "public-read",
+     *   type: "text/html"
+     * });
+     */
+    write(
+      path: string,
+      data:
+        | string
+        | ArrayBufferView
+        | ArrayBuffer
+        | SharedArrayBuffer
+        | Request
+        | Response
+        | BunFile
+        | S3File
+        | Blob
+        | File,
+      options?: S3Options,
    ): Promise<number>;

    /**
-     * Returns a presigned URL for the file.
-     * @param options - The options to use for the presigned URL.
+     * Generate a presigned URL for temporary access to a file.
+     * Useful for generating upload/download URLs without exposing credentials.
+     *
+     * @example
+     * // Download URL
+     * const downloadUrl = bucket.presign("file.pdf", {
+     *   expiresIn: 3600 // 1 hour
+     * });
+     *
+     * // Upload URL
+     * const uploadUrl = bucket.presign("uploads/image.jpg", {
+     *   method: "PUT",
+     *   expiresIn: 3600,
+     *   type: "image/jpeg",
+     *   acl: "public-read"
+     * });
+     *
+     * // Long-lived public URL
+     * const publicUrl = bucket.presign("public/doc.pdf", {
+     *   expiresIn: 7 * 24 * 60 * 60, // 7 days
+     *   acl: "public-read"
+     * });
      */
-    function presign(path: string | S3File, options?: S3FilePresignOptions): string;
+    presign(path: string, options?: S3FilePresignOptions): string;

    /**
-     * Deletes the file from S3.
+     * Delete a file from the bucket.
+     *
+     * @example
+     * // Simple delete
+     * await bucket.unlink("old-file.txt");
+     *
+     * // With error handling
+     * try {
+     *   await bucket.unlink("file.dat");
+     *   console.log("File deleted");
+     * } catch (err) {
+     *   console.error("Delete failed:", err);
+     * }
      */
-    function unlink(path: string | S3File, options?: S3FileOptions): Promise<void>;
+    unlink(path: string, options?: S3Options): Promise<void>;
+    delete: S3Client["unlink"];

    /**
-     * The size of the file in bytes.
+     * Get the size of a file in bytes.
+     * Uses a HEAD request to efficiently get the size.
+     *
+     * @example
+     * // Get size
+     * const bytes = await bucket.size("video.mp4");
+     * console.log(`Size: ${bytes} bytes`);
+     *
+     * // Check if file is large
+     * if (await bucket.size("data.zip") > 100 * 1024 * 1024) {
+     *   console.log("File is larger than 100MB");
+     * }
      */
-    function size(path: string | S3File, options?: S3FileOptions): Promise<number>;
+    size(path: string, options?: S3Options): Promise<number>;

    /**
-     * The size of the file in bytes.
+     * Check if a file exists in the bucket.
+     * Uses a HEAD request to check existence.
+     *
+     * @example
+     * // Check existence
+     * if (await bucket.exists("config.json")) {
+     *   const file = bucket("config.json");
+     *   const config = await file.json();
+     * }
+     *
+     * // With error handling
+     * try {
+     *   if (!await bucket.exists("required.txt")) {
+     *     throw new Error("Required file missing");
+     *   }
+     * } catch (err) {
+     *   console.error("Check failed:", err);
+     * }
      */
-    function exists(path: string | S3File, options?: S3FileOptions): Promise<boolean>;
-  }
+    exists(path: string, options?: S3Options): Promise<boolean>;
+    /**
+     * Get the stat of a file in an S3-compatible storage service.
+     *
+     * @param path The path to the file.
+     * @param options The options to use for the S3 client.
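+     *
+     * @example
+     * // Minimal sketch; `size` and `lastModified` are assumed S3Stat fields
+     * const info = await bucket.stat("backup.tar.gz");
+     * console.log(info.size, info.lastModified);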
+     */
+    stat(path: string, options?: S3Options): Promise<S3Stat>;
+  };

  /**
   * This lets you use macros as regular imports
@@ -3263,17 +3829,6 @@ declare module "bun" {
  // tslint:disable-next-line:unified-signatures
  function file(fileDescriptor: number, options?: BlobPropertyBag): BunFile;

-  /**
-   * Lazily load/upload a file from S3.
-   * @param path - The path to the file. If bucket options is not provided or set in the path, it will be deduced from the path.
-   * @param options - The options to use for the S3 client.
-   */
-  function s3(path: string | URL, options?: S3FileOptions): S3File;
-  /**
-   * The S3 file class.
-   */
-  const S3: typeof S3File;
-
  /**
   * Allocate a new [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) without zeroing the bytes.
   *
@@ -3580,9 +4135,24 @@ declare module "bun" {
  function nanoseconds(): number;

  /**
-   * Generate a heap snapshot for seeing where the heap is being used
+   * Show precise statistics about memory usage of your application
+   *
+   * Generate a heap snapshot in JavaScriptCore's format that can be viewed with `bun --inspect` or Safari's Web Inspector
   */
-  function generateHeapSnapshot(): HeapSnapshot;
+  function generateHeapSnapshot(format?: "jsc"): HeapSnapshot;
+
+  /**
+   * Show precise statistics about memory usage of your application
+   *
+   * Generate a V8 Heap Snapshot that can be used with Chrome DevTools & Visual Studio Code
+   *
+   * This is a JSON string that can be saved to a file.
+   * ```ts
+   * const snapshot = Bun.generateHeapSnapshot("v8");
+   * await Bun.write("heap.heapsnapshot", snapshot);
+   * ```
+   */
+  function generateHeapSnapshot(format: "v8"): string;

  /**
   * The next time JavaScriptCore is idle, clear unused memory and attempt to reduce the heap size.
diff --git a/packages/bun-usockets/src/bsd.c b/packages/bun-usockets/src/bsd.c
index 0c2543b161..a452163988 100644
--- a/packages/bun-usockets/src/bsd.c
+++ b/packages/bun-usockets/src/bsd.c
@@ -843,13 +843,6 @@ static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_listen_socket_unix(const char
        return LIBUS_SOCKET_ERROR;
    }

-#ifndef _WIN32
-    // 700 permission by default
-    fchmod(listenFd, S_IRWXU);
-#else
-    _chmod(path, S_IREAD | S_IWRITE | S_IEXEC);
-#endif
-
 #ifdef _WIN32
    _unlink(path);
 #else
diff --git a/packages/bun-vscode/example/bun.lock b/packages/bun-vscode/example/bun.lock
index 8eb6bfd86d..9b42110d97 100644
--- a/packages/bun-vscode/example/bun.lock
+++ b/packages/bun-vscode/example/bun.lock
@@ -26,183 +26,316 @@
    "mime",
  ],
  "packages": {
-    "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="],
+    "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.5", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg=="],
+
+    "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
+
+    "@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="],
+
+    "@jridgewell/source-map": ["@jridgewell/source-map@0.3.6", "", {
"dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25" } }, "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ=="], + "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="], + "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], + "@sinclair/typebox": ["@sinclair/typebox@0.30.4", "", {}, "sha512-wFuuDR+O1OAE2GL0q68h1Ty00RE6Ihcixr55A6TU5RCvOUHnwJw9LGuDVg9NxDiAp7m/YJpa+UaOuLAz0ziyOQ=="], - "@types/bun": ["@types/bun@1.1.14", "", { "dependencies": { "bun-types": "1.1.37" } }, "sha512-opVYiFGtO2af0dnWBdZWlioLBoxSdDO5qokaazLhq8XQtGZbY4pY3/JxY8Zdf/hEwGubbp7ErZXoN1+h2yesxA=="], + + "@types/bun": ["@types/bun@1.1.13", "", { "dependencies": { "bun-types": "1.1.34" } }, "sha512-KmQxSBgVWCl6RSuerlLGZlIWfdxkKqat0nxN61+qu4y1KDn0Ll3j7v1Pl8GnaL3a/U6GGWVTJh75ap62kR1E8Q=="], + "@types/eslint": ["@types/eslint@9.6.1", "", { "dependencies": { "@types/estree": "*", "@types/json-schema": "*" } }, "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag=="], + "@types/eslint-scope": ["@types/eslint-scope@3.7.7", "", { "dependencies": { "@types/eslint": "*", "@types/estree": "*" } }, "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg=="], + "@types/estree": ["@types/estree@1.0.6", "", {}, "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw=="], + "@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="], + "@types/node": ["@types/node@20.12.14", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg=="], + "@types/ws": ["@types/ws@8.5.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA=="], + "@webassemblyjs/ast": ["@webassemblyjs/ast@1.14.1", "", { "dependencies": { "@webassemblyjs/helper-numbers": "1.13.2", "@webassemblyjs/helper-wasm-bytecode": "1.13.2" } }, "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ=="], + "@webassemblyjs/floating-point-hex-parser": ["@webassemblyjs/floating-point-hex-parser@1.13.2", "", {}, "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA=="], + "@webassemblyjs/helper-api-error": ["@webassemblyjs/helper-api-error@1.13.2", "", {}, "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ=="], + "@webassemblyjs/helper-buffer": ["@webassemblyjs/helper-buffer@1.14.1", "", {}, "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA=="], + "@webassemblyjs/helper-numbers": ["@webassemblyjs/helper-numbers@1.13.2", "", { "dependencies": { "@webassemblyjs/floating-point-hex-parser": "1.13.2", "@webassemblyjs/helper-api-error": "1.13.2", "@xtuc/long": "4.2.2" } }, "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA=="], + "@webassemblyjs/helper-wasm-bytecode": 
["@webassemblyjs/helper-wasm-bytecode@1.13.2", "", {}, "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA=="], + "@webassemblyjs/helper-wasm-section": ["@webassemblyjs/helper-wasm-section@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/wasm-gen": "1.14.1" } }, "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw=="], + "@webassemblyjs/ieee754": ["@webassemblyjs/ieee754@1.13.2", "", { "dependencies": { "@xtuc/ieee754": "^1.2.0" } }, "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw=="], + "@webassemblyjs/leb128": ["@webassemblyjs/leb128@1.13.2", "", { "dependencies": { "@xtuc/long": "4.2.2" } }, "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw=="], + "@webassemblyjs/utf8": ["@webassemblyjs/utf8@1.13.2", "", {}, "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ=="], + "@webassemblyjs/wasm-edit": ["@webassemblyjs/wasm-edit@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/helper-wasm-section": "1.14.1", "@webassemblyjs/wasm-gen": "1.14.1", "@webassemblyjs/wasm-opt": "1.14.1", "@webassemblyjs/wasm-parser": "1.14.1", "@webassemblyjs/wast-printer": "1.14.1" } }, "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ=="], + "@webassemblyjs/wasm-gen": ["@webassemblyjs/wasm-gen@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/ieee754": "1.13.2", "@webassemblyjs/leb128": "1.13.2", "@webassemblyjs/utf8": "1.13.2" } }, "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg=="], + "@webassemblyjs/wasm-opt": ["@webassemblyjs/wasm-opt@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/wasm-gen": "1.14.1", "@webassemblyjs/wasm-parser": "1.14.1" } }, "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw=="], + "@webassemblyjs/wasm-parser": ["@webassemblyjs/wasm-parser@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-api-error": "1.13.2", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/ieee754": "1.13.2", "@webassemblyjs/leb128": "1.13.2", "@webassemblyjs/utf8": "1.13.2" } }, "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ=="], + "@webassemblyjs/wast-printer": ["@webassemblyjs/wast-printer@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@xtuc/long": "4.2.2" } }, "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw=="], + "@xtuc/ieee754": ["@xtuc/ieee754@1.2.0", "", {}, "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA=="], + "@xtuc/long": ["@xtuc/long@4.2.2", "", {}, "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ=="], + "accepts": ["accepts@1.3.8", "", { "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw=="], - 
"acorn": ["acorn@8.14.0", "", {}, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="], + + "acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="], + "acorn-loose": ["acorn-loose@8.4.0", "", { "dependencies": { "acorn": "^8.11.0" } }, "sha512-M0EUka6rb+QC4l9Z3T0nJEzNOO7JcoJlYMrBlyBCiFSXRyxjLKayd4TbQs2FDRWQU1h9FR7QVNHt+PEaoNL5rQ=="], + "ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="], + "ajv-keywords": ["ajv-keywords@3.5.2", "", { "peerDependencies": { "ajv": "^6.9.1" } }, "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ=="], + "array-flatten": ["array-flatten@1.1.1", "", {}, "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="], + "asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="], - "axios": ["axios@1.7.9", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw=="], - "body-parser": ["body-parser@1.20.3", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g=="], - "browserslist": ["browserslist@4.24.2", "", { "dependencies": { "caniuse-lite": "^1.0.30001669", "electron-to-chromium": "^1.5.41", "node-releases": "^2.0.18", "update-browserslist-db": "^1.1.1" } }, "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg=="], + + "axios": ["axios@1.7.7", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q=="], + + "body-parser": ["body-parser@1.20.1", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.4", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.11.0", "raw-body": "2.5.1", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw=="], + + "browserslist": ["browserslist@4.24.2", "", { "dependencies": { "caniuse-lite": "^1.0.30001669", "electron-to-chromium": "^1.5.41", "node-releases": "^2.0.18", "update-browserslist-db": "^1.1.1" }, "bin": { "browserslist": "cli.js" } }, "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg=="], + "buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="], - "bun-types": ["bun-types@1.1.37", "", { "dependencies": { "@types/node": "~20.12.8", "@types/ws": "~8.5.10" } }, 
"sha512-C65lv6eBr3LPJWFZ2gswyrGZ82ljnH8flVE03xeXxKhi2ZGtFiO4isRKTKnitbSqtRAcaqYSR6djt1whI66AbA=="], + + "bun-types": ["bun-types@1.1.34", "", { "dependencies": { "@types/node": "~20.12.8", "@types/ws": "~8.5.10" } }, "sha512-br5QygTEL/TwB4uQOb96Ky22j4Gq2WxWH/8Oqv20fk5HagwKXo/akB+LiYgSfzexCt6kkcUaVm+bKiPl71xPvw=="], + "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], - "call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="], - "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.1", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g=="], - "call-bound": ["call-bound@1.0.2", "", { "dependencies": { "call-bind": "^1.0.8", "get-intrinsic": "^1.2.5" } }, "sha512-0lk0PHFe/uz0vl527fG9CgdE9WdafjDbCXvBbs+LUv000TVt2Jjhqbs4Jwm8gz070w8xXyEAxrPOMullsxXeGg=="], - "caniuse-lite": ["caniuse-lite@1.0.30001688", "", {}, "sha512-Nmqpru91cuABu/DTCXbM2NSRHzM2uVHfPnhJ/1zEAJx/ILBRVmz3pzH4N7DZqbdG0gWClsCC05Oj0mJ/1AWMbA=="], + + "call-bind": ["call-bind@1.0.2", "", { "dependencies": { "function-bind": "^1.1.1", "get-intrinsic": "^1.0.2" } }, "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA=="], + + "caniuse-lite": ["caniuse-lite@1.0.30001680", "", {}, "sha512-rPQy70G6AGUMnbwS1z6Xg+RkHYPAi18ihs47GH0jcxIG7wArmPgY3XbS2sRdBbxJljp3thdT8BIqv9ccCypiPA=="], + "chrome-trace-event": ["chrome-trace-event@1.0.4", "", {}, "sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ=="], + "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="], + "commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="], + "content-disposition": ["content-disposition@0.5.4", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ=="], + "content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="], - "cookie": ["cookie@0.7.1", "", {}, "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="], + + "cookie": ["cookie@0.5.0", "", {}, "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw=="], + "cookie-signature": ["cookie-signature@1.0.6", "", {}, "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="], + "debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], - "define-data-property": ["define-data-property@1.1.4", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.0.1" } }, "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A=="], + "delayed-stream": ["delayed-stream@1.0.0", "", {}, 
"sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="], + "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], + "destroy": ["destroy@1.2.0", "", {}, "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="], - "dunder-proto": ["dunder-proto@1.0.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-9+Sj30DIu+4KvHqMfLUGLFYL2PkURSYMVXJyXe92nFRvlYq5hBjLEhblKB+vkd/WVlUYMWigiY07T91Fkk0+4A=="], + "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], - "electron-to-chromium": ["electron-to-chromium@1.5.73", "", {}, "sha512-8wGNxG9tAG5KhGd3eeA0o6ixhiNdgr0DcHWm85XPCphwZgD1lIEoi6t3VERayWao7SF7AAZTw6oARGJeVjH8Kg=="], - "elysia": ["elysia@0.6.24", "", { "dependencies": { "@sinclair/typebox": "^0.30.4", "fast-querystring": "^1.1.2", "memoirist": "0.1.4", "mergician": "^1.1.0", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeerDependencies": ["typescript"] }, "sha512-qaN8b816tSecNIsgNwFCMOMlayOaChme9i/VHxCRZyPTgtdAAnrYDZaUQfatyt1jcHUdkf3IT4ny5GuS7NB26w=="], - "encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="], + + "electron-to-chromium": ["electron-to-chromium@1.5.58", "", {}, "sha512-al2l4r+24ZFL7WzyPTlyD0fC33LLzvxqLCwurtBibVPghRGO9hSTl+tis8t1kD7biPiH/en4U0I7o/nQbYeoVA=="], + + "elysia": ["elysia@0.6.3", "", { "dependencies": { "@sinclair/typebox": "^0.30.4", "fast-querystring": "^1.1.2", "memoirist": "0.1.4", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeers": ["typescript"] }, "sha512-LhdH476fotAQuEUpnLdn8fAzwo3ZmwHVrYzQhujo+x+OpmMXGMJXT7L7/Ct+b5wwR2txP5xCxI1A0suxhRxgIQ=="], + + "encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="], + "enhanced-resolve": ["enhanced-resolve@5.17.1", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg=="], - "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], - "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], + "es-module-lexer": ["es-module-lexer@1.5.4", "", {}, "sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw=="], - "es-object-atoms": ["es-object-atoms@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw=="], + "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="], + "escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="], + "eslint-scope": ["eslint-scope@5.1.1", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" } }, "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw=="], + "esrecurse": ["esrecurse@4.3.0", "", { 
"dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="], + "estraverse": ["estraverse@4.3.0", "", {}, "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw=="], + "etag": ["etag@1.8.1", "", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="], + "events": ["events@3.3.0", "", {}, "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="], - "express": ["express@4.21.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.19.0", "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA=="], + + "express": ["express@4.18.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.1", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.5.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.2.0", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.1", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", "proxy-addr": "~2.0.7", "qs": "6.11.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.18.0", "serve-static": "1.15.0", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ=="], + "fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="], + "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], + "fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="], + "fast-querystring": ["fast-querystring@1.1.2", "", { "dependencies": { "fast-decode-uri-component": "^1.0.1" } }, "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg=="], - "finalhandler": ["finalhandler@1.3.1", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" } }, "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ=="], + + "finalhandler": ["finalhandler@1.2.0", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": 
"2.0.1", "unpipe": "~1.0.0" } }, "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg=="], + "follow-redirects": ["follow-redirects@1.15.9", "", {}, "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ=="], + "form-data": ["form-data@4.0.1", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw=="], + "forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="], + "fresh": ["fresh@0.5.2", "", {}, "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="], - "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], - "get-intrinsic": ["get-intrinsic@1.2.6", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "dunder-proto": "^1.0.0", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.0.0", "function-bind": "^1.1.2", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.0.0" } }, "sha512-qxsEs+9A+u85HhllWJJFicJfPDhRmjzoYdl64aMWW9yRIJmSyxdn8IEkuIM530/7T+lv0TIHd8L6Q/ra0tEoeA=="], + + "function-bind": ["function-bind@1.1.1", "", {}, "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="], + + "get-intrinsic": ["get-intrinsic@1.2.1", "", { "dependencies": { "function-bind": "^1.1.1", "has": "^1.0.3", "has-proto": "^1.0.1", "has-symbols": "^1.0.3" } }, "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw=="], + "glob-to-regexp": ["glob-to-regexp@0.4.1", "", {}, "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw=="], - "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], + "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="], + + "has": ["has@1.0.3", "", { "dependencies": { "function-bind": "^1.1.1" } }, "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw=="], + "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], - "has-property-descriptors": ["has-property-descriptors@1.0.2", "", { "dependencies": { "es-define-property": "^1.0.0" } }, "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg=="], - "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], - "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], + + "has-proto": ["has-proto@1.0.1", "", {}, "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg=="], + + "has-symbols": ["has-symbols@1.0.3", "", {}, "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A=="], + "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", 
"statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], + "iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="], + "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], + "ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], + "jest-worker": ["jest-worker@27.5.1", "", { "dependencies": { "@types/node": "*", "merge-stream": "^2.0.0", "supports-color": "^8.0.0" } }, "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg=="], - "js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="], + "json-parse-even-better-errors": ["json-parse-even-better-errors@2.3.1", "", {}, "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w=="], + "json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], + "loader-runner": ["loader-runner@4.3.0", "", {}, "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg=="], - "loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="], - "math-intrinsics": ["math-intrinsics@1.0.0", "", {}, "sha512-4MqMiKP90ybymYvsut0CH2g4XWbfLtmlCkXmtmdcDCxNB+mQcu1w/1+L/VD7vi/PSv7X2JYV7SCcR+jiPXnQtA=="], + "media-typer": ["media-typer@0.3.0", "", {}, "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="], + "memoirist": ["memoirist@0.1.4", "", {}, "sha512-D6GbPSqO2nUVOmm7VZjJc5tC60pkOVUPzLwkKl1vCiYP+2b1cG8N9q1O3P0JmNM68u8vsgefPbxRUCSGxSXD+g=="], - "merge-descriptors": ["merge-descriptors@1.0.3", "", {}, "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ=="], + + "merge-descriptors": ["merge-descriptors@1.0.1", "", {}, "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w=="], + "merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="], - "mergician": ["mergician@1.1.0", "", {}, "sha512-FXbxzU6BBhGkV8XtUr8Sk015ZRaAALviit8Lle6OEgd1udX8wlu6tBeUMLGQGdz1MfHpAVNNQkXowyDnJuhXpA=="], + "methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="], - "mime": ["mime@3.0.0", "", {}, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="], - "mime-db": ["mime-db@1.53.0", "", {}, "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg=="], + + "mime": ["mime@3.0.0", "", { "bin": { "mime": "cli.js" } }, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="], + + "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], + "mime-types": ["mime-types@2.1.35", "", { "dependencies": { 
"mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], - "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], + + "ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], + "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], + "neo-async": ["neo-async@2.6.2", "", {}, "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="], - "node-releases": ["node-releases@2.0.19", "", {}, "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw=="], - "object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], - "object-inspect": ["object-inspect@1.13.3", "", {}, "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA=="], + + "node-releases": ["node-releases@2.0.18", "", {}, "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g=="], + + "object-inspect": ["object-inspect@1.12.3", "", {}, "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g=="], + "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], + "openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="], + "parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="], - "path-to-regexp": ["path-to-regexp@0.1.12", "", {}, "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ=="], + + "path-to-regexp": ["path-to-regexp@0.1.7", "", {}, "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="], + "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], - "prop-types": ["prop-types@15.8.1", "", { "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", "react-is": "^16.13.1" } }, "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg=="], + "proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="], + "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="], + "punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], - "qs": ["qs@6.13.0", "", { "dependencies": { "side-channel": "^1.0.6" } }, "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg=="], + + "qs": ["qs@6.11.0", "", { "dependencies": { "side-channel": "^1.0.4" } }, "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q=="], + "randombytes": ["randombytes@2.1.0", "", { "dependencies": { "safe-buffer": 
"^5.1.0" } }, "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ=="], + "range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="], - "raw-body": ["raw-body@2.5.2", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA=="], - "react": ["react@0.0.0-fec00a869", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "prop-types": "^15.6.2", "scheduler": "0.0.0-fec00a869" } }, "sha512-FaS3ViFU4ag7cuhDHQgGK3DAdWaD8YFXzEbO/Qzz33Si7VEzRRdnyoegFwg7VkEKxR6CvCVP6revi9Tm3Gq+WQ=="], - "react-dom": ["react-dom@0.0.0-fec00a869", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "prop-types": "^15.6.2", "scheduler": "0.0.0-fec00a869" }, "peerDependencies": { "react": "0.0.0-fec00a869" } }, "sha512-atB5i2HgCvbvhtGXq9oaX/BCL2AFZjnccougU8S9eulRFNQbNrfGNwIcj04PRo3XU1ZsBw5syL/5l596UaolKA=="], - "react-is": ["react-is@16.13.1", "", {}, "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="], - "react-refresh": ["react-refresh@0.0.0-f77c7b9d7", "", {}, "sha512-mErwv0xcQz2sYnCJPaQ93D23Irnrfo5c+wG2k2KAgWOvFfqXPQdIUZ1j9S+gKYQI2kqgd0fdTJchEJydqroyJw=="], + + "raw-body": ["raw-body@2.5.1", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig=="], + + "react": ["react@0.0.0-experimental-380f5d67-20241113", "", {}, "sha512-QquU1j1TmZR+KgGSFvWTlOuwLvGrA8ldUJean+gT0nYIhSJ1ZkdXJQFnFRWqxoc74C7SY1o4NMz0yJxpUBoQ2w=="], + + "react-dom": ["react-dom@0.0.0-experimental-380f5d67-20241113", "", { "dependencies": { "scheduler": "0.0.0-experimental-380f5d67-20241113" }, "peerDependencies": { "react": "0.0.0-experimental-380f5d67-20241113" } }, "sha512-1ok9k5rAF7YuTveNefkPOvZHHuh5RLnCc5DU7sT7IL3i2K+LZmlsbSdlylMevjt9OzovxWQdsk04Fd4GKVCBWg=="], + + "react-refresh": ["react-refresh@0.0.0-experimental-380f5d67-20241113", "", {}, "sha512-PwTxoYh02oTSdM2DLV8r3ZzHwObVDIsS05fxNcajIZe+/kIFTWThmXYJpGMljzjIs0wwScVkMONU6URTRPQvHA=="], + "react-server-dom-bun": ["react-server-dom-bun@0.0.0-experimental-603e6108-20241029", "", { "dependencies": { "neo-async": "^2.6.1" } }, "sha512-FfteCHlOgJSnDJRatgIkIU74jQQ9M1+fH2e6kfY9Sibu8FAWEUjgApKQPDfiXgjrkY7w0ITQu0b2FezC0eGzCw=="], - "react-server-dom-webpack": ["react-server-dom-webpack@0.0.0-experimental-feed8f3f9-20240118", "", { "dependencies": { "acorn-loose": "^8.3.0", "loose-envify": "^1.1.0", "neo-async": "^2.6.1" }, "peerDependencies": { "react": "0.0.0-experimental-feed8f3f9-20240118", "react-dom": "0.0.0-experimental-feed8f3f9-20240118", "webpack": "^5.59.0" } }, "sha512-9+gS3ydJF5aYwKkvfzN+DtHfICzvQ+gYGv+2MVZo65gDSit1wC0vwOd0YebHqJNC2JruND+nEyd7wQAYmVdAZA=="], + + "react-server-dom-webpack": ["react-server-dom-webpack@0.0.0-experimental-380f5d67-20241113", "", { "dependencies": { "acorn-loose": "^8.3.0", "neo-async": "^2.6.1", "webpack-sources": "^3.2.0" }, "peerDependencies": { "react": "0.0.0-experimental-380f5d67-20241113", "react-dom": "0.0.0-experimental-380f5d67-20241113", "webpack": "^5.59.0" } }, "sha512-hUluisy+9Srvrju5yS+qBOIAX82E+MRYOmoTNbV0kUsTi964ZZFLBzuruASAyUbbP1OhtFl0DwBxYN+UT0yUFQ=="], + "safe-buffer": ["safe-buffer@5.2.1", "", {}, 
"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], + "safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="], - "scheduler": ["scheduler@0.0.0-fec00a869", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "sha512-0U25jnyBP6dRPYwaVW4WMYB0jJSYlrIHFmIuXv27X+KIHJr7vyE9gcFTqZ61NQTuxYLYepAHnUs4KgQEUDlI+g=="], + + "scheduler": ["scheduler@0.0.0-experimental-380f5d67-20241113", "", {}, "sha512-UtSmlBSHar7hQvCXiozfIryfUFCL58+mqjrZONnLD06xdTlfgLrTcI5gS3Xo/RnNhUziLPV0DsinpI3a+q7Yzg=="], + "schema-utils": ["schema-utils@3.3.0", "", { "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", "ajv-keywords": "^3.5.2" } }, "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg=="], - "send": ["send@0.19.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw=="], + + "send": ["send@0.18.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg=="], + "serialize-javascript": ["serialize-javascript@6.0.2", "", { "dependencies": { "randombytes": "^2.1.0" } }, "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g=="], - "serve-static": ["serve-static@1.16.2", "", { "dependencies": { "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.19.0" } }, "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw=="], - "set-function-length": ["set-function-length@1.2.2", "", { "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", "has-property-descriptors": "^1.0.2" } }, "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg=="], + + "serve-static": ["serve-static@1.15.0", "", { "dependencies": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.18.0" } }, "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g=="], + "setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="], - "side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], - "side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="], - "side-channel-map": 
["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="], - "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], + + "side-channel": ["side-channel@1.0.4", "", { "dependencies": { "call-bind": "^1.0.0", "get-intrinsic": "^1.0.2", "object-inspect": "^1.9.0" } }, "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw=="], + "source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="], + "source-map-support": ["source-map-support@0.5.21", "", { "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w=="], + "statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], + "supports-color": ["supports-color@8.1.1", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="], + "tapable": ["tapable@2.2.1", "", {}, "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ=="], - "terser": ["terser@5.37.0", "", { "dependencies": { "@jridgewell/source-map": "^0.3.3", "acorn": "^8.8.2", "commander": "^2.20.0", "source-map-support": "~0.5.20" } }, "sha512-B8wRRkmre4ERucLM/uXx4MOV5cbnOlVAqUst+1+iLKPI0dOgFO28f84ptoQt9HEI537PMzfYa/d+GEPKTRXmYA=="], + + "terser": ["terser@5.36.0", "", { "dependencies": { "@jridgewell/source-map": "^0.3.3", "acorn": "^8.8.2", "commander": "^2.20.0", "source-map-support": "~0.5.20" }, "bin": { "terser": "bin/terser" } }, "sha512-IYV9eNMuFAV4THUspIRXkLakHnV6XO7FEdtKjf/mDyrnqUg9LnlOn6/RwRvM9SZjR4GUq8Nk8zj67FzVARr74w=="], + "terser-webpack-plugin": ["terser-webpack-plugin@5.3.10", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.20", "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.1", "terser": "^5.26.0" }, "peerDependencies": { "webpack": "^5.1.0" } }, "sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w=="], + "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], + "type-is": ["type-is@1.6.18", "", { "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" } }, "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g=="], - "typescript": ["typescript@5.7.2", "", {}, "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg=="], + "undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="], + "unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], - "update-browserslist-db": ["update-browserslist-db@1.1.1", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.0" 
}, "peerDependencies": { "browserslist": ">= 4.21.0" } }, "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A=="], + + "update-browserslist-db": ["update-browserslist-db@1.1.1", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A=="], + "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], + "utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="], + "vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], + "watchpack": ["watchpack@2.4.2", "", { "dependencies": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" } }, "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw=="], - "webpack": ["webpack@5.97.1", "", { "dependencies": { "@types/eslint-scope": "^3.7.7", "@types/estree": "^1.0.6", "@webassemblyjs/ast": "^1.14.1", "@webassemblyjs/wasm-edit": "^1.14.1", "@webassemblyjs/wasm-parser": "^1.14.1", "acorn": "^8.14.0", "browserslist": "^4.24.0", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.11", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.2.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.3.10", "watchpack": "^2.4.1", "webpack-sources": "^3.2.3" } }, "sha512-EksG6gFY3L1eFMROS/7Wzgrii5mBAFe4rIr3r2BTfo7bcc+DWwFZ4OJ/miOuHJO/A85HwyI4eQ0F6IKXesO7Fg=="], + + "webpack": ["webpack@5.96.1", "", { "dependencies": { "@types/eslint-scope": "^3.7.7", "@types/estree": "^1.0.6", "@webassemblyjs/ast": "^1.12.1", "@webassemblyjs/wasm-edit": "^1.12.1", "@webassemblyjs/wasm-parser": "^1.12.1", "acorn": "^8.14.0", "browserslist": "^4.24.0", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.11", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.2.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.3.10", "watchpack": "^2.4.1", "webpack-sources": "^3.2.3" }, "bin": { "webpack": "bin/webpack.js" } }, "sha512-l2LlBSvVZGhL4ZrPwyr8+37AunkcYj5qh8o6u2/2rzoPc8gxFJkLj1WxNgooi9pnoc06jh0BjuXnamM4qlujZA=="], + "webpack-sources": ["webpack-sources@3.2.3", "", {}, "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w=="], - "send/encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="], - "send/mime": ["mime@1.6.0", "", {}, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="], - "debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], - "mime-types/mime-db": ["mime-db@1.52.0", "", {}, 
"sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], + "esrecurse/estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="], + + "send/mime": ["mime@1.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="], + + "send/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], } } diff --git a/scripts/check-node-all.sh b/scripts/check-node-all.sh index 9358e55ae9..4c907de593 100755 --- a/scripts/check-node-all.sh +++ b/scripts/check-node-all.sh @@ -25,7 +25,7 @@ esac export BUN_DEBUG_QUIET_LOGS=1 -for x in $(find test/js/node/test/parallel -type f -name "test-$1*.js") +for x in $(find test/js/node/test/parallel -type f -name "test-$1*.js" | sort) do i=$((i+1)) echo ./$x diff --git a/src/HTMLScanner.zig b/src/HTMLScanner.zig index c6462882f9..b029e3dd46 100644 --- a/src/HTMLScanner.zig +++ b/src/HTMLScanner.zig @@ -28,12 +28,29 @@ pub fn deinit(this: *HTMLScanner) void { this.import_records.deinitWithAllocator(this.allocator); } -fn createImportRecord(this: *HTMLScanner, path: []const u8, kind: ImportKind) !void { +fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKind) !void { + // In HTML, sometimes people do /src/index.js + // In that case, we don't want to use the absolute filesystem path, we want to use the path relative to the project root + const path_to_use = if (input_path.len > 1 and input_path[0] == '/') + bun.path.joinAbsString(bun.fs.FileSystem.instance.top_level_dir, &[_][]const u8{input_path[1..]}, .auto) + + // Check if imports to (e.g) "App.tsx" are actually relative imoprts w/o the "./" + else if (input_path.len > 2 and input_path[0] != '.' 
and input_path[1] != '/') blk: { + const index_of_dot = std.mem.lastIndexOfScalar(u8, input_path, '.') orelse break :blk input_path; + const ext = input_path[index_of_dot..]; + if (ext.len > 4) break :blk input_path; + // /foo/bar/index.html -> /foo/bar + const dirname: []const u8 = std.fs.path.dirname(this.source.path.text) orelse break :blk input_path; + const resolved = bun.path.joinAbsString(dirname, &[_][]const u8{input_path}, .auto); + break :blk if (bun.sys.exists(resolved)) resolved else input_path; + } else input_path; + const record = ImportRecord{ - .path = fs.Path.init(try this.allocator.dupe(u8, path)), + .path = fs.Path.init(try this.allocator.dupeZ(u8, path_to_use)), .kind = kind, .range = logger.Range.None, }; + try this.import_records.push(this.allocator, record); } diff --git a/src/api/schema.zig b/src/api/schema.zig index 763d3954f1..77ed146a3e 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -1631,7 +1631,7 @@ pub const Api = struct { origin: ?[]const u8 = null, /// absolute_working_dir - absolute_working_dir: ?[]const u8 = null, + absolute_working_dir: ?[:0]const u8 = null, /// define define: ?StringMap = null, diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 78aec7e11e..5f59b52401 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -13,7 +13,7 @@ pub const igLog = bun.Output.scoped(.IncrementalGraph, false); pub const Options = struct { /// Arena must live until DevServer.deinit() arena: Allocator, - root: []const u8, + root: [:0]const u8, vm: *VirtualMachine, framework: bake.Framework, bundler_options: bake.SplitBundlerOptions, diff --git a/src/bake/bake.zig b/src/bake/bake.zig index eead195a75..3722c8efdc 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -15,7 +15,7 @@ pub const UserOptions = struct { arena: std.heap.ArenaAllocator, allocations: StringRefList, - root: []const u8, + root: [:0]const u8, framework: Framework, bundler_options: SplitBundlerOptions, @@ -78,9 +78,9 @@ pub const UserOptions = struct { const StringRefList = struct { strings: std.ArrayListUnmanaged(ZigString.Slice), - pub fn track(al: *StringRefList, str: ZigString.Slice) []const u8 { + pub fn track(al: *StringRefList, str: ZigString.Slice) [:0]const u8 { al.strings.append(bun.default_allocator, str) catch bun.outOfMemory(); - return str.slice(); + return str.sliceZ(); } pub fn free(al: *StringRefList) void { diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig index 9def500ece..727c707559 100644 --- a/src/bun.js/ConsoleObject.zig +++ b/src/bun.js/ConsoleObject.zig @@ -21,12 +21,12 @@ const default_allocator = bun.default_allocator; const JestPrettyFormat = @import("./test/pretty_format.zig").JestPrettyFormat; const JSPromise = JSC.JSPromise; const EventType = JSC.EventType; - pub const shim = Shimmer("Bun", "ConsoleObject", @This()); pub const Type = *anyopaque; pub const name = "Bun::ConsoleObject"; pub const include = "\"ConsoleObject.h\""; pub const namespace = shim.namespace; + const Counter = std.AutoHashMapUnmanaged(u64, u32); const BufferedWriter = std.io.BufferedWriter(4096, Output.WriterType); @@ -378,13 +378,13 @@ pub const TablePrinter = struct { } } } else { - var cols_iter = JSC.JSPropertyIterator(.{ + var cols_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(this.globalObject, row_value); defer cols_iter.deinit(); - while (cols_iter.next()) |col_key| { + while (try cols_iter.next()) |col_key| { const value = cols_iter.value; // find or create the column for the 
property @@ -561,13 +561,13 @@ pub const TablePrinter = struct { }.callback); if (ctx_.err) return error.JSError; } else { - var rows_iter = JSC.JSPropertyIterator(.{ + var rows_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(globalObject, this.tabular_data); defer rows_iter.deinit(); - while (rows_iter.next()) |row_key| { + while (try rows_iter.next()) |row_key| { try this.updateColumnsForRow(&columns, .{ .str = String.init(row_key) }, rows_iter.value); } } @@ -634,13 +634,13 @@ pub const TablePrinter = struct { }.callback); if (ctx_.err) return error.JSError; } else { - var rows_iter = JSC.JSPropertyIterator(.{ + var rows_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(globalObject, this.tabular_data); defer rows_iter.deinit(); - while (rows_iter.next()) |row_key| { + while (try rows_iter.next()) |row_key| { try this.printRow(Writer, writer, enable_ansi_colors, &columns, .{ .str = String.init(row_key) }, rows_iter.value); } } @@ -2486,6 +2486,9 @@ pub const Formatter = struct { } else if (value.as(JSC.WebCore.Blob)) |blob| { blob.writeFormat(ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {}; return; + } else if (value.as(JSC.WebCore.S3Client)) |s3client| { + s3client.writeFormat(ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {}; + return; } else if (value.as(JSC.FetchHeaders) != null) { if (value.get_unsafe(this.globalThis, "toJSON")) |toJSONFunction| { this.addForNewLine("Headers ".len); @@ -2995,7 +2998,7 @@ pub const Formatter = struct { this.quote_strings = true; defer this.quote_strings = prev_quote_strings; - var props_iter = JSC.JSPropertyIterator(.{ + var props_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = true, .include_value = true, @@ -3009,7 +3012,7 @@ pub const Formatter = struct { defer this.indent -|= 1; const count_without_children = props_iter.len - @as(usize, @intFromBool(children_prop != null)); - while (props_iter.next()) |prop| { + while (try props_iter.next()) |prop| { if (prop.eqlComptime("children")) continue; diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig index a3a494b1a1..43afbb6793 100644 --- a/src/bun.js/api/BunObject.zig +++ b/src/bun.js/api/BunObject.zig @@ -1,5 +1,5 @@ const conv = std.builtin.CallingConvention.Unspecified; - +const S3File = @import("../webcore/S3File.zig"); /// How to add a new function or property to the Bun global /// /// - Add a callback or property to the below struct @@ -18,7 +18,6 @@ pub const BunObject = struct { pub const createShellInterpreter = toJSCallback(bun.shell.Interpreter.createShellInterpreter); pub const deflateSync = toJSCallback(JSZlib.deflateSync); pub const file = toJSCallback(WebCore.Blob.constructBunFile); - pub const generateHeapSnapshot = toJSCallback(Bun.generateHeapSnapshot); pub const gunzipSync = toJSCallback(JSZlib.gunzipSync); pub const gzipSync = toJSCallback(JSZlib.gzipSync); pub const indexOfLine = toJSCallback(Bun.indexOfLine); @@ -31,7 +30,7 @@ pub const BunObject = struct { pub const registerMacro = toJSCallback(Bun.registerMacro); pub const resolve = toJSCallback(Bun.resolve); pub const resolveSync = toJSCallback(Bun.resolveSync); - pub const s3 = toJSCallback(WebCore.Blob.constructS3File); + pub const s3 = S3File.createJSS3File; pub const serve = toJSCallback(Bun.serve); pub const sha = toJSCallback(JSC.wrapStaticMethod(Crypto.SHA512_256, "hash_", true)); pub const shellEscape = toJSCallback(Bun.shellEscape); @@ -57,7 +56,6 @@ pub const 
BunObject = struct { pub const SHA384 = toJSGetter(Crypto.SHA384.getter); pub const SHA512 = toJSGetter(Crypto.SHA512.getter); pub const SHA512_256 = toJSGetter(Crypto.SHA512_256.getter); - pub const S3 = toJSGetter(JSC.WebCore.Blob.getJSS3FileConstructor); pub const TOML = toJSGetter(Bun.getTOMLObject); pub const Transpiler = toJSGetter(Bun.getTranspilerConstructor); pub const argv = toJSGetter(Bun.getArgv); @@ -73,6 +71,7 @@ pub const BunObject = struct { pub const stdin = toJSGetter(Bun.getStdin); pub const stdout = toJSGetter(Bun.getStdout); pub const unsafe = toJSGetter(Bun.getUnsafe); + pub const S3Client = toJSGetter(Bun.getS3ClientConstructor); // --- Getters --- fn getterName(comptime baseName: anytype) [:0]const u8 { @@ -110,7 +109,6 @@ pub const BunObject = struct { @export(BunObject.FileSystemRouter, .{ .name = getterName("FileSystemRouter") }); @export(BunObject.MD4, .{ .name = getterName("MD4") }); @export(BunObject.MD5, .{ .name = getterName("MD5") }); - @export(BunObject.S3, .{ .name = getterName("S3") }); @export(BunObject.SHA1, .{ .name = getterName("SHA1") }); @export(BunObject.SHA224, .{ .name = getterName("SHA224") }); @export(BunObject.SHA256, .{ .name = getterName("SHA256") }); @@ -134,6 +132,7 @@ pub const BunObject = struct { @export(BunObject.unsafe, .{ .name = getterName("unsafe") }); @export(BunObject.semver, .{ .name = getterName("semver") }); @export(BunObject.embeddedFiles, .{ .name = getterName("embeddedFiles") }); + @export(BunObject.S3Client, .{ .name = getterName("S3Client") }); // --- Getters -- // -- Callbacks -- @@ -145,7 +144,6 @@ pub const BunObject = struct { @export(BunObject.createShellInterpreter, .{ .name = callbackName("createShellInterpreter") }); @export(BunObject.deflateSync, .{ .name = callbackName("deflateSync") }); @export(BunObject.file, .{ .name = callbackName("file") }); - @export(BunObject.generateHeapSnapshot, .{ .name = callbackName("generateHeapSnapshot") }); @export(BunObject.gunzipSync, .{ .name = callbackName("gunzipSync") }); @export(BunObject.gzipSync, .{ .name = callbackName("gzipSync") }); @export(BunObject.indexOfLine, .{ .name = callbackName("indexOfLine") }); @@ -830,10 +828,6 @@ pub fn sleepSync(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) b return .undefined; } -pub fn generateHeapSnapshot(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { - return globalObject.generateHeapSnapshot(); -} - pub fn gc(vm: *JSC.VirtualMachine, sync: bool) usize { return vm.garbageCollect(sync); } @@ -3404,7 +3398,9 @@ pub fn getTOMLObject(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSVa pub fn getGlobConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue { return JSC.API.Glob.getConstructor(globalThis); } - +pub fn getS3ClientConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue { + return JSC.WebCore.S3Client.getConstructor(globalThis); +} pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue { const vm = globalThis.bunVM(); const graph = vm.standalone_module_graph orelse return JSC.JSValue.createEmptyArray(globalThis, 0); diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index 78ebaee29b..a75e260f72 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -453,13 +453,13 @@ pub const JSBundler = struct { return globalThis.throwInvalidArguments("define must be an object", .{}); } - var define_iter = JSC.JSPropertyIterator(.{ + var define_iter = try 
JSC.JSPropertyIterator(.{ .skip_empty_name = true, .include_value = true, }).init(globalThis, define); defer define_iter.deinit(); - while (define_iter.next()) |prop| { + while (try define_iter.next()) |prop| { const property_value = define_iter.value; const value_type = property_value.jsType(); @@ -485,7 +485,7 @@ pub const JSBundler = struct { } if (try config.getOwnObject(globalThis, "loader")) |loaders| { - var loader_iter = JSC.JSPropertyIterator(.{ + var loader_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = true, .include_value = true, }).init(globalThis, loaders); @@ -496,7 +496,7 @@ pub const JSBundler = struct { var loader_values = try allocator.alloc(Api.Loader, loader_iter.len); errdefer allocator.free(loader_values); - while (loader_iter.next()) |prop| { + while (try loader_iter.next()) |prop| { if (!prop.hasPrefixComptime(".") or prop.length() < 2) { return globalThis.throwInvalidArguments("loader property names must be file extensions, such as '.txt'", .{}); } diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig index d53c20996d..7eb446bea2 100644 --- a/src/bun.js/api/JSTranspiler.zig +++ b/src/bun.js/api/JSTranspiler.zig @@ -338,7 +338,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std return globalObject.throwInvalidArguments("define must be an object", .{}); } - var define_iter = JSC.JSPropertyIterator(.{ + var define_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = true, .include_value = true, @@ -351,7 +351,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std var values = map_entries[define_iter.len..]; - while (define_iter.next()) |prop| { + while (try define_iter.next()) |prop| { const property_value = define_iter.value; const value_type = property_value.jsType(); @@ -624,26 +624,25 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std return globalObject.throwInvalidArguments("replace must be an object", .{}); } - var iter = JSC.JSPropertyIterator(.{ + var iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = true, .include_value = true, }).init(globalThis, replace); + defer iter.deinit(); if (iter.len > 0) { - errdefer iter.deinit(); try replacements.ensureUnusedCapacity(bun.default_allocator, iter.len); // We cannot set the exception before `try` because it could be // a double free with the `errdefer`. 
defer if (globalThis.hasException()) { - iter.deinit(); for (replacements.keys()) |key| { bun.default_allocator.free(@constCast(key)); } replacements.clearAndFree(bun.default_allocator); }; - while (iter.next()) |key_| { + while (try iter.next()) |key_| { const value = iter.value; if (value == .zero) continue; diff --git a/src/bun.js/api/S3Client.classes.ts b/src/bun.js/api/S3Client.classes.ts new file mode 100644 index 0000000000..06839ca568 --- /dev/null +++ b/src/bun.js/api/S3Client.classes.ts @@ -0,0 +1,81 @@ +import { define } from "../../codegen/class-definitions"; + +export default [ + define({ + name: "S3Client", + construct: true, + finalize: true, + configurable: false, + klass: { + file: { + fn: "staticFile", + length: 2, + }, + unlink: { + fn: "staticUnlink", + length: 2, + }, + delete: { + /// just an alias for unlink + fn: "staticUnlink", + length: 2, + }, + presign: { + fn: "staticPresign", + length: 2, + }, + exists: { + fn: "staticExists", + length: 2, + }, + size: { + fn: "staticSize", + length: 2, + }, + write: { + fn: "staticWrite", + length: 2, + }, + stat: { + fn: "staticStat", + length: 2, + }, + }, + JSType: "0b11101110", + proto: { + file: { + fn: "file", + length: 2, + }, + unlink: { + fn: "unlink", + length: 2, + }, + delete: { + /// just an alias for unlink + fn: "unlink", + length: 2, + }, + presign: { + fn: "presign", + length: 2, + }, + exists: { + fn: "exists", + length: 2, + }, + size: { + fn: "size", + length: 2, + }, + write: { + fn: "write", + length: 2, + }, + stat: { + fn: "stat", + length: 2, + }, + }, + }), +]; diff --git a/src/bun.js/api/S3Stat.classes.ts b/src/bun.js/api/S3Stat.classes.ts new file mode 100644 index 0000000000..e2339a014e --- /dev/null +++ b/src/bun.js/api/S3Stat.classes.ts @@ -0,0 +1,30 @@ +import { define } from "../../codegen/class-definitions"; + +export default [ + define({ + name: "S3Stat", + construct: true, + finalize: true, + configurable: false, + klass: {}, + JSType: "0b11101110", + proto: { + size: { + getter: "getSize", + cache: true, + }, + lastModified: { + getter: "getLastModified", + cache: true, + }, + etag: { + getter: "getEtag", + cache: true, + }, + type: { + getter: "getContentType", + cache: true, + }, + }, + }), +]; diff --git a/src/bun.js/api/bun/h2_frame_parser.zig b/src/bun.js/api/bun/h2_frame_parser.zig index 05d30aa613..41d8ef85d6 100644 --- a/src/bun.js/api/bun/h2_frame_parser.zig +++ b/src/bun.js/api/bun/h2_frame_parser.zig @@ -2899,14 +2899,14 @@ pub const H2FrameParser = struct { var buffer = shared_request_buffer[0 .. 
shared_request_buffer.len - FrameHeader.byteSize]; var encoded_size: usize = 0; - var iter = JSC.JSPropertyIterator(.{ + var iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(globalObject, headers_arg); defer iter.deinit(); // TODO: support CONTINUE for more headers if headers are too big - while (iter.next()) |header_name| { + while (try iter.next()) |header_name| { if (header_name.length() == 0) continue; const name_slice = header_name.toUTF8(bun.default_allocator); @@ -3231,7 +3231,7 @@ pub const H2FrameParser = struct { } // we iterate twice, because pseudo headers must be sent first, but can appear anywhere in the headers object - var iter = JSC.JSPropertyIterator(.{ + var iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(globalObject, headers_arg); @@ -3240,7 +3240,7 @@ pub const H2FrameParser = struct { for (0..2) |ignore_pseudo_headers| { iter.reset(); - while (iter.next()) |header_name| { + while (try iter.next()) |header_name| { if (header_name.length() == 0) continue; const name_slice = header_name.toUTF8(bun.default_allocator); diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 9971f84d16..7a2fef50eb 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -1626,6 +1626,7 @@ fn NewSocket(comptime ssl: bool) type { if (callback == .zero) { if (handlers.promise.trySwap()) |promise| { + handlers.promise.deinit(); if (this.this_value != .zero) { this.this_value = .zero; } @@ -1633,7 +1634,7 @@ fn NewSocket(comptime ssl: bool) type { // reject the promise on connect() error const err_value = err.toErrorInstance(globalObject); - promise.asPromise().?.rejectOnNextTick(globalObject, err_value); + promise.asPromise().?.reject(globalObject, err_value); } return; @@ -1657,7 +1658,7 @@ fn NewSocket(comptime ssl: bool) type { // The error is effectively handled, but we should still reject the promise. 
var promise = val.asPromise().?; const err_ = err.toErrorInstance(globalObject); - promise.rejectOnNextTickAsHandled(globalObject, err_); + promise.rejectAsHandled(globalObject, err_); } } pub fn onConnectError(this: *This, _: Socket, errno: c_int) void { diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index 7c8774d548..f638466a7d 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -122,17 +122,18 @@ pub const ResourceUsage = struct { }; pub fn appendEnvpFromJS(globalThis: *JSC.JSGlobalObject, object: JSC.JSValue, envp: *std.ArrayList(?[*:0]const u8), PATH: *[]const u8) !void { - var object_iter = JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true }).init(globalThis, object); + var object_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true }).init(globalThis, object); defer object_iter.deinit(); + try envp.ensureTotalCapacityPrecise(object_iter.len + // +1 in case there's IPC // +1 for null terminator 2); - while (object_iter.next()) |key| { + while (try object_iter.next()) |key| { var value = object_iter.value; if (value == .undefined) continue; - var line = try std.fmt.allocPrintZ(envp.allocator, "{}={}", .{ key, value.getZigString(globalThis) }); + const line = try std.fmt.allocPrintZ(envp.allocator, "{}={}", .{ key, value.getZigString(globalThis) }); if (key.eqlComptime("PATH")) { PATH.* = bun.asByteSlice(line["PATH=".len..]); @@ -1703,6 +1704,8 @@ pub const Subprocess = struct { return spawnMaybeSync(globalThis, args, secondaryArgsValue, true); } + extern "C" const BUN_DEFAULT_PATH_FOR_SPAWN: [*:0]const u8; + // This is split into a separate function to conserve stack space. // On Windows, a single path buffer can take 64 KB. fn getArgv0(globalThis: *JSC.JSGlobalObject, PATH: []const u8, cwd: []const u8, argv0: ?[*:0]const u8, first_cmd: JSValue, allocator: std.mem.Allocator) bun.JSError!struct { @@ -1717,14 +1720,30 @@ pub const Subprocess = struct { var actual_argv0: [:0]const u8 = ""; - if (argv0 == null) { - const resolved = Which.which(path_buf, PATH, cwd, arg0.slice()) orelse { - return throwCommandNotFound(globalThis, arg0.slice()); - }; - actual_argv0 = try allocator.dupeZ(u8, resolved); + const argv0_to_use: []const u8 = if (argv0) |_argv0| + bun.sliceTo(_argv0, 0) + else + arg0.slice(); + + // This mimics libuv's behavior, which mimics execvpe (see the spawn sketch after the diff) + // Only resolve from $PATH when the command does not contain a path separator + const PATH_to_use: []const u8 = if (strings.containsChar(argv0_to_use, '/')) + "" + // If no $PATH is provided, we fall back to the one from environ + // This is already the behavior of the PATH passed in here.
+ else if (PATH.len > 0) + PATH + else if (comptime Environment.isPosix) + // If the user explicitly passed an empty $PATH, we fall back to the OS-specific default (which libuv also does) + bun.sliceTo(BUN_DEFAULT_PATH_FOR_SPAWN, 0) + else + ""; + + if (PATH_to_use.len == 0) { + actual_argv0 = try allocator.dupeZ(u8, argv0_to_use); } else { - const resolved = Which.which(path_buf, PATH, cwd, bun.sliceTo(argv0.?, 0)) orelse { - return throwCommandNotFound(globalThis, arg0.slice()); + const resolved = Which.which(path_buf, PATH_to_use, cwd, argv0_to_use) orelse { + return throwCommandNotFound(globalThis, argv0_to_use); }; actual_argv0 = try allocator.dupeZ(u8, resolved); } @@ -1735,6 +1754,41 @@ pub const Subprocess = struct { }; } + fn getArgv(globalThis: *JSC.JSGlobalObject, args: JSValue, PATH: []const u8, cwd: []const u8, argv0: *?[*:0]const u8, allocator: std.mem.Allocator, argv: *std.ArrayList(?[*:0]const u8)) bun.JSError!void { + var cmds_array = args.arrayIterator(globalThis); + // + 1 for argv0 + // + 1 for null terminator + argv.* = try @TypeOf(argv.*).initCapacity(allocator, cmds_array.len + 2); + + if (args.isEmptyOrUndefinedOrNull()) { + return globalThis.throwInvalidArguments("cmd must be an array of strings", .{}); + } + + if (cmds_array.len == 0) { + return globalThis.throwInvalidArguments("cmd must not be empty", .{}); + } + + const argv0_result = try getArgv0(globalThis, PATH, cwd, argv0.*, cmds_array.next().?, allocator); + + argv0.* = argv0_result.argv0.ptr; + argv.appendAssumeCapacity(argv0_result.arg0.ptr); + + while (cmds_array.next()) |value| { + const arg = try value.toBunString2(globalThis); + defer arg.deref(); + + // if the string is empty, ignore it, don't add it to the argv + if (arg.isEmpty()) { + continue; + } + argv.appendAssumeCapacity(try arg.toOwnedSliceZ(allocator)); + } + + if (argv.items.len == 0) { + return globalThis.throwInvalidArguments("cmd must be an array of strings", .{}); + } + } + pub fn spawnMaybeSync( globalThis: *JSC.JSGlobalObject, args_: JSValue, @@ -1830,40 +1884,6 @@ pub const Subprocess = struct { } } - { - var cmds_array = cmd_value.arrayIterator(globalThis); - // + 1 for argv0 - // + 1 for null terminator - argv = try @TypeOf(argv).initCapacity(allocator, cmds_array.len + 2); - - if (cmd_value.isEmptyOrUndefinedOrNull()) { - return globalThis.throwInvalidArguments("cmd must be an array of strings", .{}); - } - - if (cmds_array.len == 0) { - return globalThis.throwInvalidArguments("cmd must not be empty", .{}); - } - - const argv0_result = try getArgv0(globalThis, PATH, cwd, argv0, cmds_array.next().?, allocator); - argv0 = argv0_result.argv0.ptr; - argv.appendAssumeCapacity(argv0_result.arg0.ptr); - - while (cmds_array.next()) |value| { - const arg = try value.toBunString2(globalThis); - defer arg.deref(); - - // if the string is empty, ignore it, don't add it to the argv - if (arg.isEmpty()) { - continue; - } - argv.appendAssumeCapacity(try arg.toOwnedSliceZ(allocator)); - } - - if (argv.items.len == 0) { - return globalThis.throwInvalidArguments("cmd must be an array of strings", .{}); - } - } - if (args != .zero and args.isObject()) { // This must run before the stdio parsing happens if (!is_sync) { @@ -1930,11 +1950,15 @@ pub const Subprocess = struct { override_env = true; // If the env object does not include a $PATH, it must disable path lookup for argv[0] - PATH = ""; + var NEW_PATH: []const u8 = ""; var envp_managed = env_array.toManaged(allocator); - try appendEnvpFromJS(globalThis, object, &envp_managed, &PATH); + try
appendEnvpFromJS(globalThis, object, &envp_managed, &NEW_PATH); env_array = envp_managed.moveToUnmanaged(); + PATH = NEW_PATH; } + + try getArgv(globalThis, cmd_value, PATH, cwd, &argv0, allocator, &argv); + if (try args.get(globalThis, "stdio")) |stdio_val| { if (!stdio_val.isEmptyOrUndefinedOrNull()) { if (stdio_val.jsType().isArray()) { @@ -2007,6 +2031,8 @@ pub const Subprocess = struct { } } } + } else { + try getArgv(globalThis, cmd_value, PATH, cwd, &argv0, allocator, &argv); } } @@ -2124,6 +2150,20 @@ pub const Subprocess = struct { }) { .err => |err| { spawn_options.deinit(); + switch (err.getErrno()) { + .ACCES, .NOENT, .PERM, .ISDIR, .NOTDIR => { + const display_path: [:0]const u8 = if (argv0 != null) + std.mem.sliceTo(argv0.?, 0) + else if (argv.items.len > 0 and argv.items[0] != null) + std.mem.sliceTo(argv.items[0].?, 0) + else + ""; + if (display_path.len > 0) + return globalThis.throwValue(err.withPath(display_path).toJSC(globalThis)); + }, + else => {}, + } + return globalThis.throwValue(err.toJSC(globalThis)); }, .result => |result| result, diff --git a/src/bun.js/api/ffi.zig b/src/bun.js/api/ffi.zig index 9fffa7bb9d..4fad8407d1 100644 --- a/src/bun.js/api/ffi.zig +++ b/src/bun.js/api/ffi.zig @@ -706,9 +706,9 @@ pub const FFI = struct { if (try object.getTruthy(globalThis, "define")) |define_value| { if (define_value.isObject()) { const Iter = JSC.JSPropertyIterator(.{ .include_value = true, .skip_empty_name = true }); - var iter = Iter.init(globalThis, define_value); + var iter = try Iter.init(globalThis, define_value); defer iter.deinit(); - while (iter.next()) |entry| { + while (try iter.next()) |entry| { const key = entry.toOwnedSliceZ(bun.default_allocator) catch bun.outOfMemory(); var owned_value: [:0]const u8 = ""; if (iter.value != .zero and iter.value != .undefined) { @@ -1421,7 +1421,7 @@ pub const FFI = struct { JSC.markBinding(@src()); const allocator = VirtualMachine.get().allocator; - var symbols_iter = JSC.JSPropertyIterator(.{ + var symbols_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = true, .include_value = true, @@ -1430,7 +1430,7 @@ pub const FFI = struct { try symbols.ensureTotalCapacity(allocator, symbols_iter.len); - while (symbols_iter.next()) |prop| { + while (try symbols_iter.next()) |prop| { const value = symbols_iter.value; if (value.isEmptyOrUndefinedOrNull()) { diff --git a/src/bun.js/api/glob.zig b/src/bun.js/api/glob.zig index edf341ec6a..3ace3a87de 100644 --- a/src/bun.js/api/glob.zig +++ b/src/bun.js/api/glob.zig @@ -406,7 +406,7 @@ pub fn match(this: *Glob, globalThis: *JSGlobalObject, callframe: *JSC.CallFrame var str = str_arg.toSlice(globalThis, arena.allocator()); defer str.deinit(); - if (this.is_ascii and isAllAscii(str.slice())) return JSC.JSValue.jsBoolean(globImpl.Ascii.match(this.pattern, str.slice())); + if (this.is_ascii and isAllAscii(str.slice())) return JSC.JSValue.jsBoolean(globImpl.Ascii.match(this.pattern, str.slice()).matches()); const codepoints = codepoints: { if (this.pattern_codepoints) |cp| break :codepoints cp.items[0..]; diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 52366fefb2..46e96b7f71 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -90,7 +90,7 @@ const linux = std.os.linux; const Async = bun.Async; const httplog = Output.scoped(.Server, false); const ctxLog = Output.scoped(.RequestContext, false); -const AWS = @import("../../s3.zig").AWSCredentials; +const S3 = bun.S3; const BlobFileContentResult = struct { data: [:0]const u8, @@ -1222,13 +1222,13 
@@ pub const ServerConfig = struct { return global.throwInvalidArguments("Bun.serve expects 'static' to be an object shaped like { [pathname: string]: Response }", .{}); } - var iter = JSC.JSPropertyIterator(.{ + var iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = true, .include_value = true, }).init(global, static); defer iter.deinit(); - while (iter.next()) |key| { + while (try iter.next()) |key| { const path, const is_ascii = key.toOwnedSliceReturningAllASCII(bun.default_allocator) catch bun.outOfMemory(); const value = iter.value; @@ -3182,7 +3182,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp this.endWithoutBody(this.shouldCloseConnection()); this.deref(); } - pub fn onS3SizeResolved(result: AWS.S3StatResult, this: *RequestContext) void { + pub fn onS3SizeResolved(result: S3.S3StatResult, this: *RequestContext) void { defer { this.deref(); } @@ -3213,17 +3213,23 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp return; }; const globalThis = server.globalThis; + var has_content_length_or_transfer_encoding = false; if (response.getFetchHeaders()) |headers| { // first respect the headers - if (headers.get("transfer-encoding", globalThis)) |transfer_encoding| { - resp.writeHeader("transfer-encoding", transfer_encoding); - } else if (headers.get("content-length", globalThis)) |content_length| { - const len = std.fmt.parseInt(usize, content_length, 10) catch 0; + if (headers.fastGet(.TransferEncoding)) |transfer_encoding| { + const transfer_encoding_str = transfer_encoding.toSlice(server.allocator); + defer transfer_encoding_str.deinit(); + resp.writeHeader("transfer-encoding", transfer_encoding_str.slice()); + has_content_length_or_transfer_encoding = true; + } else if (headers.fastGet(.ContentLength)) |content_length| { + const content_length_str = content_length.toSlice(server.allocator); + defer content_length_str.deinit(); + const len = std.fmt.parseInt(usize, content_length_str.slice(), 10) catch 0; resp.writeHeaderInt("content-length", len); - } else { - resp.writeHeaderInt("content-length", 0); + has_content_length_or_transfer_encoding = true; } - } else { + } + if (!has_content_length_or_transfer_encoding) { // then respect the body response.body.value.toBlobIfPossible(); switch (response.body.value) { @@ -3248,7 +3254,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp const path = blob.store.?.data.s3.path(); const env = globalThis.bunVM().transpiler.env; - credentials.s3Stat(path, @ptrCast(&onS3SizeResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); + S3.stat(credentials, path, @ptrCast(&onS3SizeResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); return; } @@ -3267,6 +3273,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp }, } } + this.renderMetadata(); this.endWithoutBody(this.shouldCloseConnection()); } diff --git a/src/bun.js/bindings/BunClientData.cpp b/src/bun.js/bindings/BunClientData.cpp index b5c037f0c2..ee214c06b8 100644 --- a/src/bun.js/bindings/BunClientData.cpp +++ b/src/bun.js/bindings/BunClientData.cpp @@ -24,6 +24,7 @@ #include #include "NodeVM.h" #include "../../bake/BakeGlobalObject.h" + namespace WebCore { using namespace JSC; diff --git a/src/bun.js/bindings/BunCommonStrings.h b/src/bun.js/bindings/BunCommonStrings.h index 0abd69c1db..b74b2e7be8 100644 --- a/src/bun.js/bindings/BunCommonStrings.h +++ b/src/bun.js/bindings/BunCommonStrings.h @@ -11,7 
+11,8 @@ // These ones don't need to be in BunBuiltinNames.h // If we don't use it as an identifier name, but we want to avoid allocating the string frequently, put it in this list. #define BUN_COMMON_STRINGS_EACH_NAME_NOT_BUILTIN_NAMES(macro) \ - macro(SystemError) + macro(SystemError) \ + macro(S3Error) // clang-format on #define BUN_COMMON_STRINGS_ACCESSOR_DEFINITION(name) \ diff --git a/src/bun.js/bindings/BunObject+exports.h b/src/bun.js/bindings/BunObject+exports.h index d4f267b822..7ea8582949 100644 --- a/src/bun.js/bindings/BunObject+exports.h +++ b/src/bun.js/bindings/BunObject+exports.h @@ -17,7 +17,6 @@ macro(SHA512_256) \ macro(TOML) \ macro(Transpiler) \ - macro(S3) \ macro(argv) \ macro(assetPrefix) \ macro(cwd) \ @@ -32,6 +31,7 @@ macro(unsafe) \ macro(semver) \ macro(embeddedFiles) \ + macro(S3Client) \ // --- Callbacks --- #define FOR_EACH_CALLBACK(macro) \ diff --git a/src/bun.js/bindings/BunObject.cpp b/src/bun.js/bindings/BunObject.cpp index 5e1062a2bb..6a812c8355 100644 --- a/src/bun.js/bindings/BunObject.cpp +++ b/src/bun.js/bindings/BunObject.cpp @@ -1,4 +1,7 @@ #include "root.h" + +#include "JavaScriptCore/HeapProfiler.h" +#include #include "ZigGlobalObject.h" #include "JavaScriptCore/ArgList.h" #include "JSDOMURL.h" @@ -34,6 +37,8 @@ #include "ErrorCode.h" #include "GeneratedBunObject.h" +#include "JavaScriptCore/BunV8HeapSnapshotBuilder.h" + BUN_DECLARE_HOST_FUNCTION(Bun__DNSResolver__lookup); BUN_DECLARE_HOST_FUNCTION(Bun__DNSResolver__resolve); BUN_DECLARE_HOST_FUNCTION(Bun__DNSResolver__resolveSrv); @@ -241,7 +246,6 @@ JSC_DEFINE_HOST_FUNCTION(functionConcatTypedArrays, (JSGlobalObject * globalObje auto arg2 = callFrame->argument(2); if (!arg2.isUndefined()) { asUint8Array = arg2.toBoolean(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); } return flattenArrayOfBuffersIntoArrayBufferOrUint8Array(globalObject, arrayValue, maxLength, asUint8Array); @@ -528,6 +532,45 @@ JSC_DEFINE_HOST_FUNCTION(functionPathToFileURL, (JSC::JSGlobalObject * lexicalGl RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(jsValue)); } +JSC_DEFINE_HOST_FUNCTION(functionGenerateHeapSnapshot, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +{ + auto& vm = globalObject->vm(); + vm.ensureHeapProfiler(); + auto& heapProfiler = *vm.heapProfiler(); + heapProfiler.clearSnapshots(); + + JSValue arg0 = callFrame->argument(0); + auto throwScope = DECLARE_THROW_SCOPE(vm); + bool useV8 = false; + if (!arg0.isUndefined()) { + if (arg0.isString()) { + auto str = arg0.toWTFString(globalObject); + RETURN_IF_EXCEPTION(throwScope, {}); + if (str == "v8"_s) { + useV8 = true; + } else if (str == "jsc"_s) { + // do nothing + } else { + throwTypeError(globalObject, throwScope, "Expected 'v8' or 'jsc' or undefined"_s); + return {}; + } + } + } + + if (useV8) { + JSC::BunV8HeapSnapshotBuilder builder(heapProfiler); + return JSC::JSValue::encode(jsString(vm, builder.json())); + } + + JSC::HeapSnapshotBuilder builder(heapProfiler); + builder.buildSnapshot(); + auto json = builder.json(); + // Returning an object was a bad idea but it's a breaking change + // so we'll just keep it for now. 
+ JSC::JSValue jsonValue = JSONParseWithException(globalObject, json); + RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(jsonValue)); +} + JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { auto& vm = globalObject->vm(); @@ -576,7 +619,6 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj Glob BunObject_getter_wrap_Glob DontDelete|PropertyCallback MD4 BunObject_getter_wrap_MD4 DontDelete|PropertyCallback MD5 BunObject_getter_wrap_MD5 DontDelete|PropertyCallback - S3 BunObject_getter_wrap_S3 DontDelete|PropertyCallback SHA1 BunObject_getter_wrap_SHA1 DontDelete|PropertyCallback SHA224 BunObject_getter_wrap_SHA224 DontDelete|PropertyCallback SHA256 BunObject_getter_wrap_SHA256 DontDelete|PropertyCallback @@ -586,6 +628,7 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj TOML BunObject_getter_wrap_TOML DontDelete|PropertyCallback Transpiler BunObject_getter_wrap_Transpiler DontDelete|PropertyCallback embeddedFiles BunObject_getter_wrap_embeddedFiles DontDelete|PropertyCallback + S3Client BunObject_getter_wrap_S3Client DontDelete|PropertyCallback allocUnsafe BunObject_callback_allocUnsafe DontDelete|Function 1 argv BunObject_getter_wrap_argv DontDelete|PropertyCallback build BunObject_callback_build DontDelete|Function 1 @@ -604,7 +647,7 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj file BunObject_callback_file DontDelete|Function 1 fileURLToPath functionFileURLToPath DontDelete|Function 1 gc Generated::BunObject::jsGc DontDelete|Function 1 - generateHeapSnapshot BunObject_callback_generateHeapSnapshot DontDelete|Function 1 + generateHeapSnapshot functionGenerateHeapSnapshot DontDelete|Function 1 gunzipSync BunObject_callback_gunzipSync DontDelete|Function 1 gzipSync BunObject_callback_gzipSync DontDelete|Function 1 hash BunObject_getter_wrap_hash DontDelete|PropertyCallback diff --git a/src/bun.js/bindings/BunProcess.cpp b/src/bun.js/bindings/BunProcess.cpp index 95c5ca6f4f..3631362357 100644 --- a/src/bun.js/bindings/BunProcess.cpp +++ b/src/bun.js/bindings/BunProcess.cpp @@ -4,11 +4,17 @@ #include #include #include "CommonJSModuleRecord.h" +#include "ErrorCode+List.h" +#include "JavaScriptCore/ArgList.h" #include "JavaScriptCore/CallData.h" #include "JavaScriptCore/CatchScope.h" #include "JavaScriptCore/JSCJSValue.h" #include "JavaScriptCore/JSCast.h" +#include "JavaScriptCore/JSMap.h" +#include "JavaScriptCore/JSMapInlines.h" +#include "JavaScriptCore/JSObjectInlines.h" #include "JavaScriptCore/JSString.h" +#include "JavaScriptCore/JSType.h" #include "JavaScriptCore/MathCommon.h" #include "JavaScriptCore/Protect.h" #include "JavaScriptCore/PutPropertySlot.h" @@ -35,7 +41,10 @@ #include #include "ProcessBindingTTYWrap.h" #include "wtf/text/ASCIILiteral.h" +#include "wtf/text/StringToIntegerConversion.h" #include "wtf/text/OrdinalNumber.h" +#include "NodeValidator.h" +#include "NodeModuleModule.h" #include "AsyncContextFrame.h" #include "ErrorCode.h" @@ -51,6 +60,9 @@ #include #include #include +#include +#include +#include #else #include #include @@ -89,6 +101,9 @@ typedef int mode_t; #include // setuid, getuid #endif +extern "C" bool Bun__Node__ProcessNoDeprecation; +extern "C" bool Bun__Node__ProcessThrowDeprecation; + namespace Bun { using namespace JSC; @@ -134,6 +149,8 @@ BUN_DECLARE_HOST_FUNCTION(Bun__Process__send); extern "C" void Process__emitDisconnectEvent(Zig::GlobalObject* global); extern "C" void 
Process__emitErrorEvent(Zig::GlobalObject* global, EncodedJSValue value); +bool setProcessExitCodeInner(JSC::JSGlobalObject* lexicalGlobalObject, Process* process, JSValue code); + static JSValue constructArch(VM& vm, JSObject* processObject) { #if CPU(X86_64) @@ -228,13 +245,9 @@ static JSValue constructProcessReleaseObject(VM& vm, JSObject* processObject) auto* globalObject = processObject->globalObject(); auto* release = JSC::constructEmptyObject(globalObject); - // SvelteKit compatibility hack - release->putDirect(vm, vm.propertyNames->name, jsOwnedString(vm, WTF::String("node"_s)), 0); - - release->putDirect(vm, Identifier::fromString(vm, "lts"_s), jsBoolean(false), 0); + release->putDirect(vm, vm.propertyNames->name, jsOwnedString(vm, String("node"_s)), 0); // maybe this should be 'bun' eventually release->putDirect(vm, Identifier::fromString(vm, "sourceUrl"_s), jsOwnedString(vm, WTF::String(std::span { Bun__githubURL, strlen(Bun__githubURL) })), 0); - release->putDirect(vm, Identifier::fromString(vm, "headersUrl"_s), jsEmptyString(vm), 0); - release->putDirect(vm, Identifier::fromString(vm, "libUrl"_s), jsEmptyString(vm), 0); + release->putDirect(vm, Identifier::fromString(vm, "headersUrl"_s), jsOwnedString(vm, String("https://nodejs.org/download/release/v" REPORTED_NODEJS_VERSION "/node-v" REPORTED_NODEJS_VERSION "-headers.tar.gz"_s)), 0); return release; } @@ -262,9 +275,7 @@ static void dispatchExitInternal(JSC::JSGlobalObject* globalObject, Process* pro emitter.emit(event, arguments); } -JSC_DEFINE_CUSTOM_SETTER(Process_defaultSetter, - (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, - JSC::EncodedJSValue value, JSC::PropertyName propertyName)) +JSC_DEFINE_CUSTOM_SETTER(Process_defaultSetter, (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue value, JSC::PropertyName propertyName)) { JSC::VM& vm = globalObject->vm(); @@ -282,8 +293,7 @@ extern "C" HMODULE Bun__LoadLibraryBunString(BunString*); extern "C" size_t Bun__process_dlopen_count; -JSC_DEFINE_HOST_FUNCTION(Process_functionDlopen, - (JSC::JSGlobalObject * globalObject_, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionDlopen, (JSC::JSGlobalObject * globalObject_, JSC::CallFrame* callFrame)) { Zig::GlobalObject* globalObject = reinterpret_cast(globalObject_); auto callCountAtStart = globalObject->napiModuleRegisterCallCount; @@ -376,8 +386,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionDlopen, #else WTF::String msg = WTF::String::fromUTF8(dlerror()); #endif - JSC::throwTypeError(globalObject, scope, msg); - return {}; + return throwError(globalObject, scope, ErrorCode::ERR_DLOPEN_FAILED, msg); } if (callCountAtStart != globalObject->napiModuleRegisterCallCount) { @@ -463,32 +472,29 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionDlopen, return JSValue::encode(resultValue); } -JSC_DEFINE_HOST_FUNCTION(Process_functionUmask, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionUmask, (JSGlobalObject * globalObject, CallFrame* callFrame)) { if (callFrame->argumentCount() == 0 || callFrame->argument(0).isUndefined()) { mode_t currentMask = umask(0); umask(currentMask); - return JSC::JSValue::encode(JSC::jsNumber(currentMask)); + return JSValue::encode(jsNumber(currentMask)); } auto& vm = globalObject->vm(); auto throwScope = DECLARE_THROW_SCOPE(vm); - JSValue numberValue = callFrame->argument(0); + auto value = callFrame->argument(0); - if (!numberValue.isNumber()) { - return 
Bun::ERR::INVALID_ARG_TYPE(throwScope, globalObject, "mask"_s, "number"_s, numberValue); - } - - if (!numberValue.isAnyInt()) { - return Bun::ERR::OUT_OF_RANGE(throwScope, globalObject, "mask"_s, "an integer"_s, numberValue); - } - - double number = numberValue.toNumber(globalObject); - int64_t newUmask = isInt52(number) ? tryConvertToInt52(number) : numberValue.toInt32(globalObject); - RETURN_IF_EXCEPTION(throwScope, JSC::JSValue::encode(JSC::JSValue {})); - if (newUmask < 0 || newUmask > 4294967295) { - return Bun::ERR::OUT_OF_RANGE(throwScope, globalObject, "mask"_s, 0, 4294967295, numberValue); + mode_t newUmask; + if (value.isString()) { + auto str = value.getString(globalObject); + auto policy = WTF::TrailingJunkPolicy::Disallow; + auto opt = str.is8Bit() ? WTF::parseInteger(str.span8(), 8, policy) : WTF::parseInteger(str.span16(), 8, policy); + if (!opt.has_value()) return Bun::ERR::INVALID_ARG_VALUE(throwScope, globalObject, "mask"_s, value, "must be a 32-bit unsigned integer or an octal string"_s); + newUmask = opt.value(); + } else { + Bun::V::validateUint32(throwScope, globalObject, value, "mask"_s, jsUndefined()); + RETURN_IF_EXCEPTION(throwScope, {}); + newUmask = value.toUInt32(globalObject); } return JSC::JSValue::encode(JSC::jsNumber(umask(newUmask))); @@ -496,6 +502,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionUmask, extern "C" uint64_t Bun__readOriginTimer(void*); extern "C" double Bun__readOriginTimerStart(void*); +extern "C" void Bun__VirtualMachine__exitDuringUncaughtException(void*); // https://github.com/nodejs/node/blob/1936160c31afc9780e4365de033789f39b7cbc0c/src/api/hooks.cc#L49 extern "C" void Process__dispatchOnBeforeExit(Zig::GlobalObject* globalObject, uint8_t exitCode) @@ -503,11 +510,18 @@ extern "C" void Process__dispatchOnBeforeExit(Zig::GlobalObject* globalObject, u if (!globalObject->hasProcessObject()) { return; } - + auto& vm = globalObject->vm(); auto* process = jsCast(globalObject->processObject()); MarkedArgumentBuffer arguments; arguments.append(jsNumber(exitCode)); - process->wrapped().emit(Identifier::fromString(globalObject->vm(), "beforeExit"_s), arguments); + Bun__VirtualMachine__exitDuringUncaughtException(bunVM(vm)); + auto fired = process->wrapped().emit(Identifier::fromString(vm, "beforeExit"_s), arguments); + if (fired) { + if (globalObject->m_nextTickQueue) { + auto nextTickQueue = jsDynamicCast(globalObject->m_nextTickQueue.get()); + if (nextTickQueue) nextTickQueue->drain(vm, globalObject); + } + } } extern "C" void Process__dispatchOnExit(Zig::GlobalObject* globalObject, uint8_t exitCode) @@ -522,58 +536,65 @@ extern "C" void Process__dispatchOnExit(Zig::GlobalObject* globalObject, uint8_t dispatchExitInternal(globalObject, process, exitCode); } -JSC_DEFINE_HOST_FUNCTION(Process_functionUptime, - (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionUptime, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) { double now = static_cast(Bun__readOriginTimer(bunVM(lexicalGlobalObject))); double result = (now / 1000000.0) / 1000.0; return JSC::JSValue::encode(JSC::jsNumber(result)); } -JSC_DEFINE_HOST_FUNCTION(Process_functionExit, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionExit, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { - auto throwScope = DECLARE_THROW_SCOPE(globalObject->vm()); - uint8_t exitCode = 0; - JSValue arg0 = callFrame->argument(0); - if (arg0.isAnyInt()) { 
- int extiCode32 = arg0.toInt32(globalObject) % 256; - RETURN_IF_EXCEPTION(throwScope, JSC::JSValue::encode(JSC::JSValue {})); - - exitCode = static_cast(extiCode32); - Bun__setExitCode(bunVM(globalObject), exitCode); - } else if (!arg0.isUndefinedOrNull()) { - throwTypeError(globalObject, throwScope, "The \"code\" argument must be an integer"_s); - return {}; - } else { - exitCode = Bun__getExitCode(bunVM(globalObject)); - } - + auto& vm = globalObject->vm(); + auto throwScope = DECLARE_THROW_SCOPE(vm); auto* zigGlobal = defaultGlobalObject(globalObject); auto process = jsCast(zigGlobal->processObject()); - process->m_isExitCodeObservable = true; + auto code = callFrame->argument(0); + + setProcessExitCodeInner(globalObject, process, code); + RETURN_IF_EXCEPTION(throwScope, {}); + + auto exitCode = Bun__getExitCode(bunVM(zigGlobal)); Process__dispatchOnExit(zigGlobal, exitCode); - Bun__Process__exit(zigGlobal, exitCode); + + // process.reallyExit(exitCode); + auto reallyExitVal = process->get(globalObject, Identifier::fromString(vm, "reallyExit"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + MarkedArgumentBuffer args; + args.append(jsNumber(exitCode)); + JSC::call(globalObject, reallyExitVal, args, ""_s); + RETURN_IF_EXCEPTION(throwScope, {}); + return JSC::JSValue::encode(jsUndefined()); } -JSC_DEFINE_HOST_FUNCTION(Process_setUncaughtExceptionCaptureCallback, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_setUncaughtExceptionCaptureCallback, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) { - auto throwScope = DECLARE_THROW_SCOPE(globalObject->vm()); - JSValue arg0 = callFrame->argument(0); - if (!arg0.isCallable() && !arg0.isNull()) { - throwTypeError(globalObject, throwScope, "The \"callback\" argument must be callable or null"_s); - return {}; + auto* globalObject = reinterpret_cast(lexicalGlobalObject); + auto& vm = globalObject->vm(); + auto throwScope = DECLARE_THROW_SCOPE(vm); + auto arg0 = callFrame->argument(0); + auto process = jsCast(globalObject->processObject()); + + if (arg0.isNull()) { + process->setUncaughtExceptionCaptureCallback(arg0); + process->m_reportOnUncaughtException = false; + return JSC::JSValue::encode(jsUndefined()); } - auto* zigGlobal = defaultGlobalObject(globalObject); - jsCast(zigGlobal->processObject())->setUncaughtExceptionCaptureCallback(arg0); + if (!arg0.isCallable()) { + return Bun::ERR::INVALID_ARG_TYPE(throwScope, globalObject, "fn"_s, "function or null"_s, arg0); + } + if (process->m_reportOnUncaughtException) { + return Bun::ERR::UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET(throwScope, globalObject); + } + + process->setUncaughtExceptionCaptureCallback(arg0); + process->m_reportOnUncaughtException = true; return JSC::JSValue::encode(jsUndefined()); } -JSC_DEFINE_HOST_FUNCTION(Process_hasUncaughtExceptionCaptureCallback, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_hasUncaughtExceptionCaptureCallback, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { auto* zigGlobal = defaultGlobalObject(globalObject); JSValue cb = jsCast(zigGlobal->processObject())->getUncaughtExceptionCaptureCallback(); @@ -586,12 +607,9 @@ JSC_DEFINE_HOST_FUNCTION(Process_hasUncaughtExceptionCaptureCallback, extern "C" uint64_t Bun__readOriginTimer(void*); -JSC_DEFINE_HOST_FUNCTION(Process_functionHRTime, - (JSC::JSGlobalObject * globalObject_, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionHRTime, 
(JSC::JSGlobalObject * globalObject_, JSC::CallFrame* callFrame)) { - - Zig::GlobalObject* globalObject - = reinterpret_cast(globalObject_); + Zig::GlobalObject* globalObject = reinterpret_cast(globalObject_); auto& vm = globalObject->vm(); auto throwScope = DECLARE_THROW_SCOPE(vm); @@ -599,29 +617,24 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionHRTime, int64_t seconds = static_cast(time / 1000000000); int64_t nanoseconds = time % 1000000000; - if (callFrame->argumentCount() > 0) { - JSC::JSValue arg0 = callFrame->uncheckedArgument(0); - if (!arg0.isUndefinedOrNull()) { - JSArray* relativeArray = JSC::jsDynamicCast(arg0); - if ((!relativeArray && !arg0.isUndefinedOrNull()) || relativeArray->length() < 2) { - JSC::throwTypeError(globalObject, throwScope, "hrtime() argument must be an array or undefined"_s); - return {}; - } - JSValue relativeSecondsValue = relativeArray->getIndexQuickly(0); - JSValue relativeNanosecondsValue = relativeArray->getIndexQuickly(1); - if (!relativeSecondsValue.isNumber() || !relativeNanosecondsValue.isNumber()) { - JSC::throwTypeError(globalObject, throwScope, "hrtime() argument must be an array of 2 integers"_s); - return {}; - } + auto arg0 = callFrame->argument(0); + if (callFrame->argumentCount() > 0 && !arg0.isUndefined()) { + JSArray* relativeArray = JSC::jsDynamicCast(arg0); + if (!relativeArray) { + return Bun::ERR::INVALID_ARG_TYPE(throwScope, globalObject, "time"_s, "Array"_s, arg0); + } + if (relativeArray->length() != 2) return Bun::ERR::OUT_OF_RANGE(throwScope, globalObject_, "time"_s, "2"_s, jsNumber(relativeArray->length())); - int64_t relativeSeconds = JSC__JSValue__toInt64(JSC::JSValue::encode(relativeSecondsValue)); - int64_t relativeNanoseconds = JSC__JSValue__toInt64(JSC::JSValue::encode(relativeNanosecondsValue)); - seconds -= relativeSeconds; - nanoseconds -= relativeNanoseconds; - if (nanoseconds < 0) { - seconds--; - nanoseconds += 1000000000; - } + JSValue relativeSecondsValue = relativeArray->getIndexQuickly(0); + JSValue relativeNanosecondsValue = relativeArray->getIndexQuickly(1); + + int64_t relativeSeconds = JSC__JSValue__toInt64(JSC::JSValue::encode(relativeSecondsValue)); + int64_t relativeNanoseconds = JSC__JSValue__toInt64(JSC::JSValue::encode(relativeNanosecondsValue)); + seconds -= relativeSeconds; + nanoseconds -= relativeNanoseconds; + if (nanoseconds < 0) { + seconds--; + nanoseconds += 1000000000; } } @@ -646,24 +659,22 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionHRTime, RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(array)); } -JSC_DEFINE_HOST_FUNCTION(Process_functionHRTimeBigInt, - (JSC::JSGlobalObject * globalObject_, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionHRTimeBigInt, (JSC::JSGlobalObject * globalObject_, JSC::CallFrame* callFrame)) { Zig::GlobalObject* globalObject = reinterpret_cast(globalObject_); return JSC::JSValue::encode(JSValue(JSC::JSBigInt::createFrom(globalObject, Bun__readOriginTimer(globalObject->bunVM())))); } -JSC_DEFINE_HOST_FUNCTION(Process_functionChdir, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionChdir, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { auto& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); - ZigString str = ZigString { nullptr, 0 }; - if (callFrame->argumentCount() > 0) { - str = Zig::toZigString(callFrame->uncheckedArgument(0).toWTFString(globalObject)); - } + auto value = callFrame->argument(0); + Bun::V::validateString(scope, globalObject, value, 
"directory"_s); + RETURN_IF_EXCEPTION(scope, {}); + ZigString str = Zig::toZigString(value.toWTFString(globalObject)); JSC::JSValue result = JSC::JSValue::decode(Bun__Process__setCwd(globalObject, &str)); RETURN_IF_EXCEPTION(scope, {}); @@ -1112,6 +1123,37 @@ Process::~Process() { } +JSC_DEFINE_HOST_FUNCTION(jsFunction_emitWarning, (Zig::JSGlobalObject * lexicalGlobalObject, CallFrame* callFrame)) +{ + auto* globalObject = jsCast(lexicalGlobalObject); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto* process = jsCast(globalObject->processObject()); + auto value = callFrame->argument(0); + + auto ident = builtinNames(vm).warningPublicName(); + if (process->wrapped().hasEventListeners(ident)) { + JSC::MarkedArgumentBuffer args; + args.append(value); + process->wrapped().emit(ident, args); + return JSValue::encode(jsUndefined()); + } + + auto jsArgs = JSValue::encode(value); + Bun__ConsoleObject__messageWithTypeAndLevel(reinterpret_cast(globalObject->consoleClient().get())->m_client, static_cast(MessageType::Log), static_cast(MessageLevel::Warning), globalObject, &jsArgs, 1); + RETURN_IF_EXCEPTION(scope, {}); + return JSValue::encode(jsUndefined()); +} + +JSC_DEFINE_HOST_FUNCTION(jsFunction_throwValue, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto value = callFrame->argument(0); + scope.throwException(globalObject, value); + return {}; +} + JSC_DEFINE_HOST_FUNCTION(Process_functionAbort, (JSGlobalObject * globalObject, CallFrame*)) { #if OS(WINDOWS) @@ -1127,40 +1169,89 @@ JSC_DEFINE_HOST_FUNCTION(Process_emitWarning, (JSGlobalObject * lexicalGlobalObj Zig::GlobalObject* globalObject = jsCast(lexicalGlobalObject); VM& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); - - if (callFrame->argumentCount() < 1) { - throwVMError(globalObject, scope, "Not enough arguments"_s); - return {}; - } - - RETURN_IF_EXCEPTION(scope, {}); - auto* process = jsCast(globalObject->processObject()); - JSObject* errorInstance = ([&]() -> JSObject* { - JSValue arg0 = callFrame->uncheckedArgument(0); - if (!arg0.isEmpty() && arg0.isCell() && arg0.asCell()->type() == ErrorInstanceType) { - return arg0.getObject(); - } + auto warning = callFrame->argument(0); + auto type = callFrame->argument(1); + auto code = callFrame->argument(2); + auto ctor = callFrame->argument(3); + auto detail = jsUndefined(); - WTF::String str = arg0.toWTFString(globalObject); - auto err = createError(globalObject, str); - err->putDirect(vm, vm.propertyNames->name, jsString(vm, String("warn"_s)), JSC::PropertyAttribute::DontEnum | 0); - return err; - })(); + auto dep_warning = jsString(vm, String("DeprecationWarning"_s)); - auto ident = Identifier::fromString(vm, "warning"_s); - if (process->wrapped().hasEventListeners(ident)) { - JSC::MarkedArgumentBuffer args; - args.append(errorInstance); - - process->wrapped().emit(ident, args); + if (Bun__Node__ProcessNoDeprecation && JSC::JSValue::strictEqual(globalObject, type, dep_warning)) { return JSValue::encode(jsUndefined()); } - auto jsArgs = JSValue::encode(errorInstance); - Bun__ConsoleObject__messageWithTypeAndLevel(reinterpret_cast(globalObject->consoleClient().get())->m_client, static_cast(MessageType::Log), static_cast(MessageLevel::Warning), globalObject, &jsArgs, 1); - RETURN_IF_EXCEPTION(scope, {}); + if (!type.isNull() && type.isObject() && !isJSArray(type)) { + ctor = type.get(globalObject, Identifier::fromString(vm, "ctor"_s)); + 
RETURN_IF_EXCEPTION(scope, {}); + + code = type.get(globalObject, builtinNames(vm).codePublicName()); + RETURN_IF_EXCEPTION(scope, {}); + + detail = type.get(globalObject, vm.propertyNames->detail); + RETURN_IF_EXCEPTION(scope, {}); + if (!detail.isString()) detail = jsUndefined(); + + type = type.get(globalObject, vm.propertyNames->type); + RETURN_IF_EXCEPTION(scope, {}); + if (!type.toBoolean(globalObject)) type = jsString(vm, String("Warning"_s)); + } else if (type.isCallable()) { + ctor = type; + code = jsUndefined(); + type = jsString(vm, String("Warning"_s)); + } + + if (!type.isUndefined()) { + Bun::V::validateString(scope, globalObject, type, "type"_s); + RETURN_IF_EXCEPTION(scope, {}); + } else { + type = jsString(vm, String("Warning"_s)); + } + + if (code.isCallable()) { + ctor = code; + code = jsUndefined(); + } else if (!code.isUndefined()) { + Bun::V::validateString(scope, globalObject, code, "code"_s); + RETURN_IF_EXCEPTION(scope, {}); + } + + JSObject* errorInstance; + + if (warning.isString()) { + auto s = warning.getString(globalObject); + errorInstance = createError(globalObject, !s.isEmpty() ? s : "Warning"_s); + errorInstance->putDirect(vm, vm.propertyNames->name, type, JSC::PropertyAttribute::DontEnum | 0); + } else if (warning.isCell() && warning.asCell()->type() == ErrorInstanceType) { + errorInstance = warning.getObject(); + } else { + return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, "warning"_s, "string or Error"_s, warning); + } + + if (!code.isUndefined()) errorInstance->putDirect(vm, builtinNames(vm).codePublicName(), code, JSC::PropertyAttribute::DontEnum | 0); + if (!detail.isUndefined()) errorInstance->putDirect(vm, vm.propertyNames->detail, detail, JSC::PropertyAttribute::DontEnum | 0); + // ErrorCaptureStackTrace(warning, ctor || process.emitWarning); + + if (JSC::JSValue::strictEqual(globalObject, type, dep_warning)) { + if (Bun__Node__ProcessNoDeprecation) { + return JSValue::encode(jsUndefined()); + } + if (Bun__Node__ProcessThrowDeprecation) { + // // Delay throwing the error to guarantee that all former warnings were properly logged. 
+ // return process.nextTick(() => { + // throw warning; + // }); + auto func = JSFunction::create(vm, globalObject, 1, ""_s, jsFunction_throwValue, JSC::ImplementationVisibility::Private); + process->queueNextTick(vm, globalObject, func, errorInstance); + return JSValue::encode(jsUndefined()); + } + } + + // process.nextTick(doEmitWarning, warning); + auto func = JSFunction::create(vm, globalObject, 1, ""_s, jsFunction_emitWarning, JSC::ImplementationVisibility::Private); + process->queueNextTick(vm, globalObject, func, errorInstance); return JSValue::encode(jsUndefined()); } @@ -1177,28 +1268,39 @@ JSC_DEFINE_CUSTOM_GETTER(processExitCode, (JSC::JSGlobalObject * lexicalGlobalOb return JSValue::encode(jsNumber(Bun__getExitCode(jsCast(process->globalObject())->bunVM()))); } +bool setProcessExitCodeInner(JSC::JSGlobalObject* lexicalGlobalObject, Process* process, JSValue code) +{ + auto throwScope = DECLARE_THROW_SCOPE(process->vm()); + + if (!code.isUndefinedOrNull()) { + if (code.isString() && !code.getString(lexicalGlobalObject).isEmpty()) { + auto num = code.toNumber(lexicalGlobalObject); + if (!std::isnan(num)) { + code = jsDoubleNumber(num); + } + } + Bun::V::validateInteger(throwScope, lexicalGlobalObject, code, "code"_s, jsUndefined(), jsUndefined()); + RETURN_IF_EXCEPTION(throwScope, false); + + int exitCodeInt = code.toInt32(lexicalGlobalObject) % 256; + RETURN_IF_EXCEPTION(throwScope, false); + + process->m_isExitCodeObservable = true; + void* ptr = jsCast(process->globalObject())->bunVM(); + Bun__setExitCode(ptr, static_cast(exitCodeInt)); + } + return true; +} JSC_DEFINE_CUSTOM_SETTER(setProcessExitCode, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue value, JSC::PropertyName)) { Process* process = jsDynamicCast(JSValue::decode(thisValue)); if (!process) { return false; } - auto throwScope = DECLARE_THROW_SCOPE(process->vm()); - JSValue exitCode = JSValue::decode(value); - if (!exitCode.isAnyInt()) { - throwTypeError(lexicalGlobalObject, throwScope, "exitCode must be an integer"_s); - return false; - } + auto code = JSValue::decode(value); - int exitCodeInt = exitCode.toInt32(lexicalGlobalObject) % 256; - RETURN_IF_EXCEPTION(throwScope, false); - - process->m_isExitCodeObservable = true; - void* ptr = jsCast(process->globalObject())->bunVM(); - Bun__setExitCode(ptr, static_cast(exitCodeInt)); - - return true; + return setProcessExitCodeInner(lexicalGlobalObject, process, code); } JSC_DEFINE_CUSTOM_GETTER(processConnected, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::EncodedJSValue thisValue, JSC::PropertyName name)) @@ -1832,10 +1934,19 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionWriteReport, (JSGlobalObject * globalOb static JSValue constructProcessReportObject(VM& vm, JSObject* processObject) { auto* globalObject = processObject->globalObject(); - auto* report = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 4); - report->putDirect(vm, JSC::Identifier::fromString(vm, "getReport"_s), JSC::JSFunction::create(vm, globalObject, 0, String("getReport"_s), Process_functionGetReport, ImplementationVisibility::Public), 0); + // auto* globalObject = reinterpret_cast(lexicalGlobalObject); + auto process = jsCast(processObject); + + auto* report = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 10); + report->putDirect(vm, JSC::Identifier::fromString(vm, "compact"_s), JSC::jsBoolean(false), 0); report->putDirect(vm, JSC::Identifier::fromString(vm, "directory"_s), JSC::jsEmptyString(vm), 
0); report->putDirect(vm, JSC::Identifier::fromString(vm, "filename"_s), JSC::jsEmptyString(vm), 0); + report->putDirect(vm, JSC::Identifier::fromString(vm, "getReport"_s), JSC::JSFunction::create(vm, globalObject, 0, String("getReport"_s), Process_functionGetReport, ImplementationVisibility::Public), 0); + report->putDirect(vm, JSC::Identifier::fromString(vm, "reportOnFatalError"_s), JSC::jsBoolean(false), 0); + report->putDirect(vm, JSC::Identifier::fromString(vm, "reportOnSignal"_s), JSC::jsBoolean(false), 0); + report->putDirect(vm, JSC::Identifier::fromString(vm, "reportOnUncaughtException"_s), JSC::jsBoolean(process->m_reportOnUncaughtException), 0); + report->putDirect(vm, JSC::Identifier::fromString(vm, "excludeEnv"_s), JSC::jsBoolean(false), 0); + report->putDirect(vm, JSC::Identifier::fromString(vm, "signal"_s), JSC::jsString(vm, String("SIGUSR2"_s)), 0); report->putDirect(vm, JSC::Identifier::fromString(vm, "writeReport"_s), JSC::JSFunction::create(vm, globalObject, 1, String("writeReport"_s), Process_functionWriteReport, ImplementationVisibility::Public), 0); return report; } @@ -1867,24 +1978,22 @@ static JSValue constructProcessConfigObject(VM& vm, JSObject* processObject) // } // } JSC::JSObject* config = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 2); - JSC::JSObject* variables = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 1); - variables->putDirect(vm, JSC::Identifier::fromString(vm, "v8_enable_i8n_support"_s), - JSC::jsNumber(1), 0); + JSC::JSObject* variables = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 2); + variables->putDirect(vm, JSC::Identifier::fromString(vm, "v8_enable_i8n_support"_s), JSC::jsNumber(1), 0); variables->putDirect(vm, JSC::Identifier::fromString(vm, "enable_lto"_s), JSC::jsBoolean(false), 0); config->putDirect(vm, JSC::Identifier::fromString(vm, "target_defaults"_s), JSC::constructEmptyObject(globalObject), 0); config->putDirect(vm, JSC::Identifier::fromString(vm, "variables"_s), variables, 0); + config->freeze(vm); return config; } static JSValue constructProcessHrtimeObject(VM& vm, JSObject* processObject) { auto* globalObject = processObject->globalObject(); - JSC::JSFunction* hrtime = JSC::JSFunction::create(vm, globalObject, 0, - String("hrtime"_s), Process_functionHRTime, ImplementationVisibility::Public); + JSC::JSFunction* hrtime = JSC::JSFunction::create(vm, globalObject, 0, String("hrtime"_s), Process_functionHRTime, ImplementationVisibility::Public); - JSC::JSFunction* hrtimeBigInt = JSC::JSFunction::create(vm, globalObject, 0, - String("bigint"_s), Process_functionHRTimeBigInt, ImplementationVisibility::Public); + JSC::JSFunction* hrtimeBigInt = JSC::JSFunction::create(vm, globalObject, 0, String("bigint"_s), Process_functionHRTimeBigInt, ImplementationVisibility::Public); hrtime->putDirect(vm, JSC::Identifier::fromString(vm, "bigint"_s), hrtimeBigInt); @@ -1974,6 +2083,16 @@ static JSValue constructStdin(VM& vm, JSObject* processObject) RELEASE_AND_RETURN(scope, result); } +JSC_DEFINE_CUSTOM_GETTER(processThrowDeprecation, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::EncodedJSValue thisValue, JSC::PropertyName name)) +{ + return JSValue::encode(jsBoolean(Bun__Node__ProcessThrowDeprecation)); +} + +JSC_DEFINE_CUSTOM_SETTER(setProcessThrowDeprecation, (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, JSC::PropertyName)) +{ + return true; +} + static JSValue constructProcessSend(VM& vm, 
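
Two user-visible effects of the exitCode and report hunks above, sketched under assumed Node-compatible behavior (constructProcessSend continues below):

    process.exitCode = "42";      // numeric strings now coerce before validateInteger runs
    process.exitCode = null;      // accepted and treated as "unset", as in Node
    // process.exitCode = "abc";  // throws ERR_INVALID_ARG_TYPE (not an integer)
    // process.exitCode = 1.5;    // throws ERR_OUT_OF_RANGE via validateInteger

    console.log(process.report?.signal);                    // "SIGUSR2"
    console.log(process.report?.reportOnUncaughtException); // false unless configured
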
JSObject* processObject) { auto* globalObject = processObject->globalObject(); @@ -1996,7 +2115,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun__Process__disconnect, (JSGlobalObject * globalObjec auto global = jsCast(globalObject); if (!Bun__GlobalObject__hasIPC(globalObject)) { - Process__emitErrorEvent(global, jsFunction_ERR_IPC_DISCONNECTED(globalObject, nullptr)); + Process__emitErrorEvent(global, JSValue::encode(createError(globalObject, ErrorCode::ERR_IPC_DISCONNECTED, "IPC channel is already disconnected"_s))); return JSC::JSValue::encode(jsUndefined()); } @@ -2154,6 +2273,167 @@ JSC_DEFINE_HOST_FUNCTION(Process_functiongetgroups, (JSGlobalObject * globalObje } return JSValue::encode(groups); } + +static JSValue maybe_uid_by_name(JSC::ThrowScope& throwScope, JSGlobalObject* globalObject, JSValue value) +{ + if (!value.isNumber() && !value.isString()) return JSValue::decode(Bun::ERR::INVALID_ARG_TYPE(throwScope, globalObject, "id"_s, "number or string"_s, value)); + if (!value.isString()) return value; + + auto str = value.getString(globalObject); + if (!str.is8Bit()) { + auto message = makeString("User identifier does not exist: "_s, str); + throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_UNKNOWN_CREDENTIAL, message)); + return {}; + } + + auto name = (const char*)(str.span8().data()); + struct passwd pwd; + struct passwd* pp = nullptr; + char buf[8192]; + + if (getpwnam_r(name, &pwd, buf, sizeof(buf), &pp) == 0 && pp != nullptr) { + return jsNumber(pp->pw_uid); + } + + auto message = makeString("User identifier does not exist: "_s, str); + throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_UNKNOWN_CREDENTIAL, message)); + return {}; +} + +static JSValue maybe_gid_by_name(JSC::ThrowScope& throwScope, JSGlobalObject* globalObject, JSValue value) +{ + if (!value.isNumber() && !value.isString()) return JSValue::decode(Bun::ERR::INVALID_ARG_TYPE(throwScope, globalObject, "id"_s, "number or string"_s, value)); + if (!value.isString()) return value; + + auto str = value.getString(globalObject); + if (!str.is8Bit()) { + auto message = makeString("Group identifier does not exist: "_s, str); + throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_UNKNOWN_CREDENTIAL, message)); + return {}; + } + + auto name = (const char*)(str.span8().data()); + struct group pwd; + struct group* pp = nullptr; + char buf[8192]; + + if (getgrnam_r(name, &pwd, buf, sizeof(buf), &pp) == 0 && pp != nullptr) { + return jsNumber(pp->gr_gid); + } + + auto message = makeString("Group identifier does not exist: "_s, str); + throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_UNKNOWN_CREDENTIAL, message)); + return {}; +} + +JSC_DEFINE_HOST_FUNCTION(Process_functionsetuid, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto value = callFrame->argument(0); + auto is_number = value.isNumber(); + value = maybe_uid_by_name(scope, globalObject, value); + RETURN_IF_EXCEPTION(scope, {}); + if (is_number) Bun::V::validateInteger(scope, globalObject, value, "id"_s, jsNumber(0), jsNumber(std::pow(2, 31) - 1)); + RETURN_IF_EXCEPTION(scope, {}); + auto id = value.toUInt32(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto result = setuid(id); + if (result != 0) throwSystemError(scope, globalObject, "setuid"_s, errno); + RETURN_IF_EXCEPTION(scope, {}); + return JSValue::encode(jsNumber(result)); +} + 
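
process.setuid above, and the seteuid/setegid/setgid/setgroups variants that follow, resolve string credentials through getpwnam_r/getgrnam_r and report unknown names as ERR_UNKNOWN_CREDENTIAL; numeric ids are range-checked to 0..2^31-1, and setgroups additionally caps the array at 64 entries of numbers or resolvable group names. One caveat worth confirming in review: str.span8().data() is not guaranteed to be NUL-terminated, which the getpwnam_r/getgrnam_r name parameter requires. Expected usage from JS (a sketch; needs a privileged process):

    if (process.getuid?.() === 0) {
      process.setuid("nobody");              // resolved via getpwnam_r
      process.setgroups?.([0, "staff", 20]); // mixed numeric gids and group names
    }
    // process.setuid("no-such-user")        throws ERR_UNKNOWN_CREDENTIAL
    // a 65-element array to setgroups      throws ERR_OUT_OF_RANGE on "groups.length"
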
+JSC_DEFINE_HOST_FUNCTION(Process_functionseteuid, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto value = callFrame->argument(0); + auto is_number = value.isNumber(); + value = maybe_uid_by_name(scope, globalObject, value); + RETURN_IF_EXCEPTION(scope, {}); + if (is_number) Bun::V::validateInteger(scope, globalObject, value, "id"_s, jsNumber(0), jsNumber(std::pow(2, 31) - 1)); + RETURN_IF_EXCEPTION(scope, {}); + auto id = value.toUInt32(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto result = seteuid(id); + if (result != 0) throwSystemError(scope, globalObject, "seteuid"_s, errno); + RETURN_IF_EXCEPTION(scope, {}); + return JSValue::encode(jsNumber(result)); +} + +JSC_DEFINE_HOST_FUNCTION(Process_functionsetegid, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto value = callFrame->argument(0); + auto is_number = value.isNumber(); + value = maybe_gid_by_name(scope, globalObject, value); + RETURN_IF_EXCEPTION(scope, {}); + if (is_number) Bun::V::validateInteger(scope, globalObject, value, "id"_s, jsNumber(0), jsNumber(std::pow(2, 31) - 1)); + RETURN_IF_EXCEPTION(scope, {}); + auto id = value.toUInt32(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto result = setegid(id); + if (result != 0) throwSystemError(scope, globalObject, "setegid"_s, errno); + RETURN_IF_EXCEPTION(scope, {}); + return JSValue::encode(jsNumber(result)); +} + +JSC_DEFINE_HOST_FUNCTION(Process_functionsetgid, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto value = callFrame->argument(0); + auto is_number = value.isNumber(); + value = maybe_gid_by_name(scope, globalObject, value); + RETURN_IF_EXCEPTION(scope, {}); + if (is_number) Bun::V::validateInteger(scope, globalObject, value, "id"_s, jsNumber(0), jsNumber(std::pow(2, 31) - 1)); + RETURN_IF_EXCEPTION(scope, {}); + auto id = value.toUInt32(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto result = setgid(id); + if (result != 0) throwSystemError(scope, globalObject, "setgid"_s, errno); + RETURN_IF_EXCEPTION(scope, {}); + return JSValue::encode(jsNumber(result)); +} + +JSC_DEFINE_HOST_FUNCTION(Process_functionsetgroups, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto groups = callFrame->argument(0); + Bun::V::validateArray(scope, globalObject, groups, "groups"_s, jsUndefined()); + RETURN_IF_EXCEPTION(scope, {}); + auto groupsArray = JSC::jsDynamicCast(groups); + auto count = groupsArray->length(); + gid_t groupsStack[64]; + if (count > 64) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, "groups.length"_s, 0, 64, groups); + + for (unsigned i = 0; i < count; i++) { + auto item = groupsArray->getIndexQuickly(i); + auto name = makeString("groups["_s, i, "]"_s); + + if (item.isNumber()) { + Bun::V::validateUint32(scope, globalObject, item, jsString(vm, name), jsUndefined()); + RETURN_IF_EXCEPTION(scope, {}); + groupsStack[i] = item.toUInt32(globalObject); + continue; + } else if (item.isString()) { + item = maybe_gid_by_name(scope, globalObject, item); + RETURN_IF_EXCEPTION(scope, {}); + groupsStack[i] = item.toUInt32(globalObject); + continue; + } + return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, name, "number or string"_s, item); + } + + auto result = setgroups(count, groupsStack); + if 
(result != 0) throwSystemError(scope, globalObject, "setgroups"_s, errno); + RETURN_IF_EXCEPTION(scope, {}); + return JSValue::encode(jsNumber(result)); +} + #endif JSC_DEFINE_HOST_FUNCTION(Process_functionAssert, (JSGlobalObject * globalObject, CallFrame* callFrame)) @@ -2163,18 +2443,22 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionAssert, (JSGlobalObject * globalObject, JSValue arg0 = callFrame->argument(0); bool condition = arg0.toBoolean(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); if (condition) { return JSValue::encode(jsUndefined()); } - JSValue arg1 = callFrame->argument(1); - String message = arg1.isUndefined() ? String() : arg1.toWTFString(globalObject); - RETURN_IF_EXCEPTION(throwScope, {}); - auto error = createError(globalObject, makeString("Assertion failed: "_s, message)); - error->putDirect(vm, Identifier::fromString(vm, "code"_s), jsString(vm, makeString("ERR_ASSERTION"_s))); - throwException(globalObject, throwScope, error); - return {}; + auto msg = callFrame->argument(1); + auto msgb = msg.toBoolean(globalObject); + if (msgb) { + return Bun::ERR::ASSERTION(throwScope, globalObject, msg); + } + return Bun::ERR::ASSERTION(throwScope, globalObject, "assertion error"_s); +} + +extern "C" uint64_t Bun__Os__getFreeMemory(void); +JSC_DEFINE_HOST_FUNCTION(Process_availableMemory, (JSGlobalObject * globalObject, CallFrame* callFrame)) +{ + return JSValue::encode(jsDoubleNumber(Bun__Os__getFreeMemory())); } #define PROCESS_BINDING_NOT_IMPLEMENTED_ISSUE(str, issue) \ @@ -2270,12 +2554,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionReallyExit, (JSGlobalObject * globalObj JSValue arg0 = callFrame->argument(0); if (arg0.isAnyInt()) { exitCode = static_cast(arg0.toInt32(globalObject) % 256); - RETURN_IF_EXCEPTION(throwScope, JSC::JSValue::encode(JSC::JSValue {})); - } else if (!arg0.isUndefinedOrNull()) { - throwTypeError(globalObject, throwScope, "The \"code\" argument must be an integer"_s); - return {}; - } else { - exitCode = Bun__getExitCode(bunVM(globalObject)); + RETURN_IF_EXCEPTION(throwScope, {}); } auto* zigGlobal = defaultGlobalObject(globalObject); @@ -2346,18 +2625,12 @@ static Process* getProcessObject(JSC::JSGlobalObject* lexicalGlobalObject, JSVal return process; } -JSC_DEFINE_HOST_FUNCTION(Process_functionConstrainedMemory, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionConstrainedMemory, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { -#if OS(LINUX) || OS(FREEBSD) return JSValue::encode(jsDoubleNumber(static_cast(WTF::ramSize()))); -#else - return JSValue::encode(jsUndefined()); -#endif } -JSC_DEFINE_HOST_FUNCTION(Process_functionCpuUsage, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionCpuUsage, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { JSC::VM& vm = globalObject->vm(); auto throwScope = DECLARE_THROW_SCOPE(vm); @@ -2389,8 +2662,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionCpuUsage, if (!comparatorValue.isUndefined()) { JSC::JSObject* comparator = comparatorValue.getObject(); if (UNLIKELY(!comparator)) { - throwTypeError(globalObject, throwScope, "Expected an object as the first argument"_s); - return JSC::JSValue::encode(JSC::jsUndefined()); + return Bun::ERR::INVALID_ARG_TYPE(throwScope, globalObject, "prevValue"_s, "object"_s, comparatorValue); } JSValue userValue; @@ -2401,33 +2673,29 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionCpuUsage, systemValue = comparator->getDirect(1); } else { 
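
The assert and availableMemory changes above are also user-visible (the cpuUsage hunk continues below): process.assert now throws a coded ERR_ASSERTION with a default message when none is given, and process.availableMemory exposes Bun__Os__getFreeMemory, presumably the same counter behind os.freemem() judging by the symbol name. Sketch:

    try {
      (process as any).assert(false, "invariant broken");
    } catch (e: any) {
      console.log(e.code, e.message); // "ERR_ASSERTION" "invariant broken"
    }

    console.log((process as any).availableMemory()); // free memory in bytes
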
userValue = comparator->getIfPropertyExists(globalObject, JSC::Identifier::fromString(vm, "user"_s)); - RETURN_IF_EXCEPTION(throwScope, JSC::JSValue::encode(JSC::jsUndefined())); + RETURN_IF_EXCEPTION(throwScope, {}); + if (userValue.isEmpty()) userValue = jsUndefined(); systemValue = comparator->getIfPropertyExists(globalObject, JSC::Identifier::fromString(vm, "system"_s)); - RETURN_IF_EXCEPTION(throwScope, JSC::JSValue::encode(JSC::jsUndefined())); + RETURN_IF_EXCEPTION(throwScope, {}); + if (systemValue.isEmpty()) systemValue = jsUndefined(); } - if (UNLIKELY(!userValue || !userValue.isNumber())) { - throwTypeError(globalObject, throwScope, "Expected a number for the 'user' property"_s); - return JSC::JSValue::encode(JSC::jsUndefined()); - } + Bun::V::validateNumber(throwScope, globalObject, userValue, "prevValue.user"_s, jsUndefined(), jsUndefined()); + RETURN_IF_EXCEPTION(throwScope, {}); - if (UNLIKELY(!systemValue || !systemValue.isNumber())) { - throwTypeError(globalObject, throwScope, "Expected a number for the 'system' property"_s); - return JSC::JSValue::encode(JSC::jsUndefined()); - } + Bun::V::validateNumber(throwScope, globalObject, systemValue, "prevValue.system"_s, jsUndefined(), jsUndefined()); + RETURN_IF_EXCEPTION(throwScope, {}); double userComparator = userValue.toNumber(globalObject); double systemComparator = systemValue.toNumber(globalObject); - if (userComparator > JSC::maxSafeInteger() || userComparator < 0 || std::isnan(userComparator)) { - throwRangeError(globalObject, throwScope, "The 'user' property must be a number between 0 and 2^53"_s); - return JSC::JSValue::encode(JSC::jsUndefined()); + if (!(userComparator >= 0 && userComparator <= JSC::maxSafeInteger())) { + return Bun::ERR::INVALID_ARG_VALUE_RangeError(throwScope, globalObject, "prevValue.user"_s, userValue, "is invalid"_s); } - if (systemComparator > JSC::maxSafeInteger() || systemComparator < 0 || std::isnan(systemComparator)) { - throwRangeError(globalObject, throwScope, "The 'system' property must be a number between 0 and 2^53"_s); - return JSC::JSValue::encode(JSC::jsUndefined()); + if (!(systemComparator >= 0 && systemComparator <= JSC::maxSafeInteger())) { + return Bun::ERR::INVALID_ARG_VALUE_RangeError(throwScope, globalObject, "prevValue.system"_s, systemValue, "is invalid"_s); } user -= userComparator; @@ -2529,8 +2797,7 @@ err: #endif } -JSC_DEFINE_HOST_FUNCTION(Process_functionMemoryUsage, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionMemoryUsage, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { JSC::VM& vm = globalObject->vm(); auto throwScope = DECLARE_THROW_SCOPE(vm); @@ -2544,7 +2811,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionMemoryUsage, JSC::JSObject* result = JSC::constructEmptyObject(vm, process->memoryUsageStructure()); if (UNLIKELY(throwScope.exception())) { - return JSC::JSValue::encode(JSC::JSValue {}); + return {}; } // Node.js: @@ -2581,8 +2848,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionMemoryUsage, RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(result)); } -JSC_DEFINE_HOST_FUNCTION(Process_functionMemoryUsageRSS, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionMemoryUsageRSS, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { JSC::VM& vm = globalObject->vm(); auto throwScope = DECLARE_THROW_SCOPE(vm); @@ -2725,7 +2991,8 @@ JSValue Process::constructNextTickFn(JSC::VM& vm, Zig::GlobalObject* globalObjec { 
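
Stepping back to the cpuUsage hunk just above: invalid prevValue inputs now produce coded errors instead of bare TypeError/RangeError, roughly:

    const prev = process.cpuUsage();
    const delta = process.cpuUsage(prev);   // { user, system } deltas in microseconds
    console.log(delta.user >= 0 && delta.system >= 0);

    // process.cpuUsage(42)                      ERR_INVALID_ARG_TYPE ("prevValue" must be object)
    // process.cpuUsage({ user: -1, system: 0 })
    //   RangeError [ERR_INVALID_ARG_VALUE]: The property 'prevValue.user' is invalid. Received -1
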
JSValue nextTickQueueObject; if (!globalObject->m_nextTickQueue) { - nextTickQueueObject = Bun::JSNextTickQueue::create(globalObject); + auto nextTickQueue = Bun::JSNextTickQueue::create(globalObject); + nextTickQueueObject = nextTickQueue; globalObject->m_nextTickQueue.set(vm, globalObject, nextTickQueueObject); } else { nextTickQueueObject = jsCast(globalObject->m_nextTickQueue.get()); @@ -2754,6 +3021,17 @@ static JSValue constructProcessNextTickFn(VM& vm, JSObject* processObject) return jsCast(processObject)->constructNextTickFn(globalObject->vm(), globalObject); } +JSC_DEFINE_CUSTOM_GETTER(processNoDeprecation, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::EncodedJSValue thisValue, JSC::PropertyName name)) +{ + return JSValue::encode(jsBoolean(Bun__Node__ProcessNoDeprecation)); +} + +JSC_DEFINE_CUSTOM_SETTER(setProcessNoDeprecation, (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, JSC::PropertyName)) +{ + Bun__Node__ProcessNoDeprecation = JSC::JSValue::decode(encodedValue).toBoolean(globalObject); + return true; +} + static JSValue constructFeatures(VM& vm, JSObject* processObject) { // { @@ -2800,9 +3078,7 @@ JSC_DEFINE_CUSTOM_GETTER(processDebugPort, (JSC::JSGlobalObject * globalObject, return JSC::JSValue::encode(jsNumber(_debugPort)); } -JSC_DEFINE_CUSTOM_SETTER(setProcessDebugPort, - (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, - JSC::EncodedJSValue encodedValue, JSC::PropertyName)) +JSC_DEFINE_CUSTOM_SETTER(setProcessDebugPort, (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, JSC::PropertyName)) { auto& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); @@ -2843,9 +3119,7 @@ JSC_DEFINE_CUSTOM_GETTER(processTitle, (JSC::JSGlobalObject * globalObject, JSC: #endif } -JSC_DEFINE_CUSTOM_SETTER(setProcessTitle, - (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, - JSC::EncodedJSValue value, JSC::PropertyName)) +JSC_DEFINE_CUSTOM_SETTER(setProcessTitle, (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue value, JSC::PropertyName)) { JSC::VM& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); @@ -2889,14 +3163,12 @@ extern "C" EncodedJSValue Process__getCachedCwd(JSC::JSGlobalObject* globalObjec return JSValue::encode(getCachedCwd(globalObject)); } -JSC_DEFINE_HOST_FUNCTION(Process_functionCwd, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionCwd, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { return JSValue::encode(getCachedCwd(globalObject)); } -JSC_DEFINE_HOST_FUNCTION(Process_functionReallyKill, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionReallyKill, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); @@ -2922,13 +3194,18 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionReallyKill, RELEASE_AND_RETURN(scope, JSValue::encode(jsNumber(result))); } -JSC_DEFINE_HOST_FUNCTION(Process_functionKill, - (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +JSC_DEFINE_HOST_FUNCTION(Process_functionKill, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); auto pid_value = callFrame->argument(0); + + // this is mimicking `if (pid != (pid | 0)) {` int pid = pid_value.toInt32(globalObject); 
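
The guard completed just below reproduces Node's loose `pid != (pid | 0)` check: the argument is converted with toInt32 and compared back against the original, so fractional and non-numeric pids are rejected with ERR_INVALID_ARG_TYPE, while integer-like strings still pass because JSValue::equal is abstract equality. For instance:

    process.kill(process.pid, 0); // signal 0: liveness probe only, sends nothing
    // process.kill(1.5)          rejected: 1.5 !== (1.5 | 0)
    // process.kill("abc")        rejected by the same guard ("abc" != 0)
    // process.kill("42")         allowed: "42" == 42 under loose equality, matching Node
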
RETURN_IF_EXCEPTION(scope, {}); + if (!JSC::JSValue::equal(globalObject, pid_value, jsNumber(pid))) { + return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, "pid"_s, "number"_s, pid_value); + } + JSC::JSValue signalValue = callFrame->argument(1); int signal = SIGTERM; if (signalValue.isNumber()) { @@ -3022,6 +3299,7 @@ extern "C" void Process__emitErrorEvent(Zig::GlobalObject* global, EncodedJSValu argv constructArgv PropertyCallback argv0 constructArgv0 PropertyCallback assert Process_functionAssert Function 1 + availableMemory Process_availableMemory Function 0 binding Process_functionBinding Function 1 browser constructBrowser PropertyCallback chdir Process_functionChdir Function 1 @@ -3039,7 +3317,7 @@ extern "C" void Process__emitErrorEvent(Zig::GlobalObject* global, EncodedJSValu execArgv constructExecArgv PropertyCallback execPath constructExecPath PropertyCallback exit Process_functionExit Function 1 - exitCode processExitCode CustomAccessor + exitCode processExitCode CustomAccessor|DontDelete features constructFeatures PropertyCallback getActiveResourcesInfo Process_stubFunctionReturningArray Function 0 hasUncaughtExceptionCaptureCallback Process_hasUncaughtExceptionCaptureCallback Function 0 @@ -3050,6 +3328,7 @@ extern "C" void Process__emitErrorEvent(Zig::GlobalObject* global, EncodedJSValu memoryUsage constructMemoryUsage PropertyCallback moduleLoadList Process_stubEmptyArray PropertyCallback nextTick constructProcessNextTickFn PropertyCallback + noDeprecation processNoDeprecation CustomAccessor openStdin Process_functionOpenStdin Function 0 pid constructPid PropertyCallback platform constructPlatform PropertyCallback @@ -3064,6 +3343,7 @@ extern "C" void Process__emitErrorEvent(Zig::GlobalObject* global, EncodedJSValu stderr constructStderr PropertyCallback stdin constructStdin PropertyCallback stdout constructStdout PropertyCallback + throwDeprecation processThrowDeprecation CustomAccessor title processTitle CustomAccessor umask Process_functionUmask Function 1 uptime Process_functionUptime Function 1 @@ -3081,12 +3361,19 @@ extern "C" void Process__emitErrorEvent(Zig::GlobalObject* global, EncodedJSValu _stopProfilerIdleNotifier Process_stubEmptyFunction Function 0 _tickCallback Process_stubEmptyFunction Function 0 _kill Process_functionReallyKill Function 2 + #if !OS(WINDOWS) getegid Process_functiongetegid Function 0 geteuid Process_functiongeteuid Function 0 getgid Process_functiongetgid Function 0 getgroups Process_functiongetgroups Function 0 getuid Process_functiongetuid Function 0 + + setegid Process_functionsetegid Function 1 + seteuid Process_functionseteuid Function 1 + setgid Process_functionsetgid Function 1 + setgroups Process_functionsetgroups Function 1 + setuid Process_functionsetuid Function 1 #endif @end */ diff --git a/src/bun.js/bindings/BunProcess.h b/src/bun.js/bindings/BunProcess.h index 368d93ae8b..3fbbfd0142 100644 --- a/src/bun.js/bindings/BunProcess.h +++ b/src/bun.js/bindings/BunProcess.h @@ -36,6 +36,7 @@ public: } DECLARE_EXPORT_INFO; + bool m_reportOnUncaughtException = false; static void destroy(JSC::JSCell* cell) { diff --git a/src/bun.js/bindings/CallSite.h b/src/bun.js/bindings/CallSite.h index 35c2e42174..8dac8702b1 100644 --- a/src/bun.js/bindings/CallSite.h +++ b/src/bun.js/bindings/CallSite.h @@ -80,6 +80,7 @@ public: void setLineNumber(OrdinalNumber lineNumber) { m_lineNumber = lineNumber; } void setColumnNumber(OrdinalNumber columnNumber) { m_columnNumber = columnNumber; } + void setSourceURL(JSC::VM& vm, JSC::JSString* sourceURL) 
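
On the accessors registered in the table above: noDeprecation is read/write and is consulted by emitWarning before it does any work, while throwDeprecation's setter is currently a silent no-op, making it a getter in effect. Assumed behavior:

    process.noDeprecation = true;
    process.emitWarning("legacy call", "DeprecationWarning"); // dropped silently
    process.noDeprecation = false;
    process.emitWarning("legacy call", "DeprecationWarning"); // emitted/logged again

    console.log(process.throwDeprecation); // reflects --throw-deprecation; writes are ignored
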
{ m_sourceURL.set(vm, this, sourceURL); } void formatAsString(JSC::VM& vm, JSC::JSGlobalObject* globalObject, WTF::StringBuilder& sb); diff --git a/src/bun.js/bindings/CommonJSModuleRecord.cpp b/src/bun.js/bindings/CommonJSModuleRecord.cpp index 459241ac21..bac55fc1dc 100644 --- a/src/bun.js/bindings/CommonJSModuleRecord.cpp +++ b/src/bun.js/bindings/CommonJSModuleRecord.cpp @@ -1016,11 +1016,13 @@ void JSCommonJSModule::visitChildrenImpl(JSCell* cell, Visitor& visitor) JSCommonJSModule* thisObject = jsCast(cell); ASSERT_GC_OBJECT_INHERITS(thisObject, info()); Base::visitChildren(thisObject, visitor); - visitor.append(thisObject->m_id); - visitor.append(thisObject->m_filename); - visitor.append(thisObject->m_dirname); - visitor.append(thisObject->m_paths); - visitor.append(thisObject->m_overridenParent); + + // Use appendHidden so it doesn't show up in the heap snapshot twice. + visitor.appendHidden(thisObject->m_id); + visitor.appendHidden(thisObject->m_filename); + visitor.appendHidden(thisObject->m_dirname); + visitor.appendHidden(thisObject->m_paths); + visitor.appendHidden(thisObject->m_overridenParent); } DEFINE_VISIT_CHILDREN(JSCommonJSModule); @@ -1029,18 +1031,43 @@ void JSCommonJSModule::analyzeHeap(JSCell* cell, HeapAnalyzer& analyzer) { auto* thisObject = jsCast(cell); - if (auto* id = thisObject->m_id.get()) { - if (!id->isRope()) { - auto label = id->tryGetValue(false); - analyzer.setLabelForCell(cell, makeString("CommonJS Module: "_s, StringView(label))); - } else { - analyzer.setLabelForCell(cell, "CommonJS Module"_s); - } - } else { - analyzer.setLabelForCell(cell, "CommonJS Module"_s); - } + analyzer.setLabelForCell(cell, "Module (CommonJS)"_s); Base::analyzeHeap(cell, analyzer); + auto& vm = cell->vm(); + auto& builtinNames = Bun::builtinNames(vm); + if (auto* id = thisObject->m_id.get()) { + analyzer.analyzePropertyNameEdge(cell, id, vm.propertyNames->id.impl()); + } + + if (thisObject->m_filename) { + JSValue filename = thisObject->m_filename.get(); + if (filename.isCell()) { + analyzer.analyzePropertyNameEdge(cell, filename.asCell(), builtinNames.filenamePublicName().impl()); + } + } + + if (thisObject->m_dirname) { + JSValue dirname = thisObject->m_dirname.get(); + if (dirname.isCell()) { + analyzer.analyzePropertyNameEdge(cell, dirname.asCell(), builtinNames.dirnamePublicName().impl()); + } + } + + if (thisObject->m_paths) { + JSValue paths = thisObject->m_paths.get(); + if (paths.isCell()) { + analyzer.analyzePropertyNameEdge(cell, paths.asCell(), builtinNames.pathsPublicName().impl()); + } + } + + if (thisObject->m_overridenParent) { + JSValue overridenParent = thisObject->m_overridenParent.get(); + if (overridenParent.isCell()) { + const Identifier overridenParentIdentifier = Identifier::fromString(vm, "parent"_s); + analyzer.analyzePropertyNameEdge(cell, overridenParent.asCell(), overridenParentIdentifier.impl()); + } + } } const JSC::ClassInfo JSCommonJSModule::s_info = { "Module"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSCommonJSModule) }; diff --git a/src/bun.js/bindings/ErrorCode.cpp b/src/bun.js/bindings/ErrorCode.cpp index bb7c91195c..1e5d8e720c 100644 --- a/src/bun.js/bindings/ErrorCode.cpp +++ b/src/bun.js/bindings/ErrorCode.cpp @@ -214,7 +214,7 @@ WTF::String JSValueToStringSafe(JSC::JSGlobalObject* globalObject, JSValue arg) return makeString("[Function: "_s, name, ']'); } - return "[Function: (anonymous)]"_s; + return "[Function (anonymous)]"_s; break; } @@ -279,7 +279,7 @@ WTF::String determineSpecificType(JSC::JSGlobalObject* 
globalObject, JSValue val if (!name.isNull() && name.length() > 0) { return makeString("function "_s, name); } - return String("function"_s); + return String("function "_s); } if (cell->isString()) { auto str = value.toString(globalObject)->getString(globalObject); @@ -405,7 +405,7 @@ namespace ERR { JSC::EncodedJSValue INVALID_ARG_TYPE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, const WTF::String& arg_name, const WTF::String& expected_type, JSC::JSValue val_actual_value) { - auto arg_kind = arg_name.startsWith("options."_s) ? "property"_s : "argument"_s; + auto arg_kind = arg_name.contains('.') ? "property"_s : "argument"_s; auto ty_first_char = expected_type[0]; auto ty_kind = ty_first_char >= 'A' && ty_first_char <= 'Z' ? "an instance of"_s : "of type"_s; @@ -420,7 +420,7 @@ JSC::EncodedJSValue INVALID_ARG_TYPE(JSC::ThrowScope& throwScope, JSC::JSGlobalO { auto arg_name = val_arg_name.toWTFString(globalObject); RETURN_IF_EXCEPTION(throwScope, {}); - auto arg_kind = arg_name.startsWith("options."_s) ? "property"_s : "argument"_s; + auto arg_kind = arg_name.contains('.') ? "property"_s : "argument"_s; auto ty_first_char = expected_type[0]; auto ty_kind = ty_first_char >= 'A' && ty_first_char <= 'Z' ? "an instance of"_s : "of type"_s; @@ -500,7 +500,7 @@ JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObjec JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral name, JSC::JSValue value, const WTF::String& reason) { - ASCIILiteral type = String(name).find('.') != notFound ? "property"_s : "argument"_s; + ASCIILiteral type = String(name).contains('.') ? "property"_s : "argument"_s; auto value_string = JSValueToStringSafe(globalObject, value); RETURN_IF_EXCEPTION(throwScope, {}); @@ -509,6 +509,20 @@ JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobal throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, message)); return {}; } +JSC::EncodedJSValue INVALID_ARG_VALUE_RangeError(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral name, JSC::JSValue value, const WTF::String& reason) +{ + ASCIILiteral type = String(name).contains('.') ? "property"_s : "argument"_s; + + auto value_string = JSValueToStringSafe(globalObject, value); + RETURN_IF_EXCEPTION(throwScope, {}); + + auto& vm = globalObject->vm(); + auto message = makeString("The "_s, type, " '"_s, name, "' "_s, reason, ". 
Received "_s, value_string); + auto* structure = createErrorStructure(vm, globalObject, ErrorType::RangeError, "RangeError"_s, "ERR_INVALID_ARG_VALUE"_s); + auto error = JSC::ErrorInstance::create(vm, structure, message, jsUndefined(), nullptr, JSC::RuntimeType::TypeNothing, ErrorType::RangeError, true); + throwScope.throwException(globalObject, error); + return {}; +} JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue name, JSC::JSValue value, const WTF::String& reason) { auto name_string = JSValueToStringSafe(globalObject, name); @@ -551,7 +565,7 @@ JSC::EncodedJSValue BUFFER_OUT_OF_BOUNDS(JSC::ThrowScope& throwScope, JSC::JSGlo JSC::EncodedJSValue UNKNOWN_SIGNAL(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue signal, bool triedUppercase) { - auto signal_string = JSValueToStringSafe(globalObject, signal); + auto signal_string = signal.toWTFString(globalObject); RETURN_IF_EXCEPTION(throwScope, {}); auto message_extra = triedUppercase ? " (signals must use all capital letters)"_s : ""_s; @@ -574,6 +588,28 @@ JSC::EncodedJSValue SOCKET_BAD_PORT(JSC::ThrowScope& throwScope, JSC::JSGlobalOb return {}; } +JSC::EncodedJSValue UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject) +{ + auto message = "`process.setupUncaughtExceptionCapture()` was called while a capture callback was already active"_s; + throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET, message)); + return {}; +} + +JSC::EncodedJSValue ASSERTION(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue msg) +{ + auto msg_string = msg.toWTFString(globalObject); + RETURN_IF_EXCEPTION(throwScope, {}); + auto message = msg_string; + throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_ASSERTION, message)); + return {}; +} +JSC::EncodedJSValue ASSERTION(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, ASCIILiteral msg) +{ + auto message = msg; + throwScope.throwException(globalObject, createError(globalObject, ErrorCode::ERR_ASSERTION, message)); + return {}; +} + } static JSC::JSValue ERR_INVALID_ARG_TYPE(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue arg0, JSValue arg1, JSValue arg2) @@ -596,6 +632,25 @@ static JSC::JSValue ERR_INVALID_ARG_TYPE(JSC::ThrowScope& scope, JSC::JSGlobalOb return createError(globalObject, ErrorCode::ERR_INVALID_ARG_TYPE, msg); } +static JSValue ERR_INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue name, JSC::JSValue value, JSC::JSValue reason) +{ + ASSERT(name.isString()); + auto name_string = name.toWTFString(globalObject); + ASCIILiteral type = name_string.contains('.') ? "property"_s : "argument"_s; + + auto value_string = JSValueToStringSafe(globalObject, value); + RETURN_IF_EXCEPTION(throwScope, {}); + + ASSERT(reason.isUndefined() || reason.isString()); + if (reason.isUndefined()) { + auto message = makeString("The "_s, type, " '"_s, name_string, "' is invalid. Received "_s, value_string); + return createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, message); + } + auto reason_string = reason.toWTFString(globalObject); + auto message = makeString("The "_s, type, " '"_s, name_string, "' "_s, reason_string, ". 
Received "_s, value_string); + return createError(globalObject, ErrorCode::ERR_INVALID_ARG_VALUE, message); +} + JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_OUT_OF_RANGE, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { JSC::VM& vm = globalObject->vm(); @@ -608,31 +663,11 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_OUT_OF_RANGE, (JSC::JSGlobalObject * glo return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_OUT_OF_RANGE, message)); } -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_IPC_DISCONNECTED, (JSC::JSGlobalObject * globalObject, JSC::CallFrame*)) -{ - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_IPC_DISCONNECTED, "IPC channel is already disconnected"_s)); -} - -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_SERVER_NOT_RUNNING, (JSC::JSGlobalObject * globalObject, JSC::CallFrame*)) -{ - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SERVER_NOT_RUNNING, "Server is not running."_s)); -} - extern "C" JSC::EncodedJSValue Bun__createErrorWithCode(JSC::JSGlobalObject* globalObject, ErrorCode code, BunString* message) { return JSValue::encode(createError(globalObject, code, message->toWTFString(BunString::ZeroCopy))); } -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_IPC_CHANNEL_CLOSED, (JSC::JSGlobalObject * globalObject, JSC::CallFrame*)) -{ - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_IPC_CHANNEL_CLOSED, "Channel closed."_s)); -} - -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_SOCKET_BAD_TYPE, (JSC::JSGlobalObject * globalObject, JSC::CallFrame*)) -{ - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SOCKET_BAD_TYPE, "Bad socket type specified. Valid types are: udp4, udp6"_s)); -} - JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_INVALID_PROTOCOL, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { JSC::VM& vm = globalObject->vm(); @@ -650,19 +685,6 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_INVALID_PROTOCOL, (JSC::JSGlobalObject * return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_INVALID_PROTOCOL, message)); } -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_INVALID_ARG_TYPE, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) -{ - JSC::VM& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); - - EXPECT_ARG_COUNT(3); - - auto arg_name = callFrame->argument(0); - auto expected_type = callFrame->argument(1); - auto actual_value = callFrame->argument(2); - return JSValue::encode(ERR_INVALID_ARG_TYPE(scope, globalObject, arg_name, expected_type, actual_value)); -} - JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_BROTLI_INVALID_PARAM, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { JSC::VM& vm = globalObject->vm(); @@ -691,16 +713,6 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_BUFFER_TOO_LARGE, (JSC::JSGlobalObject * return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_BUFFER_TOO_LARGE, message)); } -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_ZLIB_INITIALIZATION_FAILED, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) -{ - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_ZLIB_INITIALIZATION_FAILED, "Initialization failed"_s)); -} - -JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_BUFFER_OUT_OF_BOUNDS, (JSC::JSGlobalObject * globalObject, JSC::CallFrame*)) -{ - return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_BUFFER_OUT_OF_BOUNDS, "Attempt to access memory outside buffer bounds"_s)); -} - JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_UNHANDLED_ERROR, (JSC::JSGlobalObject * globalObject, 
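
Common to the formatters above: a '.' anywhere in the name now selects "property" over "argument" (previously only an "options." prefix did), and the new INVALID_ARG_VALUE_RangeError variant keeps the ERR_INVALID_ARG_VALUE code on a RangeError instance, matching Node's habit of picking the constructor per call site. Resulting shapes, illustratively:

    // The argument 'pid' is invalid. Received ...
    // The property 'prevValue.user' is invalid. Received -1
    try {
      process.cpuUsage({ user: -1, system: 0 });
    } catch (e: any) {
      console.log(e instanceof RangeError, e.code); // true "ERR_INVALID_ARG_VALUE"
    }
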
JSC::CallFrame* callFrame)) { JSC::VM& vm = globalObject->vm(); @@ -780,7 +792,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject JSC::VM& vm = globalObject->vm(); auto scope = DECLARE_THROW_SCOPE(vm); - EXPECT_ARG_COUNT(2); + EXPECT_ARG_COUNT(1); JSC::JSValue codeValue = callFrame->argument(0); RETURN_IF_EXCEPTION(scope, {}); @@ -808,9 +820,43 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject JSValue arg0 = callFrame->argument(1); JSValue arg1 = callFrame->argument(2); JSValue arg2 = callFrame->argument(3); - return JSValue::encode(ERR_INVALID_ARG_TYPE(scope, globalObject, arg0, arg1, arg2)); } + + case Bun::ErrorCode::ERR_INVALID_ARG_VALUE: { + JSValue arg0 = callFrame->argument(1); + JSValue arg1 = callFrame->argument(2); + JSValue arg2 = callFrame->argument(3); + return JSValue::encode(ERR_INVALID_ARG_VALUE(scope, globalObject, arg0, arg1, arg2)); + } + + case ErrorCode::ERR_IPC_DISCONNECTED: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_IPC_DISCONNECTED, "IPC channel is already disconnected"_s)); + case ErrorCode::ERR_SERVER_NOT_RUNNING: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SERVER_NOT_RUNNING, "Server is not running."_s)); + case ErrorCode::ERR_IPC_CHANNEL_CLOSED: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_IPC_CHANNEL_CLOSED, "Channel closed."_s)); + case ErrorCode::ERR_SOCKET_BAD_TYPE: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SOCKET_BAD_TYPE, "Bad socket type specified. Valid types are: udp4, udp6"_s)); + case ErrorCode::ERR_ZLIB_INITIALIZATION_FAILED: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_ZLIB_INITIALIZATION_FAILED, "Initialization failed"_s)); + case ErrorCode::ERR_BUFFER_OUT_OF_BOUNDS: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_BUFFER_OUT_OF_BOUNDS, "Attempt to access memory outside buffer bounds"_s)); + case ErrorCode::ERR_IPC_ONE_PIPE: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_IPC_ONE_PIPE, "Child process can have only one IPC pipe"_s)); + case ErrorCode::ERR_SOCKET_ALREADY_BOUND: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SOCKET_ALREADY_BOUND, "Socket is already bound"_s)); + case ErrorCode::ERR_SOCKET_BAD_BUFFER_SIZE: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SOCKET_BAD_BUFFER_SIZE, "Buffer size must be a positive integer"_s)); + case ErrorCode::ERR_SOCKET_DGRAM_IS_CONNECTED: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SOCKET_DGRAM_IS_CONNECTED, "Already connected"_s)); + case ErrorCode::ERR_SOCKET_DGRAM_NOT_CONNECTED: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SOCKET_DGRAM_NOT_CONNECTED, "Not connected"_s)); + case ErrorCode::ERR_SOCKET_DGRAM_NOT_RUNNING: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_SOCKET_DGRAM_NOT_RUNNING, "Not running"_s)); + case ErrorCode::ERR_INVALID_CURSOR_POS: + return JSC::JSValue::encode(createError(globalObject, ErrorCode::ERR_INVALID_CURSOR_POS, "Cannot set cursor row without setting its column"_s)); + default: { break; } diff --git a/src/bun.js/bindings/ErrorCode.h b/src/bun.js/bindings/ErrorCode.h index 9c288f9b06..d06bb8e4a2 100644 --- a/src/bun.js/bindings/ErrorCode.h +++ b/src/bun.js/bindings/ErrorCode.h @@ -55,18 +55,11 @@ JSC::JSValue toJS(JSC::JSGlobalObject*, ErrorCode); JSObject* createInvalidThisError(JSGlobalObject* globalObject, 
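
The switch above folds the former one-off host functions (their deletions follow) into jsFunctionMakeErrorWithCode, which is why EXPECT_ARG_COUNT drops from 2 to 1: fixed-message codes need only the code argument. The messages themselves are unchanged; for example, disconnecting without an IPC channel (per the earlier Bun__Process__disconnect hunk) should still surface as:

    process.once("error", (e: any) => {
      // expected: "ERR_IPC_DISCONNECTED" "IPC channel is already disconnected"
      console.log(e.code, e.message);
    });
    (process as any).disconnect?.(); // no IPC channel: emits the fixed-message error
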
JSValue thisValue, const ASCIILiteral typeName); JSObject* createInvalidThisError(JSGlobalObject* globalObject, const String& message); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_INVALID_ARG_TYPE); JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_OUT_OF_RANGE); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_IPC_DISCONNECTED); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_SERVER_NOT_RUNNING); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_IPC_CHANNEL_CLOSED); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_SOCKET_BAD_TYPE); JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_INVALID_PROTOCOL); JSC_DECLARE_HOST_FUNCTION(jsFunctionMakeErrorWithCode); JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_BROTLI_INVALID_PARAM); JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_BUFFER_TOO_LARGE); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_ZLIB_INITIALIZATION_FAILED); -JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_BUFFER_OUT_OF_BOUNDS); JSC_DECLARE_HOST_FUNCTION(jsFunction_ERR_UNHANDLED_ERROR); enum Bound { @@ -84,6 +77,7 @@ JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObjec JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue arg_name_val, const WTF::String& msg, JSC::JSValue actual); JSC::EncodedJSValue OUT_OF_RANGE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, const WTF::String& arg_name_val, const WTF::String& msg, JSC::JSValue actual); JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral name, JSC::JSValue value, const WTF::String& reason = "is invalid"_s); +JSC::EncodedJSValue INVALID_ARG_VALUE_RangeError(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, WTF::ASCIILiteral name, JSC::JSValue value, const WTF::String& reason = "is invalid"_s); JSC::EncodedJSValue INVALID_ARG_VALUE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue name, JSC::JSValue value, const WTF::String& reason = "is invalid"_s); JSC::EncodedJSValue UNKNOWN_ENCODING(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, const WTF::StringView encoding); JSC::EncodedJSValue INVALID_STATE(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, const WTF::String& statemsg); @@ -91,6 +85,9 @@ JSC::EncodedJSValue STRING_TOO_LONG(JSC::ThrowScope& throwScope, JSC::JSGlobalOb JSC::EncodedJSValue BUFFER_OUT_OF_BOUNDS(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject); JSC::EncodedJSValue UNKNOWN_SIGNAL(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue signal, bool triedUppercase = false); JSC::EncodedJSValue SOCKET_BAD_PORT(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue name, JSC::JSValue port, bool allowZero); +JSC::EncodedJSValue UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject); +JSC::EncodedJSValue ASSERTION(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, JSC::JSValue msg); +JSC::EncodedJSValue ASSERTION(JSC::ThrowScope& throwScope, JSC::JSGlobalObject* globalObject, ASCIILiteral msg); } diff --git a/src/bun.js/bindings/ErrorCode.ts b/src/bun.js/bindings/ErrorCode.ts index 14e93b2c85..3c7b465a24 100644 --- a/src/bun.js/bindings/ErrorCode.ts +++ b/src/bun.js/bindings/ErrorCode.ts @@ -52,6 +52,17 @@ export default [ ["ERR_SCRIPT_EXECUTION_TIMEOUT", Error, "Error"], ["ERR_SCRIPT_EXECUTION_INTERRUPTED", Error, "Error"], ["ERR_UNHANDLED_ERROR", Error], + ["ERR_UNKNOWN_CREDENTIAL", Error], + ["ERR_UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET", 
Error], + ["ERR_DLOPEN_FAILED", Error], + ["ERR_ASSERTION", Error], + ["ERR_IPC_ONE_PIPE", Error], + ["ERR_SOCKET_ALREADY_BOUND", Error], + ["ERR_SOCKET_BAD_BUFFER_SIZE", TypeError], + ["ERR_SOCKET_DGRAM_IS_CONNECTED", Error], + ["ERR_SOCKET_DGRAM_NOT_CONNECTED", Error], + ["ERR_SOCKET_DGRAM_NOT_RUNNING", Error], + ["ERR_INVALID_CURSOR_POS", TypeError], // Bun-specific ["ERR_FORMDATA_PARSE_ERROR", TypeError], @@ -130,10 +141,11 @@ export default [ ["ERR_POSTGRES_CONNECTION_TIMEOUT", Error, "PostgresError"], ["ERR_POSTGRES_LIFETIME_TIMEOUT", Error, "PostgresError"], - // AWS - ["ERR_AWS_MISSING_CREDENTIALS", Error], - ["ERR_AWS_INVALID_METHOD", Error], - ["ERR_AWS_INVALID_PATH", Error], - ["ERR_AWS_INVALID_ENDPOINT", Error], - ["ERR_AWS_INVALID_SIGNATURE", Error], + // S3 + ["ERR_S3_MISSING_CREDENTIALS", Error], + ["ERR_S3_INVALID_METHOD", Error], + ["ERR_S3_INVALID_PATH", Error], + ["ERR_S3_INVALID_ENDPOINT", Error], + ["ERR_S3_INVALID_SIGNATURE", Error], + ["ERR_S3_INVALID_SESSION_TOKEN", Error], ] as ErrorCodeMapping; diff --git a/src/bun.js/bindings/ErrorStackTrace.cpp b/src/bun.js/bindings/ErrorStackTrace.cpp index c52da44fc7..19b5ff9ef5 100644 --- a/src/bun.js/bindings/ErrorStackTrace.cpp +++ b/src/bun.js/bindings/ErrorStackTrace.cpp @@ -389,13 +389,7 @@ static bool isVisibleBuiltinFunction(JSC::CodeBlock* codeBlock) } const JSC::SourceCode& source = codeBlock->source(); - if (auto* provider = source.provider()) { - const auto& url = provider->sourceURL(); - if (!url.isEmpty()) { - return true; - } - } - return false; + return !Zig::sourceURL(source).isEmpty(); } JSCStackFrame::JSCStackFrame(JSC::VM& vm, JSC::StackVisitor& visitor) @@ -512,45 +506,33 @@ JSCStackFrame::SourcePositions* JSCStackFrame::getSourcePositions() ALWAYS_INLINE String JSCStackFrame::retrieveSourceURL() { - static auto sourceURLWasmString = MAKE_STATIC_STRING_IMPL("[wasm code]"); - static auto sourceURLNativeString = MAKE_STATIC_STRING_IMPL("[native code]"); + static const auto sourceURLWasmString = MAKE_STATIC_STRING_IMPL("[wasm code]"); if (m_isWasmFrame) { return String(sourceURLWasmString); } + auto url = Zig::sourceURL(m_codeBlock); + if (!url.isEmpty()) { + return url; + } + if (m_callee && m_callee->isObject()) { if (auto* jsFunction = jsDynamicCast(m_callee)) { - if (auto* executable = jsFunction->executable()) { - if (!executable->isHostFunction()) { - auto* jsExectuable = jsFunction->jsExecutable(); - if (jsExectuable) { - const auto* sourceProvider = jsExectuable->source().provider(); - if (sourceProvider) { - return sourceProvider->sourceURL(); - } - } - } + WTF::String url = Zig::sourceURL(m_vm, jsFunction); + if (!url.isEmpty()) { + return url; } } } - if (!m_codeBlock) { - return String(sourceURLNativeString); - } - - auto* provider = m_codeBlock->source().provider(); - if (provider) { - return provider->sourceURL(); - } - return String(); } ALWAYS_INLINE String JSCStackFrame::retrieveFunctionName() { - static auto functionNameModuleCodeString = MAKE_STATIC_STRING_IMPL("module code"); - static auto functionNameGlobalCodeString = MAKE_STATIC_STRING_IMPL("global code"); + static const auto functionNameModuleCodeString = MAKE_STATIC_STRING_IMPL("module code"); + static const auto functionNameGlobalCodeString = MAKE_STATIC_STRING_IMPL("global code"); if (m_isWasmFrame) { return JSC::Wasm::makeString(m_wasmFunctionIndexOrName); @@ -618,4 +600,98 @@ bool JSCStackFrame::calculateSourcePositions() return true; } +String sourceURL(const JSC::SourceOrigin& origin) +{ + if (origin.isNull()) { + return 
String(); + } + + return origin.string(); +} + +String sourceURL(JSC::SourceProvider* sourceProvider) +{ + if (UNLIKELY(!sourceProvider)) { + return String(); + } + + String url = sourceProvider->sourceURLDirective(); + if (!url.isEmpty()) { + return url; + } + + url = sourceProvider->sourceURL(); + if (!url.isEmpty()) { + return url; + } + + const auto& origin = sourceProvider->sourceOrigin(); + return sourceURL(origin); +} + +String sourceURL(const JSC::SourceCode& sourceCode) +{ + return sourceURL(sourceCode.provider()); +} + +String sourceURL(JSC::CodeBlock* codeBlock) +{ + if (UNLIKELY(!codeBlock)) { + return String(); + } + + if (!codeBlock->ownerExecutable()) { + return String(); + } + + const auto& source = codeBlock->source(); + return sourceURL(source); +} + +String sourceURL(JSC::VM& vm, JSC::StackFrame& frame) +{ + if (frame.isWasmFrame()) { + return "[wasm code]"_s; + } + + if (UNLIKELY(!frame.codeBlock())) { + return "[native code]"_s; + } + + return sourceURL(frame.codeBlock()); +} + +String sourceURL(JSC::StackVisitor& visitor) +{ + switch (visitor->codeType()) { + case JSC::StackVisitor::Frame::Eval: + case JSC::StackVisitor::Frame::Module: + case JSC::StackVisitor::Frame::Function: + case JSC::StackVisitor::Frame::Global: { + return sourceURL(visitor->codeBlock()); + } + case JSC::StackVisitor::Frame::Native: + return "[native code]"_s; + case JSC::StackVisitor::Frame::Wasm: + return "[wasm code]"_s; + } + + RELEASE_ASSERT_NOT_REACHED(); +} + +String sourceURL(JSC::VM& vm, JSC::JSFunction* function) +{ + auto* executable = function->executable(); + if (!executable || executable->isHostFunction()) { + return String(); + } + + auto* jsExecutable = function->jsExecutable(); + if (!jsExecutable) { + return String(); + } + + return Zig::sourceURL(jsExecutable->source()); +} + } diff --git a/src/bun.js/bindings/ErrorStackTrace.h b/src/bun.js/bindings/ErrorStackTrace.h index 8939059c93..9213ef1d88 100644 --- a/src/bun.js/bindings/ErrorStackTrace.h +++ b/src/bun.js/bindings/ErrorStackTrace.h @@ -213,4 +213,13 @@ private: bool isImplementationVisibilityPrivate(JSC::StackVisitor& visitor); bool isImplementationVisibilityPrivate(const JSC::StackFrame& frame); + +String sourceURL(const JSC::SourceOrigin& origin); +String sourceURL(JSC::SourceProvider* sourceProvider); +String sourceURL(const JSC::SourceCode& sourceCode); +String sourceURL(JSC::CodeBlock* codeBlock); +String sourceURL(JSC::VM& vm, JSC::StackFrame& frame); +String sourceURL(JSC::StackVisitor& visitor); +String sourceURL(JSC::VM& vm, JSC::JSFunction* function); + } diff --git a/src/bun.js/bindings/ImportMetaObject.cpp b/src/bun.js/bindings/ImportMetaObject.cpp index 561ae2d213..0d922f5577 100644 --- a/src/bun.js/bindings/ImportMetaObject.cpp +++ b/src/bun.js/bindings/ImportMetaObject.cpp @@ -205,7 +205,6 @@ extern "C" JSC::EncodedJSValue functionImportMeta__resolveSync(JSC::JSGlobalObje JSC::JSValue isESMValue = callFrame->argument(2); if (isESMValue.isBoolean()) { isESM = isESMValue.toBoolean(globalObject); - RETURN_IF_EXCEPTION(scope, {}); } } @@ -223,7 +222,6 @@ extern "C" JSC::EncodedJSValue functionImportMeta__resolveSync(JSC::JSGlobalObje } else if (fromValue.isBoolean()) { isESM = fromValue.toBoolean(globalObject); - RETURN_IF_EXCEPTION(scope, {}); fromValue = JSC::jsUndefined(); } diff --git a/src/bun.js/bindings/InspectorTestReporterAgent.cpp b/src/bun.js/bindings/InspectorTestReporterAgent.cpp index dad53f2b54..00d8bbc7da 100644 --- a/src/bun.js/bindings/InspectorTestReporterAgent.cpp +++ 
b/src/bun.js/bindings/InspectorTestReporterAgent.cpp @@ -132,30 +132,20 @@ void InspectorTestReporterAgent::reportTestFound(JSC::CallFrame* callFrame, int if (visitor->hasLineAndColumnInfo()) { lineColumn = visitor->computeLineAndColumn(); - String sourceURLForFrame = visitor->sourceURL(); + String sourceURLForFrame = Zig::sourceURL(visitor); // Sometimes, the sourceURL is empty. // For example, pages in Next.js. if (sourceURLForFrame.isEmpty()) { + auto* codeBlock = visitor->codeBlock(); + ASSERT(codeBlock); // hasLineAndColumnInfo() checks codeBlock(), so this is safe to access here. - const auto& source = visitor->codeBlock()->source(); + const auto& source = codeBlock->source(); // source.isNull() is true when the SourceProvider is a null pointer. if (!source.isNull()) { auto* provider = source.provider(); - // I'm not 100% sure we should show sourceURLDirective here. - if (!provider->sourceURLDirective().isEmpty()) { - sourceURLForFrame = provider->sourceURLDirective(); - } else if (!provider->sourceURL().isEmpty()) { - sourceURLForFrame = provider->sourceURL(); - } else { - const auto& origin = provider->sourceOrigin(); - if (!origin.isNull()) { - sourceURLForFrame = origin.string(); - } - } - sourceID = provider->asID(); } } diff --git a/src/bun.js/bindings/JSBuffer.cpp b/src/bun.js/bindings/JSBuffer.cpp index 0476e797bb..f3cdd036b8 100644 --- a/src/bun.js/bindings/JSBuffer.cpp +++ b/src/bun.js/bindings/JSBuffer.cpp @@ -425,7 +425,7 @@ static inline JSC::EncodedJSValue jsBufferConstructorFunction_allocUnsafeBody(JS VM& vm = lexicalGlobalObject->vm(); auto throwScope = DECLARE_THROW_SCOPE(vm); JSValue lengthValue = callFrame->argument(0); - Bun::V::validateNumber(throwScope, lexicalGlobalObject, lengthValue, jsString(vm, String("size"_s)), jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); + Bun::V::validateNumber(throwScope, lexicalGlobalObject, lengthValue, "size"_s, jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); RETURN_IF_EXCEPTION(throwScope, {}); size_t length = lengthValue.toLength(lexicalGlobalObject); auto result = allocBufferUnsafe(lexicalGlobalObject, length); @@ -550,7 +550,7 @@ static inline JSC::EncodedJSValue jsBufferConstructorFunction_allocBody(JSC::JSG auto scope = DECLARE_THROW_SCOPE(vm); JSValue lengthValue = callFrame->argument(0); - Bun::V::validateNumber(scope, lexicalGlobalObject, lengthValue, jsString(vm, String("size"_s)), jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); + Bun::V::validateNumber(scope, lexicalGlobalObject, lengthValue, "size"_s, jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); RETURN_IF_EXCEPTION(scope, {}); size_t length = lengthValue.toLength(lexicalGlobalObject); @@ -886,7 +886,7 @@ static inline JSC::EncodedJSValue jsBufferConstructorFunction_copyBytesFromBody( if (!offsetValue.isUndefined() || !lengthValue.isUndefined()) { if (!offsetValue.isUndefined()) { - Bun::V::validateInteger(throwScope, lexicalGlobalObject, offsetValue, jsString(vm, String("offset"_s)), jsNumber(0), jsUndefined()); + Bun::V::validateInteger(throwScope, lexicalGlobalObject, offsetValue, "offset"_s, jsNumber(0), jsUndefined()); RETURN_IF_EXCEPTION(throwScope, {}); offset = offsetValue.asNumber(); if (offset >= viewLength) return JSValue::encode(createEmptyBuffer(lexicalGlobalObject)); @@ -896,7 +896,7 @@ static inline JSC::EncodedJSValue jsBufferConstructorFunction_copyBytesFromBody( double end = 0; if (!lengthValue.isUndefined()) { - Bun::V::validateInteger(throwScope, lexicalGlobalObject, lengthValue, jsString(vm, String("length"_s)), jsNumber(0), jsUndefined()); 
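
The JSBuffer churn below is mechanical: the validators apparently gained overloads taking string literals ("size"_s, "offset"_s, and so on), so call sites stop allocating a JSString per check. The user-facing messages should be unchanged, e.g.:

    try {
      Buffer.alloc(-1);
    } catch (e: any) {
      // RangeError [ERR_OUT_OF_RANGE]: The value of "size" is out of range. ...
      console.log(e.code, e.message.includes('"size"')); // "ERR_OUT_OF_RANGE" true
    }
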
+ Bun::V::validateInteger(throwScope, lexicalGlobalObject, lengthValue, "length"_s, jsNumber(0), jsUndefined()); RETURN_IF_EXCEPTION(throwScope, {}); length = lengthValue.asNumber(); end = offset + length; @@ -998,7 +998,7 @@ static inline JSC::EncodedJSValue jsBufferPrototypeFunction_compareBody(JSC::JSG default: sourceEndValue = callFrame->uncheckedArgument(4); if (sourceEndValue != jsUndefined()) { - Bun::V::validateInteger(throwScope, lexicalGlobalObject, sourceEndValue, jsString(vm, String("sourceEnd"_s)), jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); + Bun::V::validateInteger(throwScope, lexicalGlobalObject, sourceEndValue, "sourceEnd"_s, jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); RETURN_IF_EXCEPTION(throwScope, {}); sourceEnd = sourceEndValue.asNumber(); } @@ -1007,7 +1007,7 @@ static inline JSC::EncodedJSValue jsBufferPrototypeFunction_compareBody(JSC::JSG case 4: sourceStartValue = callFrame->uncheckedArgument(3); if (sourceStartValue != jsUndefined()) { - Bun::V::validateInteger(throwScope, lexicalGlobalObject, sourceStartValue, jsString(vm, String("sourceStart"_s)), jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); + Bun::V::validateInteger(throwScope, lexicalGlobalObject, sourceStartValue, "sourceStart"_s, jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); RETURN_IF_EXCEPTION(throwScope, {}); sourceStart = sourceStartValue.asNumber(); } @@ -1016,7 +1016,7 @@ static inline JSC::EncodedJSValue jsBufferPrototypeFunction_compareBody(JSC::JSG case 3: targetEndValue = callFrame->uncheckedArgument(2); if (targetEndValue != jsUndefined()) { - Bun::V::validateInteger(throwScope, lexicalGlobalObject, targetEndValue, jsString(vm, String("targetEnd"_s)), jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); + Bun::V::validateInteger(throwScope, lexicalGlobalObject, targetEndValue, "targetEnd"_s, jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); RETURN_IF_EXCEPTION(throwScope, {}); targetEnd = targetEndValue.asNumber(); } @@ -1025,7 +1025,7 @@ static inline JSC::EncodedJSValue jsBufferPrototypeFunction_compareBody(JSC::JSG case 2: targetStartValue = callFrame->uncheckedArgument(1); if (targetStartValue != jsUndefined()) { - Bun::V::validateInteger(throwScope, lexicalGlobalObject, targetStartValue, jsString(vm, String("targetStart"_s)), jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); + Bun::V::validateInteger(throwScope, lexicalGlobalObject, targetStartValue, "targetStart"_s, jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); RETURN_IF_EXCEPTION(throwScope, {}); targetStart = targetStartValue.asNumber(); } @@ -1225,12 +1225,12 @@ static inline JSC::EncodedJSValue jsBufferPrototypeFunction_fillBody(JSC::JSGlob // https://github.com/nodejs/node/blob/v22.9.0/lib/buffer.js#L1066-L1079 // https://github.com/nodejs/node/blob/v22.9.0/lib/buffer.js#L122 if (!offsetValue.isUndefined()) { - Bun::V::validateNumber(scope, lexicalGlobalObject, offsetValue, jsString(vm, String("offset"_s)), jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); + Bun::V::validateNumber(scope, lexicalGlobalObject, offsetValue, "offset"_s, jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); RETURN_IF_EXCEPTION(scope, {}); offset = offsetValue.toLength(lexicalGlobalObject); } if (!endValue.isUndefined()) { - Bun::V::validateNumber(scope, lexicalGlobalObject, endValue, jsString(vm, String("end"_s)), jsNumber(0), jsNumber(limit)); + Bun::V::validateNumber(scope, lexicalGlobalObject, endValue, "end"_s, jsNumber(0), jsNumber(limit)); RETURN_IF_EXCEPTION(scope, {}); end = endValue.toLength(lexicalGlobalObject); } @@ -2373,7 +2373,7 @@ 
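// Illustrative sketch (not part of the patch; plain C++ with hypothetical
// names, not the Bun API): the JSBuffer hunks above switch the validators'
// `name` argument from `jsString(vm, String("size"_s))` to the new
// ASCIILiteral overload ("size"_s), so the parameter name used in error
// messages no longer needs a JSString allocation on every call. The range
// check those validators perform looks roughly like this:
#include <cmath>
#include <optional>
#include <string>

struct RangeError { std::string name; double min, max, value; };

inline std::optional<RangeError> checkIntegerInRange(const char* name, double value, double min, double max)
{
    // Reject non-integers first, then values outside [min, max].
    if (std::fmod(value, 1.0) != 0)
        return RangeError { name, min, max, value };
    if (value < min || value > max)
        return RangeError { name, min, max, value };
    return std::nullopt; // valid
}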
static inline JSC::EncodedJSValue createJSBufferFromJS(JSC::JSGlobalObject* lexi return JSBuffer__bufferFromLength(lexicalGlobalObject, distinguishingArg.asAnyInt()); } else if (distinguishingArg.isNumber()) { JSValue lengthValue = distinguishingArg; - Bun::V::validateNumber(throwScope, lexicalGlobalObject, lengthValue, jsString(vm, String("size"_s)), jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); + Bun::V::validateNumber(throwScope, lexicalGlobalObject, lengthValue, "size"_s, jsNumber(0), jsNumber(Bun::Buffer::kMaxLength)); RETURN_IF_EXCEPTION(throwScope, {}); size_t length = lengthValue.toLength(lexicalGlobalObject); return JSBuffer__bufferFromLength(lexicalGlobalObject, length); diff --git a/src/bun.js/bindings/JSDOMFile.cpp b/src/bun.js/bindings/JSDOMFile.cpp index 6b6f980062..c67cf8f62f 100644 --- a/src/bun.js/bindings/JSDOMFile.cpp +++ b/src/bun.js/bindings/JSDOMFile.cpp @@ -42,7 +42,7 @@ public: static JSDOMFile* create(JSC::VM& vm, JSGlobalObject* globalObject) { - auto* zigGlobal = reinterpret_cast(globalObject); + auto* zigGlobal = defaultGlobalObject(globalObject); auto structure = createStructure(vm, globalObject, zigGlobal->functionPrototype()); auto* object = new (NotNull, JSC::allocateCell(vm)) JSDOMFile(vm, structure); object->finishCreation(vm); @@ -65,7 +65,7 @@ public: static JSC_HOST_CALL_ATTRIBUTES JSC::EncodedJSValue construct(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame) { - Zig::GlobalObject* globalObject = reinterpret_cast(lexicalGlobalObject); + auto* globalObject = defaultGlobalObject(lexicalGlobalObject); JSC::VM& vm = globalObject->vm(); JSObject* newTarget = asObject(callFrame->newTarget()); auto* constructor = globalObject->JSDOMFileConstructor(); @@ -75,15 +75,15 @@ public: auto* functionGlobalObject = reinterpret_cast( // ShadowRealm functions belong to a different global object. 
- getFunctionRealm(globalObject, newTarget)); + getFunctionRealm(lexicalGlobalObject, newTarget)); RETURN_IF_EXCEPTION(scope, {}); structure = InternalFunction::createSubclassStructure( - globalObject, + lexicalGlobalObject, newTarget, functionGlobalObject->JSBlobStructure()); } - void* ptr = JSDOMFile__construct(globalObject, callFrame); + void* ptr = JSDOMFile__construct(lexicalGlobalObject, callFrame); if (UNLIKELY(!ptr)) { return JSValue::encode(JSC::jsUndefined()); diff --git a/src/bun.js/bindings/JSMockFunction.cpp b/src/bun.js/bindings/JSMockFunction.cpp index 5140505d04..0facb458fd 100644 --- a/src/bun.js/bindings/JSMockFunction.cpp +++ b/src/bun.js/bindings/JSMockFunction.cpp @@ -1122,7 +1122,6 @@ JSC_DEFINE_HOST_FUNCTION(jsMockFunctionMockName, (JSC::JSGlobalObject * globalOb // https://github.com/jestjs/jest/blob/bd1c6db7c15c23788ca3e09c919138e48dd3b28a/packages/jest-mock/src/index.ts#L849-L856 if (callframe->argument(0).toBoolean(globalObject)) { - RETURN_IF_EXCEPTION(scope, {}); WTF::String name = callframe->argument(0).toWTFString(globalObject); RETURN_IF_EXCEPTION(scope, {}); thisObject->setName(name); diff --git a/src/bun.js/bindings/JSPropertyIterator.zig b/src/bun.js/bindings/JSPropertyIterator.zig index bd55ea078c..353d89d83a 100644 --- a/src/bun.js/bindings/JSPropertyIterator.zig +++ b/src/bun.js/bindings/JSPropertyIterator.zig @@ -41,13 +41,19 @@ pub fn JSPropertyIterator(comptime options: JSPropertyIteratorOptions) type { this.* = undefined; } - pub fn init(globalObject: *JSC.JSGlobalObject, object: JSC.JSValue) @This() { + pub fn init(globalObject: *JSC.JSGlobalObject, object: JSC.JSValue) bun.JSError!@This() { var iter = @This(){ .object = object.asCell(), .globalObject = globalObject, }; iter.impl = Bun__JSPropertyIterator__create(globalObject, object, &iter.len, options.own_properties_only, options.only_non_index_properties); + if (globalObject.hasException()) { + return error.JSError; + } + if (iter.len > 0) { + bun.debugAssert(iter.impl != null); + } return iter; } @@ -58,39 +64,48 @@ pub fn JSPropertyIterator(comptime options: JSPropertyIteratorOptions) type { } /// The bun.String returned has not incremented it's reference count. - pub fn next(this: *@This()) ?bun.String { - const i: usize = this.iter_i; - if (i >= this.len) { + pub fn next(this: *@This()) !?bun.String { + // Reuse stack space. + while (true) { + const i: usize = this.iter_i; + if (i >= this.len) { + this.i = this.iter_i; + return null; + } + this.i = this.iter_i; - return null; - } - - this.i = this.iter_i; - this.iter_i += 1; - var name = bun.String.dead; - if (comptime options.include_value) { - const FnToUse = if (options.observable) Bun__JSPropertyIterator__getNameAndValue else Bun__JSPropertyIterator__getNameAndValueNonObservable; - const current = FnToUse(this.impl, this.globalObject, this.object, &name, i); - if (current == .zero) { - return this.next(); + this.iter_i += 1; + var name = bun.String.dead; + if (comptime options.include_value) { + const FnToUse = if (options.observable) Bun__JSPropertyIterator__getNameAndValue else Bun__JSPropertyIterator__getNameAndValueNonObservable; + const current = FnToUse(this.impl, this.globalObject, this.object, &name, i); + if (current == .zero) { + if (this.globalObject.hasException()) { + return error.JSError; + } + continue; + } + current.ensureStillAlive(); + this.value = current; + } else { + // Exception check is unnecessary here because it won't throw. 
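// Illustrative sketch (not part of the patch; plain C++ with hypothetical
// types): the JSPropertyIterator.zig hunk above rewrites `next()` from
// self-recursion into a `while (true)` loop, so skipping dead or empty
// property names costs a loop iteration instead of a stack frame, and a
// pending VM exception is surfaced as `error.JSError` instead of being
// silently swallowed. The same control-flow shape:
#include <optional>
#include <string>
#include <vector>

struct PropertyIter {
    std::vector<std::string> names;
    size_t i = 0;

    // Loop, don't recurse: every skipped entry is one more iteration,
    // never one more stack frame.
    std::optional<std::string> next()
    {
        while (i < names.size()) {
            std::string name = names[i++];
            if (name.empty())
                continue; // mirrors the skip_empty_name option
            return name;
        }
        return std::nullopt; // iteration finished
    }
};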
+ Bun__JSPropertyIterator__getName(this.impl, &name, i); } - current.ensureStillAlive(); - this.value = current; - } else { - Bun__JSPropertyIterator__getName(this.impl, &name, i); - } - if (name.tag == .Dead) { - return this.next(); - } - - if (comptime options.skip_empty_name) { - if (name.isEmpty()) { - return this.next(); + if (name.tag == .Dead) { + continue; } + + if (comptime options.skip_empty_name) { + if (name.isEmpty()) { + continue; + } + } + + return name; } - return name; + unreachable; } /// "code" is not always an own property, and we want to get it without risking exceptions. diff --git a/src/bun.js/bindings/JSS3File.cpp b/src/bun.js/bindings/JSS3File.cpp index 418c449f57..7e2d2309b5 100644 --- a/src/bun.js/bindings/JSS3File.cpp +++ b/src/bun.js/bindings/JSS3File.cpp @@ -1,125 +1,206 @@ + #include "root.h" + +#include "ZigGlobalObject.h" #include "ZigGeneratedClasses.h" -#include + +#include "JavaScriptCore/JSType.h" +#include "JavaScriptCore/JSObject.h" +#include "JavaScriptCore/JSGlobalObject.h" #include +#include #include -#include "JSS3File.h" +#include #include "JavaScriptCore/JSCJSValue.h" +#include "ErrorCode.h" +#include "JSS3File.h" + +namespace Bun { using namespace JSC; +using namespace WebCore; -extern "C" SYSV_ABI void* JSS3File__construct(JSC::JSGlobalObject*, JSC::CallFrame* callframe); -extern "C" SYSV_ABI bool JSS3File__hasInstance(EncodedJSValue, JSC::JSGlobalObject*, EncodedJSValue); - +// External C functions declarations extern "C" { +SYSV_ABI void* JSS3File__construct(JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3File__presign(void* ptr, JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3File__stat(void* ptr, JSC::JSGlobalObject*, JSC::CallFrame* callframe); +SYSV_ABI EncodedJSValue JSS3File__bucket(void* ptr, JSC::JSGlobalObject*); +SYSV_ABI bool JSS3File__hasInstance(EncodedJSValue, JSC::JSGlobalObject*, EncodedJSValue); +} -JSC::EncodedJSValue BUN__createJSS3FileConstructor(JSGlobalObject* lexicalGlobalObject) +// Forward declarations +JSC_DECLARE_HOST_FUNCTION(functionS3File_presign); +JSC_DECLARE_HOST_FUNCTION(functionS3File_stat); +static JSC_DECLARE_CUSTOM_GETTER(getterS3File_bucket); +static JSC_DEFINE_CUSTOM_GETTER(getterS3File_bucket, (JSC::JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, JSC::PropertyName)) { - Zig::GlobalObject* globalObject = reinterpret_cast(lexicalGlobalObject); + JSC::VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); - return JSValue::encode(globalObject->JSS3FileConstructor()); + auto* thisObject = jsDynamicCast(JSValue::decode(thisValue)); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3File instance"_s); + return {}; + } + + return JSS3File__bucket(thisObject->wrapped(), globalObject); } -} - -// TODO: make this inehrit from JSBlob instead of InternalFunction -// That will let us remove this hack for [Symbol.hasInstance] and fix the prototype chain. 
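// Illustrative sketch (not part of the patch; plain C++ with hypothetical
// types): the JSS3File rewrite in this file replaces the InternalFunction
// constructor hack with a real JSBlob subclass plus its own prototype, so
// `instanceof` and the prototype chain behave normally. Each new host
// function and getter starts with the same brand check seen in
// getterS3File_bucket above, roughly:
#include <stdexcept>

struct Blob { virtual ~Blob() = default; };
struct S3File : Blob { const char* bucket = "example-bucket"; };

inline const char* s3FileBucket(Blob* receiver)
{
    // jsDynamicCast-style brand check: reject receivers that are not S3File,
    // mirroring the ERR_INVALID_THIS guard in the real getter.
    auto* file = dynamic_cast<S3File*>(receiver);
    if (!file)
        throw std::invalid_argument("Expected a S3File instance");
    return file->bucket;
}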
-class JSS3File : public JSC::InternalFunction { - using Base = JSC::InternalFunction; - +static const HashTableValue JSS3FilePrototypeTableValues[] = { + { "presign"_s, static_cast(PropertyAttribute::Function | PropertyAttribute::ReadOnly), NoIntrinsic, { HashTableValue::NativeFunctionType, functionS3File_presign, 1 } }, + { "stat"_s, static_cast(PropertyAttribute::Function | PropertyAttribute::ReadOnly), NoIntrinsic, { HashTableValue::NativeFunctionType, functionS3File_stat, 1 } }, + { "bucket"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor | PropertyAttribute::DOMAttribute), NoIntrinsic, { HashTableValue::GetterSetterType, getterS3File_bucket, 0 } }, +}; +class JSS3FilePrototype final : public WebCore::JSBlobPrototype { public: - JSS3File(JSC::VM& vm, JSC::Structure* structure) - : Base(vm, structure, call, construct) + using Base = WebCore::JSBlobPrototype; + static constexpr unsigned StructureFlags = Base::StructureFlags; + + static JSS3FilePrototype* create( + JSC::VM& vm, + JSC::JSGlobalObject* globalObject, + JSC::Structure* structure) { + JSS3FilePrototype* prototype = new (NotNull, JSC::allocateCell(vm)) JSS3FilePrototype(vm, globalObject, structure); + prototype->finishCreation(vm, globalObject); + return prototype; + } + + static JSC::Structure* createStructure( + JSC::VM& vm, + JSC::JSGlobalObject* globalObject, + JSC::JSValue prototype) + { + auto* structure = JSC::Structure::create(vm, globalObject, prototype, TypeInfo(JSC::ObjectType, StructureFlags), info()); + structure->setMayBePrototype(true); + return structure; } DECLARE_INFO; - static constexpr unsigned StructureFlags = (Base::StructureFlags & ~ImplementsDefaultHasInstance) | ImplementsHasInstance; - template static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { - return &vm.internalFunctionSpace(); - } - static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) - { - return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(InternalFunctionType, StructureFlags), info()); + STATIC_ASSERT_ISO_SUBSPACE_SHARABLE(JSS3FilePrototype, Base); + return &vm.plainObjectSpace(); } - void finishCreation(JSC::VM& vm) +protected: + JSS3FilePrototype(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) + : Base(vm, globalObject, structure) { - Base::finishCreation(vm, 2, "S3"_s); } - static JSS3File* create(JSC::VM& vm, JSGlobalObject* globalObject) + void finishCreation(JSC::VM& vm, JSC::JSGlobalObject* globalObject) { - auto* zigGlobal = reinterpret_cast(globalObject); - auto structure = createStructure(vm, globalObject, zigGlobal->functionPrototype()); - auto* object = new (NotNull, JSC::allocateCell(vm)) JSS3File(vm, structure); - object->finishCreation(vm); + Base::finishCreation(vm, globalObject); + ASSERT(inherits(info())); + reifyStaticProperties(vm, JSS3File::info(), JSS3FilePrototypeTableValues, *this); - // This is not quite right. But we'll fix it if someone files an issue about it. 
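// Illustrative sketch (not part of the patch; plain C++ with hypothetical
// names): JSS3FilePrototypeTableValues above is a static table that
// finishCreation() reifies onto the prototype in a single pass, the usual
// JSC pattern for prototypes with a fixed method set. In miniature:
#include <map>
#include <string>

using Method = const char* (*)();
inline const char* presignStub() { return "presign result"; }
inline const char* statStub() { return "stat result"; }

struct TableEntry { const char* name; Method fn; };
static constexpr TableEntry kPrototypeTable[] = {
    { "presign", presignStub },
    { "stat", statStub },
};

inline void reifyPrototype(std::map<std::string, Method>& prototype)
{
    // One table, one loop: adding a method is a one-line table edit.
    for (const auto& entry : kPrototypeTable)
        prototype[entry.name] = entry.fn;
}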
- object->putDirect(vm, vm.propertyNames->prototype, zigGlobal->JSBlobPrototype(), JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly | 0); - - return object; - } - - static bool customHasInstance(JSObject* object, JSGlobalObject* globalObject, JSValue value) - { - if (!value.isObject()) - return false; - - // Note: this breaks [Symbol.hasInstance] - // We must do this for now until we update the code generator to export classes - return JSS3File__hasInstance(JSValue::encode(object), globalObject, JSValue::encode(value)); - } - - static JSC_HOST_CALL_ATTRIBUTES JSC::EncodedJSValue construct(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame) - { - Zig::GlobalObject* globalObject = reinterpret_cast(lexicalGlobalObject); - JSC::VM& vm = globalObject->vm(); - JSObject* newTarget = asObject(callFrame->newTarget()); - auto* constructor = globalObject->JSS3FileConstructor(); - - Structure* structure = globalObject->JSBlobStructure(); - if (constructor != newTarget) { - auto scope = DECLARE_THROW_SCOPE(vm); - - auto* functionGlobalObject = reinterpret_cast( - // ShadowRealm functions belong to a different global object. - getFunctionRealm(globalObject, newTarget)); - RETURN_IF_EXCEPTION(scope, {}); - structure = InternalFunction::createSubclassStructure( - globalObject, - newTarget, - functionGlobalObject->JSBlobStructure()); - } - - void* ptr = JSS3File__construct(globalObject, callFrame); - - if (UNLIKELY(!ptr)) { - return JSValue::encode(JSC::jsUndefined()); - } - - return JSValue::encode( - WebCore::JSBlob::create(vm, globalObject, structure, ptr)); - } - - static JSC_HOST_CALL_ATTRIBUTES EncodedJSValue call(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame) - { - auto scope = DECLARE_THROW_SCOPE(lexicalGlobalObject->vm()); - throwTypeError(lexicalGlobalObject, scope, "Class constructor S3 cannot be invoked without 'new'"_s); - return {}; + this->putDirect(vm, vm.propertyNames->toStringTagSymbol, jsOwnedString(vm, "S3File"_s), 0); } }; -const JSC::ClassInfo JSS3File::s_info = { "S3"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSS3File) }; - -namespace Bun { - -JSC::JSObject* createJSS3FileConstructor(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +// Implementation of JSS3File methods +void JSS3File::destroy(JSCell* cell) { - return JSS3File::create(vm, globalObject); + static_cast(cell)->JSS3File::~JSS3File(); +} + +JSS3File::~JSS3File() +{ + // Base class destructor will be called automatically +} + +JSS3File* JSS3File::create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, void* ptr) +{ + JSS3File* thisObject = new (NotNull, JSC::allocateCell(vm)) JSS3File(vm, structure, ptr); + thisObject->finishCreation(vm); + return thisObject; +} + +JSValue constructS3FileInternal(JSC::JSGlobalObject* lexicalGlobalObject, void* ptr) +{ + ASSERT(ptr); + JSC::VM& vm = lexicalGlobalObject->vm(); + + auto* globalObject = defaultGlobalObject(lexicalGlobalObject); + auto* structure = globalObject->m_JSS3FileStructure.getInitializedOnMainThread(lexicalGlobalObject); + return JSS3File::create(vm, globalObject, structure, ptr); +} + +JSValue constructS3File(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callframe) +{ + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + void* ptr = JSS3File__construct(globalObject, callframe); + RETURN_IF_EXCEPTION(scope, {}); + ASSERT(ptr); + + return constructS3FileInternal(globalObject, ptr); +} + +JSC::Structure* 
JSS3File::createStructure(JSC::JSGlobalObject* globalObject) +{ + auto& vm = globalObject->vm(); + + JSC::JSObject* superPrototype = defaultGlobalObject(globalObject)->JSBlobPrototype(); + auto* protoStructure = JSS3FilePrototype::createStructure(vm, globalObject, superPrototype); + auto* prototype = JSS3FilePrototype::create(vm, globalObject, protoStructure); + return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(static_cast(0b11101110), StructureFlags), info(), NonArray); +} + +static bool customHasInstance(JSObject* object, JSGlobalObject* globalObject, JSValue value) +{ + if (!value.isObject()) + return false; + + return JSS3File__hasInstance(JSValue::encode(object), globalObject, JSValue::encode(value)); +} + +Structure* createJSS3FileStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +{ + return JSS3File::createStructure(globalObject); +} + +JSC_DEFINE_HOST_FUNCTION(functionS3File_presign, (JSGlobalObject * globalObject, CallFrame* callframe)) +{ + auto* thisObject = jsDynamicCast(callframe->thisValue()); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3File instance"_s); + return {}; + } + + return JSS3File__presign(thisObject->wrapped(), globalObject, callframe); +} + +JSC_DEFINE_HOST_FUNCTION(functionS3File_stat, (JSGlobalObject * globalObject, CallFrame* callframe)) +{ + auto* thisObject = jsDynamicCast(callframe->thisValue()); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + if (!thisObject) { + Bun::throwError(globalObject, scope, Bun::ErrorCode::ERR_INVALID_THIS, "Expected a S3File instance"_s); + return {}; + } + return JSS3File__stat(thisObject->wrapped(), globalObject, callframe); +} + +const JSC::ClassInfo JSS3FilePrototype::s_info = { "S3File"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSS3FilePrototype) }; +const JSC::ClassInfo JSS3File::s_info = { "S3File"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSS3File) }; + +extern "C" { +SYSV_ABI EncodedJSValue BUN__createJSS3File(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callframe) +{ + return JSValue::encode(constructS3File(globalObject, callframe)); +}; + +SYSV_ABI EncodedJSValue BUN__createJSS3FileUnsafely(JSC::JSGlobalObject* globalObject, void* ptr) +{ + return JSValue::encode(constructS3FileInternal(globalObject, ptr)); +}; } } diff --git a/src/bun.js/bindings/JSS3File.h b/src/bun.js/bindings/JSS3File.h index 63b8170b06..fab0927efb 100644 --- a/src/bun.js/bindings/JSS3File.h +++ b/src/bun.js/bindings/JSS3File.h @@ -1,7 +1,41 @@ #pragma once -#include "root.h" +namespace Zig { +class GlobalObject; +} namespace Bun { -JSC::JSObject* createJSS3FileConstructor(JSC::VM&, JSC::JSGlobalObject*); -} +using namespace JSC; + +class JSS3File : public WebCore::JSBlob { + using Base = WebCore::JSBlob; + +public: + static constexpr bool needsDestruction = true; + static constexpr unsigned StructureFlags = Base::StructureFlags; + + JSS3File(JSC::VM& vm, Structure* structure, void* ptr) + : Base(vm, structure, ptr) + { + } + DECLARE_INFO; + + template static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) + { + if constexpr (mode == JSC::SubspaceAccess::Concurrently) + return nullptr; + return WebCore::JSBlob::subspaceFor(vm); + } + + static void destroy(JSCell* cell); + ~JSS3File(); + + static JSS3File* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, void* ptr); + static 
JSC::Structure* createStructure(JSC::JSGlobalObject* globalObject); +}; + +// Constructor helper +JSValue constructS3File(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callframe); +Structure* createJSS3FileStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject); + +} // namespace Bun diff --git a/src/bun.js/bindings/NoOpForTesting.cpp b/src/bun.js/bindings/NoOpForTesting.cpp new file mode 100644 index 0000000000..919cd5b5f8 --- /dev/null +++ b/src/bun.js/bindings/NoOpForTesting.cpp @@ -0,0 +1,47 @@ + + +#include "root.h" + +#include "JavaScriptCore/CustomGetterSetter.h" +#include "JavaScriptCore/ObjectConstructor.h" +#include "JavaScriptCore/JSObject.h" +#include + +namespace Bun { +using namespace JSC; + +JSC_DEFINE_HOST_FUNCTION(functionNoop, (JSC::JSGlobalObject*, JSC::CallFrame*)) +{ + return JSC::JSValue::encode(JSC::jsUndefined()); +} + +JSC_DEFINE_HOST_FUNCTION(functionCallback, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +{ + JSObject* callback = jsCast(callFrame->uncheckedArgument(0)); + JSC::CallData callData = JSC::getCallData(callback); + return JSC::JSValue::encode(JSC::profiledCall(globalObject, ProfilingReason::API, callback, callData, JSC::jsUndefined(), JSC::MarkedArgumentBuffer())); +} + +JSC_DEFINE_CUSTOM_GETTER(noop_getter, (JSGlobalObject*, EncodedJSValue, PropertyName)) +{ + return JSC::JSValue::encode(JSC::jsUndefined()); +} + +JSC_DEFINE_CUSTOM_SETTER(noop_setter, + (JSC::JSGlobalObject*, JSC::EncodedJSValue, + JSC::EncodedJSValue, JSC::PropertyName)) +{ + return true; +} + +JSC::JSObject* createNoOpForTesting(JSC::JSGlobalObject* globalObject) +{ + auto& vm = globalObject->vm(); + JSC::JSObject* object = JSC::constructEmptyObject(vm, globalObject->nullPrototypeObjectStructure()); + object->putDirectNativeFunction(vm, globalObject, JSC::Identifier::fromString(vm, String("function"_s)), 0, functionNoop, ImplementationVisibility::Public, JSC::NoIntrinsic, 0); + object->putDirectNativeFunction(vm, globalObject, JSC::Identifier::fromString(vm, String("callback"_s)), 0, functionCallback, ImplementationVisibility::Public, JSC::NoIntrinsic, 0); + object->putDirectCustomAccessor(vm, JSC::Identifier::fromString(vm, String("getterSetter"_s)), JSC::CustomGetterSetter::create(vm, noop_getter, noop_setter), 0); + return object; +} + +} diff --git a/src/bun.js/bindings/NoOpForTesting.h b/src/bun.js/bindings/NoOpForTesting.h new file mode 100644 index 0000000000..e39e84daa8 --- /dev/null +++ b/src/bun.js/bindings/NoOpForTesting.h @@ -0,0 +1,3 @@ +namespace Bun { +JSC::JSObject* createNoOpForTesting(JSC::JSGlobalObject* globalObject); +} diff --git a/src/bun.js/bindings/NodeValidator.cpp b/src/bun.js/bindings/NodeValidator.cpp index c259609ef6..dfd177bc3c 100644 --- a/src/bun.js/bindings/NodeValidator.cpp +++ b/src/bun.js/bindings/NodeValidator.cpp @@ -51,6 +51,24 @@ JSC::EncodedJSValue V::validateInteger(JSC::ThrowScope& scope, JSC::JSGlobalObje return JSValue::encode(jsUndefined()); } +JSC::EncodedJSValue V::validateInteger(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSC::JSValue value, ASCIILiteral name, JSC::JSValue min, JSC::JSValue max) +{ + if (!value.isNumber()) return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, name, "number"_s, value); + if (min.isUndefined()) min = jsDoubleNumber(JSC::minSafeInteger()); + if (max.isUndefined()) max = jsDoubleNumber(JSC::maxSafeInteger()); + + auto value_num = value.asNumber(); + auto min_num = min.toNumber(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto max_num = 
max.toNumber(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + max_num = std::max(min_num, max_num); + + if (std::fmod(value_num, 1.0) != 0) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, "an integer"_s, value); + if (value_num < min_num || value_num > max_num) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, min_num, max_num, value); + + return JSValue::encode(jsUndefined()); +} JSC_DEFINE_HOST_FUNCTION(jsFunction_validateNumber, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { @@ -85,6 +103,28 @@ JSC::EncodedJSValue V::validateNumber(JSC::ThrowScope& scope, JSC::JSGlobalObjec return JSValue::encode(jsUndefined()); } +JSC::EncodedJSValue V::validateNumber(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, ASCIILiteral name, JSValue min, JSValue max) +{ + if (!value.isNumber()) return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, name, "number"_s, value); + + auto value_num = value.asNumber(); + auto min_num = min.toNumber(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto max_num = max.toNumber(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + auto min_isnonnull = !min.isUndefinedOrNull(); + auto max_isnonnull = !max.isUndefinedOrNull(); + + if ((min_isnonnull && value_num < min_num) || (max_isnonnull && value_num > max_num) || ((min_isnonnull || max_isnonnull) && std::isnan(value_num))) { + if (min_isnonnull && max_isnonnull) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, min_num, max_num, value); + if (min_isnonnull) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, min_num, Bun::LOWER, value); + if (max_isnonnull) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, max_num, Bun::UPPER, value); + return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, ""_s, value); + } + + return JSValue::encode(jsUndefined()); +} JSC_DEFINE_HOST_FUNCTION(jsFunction_validateString, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) { @@ -211,8 +251,6 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_validatePort, (JSC::JSGlobalObject * globalO if (allowZero.isUndefined()) allowZero = jsBoolean(true); auto allowZero_b = allowZero.toBoolean(globalObject); - RETURN_IF_EXCEPTION(scope, {}); - if (!port.isNumber() && !port.isString()) return Bun::ERR::SOCKET_BAD_PORT(scope, globalObject, name, port, allowZero_b); if (port.isString()) { @@ -297,6 +335,30 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_validateArray, (JSC::JSGlobalObject * global auto value = callFrame->argument(0); auto name = callFrame->argument(1); auto minLength = callFrame->argument(2); + return V::validateArray(scope, globalObject, value, name, minLength); +} +JSC::EncodedJSValue V::validateArray(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, JSValue name, JSValue minLength) +{ + JSC::VM& vm = globalObject->vm(); + + if (minLength.isUndefined()) minLength = jsNumber(0); + + if (!JSC::isArray(globalObject, value)) return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, name, "Array"_s, value); + + auto length = value.get(globalObject, Identifier::fromString(vm, "length"_s)); + RETURN_IF_EXCEPTION(scope, {}); + auto length_num = length.toNumber(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + auto minLength_num = minLength.toNumber(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + if (length_num < minLength_num) { + return Bun::ERR::INVALID_ARG_VALUE(scope, globalObject, name, value, makeString("must be longer than "_s, minLength_num)); + } + return JSValue::encode(jsUndefined()); +} +JSC::EncodedJSValue 
V::validateArray(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, ASCIILiteral name, JSValue minLength) +{ + JSC::VM& vm = globalObject->vm(); if (minLength.isUndefined()) minLength = jsNumber(0); @@ -348,7 +410,25 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_validateUint32, (JSC::JSGlobalObject * globa auto value = callFrame->argument(0); auto name = callFrame->argument(1); auto positive = callFrame->argument(2); - + return V::validateUint32(scope, globalObject, value, name, positive); +} +JSC::EncodedJSValue V::validateUint32(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, JSValue name, JSValue positive) +{ + if (!value.isNumber()) return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, name, "number"_s, value); + if (positive.isUndefined()) positive = jsBoolean(false); + + auto value_num = value.asNumber(); + if (std::fmod(value_num, 1.0) != 0) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, "an integer"_s, value); + + auto positive_b = positive.toBoolean(globalObject); + auto min = positive_b ? 1 : 0; + auto max = std::numeric_limits().max(); + if (value_num < min || value_num > max) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, min, max, value); + + return JSValue::encode(jsUndefined()); +} +JSC::EncodedJSValue V::validateUint32(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, ASCIILiteral name, JSValue positive) +{ if (!value.isNumber()) return Bun::ERR::INVALID_ARG_TYPE(scope, globalObject, name, "number"_s, value); if (positive.isUndefined()) positive = jsBoolean(false); @@ -356,7 +436,6 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_validateUint32, (JSC::JSGlobalObject * globa if (std::fmod(value_num, 1.0) != 0) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, "an integer"_s, value); auto positive_b = positive.toBoolean(globalObject); - RETURN_IF_EXCEPTION(scope, {}); auto min = positive_b ? 
1 : 0; auto max = std::numeric_limits().max(); if (value_num < min || value_num > max) return Bun::ERR::OUT_OF_RANGE(scope, globalObject, name, min, max, value); @@ -463,4 +542,5 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_validateBuffer, (JSC::JSGlobalObject * globa } return JSValue::encode(jsUndefined()); } + } diff --git a/src/bun.js/bindings/NodeValidator.h b/src/bun.js/bindings/NodeValidator.h index b691c7f5e0..2557c8a422 100644 --- a/src/bun.js/bindings/NodeValidator.h +++ b/src/bun.js/bindings/NodeValidator.h @@ -27,10 +27,16 @@ JSC_DEFINE_HOST_FUNCTION(jsFunction_validateBuffer, (JSC::JSGlobalObject * globa namespace V { JSC::EncodedJSValue validateInteger(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSC::JSValue value, JSC::JSValue name, JSC::JSValue min, JSC::JSValue max); +JSC::EncodedJSValue validateInteger(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSC::JSValue value, ASCIILiteral name, JSC::JSValue min, JSC::JSValue max); JSC::EncodedJSValue validateNumber(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSC::JSValue value, JSC::JSValue name, JSC::JSValue min, JSC::JSValue max); +JSC::EncodedJSValue validateNumber(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, ASCIILiteral name, JSValue min, JSValue max); JSC::EncodedJSValue validateFiniteNumber(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSC::JSValue number, JSC::JSValue name); JSC::EncodedJSValue validateString(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, JSValue name); JSC::EncodedJSValue validateString(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, ASCIILiteral name); +JSC::EncodedJSValue validateArray(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, JSValue name, JSValue minLength); +JSC::EncodedJSValue validateArray(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, ASCIILiteral name, JSValue minLength); +JSC::EncodedJSValue validateUint32(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, JSValue name, JSValue positive); +JSC::EncodedJSValue validateUint32(JSC::ThrowScope& scope, JSC::JSGlobalObject* globalObject, JSValue value, ASCIILiteral name, JSValue positive); } diff --git a/src/bun.js/bindings/OsBinding.cpp b/src/bun.js/bindings/OsBinding.cpp index c994fe8d98..dfe6713d6e 100644 --- a/src/bun.js/bindings/OsBinding.cpp +++ b/src/bun.js/bindings/OsBinding.cpp @@ -14,12 +14,31 @@ extern "C" uint64_t Bun__Os__getFreeMemory(void) vm_statistics_data_t info; mach_msg_type_number_t count = sizeof(info) / sizeof(integer_t); - if (host_statistics(mach_host_self(), HOST_VM_INFO, - (host_info_t)&info, &count) - != KERN_SUCCESS) { + if (host_statistics(mach_host_self(), HOST_VM_INFO, (host_info_t)&info, &count) != KERN_SUCCESS) { return 0; } - return (uint64_t)info.free_count * sysconf(_SC_PAGESIZE); } #endif + +#if OS(LINUX) +#include + +extern "C" uint64_t Bun__Os__getFreeMemory(void) +{ + struct sysinfo info; + if (sysinfo(&info) == 0) { + return info.freeram * info.mem_unit; + } + return 0; +} +#endif + +#if OS(WINDOWS) +extern "C" uint64_t uv_get_available_memory(void); + +extern "C" uint64_t Bun__Os__getFreeMemory(void) +{ + return uv_get_available_memory(); +} +#endif diff --git a/src/bun.js/bindings/S3Error.cpp b/src/bun.js/bindings/S3Error.cpp new file mode 100644 index 0000000000..a3ae91651c --- /dev/null +++ b/src/bun.js/bindings/S3Error.cpp @@ -0,0 +1,63 @@ + +#include "root.h" + +#include +#include +#include 
"ZigGeneratedClasses.h" +#include "S3Error.h" + +namespace Bun { + +typedef struct S3Error { + BunString code; + BunString message; + BunString path; +} S3Error; + +Structure* createS3ErrorStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject) +{ + return JSC::ErrorInstance::createStructure(vm, globalObject, JSC::constructEmptyObject(globalObject, globalObject->errorPrototype())); +} + +extern "C" { +SYSV_ABI JSC::EncodedJSValue S3Error__toErrorInstance(const S3Error* arg0, + JSC::JSGlobalObject* globalObject) +{ + S3Error err = *arg0; + + JSC::VM& vm = globalObject->vm(); + + auto scope = DECLARE_THROW_SCOPE(vm); + JSC::JSValue message = JSC::jsUndefined(); + if (err.message.tag != BunStringTag::Empty) { + message = Bun::toJS(globalObject, err.message); + } + + auto& names = WebCore::builtinNames(vm); + + JSC::JSValue options = JSC::jsUndefined(); + auto prototype = defaultGlobalObject(globalObject)->m_S3ErrorStructure.getInitializedOnMainThread(globalObject); + JSC::JSObject* result = JSC::ErrorInstance::create(globalObject, prototype, message, options); + result->putDirect( + vm, vm.propertyNames->name, + JSC::JSValue(defaultGlobalObject(globalObject)->commonStrings().S3ErrorString(globalObject)), + JSC::PropertyAttribute::DontEnum | 0); + if (err.code.tag != BunStringTag::Empty) { + JSC::JSValue code = Bun::toJS(globalObject, err.code); + result->putDirect(vm, names.codePublicName(), code, + JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::DontEnum | 0); + } + + if (err.path.tag != BunStringTag::Empty) { + JSC::JSValue path = Bun::toJS(globalObject, err.path); + result->putDirect(vm, names.pathPublicName(), path, + JSC::PropertyAttribute::DontDelete | 0); + } + + RETURN_IF_EXCEPTION(scope, {}); + scope.release(); + + return JSC::JSValue::encode(JSC::JSValue(result)); +} +} +} diff --git a/src/bun.js/bindings/S3Error.h b/src/bun.js/bindings/S3Error.h new file mode 100644 index 0000000000..516a9e907b --- /dev/null +++ b/src/bun.js/bindings/S3Error.h @@ -0,0 +1,7 @@ +#pragma once + +namespace Bun { +using namespace JSC; + +Structure* createS3ErrorStructure(VM& vm, JSGlobalObject* globalObject); +} diff --git a/src/bun.js/bindings/SQLClient.cpp b/src/bun.js/bindings/SQLClient.cpp index 514af1b664..2077cb29b5 100644 --- a/src/bun.js/bindings/SQLClient.cpp +++ b/src/bun.js/bindings/SQLClient.cpp @@ -74,8 +74,28 @@ typedef struct DataCell { DataCellTag tag; DataCellValue value; uint8_t freeValue; + uint8_t _indexedColumnFlag; + uint32_t index; + + bool isIndexedColumn() const { return _indexedColumnFlag == 1; } + bool isNamedColumn() const { return _indexedColumnFlag == 0; } + bool isDuplicateColumn() const { return _indexedColumnFlag == 2; } } DataCell; +class BunStructureFlags { +public: + uint32_t flags; + + BunStructureFlags(uint32_t flags) + : flags(flags) + { + } + + bool hasIndexedColumns() const { return flags & (1 << 0); } + bool hasNamedColumns() const { return flags & (1 << 1); } + bool hasDuplicateColumns() const { return flags & (1 << 2); } +}; + static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCell& cell) { switch (cell.tag) { @@ -230,25 +250,79 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel } } -static JSC::JSValue toJS(JSC::Structure* structure, DataCell* cells, unsigned count, JSC::JSGlobalObject* globalObject) +static JSC::JSValue toJS(JSC::Structure* structure, DataCell* cells, unsigned count, JSC::JSGlobalObject* globalObject, Bun::BunStructureFlags flags) { auto& vm = globalObject->vm(); auto* 
object = JSC::constructEmptyObject(vm, structure); auto scope = DECLARE_THROW_SCOPE(vm); - for (unsigned i = 0; i < count; i++) { - auto& cell = cells[i]; - JSValue value = toJS(vm, globalObject, cell); - RETURN_IF_EXCEPTION(scope, {}); - object->putDirectOffset(vm, i, value); + // TODO: once we have more tests for this, let's add another branch for + // "only mixed names and mixed indexed columns, no duplicates" + // then we can remove this sort and instead do two passes. + if (flags.hasIndexedColumns() && flags.hasNamedColumns()) { + // sort the cells by if they're named or indexed, put named first. + // this is to conform to the Structure offsets from earlier. + std::sort(cells, cells + count, [](DataCell& a, DataCell& b) { + return a.isNamedColumn() && !b.isNamedColumn(); + }); } + // Fast path: named columns only, no duplicate columns + if (flags.hasNamedColumns() && !flags.hasDuplicateColumns() && !flags.hasIndexedColumns()) { + for (unsigned i = 0; i < count; i++) { + auto& cell = cells[i]; + JSValue value = toJS(vm, globalObject, cell); + RETURN_IF_EXCEPTION(scope, {}); + ASSERT(!cell.isDuplicateColumn()); + ASSERT(!cell.isIndexedColumn()); + ASSERT(cell.isNamedColumn()); + object->putDirectOffset(vm, i, value); + } + } else if (flags.hasIndexedColumns() && !flags.hasNamedColumns() && !flags.hasDuplicateColumns()) { + for (unsigned i = 0; i < count; i++) { + auto& cell = cells[i]; + JSValue value = toJS(vm, globalObject, cell); + RETURN_IF_EXCEPTION(scope, {}); + ASSERT(!cell.isDuplicateColumn()); + ASSERT(cell.isIndexedColumn()); + ASSERT(!cell.isNamedColumn()); + // cell.index can be > count + // for example: + // select 1 as "8", 2 as "2", 3 as "3" + // -> { "8": 1, "2": 2, "3": 3 } + // 8 > count + object->putDirectIndex(globalObject, cell.index, value); + } + } else { + unsigned structureOffsetIndex = 0; + // slow path: named columns with duplicate columns or indexed columns + for (unsigned i = 0; i < count; i++) { + auto& cell = cells[i]; + if (cell.isIndexedColumn()) { + JSValue value = toJS(vm, globalObject, cell); + RETURN_IF_EXCEPTION(scope, {}); + ASSERT(cell.index < count); + ASSERT(!cell.isNamedColumn()); + ASSERT(!cell.isDuplicateColumn()); + object->putDirectIndex(globalObject, cell.index, value); + } else if (cell.isNamedColumn()) { + JSValue value = toJS(vm, globalObject, cell); + RETURN_IF_EXCEPTION(scope, {}); + ASSERT(!cell.isIndexedColumn()); + ASSERT(!cell.isDuplicateColumn()); + ASSERT(cell.index < count); + object->putDirectOffset(vm, structureOffsetIndex++, value); + } else if (cell.isDuplicateColumn()) { + // skip it!
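// Illustrative sketch (not part of the patch; plain C++ with hypothetical
// names): the three branches above dispatch on structure-wide flag bits
// carried in BunStructureFlags. The encoding and the path selection in
// miniature:
#include <cstdint>

struct ColumnFlags {
    uint32_t bits;
    bool hasIndexed() const { return bits & (1u << 0); }
    bool hasNamed() const { return bits & (1u << 1); }
    bool hasDuplicates() const { return bits & (1u << 2); }
};

enum class Path { NamedOnly, IndexedOnly, Slow };

inline Path pickPath(ColumnFlags f)
{
    // The fast paths fire when exactly one kind of column is present;
    // any mix (or any duplicate) falls back to per-cell dispatch, where
    // duplicate columns are skipped entirely.
    if (f.hasNamed() && !f.hasIndexed() && !f.hasDuplicates())
        return Path::NamedOnly;
    if (f.hasIndexed() && !f.hasNamed() && !f.hasDuplicates())
        return Path::IndexedOnly;
    return Path::Slow;
}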
+ } + } + } return object; } -static JSC::JSValue toJS(JSC::JSArray* array, JSC::Structure* structure, DataCell* cells, unsigned count, JSC::JSGlobalObject* globalObject) +static JSC::JSValue toJS(JSC::JSArray* array, JSC::Structure* structure, DataCell* cells, unsigned count, JSC::JSGlobalObject* globalObject, Bun::BunStructureFlags flags) { - JSValue value = toJS(structure, cells, count, globalObject); + JSValue value = toJS(structure, cells, count, globalObject, flags); if (value.isEmpty()) return {}; @@ -268,20 +342,44 @@ static JSC::JSValue toJS(JSC::JSArray* array, JSC::Structure* structure, DataCel extern "C" EncodedJSValue JSC__constructObjectFromDataCell( JSC::JSGlobalObject* globalObject, EncodedJSValue encodedArrayValue, - EncodedJSValue encodedStructureValue, DataCell* cells, unsigned count) + EncodedJSValue encodedStructureValue, DataCell* cells, unsigned count, unsigned flags) { JSValue arrayValue = JSValue::decode(encodedArrayValue); JSValue structureValue = JSValue::decode(encodedStructureValue); auto* array = arrayValue ? jsDynamicCast(arrayValue) : nullptr; auto* structure = jsDynamicCast(structureValue); - return JSValue::encode(toJS(array, structure, cells, count, globalObject)); + return JSValue::encode(toJS(array, structure, cells, count, globalObject, Bun::BunStructureFlags(flags))); } -extern "C" EncodedJSValue JSC__createStructure(JSC::JSGlobalObject* globalObject, JSC::JSCell* owner, unsigned int inlineCapacity, BunString* names) +typedef struct ExternColumnIdentifier { + uint8_t tag; + union { + uint32_t index; + BunString name; + }; + + bool isIndexedColumn() const { return tag == 1; } + bool isNamedColumn() const { return tag == 2; } + bool isDuplicateColumn() const { return tag == 0; } +} ExternColumnIdentifier; + +extern "C" EncodedJSValue JSC__createStructure(JSC::JSGlobalObject* globalObject, JSC::JSCell* owner, unsigned int inlineCapacity, ExternColumnIdentifier* namesPtr) { auto& vm = globalObject->vm(); - Structure* structure = globalObject->structureCache().emptyObjectStructureForPrototype(globalObject, globalObject->objectPrototype(), inlineCapacity); + + PropertyNameArray propertyNames(vm, PropertyNameMode::Strings, PrivateSymbolMode::Exclude); + std::span names(namesPtr, inlineCapacity); + unsigned nonDuplicateCount = 0; + for (unsigned i = 0; i < inlineCapacity; i++) { + ExternColumnIdentifier& name = names[i]; + if (name.isNamedColumn()) { + propertyNames.add(Identifier::fromString(vm, name.name.toWTFString())); + } + nonDuplicateCount += !name.isDuplicateColumn(); + } + + Structure* structure = globalObject->structureCache().emptyObjectStructureForPrototype(globalObject, globalObject->objectPrototype(), std::min(nonDuplicateCount, JSFinalObject::maxInlineCapacity)); if (owner) { vm.writeBarrier(owner, structure); } else { @@ -289,14 +387,15 @@ extern "C" EncodedJSValue JSC__createStructure(JSC::JSGlobalObject* globalObject } ensureStillAliveHere(structure); - PropertyNameArray propertyNames(vm, PropertyNameMode::Strings, PrivateSymbolMode::Exclude); - for (unsigned i = 0; i < inlineCapacity; i++) { - propertyNames.add(Identifier::fromString(vm, names[i].toWTFString())); - } - - PropertyOffset offset = 0; - for (unsigned i = 0; i < inlineCapacity; i++) { - structure = structure->addPropertyTransition(vm, structure, propertyNames[i], 0, offset); + if (names.size() > 0) { + PropertyOffset offset = 0; + unsigned indexInPropertyNamesArray = 0; + for (unsigned i = 0; i < inlineCapacity; i++) { + ExternColumnIdentifier& name = names[i]; + if 
(name.isNamedColumn()) { + structure = structure->addPropertyTransition(vm, structure, propertyNames[indexInPropertyNamesArray++], 0, offset); + } + } } return JSValue::encode(structure); @@ -317,5 +416,4 @@ extern "C" void JSC__putDirectOffset(JSC::VM* vm, JSC::EncodedJSValue object, un { JSValue::decode(object).getObject()->putDirectOffset(*vm, offset, JSValue::decode(value)); } - } diff --git a/src/bun.js/bindings/Sink.h b/src/bun.js/bindings/Sink.h index 0b07ad0f5c..60ded13833 100644 --- a/src/bun.js/bindings/Sink.h +++ b/src/bun.js/bindings/Sink.h @@ -9,7 +9,7 @@ enum SinkID : uint8_t { HTMLRewriterSink = 3, HTTPResponseSink = 4, HTTPSResponseSink = 5, - FetchTaskletChunkedRequestSink = 6, + NetworkSink = 6, }; static constexpr unsigned numberOfSinkIDs diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index e718a28527..c407e2fbe1 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -1,4 +1,5 @@ #include "root.h" + #include "JavaScriptCore/PropertySlot.h" #include "ZigGlobalObject.h" #include "helpers.h" @@ -33,6 +34,7 @@ #include "JavaScriptCore/JSLock.h" #include "JavaScriptCore/JSMap.h" #include "JavaScriptCore/JSMicrotask.h" + #include "JavaScriptCore/JSModuleLoader.h" #include "JavaScriptCore/JSModuleNamespaceObject.h" #include "JavaScriptCore/JSModuleNamespaceObjectInlines.h" @@ -84,7 +86,6 @@ #include "JSDOMConvertUnion.h" #include "JSDOMException.h" #include "JSDOMFile.h" -#include "JSS3File.h" #include "JSDOMFormData.h" #include "JSDOMURL.h" #include "JSEnvironmentVariableMap.h" @@ -158,6 +159,8 @@ #include "JSPerformanceResourceTiming.h" #include "JSPerformanceTiming.h" +#include "JSS3File.h" +#include "S3Error.h" #if ENABLE(REMOTE_INSPECTOR) #include "JavaScriptCore/RemoteInspectorServer.h" #endif @@ -239,6 +242,9 @@ extern "C" void JSCInitialize(const char* envp[], size_t envc, void (*onCrash)(c JSC::Options::useConcurrentJIT() = true; // JSC::Options::useSigillCrashAnalyzer() = true; JSC::Options::useWasm() = true; + // Disable IPInt, the in-place WASM interpreter, by default until it is more stable + // (it breaks pglite as of 2025-01-06) + JSC::Options::useWasmIPInt() = false; JSC::Options::useSourceProviderCache() = true; // JSC::Options::useUnlinkedCodeBlockJettisoning() = false; JSC::Options::exposeInternalModuleLoader() = true; @@ -553,30 +559,7 @@ WTF::String Bun::formatStackTrace( remappedFrame.position.line_zero_based = originalLine.zeroBasedInt(); remappedFrame.position.column_zero_based = originalColumn.zeroBasedInt(); - String sourceURLForFrame = frame.sourceURL(vm); - - // Sometimes, the sourceURL is empty. - // For example, pages in Next.js. - if (sourceURLForFrame.isEmpty()) { - // hasLineAndColumnInfo() checks codeBlock(), so this is safe to access here. - const auto& source = frame.codeBlock()->source(); - - // source.isNull() is true when the SourceProvider is a null pointer. - if (!source.isNull()) { - auto* provider = source.provider(); - // I'm not 100% sure we should show sourceURLDirective here. 
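// Illustrative sketch (not part of the patch; plain C++ with a hypothetical
// provider type): the fallback chain being deleted here, and its two other
// copies elsewhere in this diff, is now centralized in Zig::sourceURL(...).
// The chain itself is simple: first non-empty candidate wins.
#include <string>

struct SourceProviderInfo {
    std::string sourceURLDirective; // e.g. from a //# sourceURL= comment
    std::string sourceURL;
    std::string originString;
};

inline std::string pickSourceURL(const SourceProviderInfo& p)
{
    if (!p.sourceURLDirective.empty())
        return p.sourceURLDirective; // explicit directive takes priority
    if (!p.sourceURL.empty())
        return p.sourceURL; // then the provider's own URL
    return p.originString; // finally the source origin; may be empty
}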
- if (!provider->sourceURLDirective().isEmpty()) { - sourceURLForFrame = provider->sourceURLDirective(); - } else if (!provider->sourceURL().isEmpty()) { - sourceURLForFrame = provider->sourceURL(); - } else { - const auto& origin = provider->sourceOrigin(); - if (!origin.isNull()) { - sourceURLForFrame = origin.string(); - } - } - } - } + String sourceURLForFrame = Zig::sourceURL(vm, frame); bool isDefinitelyNotRunninginNodeVMGlobalObject = (globalObject == lexicalGlobalObject && globalObject); @@ -681,29 +664,46 @@ static JSValue computeErrorInfoWithPrepareStackTrace(JSC::VM& vm, Zig::GlobalObj // We need to sourcemap it if it's a GlobalObject. if (globalObject == lexicalGlobalObject) { - size_t framesCount = stackTrace.size(); - ZigStackFrame remappedFrames[64]; - framesCount = framesCount > 64 ? 64 : framesCount; - for (int i = 0; i < framesCount; i++) { - remappedFrames[i] = {}; - remappedFrames[i].source_url = Bun::toStringRef(lexicalGlobalObject, stackTrace.at(i).sourceURL()); + for (int i = 0; i < stackTrace.size(); i++) { + ZigStackFrame frame = {}; + + String sourceURLForFrame = Zig::sourceURL(vm, stackFrames.at(i)); + if (JSCStackFrame::SourcePositions* sourcePositions = stackTrace.at(i).getSourcePositions()) { - remappedFrames[i].position.line_zero_based = sourcePositions->line.zeroBasedInt(); - remappedFrames[i].position.column_zero_based = sourcePositions->column.zeroBasedInt(); + frame.position.line_zero_based = sourcePositions->line.zeroBasedInt(); + frame.position.column_zero_based = sourcePositions->column.zeroBasedInt(); } else { - remappedFrames[i].position.line_zero_based = -1; - remappedFrames[i].position.column_zero_based = -1; + frame.position.line_zero_based = -1; + frame.position.column_zero_based = -1; + } + + if (!sourceURLForFrame.isEmpty()) { + frame.source_url = Bun::toStringRef(sourceURLForFrame); + + // This ensures the lifetime of the sourceURL is accounted for correctly + Bun__remapStackFramePositions(globalObject, &frame, 1); + + sourceURLForFrame = frame.source_url.toWTFString(); + } + + auto* callsite = jsCast(callSites.at(i)); + + if (!sourceURLForFrame.isEmpty()) + callsite->setSourceURL(vm, jsString(vm, sourceURLForFrame)); + + if (frame.remapped) { + callsite->setLineNumber(frame.position.line()); + callsite->setColumnNumber(frame.position.column()); } } + } else { + // if it's a different JSGlobalObject, let's still give you the sourceURL directive just to be nice. 
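// Illustrative sketch (not part of the patch; plain C++ with hypothetical
// types): the rework above remaps stack frames one at a time instead of
// batching them into a fixed 64-frame stack array, then writes the remapped
// URL and position back onto each call site. Roughly:
#include <string>
#include <vector>

struct FrameInfo {
    std::string url;
    int lineZeroBased = -1;
    int columnZeroBased = -1;
    bool remapped = false;
};

// Stand-in for Bun__remapStackFramePositions: a real build consults source
// maps here; this stub just marks frames that have a URL.
inline void remapFrame(FrameInfo& f) { f.remapped = !f.url.empty(); }

inline void applyToCallSites(std::vector<FrameInfo>& frames)
{
    for (auto& frame : frames) { // no fixed frame cap
        remapFrame(frame);
        if (frame.remapped && frame.lineZeroBased >= 0) {
            // The real code calls setLineNumber()/setColumnNumber() on the
            // CallSite here; the one-based conversion is illustrative.
            int line = frame.lineZeroBased + 1;
            int column = frame.columnZeroBased + 1;
            (void)line;
            (void)column;
        }
    }
}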
+ for (int i = 0; i < stackTrace.size(); i++) { - Bun__remapStackFramePositions(globalObject, remappedFrames, framesCount); - - for (size_t i = 0; i < framesCount; i++) { - JSC::JSValue callSiteValue = callSites.at(i); - if (remappedFrames[i].remapped) { - CallSite* callSite = JSC::jsCast(callSiteValue); - callSite->setColumnNumber(remappedFrames[i].position.column()); - callSite->setLineNumber(remappedFrames[i].position.line()); + String sourceURLForFrame = Zig::sourceURL(vm, stackFrames.at(i)); + if (!sourceURLForFrame.isEmpty()) { + auto* callsite = jsCast(callSites.at(i)); + callsite->setSourceURL(vm, jsString(vm, sourceURLForFrame)); } } } @@ -1883,30 +1883,6 @@ JSC_DEFINE_HOST_FUNCTION(functionCreateUninitializedArrayBuffer, RELEASE_AND_RETURN(scope, JSValue::encode(JSC::JSArrayBuffer::create(globalObject->vm(), globalObject->arrayBufferStructure(JSC::ArrayBufferSharingMode::Default), WTFMove(arrayBuffer)))); } -JSC_DEFINE_HOST_FUNCTION(functionNoop, (JSC::JSGlobalObject*, JSC::CallFrame*)) -{ - return JSC::JSValue::encode(JSC::jsUndefined()); -} - -JSC_DEFINE_HOST_FUNCTION(functionCallback, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) -{ - JSFunction* callback = jsCast(callFrame->uncheckedArgument(0)); - JSC::CallData callData = JSC::getCallData(callback); - return JSC::JSValue::encode(JSC::profiledCall(globalObject, ProfilingReason::API, callback, callData, JSC::jsUndefined(), JSC::MarkedArgumentBuffer())); -} - -JSC_DEFINE_CUSTOM_GETTER(noop_getter, (JSGlobalObject*, EncodedJSValue, PropertyName)) -{ - return JSC::JSValue::encode(JSC::jsUndefined()); -} - -JSC_DEFINE_CUSTOM_SETTER(noop_setter, - (JSC::JSGlobalObject*, JSC::EncodedJSValue, - JSC::EncodedJSValue, JSC::PropertyName)) -{ - return true; -} - static inline JSC::EncodedJSValue jsFunctionAddEventListenerBody(JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* callFrame, Zig::GlobalObject* castedThis) { auto& vm = JSC::getVM(lexicalGlobalObject); @@ -2812,42 +2788,10 @@ JSC_DEFINE_CUSTOM_SETTER(moduleNamespacePrototypeSetESModuleMarker, (JSGlobalObj auto scope = DECLARE_THROW_SCOPE(vm); JSValue value = JSValue::decode(encodedValue); WTF::TriState triState = value.toBoolean(globalObject) ? 
WTF::TriState::True : WTF::TriState::False; - RETURN_IF_EXCEPTION(scope, false); moduleNamespaceObject->m_hasESModuleMarker = triState; return true; } -extern "C" JSC::EncodedJSValue JSS3File__upload(JSGlobalObject*, JSC::CallFrame*); -extern "C" JSC::EncodedJSValue JSS3File__presign(JSGlobalObject*, JSC::CallFrame*); -extern "C" JSC::EncodedJSValue JSS3File__unlink(JSGlobalObject*, JSC::CallFrame*); -extern "C" JSC::EncodedJSValue JSS3File__exists(JSGlobalObject*, JSC::CallFrame*); -extern "C" JSC::EncodedJSValue JSS3File__size(JSGlobalObject*, JSC::CallFrame*); - -JSC_DEFINE_HOST_FUNCTION(jsS3Upload, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__upload(lexicalGlobalObject, callFrame); -} - -JSC_DEFINE_HOST_FUNCTION(jsS3Presign, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__presign(lexicalGlobalObject, callFrame); -} - -JSC_DEFINE_HOST_FUNCTION(jsS3Unlink, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__unlink(lexicalGlobalObject, callFrame); -} - -JSC_DEFINE_HOST_FUNCTION(jsS3Exists, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__exists(lexicalGlobalObject, callFrame); -} - -JSC_DEFINE_HOST_FUNCTION(jsS3Size, (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame)) -{ - return JSS3File__size(lexicalGlobalObject, callFrame); -} - void GlobalObject::finishCreation(VM& vm) { Base::finishCreation(vm); @@ -2869,18 +2813,6 @@ void GlobalObject::finishCreation(VM& vm) init.set(fileConstructor); }); - m_JSS3FileConstructor.initLater( - [](const Initializer& init) { - JSObject* s3Constructor = Bun::createJSS3FileConstructor(init.vm, init.owner); - s3Constructor->putDirectNativeFunction(init.vm, init.owner, JSC::Identifier::fromString(init.vm, "upload"_s), 3, jsS3Upload, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - s3Constructor->putDirectNativeFunction(init.vm, init.owner, JSC::Identifier::fromString(init.vm, "unlink"_s), 3, jsS3Unlink, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - s3Constructor->putDirectNativeFunction(init.vm, init.owner, JSC::Identifier::fromString(init.vm, "presign"_s), 3, jsS3Presign, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - s3Constructor->putDirectNativeFunction(init.vm, init.owner, JSC::Identifier::fromString(init.vm, "exists"_s), 3, jsS3Exists, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - s3Constructor->putDirectNativeFunction(init.vm, init.owner, JSC::Identifier::fromString(init.vm, "size"_s), 3, jsS3Size, ImplementationVisibility::Public, JSC::NoIntrinsic, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | 0); - - init.set(s3Constructor); - }); - m_cryptoObject.initLater( [](const Initializer& init) { JSC::JSGlobalObject* globalObject = init.owner; @@ -2928,6 +2860,16 @@ void GlobalObject::finishCreation(VM& vm) init.set(result.toObject(init.owner)); }); + m_JSS3FileStructure.initLater( + [](const Initializer& init) { + init.set(Bun::createJSS3FileStructure(init.vm, init.owner)); + }); + + m_S3ErrorStructure.initLater( + [](const Initializer& init) { + init.set(Bun::createS3ErrorStructure(init.vm, init.owner)); + }); + m_commonJSModuleObjectStructure.initLater( 
[](const Initializer& init) { init.set(Bun::createCommonJSModuleStructure(reinterpret_cast(init.owner))); @@ -3176,7 +3118,7 @@ void GlobalObject::finishCreation(VM& vm) m_JSFetchTaskletChunkedRequestControllerPrototype.initLater( [](const JSC::LazyProperty::Initializer& init) { - auto* prototype = createJSSinkControllerPrototype(init.vm, init.owner, WebCore::SinkID::FetchTaskletChunkedRequestSink); + auto* prototype = createJSSinkControllerPrototype(init.vm, init.owner, WebCore::SinkID::NetworkSink); init.set(prototype); }); @@ -3308,11 +3250,11 @@ void GlobalObject::finishCreation(VM& vm) init.setConstructor(constructor); }); - m_JSFetchTaskletChunkedRequestSinkClassStructure.initLater( + m_JSNetworkSinkClassStructure.initLater( [](LazyClassStructure::Initializer& init) { - auto* prototype = createJSSinkPrototype(init.vm, init.global, WebCore::SinkID::FetchTaskletChunkedRequestSink); - auto* structure = JSFetchTaskletChunkedRequestSink::createStructure(init.vm, init.global, prototype); - auto* constructor = JSFetchTaskletChunkedRequestSinkConstructor::create(init.vm, init.global, JSFetchTaskletChunkedRequestSinkConstructor::createStructure(init.vm, init.global, init.global->functionPrototype()), jsCast(prototype)); + auto* prototype = createJSSinkPrototype(init.vm, init.global, WebCore::SinkID::NetworkSink); + auto* structure = JSNetworkSink::createStructure(init.vm, init.global, prototype); + auto* constructor = JSNetworkSinkConstructor::create(init.vm, init.global, JSNetworkSinkConstructor::createStructure(init.vm, init.global, init.global->functionPrototype()), jsCast(prototype)); init.setPrototype(prototype); init.setStructure(structure); init.setConstructor(constructor); @@ -3855,7 +3797,8 @@ void GlobalObject::visitChildrenImpl(JSCell* cell, Visitor& visitor) thisObject->m_JSCryptoKey.visit(visitor); thisObject->m_lazyStackCustomGetterSetter.visit(visitor); thisObject->m_JSDOMFileConstructor.visit(visitor); - thisObject->m_JSS3FileConstructor.visit(visitor); + thisObject->m_JSS3FileStructure.visit(visitor); + thisObject->m_S3ErrorStructure.visit(visitor); thisObject->m_JSFFIFunctionStructure.visit(visitor); thisObject->m_JSFileSinkClassStructure.visit(visitor); thisObject->m_JSFileSinkControllerPrototype.visit(visitor); @@ -3863,7 +3806,7 @@ void GlobalObject::visitChildrenImpl(JSCell* cell, Visitor& visitor) thisObject->m_JSHTTPResponseSinkClassStructure.visit(visitor); thisObject->m_JSHTTPSResponseControllerPrototype.visit(visitor); thisObject->m_JSHTTPSResponseSinkClassStructure.visit(visitor); - thisObject->m_JSFetchTaskletChunkedRequestSinkClassStructure.visit(visitor); + thisObject->m_JSNetworkSinkClassStructure.visit(visitor); thisObject->m_JSFetchTaskletChunkedRequestControllerPrototype.visit(visitor); thisObject->m_JSSocketAddressStructure.visit(visitor); thisObject->m_JSSQLStatementStructure.visit(visitor); diff --git a/src/bun.js/bindings/ZigGlobalObject.h b/src/bun.js/bindings/ZigGlobalObject.h index 33beb34c7e..c556cd8688 100644 --- a/src/bun.js/bindings/ZigGlobalObject.h +++ b/src/bun.js/bindings/ZigGlobalObject.h @@ -210,10 +210,10 @@ public: JSC::JSValue HTTPSResponseSinkPrototype() const { return m_JSHTTPSResponseSinkClassStructure.prototypeInitializedOnMainThread(this); } JSC::JSValue JSReadableHTTPSResponseSinkControllerPrototype() const { return m_JSHTTPSResponseControllerPrototype.getInitializedOnMainThread(this); } - JSC::Structure* FetchTaskletChunkedRequestSinkStructure() const { return 
m_JSFetchTaskletChunkedRequestSinkClassStructure.getInitializedOnMainThread(this); } - JSC::JSObject* FetchTaskletChunkedRequestSink() { return m_JSFetchTaskletChunkedRequestSinkClassStructure.constructorInitializedOnMainThread(this); } - JSC::JSValue FetchTaskletChunkedRequestSinkPrototype() const { return m_JSFetchTaskletChunkedRequestSinkClassStructure.prototypeInitializedOnMainThread(this); } - JSC::JSValue JSReadableFetchTaskletChunkedRequestSinkControllerPrototype() const { return m_JSFetchTaskletChunkedRequestControllerPrototype.getInitializedOnMainThread(this); } + JSC::Structure* NetworkSinkStructure() const { return m_JSNetworkSinkClassStructure.getInitializedOnMainThread(this); } + JSC::JSObject* NetworkSink() { return m_JSNetworkSinkClassStructure.constructorInitializedOnMainThread(this); } + JSC::JSValue NetworkSinkPrototype() const { return m_JSNetworkSinkClassStructure.prototypeInitializedOnMainThread(this); } + JSC::JSValue JSReadableNetworkSinkControllerPrototype() const { return m_JSFetchTaskletChunkedRequestControllerPrototype.getInitializedOnMainThread(this); } JSC::Structure* JSBufferListStructure() const { return m_JSBufferListClassStructure.getInitializedOnMainThread(this); } JSC::JSObject* JSBufferList() { return m_JSBufferListClassStructure.constructorInitializedOnMainThread(this); } @@ -478,9 +478,11 @@ public: LazyProperty m_processEnvObject; + LazyProperty m_JSS3FileStructure; + LazyProperty m_S3ErrorStructure; + JSObject* cryptoObject() const { return m_cryptoObject.getInitializedOnMainThread(this); } JSObject* JSDOMFileConstructor() const { return m_JSDOMFileConstructor.getInitializedOnMainThread(this); } - JSObject* JSS3FileConstructor() const { return m_JSS3FileConstructor.getInitializedOnMainThread(this); } Bun::CommonStrings& commonStrings() { return m_commonStrings; } Bun::Http2CommonStrings& http2CommonStrings() { return m_http2_commongStrings; } @@ -521,7 +523,7 @@ public: LazyClassStructure m_JSFileSinkClassStructure; LazyClassStructure m_JSHTTPResponseSinkClassStructure; LazyClassStructure m_JSHTTPSResponseSinkClassStructure; - LazyClassStructure m_JSFetchTaskletChunkedRequestSinkClassStructure; + LazyClassStructure m_JSNetworkSinkClassStructure; LazyClassStructure m_JSStringDecoderClassStructure; LazyClassStructure m_NapiClassStructure; @@ -574,7 +576,6 @@ public: LazyProperty m_importMetaObjectStructure; LazyProperty m_asyncBoundFunctionStructure; LazyProperty m_JSDOMFileConstructor; - LazyProperty m_JSS3FileConstructor; LazyProperty m_JSCryptoKey; LazyProperty m_NapiExternalStructure; diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index 19ba3bb1df..506b1614bc 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -1949,24 +1949,21 @@ JSC__JSValue SystemError__toErrorInstance(const SystemError* arg0, JSC::JSObject* result = JSC::ErrorInstance::create(globalObject, globalObject->errorStructureWithErrorType(), message, options); + auto clientData = WebCore::clientData(vm); + if (err.code.tag != BunStringTag::Empty) { JSC::JSValue code = Bun::toJS(globalObject, err.code); - result->putDirect(vm, names.codePublicName(), code, - JSC::PropertyAttribute::DontDelete | 0); - - result->putDirect(vm, vm.propertyNames->name, code, JSC::PropertyAttribute::DontEnum | 0); - } else { - auto* domGlobalObject = defaultGlobalObject(globalObject); - result->putDirect( - vm, vm.propertyNames->name, - JSC::JSValue(domGlobalObject->commonStrings().SystemErrorString(domGlobalObject)), - 
JSC::PropertyAttribute::DontEnum | 0); + result->putDirect(vm, clientData->builtinNames().codePublicName(), code, JSC::PropertyAttribute::DontDelete | 0); } if (err.path.tag != BunStringTag::Empty) { JSC::JSValue path = Bun::toJS(globalObject, err.path); - result->putDirect(vm, names.pathPublicName(), path, - JSC::PropertyAttribute::DontDelete | 0); + result->putDirect(vm, clientData->builtinNames().pathPublicName(), path, JSC::PropertyAttribute::DontDelete | 0); + } + + if (err.dest.tag != BunStringTag::Empty) { + JSC::JSValue dest = Bun::toJS(globalObject, err.dest); + result->putDirect(vm, clientData->builtinNames().destPublicName(), dest, JSC::PropertyAttribute::DontDelete | 0); } if (err.fd != -1) { @@ -6235,33 +6232,7 @@ CPP_DECL void Bun__CallFrame__getCallerSrcLoc(JSC::CallFrame* callFrame, JSC::JS lineColumn = visitor->computeLineAndColumn(); - String sourceURLForFrame = visitor->sourceURL(); - - // Sometimes, the sourceURL is empty. - // For example, pages in Next.js. - if (sourceURLForFrame.isEmpty()) { - - // hasLineAndColumnInfo() checks codeBlock(), so this is safe to access here. - const auto& source = visitor->codeBlock()->source(); - - // source.isNull() is true when the SourceProvider is a null pointer. - if (!source.isNull()) { - auto* provider = source.provider(); - // I'm not 100% sure we should show sourceURLDirective here. - if (!provider->sourceURLDirective().isEmpty()) { - sourceURLForFrame = provider->sourceURLDirective(); - } else if (!provider->sourceURL().isEmpty()) { - sourceURLForFrame = provider->sourceURL(); - } else { - const auto& origin = provider->sourceOrigin(); - if (!origin.isNull()) { - sourceURLForFrame = origin.string(); - } - } - } - } - - sourceURL = sourceURLForFrame; + sourceURL = Zig::sourceURL(visitor); return WTF::IterationStatus::Done; } diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index 933d78f6f2..e74af06581 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -102,11 +102,31 @@ pub const JSObject = extern struct { } } - extern fn JSC__createStructure(*JSC.JSGlobalObject, *JSC.JSCell, u32, names: [*]bun.String) JSC.JSValue; + extern fn JSC__createStructure(*JSC.JSGlobalObject, *JSC.JSCell, u32, names: [*]ExternColumnIdentifier, flags: u32) JSC.JSValue; - pub fn createStructure(global: *JSGlobalObject, owner: JSC.JSValue, length: u32, names: [*]bun.String) JSValue { + pub const ExternColumnIdentifier = extern struct { + tag: u8 = 0, + value: extern union { + index: u32, + name: bun.String, + }, + + pub fn string(this: *ExternColumnIdentifier) ?*bun.String { + return switch (this.tag) { + 2 => &this.value.name, + else => null, + }; + } + + pub fn deinit(this: *ExternColumnIdentifier) void { + if (this.string()) |str| { + str.deref(); + } + } + }; + pub fn createStructure(global: *JSGlobalObject, owner: JSC.JSValue, length: u32, names: [*]ExternColumnIdentifier, flags: u32) JSValue { JSC.markBinding(@src()); - return JSC__createStructure(global, owner.asCell(), length, names); + return JSC__createStructure(global, owner.asCell(), length, names, flags); } const InitializeCallback = *const fn (ctx: *anyopaque, obj: *JSObject, global: *JSGlobalObject) callconv(.C) void; @@ -1712,6 +1732,7 @@ pub const SystemError = extern struct { path: String = String.empty, syscall: String = String.empty, fd: bun.FileDescriptor = bun.toFD(-1), + dest: String = String.empty, pub fn Maybe(comptime Result: type) type { return union(enum) { @@ -1735,6 +1756,7 @@ pub const SystemError = extern 
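A note on the `dest` field added to `SystemError` above (and to the C `SystemError` typedef later in this diff): bindings.cpp now mirrors it onto the JS error object via `destPublicName`. A hedged sketch of a call site follows; the errno value, the hypothetical locals `from_path`/`to_path`, and the exact `bun.String` constructors are illustrative assumptions, not code from this change.

```zig
// Illustrative only: a rename(2)-style failure can now carry both ends
// of the operation, so JS sees `error.path` and `error.dest`.
const sys_err = JSC.SystemError{
    .errno = -18, // pretend EXDEV; real callers translate the OS errno
    .syscall = bun.String.static("rename"),
    .path = bun.String.createUTF8(from_path), // from_path/to_path are hypothetical locals
    .dest = bun.String.createUTF8(to_path), // the field this diff adds
};
return globalThis.throwValue(sys_err.toErrorInstance(globalThis));
```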
struct { this.code.deref(); this.message.deref(); this.syscall.deref(); + this.dest.deref(); } pub fn ref(this: *SystemError) void { @@ -1742,6 +1764,7 @@ pub const SystemError = extern struct { this.code.ref(); this.message.ref(); this.syscall.ref(); + this.dest.ref(); } pub fn toErrorInstance(this: *const SystemError, global: *JSGlobalObject) JSValue { @@ -1750,6 +1773,7 @@ pub const SystemError = extern struct { this.code.deref(); this.message.deref(); this.syscall.deref(); + this.dest.deref(); } return shim.cppFn("toErrorInstance", .{ this, global }); @@ -1780,6 +1804,7 @@ pub const SystemError = extern struct { this.code.deref(); this.message.deref(); this.syscall.deref(); + this.dest.deref(); } return SystemError__toErrorInstanceWithInfoObject(this, global); @@ -2344,10 +2369,6 @@ pub const JSPromise = extern struct { this.reject(globalThis, val); } - pub fn rejectOnNextTick(this: *WeakType, globalThis: *JSC.JSGlobalObject, val: JSC.JSValue) void { - this.swap().rejectOnNextTick(globalThis, val); - } - pub fn resolve(this: *WeakType, globalThis: *JSC.JSGlobalObject, val: JSC.JSValue) void { this.swap().resolve(globalThis, val); } @@ -2425,9 +2446,7 @@ pub const JSPromise = extern struct { this.reject(globalThis, val); } - pub fn rejectOnNextTick(this: *Strong, globalThis: *JSC.JSGlobalObject, val: JSC.JSValue) void { - this.swap().rejectOnNextTick(globalThis, val); - } + pub const rejectOnNextTick = @compileError("Either use an event loop task, or you're draining microtasks when you shouldn't be."); pub fn resolve(this: *Strong, globalThis: *JSC.JSGlobalObject, val: JSC.JSValue) void { this.swap().resolve(globalThis, val); @@ -2544,18 +2563,6 @@ pub const JSPromise = extern struct { return cppFn("resolveOnNextTick", .{ promise, globalThis, value }); } - pub fn rejectOnNextTick(promise: *JSC.JSPromise, globalThis: *JSGlobalObject, value: JSC.JSValue) void { - return rejectOnNextTickWithHandled(promise, globalThis, value, false); - } - - pub fn rejectOnNextTickAsHandled(promise: *JSC.JSPromise, globalThis: *JSGlobalObject, value: JSC.JSValue) void { - return rejectOnNextTickWithHandled(promise, globalThis, value, true); - } - - pub fn rejectOnNextTickWithHandled(promise: *JSC.JSPromise, globalThis: *JSGlobalObject, value: JSC.JSValue, handled: bool) void { - return cppFn("rejectOnNextTickWithHandled", .{ promise, globalThis, value, handled }); - } - /// Create a new promise with an already fulfilled value /// This is the faster function for doing that. 
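The `rejectOnNextTick` hunk above does not just delete the method; it leaves a `@compileError` tombstone so any remaining caller fails to build with an actionable message. A self-contained illustration of the pattern (generic names, not Bun's):

```zig
const Strong = struct {
    pub fn reject(_: *Strong) void {}

    // Zig analyzes declarations lazily, so the tombstone is harmless
    // until something references it; any use fails the build with
    // exactly this message.
    pub const rejectOnNextTick =
        @compileError("Either use an event loop task, or you're draining microtasks when you shouldn't be.");
};

test "tombstoned decl only errors when referenced" {
    var p = Strong{};
    p.reject(); // compiles fine
    // _ = Strong.rejectOnNextTick; // uncommenting fails at compile time
}
```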
pub fn resolvedPromiseValue(globalThis: *JSGlobalObject, value: JSValue) JSValue { @@ -2621,7 +2628,6 @@ pub const JSPromise = extern struct { "reject", "rejectAsHandled", "rejectAsHandledException", - "rejectOnNextTickWithHandled", "rejectedPromise", "rejectedPromiseValue", "resolve", @@ -4492,7 +4498,7 @@ pub const JSValue = enum(i64) { .quote_strings = true, }; - JestPrettyFormat.format( + try JestPrettyFormat.format( .Debug, globalObject, @as([*]const JSValue, @ptrCast(&this)), @@ -5636,6 +5642,7 @@ pub const JSValue = enum(i64) { return JSC.Node.validators.throwErrInvalidArgType(global, property_name, .{}, "string", prop); }, i32 => return prop.coerce(i32, global), + i64 => return prop.coerce(i64, global), else => @compileError("TODO:" ++ @typeName(T)), } } @@ -6789,6 +6796,12 @@ pub const JSHostFunctionType = fn (*JSGlobalObject, *CallFrame) callconv(JSC.con pub const JSHostFunctionTypeWithCCallConvForAssertions = fn (*JSGlobalObject, *CallFrame) callconv(.C) JSValue; pub const JSHostFunctionPtr = *const JSHostFunctionType; pub const JSHostZigFunction = fn (*JSGlobalObject, *CallFrame) bun.JSError!JSValue; +pub fn JSHostZigFunctionWithContext(comptime ContextType: type) type { + return fn (*ContextType, *JSGlobalObject, *CallFrame) bun.JSError!JSValue; +} +pub fn JSHostFunctionTypeWithContext(comptime ContextType: type) type { + return fn (*ContextType, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue; +} pub fn toJSHostFunction(comptime Function: JSHostZigFunction) JSC.JSHostFunctionType { return struct { @@ -6826,6 +6839,42 @@ pub fn toJSHostFunction(comptime Function: JSHostZigFunction) JSC.JSHostFunction } }.function; } +pub fn toJSHostFunctionWithContext(comptime ContextType: type, comptime Function: JSHostZigFunctionWithContext(ContextType)) JSHostFunctionTypeWithContext(ContextType) { + return struct { + pub fn function(ctx: *ContextType, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { + if (bun.Environment.allow_assert and bun.Environment.is_canary) { + const value = Function(ctx, globalThis, callframe) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; + if (comptime bun.Environment.isDebug) { + if (value != .zero) { + if (globalThis.hasException()) { + var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalThis }; + bun.Output.prettyErrorln( + \\Assertion failed: Native function returned a non-zero JSValue while an exception is pending + \\ + \\ fn: {s} + \\ value: {} + \\ + , .{ + &Function, // use `(lldb) image lookup --address 0x1ec4` to discover what function failed + value.toFmt(&formatter), + }); + Output.flush(); + } + } + } + bun.assert((value == .zero) == globalThis.hasException()); + return value; + } + return @call(.always_inline, Function, .{ ctx, globalThis, callframe }) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; + } + }.function; +} pub fn toJSHostValue(globalThis: *JSGlobalObject, value: error{ OutOfMemory, JSError }!JSValue) JSValue { if (bun.Environment.allow_assert and bun.Environment.is_canary) { diff --git a/src/bun.js/bindings/c-bindings.cpp b/src/bun.js/bindings/c-bindings.cpp index 8268b1cab2..cf6a95fcc1 100644 --- a/src/bun.js/bindings/c-bindings.cpp +++ b/src/bun.js/bindings/c-bindings.cpp @@ -870,3 +870,13 @@ extern "C" void Bun__unregisterSignalsForForwarding() } #endif + +#if OS(LINUX) || OS(DARWIN) +#include + +extern "C" const char* 
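On the `toJSHostFunctionWithContext` helper added above: it comptime-wraps a context-taking, error-union Zig function into the `callconv(JSC.conv)` shape native bindings expect, translating `error.JSError` into a `.zero` return (exception already pending) and `error.OutOfMemory` into a thrown OOM value, as `toJSHostFunction` already does. A usage sketch; the `Timer` context type and function names are hypothetical, and the `JSC.` re-export path is assumed.

```zig
const Timer = struct { fires: u32 = 0 };

fn onTimerImpl(this: *Timer, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue {
    _ = globalThis;
    _ = callframe;
    this.fires += 1;
    return JSC.JSValue.jsNumber(this.fires);
}

// The wrapper result has the C-callable signature
// fn (*Timer, *JSGlobalObject, *CallFrame) callconv(JSC.conv) JSValue.
pub const onTimer = JSC.toJSHostFunctionWithContext(Timer, onTimerImpl);
```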
BUN_DEFAULT_PATH_FOR_SPAWN = _PATH_DEFPATH; +#elif OS(WINDOWS) +extern "C" const char* BUN_DEFAULT_PATH_FOR_SPAWN = "C:\\Windows\\System32;C:\\Windows;"; +#else +extern "C" const char* BUN_DEFAULT_PATH_FOR_SPAWN = "/usr/bin:/bin"; +#endif diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig index 2a2c19edcd..0dc579e12c 100644 --- a/src/bun.js/bindings/exports.zig +++ b/src/bun.js/bindings/exports.zig @@ -144,7 +144,7 @@ pub const JSArrayBufferSink = JSC.WebCore.ArrayBufferSink.JSSink; pub const JSHTTPSResponseSink = JSC.WebCore.HTTPSResponseSink.JSSink; pub const JSHTTPResponseSink = JSC.WebCore.HTTPResponseSink.JSSink; pub const JSFileSink = JSC.WebCore.FileSink.JSSink; -pub const JSFetchTaskletChunkedRequestSink = JSC.WebCore.FetchTaskletChunkedRequestSink.JSSink; +pub const JSNetworkSink = JSC.WebCore.NetworkSink.JSSink; // WebSocket pub const WebSocketHTTPClient = @import("../../http/websocket_http_client.zig").WebSocketHTTPClient; @@ -371,15 +371,22 @@ pub const Process = extern struct { pub const shim = Shimmer("Bun", "Process", @This()); pub const name = "Process"; pub const namespace = shim.namespace; - const _bun: string = "bun"; + var title_mutex = std.Thread.Mutex{}; pub fn getTitle(_: *JSGlobalObject, title: *ZigString) callconv(.C) void { - title.* = ZigString.init(_bun); + title_mutex.lock(); + defer title_mutex.unlock(); + const str = bun.CLI.Bun__Node__ProcessTitle; + title.* = ZigString.init(str orelse "bun"); } // TODO: https://github.com/nodejs/node/blob/master/deps/uv/src/unix/darwin-proctitle.c - pub fn setTitle(globalObject: *JSGlobalObject, _: *ZigString) callconv(.C) JSValue { - return ZigString.init(_bun).toJS(globalObject); + pub fn setTitle(globalObject: *JSGlobalObject, newvalue: *ZigString) callconv(.C) JSValue { + title_mutex.lock(); + defer title_mutex.unlock(); + if (bun.CLI.Bun__Node__ProcessTitle) |_| bun.default_allocator.free(bun.CLI.Bun__Node__ProcessTitle.?); + bun.CLI.Bun__Node__ProcessTitle = newvalue.dupe(bun.default_allocator) catch bun.outOfMemory(); + return newvalue.toJS(globalObject); } pub const getArgv = JSC.Node.Process.getArgv; @@ -976,7 +983,7 @@ comptime { JSArrayBufferSink.shim.ref(); JSHTTPResponseSink.shim.ref(); JSHTTPSResponseSink.shim.ref(); - JSFetchTaskletChunkedRequestSink.shim.ref(); + JSNetworkSink.shim.ref(); JSFileSink.shim.ref(); JSFileSink.shim.ref(); _ = ZigString__free; diff --git a/src/bun.js/bindings/generated_classes_list.zig b/src/bun.js/bindings/generated_classes_list.zig index 72095df5a5..995ddb4466 100644 --- a/src/bun.js/bindings/generated_classes_list.zig +++ b/src/bun.js/bindings/generated_classes_list.zig @@ -77,4 +77,7 @@ pub const Classes = struct { pub const NativeZlib = JSC.API.NativeZlib; pub const NativeBrotli = JSC.API.NativeBrotli; pub const FrameworkFileSystemRouter = bun.bake.FrameworkRouter.JSFrameworkRouter; + + pub const S3Client = JSC.WebCore.S3Client; + pub const S3Stat = JSC.WebCore.S3Stat; }; diff --git a/src/bun.js/bindings/headers-handwritten.h b/src/bun.js/bindings/headers-handwritten.h index 6cf02c5c06..66c8207437 100644 --- a/src/bun.js/bindings/headers-handwritten.h +++ b/src/bun.js/bindings/headers-handwritten.h @@ -127,6 +127,7 @@ typedef struct SystemError { BunString path; BunString syscall; int fd; + BunString dest; } SystemError; typedef void* ArrayBufferSink; diff --git a/src/bun.js/bindings/headers.h b/src/bun.js/bindings/headers.h index 9bdf332b16..ab9f3ca437 100644 --- a/src/bun.js/bindings/headers.h +++ b/src/bun.js/bindings/headers.h @@ -686,24 
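The exports.zig hunk above makes `process.title` actually settable: reads and writes go through a mutex, and the title is heap-owned so the setter can free the previous value. A minimal self-contained model of that design (names and the return semantics are simplifications):

```zig
const std = @import("std");

var title_mutex = std.Thread.Mutex{};
var title: ?[]u8 = null;

fn setTitle(allocator: std.mem.Allocator, new: []const u8) !void {
    title_mutex.lock();
    defer title_mutex.unlock();
    if (title) |old| allocator.free(old); // free the previous title, as the setter above does
    title = try allocator.dupe(u8, new);
}

fn getTitle() []const u8 {
    title_mutex.lock();
    defer title_mutex.unlock();
    // The real getter copies into the caller's ZigString while the lock
    // is held; returning the slice directly is a simplification.
    return title orelse "bun";
}
```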
+686,24 @@ ZIG_DECL void FileSink__updateRef(void* arg0, bool arg1); BUN_DECLARE_HOST_FUNCTION(FileSink__write); #endif -CPP_DECL JSC__JSValue FetchTaskletChunkedRequestSink__assignToStream(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1, void* arg2, void** arg3); -CPP_DECL JSC__JSValue FetchTaskletChunkedRequestSink__createObject(JSC__JSGlobalObject* arg0, void* arg1, uintptr_t destructor); -CPP_DECL void FetchTaskletChunkedRequestSink__detachPtr(JSC__JSValue JSValue0); -CPP_DECL void* FetchTaskletChunkedRequestSink__fromJS(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1); -CPP_DECL void FetchTaskletChunkedRequestSink__onClose(JSC__JSValue JSValue0, JSC__JSValue JSValue1); -CPP_DECL void FetchTaskletChunkedRequestSink__onReady(JSC__JSValue JSValue0, JSC__JSValue JSValue1, JSC__JSValue JSValue2); +CPP_DECL JSC__JSValue NetworkSink__assignToStream(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1, void* arg2, void** arg3); +CPP_DECL JSC__JSValue NetworkSink__createObject(JSC__JSGlobalObject* arg0, void* arg1, uintptr_t destructor); +CPP_DECL void NetworkSink__detachPtr(JSC__JSValue JSValue0); +CPP_DECL void* NetworkSink__fromJS(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1); +CPP_DECL void NetworkSink__onClose(JSC__JSValue JSValue0, JSC__JSValue JSValue1); +CPP_DECL void NetworkSink__onReady(JSC__JSValue JSValue0, JSC__JSValue JSValue1, JSC__JSValue JSValue2); #ifdef __cplusplus -ZIG_DECL JSC__JSValue FetchTaskletChunkedRequestSink__close(JSC__JSGlobalObject* arg0, void* arg1); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__construct); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__end); -ZIG_DECL JSC__JSValue SYSV_ABI SYSV_ABI FetchTaskletChunkedRequestSink__endWithSink(void* arg0, JSC__JSGlobalObject* arg1); -ZIG_DECL void FetchTaskletChunkedRequestSink__finalize(void* arg0); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__flush); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__start); -ZIG_DECL void FetchTaskletChunkedRequestSink__updateRef(void* arg0, bool arg1); -BUN_DECLARE_HOST_FUNCTION(FetchTaskletChunkedRequestSink__write); +ZIG_DECL JSC__JSValue NetworkSink__close(JSC__JSGlobalObject* arg0, void* arg1); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__construct); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__end); +ZIG_DECL JSC__JSValue SYSV_ABI SYSV_ABI NetworkSink__endWithSink(void* arg0, JSC__JSGlobalObject* arg1); +ZIG_DECL void NetworkSink__finalize(void* arg0); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__flush); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__start); +ZIG_DECL void NetworkSink__updateRef(void* arg0, bool arg1); +BUN_DECLARE_HOST_FUNCTION(NetworkSink__write); #endif #ifdef __cplusplus diff --git a/src/bun.js/bindings/headers.zig b/src/bun.js/bindings/headers.zig index c4f37d3490..642d54cf3b 100644 --- a/src/bun.js/bindings/headers.zig +++ b/src/bun.js/bindings/headers.zig @@ -155,11 +155,9 @@ pub extern fn JSC__JSPromise__rejectAsHandled(arg0: ?*bindings.JSPromise, arg1: pub extern fn JSC__JSPromise__rejectAsHandledException(arg0: ?*bindings.JSPromise, arg1: *bindings.JSGlobalObject, arg2: [*c]bindings.Exception) void; pub extern fn JSC__JSPromise__rejectedPromise(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) ?*bindings.JSPromise; pub extern fn JSC__JSPromise__rejectedPromiseValue(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) JSC__JSValue; -pub extern fn JSC__JSPromise__rejectOnNextTickWithHandled(arg0: ?*bindings.JSPromise, arg1: *bindings.JSGlobalObject, JSValue2: JSC__JSValue, arg3: bool) void; pub extern fn 
JSC__JSPromise__resolve(arg0: ?*bindings.JSPromise, arg1: *bindings.JSGlobalObject, JSValue2: JSC__JSValue) void; pub extern fn JSC__JSPromise__resolvedPromise(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) ?*bindings.JSPromise; pub extern fn JSC__JSPromise__resolvedPromiseValue(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) JSC__JSValue; -pub extern fn JSC__JSPromise__resolveOnNextTick(arg0: ?*bindings.JSPromise, arg1: *bindings.JSGlobalObject, JSValue2: JSC__JSValue) void; pub extern fn JSC__JSPromise__result(arg0: ?*bindings.JSPromise, arg1: *bindings.VM) JSC__JSValue; pub extern fn JSC__JSPromise__setHandled(arg0: ?*bindings.JSPromise, arg1: *bindings.VM) void; pub extern fn JSC__JSPromise__status(arg0: [*c]const JSC__JSPromise, arg1: *bindings.VM) u32; @@ -377,13 +375,13 @@ pub extern fn FileSink__setDestroyCallback(JSValue0: JSC__JSValue, callback: usi pub extern fn FileSink__fromJS(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) ?*anyopaque; pub extern fn FileSink__onClose(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue) void; pub extern fn FileSink__onReady(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue, JSValue2: JSC__JSValue) void; -pub extern fn FetchTaskletChunkedRequestSink__assignToStream(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue, arg2: ?*anyopaque, arg3: [*c]*anyopaque) JSC__JSValue; -pub extern fn FetchTaskletChunkedRequestSink__createObject(arg0: *bindings.JSGlobalObject, arg1: ?*anyopaque, onDestroyPtrTag: usize) JSC__JSValue; -pub extern fn FetchTaskletChunkedRequestSink__detachPtr(JSValue0: JSC__JSValue) void; -pub extern fn FetchTaskletChunkedRequestSink__setDestroyCallback(JSValue0: JSC__JSValue, callback: usize) void; -pub extern fn FetchTaskletChunkedRequestSink__fromJS(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) ?*anyopaque; -pub extern fn FetchTaskletChunkedRequestSink__onClose(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue) void; -pub extern fn FetchTaskletChunkedRequestSink__onReady(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue, JSValue2: JSC__JSValue) void; +pub extern fn NetworkSink__assignToStream(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue, arg2: ?*anyopaque, arg3: [*c]*anyopaque) JSC__JSValue; +pub extern fn NetworkSink__createObject(arg0: *bindings.JSGlobalObject, arg1: ?*anyopaque, onDestroyPtrTag: usize) JSC__JSValue; +pub extern fn NetworkSink__detachPtr(JSValue0: JSC__JSValue) void; +pub extern fn NetworkSink__setDestroyCallback(JSValue0: JSC__JSValue, callback: usize) void; +pub extern fn NetworkSink__fromJS(arg0: *bindings.JSGlobalObject, JSValue1: JSC__JSValue) ?*anyopaque; +pub extern fn NetworkSink__onClose(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue) void; +pub extern fn NetworkSink__onReady(JSValue0: JSC__JSValue, JSValue1: JSC__JSValue, JSValue2: JSC__JSValue) void; pub extern fn ZigException__fromException(arg0: [*c]bindings.Exception) ZigException; pub const JSC__GetterSetter = bindings.GetterSetter; diff --git a/src/bun.js/bindings/helpers.cpp b/src/bun.js/bindings/helpers.cpp index 6aa6a6096a..834bd0a36b 100644 --- a/src/bun.js/bindings/helpers.cpp +++ b/src/bun.js/bindings/helpers.cpp @@ -1,6 +1,7 @@ #include "root.h" #include "helpers.h" #include "BunClientData.h" +#include JSC::JSValue createSystemError(JSC::JSGlobalObject* global, ASCIILiteral message, ASCIILiteral syscall, int err) { @@ -15,11 +16,13 @@ JSC::JSValue createSystemError(JSC::JSGlobalObject* global, ASCIILiteral message JSC::JSValue createSystemError(JSC::JSGlobalObject* global, ASCIILiteral syscall, int err) 
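A note on the headers.zig hunk above: `pub extern fn` declarations bind by exported symbol name at link time, so the C++ `FetchTaskletChunkedRequestSink__*` to `NetworkSink__*` rename has to land in headers.h and headers.zig in lockstep. A sketch; the wrapper is hypothetical and not part of this diff:

```zig
// The Zig declaration must spell the exact exported symbol name.
pub extern fn NetworkSink__detachPtr(value: JSC.JSValue) void;

// A thin wrapper (not in the diff) keeps Zig call sites stable if the
// symbol is ever renamed again; only the extern line would change.
pub inline fn detachNetworkSink(value: JSC.JSValue) void {
    NetworkSink__detachPtr(value);
}
```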
{ - auto* instance = JSC::createError(global, makeString(String(syscall), "() failed"_s)); + auto errstr = String::fromLatin1(Bun__errnoName(err)); + auto* instance = JSC::createError(global, makeString(syscall, "() failed: "_s, errstr, ": "_s, String::fromLatin1(strerror(err)))); auto& vm = global->vm(); auto& builtinNames = WebCore::builtinNames(vm); instance->putDirect(vm, builtinNames.syscallPublicName(), jsString(vm, String(syscall)), 0); instance->putDirect(vm, builtinNames.errnoPublicName(), JSC::jsNumber(err), 0); instance->putDirect(vm, vm.propertyNames->name, jsString(vm, String("SystemError"_s)), JSC::PropertyAttribute::DontEnum | 0); + instance->putDirect(vm, builtinNames.codePublicName(), jsString(vm, errstr)); return instance; } diff --git a/src/bun.js/bindings/helpers.h b/src/bun.js/bindings/helpers.h index cabb787ce1..2653138c79 100644 --- a/src/bun.js/bindings/helpers.h +++ b/src/bun.js/bindings/helpers.h @@ -26,6 +26,7 @@ class GlobalObject; #pragma clang diagnostic ignored "-Wunused-function" extern "C" size_t Bun__stringSyntheticAllocationLimit; +extern "C" const char* Bun__errnoName(int); namespace Zig { diff --git a/src/bun.js/bindings/sqlite/JSSQLStatement.cpp b/src/bun.js/bindings/sqlite/JSSQLStatement.cpp index e27d4159fe..fd4d1ce63c 100644 --- a/src/bun.js/bindings/sqlite/JSSQLStatement.cpp +++ b/src/bun.js/bindings/sqlite/JSSQLStatement.cpp @@ -1670,7 +1670,6 @@ JSC_DEFINE_HOST_FUNCTION(jsSQLStatementCloseStatementFunction, (JSC::JSGlobalObj } bool shouldThrowOnError = (throwOnError.isEmpty() || throwOnError.isUndefined()) ? false : throwOnError.toBoolean(lexicalGlobalObject); - RETURN_IF_EXCEPTION(scope, {}); sqlite3* db = databases()[dbIndex]->db; // no-op if already closed @@ -2368,7 +2367,6 @@ JSC_DEFINE_CUSTOM_SETTER(jsSqlStatementSetSafeIntegers, (JSGlobalObject * lexica CHECK_PREPARED bool value = JSValue::decode(encodedValue).toBoolean(lexicalGlobalObject); - RETURN_IF_EXCEPTION(scope, false); castedThis->useBigInt64 = value; return true; diff --git a/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h b/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h index dc805895d2..2dffbe8465 100644 --- a/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h +++ b/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h @@ -58,6 +58,8 @@ public: std::unique_ptr m_clientSubspaceForFunctionTemplate; std::unique_ptr m_clientSubspaceForV8Function; std::unique_ptr m_clientSubspaceForNodeVMGlobalObject; + std::unique_ptr m_clientSubspaceForJSS3Bucket; + std::unique_ptr m_clientSubspaceForJSS3File; #include "ZigGeneratedClasses+DOMClientIsoSubspaces.h" /* --- bun --- */ diff --git a/src/bun.js/bindings/webcore/DOMIsoSubspaces.h b/src/bun.js/bindings/webcore/DOMIsoSubspaces.h index 2d4eb091c5..5af65d80ac 100644 --- a/src/bun.js/bindings/webcore/DOMIsoSubspaces.h +++ b/src/bun.js/bindings/webcore/DOMIsoSubspaces.h @@ -58,6 +58,8 @@ public: std::unique_ptr m_subspaceForFunctionTemplate; std::unique_ptr m_subspaceForV8Function; std::unique_ptr m_subspaceForNodeVMGlobalObject; + std::unique_ptr m_subspaceForJSS3Bucket; + std::unique_ptr m_subspaceForJSS3File; #include "ZigGeneratedClasses+DOMIsoSubspaces.h" /*-- BUN --*/ diff --git a/src/bun.js/bindings/webcore/EventEmitter.cpp b/src/bun.js/bindings/webcore/EventEmitter.cpp index 021edf1fca..8db15a8e5b 100644 --- a/src/bun.js/bindings/webcore/EventEmitter.cpp +++ b/src/bun.js/bindings/webcore/EventEmitter.cpp @@ -119,9 +119,9 @@ bool EventEmitter::emitForBindings(const Identifier& eventType, const MarkedArgu return true; } -void 
EventEmitter::emit(const Identifier& eventType, const MarkedArgumentBuffer& arguments)
+bool EventEmitter::emit(const Identifier& eventType, const MarkedArgumentBuffer& arguments)
 {
-    fireEventListeners(eventType, arguments);
+    return fireEventListeners(eventType, arguments);
 }
 
 void EventEmitter::uncaughtExceptionInEventHandler()
@@ -175,12 +175,12 @@ Vector<JSObject*> EventEmitter::getListeners(const Identifier& eventType)
 }
 
 // https://dom.spec.whatwg.org/#concept-event-listener-invoke
-void EventEmitter::fireEventListeners(const Identifier& eventType, const MarkedArgumentBuffer& arguments)
+bool EventEmitter::fireEventListeners(const Identifier& eventType, const MarkedArgumentBuffer& arguments)
 {
     auto* data = eventTargetData();
     if (!data)
-        return;
+        return false;
 
     auto* listenersVector = data->eventListenerMap.find(eventType);
     if (UNLIKELY(!listenersVector)) {
@@ -188,24 +188,25 @@ void EventEmitter::fireEventListeners(const Identifier& eventType, const MarkedA
             Ref<EventEmitter> protectedThis(*this);
             auto* thisObject = protectedThis->m_thisObject.get();
             if (!thisObject)
-                return;
+                return false;
             Bun__reportUnhandledError(thisObject->globalObject(), JSValue::encode(arguments.at(0)));
-            return;
+            return false;
         }
-        return;
+        return false;
     }
 
     bool prevFiringEventListeners = data->isFiringEventListeners;
     data->isFiringEventListeners = true;
-    innerInvokeEventListeners(eventType, *listenersVector, arguments);
+    auto fired = innerInvokeEventListeners(eventType, *listenersVector, arguments);
     data->isFiringEventListeners = prevFiringEventListeners;
+    return fired;
 }
 
 // Intentionally creates a copy of the listeners vector to avoid event listeners added after this point from being run.
 // Note that removal still has an effect due to the removed field in RegisteredEventListener.
 // https://dom.spec.whatwg.org/#concept-event-listener-inner-invoke
-void EventEmitter::innerInvokeEventListeners(const Identifier& eventType, SimpleEventListenerVector listeners, const MarkedArgumentBuffer& arguments)
+bool EventEmitter::innerInvokeEventListeners(const Identifier& eventType, SimpleEventListenerVector listeners, const MarkedArgumentBuffer& arguments)
 {
     Ref<EventEmitter> protectedThis(*this);
     ASSERT(!listeners.isEmpty());
@@ -216,6 +217,7 @@ void EventEmitter::innerInvokeEventListeners(const Identifier& eventType, Simple
     auto* thisObject = protectedThis->m_thisObject.get();
     JSC::JSValue thisValue = thisObject ? JSC::JSValue(thisObject) : JSC::jsUndefined();
 
+    auto fired = false;
     for (auto& registeredListener : listeners) {
         // The below code used to be in here, but it's WRONG. Even if a listener is removed,
@@ -244,6 +246,7 @@ void EventEmitter::innerInvokeEventListeners(const Identifier& eventType, Simple
         if (UNLIKELY(callData.type == JSC::CallData::Type::None))
             continue;
 
+        fired = true;
         WTF::NakedPtr<JSC::Exception> exceptionPtr;
         call(lexicalGlobalObject, jsFunction, callData, thisValue, arguments, exceptionPtr);
         auto* exception = exceptionPtr.get();
@@ -265,6 +268,8 @@ void EventEmitter::innerInvokeEventListeners(const Identifier& eventType, Simple
             }
         }
     }
+
+    return fired;
 }
 
 Vector<Identifier> EventEmitter::eventTypes()
diff --git a/src/bun.js/bindings/webcore/EventEmitter.h b/src/bun.js/bindings/webcore/EventEmitter.h
index 23687d43a1..e9f6aa167d 100644
--- a/src/bun.js/bindings/webcore/EventEmitter.h
+++ b/src/bun.js/bindings/webcore/EventEmitter.h
@@ -55,7 +55,7 @@ public:
     WEBCORE_EXPORT bool removeListener(const Identifier& eventType, EventListener&);
     WEBCORE_EXPORT bool removeAllListeners(const Identifier& eventType);
 
-    WEBCORE_EXPORT void emit(const Identifier&, const MarkedArgumentBuffer&);
+    WEBCORE_EXPORT bool emit(const Identifier&, const MarkedArgumentBuffer&);
     WEBCORE_EXPORT void uncaughtExceptionInEventHandler();
 
     WEBCORE_EXPORT Vector<Identifier> getEventNames();
@@ -76,7 +76,7 @@ public:
     Vector<Identifier> eventTypes();
     const SimpleEventListenerVector& eventListeners(const Identifier& eventType);
 
-    void fireEventListeners(const Identifier& eventName, const MarkedArgumentBuffer& arguments);
+    bool fireEventListeners(const Identifier& eventName, const MarkedArgumentBuffer& arguments);
     bool isFiringEventListeners() const;
 
     void invalidateJSEventListeners(JSC::JSObject*);
@@ -109,7 +109,7 @@ private:
     {
     }
 
-    void innerInvokeEventListeners(const Identifier&, SimpleEventListenerVector, const MarkedArgumentBuffer& arguments);
+    bool innerInvokeEventListeners(const Identifier&, SimpleEventListenerVector, const MarkedArgumentBuffer& arguments);
     void invalidateEventListenerRegions();
 
     EventEmitterData m_eventTargetData;
diff --git a/src/bun.js/bindings/webcore/JSDOMConstructorCallable.h b/src/bun.js/bindings/webcore/JSDOMConstructorCallable.h
new file mode 100644
index 0000000000..8a2650aa3c
--- /dev/null
+++ b/src/bun.js/bindings/webcore/JSDOMConstructorCallable.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2015, 2016 Canon Inc. All rights reserved.
+ * Copyright (C) 2016-2021 Apple Inc. All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#pragma once
+
+#include "JSDOMConstructorBase.h"
+
+namespace WebCore {
+
+template<typename JSClass> class JSDOMConstructorCallable final : public JSDOMConstructorBase {
+public:
+    using Base = JSDOMConstructorBase;
+
+    static JSDOMConstructorCallable* create(JSC::VM&, JSC::Structure*, JSDOMGlobalObject&);
+    static JSC::Structure* createStructure(JSC::VM&, JSC::JSGlobalObject&, JSC::JSValue prototype);
+
+    DECLARE_INFO;
+
+    // Must be defined for each specialization class.
+ static JSC::JSValue prototypeForStructure(JSC::VM&, const JSDOMGlobalObject&); + + // Must be defined for each specialization class. + static JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES construct(JSC::JSGlobalObject*, JSC::CallFrame*); + static JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES call(JSC::JSGlobalObject*, JSC::CallFrame*); + +private: + JSDOMConstructorCallable(JSC::VM& vm, JSC::Structure* structure) + : Base(vm, structure, construct, call) + { + } + + void finishCreation(JSC::VM&, JSDOMGlobalObject&); + + // Usually defined for each specialization class. + void initializeProperties(JSC::VM&, JSDOMGlobalObject&) {} +}; + +template inline JSDOMConstructorCallable* JSDOMConstructorCallable::create(JSC::VM& vm, JSC::Structure* structure, JSDOMGlobalObject& globalObject) +{ + JSDOMConstructorCallable* constructor = new (NotNull, JSC::allocateCell(vm)) JSDOMConstructorCallable(vm, structure); + constructor->finishCreation(vm, globalObject); + return constructor; +} + +template inline JSC::Structure* JSDOMConstructorCallable::createStructure(JSC::VM& vm, JSC::JSGlobalObject& globalObject, JSC::JSValue prototype) +{ + return JSC::Structure::create(vm, &globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info()); +} + +template inline void JSDOMConstructorCallable::finishCreation(JSC::VM& vm, JSDOMGlobalObject& globalObject) +{ + Base::finishCreation(vm); + ASSERT(inherits(info())); + initializeProperties(vm, globalObject); +} + +} // namespace WebCore diff --git a/src/bun.js/bindings/webcore/JSEventEmitter.cpp b/src/bun.js/bindings/webcore/JSEventEmitter.cpp index a112593d68..d091d8959c 100644 --- a/src/bun.js/bindings/webcore/JSEventEmitter.cpp +++ b/src/bun.js/bindings/webcore/JSEventEmitter.cpp @@ -7,7 +7,7 @@ #include "IDLTypes.h" #include "JSAddEventListenerOptions.h" #include "JSDOMBinding.h" -#include "JSDOMConstructor.h" +#include "JSDOMConstructorCallable.h" #include "JSDOMConvertBase.h" #include "JSDOMConvertBoolean.h" #include "JSDOMConvertDictionary.h" @@ -23,6 +23,7 @@ #include "JSEvent.h" #include "JSEventListener.h" #include "JSEventListenerOptions.h" +#include "JavaScriptCore/JSCJSValue.h" #include "ScriptExecutionContext.h" #include "WebCoreJSClientData.h" #include @@ -94,7 +95,7 @@ public: }; STATIC_ASSERT_ISO_SUBSPACE_SHARABLE(JSEventEmitterPrototype, JSEventEmitterPrototype::Base); -using JSEventEmitterDOMConstructor = JSDOMConstructor; +using JSEventEmitterDOMConstructor = JSDOMConstructorCallable; template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSEventEmitterDOMConstructor::construct(JSGlobalObject* lexicalGlobalObject, CallFrame* callFrame) { @@ -124,6 +125,38 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSEventEmitterDOMConstru } JSC_ANNOTATE_HOST_FUNCTION(JSEventEmitterDOMConstructorConstruct, JSEventEmitterDOMConstructor::construct); +template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSEventEmitterDOMConstructor::call(JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* callFrame) +{ + VM& vm = lexicalGlobalObject->vm(); + auto throwScope = DECLARE_THROW_SCOPE(vm); + auto* castedThis = jsCast(callFrame->jsCallee()); + ASSERT(castedThis); + auto* context = castedThis->scriptExecutionContext(); + if (UNLIKELY(!context)) { + return throwConstructorScriptExecutionContextUnavailableError(*lexicalGlobalObject, throwScope, "EventEmitter"_s); + } + const auto object = EventEmitter::create(*context); + if constexpr (IsExceptionOr) { + RETURN_IF_EXCEPTION(throwScope, {}); + } + JSValue maxListeners = 
castedThis->getIfPropertyExists(lexicalGlobalObject, JSC::Identifier::fromString(vm, "defaultMaxListeners"_s)); + RETURN_IF_EXCEPTION(throwScope, {}); + if (maxListeners && maxListeners.isUInt32()) { + object->setMaxListeners(maxListeners.toUInt32(lexicalGlobalObject)); + } + static_assert(TypeOrExceptionOrUnderlyingType::isRef); + auto jsValue = toJSNewlyCreated>(*lexicalGlobalObject, *castedThis->globalObject(), throwScope, object.copyRef()); + if constexpr (IsExceptionOr) { + RETURN_IF_EXCEPTION(throwScope, {}); + } + Structure* structure = JSEventEmitter::createStructure(vm, lexicalGlobalObject, jsValue); + JSEventEmitter* instance + = JSEventEmitter::create(structure, reinterpret_cast(lexicalGlobalObject), object.copyRef()); + RETURN_IF_EXCEPTION(throwScope, {}); + RELEASE_AND_RETURN(throwScope, JSValue::encode(instance)); +} +JSC_ANNOTATE_HOST_FUNCTION(JSEventEmitterDOMConstructorCall, JSEventEmitterDOMConstructor::call); + template<> const ClassInfo JSEventEmitterDOMConstructor::s_info = { "EventEmitter"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSEventEmitterDOMConstructor) }; template<> JSValue JSEventEmitterDOMConstructor::prototypeForStructure(JSC::VM& vm, const JSDOMGlobalObject& globalObject) diff --git a/src/bun.js/bindings/webcore/JSWorker.cpp b/src/bun.js/bindings/webcore/JSWorker.cpp index 37f1674202..57e9dd062b 100644 --- a/src/bun.js/bindings/webcore/JSWorker.cpp +++ b/src/bun.js/bindings/webcore/JSWorker.cpp @@ -140,12 +140,10 @@ template<> JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES JSWorkerDOMConstructor:: if (auto miniModeValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "smol"_s))) { options.bun.mini = miniModeValue.toBoolean(lexicalGlobalObject); - RETURN_IF_EXCEPTION(throwScope, {}); } if (auto ref = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "ref"_s))) { options.bun.unref = !ref.toBoolean(lexicalGlobalObject); - RETURN_IF_EXCEPTION(throwScope, {}); } if (auto preloadModulesValue = optionsObject->getIfPropertyExists(lexicalGlobalObject, Identifier::fromString(vm, "preload"_s))) { diff --git a/src/bun.js/bindings/webcore/JSWorkerOptions.cpp b/src/bun.js/bindings/webcore/JSWorkerOptions.cpp index 8103d23053..cb46c6f204 100644 --- a/src/bun.js/bindings/webcore/JSWorkerOptions.cpp +++ b/src/bun.js/bindings/webcore/JSWorkerOptions.cpp @@ -69,7 +69,7 @@ template<> WorkerOptions convertDictionary(JSGlobalObject& lexica // if (isNullOrUndefined) // typeValue = jsUndefined(); // else { - // typeValue = object->get(&lexicalGlobalObject, Identifier::fromString(vm, "type"_s)); + // typeValue = object->get(&lexicalGlobalObject, vm.propertyNames->type); // RETURN_IF_EXCEPTION(throwScope, { }); // } // if (!typeValue.isUndefined()) { diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig index 21e6393b89..d31753a19e 100644 --- a/src/bun.js/event_loop.zig +++ b/src/bun.js/event_loop.zig @@ -18,9 +18,9 @@ const ReadFileTask = WebCore.Blob.ReadFile.ReadFileTask; const WriteFileTask = WebCore.Blob.WriteFile.WriteFileTask; const napi_async_work = JSC.napi.napi_async_work; const FetchTasklet = Fetch.FetchTasklet; -const AWS = @import("../s3.zig").AWSCredentials; -const S3HttpSimpleTask = AWS.S3HttpSimpleTask; -const S3HttpDownloadStreamingTask = AWS.S3HttpDownloadStreamingTask; +const S3 = bun.S3; +const S3HttpSimpleTask = S3.S3HttpSimpleTask; +const S3HttpDownloadStreamingTask = S3.S3HttpDownloadStreamingTask; const JSValue = JSC.JSValue; const js = JSC.C; diff --git 
a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 36ed5c61b2..4af0d587b4 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -885,6 +885,7 @@ pub const VirtualMachine = struct { onUnhandledRejectionExceptionList: ?*ExceptionList = null, unhandled_error_counter: usize = 0, is_handling_uncaught_exception: bool = false, + exit_on_uncaught_exception: bool = false, modules: ModuleLoader.AsyncModule.Queue = .{}, aggressive_garbage_collection: GCLevel = GCLevel.none, @@ -1190,6 +1191,10 @@ pub const VirtualMachine = struct { extern fn Bun__handleUnhandledRejection(*JSC.JSGlobalObject, reason: JSC.JSValue, promise: JSC.JSValue) c_int; extern fn Bun__Process__exit(*JSC.JSGlobalObject, code: c_int) noreturn; + export fn Bun__VirtualMachine__exitDuringUncaughtException(this: *JSC.VirtualMachine) void { + this.exit_on_uncaught_exception = true; + } + pub fn unhandledRejection(this: *JSC.VirtualMachine, globalObject: *JSC.JSGlobalObject, reason: JSC.JSValue, promise: JSC.JSValue) bool { if (this.isShuttingDown()) { Output.debugWarn("unhandledRejection during shutdown.", .{}); @@ -1227,6 +1232,11 @@ pub const VirtualMachine = struct { Bun__Process__exit(globalObject, 7); @panic("Uncaught exception while handling uncaught exception"); } + if (this.exit_on_uncaught_exception) { + this.runErrorHandler(err, null); + Bun__Process__exit(globalObject, 1); + @panic("made it past Bun__Process__exit"); + } this.is_handling_uncaught_exception = true; defer this.is_handling_uncaught_exception = false; const handled = Bun__handleUncaughtException(globalObject, err.toError() orelse err, if (is_rejection) 1 else 0) > 0; @@ -2818,6 +2828,11 @@ pub const VirtualMachine = struct { pub const main_file_name: string = "bun:main"; pub fn drainMicrotasks(this: *VirtualMachine) void { + if (comptime Environment.isDebug) { + if (this.eventLoop().debug.is_inside_tick_queue) { + @panic("Calling drainMicrotasks from inside the event loop tick queue is a bug in your code. 
Please fix your bug."); + } + } this.eventLoop().drainMicrotasks(); } @@ -3954,11 +3969,11 @@ pub const VirtualMachine = struct { .observable = false, .only_non_index_properties = true, }); - var iterator = Iterator.init(this.global, error_instance); + var iterator = try Iterator.init(this.global, error_instance); defer iterator.deinit(); const longest_name = @min(iterator.getLongestPropertyName(), 10); var is_first_property = true; - while (iterator.next() orelse iterator.getCodeProperty()) |field| { + while ((try iterator.next()) orelse iterator.getCodeProperty()) |field| { const value = iterator.value; if (field.eqlComptime("message") or field.eqlComptime("name") or field.eqlComptime("stack")) { continue; @@ -4090,11 +4105,7 @@ pub const VirtualMachine = struct { fn printErrorNameAndMessage(_: *VirtualMachine, name: String, message: String, comptime Writer: type, writer: Writer, comptime allow_ansi_color: bool) !void { if (!name.isEmpty() and !message.isEmpty()) { const display_name: String = if (name.eqlComptime("Error")) String.init("error") else name; - - try writer.print(comptime Output.prettyFmt("{}: {s}\n", allow_ansi_color), .{ - display_name, - message, - }); + try writer.print(comptime Output.prettyFmt("{}: {s}\n", allow_ansi_color), .{ display_name, message }); } else if (!name.isEmpty()) { if (!name.hasPrefixComptime("error")) { try writer.print(comptime Output.prettyFmt("error: {}\n", allow_ansi_color), .{name}); diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index d650424173..b2fac678a6 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -2596,6 +2596,7 @@ pub const ModuleLoader = struct { .@"node:stream/consumers" => return jsSyntheticModule(.@"node:stream/consumers", specifier), .@"node:stream/promises" => return jsSyntheticModule(.@"node:stream/promises", specifier), .@"node:stream/web" => return jsSyntheticModule(.@"node:stream/web", specifier), + .@"node:test" => return jsSyntheticModule(.@"node:test", specifier), .@"node:timers" => return jsSyntheticModule(.@"node:timers", specifier), .@"node:timers/promises" => return jsSyntheticModule(.@"node:timers/promises", specifier), .@"node:tls" => return jsSyntheticModule(.@"node:tls", specifier), @@ -2799,6 +2800,7 @@ pub const HardcodedModule = enum { @"node:stream/promises", @"node:stream/web", @"node:string_decoder", + @"node:test", @"node:timers", @"node:timers/promises", @"node:tls", @@ -2849,6 +2851,8 @@ pub const HardcodedModule = enum { .{ "node-fetch", HardcodedModule.@"node-fetch" }, .{ "isomorphic-fetch", HardcodedModule.@"isomorphic-fetch" }, + .{ "node:test", HardcodedModule.@"node:test" }, + .{ "assert", HardcodedModule.@"node:assert" }, .{ "assert/strict", HardcodedModule.@"node:assert/strict" }, .{ "async_hooks", HardcodedModule.@"node:async_hooks" }, @@ -2920,7 +2924,7 @@ pub const HardcodedModule = enum { pub const Aliases = struct { // Used by both Bun and Node. 
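On the alias tables that follow (and the `node:test` entries above): the tables change from anonymous tuple literals to arrays with an explicit element type, which keeps the comptime `++` concatenation that feeds `bun.ComptimeStringMap` well-typed as entries move between tables. In miniature, self-contained (`Alias` is a stand-in, and the stated motivation is an inference):

```zig
const Alias = struct { path: []const u8 };
const Pair = struct { []const u8, Alias }; // a tuple type, as in the diff

const common_alias_kvs = [_]Pair{
    .{ "node:assert", .{ .path = "assert" } },
    .{ "node:test", .{ .path = "node:test" } }, // moved into the common table
};
const node_alias_kvs = [_]Pair{
    .{ "node:inspector/promises", .{ .path = "inspector/promises" } },
};

// Arrays of one concrete element type concatenate cleanly at comptime.
const node_aliases = common_alias_kvs ++ node_alias_kvs;
```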
- const common_alias_kvs = .{ + const common_alias_kvs = [_]struct { string, Alias }{ .{ "node:assert", .{ .path = "assert" } }, .{ "node:assert/strict", .{ .path = "assert/strict" } }, .{ "node:async_hooks", .{ .path = "async_hooks" } }, @@ -2960,6 +2964,7 @@ pub const HardcodedModule = enum { .{ "node:stream/promises", .{ .path = "stream/promises" } }, .{ "node:stream/web", .{ .path = "stream/web" } }, .{ "node:string_decoder", .{ .path = "string_decoder" } }, + .{ "node:test", .{ .path = "node:test" } }, .{ "node:timers", .{ .path = "timers" } }, .{ "node:timers/promises", .{ .path = "timers/promises" } }, .{ "node:tls", .{ .path = "tls" } }, @@ -2974,6 +2979,22 @@ pub const HardcodedModule = enum { .{ "node:worker_threads", .{ .path = "worker_threads" } }, .{ "node:zlib", .{ .path = "zlib" } }, + // These are returned in builtinModules, but probably not many packages use them so we will just alias them. + .{ "node:_http_agent", .{ .path = "http" } }, + .{ "node:_http_client", .{ .path = "http" } }, + .{ "node:_http_common", .{ .path = "http" } }, + .{ "node:_http_incoming", .{ .path = "http" } }, + .{ "node:_http_outgoing", .{ .path = "http" } }, + .{ "node:_http_server", .{ .path = "http" } }, + .{ "node:_stream_duplex", .{ .path = "stream" } }, + .{ "node:_stream_passthrough", .{ .path = "stream" } }, + .{ "node:_stream_readable", .{ .path = "stream" } }, + .{ "node:_stream_transform", .{ .path = "stream" } }, + .{ "node:_stream_writable", .{ .path = "stream" } }, + .{ "node:_stream_wrap", .{ .path = "stream" } }, + .{ "node:_tls_wrap", .{ .path = "tls" } }, + .{ "node:_tls_common", .{ .path = "tls" } }, + .{ "assert", .{ .path = "assert" } }, .{ "assert/strict", .{ .path = "assert/strict" } }, .{ "async_hooks", .{ .path = "async_hooks" } }, @@ -3013,6 +3034,7 @@ pub const HardcodedModule = enum { .{ "stream/promises", .{ .path = "stream/promises" } }, .{ "stream/web", .{ .path = "stream/web" } }, .{ "string_decoder", .{ .path = "string_decoder" } }, + // .{ "test", .{ .path = "test" } }, .{ "timers", .{ .path = "timers" } }, .{ "timers/promises", .{ .path = "timers/promises" } }, .{ "tls", .{ .path = "tls" } }, @@ -3055,7 +3077,7 @@ pub const HardcodedModule = enum { .{ "internal/test/binding", .{ .path = "internal/test/binding" } }, }; - const bun_extra_alias_kvs = .{ + const bun_extra_alias_kvs = [_]struct { string, Alias }{ .{ "bun", .{ .path = "bun", .tag = .bun } }, .{ "bun:test", .{ .path = "bun:test", .tag = .bun_test } }, .{ "bun:ffi", .{ .path = "bun:ffi" } }, @@ -3085,10 +3107,9 @@ pub const HardcodedModule = enum { .{ "abort-controller/polyfill", .{ .path = "abort-controller" } }, }; - const node_alias_kvs = .{ + const node_alias_kvs = [_]struct { string, Alias }{ .{ "inspector/promises", .{ .path = "inspector/promises" } }, .{ "node:inspector/promises", .{ .path = "inspector/promises" } }, - .{ "node:test", .{ .path = "node:test" } }, }; const NodeAliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ node_alias_kvs); diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 72232be196..302e11fd5e 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -231,14 +231,14 @@ pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { }; } - pub inline fn getErrno(this: @This()) posix.E { + pub fn getErrno(this: @This()) posix.E { return switch (this) { .result => posix.E.SUCCESS, .err => |e| @enumFromInt(e.errno), }; } - pub inline fn errnoSys(rc: anytype, syscall: Syscall.Tag) ?@This() { + pub fn errnoSys(rc: anytype, 
syscall: Syscall.Tag) ?@This() { if (comptime Environment.isWindows) { if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { if (rc != 0) return null; @@ -256,7 +256,7 @@ pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { }; } - pub inline fn errno(err: anytype, syscall: Syscall.Tag) @This() { + pub fn errno(err: anytype, syscall: Syscall.Tag) @This() { return @This(){ // always truncate .err = .{ @@ -266,7 +266,7 @@ pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { }; } - pub inline fn errnoSysFd(rc: anytype, syscall: Syscall.Tag, fd: bun.FileDescriptor) ?@This() { + pub fn errnoSysFd(rc: anytype, syscall: Syscall.Tag, fd: bun.FileDescriptor) ?@This() { if (comptime Environment.isWindows) { if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { if (rc != 0) return null; @@ -285,7 +285,7 @@ pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { }; } - pub inline fn errnoSysP(rc: anytype, syscall: Syscall.Tag, path: anytype) ?@This() { + pub fn errnoSysP(rc: anytype, syscall: Syscall.Tag, path: anytype) ?@This() { if (bun.meta.Item(@TypeOf(path)) == u16) { @compileError("Do not pass WString path to errnoSysP, it needs the path encoded as utf8"); } @@ -306,6 +306,49 @@ pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { }, }; } + + pub fn errnoSysFP(rc: anytype, syscall: Syscall.Tag, fd: bun.FileDescriptor, path: anytype) ?@This() { + if (comptime Environment.isWindows) { + if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { + if (rc != 0) return null; + } + } + return switch (Syscall.getErrno(rc)) { + .SUCCESS => null, + else => |e| @This(){ + // Always truncate + .err = .{ + .errno = translateToErrInt(e), + .syscall = syscall, + .fd = fd, + .path = bun.asByteSlice(path), + }, + }, + }; + } + + pub fn errnoSysPD(rc: anytype, syscall: Syscall.Tag, path: anytype, dest: anytype) ?@This() { + if (bun.meta.Item(@TypeOf(path)) == u16) { + @compileError("Do not pass WString path to errnoSysPD, it needs the path encoded as utf8"); + } + if (comptime Environment.isWindows) { + if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { + if (rc != 0) return null; + } + } + return switch (Syscall.getErrno(rc)) { + .SUCCESS => null, + else => |e| @This(){ + // Always truncate + .err = .{ + .errno = translateToErrInt(e), + .syscall = syscall, + .path = bun.asByteSlice(path), + .dest = bun.asByteSlice(dest), + }, + }, + }; + } }; } @@ -2081,34 +2124,34 @@ pub const Process = struct { if (to.len == 0) { return globalObject.throwInvalidArguments("Expected path to be a non-empty string", .{}); } + const vm = globalObject.bunVM(); + const fs = vm.transpiler.fs; var buf: bun.PathBuffer = undefined; - const slice = to.sliceZBuf(&buf) catch { - return globalObject.throw("Invalid path", .{}); - }; + const slice = to.sliceZBuf(&buf) catch return globalObject.throw("Invalid path", .{}); - switch (Syscall.chdir(slice)) { + switch (Syscall.chdir(fs.top_level_dir, slice)) { .result => { // When we update the cwd from JS, we have to update the bundler's version as well // However, this might be called many times in a row, so we use a pre-allocated buffer // that way we don't have to worry about garbage collector - const fs = JSC.VirtualMachine.get().transpiler.fs; const into_cwd_buf = switch (bun.sys.getcwd(&buf)) { .result => |r| r, .err => |err| { - _ = Syscall.chdir(@as([:0]const u8, @ptrCast(fs.top_level_dir))); + _ = Syscall.chdir(fs.top_level_dir, fs.top_level_dir); return 
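The new `errnoSysPD` above attaches both `path` and `dest` to a failed two-path syscall, feeding the `SystemError.dest` plumbing from earlier in this diff. A hedged sketch of a caller inside the same module; the `rename` wrapper and the `.rename` tag are assumptions, not code from this change:

```zig
pub fn rename(from: [:0]const u8, to: [:0]const u8) Maybe(void) {
    const rc = std.c.rename(from.ptr, to.ptr);
    // Null means success; otherwise the error carries errno, the syscall
    // tag, path = from, and the new dest = to.
    if (Maybe(void).errnoSysPD(rc, .rename, from, to)) |err| return err;
    return .{ .result = {} };
}
```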
globalObject.throwValue(err.toJSC(globalObject)); }, }; @memcpy(fs.top_level_dir_buf[0..into_cwd_buf.len], into_cwd_buf); - fs.top_level_dir = fs.top_level_dir_buf[0..into_cwd_buf.len]; + fs.top_level_dir_buf[into_cwd_buf.len] = 0; + fs.top_level_dir = fs.top_level_dir_buf[0..into_cwd_buf.len :0]; const len = fs.top_level_dir.len; // Ensure the path ends with a slash if (fs.top_level_dir_buf[len - 1] != std.fs.path.sep) { fs.top_level_dir_buf[len] = std.fs.path.sep; fs.top_level_dir_buf[len + 1] = 0; - fs.top_level_dir = fs.top_level_dir_buf[0 .. len + 1]; + fs.top_level_dir = fs.top_level_dir_buf[0 .. len + 1 :0]; } const withoutTrailingSlash = if (Environment.isWindows) strings.withoutTrailingSlashWindowsPath else strings.withoutTrailingSlash; var str = bun.String.createUTF8(withoutTrailingSlash(fs.top_level_dir)); @@ -2193,6 +2236,31 @@ pub const Process = struct { pub export const Bun__versions_zstd: [*:0]const u8 = bun.Global.versions.zstd; }; +pub const PathOrBlob = union(enum) { + path: JSC.Node.PathOrFileDescriptor, + blob: Blob, + + const Blob = JSC.WebCore.Blob; + + pub fn fromJSNoCopy(ctx: *JSC.JSGlobalObject, args: *JSC.Node.ArgumentsSlice) bun.JSError!PathOrBlob { + if (try JSC.Node.PathOrFileDescriptor.fromJS(ctx, args, bun.default_allocator)) |path| { + return PathOrBlob{ + .path = path, + }; + } + + const arg = args.nextEat() orelse { + return ctx.throwInvalidArgumentTypeValue("destination", "path, file descriptor, or Blob", .undefined); + }; + if (arg.as(Blob)) |blob| { + return PathOrBlob{ + .blob = blob.*, + }; + } + return ctx.throwInvalidArgumentTypeValue("destination", "path, file descriptor, or Blob", arg); + } +}; + comptime { std.testing.refAllDecls(Process); } diff --git a/src/bun.js/node/util/parse_args.zig b/src/bun.js/node/util/parse_args.zig index 221823fcb1..3c1e61e160 100644 --- a/src/bun.js/node/util/parse_args.zig +++ b/src/bun.js/node/util/parse_args.zig @@ -300,13 +300,13 @@ fn storeOption(globalThis: *JSGlobalObject, option_name: ValueRef, option_value: fn parseOptionDefinitions(globalThis: *JSGlobalObject, options_obj: JSValue, option_definitions: *std.ArrayList(OptionDefinition)) bun.JSError!void { try validateObject(globalThis, options_obj, "options", .{}, .{}); - var iter = JSC.JSPropertyIterator(.{ + var iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(globalThis, options_obj); defer iter.deinit(); - while (iter.next()) |long_option| { + while (try iter.next()) |long_option| { var option = OptionDefinition{ .long_name = String.init(long_option), }; diff --git a/src/bun.js/test/expect.zig b/src/bun.js/test/expect.zig index a9ef928354..f90c8bbd15 100644 --- a/src/bun.js/test/expect.zig +++ b/src/bun.js/test/expect.zig @@ -2882,7 +2882,7 @@ pub const Expect = struct { }.anythingInIterator); pass = !any_properties_in_iterator; } else { - var props_iter = JSC.JSPropertyIterator(.{ + var props_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, @@ -4525,13 +4525,13 @@ pub const Expect = struct { const matchers_to_register = args[0]; { - var iter = JSC.JSPropertyIterator(.{ + var iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(globalThis, matchers_to_register); defer iter.deinit(); - while (iter.next()) |*matcher_name| { + while (try iter.next()) |*matcher_name| { const matcher_fn: JSValue = iter.value; if (!matcher_fn.jsType().isFunction()) { @@ -4664,7 +4664,6 @@ pub const Expect = struct { if (result.isObject()) { if (try 
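On the new `PathOrBlob` union above: `fromJSNoCopy` consumes a path or file-descriptor argument if one is present, otherwise expects a `Blob`, and throws an invalid-argument-type error for anything else. A sketch of a consumer; the surrounding function is hypothetical:

```zig
fn resolveDestination(globalThis: *JSC.JSGlobalObject, args: *JSC.Node.ArgumentsSlice) bun.JSError!void {
    var destination = try JSC.Node.PathOrBlob.fromJSNoCopy(globalThis, args);
    switch (destination) {
        .path => |*path_or_fd| {
            // A path or fd argument was consumed from args.
            _ = path_or_fd;
        },
        .blob => |*blob| {
            // Note: fromJSNoCopy copies the Blob struct, not its data.
            _ = blob;
        },
    }
}
```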
result.get(globalThis, "pass")) |pass_value| { pass = pass_value.toBoolean(); - if (globalThis.hasException()) return false; if (result.fastGet(globalThis, .message)) |message_value| { if (!message_value.isString() and !message_value.isCallable(globalThis.vm())) { diff --git a/src/bun.js/test/pretty_format.zig b/src/bun.js/test/pretty_format.zig index 2934577d49..ac9b1fdcd7 100644 --- a/src/bun.js/test/pretty_format.zig +++ b/src/bun.js/test/pretty_format.zig @@ -95,7 +95,7 @@ pub const JestPrettyFormat = struct { comptime Writer: type, writer: Writer, options: FormatOptions, - ) void { + ) bun.JSError!void { var fmt: JestPrettyFormat.Formatter = undefined; defer { if (fmt.map_node) |node| { @@ -123,7 +123,7 @@ pub const JestPrettyFormat = struct { if (level == .Error) { unbuffered_writer.writeAll(comptime Output.prettyFmt("", true)) catch unreachable; } - fmt.format( + try fmt.format( tag, @TypeOf(unbuffered_writer), unbuffered_writer, @@ -135,7 +135,7 @@ pub const JestPrettyFormat = struct { unbuffered_writer.writeAll(comptime Output.prettyFmt("", true)) catch unreachable; } } else { - fmt.format( + try fmt.format( tag, @TypeOf(unbuffered_writer), unbuffered_writer, @@ -152,7 +152,7 @@ pub const JestPrettyFormat = struct { } } if (options.enable_colors) { - fmt.format( + try fmt.format( tag, Writer, writer, @@ -161,7 +161,7 @@ pub const JestPrettyFormat = struct { true, ); } else { - fmt.format( + try fmt.format( tag, Writer, writer, @@ -206,7 +206,7 @@ pub const JestPrettyFormat = struct { tag.tag = .StringPossiblyFormatted; } - fmt.format(tag, Writer, writer, this_value, global, true); + try fmt.format(tag, Writer, writer, this_value, global, true); if (fmt.remaining_values.len == 0) { break; } @@ -228,7 +228,7 @@ pub const JestPrettyFormat = struct { tag.tag = .StringPossiblyFormatted; } - fmt.format(tag, Writer, writer, this_value, global, false); + try fmt.format(tag, Writer, writer, this_value, global, false); if (fmt.remaining_values.len == 0) break; @@ -574,13 +574,13 @@ pub const JestPrettyFormat = struct { const next_value = this.remaining_values[0]; this.remaining_values = this.remaining_values[1..]; switch (token) { - Tag.String => this.printAs(Tag.String, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors) catch {}, // TODO: - Tag.Double => this.printAs(Tag.Double, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors) catch {}, // TODO: - Tag.Object => this.printAs(Tag.Object, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors) catch {}, // TODO: - Tag.Integer => this.printAs(Tag.Integer, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors) catch {}, // TODO: + Tag.String => this.printAs(Tag.String, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors) catch return, + Tag.Double => this.printAs(Tag.Double, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors) catch return, + Tag.Object => this.printAs(Tag.Object, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors) catch return, + Tag.Integer => this.printAs(Tag.Integer, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors) catch return, // undefined is overloaded to mean the '%o" field - Tag.Undefined => this.format(Tag.get(next_value, globalThis), Writer, writer_, next_value, globalThis, enable_ansi_colors), + Tag.Undefined => this.format(Tag.get(next_value, globalThis), Writer, writer_, next_value, globalThis, enable_ansi_colors) catch return, else => unreachable, } @@ -680,9 +680,10 @@ pub 
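The parse_args.zig and expect.zig hunks above move `JSC.JSPropertyIterator` to a fallible API: `init` and `next` now return error unions, so enumeration that can run JS (getters, Proxy traps) propagates the exception instead of swallowing it. The resulting idiom, as used in those hunks:

```zig
var iter = try JSC.JSPropertyIterator(.{
    .skip_empty_name = false,
    .include_value = true,
}).init(globalThis, object);
defer iter.deinit();

while (try iter.next()) |prop_name| {
    const value = iter.value; // populated because include_value = true
    _ = prop_name;
    _ = value;
}
```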
const JestPrettyFormat = struct { writer: Writer, pub fn forEach(_: [*c]JSC.VM, globalObject: *JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void { var this: *@This() = bun.cast(*@This(), ctx orelse return); + if (this.formatter.failed) return; const key = JSC.JSObject.getIndex(nextValue, globalObject, 0); const value = JSC.JSObject.getIndex(nextValue, globalObject, 1); - this.formatter.writeIndent(Writer, this.writer) catch unreachable; + this.formatter.writeIndent(Writer, this.writer) catch return; const key_tag = Tag.get(key, globalObject); this.formatter.format( @@ -692,8 +693,8 @@ pub const JestPrettyFormat = struct { key, this.formatter.globalThis, enable_ansi_colors, - ); - this.writer.writeAll(" => ") catch unreachable; + ) catch return; + this.writer.writeAll(" => ") catch return; const value_tag = Tag.get(value, globalObject); this.formatter.format( value_tag, @@ -702,9 +703,9 @@ pub const JestPrettyFormat = struct { value, this.formatter.globalThis, enable_ansi_colors, - ); - this.formatter.printComma(Writer, this.writer, enable_ansi_colors) catch unreachable; - this.writer.writeAll("\n") catch unreachable; + ) catch return; + this.formatter.printComma(Writer, this.writer, enable_ansi_colors) catch return; + this.writer.writeAll("\n") catch return; } }; } @@ -715,7 +716,8 @@ pub const JestPrettyFormat = struct { writer: Writer, pub fn forEach(_: [*c]JSC.VM, globalObject: *JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void { var this: *@This() = bun.cast(*@This(), ctx orelse return); - this.formatter.writeIndent(Writer, this.writer) catch {}; + if (this.formatter.failed) return; + this.formatter.writeIndent(Writer, this.writer) catch return; const key_tag = Tag.get(nextValue, globalObject); this.formatter.format( key_tag, @@ -724,10 +726,9 @@ pub const JestPrettyFormat = struct { nextValue, this.formatter.globalThis, enable_ansi_colors, - ); - - this.formatter.printComma(Writer, this.writer, enable_ansi_colors) catch unreachable; - this.writer.writeAll("\n") catch unreachable; + ) catch return; + this.formatter.printComma(Writer, this.writer, enable_ansi_colors) catch return; + this.writer.writeAll("\n") catch return; } }; } @@ -790,6 +791,8 @@ pub const JestPrettyFormat = struct { var ctx: *@This() = bun.cast(*@This(), ctx_ptr orelse return); var this = ctx.formatter; const writer_ = ctx.writer; + if (this.failed) return; + var writer = WrappedWriter(Writer){ .ctx = writer_, .failed = false, @@ -801,14 +804,14 @@ pub const JestPrettyFormat = struct { if (ctx.i == 0) { handleFirstProperty(ctx, globalThis, ctx.parent); } else { - this.printComma(Writer, writer_, enable_ansi_colors) catch unreachable; + this.printComma(Writer, writer_, enable_ansi_colors) catch return; } defer ctx.i += 1; if (ctx.i > 0) { if (ctx.always_newline or this.always_newline_scope or this.goodTimeForANewLine()) { writer.writeAll("\n"); - this.writeIndent(Writer, writer_) catch {}; + this.writeIndent(Writer, writer_) catch return; this.resetLine(); } else { this.estimated_line_length += 1; @@ -880,7 +883,7 @@ pub const JestPrettyFormat = struct { } } - this.format(tag, Writer, ctx.writer, value, globalThis, enable_ansi_colors); + this.format(tag, Writer, ctx.writer, value, globalThis, enable_ansi_colors) catch return; if (tag.cell.isStringLike()) { if (comptime enable_ansi_colors) { @@ -899,7 +902,7 @@ pub const JestPrettyFormat = struct { value: JSValue, jsType: JSValue.JSType, comptime enable_ansi_colors: bool, - ) error{}!void { + ) bun.JSError!void { if 
(this.failed) return; var writer = WrappedWriter(Writer){ .ctx = writer_, .estimated_line_length = &this.estimated_line_length }; @@ -1174,7 +1177,7 @@ pub const JestPrettyFormat = struct { this.writeIndent(Writer, writer_) catch unreachable; this.addForNewLine(1); - this.format(tag, Writer, writer_, element, this.globalThis, enable_ansi_colors); + try this.format(tag, Writer, writer_, element, this.globalThis, enable_ansi_colors); if (tag.cell.isStringLike()) { if (comptime enable_ansi_colors) { @@ -1197,7 +1200,7 @@ pub const JestPrettyFormat = struct { const element = JSValue.fromRef(CAPI.JSObjectGetPropertyAtIndex(this.globalThis, ref, i, null)); const tag = Tag.get(element, this.globalThis); - this.format(tag, Writer, writer_, element, this.globalThis, enable_ansi_colors); + try this.format(tag, Writer, writer_, element, this.globalThis, enable_ansi_colors); if (tag.cell.isStringLike()) { if (comptime enable_ansi_colors) { @@ -1223,16 +1226,42 @@ pub const JestPrettyFormat = struct { }, .Private => { if (value.as(JSC.WebCore.Response)) |response| { - response.writeFormat(Formatter, this, writer_, enable_ansi_colors) catch {}; - return; + response.writeFormat(Formatter, this, writer_, enable_ansi_colors) catch |err| { + this.failed = true; + // TODO: make this better + if (!this.globalThis.hasException()) { + return this.globalThis.throwError(err, "failed to print Response"); + } + return error.JSError; + }; } else if (value.as(JSC.WebCore.Request)) |request| { - request.writeFormat(Formatter, this, writer_, enable_ansi_colors) catch {}; + request.writeFormat(Formatter, this, writer_, enable_ansi_colors) catch |err| { + this.failed = true; + // TODO: make this better + if (!this.globalThis.hasException()) { + return this.globalThis.throwError(err, "failed to print Request"); + } + return error.JSError; + }; return; } else if (value.as(JSC.API.BuildArtifact)) |build| { - build.writeFormat(Formatter, this, writer_, enable_ansi_colors) catch {}; - return; + build.writeFormat(Formatter, this, writer_, enable_ansi_colors) catch |err| { + this.failed = true; + // TODO: make this better + if (!this.globalThis.hasException()) { + return this.globalThis.throwError(err, "failed to print BuildArtifact"); + } + return error.JSError; + }; } else if (value.as(JSC.WebCore.Blob)) |blob| { - blob.writeFormat(Formatter, this, writer_, enable_ansi_colors) catch {}; + blob.writeFormat(Formatter, this, writer_, enable_ansi_colors) catch |err| { + this.failed = true; + // TODO: make this better + if (!this.globalThis.hasException()) { + return this.globalThis.throwError(err, "failed to print Blob"); + } + return error.JSError; + }; return; } else if (value.as(JSC.DOMFormData) != null) { const toJSONFunction = value.get_unsafe(this.globalThis, "toJSON").?; @@ -1244,7 +1273,7 @@ pub const JestPrettyFormat = struct { .Object, Writer, writer_, - toJSONFunction.call(this.globalThis, value, &.{}) catch |err| this.globalThis.takeException(err), + try toJSONFunction.call(this.globalThis, value, &.{}), .Object, enable_ansi_colors, ); @@ -1434,7 +1463,7 @@ pub const JestPrettyFormat = struct { ); const tag = Tag.get(message_value, this.globalThis); - this.format(tag, Writer, writer_, message_value, this.globalThis, enable_ansi_colors); + try this.format(tag, Writer, writer_, message_value, this.globalThis, enable_ansi_colors); writer.writeAll(", \n"); } } @@ -1450,9 +1479,9 @@ pub const JestPrettyFormat = struct { const tag = Tag.get(data, this.globalThis); if (tag.cell.isStringLike()) { - this.format(tag, Writer, 
writer_, data, this.globalThis, enable_ansi_colors); + try this.format(tag, Writer, writer_, data, this.globalThis, enable_ansi_colors); } else { - this.format(tag, Writer, writer_, data, this.globalThis, enable_ansi_colors); + try this.format(tag, Writer, writer_, data, this.globalThis, enable_ansi_colors); } writer.writeAll(", \n"); }, @@ -1465,7 +1494,7 @@ pub const JestPrettyFormat = struct { ); const tag = Tag.get(data, this.globalThis); - this.format(tag, Writer, writer_, data, this.globalThis, enable_ansi_colors); + try this.format(tag, Writer, writer_, data, this.globalThis, enable_ansi_colors); writer.writeAll("\n"); } }, @@ -1530,7 +1559,7 @@ pub const JestPrettyFormat = struct { this.quote_strings = true; defer this.quote_strings = old_quote_strings; - this.format(Tag.get(key_value, this.globalThis), Writer, writer_, key_value, this.globalThis, enable_ansi_colors); + try this.format(Tag.get(key_value, this.globalThis), Writer, writer_, key_value, this.globalThis, enable_ansi_colors); needs_space = true; } @@ -1541,7 +1570,7 @@ pub const JestPrettyFormat = struct { this.quote_strings = true; defer this.quote_strings = prev_quote_strings; - var props_iter = JSC.JSPropertyIterator(.{ + var props_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = true, .include_value = true, @@ -1555,7 +1584,7 @@ pub const JestPrettyFormat = struct { defer this.indent -|= 1; const count_without_children = props_iter.len - @as(usize, @intFromBool(children_prop != null)); - while (props_iter.next()) |prop| { + while (try props_iter.next()) |prop| { if (prop.eqlComptime("children")) continue; @@ -1578,7 +1607,7 @@ pub const JestPrettyFormat = struct { } } - this.format(tag, Writer, writer_, property_value, this.globalThis, enable_ansi_colors); + try this.format(tag, Writer, writer_, property_value, this.globalThis, enable_ansi_colors); if (tag.cell.isStringLike()) { if (comptime enable_ansi_colors) { @@ -1641,7 +1670,7 @@ pub const JestPrettyFormat = struct { this.indent += 1; this.writeIndent(Writer, writer_) catch unreachable; defer this.indent -|= 1; - this.format(Tag.get(children, this.globalThis), Writer, writer_, children, this.globalThis, enable_ansi_colors); + try this.format(Tag.get(children, this.globalThis), Writer, writer_, children, this.globalThis, enable_ansi_colors); } writer.writeAll("\n"); @@ -1664,7 +1693,7 @@ pub const JestPrettyFormat = struct { var j: usize = 0; while (j < length) : (j += 1) { const child = JSC.JSObject.getIndex(children, this.globalThis, @as(u32, @intCast(j))); - this.format(Tag.get(child, this.globalThis), Writer, writer_, child, this.globalThis, enable_ansi_colors); + try this.format(Tag.get(child, this.globalThis), Writer, writer_, child, this.globalThis, enable_ansi_colors); if (j + 1 < length) { writer.writeAll("\n"); this.writeIndent(Writer, writer_) catch unreachable; @@ -1949,7 +1978,7 @@ pub const JestPrettyFormat = struct { } } - pub fn format(this: *JestPrettyFormat.Formatter, result: Tag.Result, comptime Writer: type, writer: Writer, value: JSValue, globalThis: *JSGlobalObject, comptime enable_ansi_colors: bool) void { + pub fn format(this: *JestPrettyFormat.Formatter, result: Tag.Result, comptime Writer: type, writer: Writer, value: JSValue, globalThis: *JSGlobalObject, comptime enable_ansi_colors: bool) bun.JSError!void { if (comptime is_bindgen) { return; } diff --git a/src/bun.js/webcore.zig b/src/bun.js/webcore.zig index 3484590697..8699b4b8a7 100644 --- a/src/bun.js/webcore.zig +++ b/src/bun.js/webcore.zig @@ -2,6 +2,8 @@ pub usingnamespace 
@import("./webcore/response.zig"); pub usingnamespace @import("./webcore/encoding.zig"); pub usingnamespace @import("./webcore/streams.zig"); pub usingnamespace @import("./webcore/blob.zig"); +pub usingnamespace @import("./webcore/S3Stat.zig"); +pub usingnamespace @import("./webcore/S3Client.zig"); pub usingnamespace @import("./webcore/request.zig"); pub usingnamespace @import("./webcore/body.zig"); pub const ObjectURLRegistry = @import("./webcore/ObjectURLRegistry.zig"); diff --git a/src/bun.js/webcore/S3Client.zig b/src/bun.js/webcore/S3Client.zig new file mode 100644 index 0000000000..37b1799cb4 --- /dev/null +++ b/src/bun.js/webcore/S3Client.zig @@ -0,0 +1,298 @@ +const bun = @import("root").bun; +const JSC = bun.JSC; +const JSValue = JSC.JSValue; +const Blob = JSC.WebCore.Blob; +const PathOrBlob = JSC.Node.PathOrBlob; +const ZigString = JSC.ZigString; +const Method = bun.http.Method; +const S3File = @import("./S3File.zig"); +const S3Credentials = bun.S3.S3Credentials; + +pub fn writeFormatCredentials(credentials: *S3Credentials, options: bun.S3.MultiPartUploadOptions, acl: ?bun.S3.ACL, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + try writer.writeAll("\n"); + + { + const Writer = @TypeOf(writer); + + formatter.indent += 1; + defer formatter.indent -|= 1; + + const endpoint = if (credentials.endpoint.len > 0) credentials.endpoint else "https://s3..amazonaws.com"; + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("endpoint: \"", enable_ansi_colors)); + try writer.print(comptime bun.Output.prettyFmt("{s}\"", enable_ansi_colors), .{endpoint}); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + const region = if (credentials.region.len > 0) credentials.region else S3Credentials.guessRegion(credentials.endpoint); + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("region: \"", enable_ansi_colors)); + try writer.print(comptime bun.Output.prettyFmt("{s}\"", enable_ansi_colors), .{region}); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + // PS: We don't want to print the credentials if they are empty just signal that they are there without revealing them + if (credentials.accessKeyId.len > 0) { + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("accessKeyId: \"[REDACTED]\"", enable_ansi_colors)); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + } + + if (credentials.secretAccessKey.len > 0) { + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("secretAccessKey: \"[REDACTED]\"", enable_ansi_colors)); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + } + + if (credentials.sessionToken.len > 0) { + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("sessionToken: \"[REDACTED]\"", enable_ansi_colors)); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + } + + if (acl) |acl_value| { + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("acl: ", enable_ansi_colors)); + try writer.print(comptime bun.Output.prettyFmt("{s}\"", 
enable_ansi_colors), .{acl_value.toString()}); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + } + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("partSize: ", enable_ansi_colors)); + try formatter.printAs(.Double, Writer, writer, JSC.JSValue.jsNumber(options.partSize), .NumberObject, enable_ansi_colors); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("queueSize: ", enable_ansi_colors)); + try formatter.printAs(.Double, Writer, writer, JSC.JSValue.jsNumber(options.queueSize), .NumberObject, enable_ansi_colors); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime bun.Output.prettyFmt("retry: ", enable_ansi_colors)); + try formatter.printAs(.Double, Writer, writer, JSC.JSValue.jsNumber(options.retry), .NumberObject, enable_ansi_colors); + try writer.writeAll("\n"); + } +} + +pub const S3Client = struct { + const log = bun.Output.scoped(.S3Client, false); + pub usingnamespace JSC.Codegen.JSS3Client; + + pub usingnamespace bun.New(@This()); + credentials: *S3Credentials, + options: bun.S3.MultiPartUploadOptions = .{}, + acl: ?bun.S3.ACL = null, + + pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*@This() { + const arguments = callframe.arguments_old(1).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + var aws_options = try S3Credentials.getCredentialsWithOptions(globalThis.bunVM().transpiler.env.getS3Credentials(), .{}, args.nextEat(), null, globalThis); + defer aws_options.deinit(); + return S3Client.new(.{ + .credentials = aws_options.credentials.dupe(), + .options = aws_options.options, + .acl = aws_options.acl, + }); + } + + pub fn writeFormat(this: *@This(), comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + try writer.writeAll(comptime bun.Output.prettyFmt("S3Client", enable_ansi_colors)); + if (this.credentials.bucket.len > 0) { + try writer.print( + comptime bun.Output.prettyFmt(" (\"{s}\") {{", enable_ansi_colors), + .{ + this.credentials.bucket, + }, + ); + } else { + try writer.writeAll(comptime bun.Output.prettyFmt(" {{", enable_ansi_colors)); + } + + try writeFormatCredentials(this.credentials, this.options, this.acl, Formatter, formatter, writer, enable_ansi_colors); + try formatter.writeIndent(@TypeOf(writer), writer); + try writer.writeAll("}"); + formatter.resetLine(); + } + pub fn file(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + if (args.len() == 0) { + return globalThis.ERR_MISSING_ARGS("Expected a path", .{}).throw(); + } + return globalThis.throwInvalidArguments("Expected a path", .{}); + }; + errdefer path.deinit(); + const options = args.nextEat(); + var blob = Blob.new(try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl)); +
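Taken together, the constructor and writeFormat above give the new class its user-facing shape: constructor options fall back to the environment via getS3Credentials(), and inspecting a client prints endpoint/region but only presence markers for secrets. A minimal usage sketch follows, assuming the JSC.Codegen.JSS3Client binding surfaces this class to JavaScript as `Bun.S3Client` (the exposed global name is an assumption, not shown in this hunk):

```ts
// Hypothetical sketch of the constructor wired above. The option names
// mirror the fields printed by writeFormatCredentials; exposing the class
// as `Bun.S3Client` is an assumption based on the JSS3Client codegen.
const client = new Bun.S3Client({
  bucket: "my-bucket",
  region: "us-east-1",
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
});

// writeFormat redacts secrets, so logging a client never leaks them:
// S3Client ("my-bucket") {
//   endpoint: "...",
//   region: "us-east-1",
//   accessKeyId: "[REDACTED]",
//   secretAccessKey: "[REDACTED]",
//   partSize: ..., queueSize: ..., retry: ...
// }
console.log(client);
```
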
blob.allocator = bun.default_allocator; + return blob.toJS(globalThis); + } + + pub fn presign(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + if (args.len() == 0) { + return globalThis.ERR_MISSING_ARGS("Expected a path to presign", .{}).throw(); + } + return globalThis.throwInvalidArguments("Expected a path to presign", .{}); + }; + errdefer path.deinit(); + + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + return S3File.getPresignUrlFrom(&blob, globalThis, options); + } + + pub fn exists(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + if (args.len() == 0) { + return globalThis.ERR_MISSING_ARGS("Expected a path to check if it exists", .{}).throw(); + } + return globalThis.throwInvalidArguments("Expected a path to check if it exists", .{}); + }; + errdefer path.deinit(); + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + return S3File.S3BlobStatTask.exists(globalThis, &blob); + } + + pub fn size(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + if (args.len() == 0) { + return globalThis.ERR_MISSING_ARGS("Expected a path to check the size of", .{}).throw(); + } + return globalThis.throwInvalidArguments("Expected a path to check the size of", .{}); + }; + errdefer path.deinit(); + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + return S3File.S3BlobStatTask.size(globalThis, &blob); + } + + pub fn stat(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + if (args.len() == 0) { + return globalThis.ERR_MISSING_ARGS("Expected a path to check the stat of", .{}).throw(); + } + return globalThis.throwInvalidArguments("Expected a path to check the stat of", .{}); + }; + errdefer path.deinit(); + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + return S3File.S3BlobStatTask.stat(globalThis, &blob); + } + + pub fn write(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) 
bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + return globalThis.ERR_MISSING_ARGS("Expected a path to write to", .{}).throw(); + }; + errdefer path.deinit(); + const data = args.nextEat() orelse { + return globalThis.ERR_MISSING_ARGS("Expected a Blob-y thing to write", .{}).throw(); + }; + + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + var blob_internal: PathOrBlob = .{ .blob = blob }; + return Blob.writeFileInternal(globalThis, &blob_internal, data, .{ + .mkdirp_if_not_exists = false, + .extra_options = options, + }); + } + + pub fn unlink(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { + return globalThis.ERR_MISSING_ARGS("Expected a path to unlink", .{}).throw(); + }; + errdefer path.deinit(); + const options = args.nextEat(); + var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl); + defer blob.detach(); + return blob.store.?.data.s3.unlink(blob.store.?, globalThis, options); + } + + pub fn deinit(this: *@This()) void { + this.credentials.deref(); + this.destroy(); + } + + pub fn finalize( + this: *@This(), + ) void { + this.deinit(); + } + + // Static methods + + pub fn staticWrite(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + return S3File.write(globalThis, callframe); + } + + pub fn staticPresign(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + return S3File.presign(globalThis, callframe); + } + + pub fn staticExists(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + return S3File.exists(globalThis, callframe); + } + + pub fn staticSize(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + return S3File.size(globalThis, callframe); + } + + pub fn staticUnlink(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + return S3File.unlink(globalThis, callframe); + } + + pub fn staticFile(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + const path = (try JSC.Node.PathLike.fromJS(globalThis, &args)) orelse { + return globalThis.throwInvalidArguments("Expected file path string", .{}); + }; + + return try S3File.constructInternalJS(globalThis, path, args.nextEat()); + } + pub fn staticStat(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + return S3File.stat(globalThis, callframe); + } +}; diff --git a/src/bun.js/webcore/S3File.zig b/src/bun.js/webcore/S3File.zig new file mode 100644 index 0000000000..e6f34f97fe --- /dev/null +++ b/src/bun.js/webcore/S3File.zig @@ -0,0 +1,617 @@ +const std = @import("std"); +const bun = @import("root").bun; +const JSC = bun.JSC; +const JSValue = 
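The rest of S3Client.zig above follows one pattern: each instance method (file, presign, exists, size, stat, write, unlink) parses a path plus an optional per-call options object, builds a temporary S3 blob carrying the client's credentials, and delegates to the S3File helpers; the static variants skip the stored credentials and call straight into S3File. A hedged sketch of that surface from JavaScript, under the same naming assumption:

```ts
// Sketch of the instance methods defined above; the optional second
// argument is the per-call options object the Zig code passes through.
const ref = client.file("folder/data.json"); // lazy Blob-like reference
await client.write("folder/data.json", "hello"); // Blob.writeFileInternal
console.log(await client.exists("folder/data.json")); // true
console.log(await client.size("folder/data.json")); // byte count
await client.unlink("folder/data.json"); // delete the object
```
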
JSC.JSValue; +const Blob = JSC.WebCore.Blob; +const PathOrBlob = JSC.Node.PathOrBlob; +const ZigString = JSC.ZigString; +const Method = bun.http.Method; +const strings = bun.strings; +const Output = bun.Output; +const S3Client = @import("./S3Client.zig"); +const S3 = bun.S3; +const S3Stat = @import("./S3Stat.zig").S3Stat; +pub fn writeFormat(s3: *Blob.S3Store, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + try writer.writeAll(comptime Output.prettyFmt("S3Ref", enable_ansi_colors)); + const credentials = s3.getCredentials(); + + if (credentials.bucket.len > 0) { + try writer.print( + comptime Output.prettyFmt(" (\"{s}/{s}\") {{", enable_ansi_colors), + .{ + credentials.bucket, + s3.path(), + }, + ); + } else { + try writer.print( + comptime Output.prettyFmt(" (\"{s}\") {{", enable_ansi_colors), + .{ + s3.path(), + }, + ); + } + + try S3Client.writeFormatCredentials(credentials, s3.options, s3.acl, Formatter, formatter, writer, enable_ansi_colors); + try formatter.writeIndent(@TypeOf(writer), writer); + try writer.writeAll("}"); + formatter.resetLine(); +} +pub fn presign(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to presign", .{}); + } + + switch (path_or_blob) { + .path => |path| { + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to presign", .{}); + } + const options = args.nextEat(); + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + return try getPresignUrlFrom(&blob, globalThis, options); + }, + .blob => return try getPresignUrlFrom(&path_or_blob.blob, globalThis, args.nextEat()), + } +} + +pub fn unlink(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to delete", .{}); + } + + switch (path_or_blob) { + .path => |path| { + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to delete", .{}); + } + const options = args.nextEat(); + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + return try blob.store.?.data.s3.unlink(blob.store.?, globalThis, options); + }, + .blob => |blob| { + return try blob.store.?.data.s3.unlink(blob.store.?, globalThis, args.nextEat()); + }, + } +} + +pub fn write(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = 
JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to upload", .{}); + } + + const data = args.nextEat() orelse { + return globalThis.ERR_MISSING_ARGS("Expected a Blob-y thing to upload", .{}).throw(); + }; + + switch (path_or_blob) { + .path => |path| { + const options = args.nextEat(); + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to upload", .{}); + } + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + + var blob_internal: PathOrBlob = .{ .blob = blob }; + return try Blob.writeFileInternal(globalThis, &blob_internal, data, .{ + .mkdirp_if_not_exists = false, + .extra_options = options, + }); + }, + .blob => return try Blob.writeFileInternal(globalThis, &path_or_blob, data, .{ + .mkdirp_if_not_exists = false, + .extra_options = args.nextEat(), + }), + } +} + +pub fn size(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to get size", .{}); + } + + switch (path_or_blob) { + .path => |path| { + const options = args.nextEat(); + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to get size", .{}); + } + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + + return S3BlobStatTask.size(globalThis, &blob); + }, + .blob => |*blob| { + return Blob.getSize(blob, globalThis); + }, + } +} +pub fn exists(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to check if it exists", .{}); + } + + switch (path_or_blob) { + .path => |path| { + const options = args.nextEat(); + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to check if it exists", .{}); + } + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + + return S3BlobStatTask.exists(globalThis, &blob); + }, + .blob => |*blob| { + return Blob.getExists(blob, globalThis, callframe); + }, + } +} + +fn constructS3FileInternalStore( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, +) bun.JSError!Blob { + // 
get credentials from env + const existing_credentials = globalObject.bunVM().transpiler.env.getS3Credentials(); + return constructS3FileWithS3Credentials(globalObject, path, options, existing_credentials); +} +/// if the credentials have changed, we need to clone it, if not we can just ref/deref it +pub fn constructS3FileWithS3CredentialsAndOptions( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, + default_credentials: *S3.S3Credentials, + default_options: bun.S3.MultiPartUploadOptions, + default_acl: ?bun.S3.ACL, +) bun.JSError!Blob { + var aws_options = try S3.S3Credentials.getCredentialsWithOptions(default_credentials.*, default_options, options, default_acl, globalObject); + defer aws_options.deinit(); + + const store = brk: { + if (aws_options.changed_credentials) { + break :brk Blob.Store.initS3(path, null, aws_options.credentials, bun.default_allocator) catch bun.outOfMemory(); + } else { + break :brk Blob.Store.initS3WithReferencedCredentials(path, null, default_credentials, bun.default_allocator) catch bun.outOfMemory(); + } + }; + errdefer store.deinit(); + store.data.s3.options = aws_options.options; + store.data.s3.acl = aws_options.acl; + var blob = Blob.initWithStore(store, globalObject); + if (options) |opts| { + if (opts.isObject()) { + if (try opts.getTruthyComptime(globalObject, "type")) |file_type| { + inner: { + if (file_type.isString()) { + var allocator = bun.default_allocator; + var str = file_type.toSlice(globalObject, bun.default_allocator); + defer str.deinit(); + const slice = str.slice(); + if (!strings.isAllASCII(slice)) { + break :inner; + } + blob.content_type_was_set = true; + if (globalObject.bunVM().mimeType(str.slice())) |entry| { + blob.content_type = entry.value; + break :inner; + } + const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + blob.content_type = strings.copyLowercase(slice, content_type_buf); + blob.content_type_allocated = true; + } + } + } + } + } + return blob; +} + +pub fn constructS3FileWithS3Credentials( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, + existing_credentials: S3.S3Credentials, +) bun.JSError!Blob { + var aws_options = try S3.S3Credentials.getCredentialsWithOptions(existing_credentials, .{}, options, null, globalObject); + defer aws_options.deinit(); + const store = Blob.Store.initS3(path, null, aws_options.credentials, bun.default_allocator) catch bun.outOfMemory(); + errdefer store.deinit(); + store.data.s3.options = aws_options.options; + store.data.s3.acl = aws_options.acl; + var blob = Blob.initWithStore(store, globalObject); + if (options) |opts| { + if (opts.isObject()) { + if (try opts.getTruthyComptime(globalObject, "type")) |file_type| { + inner: { + if (file_type.isString()) { + var allocator = bun.default_allocator; + var str = file_type.toSlice(globalObject, bun.default_allocator); + defer str.deinit(); + const slice = str.slice(); + if (!strings.isAllASCII(slice)) { + break :inner; + } + blob.content_type_was_set = true; + if (globalObject.bunVM().mimeType(str.slice())) |entry| { + blob.content_type = entry.value; + break :inner; + } + const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + blob.content_type = strings.copyLowercase(slice, content_type_buf); + blob.content_type_allocated = true; + } + } + } + } + } + return blob; +} +fn constructS3FileInternal( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, +) bun.JSError!*Blob { + 
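Both constructS3FileWithS3CredentialsAndOptions and constructS3FileWithS3Credentials above special-case an `options.type` field: non-ASCII values are ignored, known MIME types are interned from the VM's table, and anything else is lowercased into a newly allocated content type. Roughly, from the JS side (a sketch under the same `Bun.S3Client` assumption):

```ts
// Sketch: the `type` option parsed by the constructS3File* helpers above.
const file = client.file("report.csv", { type: "Text/CSV" });
console.log(file.type); // "text/csv" — lowercased via strings.copyLowercase
```
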
var ptr = Blob.new(try constructS3FileInternalStore(globalObject, path, options)); + ptr.allocator = bun.default_allocator; + return ptr; +} + +pub const S3BlobStatTask = struct { + promise: JSC.JSPromise.Strong, + store: *Blob.Store, + usingnamespace bun.New(S3BlobStatTask); + + pub fn onS3ExistsResolved(result: S3.S3StatResult, this: *S3BlobStatTask) void { + defer this.deinit(); + const globalThis = this.promise.globalObject().?; + switch (result) { + .not_found => { + this.promise.resolve(globalThis, .false); + }, + .success => |_| { + // calling .exists() should not cache the size here, since that could prevent downloading a bigger file later: + // it would make us download only a slice of the actual value if the file changed before we download it + // if (this.blob.size == Blob.max_size) { + // this.blob.size = @truncate(stat.size); + // } + this.promise.resolve(globalThis, .true); + }, + .failure => |err| { + this.promise.reject(globalThis, err.toJS(globalThis, this.store.data.s3.path())); + }, + } + } + + pub fn onS3SizeResolved(result: S3.S3StatResult, this: *S3BlobStatTask) void { + defer this.deinit(); + const globalThis = this.promise.globalObject().?; + + switch (result) { + .success => |stat_result| { + this.promise.resolve(globalThis, JSValue.jsNumber(stat_result.size)); + }, + inline .not_found, .failure => |err| { + this.promise.reject(globalThis, err.toJS(globalThis, this.store.data.s3.path())); + }, + } + } + + pub fn onS3StatResolved(result: S3.S3StatResult, this: *S3BlobStatTask) void { + defer this.deinit(); + const globalThis = this.promise.globalObject().?; + switch (result) { + .success => |stat_result| { + this.promise.resolve(globalThis, S3Stat.init( + stat_result.size, + stat_result.etag, + stat_result.contentType, + stat_result.lastModified, + globalThis, + ).toJS(globalThis)); + }, + inline .not_found, .failure => |err| { + this.promise.reject(globalThis, err.toJS(globalThis, this.store.data.s3.path())); + }, + } + } + + pub fn exists(globalThis: *JSC.JSGlobalObject, blob: *Blob) JSValue { + const this = S3BlobStatTask.new(.{ + .promise = JSC.JSPromise.Strong.init(globalThis), + .store = blob.store.?, + }); + this.store.ref(); + const promise = this.promise.value(); + const credentials = blob.store.?.data.s3.getCredentials(); + const path = blob.store.?.data.s3.path(); + const env = globalThis.bunVM().transpiler.env; + + S3.stat(credentials, path, @ptrCast(&S3BlobStatTask.onS3ExistsResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); + return promise; + } + pub fn stat(globalThis: *JSC.JSGlobalObject, blob: *Blob) JSValue { + const this = S3BlobStatTask.new(.{ + .promise = JSC.JSPromise.Strong.init(globalThis), + .store = blob.store.?, + }); + this.store.ref(); + const promise = this.promise.value(); + const credentials = blob.store.?.data.s3.getCredentials(); + const path = blob.store.?.data.s3.path(); + const env = globalThis.bunVM().transpiler.env; + + S3.stat(credentials, path, @ptrCast(&S3BlobStatTask.onS3StatResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); + return promise; + } + pub fn size(globalThis: *JSC.JSGlobalObject, blob: *Blob) JSValue { + const this = S3BlobStatTask.new(.{ + .promise = JSC.JSPromise.Strong.init(globalThis), + .store = blob.store.?, + }); + this.store.ref(); + const promise = this.promise.value(); + const credentials = blob.store.?.data.s3.getCredentials(); + const path = blob.store.?.data.s3.path(); + const env = globalThis.bunVM().transpiler.env; + + S3.stat(credentials, path,
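Note the asymmetry encoded by the three callbacks above: onS3ExistsResolved maps not_found to a resolved `false`, while onS3SizeResolved and onS3StatResolved reject on not_found, attaching the object path via err.toJS. In JS terms, approximately (sketch):

```ts
// Sketch of the promise semantics from S3BlobStatTask's callbacks:
// exists() resolves false for a missing key; size()/stat() reject.
console.log(await client.exists("missing-key")); // false
try {
  await client.size("missing-key");
} catch (err) {
  console.error(err); // rejection built from err.toJS(globalThis, path)
}
```
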
@ptrCast(&S3BlobStatTask.onS3SizeResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); + return promise; + } + + pub fn deinit(this: *S3BlobStatTask) void { + this.store.deref(); + this.promise.deinit(); + this.destroy(); + } +}; + +pub fn getPresignUrlFrom(this: *Blob, globalThis: *JSC.JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue { + if (!this.isS3()) { + return globalThis.ERR_INVALID_THIS("presign is only possible for s3:// files", .{}).throw(); + } + + var method: bun.http.Method = .GET; + var expires: usize = 86400; // 1 day default + + var credentialsWithOptions: S3.S3CredentialsWithOptions = .{ + .credentials = this.store.?.data.s3.getCredentials().*, + }; + defer { + credentialsWithOptions.deinit(); + } + const s3 = &this.store.?.data.s3; + + if (extra_options) |options| { + if (options.isObject()) { + if (try options.getTruthyComptime(globalThis, "method")) |method_| { + method = Method.fromJS(globalThis, method_) orelse { + return globalThis.throwInvalidArguments("method must be GET, PUT, DELETE or HEAD when using s3 protocol", .{}); + }; + } + if (try options.getOptional(globalThis, "expiresIn", i32)) |expires_| { + if (expires_ <= 0) return globalThis.throwInvalidArguments("expiresIn must be greater than 0", .{}); + expires = @intCast(expires_); + } + } + credentialsWithOptions = try s3.getCredentialsWithOptions(options, globalThis); + } + const path = s3.path(); + + const result = credentialsWithOptions.credentials.signRequest(.{ + .path = path, + .method = method, + .acl = credentialsWithOptions.acl, + }, .{ .expires = expires }) catch |sign_err| { + return S3.throwSignError(sign_err, globalThis); + }; + defer result.deinit(); + var str = bun.String.fromUTF8(result.url); + return str.transferToJS(this.globalThis); +} +pub fn getBucketName( + this: *const Blob, +) ?[]const u8 { + const store = this.store orelse return null; + if (store.data != .s3) return null; + const credentials = store.data.s3.getCredentials(); + var full_path = store.data.s3.path(); + if (strings.startsWith(full_path, "/")) { + full_path = full_path[1..]; + } + var bucket: []const u8 = credentials.bucket; + + if (bucket.len == 0) { + if (strings.indexOf(full_path, "/")) |end| { + bucket = full_path[0..end]; + if (bucket.len > 0) { + return bucket; + } + } + return null; + } + return bucket; +} + +pub fn getBucket( + this: *Blob, + globalThis: *JSC.JSGlobalObject, +) callconv(JSC.conv) JSValue { + if (getBucketName(this)) |name| { + var str = bun.String.createUTF8(name); + return str.transferToJS(globalThis); + } + return .undefined; +} +pub fn getPresignUrl(this: *Blob, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const args = callframe.arguments_old(1); + return getPresignUrlFrom(this, globalThis, if (args.len > 0) args.ptr[0] else null); +} + +pub fn getStat(this: *Blob, globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) callconv(JSC.conv) JSValue { + return S3BlobStatTask.stat(globalThis, this); +} + +pub fn stat(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + errdefer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + + if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data
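getPresignUrlFrom above signs synchronously: it defaults to a GET URL valid for 86400 seconds (one day), accepts only GET, PUT, DELETE or HEAD for `method`, and requires a positive `expiresIn`. A sketch:

```ts
// Sketch: presigning per getPresignUrlFrom. The URL comes back
// synchronously (signRequest does no I/O); defaults are GET + 86400s.
const url = client.presign("folder/upload-target.bin", {
  method: "PUT",
  expiresIn: 3600, // seconds; must be > 0
});
console.log(typeof url); // "string"
```
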
!= .s3)) { + return globalThis.throwInvalidArguments("Expected a S3 or path to get size", .{}); + } + + switch (path_or_blob) { + .path => |path| { + const options = args.nextEat(); + if (path == .fd) { + return globalThis.throwInvalidArguments("Expected a S3 or path to get size", .{}); + } + var blob = try constructS3FileInternalStore(globalThis, path.path, options); + defer blob.deinit(); + + return S3BlobStatTask.stat(globalThis, &blob); + }, + .blob => |*blob| { + return S3BlobStatTask.stat(globalThis, blob); + }, + } +} + +pub fn constructInternalJS( + globalObject: *JSC.JSGlobalObject, + path: JSC.Node.PathLike, + options: ?JSC.JSValue, +) bun.JSError!JSValue { + const blob = try constructS3FileInternal(globalObject, path, options); + return blob.toJS(globalObject); +} + +pub fn toJSUnchecked( + globalObject: *JSC.JSGlobalObject, + this: *Blob, +) JSValue { + return BUN__createJSS3FileUnsafely(globalObject, this); +} + +pub fn constructInternal( + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!*Blob { + const vm = globalObject.bunVM(); + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.Node.ArgumentsSlice.init(vm, arguments); + defer args.deinit(); + + const path = (try JSC.Node.PathLike.fromJS(globalObject, &args)) orelse { + return globalObject.throwInvalidArguments("Expected file path string", .{}); + }; + return constructS3FileInternal(globalObject, path, args.nextEat()); +} + +pub fn construct( + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) callconv(JSC.conv) ?*Blob { + return constructInternal(globalObject, callframe) catch |err| switch (err) { + error.JSError => null, + error.OutOfMemory => { + _ = globalObject.throwOutOfMemoryValue(); + return null; + }, + }; +} +pub fn hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool { + JSC.markBinding(@src()); + const blob = value.as(Blob) orelse return false; + return blob.isS3(); +} + +comptime { + @export(exports.JSS3File__presign, .{ .name = "JSS3File__presign" }); + @export(construct, .{ .name = "JSS3File__construct" }); + @export(hasInstance, .{ .name = "JSS3File__hasInstance" }); + @export(getBucket, .{ .name = "JSS3File__bucket" }); + @export(getStat, .{ .name = "JSS3File__stat" }); +} + +pub const exports = struct { + pub const JSS3File__presign = JSC.toJSHostFunctionWithContext(Blob, getPresignUrl); + pub const JSS3File__stat = JSC.toJSHostFunctionWithContext(Blob, getStat); +}; +extern fn BUN__createJSS3File(*JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSValue; +extern fn BUN__createJSS3FileUnsafely(*JSC.JSGlobalObject, *Blob) callconv(JSC.conv) JSValue; +pub fn createJSS3File(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue { + return BUN__createJSS3File(globalObject, callframe); +} diff --git a/src/bun.js/webcore/S3Stat.zig b/src/bun.js/webcore/S3Stat.zig new file mode 100644 index 0000000000..5635307477 --- /dev/null +++ b/src/bun.js/webcore/S3Stat.zig @@ -0,0 +1,58 @@ +const bun = @import("../../bun.zig"); +const JSC = @import("../../JSC.zig"); + +pub const S3Stat = struct { + const log = bun.Output.scoped(.S3Stat, false); + pub usingnamespace JSC.Codegen.JSS3Stat; + pub usingnamespace bun.New(@This()); + + size: u64, + etag: bun.String, + contentType: bun.String, + lastModified: f64, + + pub fn constructor(globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!*@This() { + return globalThis.throwInvalidArguments("S3Stat is not constructable", 
.{}); + } + + pub fn init( + size: u64, + etag: []const u8, + contentType: []const u8, + lastModified: []const u8, + globalThis: *JSC.JSGlobalObject, + ) *@This() { + var date_str = bun.String.init(lastModified); + defer date_str.deref(); + const last_modified = date_str.parseDate(globalThis); + + return S3Stat.new(.{ + .size = size, + .etag = bun.String.createUTF8(etag), + .contentType = bun.String.createUTF8(contentType), + .lastModified = last_modified, + }); + } + + pub fn getSize(this: *@This(), _: *JSC.JSGlobalObject) JSC.JSValue { + return JSC.JSValue.jsNumber(this.size); + } + + pub fn getEtag(this: *@This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { + return this.etag.toJS(globalObject); + } + + pub fn getContentType(this: *@This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { + return this.contentType.toJS(globalObject); + } + + pub fn getLastModified(this: *@This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { + return JSC.JSValue.fromDateNumber(globalObject, this.lastModified); + } + + pub fn finalize(this: *@This()) void { + this.etag.deref(); + this.contentType.deref(); + this.destroy(); + } +}; diff --git a/src/bun.js/webcore/blob.zig b/src/bun.js/webcore/blob.zig index a154a8cb75..53d4c87113 100644 --- a/src/bun.js/webcore/blob.zig +++ b/src/bun.js/webcore/blob.zig @@ -43,33 +43,9 @@ const Request = JSC.WebCore.Request; const libuv = bun.windows.libuv; -const AWSCredentials = @import("../../s3.zig").AWSCredentials; -const S3MultiPartUpload = @import("../../s3.zig").MultiPartUpload; -const AWS = AWSCredentials; - -const PathOrBlob = union(enum) { - path: JSC.Node.PathOrFileDescriptor, - blob: Blob, - - pub fn fromJSNoCopy(ctx: js.JSContextRef, args: *JSC.Node.ArgumentsSlice) bun.JSError!PathOrBlob { - if (try JSC.Node.PathOrFileDescriptor.fromJS(ctx, args, bun.default_allocator)) |path| { - return PathOrBlob{ - .path = path, - }; - } - - const arg = args.nextEat() orelse { - return ctx.throwInvalidArgumentTypeValue("destination", "path, file descriptor, or Blob", .undefined); - }; - if (arg.as(Blob)) |blob| { - return PathOrBlob{ - .blob = blob.*, - }; - } - return ctx.throwInvalidArgumentTypeValue("destination", "path, file descriptor, or Blob", arg); - } -}; - +const S3 = bun.S3; +const S3Credentials = S3.S3Credentials; +const PathOrBlob = JSC.Node.PathOrBlob; const WriteFilePromise = @import("./blob/WriteFile.zig").WriteFilePromise; const WriteFileWaitFromLockedValueTask = @import("./blob/WriteFile.zig").WriteFileWaitFromLockedValueTask; const NewReadFileHandler = @import("./blob/ReadFile.zig").NewReadFileHandler; @@ -77,6 +53,8 @@ const WriteFile = @import("./blob/WriteFile.zig").WriteFile; const ReadFile = @import("./blob/ReadFile.zig").ReadFile; const WriteFileWindows = @import("./blob/WriteFile.zig").WriteFileWindows; +const S3File = @import("./S3File.zig"); + pub const Blob = struct { const bloblog = Output.scoped(.Blob, false); @@ -295,7 +273,7 @@ pub const Blob = struct { switch (store.data) { .s3 => |_| { // TODO: s3 - // we need to make this async and use s3Download/s3DownloadSlice + // we need to make this async and use download/downloadSlice }, .file => |file| { @@ -718,14 +696,8 @@ pub const Blob = struct { { const store = this.store.?; switch (store.data) { - .s3 => |s3| { - try writer.writeAll(comptime Output.prettyFmt("S3Ref", enable_ansi_colors)); - try writer.print( - comptime Output.prettyFmt(" (\"{s}\")", enable_ansi_colors), - .{ - s3.pathlike.slice(), - }, - ); + .s3 => |*s3| { + try S3File.writeFormat(s3, Formatter, formatter, writer, 
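The S3Stat class above carries the four fields resolved by a stat call: `lastModified` is parsed from the server's date string into an epoch number at init time and surfaced to JS as a Date via fromDateNumber. A consuming sketch, under the same `Bun.S3Client` naming assumption:

```ts
// Sketch of consuming the S3Stat object resolved by stat(): the getters
// defined in S3Stat.zig map to these properties.
const info = await client.stat("folder/data.json");
console.log(info.size); // number (bytes)
console.log(info.etag); // ETag string
console.log(info.contentType); // e.g. "application/json"
console.log(info.lastModified instanceof Date); // true (fromDateNumber)
```
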
enable_ansi_colors); }, .file => |file| { try writer.writeAll(comptime Output.prettyFmt("FileRef", enable_ansi_colors)); @@ -923,14 +895,15 @@ pub const Blob = struct { const Wrapper = struct { promise: JSC.JSPromise.Strong, + store: *Store, pub usingnamespace bun.New(@This()); - pub fn resolve(result: AWS.S3UploadResult, this: *@This()) void { + pub fn resolve(result: S3.S3UploadResult, this: *@This()) void { if (this.promise.globalObject()) |globalObject| { switch (result) { .success => this.promise.resolve(globalObject, JSC.jsNumber(0)), .failure => |err| { - this.promise.rejectOnNextTick(globalObject, err.toJS(globalObject)); + this.promise.reject(globalObject, err.toJS(globalObject, this.store.getPath())); }, } } @@ -939,6 +912,8 @@ pub const Blob = struct { fn deinit(this: *@This()) void { this.promise.deinit(); + this.store.deref(); + this.destroy(); } }; @@ -946,9 +921,20 @@ pub const Blob = struct { const promise_value = promise.value(); const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null); const proxy_url = if (proxy) |p| p.href else null; - aws_options.credentials.s3Upload(s3.path(), "", destination_blob.contentTypeOrMimeType(), proxy_url, @ptrCast(&Wrapper.resolve), Wrapper.new(.{ - .promise = promise, - })); + destination_blob.store.?.ref(); + S3.upload( + &aws_options.credentials, + s3.path(), + "", + destination_blob.contentTypeOrMimeType(), + aws_options.acl, + proxy_url, + @ptrCast(&Wrapper.resolve), + Wrapper.new(.{ + .promise = promise, + .store = destination_blob.store.?, + }), + ); return promise_value; } @@ -1024,7 +1010,7 @@ pub const Blob = struct { if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob( ctx, source_blob, - @truncate(s3.options.partSize * S3MultiPartUpload.OneMiB), + @truncate(s3.options.partSize), ), ctx)) |stream| { return destination_blob.pipeReadableStreamToBlob(ctx, stream, options.extra_options); } else { @@ -1058,13 +1044,24 @@ pub const Blob = struct { const proxy_url = if (proxy) |p| p.href else null; switch (store.data) { .bytes => |bytes| { - if (bytes.len > S3MultiPartUpload.MAX_SINGLE_UPLOAD_SIZE) { + if (bytes.len > S3.MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE) { if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob( ctx, source_blob, - @truncate(s3.options.partSize * S3MultiPartUpload.OneMiB), + @truncate(s3.options.partSize), ), ctx)) |stream| { - return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), stream, ctx, aws_options.options, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); + return S3.uploadStream( + (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), + s3.path(), + stream, + ctx, + aws_options.options, + aws_options.acl, + destination_blob.contentTypeOrMimeType(), + proxy_url, + null, + undefined, + ); } else { return JSC.JSPromise.rejectedPromiseValue(ctx, ctx.createErrorInstance("Failed to stream bytes to s3 bucket", .{})); } @@ -1074,12 +1071,12 @@ pub const Blob = struct { promise: JSC.JSPromise.Strong, pub usingnamespace bun.New(@This()); - pub fn resolve(result: AWS.S3UploadResult, this: *@This()) void { + pub fn resolve(result: S3.S3UploadResult, this: *@This()) void { if (this.promise.globalObject()) |globalObject| { switch (result) { .success => this.promise.resolve(globalObject, JSC.jsNumber(this.store.data.bytes.len)), .failure => |err| { - this.promise.rejectOnNextTick(globalObject, err.toJS(globalObject)); + 
this.promise.reject(globalObject, err.toJS(globalObject, this.store.getPath())); }, } } @@ -1095,10 +1092,19 @@ pub const Blob = struct { const promise = JSC.JSPromise.Strong.init(ctx); const promise_value = promise.value(); - aws_options.credentials.s3Upload(s3.path(), bytes.slice(), destination_blob.contentTypeOrMimeType(), proxy_url, @ptrCast(&Wrapper.resolve), Wrapper.new(.{ - .store = store, - .promise = promise, - })); + S3.upload( + &aws_options.credentials, + s3.path(), + bytes.slice(), + destination_blob.contentTypeOrMimeType(), + aws_options.acl, + proxy_url, + @ptrCast(&Wrapper.resolve), + Wrapper.new(.{ + .store = store, + .promise = promise, + }), + ); return promise_value; } }, @@ -1107,9 +1113,20 @@ pub const Blob = struct { if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob( ctx, source_blob, - @truncate(s3.options.partSize * S3MultiPartUpload.OneMiB), + @truncate(s3.options.partSize), ), ctx)) |stream| { - return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), stream, ctx, s3.options, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); + return S3.uploadStream( + (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), + s3.path(), + stream, + ctx, + s3.options, + aws_options.acl, + destination_blob.contentTypeOrMimeType(), + proxy_url, + null, + undefined, + ); } else { return JSC.JSPromise.rejectedPromiseValue(ctx, ctx.createErrorInstance("Failed to stream bytes to s3 bucket", .{})); } @@ -1287,7 +1304,18 @@ pub const Blob = struct { const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); const proxy_url = if (proxy) |p| p.href else null; - return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), readable, globalThis, aws_options.options, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); + return S3.uploadStream( + (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), + s3.path(), + readable, + globalThis, + aws_options.options, + aws_options.acl, + destination_blob.contentTypeOrMimeType(), + proxy_url, + null, + undefined, + ); } destination_blob.detach(); return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); @@ -1335,7 +1363,18 @@ pub const Blob = struct { } const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); const proxy_url = if (proxy) |p| p.href else null; - return (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(s3.path(), readable, globalThis, aws_options.options, destination_blob.contentTypeOrMimeType(), proxy_url, null, undefined); + return S3.uploadStream( + (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), + s3.path(), + readable, + globalThis, + aws_options.options, + aws_options.acl, + destination_blob.contentTypeOrMimeType(), + proxy_url, + null, + undefined, + ); } destination_blob.detach(); return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); @@ -1593,269 +1632,6 @@ pub const Blob = struct { return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(written)); } - - pub fn JSS3File_upload_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const arguments = callframe.arguments_old(3).slice(); - var args = 
-        var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments);
-        defer args.deinit();
-
-        // accept a path or a blob
-        var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args);
-        errdefer {
-            if (path_or_blob == .path) {
-                path_or_blob.path.deinit();
-            }
-        }
-
-        if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) {
-            return globalThis.throwInvalidArguments("S3.upload(pathOrS3, blob) expects a S3 or path to upload", .{});
-        }
-
-        const data = args.nextEat() orelse {
-            return globalThis.throwInvalidArguments("S3.upload(pathOrS3, blob) expects a Blob-y thing to upload", .{});
-        };
-
-        switch (path_or_blob) {
-            .path => |path| {
-                const options = args.nextEat();
-                if (path == .fd) {
-                    return globalThis.throwInvalidArguments("S3.upload(pathOrS3, blob) expects a S3 or path to upload", .{});
-                }
-                var blob = try constructS3FileInternalStore(globalThis, path.path, options);
-                defer blob.deinit();
-
-                var blob_internal: PathOrBlob = .{ .blob = blob };
-                return try writeFileInternal(globalThis, &blob_internal, data, .{
-                    .mkdirp_if_not_exists = false,
-                    .extra_options = options,
-                });
-            },
-            .blob => return try writeFileInternal(globalThis, &path_or_blob, data, .{
-                .mkdirp_if_not_exists = false,
-                .extra_options = args.nextEat(),
-            }),
-        }
-    }
-
-    pub fn JSS3File_size_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
-        const arguments = callframe.arguments_old(3).slice();
-        var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments);
-        defer args.deinit();
-
-        // accept a path or a blob
-        var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args);
-        errdefer {
-            if (path_or_blob == .path) {
-                path_or_blob.path.deinit();
-            }
-        }
-
-        if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) {
-            return globalThis.throwInvalidArguments("S3.size(pathOrS3) expects a S3 or path to get size", .{});
-        }
-
-        switch (path_or_blob) {
-            .path => |path| {
-                const options = args.nextEat();
-                if (path == .fd) {
-                    return globalThis.throwInvalidArguments("S3.size(pathOrS3) expects a S3 or path to get size", .{});
-                }
-                var blob = try constructS3FileInternalStore(globalThis, path.path, options);
-                defer blob.deinit();
-
-                return S3BlobStatTask.size(globalThis, &blob);
-            },
-            .blob => |*blob| {
-                return getSize(blob, globalThis);
-            },
-        }
-    }
-    pub fn JSS3File_exists_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
-        const arguments = callframe.arguments_old(3).slice();
-        var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments);
-        defer args.deinit();
-
-        // accept a path or a blob
-        var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args);
-        errdefer {
-            if (path_or_blob == .path) {
-                path_or_blob.path.deinit();
-            }
-        }
-
-        if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) {
-            return globalThis.throwInvalidArguments("S3.exists(pathOrS3) expects a S3 or path to check if it exists", .{});
-        }
-
-        switch (path_or_blob) {
-            .path => |path| {
-                const options = args.nextEat();
-                if (path == .fd) {
-                    return globalThis.throwInvalidArguments("S3.exists(pathOrS3) expects a S3 or path to check if it exists", .{});
-                }
-                var blob = try constructS3FileInternalStore(globalThis, path.path, options);
-                defer blob.deinit();
-
-                return S3BlobStatTask.exists(globalThis, &blob);
-            },
-            .blob => |*blob| {
-                return getExists(blob, globalThis, callframe);
-            },
-        }
-    }
-
-    pub export fn JSS3File__exists(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
-        return JSS3File_exists_(globalThis, callframe) catch |err| switch (err) {
-            error.JSError => .zero,
-            error.OutOfMemory => {
-                globalThis.throwOutOfMemory() catch {};
-                return .zero;
-            },
-        };
-    }
-    pub export fn JSS3File__size(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
-        return JSS3File_size_(globalThis, callframe) catch |err| switch (err) {
-            error.JSError => .zero,
-            error.OutOfMemory => {
-                globalThis.throwOutOfMemory() catch {};
-                return .zero;
-            },
-        };
-    }
-    pub export fn JSS3File__upload(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
-        return JSS3File_upload_(globalThis, callframe) catch |err| switch (err) {
-            error.JSError => .zero,
-            error.OutOfMemory => {
-                globalThis.throwOutOfMemory() catch {};
-                return .zero;
-            },
-        };
-    }
-    pub fn JSS3File_presign_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
-        const arguments = callframe.arguments_old(3).slice();
-        var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments);
-        defer args.deinit();
-
-        // accept a path or a blob
-        var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args);
-        errdefer {
-            if (path_or_blob == .path) {
-                path_or_blob.path.deinit();
-            }
-        }
-
-        if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) {
-            return globalThis.throwInvalidArguments("S3.presign(pathOrS3, options) expects a S3 or path to presign", .{});
-        }
-
-        switch (path_or_blob) {
-            .path => |path| {
-                if (path == .fd) {
-                    return globalThis.throwInvalidArguments("S3.presign(pathOrS3, options) expects a S3 or path to presign", .{});
-                }
-                const options = args.nextEat();
-                var blob = try constructS3FileInternalStore(globalThis, path.path, options);
-                defer blob.deinit();
-                return try getPresignUrlFrom(&blob, globalThis, options);
-            },
-            .blob => return try getPresignUrlFrom(&path_or_blob.blob, globalThis, args.nextEat()),
-        }
-    }
-
-    pub export fn JSS3File__presign(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
-        return JSS3File_presign_(globalThis, callframe) catch |err| switch (err) {
-            error.JSError => .zero,
-            error.OutOfMemory => {
-                globalThis.throwOutOfMemory() catch {};
-                return .zero;
-            },
-        };
-    }
-    pub fn JSS3File_unlink_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
-        const arguments = callframe.arguments_old(3).slice();
-        var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments);
-        defer args.deinit();
-
-        // accept a path or a blob
-        var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args);
-        errdefer {
-            if (path_or_blob == .path) {
-                path_or_blob.path.deinit();
-            }
-        }
-        if (path_or_blob == .blob and (path_or_blob.blob.store == null or path_or_blob.blob.store.?.data != .s3)) {
-            return globalThis.throwInvalidArguments("S3.unlink(pathOrS3) expects a S3 or path to delete", .{});
-        }
-
-        switch (path_or_blob) {
-            .path => |path| {
-                if (path == .fd) {
-                    return globalThis.throwInvalidArguments("S3.unlink(pathOrS3) expects a S3 or path to delete", .{});
-                }
-                const options = args.nextEat();
-                var blob = try constructS3FileInternalStore(globalThis, path.path, options);
-                defer blob.deinit();
-                return try blob.store.?.data.s3.unlink(globalThis, options);
-            },
-            .blob => |blob| {
-                return try blob.store.?.data.s3.unlink(globalThis, args.nextEat());
-            },
-        }
-    }
-
-    pub export fn JSS3File__unlink(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue {
-        return JSS3File_unlink_(globalThis, callframe) catch |err| switch (err) {
-            error.JSError => .zero,
-            error.OutOfMemory => {
-                globalThis.throwOutOfMemory() catch {};
-                return .zero;
-            },
-        };
-    }
-    pub export fn JSS3File__hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool {
-        JSC.markBinding(@src());
-        const blob = value.as(Blob) orelse return false;
-        return blob.isS3();
-    }
-
-    pub export fn JSDOMFile__hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool {
-        JSC.markBinding(@src());
-        const blob = value.as(Blob) orelse return false;
-        return blob.is_jsdom_file;
-    }
-    extern fn BUN__createJSS3FileConstructor(*JSC.JSGlobalObject) JSValue;
-
-    pub fn getJSS3FileConstructor(
-        globalObject: *JSC.JSGlobalObject,
-        _: *JSC.JSObject,
-    ) callconv(JSC.conv) JSValue {
-        return BUN__createJSS3FileConstructor(globalObject);
-    }
-    export fn JSS3File__construct(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) ?*Blob {
-        const vm = globalThis.bunVM();
-        const arguments = callframe.arguments_old(2).slice();
-        var args = JSC.Node.ArgumentsSlice.init(vm, arguments);
-        defer args.deinit();
-
-        const path_or_fd = (JSC.Node.PathLike.fromJS(globalThis, &args)) catch |err| switch (err) {
-            error.JSError => null,
-            error.OutOfMemory => {
-                globalThis.throwOutOfMemory() catch {};
-                return null;
-            },
-        };
-        if (path_or_fd == null) {
-            globalThis.throwInvalidArguments("Expected file path string", .{}) catch return null;
-            return null;
-        }
-        return constructS3FileInternal(globalThis, path_or_fd.?, args.nextEat()) catch |err| switch (err) {
-            error.JSError => null,
-            error.OutOfMemory => {
-                globalThis.throwOutOfMemory() catch {};
-                return null;
-            },
-        };
-    }
     export fn JSDOMFile__construct(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) ?*Blob {
         return JSDOMFile__construct_(globalThis, callframe) catch |err| switch (err) {
             error.JSError => null,
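The wholesale deletions above are relocations, not removals: later hunks reference `S3File.constructInternalJS`, `S3File.getStat`, and `S3File.S3BlobStatTask`, so these bindings move out of blob.zig into a dedicated S3File module. A hedged sketch of the JS surface the removed wrappers back — the `S3` name and signatures are inferred from the error strings above (e.g. "S3.upload(pathOrS3, blob)"), not confirmed by this diff:

```ts
// Hypothetical usage only: `S3` stands for whatever object
// BUN__createJSS3FileConstructor exposes; shapes are assumptions.
declare const S3: {
  (path: string, options?: object): Blob; // JSS3File__construct
  exists(pathOrS3: string | Blob): Promise<boolean>;
  size(pathOrS3: string | Blob): Promise<number>;
  upload(pathOrS3: string | Blob, data: Blob | string | ArrayBuffer, options?: object): Promise<number>;
  presign(pathOrS3: string | Blob, options?: { method?: "GET" | "PUT" | "DELETE" | "HEAD"; expiresIn?: number }): string;
  unlink(pathOrS3: string | Blob): Promise<boolean>;
};

const url = S3.presign("s3://my-bucket/key.json", { expiresIn: 3600 });
if (await S3.exists("s3://my-bucket/key.json")) console.log(url);
```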
@@ -1986,66 +1762,7 @@ pub const Blob = struct {
     }
     comptime {
-        if (!JSC.is_bindgen) {
-            _ = JSDOMFile__hasInstance;
-        }
-    }
-
-    fn constructS3FileInternalStore(
-        globalObject: *JSC.JSGlobalObject,
-        path: JSC.Node.PathLike,
-        options: ?JSC.JSValue,
-    ) bun.JSError!Blob {
-
-        // get ENV config
-        var aws_options = try AWS.getCredentialsWithOptions(globalObject.bunVM().transpiler.env.getAWSCredentials(), options, globalObject);
-        defer aws_options.deinit();
-        const store = Blob.Store.initS3(path, null, aws_options.credentials, bun.default_allocator) catch bun.outOfMemory();
-        errdefer store.deinit();
-        store.data.s3.options = aws_options.options;
-
-        var blob = Blob.initWithStore(store, globalObject);
-        if (options) |opts| {
-            if (try opts.getTruthy(globalObject, "type")) |file_type| {
-                inner: {
-                    if (file_type.isString()) {
-                        var allocator = bun.default_allocator;
-                        var str = file_type.toSlice(globalObject, bun.default_allocator);
-                        defer str.deinit();
-                        const slice = str.slice();
-                        if (!strings.isAllASCII(slice)) {
-                            break :inner;
-                        }
-                        blob.content_type_was_set = true;
-                        if (globalObject.bunVM().mimeType(str.slice())) |entry| {
-                            blob.content_type = entry.value;
-                            break :inner;
-                        }
-                        const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory();
-                        blob.content_type = strings.copyLowercase(slice, content_type_buf);
-                        blob.content_type_allocated = true;
-                    }
-                }
-            }
-        }
-        return blob;
-    }
-    fn constructS3FileInternal(
-        globalObject: *JSC.JSGlobalObject,
-        path: JSC.Node.PathLike,
-        options: ?JSC.JSValue,
-    ) bun.JSError!*Blob {
-        var ptr = Blob.new(try constructS3FileInternalStore(globalObject, path, options));
-        ptr.allocator = bun.default_allocator;
-        return ptr;
-    }
-    fn constructS3FileInternalJS(
-        globalObject: *JSC.JSGlobalObject,
-        path: JSC.Node.PathLike,
-        options: ?JSC.JSValue,
-    ) bun.JSError!JSC.JSValue {
-        var ptr = try constructS3FileInternal(globalObject, path, options);
-        return ptr.toJS(globalObject);
+        _ = JSDOMFile__hasInstance;
     }

     pub fn constructBunFile(
@@ -2063,8 +1780,8 @@ pub const Blob = struct {
         const options = if (arguments.len >= 2) arguments[1] else null;

         if (path == .path) {
-            if (strings.startsWith(path.path.slice(), "s3://")) {
-                return try constructS3FileInternalJS(globalObject, path.path, options);
+            if (strings.hasPrefixComptime(path.path.slice(), "s3://")) {
+                return try S3File.constructInternalJS(globalObject, path.path, options);
             }
         }
         defer path.deinitAndUnprotect();
@@ -2105,28 +1822,13 @@ pub const Blob = struct {
         return ptr.toJS(globalObject);
     }

-    pub fn constructS3File(
-        globalObject: *JSC.JSGlobalObject,
-        callframe: *JSC.CallFrame,
-    ) bun.JSError!JSC.JSValue {
-        const vm = globalObject.bunVM();
-        const arguments = callframe.arguments_old(2).slice();
-        var args = JSC.Node.ArgumentsSlice.init(vm, arguments);
-        defer args.deinit();
-
-        const path = (try JSC.Node.PathLike.fromJS(globalObject, &args)) orelse {
-            return globalObject.throwInvalidArguments("Expected file path string", .{});
-        };
-        return constructS3FileInternalJS(globalObject, path, args.nextEat());
-    }
-
     pub fn findOrCreateFileFromPath(path_or_fd: *JSC.Node.PathOrFileDescriptor, globalThis: *JSGlobalObject, comptime check_s3: bool) Blob {
         var vm = globalThis.bunVM();
         const allocator = bun.default_allocator;
         if (check_s3) {
             if (path_or_fd.* == .path) {
                 if (strings.startsWith(path_or_fd.path.slice(), "s3://")) {
-                    const credentials = globalThis.bunVM().transpiler.env.getAWSCredentials();
+                    const credentials = globalThis.bunVM().transpiler.env.getS3Credentials();
                     const copy = path_or_fd.*;
                     path_or_fd.* = .{ .path = .{ .string = bun.PathString.empty } };
                     return Blob.initWithStore(Blob.Store.initS3(copy.path, null, credentials, allocator) catch bun.outOfMemory(), globalThis);
@@ -2208,6 +1910,14 @@ pub const Blob = struct {
         } else 0;
     }

+    pub fn getPath(this: *const Store) ?[]const u8 {
+        return switch (this.data) {
+            .bytes => |*bytes| if (bytes.stored_name.len > 0) bytes.stored_name.slice() else null,
+            .file => |*file| if (file.pathlike == .path) file.pathlike.path.slice() else null,
+            .s3 => |*s3| s3.pathlike.slice(),
+        };
+    }
+
     pub fn size(this: *const Store) SizeType {
         return switch (this.data) {
             .bytes => this.data.bytes.len,
@@ -2248,8 +1958,35 @@ pub const Blob = struct {
         var this = bun.cast(*Store, ptr);
         this.deref();
     }
+    pub fn initS3WithReferencedCredentials(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: *S3Credentials, allocator: std.mem.Allocator) !*Store {
+        var path = pathlike;
+        // this actually protects/refs the pathlike
+        path.toThreadSafe();

-    pub fn initS3(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: AWSCredentials, allocator: std.mem.Allocator) !*Store {
+        const store = Blob.Store.new(.{
+            .data = .{
+                .s3 = S3Store.initWithReferencedCredentials(
+                    path,
+                    mime_type orelse brk: {
+                        const sliced = path.slice();
+                        if (sliced.len > 0) {
+                            var extname = std.fs.path.extension(sliced);
+                            extname = std.mem.trim(u8, extname, ".");
+                            if (http.MimeType.byExtensionNoDefault(extname)) |mime| {
+                                break :brk mime;
+                            }
+                        }
+                        break :brk null;
+                    },
+                    credentials,
+                ),
+            },
+            .allocator = allocator,
+            .ref_count = std.atomic.Value(u32).init(1),
+        });
+        return store;
+    }
+    pub fn initS3(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: S3Credentials, allocator: std.mem.Allocator) !*Store {
         var path = pathlike;
         // this actually protects/refs the pathlike
         path.toThreadSafe();
@@ -3741,12 +3478,9 @@ pub const Blob = struct {

     pub fn unlink(this: *const FileStore, globalThis: *JSC.JSGlobalObject) JSValue {
         return switch (this.pathlike) {
-            .path => switch (globalThis.bunVM().nodeFS().unlink(.{
-                .path = this.pathlike.path,
-            }, .sync)) {
-                .err => |err| JSC.JSPromise.rejectedPromiseValue(globalThis, err.toJSC(globalThis)),
-                else => JSC.JSPromise.resolvedPromiseValue(globalThis, .true),
-            },
+            .path => |path_like| JSC.Node.Async.unlink.create(globalThis, undefined, .{
+                .path = .{ .encoded_slice = ZigString.init(path_like.slice()).toSliceClone(bun.default_allocator) },
+            }, globalThis.bunVM()),
             .fd => JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Is not possible to unlink a file descriptor", .{})),
         };
     }
@@ -3770,19 +3504,20 @@ pub const Blob = struct {
     pub const S3Store = struct {
         pathlike: JSC.Node.PathLike,
         mime_type: http.MimeType = http.MimeType.other,
-        credentials: ?*AWSCredentials,
-        options: S3MultiPartUpload.MultiPartUploadOptions = .{},
+        credentials: ?*S3Credentials,
+        options: bun.S3.MultiPartUploadOptions = .{},
+        acl: ?S3.ACL = null,

         pub fn isSeekable(_: *const @This()) ?bool {
             return true;
         }

-        pub fn getCredentials(this: *const @This()) *AWSCredentials {
+        pub fn getCredentials(this: *const @This()) *S3Credentials {
             bun.assert(this.credentials != null);
             return this.credentials.?;
         }

-        pub fn getCredentialsWithOptions(this: *const @This(), options: ?JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!AWS.AWSCredentialsWithOptions {
-            return AWS.getCredentialsWithOptions(this.getCredentials().*, options, globalObject);
+        pub fn getCredentialsWithOptions(this: *const @This(), options: ?JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!S3.S3CredentialsWithOptions {
+            return S3Credentials.getCredentialsWithOptions(this.getCredentials().*, this.options, options, this.acl, globalObject);
         }

         pub fn path(this: *@This()) []const u8 {
@@ -3790,38 +3525,39 @@ pub const Blob = struct {
             // normalize start and ending
             if (strings.endsWith(path_name, "/")) {
                 path_name = path_name[0..path_name.len];
+            } else if (strings.endsWith(path_name, "\\")) {
+                path_name = path_name[0 .. path_name.len - 1];
             }
             if (strings.startsWith(path_name, "/")) {
                 path_name = path_name[1..];
+            } else if (strings.startsWith(path_name, "\\")) {
+                path_name = path_name[1..];
             }
             return path_name;
         }
-        pub fn unlink(this: *@This(), globalThis: *JSC.JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue {
+        pub fn unlink(this: *@This(), store: *Store, globalThis: *JSC.JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue {
             const Wrapper = struct {
                 promise: JSC.JSPromise.Strong,
+                store: *Store,

                 pub usingnamespace bun.New(@This());

-                pub fn resolve(result: AWS.S3DeleteResult, self: *@This()) void {
+                pub fn resolve(result: S3.S3DeleteResult, self: *@This()) void {
                     defer self.deinit();
                     const globalObject = self.promise.globalObject().?;
                     switch (result) {
                         .success => {
                             self.promise.resolve(globalObject, .true);
                         },
-                        .not_found => {
-                            const js_err = globalObject.createErrorInstance("File not found", .{});
-                            js_err.put(globalObject, ZigString.static("code"), ZigString.init("FileNotFound").toJS(globalObject));
-                            self.promise.reject(globalObject, js_err);
-                        },
-                        .failure => |err| {
-                            self.promise.rejectOnNextTick(globalObject, err.toJS(globalObject));
+                        inline .not_found, .failure => |err| {
+                            self.promise.reject(globalObject, err.toJS(globalObject, self.store.getPath()));
                         },
                     }
                 }

                 fn deinit(self: *@This()) void {
+                    self.store.deref();
                     self.promise.deinit();
                     self.destroy();
                 }
@@ -3832,14 +3568,23 @@ pub const Blob = struct {
             const proxy = if (proxy_url) |url| url.href else null;
             var aws_options = try this.getCredentialsWithOptions(extra_options, globalThis);
             defer aws_options.deinit();
-            aws_options.credentials.s3Delete(this.path(), @ptrCast(&Wrapper.resolve), Wrapper.new(.{
+            S3.delete(&aws_options.credentials, this.path(), @ptrCast(&Wrapper.resolve), Wrapper.new(.{
                 .promise = promise,
+                .store = store, // store is needed in case of not found error
             }), proxy);
+            store.ref();

             return value;
         }
-
-        pub fn init(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: AWSCredentials) S3Store {
+        pub fn initWithReferencedCredentials(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: *S3Credentials) S3Store {
+            credentials.ref();
+            return .{
+                .credentials = credentials,
+                .pathlike = pathlike,
+                .mime_type = mime_type orelse http.MimeType.other,
+            };
+        }
+        pub fn init(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: S3Credentials) S3Store {
             return .{
                 .credentials = credentials.dupe(),
                 .pathlike = pathlike,
@@ -4146,7 +3891,7 @@ pub const Blob = struct {
         pub fn callHandler(this: *S3BlobDownloadTask, raw_bytes: []u8) JSValue {
             return this.handler(&this.blob, this.globalThis, raw_bytes);
         }
-        pub fn onS3DownloadResolved(result: AWS.S3DownloadResult, this: *S3BlobDownloadTask) void {
+        pub fn onS3DownloadResolved(result: S3.S3DownloadResult, this: *S3BlobDownloadTask) void {
             defer this.deinit();
             switch (result) {
                 .success => |response| {
@@ -4156,13 +3901,8 @@ pub const Blob = struct {
                     }
                     JSC.AnyPromise.wrap(.{ .normal = this.promise.get() }, this.globalThis, S3BlobDownloadTask.callHandler, .{ this, bytes });
                 },
-                .not_found => {
-                    const js_err = this.globalThis.createErrorInstance("File not found", .{});
-                    js_err.put(this.globalThis, ZigString.static("code"), ZigString.init("FileNotFound").toJS(this.globalThis));
-                    this.promise.reject(this.globalThis, js_err);
-                },
-                .failure => |err| {
-                    this.promise.rejectOnNextTick(this.globalThis, err.toJS(this.globalThis));
+                inline .not_found, .failure => |err| {
+                    this.promise.reject(this.globalThis, err.toJS(this.globalThis, this.blob.store.?.getPath()));
                 },
             }
         }
@@ -4185,96 +3925,20 @@ pub const Blob = struct {
             if (blob.offset > 0) {
                 const len: ?usize = if (blob.size != Blob.max_size) @intCast(blob.size) else null;
                 const offset: usize = @intCast(blob.offset);
-                credentials.s3DownloadSlice(path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null);
+                S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null);
             } else if (blob.size == Blob.max_size) {
-                credentials.s3Download(path, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null);
+                S3.download(credentials, path, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null);
             } else {
                 const len: usize = @intCast(blob.size);
                 const offset: usize = @intCast(blob.offset);
-                credentials.s3DownloadSlice(path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null);
+                S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null);
             }
             return promise;
         }

         pub fn deinit(this: *S3BlobDownloadTask) void {
             this.blob.store.?.deref();
-            this.poll_ref.unrefOnNextTick(this.globalThis.bunVM());
-            this.promise.deinit();
-            this.destroy();
-        }
-    };
-
-    const S3BlobStatTask = struct {
-        promise: JSC.JSPromise.Strong,
-        usingnamespace bun.New(S3BlobStatTask);
-
-        pub fn onS3ExistsResolved(result: AWS.S3StatResult, this: *S3BlobStatTask) void {
-            defer this.deinit();
-            const globalThis = this.promise.globalObject().?;
-            switch (result) {
-                .not_found => {
-                    this.promise.resolve(globalThis, .false);
-                },
-                .success => |_| {
-                    // calling .exists() should not prevent it to download a bigger file
-                    // this would make it download a slice of the actual value, if the file changes before we download it
-                    // if (this.blob.size == Blob.max_size) {
-                    //     this.blob.size = @truncate(stat.size);
-                    // }
-                    this.promise.resolve(globalThis, .true);
-                },
-                .failure => |err| {
-                    this.promise.rejectOnNextTick(globalThis, err.toJS(globalThis));
-                },
-            }
-        }
-
-        pub fn onS3SizeResolved(result: AWS.S3StatResult, this: *S3BlobStatTask) void {
-            defer this.deinit();
-            const globalThis = this.promise.globalObject().?;
-
-            switch (result) {
-                .not_found => {
-                    const js_err = globalThis.createErrorInstance("File not Found", .{});
-                    js_err.put(globalThis, ZigString.static("code"), ZigString.static("FileNotFound").toJS(globalThis));
-                    this.promise.rejectOnNextTick(globalThis, js_err);
-                },
-                .success => |stat| {
-                    this.promise.resolve(globalThis, JSValue.jsNumber(stat.size));
-                },
-                .failure => |err| {
-                    this.promise.rejectOnNextTick(globalThis, err.toJS(globalThis));
-                },
-            }
-        }
-
-        pub fn exists(globalThis: *JSC.JSGlobalObject, blob: *Blob) JSValue {
-            const this = S3BlobStatTask.new(.{
-                .promise = JSC.JSPromise.Strong.init(globalThis),
-            });
-            const promise = this.promise.value();
-            const credentials = blob.store.?.data.s3.getCredentials();
-            const path = blob.store.?.data.s3.path();
-            const env = globalThis.bunVM().transpiler.env;
-
-            credentials.s3Stat(path, @ptrCast(&S3BlobStatTask.onS3ExistsResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null);
-            return promise;
-        }
-
-        pub fn size(globalThis: *JSC.JSGlobalObject, blob: *Blob) JSValue {
-            const this = S3BlobStatTask.new(.{
-                .promise = JSC.JSPromise.Strong.init(globalThis),
-            });
-            const promise = this.promise.value();
-            const credentials = blob.store.?.data.s3.getCredentials();
-            const path = blob.store.?.data.s3.path();
-            const env = globalThis.bunVM().transpiler.env;
-
-            credentials.s3Stat(path, @ptrCast(&S3BlobStatTask.onS3SizeResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null);
-            return promise;
-        }
-
-        pub fn deinit(this: *S3BlobStatTask) void {
+            this.poll_ref.unref(this.globalThis.bunVM());
             this.promise.deinit();
             this.destroy();
         }
@@ -4340,7 +4004,7 @@ pub const Blob = struct {
             return JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Blob is detached", .{}));
         };
         return switch (store.data) {
-            .s3 => |*s3| try s3.unlink(globalThis, args.nextEat()),
+            .s3 => |*s3| try s3.unlink(store, globalThis, args.nextEat()),
             .file => |file| file.unlink(globalThis),
             else => JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Blob is read-only", .{})),
         };
@@ -4353,57 +4017,11 @@ pub const Blob = struct {
         _: *JSC.CallFrame,
     ) bun.JSError!JSValue {
         if (this.isS3()) {
-            return S3BlobStatTask.exists(globalThis, this);
+            return S3File.S3BlobStatTask.exists(globalThis, this);
         }
         return JSC.JSPromise.resolvedPromiseValue(globalThis, this.getExistsSync());
     }

-    pub fn getPresignUrlFrom(this: *Blob, globalThis: *JSC.JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue {
-        if (this.isS3()) {
-            var method: bun.http.Method = .GET;
-            var expires: usize = 86400; // 1 day default
-
-            var credentialsWithOptions: AWS.AWSCredentialsWithOptions = .{
-                .credentials = this.store.?.data.s3.getCredentials().*,
-            };
-            defer {
-                credentialsWithOptions.deinit();
-            }
-            if (extra_options) |options| {
-                if (options.isObject()) {
-                    if (try options.getTruthyComptime(globalThis, "method")) |method_| {
-                        method = Method.fromJS(globalThis, method_) orelse {
-                            return globalThis.throwInvalidArguments("method must be GET, PUT, DELETE or HEAD when using s3 protocol", .{});
-                        };
-                    }
-                    if (try options.getOptional(globalThis, "expiresIn", i32)) |expires_| {
-                        if (expires_ <= 0) return globalThis.throwInvalidArguments("expiresIn must be greather than 0", .{});
-                        expires = @intCast(expires_);
-                    }
-                }
-                credentialsWithOptions = try this.store.?.data.s3.getCredentialsWithOptions(options, globalThis);
-            }
-            const path = this.store.?.data.s3.path();
-
-            const result = credentialsWithOptions.credentials.signRequest(.{
-                .path = path,
-                .method = method,
-            }, .{ .expires = expires }) catch |sign_err| {
-                return AWS.throwSignError(sign_err, globalThis);
-            };
-            defer result.deinit();
-            var str = bun.String.fromUTF8(result.url);
-            return str.transferToJS(this.globalThis);
-        }
-
-        return globalThis.throwError(error.NotSupported, "is only possible to presign s3:// files");
-    }
-
-    pub fn getPresignUrl(this: *Blob, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
-        const args = callframe.arguments_old(1);
-        return getPresignUrlFrom(this, globalThis, if (args.len > 0) args.ptr[0] else null);
-    }
-
     pub const FileStreamWrapper = struct {
         promise: JSC.JSPromise.Strong,
         readable_stream_ref: JSC.WebCore.ReadableStream.Strong,
@@ -4424,10 +4042,12 @@ pub const Blob = struct {
         var args = callframe.arguments_old(2);
         var this = args.ptr[args.len - 1].asPromisePtr(FileStreamWrapper);
         defer this.deinit();
-        if (this.readable_stream_ref.get()) |stream| {
+        var strong = this.readable_stream_ref;
+        defer strong.deinit();
+        this.readable_stream_ref = .{};
+        if (strong.get()) |stream| {
             stream.done(globalThis);
         }
-        this.readable_stream_ref.deinit();
         this.promise.resolve(globalThis, JSC.JSValue.jsNumber(0));
         return .undefined;
     }
@@ -4438,11 +4058,14 @@ pub const Blob = struct {
         defer this.sink.deinit();
         const err = args.ptr[0];

-        this.promise.rejectOnNextTick(globalThis, err);
+        var strong = this.readable_stream_ref;
+        defer strong.deinit();
+        this.readable_stream_ref = .{};

-        if (this.readable_stream_ref.get()) |stream| {
+        this.promise.reject(globalThis, err);
+
+        if (strong.get()) |stream| {
             stream.cancel(globalThis);
-            this.readable_stream_ref.deinit();
         }
         return .undefined;
     }
@@ -4472,7 +4095,18 @@ pub const Blob = struct {
         const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null);
         const proxy_url = if (proxy) |p| p.href else null;

-        return (if (extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()).s3UploadStream(path, readable_stream, globalThis, aws_options.options, this.contentTypeOrMimeType(), proxy_url, null, undefined);
+        return S3.uploadStream(
+            (if (extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()),
+            path,
+            readable_stream,
+            globalThis,
+            aws_options.options,
+            aws_options.acl,
+            this.contentTypeOrMimeType(),
+            proxy_url,
+            null,
+            undefined,
+        );
     }

     if (store.data != .file) {
@@ -4698,10 +4332,24 @@ pub const Blob = struct {
                 }
             }
             const credentialsWithOptions = try s3.getCredentialsWithOptions(options, globalThis);
-            return try credentialsWithOptions.credentials.dupe().s3WritableStream(path, globalThis, credentialsWithOptions.options, this.contentTypeOrMimeType(), proxy_url);
+            return try S3.writableStream(
+                credentialsWithOptions.credentials.dupe(),
+                path,
+                globalThis,
+                credentialsWithOptions.options,
+                this.contentTypeOrMimeType(),
+                proxy_url,
+            );
         }
     }
-    return try s3.getCredentials().s3WritableStream(path, globalThis, .{}, this.contentTypeOrMimeType(), proxy_url);
+    return try S3.writableStream(
+        s3.getCredentials(),
+        path,
+        globalThis,
+        .{},
+        this.contentTypeOrMimeType(),
+        proxy_url,
+    );
     }
     if (store.data != .file) {
         return globalThis.throwInvalidArguments("Blob is read-only", .{});
@@ -4987,17 +4635,6 @@ pub const Blob = struct {
         return if (this.getNameString()) |name| name.toJS(globalThis) else .undefined;
     }

-    pub fn getBucket(
-        this: *Blob,
-        globalThis: *JSC.JSGlobalObject,
-    ) JSValue {
-        if (this.getBucketName()) |name| {
-            var str = bun.String.createUTF8(name);
-            return str.transferToJS(globalThis);
-        }
-        return .undefined;
-    }
-
     pub fn setName(
         this: *Blob,
         jsThis: JSC.JSValue,
@@ -5048,30 +4685,6 @@ pub const Blob = struct {
         return null;
     }

-    pub fn getBucketName(
-        this: *const Blob,
-    ) ?[]const u8 {
-        const store = this.store orelse return null;
-        if (store.data != .s3) return null;
-        const credentials = store.data.s3.getCredentials();
-        var full_path = store.data.s3.path();
-        if (strings.startsWith(full_path, "/")) {
-            full_path = full_path[1..];
-        }
-        var bucket: []const u8 = credentials.bucket;
-
-        if (bucket.len == 0) {
-            if (strings.indexOf(full_path, "/")) |end| {
-                bucket = full_path[0..end];
-                if (bucket.len > 0) {
-                    return bucket;
-                }
-            }
-            return null;
-        }
-        return bucket;
-    }
-
     // TODO: Move this to a separate `File` object or BunFile
     pub fn getLastModified(
         this: *Blob,
@@ -5122,11 +4735,26 @@ pub const Blob = struct {
             _ = Bun__Blob__getSizeForBindings;
         }
     }
-
-    pub fn getSize(this: *Blob, globalThis: *JSC.JSGlobalObject) JSValue {
+    pub fn getStat(this: *Blob, globalThis: *JSC.JSGlobalObject, callback: *JSC.CallFrame) JSC.JSValue {
+        const store = this.store orelse return JSC.JSValue.jsUndefined();
+        // TODO: make this async for files
+        return switch (store.data) {
+            .file => |*file| {
+                return switch (file.pathlike) {
+                    .path => |path_like| JSC.Node.Async.stat.create(globalThis, undefined, .{
+                        .path = .{ .encoded_slice = ZigString.init(path_like.slice()).toSliceClone(bun.default_allocator) },
+                    }, globalThis.bunVM()),
+                    .fd => |fd| JSC.Node.Async.fstat.create(globalThis, undefined, .{ .fd = fd }, globalThis.bunVM()),
                };
            },
+            .s3 => S3File.getStat(this, globalThis, callback),
+            else => JSC.JSValue.jsUndefined(),
+        };
+    }
+    pub fn getSize(this: *Blob, _: *JSC.JSGlobalObject) JSValue {
        if (this.size == Blob.max_size) {
            if (this.isS3()) {
-                return S3BlobStatTask.size(globalThis, this);
+                return JSC.JSValue.jsNumber(std.math.nan(f64));
            }
            this.resolveSize();
            if (this.size == Blob.max_size and this.store != null) {
@@ -5441,8 +5069,12 @@ pub const Blob = struct {
        // if (comptime Environment.allow_assert) {
        //     assert(this.allocator != null);
        // }
-
        this.calculateEstimatedByteSize();
+
+        if (this.isS3()) {
+            return S3File.toJSUnchecked(globalObject, this);
+        }
+
        return Blob.toJSUnchecked(globalObject, this);
    }
@@ -6606,3 +6238,9 @@ pub const InlineBlob = extern struct {
 };

 const assert = bun.assert;
+
+pub export fn JSDOMFile__hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool {
+    JSC.markBinding(@src());
+    const blob = value.as(Blob) orelse return false;
+    return blob.is_jsdom_file;
+}
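Two behavioral changes in blob.zig are worth calling out: `getStat` dispatches local files through `JSC.Node.Async.stat`/`fstat` and S3 blobs through `S3File.getStat`, while `getSize` now reports NaN for S3 blobs instead of issuing a network stat behind a property read. A hedged sketch of the observable behavior, assuming the `stat` binding registered in response.classes.ts below surfaces as a `Blob` method:

```ts
import { file } from "bun";

// Local file: stat() resolves asynchronously (node:fs stat under the hood).
const pkg = file("./package.json");
const stats = await pkg.stat(); // assumed to be an fs.Stats-like object
console.log(stats.size);

// S3 blob: size is no longer fetched implicitly; per getSize above it is
// NaN until you stat() or read the object. Bucket/key are hypothetical.
const remote = file("s3://my-bucket/data.json");
console.log(remote.size);               // NaN
console.log((await remote.stat()).size); // actual object size
```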
diff --git a/src/bun.js/webcore/response.classes.ts b/src/bun.js/webcore/response.classes.ts
index ba7e022fa3..bbb6112950 100644
--- a/src/bun.js/webcore/response.classes.ts
+++ b/src/bun.js/webcore/response.classes.ts
@@ -125,6 +125,7 @@ export default [
   }),
   define({
     name: "Blob",
+    final: false,
    construct: true,
    finalize: true,
    JSType: "0b11101110",
@@ -167,17 +168,12 @@ export default [
      // Non-standard, s3 + BunFile support
      unlink: { fn: "doUnlink", length: 0 },
+      delete: { fn: "doUnlink", length: 0 },
      write: { fn: "doWrite", length: 2 },

-      // Non-standard, s3 support
-      bucket: {
-        cache: true,
-        getter: "getBucket",
-      },
-      presign: { fn: "getPresignUrl", length: 1 },
-
      size: {
        getter: "getSize",
      },
+      stat: { fn: "getStat", length: 0 },

      writer: {
        fn: "getWriter",
diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig
index 38e737ea52..341e8c01aa 100644
--- a/src/bun.js/webcore/response.zig
+++ b/src/bun.js/webcore/response.zig
@@ -55,7 +55,7 @@ const Async = bun.Async;
 const BoringSSL = bun.BoringSSL;
 const X509 = @import("../api/bun/x509.zig");
 const PosixToWinNormalizer = bun.path.PosixToWinNormalizer;
-const s3 = @import("../../s3.zig");
+const s3 = bun.S3;

 pub const Response = struct {
     const ResponseMixin = BodyMixin(@This());
@@ -529,11 +529,13 @@ pub const Response = struct {
             .url = bun.String.empty,
         };

-        const result = blob.store.?.data.s3.getCredentials().signRequest(.{
+        const credentials = blob.store.?.data.s3.getCredentials();
+
+        const result = credentials.signRequest(.{
             .path = blob.store.?.data.s3.path(),
             .method = .GET,
         }, .{ .expires = 15 * 60 }) catch |sign_err| {
-            return s3.AWSCredentials.throwSignError(sign_err, globalThis);
+            return s3.throwSignError(sign_err, globalThis);
         };
         defer result.deinit();
         response.init.headers = response.getOrCreateHeaders(globalThis);
@@ -804,7 +806,7 @@ pub const Fetch = struct {
 };

 pub const FetchTasklet = struct {
-    pub const FetchTaskletStream = JSC.WebCore.FetchTaskletChunkedRequestSink;
+    pub const FetchTaskletStream = JSC.WebCore.NetworkSink;
     const log = Output.scoped(.FetchTasklet, false);

     sink: ?*FetchTaskletStream.JSSink = null,
@@ -1173,8 +1175,6 @@ pub const Fetch = struct {
             }

             if (!assignment_result.isEmptyOrUndefinedOrNull()) {
-                this.javascript_vm.drainMicrotasks();
-
                 assignment_result.ensureStillAlive();
                 // it returns a Promise when it goes through ReadableStreamDefaultReader
                 if (assignment_result.asAnyPromise()) |promise| {
@@ -3143,7 +3143,7 @@ pub const Fetch = struct {

                 prepare_body: {
                     // is a S3 file we can use chunked here
-                    if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob(globalThis, &body.AnyBlob.Blob, s3.MultiPartUpload.DefaultPartSize), globalThis)) |stream| {
+                    if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob(globalThis, &body.AnyBlob.Blob, s3.MultiPartUploadOptions.DefaultPartSize), globalThis)) |stream| {
                         var old = body;
                         defer old.detach();
                         body = .{ .ReadableStream = JSC.WebCore.ReadableStream.Strong.init(stream, globalThis) };
@@ -3252,9 +3252,10 @@ pub const Fetch = struct {

         if (url.isS3()) {
             // get ENV config
-            var credentialsWithOptions: s3.AWSCredentials.AWSCredentialsWithOptions = .{
-                .credentials = globalThis.bunVM().transpiler.env.getAWSCredentials(),
+            var credentialsWithOptions: s3.S3CredentialsWithOptions = .{
+                .credentials = globalThis.bunVM().transpiler.env.getS3Credentials(),
                 .options = .{},
+                .acl = null,
             };
             defer {
                 credentialsWithOptions.deinit();
@@ -3264,7 +3265,7 @@ pub const Fetch = struct {
                     if (try options.getTruthyComptime(globalThis, "s3")) |s3_options| {
                         if (s3_options.isObject()) {
                             s3_options.ensureStillAlive();
-                            credentialsWithOptions = try s3.AWSCredentials.getCredentialsWithOptions(credentialsWithOptions.credentials, s3_options, globalThis);
+                            credentialsWithOptions = try s3.S3Credentials.getCredentialsWithOptions(credentialsWithOptions.credentials, .{}, s3_options, null, globalThis);
                         }
                     }
                 }
@@ -3278,7 +3279,7 @@ pub const Fetch = struct {
                     url_proxy_buffer: []const u8,
                     pub usingnamespace bun.New(@This());

-                    pub fn resolve(result: s3.AWSCredentials.S3UploadResult, self: *@This()) void {
+                    pub fn resolve(result: s3.S3UploadResult, self: *@This()) void {
                         if (self.promise.globalObject()) |global| {
                             switch (result) {
                                 .success => {
@@ -3333,11 +3334,13 @@ pub const Fetch = struct {
                 const promise_value = promise.value();

                 const proxy_url = if (proxy) |p| p.href else "";
-                _ = credentialsWithOptions.credentials.dupe().s3UploadStream(
+                _ = bun.S3.uploadStream(
+                    credentialsWithOptions.credentials.dupe(),
                     url.s3Path(),
                     body.ReadableStream.get().?,
                     globalThis,
                     credentialsWithOptions.options,
+                    credentialsWithOptions.acl,
                     if (headers) |h| h.getContentType() else null,
                     proxy_url,
                     @ptrCast(&Wrapper.resolve),
@@ -3356,7 +3359,7 @@ pub const Fetch = struct {
                     .method = method,
                 }, null) catch |sign_err| {
                     is_error = true;
-                    return JSPromise.rejectedPromiseValue(globalThis, s3.AWSCredentials.getJSSignError(sign_err, globalThis));
+                    return JSPromise.rejectedPromiseValue(globalThis, s3.getJSSignError(sign_err, globalThis));
                 };
                 defer result.deinit();
                 if (proxy) |proxy_| {
@@ -3379,42 +3382,15 @@ pub const Fetch = struct {
                 }

                 const content_type = if (headers) |h| h.getContentType() else null;
+                var header_buffer: [10]picohttp.Header = undefined;
                 if (range) |range_| {
-                    const _headers = result.headers();
-                    var headersWithRange: [5]picohttp.Header = .{
-                        _headers[0],
-                        _headers[1],
-                        _headers[2],
-                        _headers[3],
-                        .{ .name = "range", .value = range_ },
-                    };
-
-                    setHeaders(&headers, &headersWithRange, allocator);
+                    const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ });
+                    setHeaders(&headers, _headers, allocator);
                 } else if (content_type) |ct| {
                     if (ct.len > 0) {
-                        const _headers = result.headers();
-                        if (_headers.len > 4) {
-                            var headersWithContentType: [6]picohttp.Header = .{
-                                _headers[0],
-                                _headers[1],
-                                _headers[2],
-                                _headers[3],
-                                _headers[4],
-                                .{ .name = "Content-Type", .value = ct },
-                            };
-                            setHeaders(&headers, &headersWithContentType, allocator);
-                        } else {
-                            var headersWithContentType: [5]picohttp.Header = .{
-                                _headers[0],
-                                _headers[1],
-                                _headers[2],
-                                _headers[3],
-                                .{ .name = "Content-Type", .value = ct },
-                            };
-
-                            setHeaders(&headers, &headersWithContentType, allocator);
-                        }
+                        const _headers = result.mixWithHeader(&header_buffer, .{ .name = "Content-Type", .value = ct });
+                        setHeaders(&headers, _headers, allocator);
                     } else {
                         setHeaders(&headers, result.headers(), allocator);
                     }
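The fetch() path above parses a non-standard `s3` option bag through `S3Credentials.getCredentialsWithOptions` (now carrying an `acl`), and streams PUT bodies through `S3.uploadStream`. A hedged sketch of what that looks like from JavaScript — the option field names are assumptions based on the Zig side, not a documented API:

```ts
// A small streaming body to upload.
const body = new Response("a,b\n1,2").body!;

const res = await fetch("s3://my-bucket/report.csv", {
  method: "PUT",
  body, // a ReadableStream body goes through S3.uploadStream
  // @ts-expect-error — `s3` is a Bun-specific fetch extension
  s3: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    acl: "public-read", // maps to the new ?S3.ACL field
  },
});
console.log(res.status);
```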
diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig
index 0d437358ea..9cfa4dc51e 100644
--- a/src/bun.js/webcore/streams.zig
+++ b/src/bun.js/webcore/streams.zig
@@ -43,7 +43,6 @@ const Request = JSC.WebCore.Request;
 const assert = bun.assert;
 const Syscall = bun.sys;
 const uv = bun.windows.libuv;
-const S3MultiPartUpload = @import("../../s3.zig").MultiPartUpload;

 const AnyBlob = JSC.WebCore.AnyBlob;
 pub const ReadableStream = struct {
@@ -379,7 +378,7 @@ pub const ReadableStream = struct {
                 const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null);
                 const proxy_url = if (proxy) |p| p.href else null;

-                return credentials.s3ReadableStream(path, blob.offset, if (blob.size != Blob.max_size) blob.size else null, proxy_url, globalThis);
+                return bun.S3.readableStream(credentials, path, blob.offset, if (blob.size != Blob.max_size) blob.size else null, proxy_url, globalThis);
             },
         }
     }
@@ -482,7 +481,7 @@ pub const StreamStart = union(Tag) {
     FileSink: FileSinkOptions,
     HTTPSResponseSink: void,
     HTTPResponseSink: void,
-    FetchTaskletChunkedRequestSink: void,
+    NetworkSink: void,
     ready: void,
     owned_and_done: bun.ByteList,
     done: bun.ByteList,
@@ -509,7 +508,7 @@ pub const StreamStart = union(Tag) {
         FileSink,
         HTTPSResponseSink,
         HTTPResponseSink,
-        FetchTaskletChunkedRequestSink,
+        NetworkSink,
         ready,
         owned_and_done,
         done,
@@ -660,7 +659,7 @@ pub const StreamStart = union(Tag) {
                 },
             };
         },
-        .FetchTaskletChunkedRequestSink, .HTTPSResponseSink, .HTTPResponseSink => {
+        .NetworkSink, .HTTPSResponseSink, .HTTPResponseSink => {
             var empty = true;
             var chunk_size: JSC.WebCore.Blob.SizeType = 2048;
@@ -2650,7 +2649,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
 }
 pub const HTTPSResponseSink = HTTPServerWritable(true);
 pub const HTTPResponseSink = HTTPServerWritable(false);
-pub const FetchTaskletChunkedRequestSink = struct {
+pub const NetworkSink = struct {
     task: ?HTTPWritableStream = null,
     signal: Signal = .{},
     globalThis: *JSGlobalObject = undefined,
@@ -2658,16 +2657,17 @@ pub const NetworkSink = struct {
     buffer: bun.io.StreamBuffer,
     ended: bool = false,
     done: bool = false,
+    cancel: bool = false,

     encoded: bool = true,

     endPromise: JSC.JSPromise.Strong = .{},

     auto_flusher: AutoFlusher = AutoFlusher{},

-    pub usingnamespace bun.New(FetchTaskletChunkedRequestSink);
+    pub usingnamespace bun.New(NetworkSink);
     const HTTPWritableStream = union(enum) {
         fetch: *JSC.WebCore.Fetch.FetchTasklet,
-        s3_upload: *S3MultiPartUpload,
+        s3_upload: *bun.S3.MultiPartUpload,
     };

     fn getHighWaterMark(this: *@This()) Blob.SizeType {
@@ -2689,6 +2689,16 @@ pub const NetworkSink = struct {
         AutoFlusher.registerDeferredMicrotaskWithTypeUnchecked(@This(), this, this.globalThis.bunVM());
     }

+    pub fn path(this: *@This()) ?[]const u8 {
+        if (this.task) |task| {
+            return switch (task) {
+                .s3_upload => |s3| s3.path,
+                else => null,
+            };
+        }
+        return null;
+    }
+
     pub fn onAutoFlush(this: *@This()) bool {
         if (this.done) {
             this.auto_flusher.registered = false;
@@ -2819,6 +2829,7 @@ pub const NetworkSink = struct {
         this.ended = true;
         this.done = true;
         this.signal.close(null);
+        this.cancel = true;
         this.finalize();
     }
@@ -2963,7 +2974,7 @@ pub const NetworkSink = struct {
         return this.buffer.memoryCost();
     }

-    const name = "FetchTaskletChunkedRequestSink";
+    const name = "NetworkSink";
     pub const JSSink = NewJSSink(@This(), name);
 };
 pub const BufferedReadableStreamAction = enum {
diff --git a/src/bun.zig b/src/bun.zig
index 77d75c1f0d..6bb9b890a5 100644
--- a/src/bun.zig
+++ b/src/bun.zig
@@ -1399,10 +1399,10 @@ fn getFdPathViaCWD(fd: std.posix.fd_t, buf: *[@This().MAX_PATH_BYTES]u8) ![]u8 {

 pub const getcwd = std.posix.getcwd;

-pub fn getcwdAlloc(allocator: std.mem.Allocator) ![]u8 {
+pub fn getcwdAlloc(allocator: std.mem.Allocator) ![:0]u8 {
     var temp: PathBuffer = undefined;
     const temp_slice = try getcwd(&temp);
-    return allocator.dupe(u8, temp_slice);
+    return allocator.dupeZ(u8, temp_slice);
 }

 /// Get the absolute path to a file descriptor.
@@ -4221,3 +4221,5 @@ pub const WPathBufferPool = if (Environment.isWindows) PathBufferPoolT(bun.WPath
     pub fn deleteAll() void {}
 };
 pub const OSPathBufferPool = if (Environment.isWindows) WPathBufferPool else PathBufferPool;
+
+pub const S3 = @import("./s3/client.zig");
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index a9d2345bf1..660cff180d 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -1655,7 +1655,7 @@ pub const BundleV2 = struct {
             .entry_points = config.entry_points.keys(),
             .target = config.target.toAPI(),
             .absolute_working_dir = if (config.dir.list.items.len > 0)
-                config.dir.slice()
+                config.dir.sliceWithSentinel()
             else
                 null,
             .inject = &.{},
@@ -12961,7 +12961,7 @@ pub const LinkerContext = struct {
                 chunk,
                 chunks,
                 &display_size,
-                c.options.source_maps != .none,
+                chunk.content.sourcemap(c.options.source_maps) != .none,
             );
             var code_result = _code_result catch @panic("Failed to allocate memory for output file");
@@ -12974,7 +12974,7 @@ pub const LinkerContext = struct {
                 chunk.final_rel_path,
             );

-            switch (c.options.source_maps) {
+            switch (chunk.content.sourcemap(c.options.source_maps)) {
                 .external, .linked => |tag| {
                     const output_source_map = chunk.output_source_map.finalize(bun.default_allocator, code_result.shifts) catch @panic("Failed to allocate memory for external source map");
                     var source_map_final_rel_path = default_allocator.alloc(u8, chunk.final_rel_path.len + ".map".len) catch unreachable;
@@ -13280,7 +13280,7 @@ pub const LinkerContext = struct {
                 chunk,
                 chunks,
                 &display_size,
-                c.options.source_maps != .none,
+                chunk.content.sourcemap(c.options.source_maps) != .none,
             ) catch |err| bun.Output.panic("Failed to create output chunk: {s}", .{@errorName(err)});

             var source_map_output_file: ?options.OutputFile = null;
@@ -13293,7 +13293,7 @@ pub const LinkerContext = struct {
                 chunk.final_rel_path,
             );

-            switch (c.options.source_maps) {
+            switch (chunk.content.sourcemap(c.options.source_maps)) {
                 .external, .linked => |tag| {
                     const output_source_map = chunk.output_source_map.finalize(source_map_allocator, code_result.shifts) catch @panic("Failed to allocate memory for external source map");
                     const source_map_final_rel_path = strings.concat(default_allocator, &.{
@@ -15463,6 +15463,18 @@ pub const Chunk = struct {
         javascript: JavaScriptChunk,
         css: CssChunk,
         html: HtmlChunk,
+
+        pub fn sourcemap(this: *const Content, default: options.SourceMapOption) options.SourceMapOption {
+            return switch (this.*) {
+                .javascript => default,
+                // TODO:
+                .css => options.SourceMapOption.none,
+
+                // probably never
+                .html => options.SourceMapOption.none,
+            };
+        }
+
         pub fn loader(this: *const Content) Loader {
             return switch (this.*) {
                 .javascript => .js,
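The linker now asks `chunk.content.sourcemap(...)` instead of reading `c.options.source_maps` directly, so only JavaScript chunks honor the configured source-map mode; CSS (marked TODO) and HTML chunks are forced to `none`. A sketch of the observable effect through the public bundler API, assuming an entry point that also emits a CSS chunk:

```ts
// Only JS chunks should get companion .map files after this change.
const result = await Bun.build({
  entrypoints: ["./src/index.ts"], // hypothetical entry that imports CSS
  outdir: "./dist",
  sourcemap: "external",
});
// Expect .js.map artifacts for JS chunks, but no .css.map outputs.
console.log(result.outputs.map(o => o.path));
```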
diff --git a/src/c.zig b/src/c.zig
index 03ee097e08..1541b9872e 100644
--- a/src/c.zig
+++ b/src/c.zig
@@ -499,3 +499,7 @@ pub extern fn strlen(ptr: [*c]const u8) usize;
 pub const passwd = translated.passwd;
 pub const geteuid = translated.geteuid;
 pub const getpwuid_r = translated.getpwuid_r;
+
+export fn Bun__errnoName(err: c_int) ?[*:0]const u8 {
+    return @tagName(bun.C.SystemErrno.init(err) orelse return null);
+}
diff --git a/src/cli.zig b/src/cli.zig
index 54037e2965..73a936ff1d 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -47,6 +47,11 @@ pub var start_time: i128 = undefined;
 const Bunfig = @import("./bunfig.zig").Bunfig;
 const OOM = bun.OOM;

+export var Bun__Node__ProcessNoDeprecation = false;
+export var Bun__Node__ProcessThrowDeprecation = false;
+
+pub var Bun__Node__ProcessTitle: ?string = null;
+
 pub const Cli = struct {
     pub const CompileTarget = @import("./compile_target.zig");
     var wait_group: sync.WaitGroup = undefined;
@@ -232,10 +237,13 @@ pub const Arguments = struct {
         clap.parseParam("--fetch-preconnect <STR>...      Preconnect to a URL while code is loading") catch unreachable,
         clap.parseParam("--max-http-header-size <INT>     Set the maximum size of HTTP headers in bytes. Default is 16KiB") catch unreachable,
         clap.parseParam("--expose-internals               Expose internals used for testing Bun itself. Usage of these APIs are completely unsupported.") catch unreachable,
+        clap.parseParam("--no-deprecation                 Suppress all reporting of the custom deprecation.") catch unreachable,
+        clap.parseParam("--throw-deprecation              Determine whether or not deprecation warnings result in errors.") catch unreachable,
+        clap.parseParam("--title <STR>                    Set the process title") catch unreachable,
     };

     const auto_or_run_params = [_]ParamType{
-        clap.parseParam("--filter <STR>...                Run a script in all workspace packages matching the pattern") catch unreachable,
+        clap.parseParam("-F, --filter <STR>...            Run a script in all workspace packages matching the pattern") catch unreachable,
         clap.parseParam("-b, --bun                        Force a script or package to use Bun's runtime instead of Node.js (via symlinking node)") catch unreachable,
         clap.parseParam("--shell <STR>                    Control the shell used for package.json scripts. Supports either 'bun' or 'system'") catch unreachable,
     };
@@ -412,7 +420,7 @@ pub const Arguments = struct {
             var secondbuf: bun.PathBuffer = undefined;
             const cwd = bun.getcwd(&secondbuf) catch return;

-            ctx.args.absolute_working_dir = try allocator.dupe(u8, cwd);
+            ctx.args.absolute_working_dir = try allocator.dupeZ(u8, cwd);
         }

         var parts = [_]string{ ctx.args.absolute_working_dir.?, config_path_ };
@@ -487,16 +495,16 @@ pub const Arguments = struct {
             }
         }

-        var cwd: []u8 = undefined;
+        var cwd: [:0]u8 = undefined;
         if (args.option("--cwd")) |cwd_arg| {
             cwd = brk: {
                 var outbuf: bun.PathBuffer = undefined;
                 const out = bun.path.joinAbs(try bun.getcwd(&outbuf), .loose, cwd_arg);
-                bun.sys.chdir(out).unwrap() catch |err| {
+                bun.sys.chdir("", out).unwrap() catch |err| {
                     Output.err(err, "Could not change directory to \"{s}\"\n", .{cwd_arg});
                     Global.exit(1);
                 };
-                break :brk try allocator.dupe(u8, out);
+                break :brk try allocator.dupeZ(u8, out);
             };
         } else {
             cwd = try bun.getcwdAlloc(allocator);
@@ -795,6 +803,15 @@ pub const Arguments = struct {
             if (args.flag("--expose-internals")) {
                 bun.JSC.ModuleLoader.is_allowed_to_use_internal_testing_apis = true;
             }
+            if (args.flag("--no-deprecation")) {
+                Bun__Node__ProcessNoDeprecation = true;
+            }
+            if (args.flag("--throw-deprecation")) {
+                Bun__Node__ProcessThrowDeprecation = true;
+            }
+            if (args.option("--title")) |title| {
+                Bun__Node__ProcessTitle = title;
+            }
         }

         if (opts.port != null and opts.origin == null) {
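The three new runtime flags mirror Node's process switches and land in the exported globals above (`Bun__Node__ProcessNoDeprecation`, `Bun__Node__ProcessThrowDeprecation`, `Bun__Node__ProcessTitle`), and `-F` becomes shorthand for `--filter`. A hedged sketch of the JS-visible behavior these are expected to produce:

```ts
// Invocation examples (assumed semantics, matching Node's flags):
//   bun --title my-worker --no-deprecation run app.ts
//   bun --throw-deprecation run app.ts
//   bun run -F './packages/*' build   // -F == --filter
process.emitWarning("legacy API", "DeprecationWarning");
// --no-deprecation:    the warning above is suppressed
// --throw-deprecation: the warning is raised as an error instead
console.log(process.title); // expected to reflect --title
```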
diff --git a/src/cli/list-of-yarn-commands.zig b/src/cli/list-of-yarn-commands.zig
index 12d0d23a2e..647cf2d29d 100644
--- a/src/cli/list-of-yarn-commands.zig
+++ b/src/cli/list-of-yarn-commands.zig
@@ -1,109 +1,77 @@
 const std = @import("std");
 const bun = @import("root").bun;

-// yarn v2.3 commands
-const yarn_v2 = [_][]const u8{
-    "add",
-    "bin",
-    "cache",
-    "config",
-    "dedupe",
-    "dlx",
-    "exec",
-    "explain",
-    "info",
-    "init",
-    "install",
-    "link",
-    "node",
-    "npm",
-    "pack",
-    "patch",
-    "plugin",
-    "rebuild",
-    "remove",
-    "run",
-    "set",
-    "unplug",
-    "up",
-    "why",
-    "workspace",
-    "workspaces",
-};
+pub const all_yarn_commands = bun.ComptimeStringMap(void, .{
+    // yarn v2.3 commands
+    .{"add"},
+    .{"bin"},
+    .{"cache"},
+    .{"config"},
+    .{"dedupe"},
+    .{"dlx"},
+    .{"exec"},
+    .{"explain"},
+    .{"info"},
+    .{"init"},
+    .{"install"},
+    .{"link"},
+    .{"node"},
+    .{"npm"},
+    .{"pack"},
+    .{"patch"},
+    .{"plugin"},
+    .{"rebuild"},
+    .{"remove"},
+    .{"run"},
+    .{"set"},
+    .{"unplug"},
+    .{"up"},
+    .{"why"},
+    .{"workspace"},
+    .{"workspaces"},

-// yarn v1 commands
-const yarn_v1 = [_][]const u8{
-    "access",
-    "add",
-    "audit",
-    "autoclean",
-    "bin",
-    "cache",
-    "check",
-    "config",
-    "create",
-    "exec",
-    "generate-lock-entry",
-    "generateLockEntry",
-    "global",
-    "help",
-    "import",
-    "info",
-    "init",
-    "install",
-    "licenses",
-    "link",
-    "list",
-    "login",
-    "logout",
-    "node",
-    "outdated",
-    "owner",
-    "pack",
-    "policies",
-    "publish",
-    "remove",
-    "run",
-    "tag",
-    "team",
-    "unlink",
-    "unplug",
-    "upgrade",
-    "upgrade-interactive",
-    "upgradeInteractive",
-    "version",
-    "versions",
-    "why",
-    "workspace",
-    "workspaces",
-};
-
-pub const all_yarn_commands = brk: {
-    @setEvalBranchQuota(9999);
-    var array: [yarn_v2.len + yarn_v1.len]u64 = undefined;
-    var array_i: usize = 0;
-    for (yarn_v2) |yarn| {
-        const hash = bun.hash(yarn);
-        @setEvalBranchQuota(9999);
-        if (std.mem.indexOfScalar(u64, array[0..array_i], hash) == null) {
-            @setEvalBranchQuota(9999);
-            array[array_i] = hash;
-            array_i += 1;
-        }
-    }
-
-    for (yarn_v1) |yarn| {
-        @setEvalBranchQuota(9999);
-
-        const hash = bun.hash(yarn);
-        if (std.mem.indexOfScalar(u64, array[0..array_i], hash) == null) {
-            @setEvalBranchQuota(9999);
-
-            array[array_i] = hash;
-            array_i += 1;
-        }
-    }
-
-    const final = array[0..array_i].*;
-    break :brk &final;
-};
+    // yarn v1 commands
+    .{"access"},
+    .{"audit"},
+    .{"autoclean"},
+    .{"check"},
+    .{"create"},
+    .{"generate-lock-entry"},
+    .{"generateLockEntry"},
+    .{"global"},
+    .{"help"},
+    .{"import"},
+    .{"licenses"},
+    .{"list"},
+    .{"login"},
+    .{"logout"},
+    .{"outdated"},
+    .{"owner"},
+    .{"policies"},
+    .{"publish"},
+    .{"tag"},
+    .{"team"},
+    .{"unlink"},
+    .{"upgrade"},
+    .{"upgrade-interactive"},
+    .{"upgradeInteractive"},
+    .{"version"},
+    .{"versions"},
+});
diff --git a/src/cli/outdated_command.zig b/src/cli/outdated_command.zig
index caa67f7b38..af827fcbcb 100644
--- a/src/cli/outdated_command.zig
+++ b/src/cli/outdated_command.zig
@@ -18,6 +18,8 @@ const FileSystem = bun.fs.FileSystem;
 const path = bun.path;
 const glob = bun.glob;
 const Table = bun.fmt.Table;
+const WorkspaceFilter = PackageManager.WorkspaceFilter;
+const OOM = bun.OOM;

 pub const OutdatedCommand = struct {
     pub fn exec(ctx: Command.Context) !void {
@@ -138,7 +140,7 @@ pub const OutdatedCommand = struct {
         original_cwd: string,
         manager: *PackageManager,
         filters: []const string,
-    ) error{OutOfMemory}![]const PackageID {
+    ) OOM![]const PackageID {
         const lockfile = manager.lockfile;
         const packages = lockfile.packages.slice();
         const pkg_names = packages.items(.name);
@@ -152,36 +154,10 @@ pub const OutdatedCommand = struct {
         }

         const converted_filters = converted_filters: {
-            const buf = try allocator.alloc(FilterType, filters.len);
+            const buf = try allocator.alloc(WorkspaceFilter, filters.len);
+            var path_buf: bun.PathBuffer = undefined;
             for (filters, buf) |filter, *converted| {
-                if ((filter.len == 1 and filter[0] == '*') or strings.eqlComptime(filter, "**")) {
-                    converted.* = .all;
-                    continue;
-                }
-
-                const is_path = filter.len > 0 and filter[0] == '.';
-
-                const joined_filter = if (is_path)
-                    strings.withoutTrailingSlash(path.joinAbsString(original_cwd, &[_]string{filter}, .posix))
-                else
-                    filter;
-
-                if (joined_filter.len == 0) {
-                    converted.* = FilterType.init(&.{}, is_path);
-                    continue;
-                }
-
-                const length = bun.simdutf.length.utf32.from.utf8.le(joined_filter);
-                const convert_buf = try allocator.alloc(u32, length);
-
-                const convert_result = bun.simdutf.convert.utf8.to.utf32.with_errors.le(joined_filter, convert_buf);
-                if (!convert_result.isSuccessful()) {
-                    // nothing would match
-                    converted.* = FilterType.init(&.{}, false);
-                    continue;
-                }
-
-                converted.* = FilterType.init(convert_buf[0..convert_result.count], is_path);
+                converted.* = try WorkspaceFilter.init(allocator, filter, original_cwd, &path_buf);
             }
             break :converted_filters buf;
         };
diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig
index ab39f134e8..4ae3c93ed1 100644
--- a/src/cli/run_command.zig
+++ b/src/cli/run_command.zig
@@ -46,7 +46,7 @@ const NpmArgs = struct {
     pub const package_version: string = "npm_package_version";
 };
 const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
-const yarn_commands: []const u64 = @import("./list-of-yarn-commands.zig").all_yarn_commands;
+const yarn_commands = @import("./list-of-yarn-commands.zig").all_yarn_commands;
 const ShellCompletions = @import("./shell_completions.zig");
 const PosixSpawn = bun.posix.spawn;
@@ -179,7 +179,7 @@ pub const RunCommand = struct {
         }

         // implicit yarn commands
-        if (std.mem.indexOfScalar(u64, yarn_commands, bun.hash(yarn_cmd)) == null) {
+        if (!yarn_commands.has(yarn_cmd)) {
             try copy_script.appendSlice(BUN_RUN);
             try copy_script.append(' ');
             try copy_script.appendSlice(yarn_cmd);
@@ -231,6 +231,18 @@ pub const RunCommand = struct {
                         delimiter = 0;
                         continue;
                     }
+                    if (strings.hasPrefixComptime(script[start..], "pnpm dlx ")) {
+                        try copy_script.appendSlice(BUN_BIN_NAME ++ " x ");
+                        entry_i += "pnpm dlx ".len;
+                        delimiter = 0;
+                        continue;
+                    }
+                    if (strings.hasPrefixComptime(script[start..], "pnpx ")) {
+                        try copy_script.appendSlice(BUN_BIN_NAME ++ " x ");
+                        entry_i += "pnpx ".len;
+                        delimiter = 0;
+                        continue;
+                    }
                 }

                 delimiter = 0;
Sequences or frozen arrays with octet or byte elements, - * Uint8Array, or Int8Array should be used for holding 8-bit data rather than - * ByteString. + * WARNING: Specifications should only use ByteString for interfacing with + * protocols that use bytes and strings interchangeably, such as HTTP. In + * general, strings should be represented with {@link DOMString} values, even + * if it is expected that values of the string will always be in ASCII or some + * 8-bit character encoding. Sequences or frozen arrays with octet or byte + * elements, {@link Uint8Array}, or {@link Int8Array} should be used for + * holding 8-bit data rather than `ByteString`. * * https://webidl.spec.whatwg.org/#idl-ByteString */ export const ByteString = builtinType()("ByteString"); /** * DOMString but encoded as `[]const u8` + * + * ```ts + * // foo.bind.ts + * import { fn, t } from "bindgen"; + * + * export const foo = fn({ + * args: { bar: t.UTF8String }, + * }) + * ``` + * + * ```zig + * // foo.zig + * pub fn foo(bar: []const u8) void { + * // ... + * } + * ``` */ export const UTF8String = builtinType()("UTF8String"); @@ -217,7 +237,7 @@ export namespace t { /** * Reference a type by string name instead of by object reference. This is - * required in some siutations like `Request` which can take an existing + * required in some siutations like {@link Request} which can take an existing * request object in as itself. */ export function ref(name: string): Type { @@ -275,13 +295,46 @@ export namespace t { } } -export type FuncOptions = FuncMetadata & - ( - | { - variants: FuncVariant[]; - } - | FuncVariant - ); +interface FuncOptionsWithVariant extends FuncMetadata { + /** + * Declare a function with multiple overloads. Each overload gets its own + * native function named "name`n`" where `n` is the 1-based index of the + * overload. + * + * ## Example + * ```ts + * // foo.bind.ts + * import { fn } from "bindgen"; + * + * export const foo = fn({ + * variants: [ + * { + * args: { a: t.i32 }, + * ret: t.i32, + * }, + * { + * args: { a: t.i32, b: t.i32 }, + * ret: t.boolean, + * } + * ] + * }); + * ``` + * + * ```zig + * // foo.zig + * pub fn foo1(a: i32) i32 { + * return a; + * } + * + * pub fn foo2(a: i32, b: i32) bool { + * return a == b; + * } + * ``` + */ + variants: FuncVariant[]; +} +type FuncWithoutOverloads = FuncMetadata & FuncVariant; +type FuncOptions = FuncOptionsWithVariant | FuncWithoutOverloads; export interface FuncMetadata { /** diff --git a/src/codegen/bindgen.ts b/src/codegen/bindgen.ts index f9f9d5e402..b3d8be92c6 100644 --- a/src/codegen/bindgen.ts +++ b/src/codegen/bindgen.ts @@ -4,6 +4,7 @@ // Generated bindings are available in `bun.generated..*` in Zig, // or `Generated::::*` in C++ from including `Generated.h`. import * as path from "node:path"; +import fs from "node:fs"; import { CodeWriter, TypeImpl, @@ -1076,7 +1077,15 @@ const unsortedFiles = readdirRecursiveWithExclusionsAndExtensionsSync(src, ["nod // Sort for deterministic output for (const fileName of [...unsortedFiles].sort()) { const zigFile = path.relative(src, fileName.replace(/\.bind\.ts$/, ".zig")); + const zigFilePath = path.join(src, zigFile); let file = files.get(zigFile); + if (!fs.existsSync(zigFilePath)) { + // It would be nice if this would generate the file with the correct boilerplate + const bindName = path.basename(fileName); + throw new Error( + `${bindName} is missing a corresponding Zig file at ${zigFile}. 
Please create it and make sure it matches signatures in ${bindName}.`, + ); + } if (!file) { file = { functions: [], typedefs: [] }; files.set(zigFile, file); diff --git a/src/codegen/class-definitions.ts b/src/codegen/class-definitions.ts index daf15ed5b5..64d5272f8c 100644 --- a/src/codegen/class-definitions.ts +++ b/src/codegen/class-definitions.ts @@ -59,6 +59,8 @@ export interface ClassDefinition { JSType?: string; noConstructor?: boolean; + final?: boolean; + // Do not try to track the `this` value in the constructor automatically. // That is a memory leak. wantsThis?: never; diff --git a/src/codegen/generate-classes.ts b/src/codegen/generate-classes.ts index b333caf88b..1875972f61 100644 --- a/src/codegen/generate-classes.ts +++ b/src/codegen/generate-classes.ts @@ -456,11 +456,11 @@ void ${proto}::finishCreation(JSC::VM& vm, JSC::JSGlobalObject* globalObject) `; } -function generatePrototypeHeader(typename) { +function generatePrototypeHeader(typename, final = true) { const proto = prototypeName(typename); return ` -class ${proto} final : public JSC::JSNonFinalObject { +class ${proto} ${final ? "final" : ""} : public JSC::JSNonFinalObject { public: using Base = JSC::JSNonFinalObject; @@ -483,7 +483,7 @@ class ${proto} final : public JSC::JSNonFinalObject { return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info()); } - private: + protected: ${proto}(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) : Base(vm, structure) { @@ -537,7 +537,7 @@ class ${name} final : public JSC::InternalFunction { static JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES call(JSC::JSGlobalObject*, JSC::CallFrame*); DECLARE_EXPORT_INFO; - private: + protected: ${name}(JSC::VM& vm, JSC::Structure* structure); void finishCreation(JSC::VM&, JSC::JSGlobalObject* globalObject, ${prototypeName(typeName)}* prototype); }; @@ -1176,7 +1176,23 @@ JSC_DEFINE_HOST_FUNCTION(${symbolName(typeName, name)}Callback, (JSGlobalObject return rows.map(a => a.trim()).join("\n"); } +function allCachedValues(obj: ClassDefinition) { + let values = (obj.values ?? []).slice().map(name => [name, `m_${name}`]); + for (const name in obj.proto) { + let cacheName = obj.proto[name].cache; + if (cacheName === true) { + cacheName = "m_" + name; + } else if (cacheName) { + cacheName = `m_${cacheName}`; + } + if (cacheName) { + values.push([name, cacheName]); + } + } + + return values; +} var extraIncludes = []; function generateClassHeader(typeName, obj: ClassDefinition) { var { klass, proto, JSType = "ObjectType", values = [], callbacks = {}, zigOnly = false } = obj; @@ -1234,8 +1250,10 @@ function generateClassHeader(typeName, obj: ClassDefinition) { suffix += `JSC::JSValue getInternalProperties(JSC::VM &vm, JSC::JSGlobalObject *globalObject, ${name}*);`; } + const final = obj.final ?? true; + return ` - class ${name} final : public JSC::JSDestructibleObject { + class ${name}${final ? " final" : ""} : public JSC::JSDestructibleObject { public: using Base = JSC::JSDestructibleObject; static ${name}* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure, void* ctx); @@ -1364,7 +1382,8 @@ function generateClassImpl(typeName, obj: ClassDefinition) { .join("\n"); for (const name in callbacks) { - DEFINE_VISIT_CHILDREN_LIST += "\n" + ` visitor.append(thisObject->m_callback_${name});`; + // Use appendHidden so it doesn't show up in the heap snapshot twice. 
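+ // (appendHidden still visits the slot and keeps the callback alive; it only hides this edge from the snapshot output.)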
+ DEFINE_VISIT_CHILDREN_LIST += "\n" + ` visitor.appendHidden(thisObject->m_callback_${name});`; } const values = (obj.values || []) @@ -1578,6 +1597,19 @@ void ${name}::analyzeHeap(JSCell* cell, HeapAnalyzer& analyzer) } Base::analyzeHeap(cell, analyzer); + ${allCachedValues(obj).length > 0 ? `auto& vm = thisObject->vm();` : ""} + + ${allCachedValues(obj) + .map( + ([name, cacheName]) => ` +if (JSValue ${cacheName}Value = thisObject->${cacheName}.get()) { + if (${cacheName}Value.isCell()) { + const Identifier& id = Identifier::fromString(vm, "${name}"_s); + analyzer.analyzePropertyNameEdge(cell, ${cacheName}Value.asCell(), id.impl()); + } +}`, + ) + .join("\n ")} } ${ @@ -1622,7 +1654,12 @@ ${DEFINE_VISIT_CHILDREN} } function generateHeader(typeName, obj) { - return generateClassHeader(typeName, obj).trim() + "\n\n"; + const fields = [ + generateClassHeader(typeName, obj).trim() + "\n\n", + !(obj.final ?? true) ? generatePrototypeHeader(typeName, false) : null, + ].filter(Boolean); + + return "\n" + fields.join("\n").trim(); } function generateImpl(typeName, obj) { @@ -1630,7 +1667,7 @@ function generateImpl(typeName, obj) { const proto = obj.proto; return [ - generatePrototypeHeader(typeName), + (obj.final ?? true) ? generatePrototypeHeader(typeName, true) : null, !obj.noConstructor ? generateConstructorHeader(typeName).trim() + "\n" : null, generatePrototype(typeName, obj).trim(), !obj.noConstructor ? generateConstructorImpl(typeName, obj).trim() : null, @@ -2029,7 +2066,7 @@ function generateLazyClassStructureHeader(typeName, { klass = {}, proto = {}, zi return ` JSC::Structure* ${className(typeName)}Structure() const { return m_${className(typeName)}.getInitializedOnMainThread(this); } JSC::JSObject* ${className(typeName)}Constructor() const { return m_${className(typeName)}.constructorInitializedOnMainThread(this); } - JSC::JSValue ${className(typeName)}Prototype() const { return m_${className(typeName)}.prototypeInitializedOnMainThread(this); } + JSC::JSObject* ${className(typeName)}Prototype() const { return m_${className(typeName)}.prototypeInitializedOnMainThread(this); } JSC::LazyClassStructure m_${className(typeName)}; `.trim(); } diff --git a/src/codegen/generate-jssink.ts b/src/codegen/generate-jssink.ts index 4271cd1212..7ec71fa427 100644 --- a/src/codegen/generate-jssink.ts +++ b/src/codegen/generate-jssink.ts @@ -1,12 +1,6 @@ import { join, resolve } from "path"; -const classes = [ - "ArrayBufferSink", - "FileSink", - "HTTPResponseSink", - "HTTPSResponseSink", - "FetchTaskletChunkedRequestSink", -]; +const classes = ["ArrayBufferSink", "FileSink", "HTTPResponseSink", "HTTPSResponseSink", "NetworkSink"]; function names(name) { return { @@ -736,24 +730,43 @@ extern "C" void ${name}__setDestroyCallback(EncodedJSValue encodedValue, uintptr void ${className}::analyzeHeap(JSCell* cell, HeapAnalyzer& analyzer) { + Base::analyzeHeap(cell, analyzer); auto* thisObject = jsCast<${className}*>(cell); if (void* wrapped = thisObject->wrapped()) { analyzer.setWrappedObjectForCell(cell, wrapped); // if (thisObject->scriptExecutionContext()) // analyzer.setLabelForCell(cell, makeString("url ", thisObject->scriptExecutionContext()->url().string())); } - Base::analyzeHeap(cell, analyzer); + } void ${controller}::analyzeHeap(JSCell* cell, HeapAnalyzer& analyzer) { + Base::analyzeHeap(cell, analyzer); auto* thisObject = jsCast<${controller}*>(cell); if (void* wrapped = thisObject->wrapped()) { analyzer.setWrappedObjectForCell(cell, wrapped); // if (thisObject->scriptExecutionContext()) // 
analyzer.setLabelForCell(cell, makeString("url ", thisObject->scriptExecutionContext()->url().string())); } - Base::analyzeHeap(cell, analyzer); + + auto& vm = cell->vm(); + + if (thisObject->m_onPull) { + JSValue onPull = thisObject->m_onPull.get(); + if (onPull.isCell()) { + const Identifier& id = Identifier::fromString(vm, "onPull"_s); + analyzer.analyzePropertyNameEdge(cell, onPull.asCell(), id.impl()); + } + } + + if (thisObject->m_onClose) { + JSValue onClose = thisObject->m_onClose.get(); + if (onClose.isCell()) { + const Identifier& id = Identifier::fromString(vm, "onClose"_s); + analyzer.analyzePropertyNameEdge(cell, onClose.asCell(), id.impl()); + } + } } @@ -763,8 +776,11 @@ void ${controller}::visitChildrenImpl(JSCell* cell, Visitor& visitor) { ${controller}* thisObject = jsCast<${controller}*>(cell); ASSERT_GC_OBJECT_INHERITS(thisObject, info()); Base::visitChildren(thisObject, visitor); - visitor.append(thisObject->m_onPull); - visitor.append(thisObject->m_onClose); + + // Avoid duplicating in the heap snapshot + visitor.appendHidden(thisObject->m_onPull); + visitor.appendHidden(thisObject->m_onClose); + void* ptr = thisObject->m_sinkPtr; if (ptr) visitor.addOpaqueRoot(ptr); diff --git a/src/codegen/generate-node-errors.ts b/src/codegen/generate-node-errors.ts index debbb07fc5..e4c807be70 100644 --- a/src/codegen/generate-node-errors.ts +++ b/src/codegen/generate-node-errors.ts @@ -15,6 +15,8 @@ enumHeader = ` // Generated by: src/codegen/generate-node-errors.ts #pragma once +#include <cstdint> + namespace Bun { static constexpr size_t NODE_ERROR_COUNT = ${NodeErrors.length}; enum class ErrorCode : uint8_t { @@ -25,6 +27,8 @@ listHeader = ` // Generated by: src/codegen/generate-node-errors.ts #pragma once +#include <cstdint> + struct ErrorCodeData { JSC::ErrorType type; WTF::ASCIILiteral name; diff --git a/src/compile_target.zig b/src/compile_target.zig index 5fee414738..82f0731dd3 100644 --- a/src/compile_target.zig +++ b/src/compile_target.zig @@ -28,6 +28,14 @@ const Libc = enum { /// musl libc musl, + /// suffix appended to the npm package name, `@oven-sh/bun-{os}-{arch}{libc}` + pub fn npmName(this: Libc) []const u8 { + return switch (this) { + .default => "", + .musl => "-musl", + }; + } + pub fn format(self: @This(), comptime _: []const u8, _: anytype, writer: anytype) !void { if (self == .musl) { try writer.writeAll("-musl"); @@ -64,21 +72,25 @@ pub fn toNPMRegistryURL(this: *const CompileTarget, buf: []u8) ![]const u8 { pub fn toNPMRegistryURLWithURL(this: *const CompileTarget, buf: []u8, registry_url: []const u8) ![]const u8 { return switch (this.os) { inline else => |os| switch (this.arch) { - inline else => |arch| switch (this.baseline) { - // https://registry.npmjs.org/@oven/bun-linux-x64/-/bun-linux-x64-0.1.6.tgz - inline else => |is_baseline| try std.fmt.bufPrint(buf, comptime "{s}/@oven/bun-" ++ - os.npmName() ++ "-" ++ arch.npmName() ++ - (if (is_baseline) "-baseline" else "") ++ - "/-/bun-" ++ - os.npmName() ++ "-" ++ arch.npmName() ++ - (if (is_baseline) "-baseline" else "") ++ - "-" ++ - "{d}.{d}.{d}.tgz", .{ - registry_url, - this.version.major, - this.version.minor, - this.version.patch, - }), + inline else => |arch| switch (this.libc) { + inline else => |libc| switch (this.baseline) { + // https://registry.npmjs.org/@oven/bun-linux-x64/-/bun-linux-x64-0.1.6.tgz + inline else => |is_baseline| try std.fmt.bufPrint(buf, comptime "{s}/@oven/bun-" ++ + os.npmName() ++ "-" ++ 
arch.npmName() ++ + libc.npmName() ++ + (if (is_baseline) "-baseline" else "") ++ + "-" ++ + "{d}.{d}.{d}.tgz", .{ + registry_url, + this.version.major, + this.version.minor, + this.version.patch, + }), + }, }, }, }; @@ -120,7 +132,7 @@ pub fn exePath(this: *const CompileTarget, buf: *bun.PathBuffer, version_str: [: bun.fs.FileSystem.instance.top_level_dir, buf, &.{ - bun.install.PackageManager.fetchCacheDirectoryPath(env).path, + bun.install.PackageManager.fetchCacheDirectoryPath(env, null).path, version_str, }, .auto, diff --git a/src/env_loader.zig b/src/env_loader.zig index 8ea780553f..6364684527 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -17,7 +17,7 @@ const Fs = @import("./fs.zig"); const URL = @import("./url.zig").URL; const Api = @import("./api/schema.zig").Api; const which = @import("./which.zig").which; -const s3 = @import("./s3.zig"); +const s3 = bun.S3; const DotEnvFileSuffix = enum { development, @@ -46,7 +46,7 @@ pub const Loader = struct { did_load_process: bool = false, reject_unauthorized: ?bool = null, - aws_credentials: ?s3.AWSCredentials = null, + aws_credentials: ?s3.S3Credentials = null, pub fn iterator(this: *const Loader) Map.HashTable.Iterator { return this.map.iterator(); @@ -115,7 +115,7 @@ pub const Loader = struct { } } - pub fn getAWSCredentials(this: *Loader) s3.AWSCredentials { + pub fn getS3Credentials(this: *Loader) s3.S3Credentials { if (this.aws_credentials) |credentials| { return credentials; } @@ -125,6 +125,7 @@ pub const Loader = struct { var region: []const u8 = ""; var endpoint: []const u8 = ""; var bucket: []const u8 = ""; + var session_token: []const u8 = ""; if (this.get("S3_ACCESS_KEY_ID")) |access_key| { accessKeyId = access_key; @@ -152,12 +153,18 @@ pub const Loader = struct { } else if (this.get("AWS_BUCKET")) |bucket_| { bucket = bucket_; } + if (this.get("S3_SESSION_TOKEN")) |token| { + session_token = token; + } else if (this.get("AWS_SESSION_TOKEN")) |token| { + session_token = token; + } this.aws_credentials = .{ .accessKeyId = accessKeyId, .secretAccessKey = secretAccessKey, .region = region, .endpoint = endpoint, .bucket = bucket, + .sessionToken = session_token, }; return this.aws_credentials.?; diff --git a/src/fs.zig b/src/fs.zig index 9f8cd22f24..b1feeadd4b 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -37,7 +37,7 @@ pub const Preallocate = struct { }; pub const FileSystem = struct { - top_level_dir: string, + top_level_dir: stringZ, // used on subsequent updates top_level_dir_buf: bun.PathBuffer = undefined, @@ -108,22 +108,14 @@ pub const FileSystem = struct { ENOTDIR, }; - pub fn init(top_level_dir: ?string) !*FileSystem { + pub fn init(top_level_dir: ?stringZ) !*FileSystem { return initWithForce(top_level_dir, false); } - pub fn initWithForce(top_level_dir_: ?string, comptime force: bool) !*FileSystem { + pub fn initWithForce(top_level_dir_: ?stringZ, comptime force: bool) !*FileSystem { const allocator = bun.fs_allocator; var top_level_dir = top_level_dir_ orelse (if (Environment.isBrowser) "/project/" else try bun.getcwdAlloc(allocator)); - - // Ensure there's a trailing separator in the top level directory - // This makes path resolution more reliable - if (!bun.path.isSepAny(top_level_dir[top_level_dir.len - 1])) { - const tld = try allocator.alloc(u8, top_level_dir.len + 1); - bun.copy(u8, tld, top_level_dir); - tld[tld.len - 1] = std.fs.path.sep; - top_level_dir = tld; - } + _ = &top_level_dir; if (!instance_loaded or force) { instance = FileSystem{ diff --git a/src/glob/GlobWalker.zig 
b/src/glob/GlobWalker.zig index f41b47f7a6..6498fbb7d4 100644 --- a/src/glob/GlobWalker.zig +++ b/src/glob/GlobWalker.zig @@ -1358,7 +1358,7 @@ pub fn GlobWalker_( return GlobAscii.match( pattern_component.patternSlice(this.pattern), filepath, - ); + ).matches(); } const codepoints = this.componentStringUnicode(pattern_component); return matchImpl( diff --git a/src/glob/ascii.zig b/src/glob/ascii.zig index 69413f9505..c2e4724cb1 100644 --- a/src/glob/ascii.zig +++ b/src/glob/ascii.zig @@ -181,6 +181,18 @@ pub fn valid_glob_indices(glob: []const u8, indices: std.ArrayList(BraceIndex)) } } +pub const MatchResult = enum { + no_match, + match, + + negate_no_match, + negate_match, + + pub fn matches(this: MatchResult) bool { + return this == .match or this == .negate_match; + } +}; + /// This function returns a value indicating whether the pathname `path` matches /// the pattern `glob`. /// @@ -208,7 +220,7 @@ pub fn valid_glob_indices(glob: []const u8, indices: std.ArrayList(BraceIndex)) /// Multiple "!" characters negate the pattern multiple times. /// "\" /// Used to escape any of the special characters above. -pub fn match(glob: []const u8, path: []const u8) bool { +pub fn match(glob: []const u8, path: []const u8) MatchResult { // This algorithm is based on https://research.swtch.com/glob var state = State{}; // Store the state when we see an opening '{' brace in a stack. @@ -290,7 +302,7 @@ pub fn match(glob: []const u8, path: []const u8) bool { (glob[state.glob_index] == ',' or glob[state.glob_index] == '}')) { if (state.skipBraces(glob, false) == .Invalid) - return false; // invalid pattern! + return .no_match; // invalid pattern! } continue; @@ -321,7 +333,7 @@ pub fn match(glob: []const u8, path: []const u8) bool { while (state.glob_index < glob.len and (first or glob[state.glob_index] != ']')) { var low = glob[state.glob_index]; if (!unescape(&low, glob, &state.glob_index)) - return false; // Invalid pattern + return .no_match; // Invalid pattern state.glob_index += 1; // If there is a - and the following character is not ], @@ -332,7 +344,7 @@ pub fn match(glob: []const u8, path: []const u8) bool { state.glob_index += 1; var h = glob[state.glob_index]; if (!unescape(&h, glob, &state.glob_index)) - return false; // Invalid pattern! + return .no_match; // Invalid pattern! state.glob_index += 1; break :blk h; } else low; @@ -342,7 +354,7 @@ pub fn match(glob: []const u8, path: []const u8) bool { first = false; } if (state.glob_index >= glob.len) - return false; // Invalid pattern! + return .no_match; // Invalid pattern! state.glob_index += 1; if (is_match != class_negated) { state.path_index += 1; @@ -351,7 +363,7 @@ pub fn match(glob: []const u8, path: []const u8) bool { }, '{' => if (state.path_index < path.len) { if (brace_stack.len >= brace_stack.stack.len) - return false; // Invalid pattern! Too many nested braces. + return .no_match; // Invalid pattern! Too many nested braces. // Push old state to the stack, and reset current state. state = brace_stack.push(&state); @@ -380,7 +392,7 @@ pub fn match(glob: []const u8, path: []const u8) bool { var cc = c; // Match escaped characters as literals. 
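// e.g. match("foo\\*bar", "foo*bar") == .match: the escaped "\\*" requires a literal '*' in the path instead of acting as a wildcard.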
if (!unescape(&cc, glob, &state.glob_index)) - return false; // Invalid pattern; + return .no_match; // Invalid pattern; const is_match = if (cc == '/') isSeparator(path[state.path_index]) @@ -416,7 +428,7 @@ pub fn match(glob: []const u8, path: []const u8) bool { if (brace_stack.len > 0) { // If in braces, find next option and reset path to index where we saw the '{' switch (state.skipBraces(glob, true)) { - .Invalid => return false, + .Invalid => return .no_match, .Comma => { state.path_index = brace_stack.last().path_index; continue; @@ -440,10 +452,10 @@ pub fn match(glob: []const u8, path: []const u8) bool { } } - return negated; + return if (negated) .negate_match else .no_match; } - return !negated; + return if (!negated) .match else .negate_no_match; } inline fn isSeparator(c: u8) bool { diff --git a/src/ini.zig b/src/ini.zig index 77fe57fe03..ab519a1ace 100644 --- a/src/ini.zig +++ b/src/ini.zig @@ -530,7 +530,7 @@ pub const IniTestingAPIs = struct { const envjs = callframe.argument(1); const env = if (envjs.isEmptyOrUndefinedOrNull()) globalThis.bunVM().transpiler.env else brk: { var envmap = bun.DotEnv.Map.HashTable.init(allocator); - var object_iter = JSC.JSPropertyIterator(.{ + var object_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(globalThis, envjs); @@ -538,7 +538,7 @@ pub const IniTestingAPIs = struct { try envmap.ensureTotalCapacity(object_iter.len); - while (object_iter.next()) |key| { + while (try object_iter.next()) |key| { const keyslice = try key.toOwnedSlice(allocator); var value = object_iter.value; if (value == .undefined) continue; diff --git a/src/install/bun.lock.zig b/src/install/bun.lock.zig index a865e21978..b34d21e009 100644 --- a/src/install/bun.lock.zig +++ b/src/install/bun.lock.zig @@ -556,7 +556,9 @@ pub const Stringifier = struct { try writer.writeByte('"'); // relative_path is empty string for root resolutions - try writer.writeAll(relative_path); + try writer.print("{}", .{ + bun.fmt.formatJSONStringUTF8(relative_path, .{ .quote = false }), + }); if (depth != 0) { try writer.writeByte('/'); @@ -565,8 +567,8 @@ pub const Stringifier = struct { const dep = deps_buf[dep_id]; const dep_name = dep.name.slice(buf); - try writer.print("{s}\": ", .{ - dep_name, + try writer.print("{}\": ", .{ + bun.fmt.formatJSONStringUTF8(dep_name, .{ .quote = false }), }); const pkg_name = pkg_names[pkg_id]; @@ -760,9 +762,9 @@ pub const Stringifier = struct { try writer.writeAll(", "); } - try writer.print("\"{s}\": \"{s}\"", .{ - dep.name.slice(buf), - dep.version.literal.slice(buf), + try writer.print("{}: {}", .{ + bun.fmt.formatJSONStringUTF8(dep.name.slice(buf), .{}), + bun.fmt.formatJSONStringUTF8(dep.version.literal.slice(buf), .{}), }); } @@ -779,10 +781,10 @@ pub const Stringifier = struct { for (optional_peers_buf.items, 0..) |optional_peer, i| { try writer.print( - \\{s}"{s}"{s} + \\{s}{}{s} , .{ if (i != 0) " " else "", - optional_peer.slice(buf), + bun.fmt.formatJSONStringUTF8(optional_peer.slice(buf), .{}), if (i != optional_peers_buf.items.len - 1) "," else "", }); } @@ -873,14 +875,25 @@ pub const Stringifier = struct { // need a way to detect new/deleted workspaces if (pkg_id == 0) { try writer.writeAll("\"\": {"); + const root_name = pkg_names[0].slice(buf); + if (root_name.len > 0) { + try writer.writeByte('\n'); + try incIndent(writer, indent); + try writer.print("\"name\": {}", .{ + bun.fmt.formatJSONStringUTF8(root_name, .{}), + }); + + // TODO(dylan-conway) should we save version? 
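+ // for now only the root package's name is serialized; the version is left out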
+ any = true; + } } else { try writer.print("{}: {{", .{ bun.fmt.formatJSONStringUTF8(res.slice(buf), .{}), }); try writer.writeByte('\n'); try incIndent(writer, indent); - try writer.print("\"name\": \"{s}\"", .{ - pkg_names[pkg_id].slice(buf), + try writer.print("\"name\": {}", .{ + bun.fmt.formatJSONStringUTF8(pkg_names[pkg_id].slice(buf), .{}), }); if (workspace_versions.get(pkg_name_hashes[pkg_id])) |version| { @@ -929,7 +942,10 @@ pub const Stringifier = struct { const name = dep.name.slice(buf); const version = dep.version.literal.slice(buf); - try writer.print("\"{s}\": \"{s}\"", .{ name, version }); + try writer.print("{}: {}", .{ + bun.fmt.formatJSONStringUTF8(name, .{}), + bun.fmt.formatJSONStringUTF8(version, .{}), + }); } if (!first) { @@ -953,9 +969,11 @@ pub const Stringifier = struct { for (optional_peers_buf.items) |optional_peer| { try writeIndent(writer, indent); try writer.print( - \\"{s}", + \\{}, \\ - , .{optional_peer.slice(buf)}); + , .{ + bun.fmt.formatJSONStringUTF8(optional_peer.slice(buf), .{}), + }); } try decIndent(writer, indent); try writer.writeByte(']'); @@ -1618,7 +1636,7 @@ pub fn parseIntoBinaryLockfile( } } - lockfile.hoist(log, .resolvable, {}) catch |err| { + lockfile.resolve(log) catch |err| { switch (err) { error.OutOfMemory => |oom| return oom, else => { diff --git a/src/install/install.zig b/src/install/install.zig index 495c63ee0a..546dbf2ef4 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -2772,6 +2772,67 @@ pub const PackageManager = struct { last_reported_slow_lifecycle_script_at: u64 = 0, cached_tick_for_slow_lifecycle_script_logging: u64 = 0, + pub const WorkspaceFilter = union(enum) { + all, + name: []const u32, + path: []const u32, + + pub fn init(allocator: std.mem.Allocator, input: string, cwd: string, path_buf: []u8) OOM!WorkspaceFilter { + if ((input.len == 1 and input[0] == '*') or strings.eqlComptime(input, "**")) { + return .all; + } + + var remain = input; + + var prepend_negate = false; + while (remain.len > 0 and remain[0] == '!') { + prepend_negate = !prepend_negate; + remain = remain[1..]; + } + + const is_path = remain.len > 0 and remain[0] == '.'; + + const filter = if (is_path) + strings.withoutTrailingSlash(bun.path.joinAbsStringBuf(cwd, path_buf, &.{remain}, .posix)) + else + remain; + + if (filter.len == 0) { + // won't match anything + return .{ .path = &.{} }; + } + + // TODO(dylan-conway): finish encoding agnostic glob matcher so we don't + // need to convert + const len = bun.simdutf.length.utf32.from.utf8.le(filter) + @intFromBool(prepend_negate); + const buf = try allocator.alloc(u32, len); + + const result = bun.simdutf.convert.utf8.to.utf32.with_errors.le(filter, buf[@intFromBool(prepend_negate)..]); + if (!result.isSuccessful()) { + // won't match anything + return .{ .path = &.{} }; + } + + if (prepend_negate) { + buf[0] = '!'; + } + + const pattern = buf[0..len]; + + return if (is_path) + .{ .path = pattern } + else + .{ .name = pattern }; + } + + pub fn deinit(this: WorkspaceFilter, allocator: std.mem.Allocator) void { + switch (this) { + .path, .name => |pattern| allocator.free(pattern), + .all => {}, + } + } + }; + pub fn reportSlowLifecycleScripts(this: *PackageManager, log_level: Options.LogLevel) void { if (log_level == .silent) return; if (bun.getRuntimeFeatureFlag("BUN_DISABLE_SLOW_LIFECYCLE_SCRIPT_LOGGING")) { @@ -3533,7 +3594,7 @@ pub const PackageManager = struct { noinline fn ensureCacheDirectory(this: *PackageManager) std.fs.Dir { loop: while (true) { if 
(this.options.enable.cache) { - const cache_dir = fetchCacheDirectoryPath(this.env); + const cache_dir = fetchCacheDirectoryPath(this.env, &this.options); this.cache_directory_path = this.allocator.dupeZ(u8, cache_dir.path) catch bun.outOfMemory(); return std.fs.cwd().makeOpenPath(cache_dir.path, .{}) catch { @@ -6234,11 +6295,17 @@ pub const PackageManager = struct { } const CacheDir = struct { path: string, is_node_modules: bool }; - pub fn fetchCacheDirectoryPath(env: *DotEnv.Loader) CacheDir { + pub fn fetchCacheDirectoryPath(env: *DotEnv.Loader, options: ?*const Options) CacheDir { if (env.get("BUN_INSTALL_CACHE_DIR")) |dir| { return CacheDir{ .path = Fs.FileSystem.instance.abs(&[_]string{dir}), .is_node_modules = false }; } + if (options) |opts| { + if (opts.cache_directory.len > 0) { + return CacheDir{ .path = Fs.FileSystem.instance.abs(&[_]string{opts.cache_directory}), .is_node_modules = false }; + } + } + if (env.get("BUN_INSTALL")) |dir| { var parts = [_]string{ dir, "install/", "cache/" }; return CacheDir{ .path = Fs.FileSystem.instance.abs(&parts), .is_node_modules = false }; @@ -7119,6 +7186,8 @@ pub const PackageManager = struct { save_text_lockfile: bool = false, + lockfile_only: bool = false, + pub const PublishConfig = struct { access: ?Access = null, tag: string = "", @@ -7176,6 +7245,7 @@ pub const PackageManager = struct { pub const Update = struct { development: bool = false, optional: bool = false, + peer: bool = false, }; pub fn openGlobalDir(explicit_global_dir: string) !std.fs.Dir { @@ -7274,6 +7344,10 @@ pub const PackageManager = struct { this.did_override_default_scope = this.scope.url_hash != Npm.Registry.default_url_hash; } if (bun_install_) |config| { + if (config.cache_directory) |cache_directory| { + this.cache_directory = cache_directory; + } + if (config.scoped) |scoped| { for (scoped.scopes.keys(), scoped.scopes.values()) |name, *registry_| { var registry = registry_.*; @@ -7362,6 +7436,10 @@ pub const PackageManager = struct { this.max_concurrent_lifecycle_scripts = jobs; } + if (config.cache_directory) |cache_dir| { + this.cache_directory = cache_dir; + } + this.explicit_global_directory = config.global_dir orelse this.explicit_global_directory; } @@ -7465,6 +7543,10 @@ pub const PackageManager = struct { this.scope.url = URL.parse(cli.registry); } + if (cli.cache_dir) |cache_dir| { + this.cache_directory = cache_dir; + } + if (cli.exact) { this.enable.exact_versions = true; } @@ -7529,6 +7611,8 @@ pub const PackageManager = struct { this.save_text_lockfile = save_text_lockfile; } + this.lockfile_only = cli.lockfile_only; + const disable_progress_bar = default_disable_progress_bar or cli.no_progress; if (cli.verbose) { @@ -7576,8 +7660,13 @@ pub const PackageManager = struct { this.enable.force_save_lockfile = true; } - this.update.development = cli.development; - if (!this.update.development) this.update.optional = cli.optional; + if (cli.development) { + this.update.development = cli.development; + } else if (cli.optional) { + this.update.optional = cli.optional; + } else if (cli.peer) { + this.update.peer = cli.peer; + } switch (cli.patch) { .nothing => {}, @@ -8537,6 +8626,7 @@ pub const PackageManager = struct { pub fn supportsWorkspaceFiltering(this: Subcommand) bool { return switch (this) { .outdated => true, + .install => true, // .pack => true, else => false, }; @@ -8753,7 +8843,7 @@ pub const PackageManager = struct { } else child_path; if (strings.eqlLong(maybe_workspace_path, path, true)) { - fs.top_level_dir = parent; + fs.top_level_dir = 
try bun.default_allocator.dupeZ(u8, parent); found = true; child_json.close(); if (comptime Environment.isWindows) { @@ -8770,16 +8860,15 @@ pub const PackageManager = struct { } } - fs.top_level_dir = child_cwd; + fs.top_level_dir = try bun.default_allocator.dupeZ(u8, child_cwd); break :root_package_json_file child_json; }; - try bun.sys.chdir(fs.top_level_dir).unwrap(); + try bun.sys.chdir(fs.top_level_dir, fs.top_level_dir).unwrap(); try BunArguments.loadConfig(ctx.allocator, cli.config, ctx, .InstallCommand); bun.copy(u8, &cwd_buf, fs.top_level_dir); - cwd_buf[fs.top_level_dir.len] = std.fs.path.sep; - cwd_buf[fs.top_level_dir.len + 1] = 0; - fs.top_level_dir = cwd_buf[0 .. fs.top_level_dir.len + 1]; + cwd_buf[fs.top_level_dir.len] = 0; + fs.top_level_dir = cwd_buf[0..fs.top_level_dir.len :0]; package_json_cwd = try bun.getFdPath(root_package_json_file.handle, &package_json_cwd_buf); const entries_option = try fs.fs.readDirectory(fs.top_level_dir, null, 0, true); @@ -9344,7 +9433,7 @@ pub const PackageManager = struct { } else { // bun link lodash switch (manager.options.log_level) { - inline else => |log_level| try manager.updatePackageJSONAndInstallWithManager(ctx, log_level), + inline else => |log_level| try manager.updatePackageJSONAndInstallWithManager(ctx, original_cwd, log_level), } } } @@ -9508,6 +9597,7 @@ pub const PackageManager = struct { clap.parseParam("--network-concurrency Maximum number of concurrent network requests (default 48)") catch unreachable, clap.parseParam("--save-text-lockfile Save a text-based lockfile") catch unreachable, clap.parseParam("--omit ... Exclude 'dev', 'optional', or 'peer' dependencies from install") catch unreachable, + clap.parseParam("--lockfile-only Generate a lockfile without installing dependencies") catch unreachable, clap.parseParam("-h, --help Print this help menu") catch unreachable, }; @@ -9515,7 +9605,9 @@ pub const PackageManager = struct { clap.parseParam("-d, --dev Add dependency to \"devDependencies\"") catch unreachable, clap.parseParam("-D, --development") catch unreachable, clap.parseParam("--optional Add dependency to \"optionalDependencies\"") catch unreachable, + clap.parseParam("--peer Add dependency to \"peerDependencies\"") catch unreachable, clap.parseParam("-E, --exact Add the exact version instead of the ^range") catch unreachable, + clap.parseParam("--filter ... Install packages for the matching workspaces") catch unreachable, clap.parseParam(" ... ") catch unreachable, }); @@ -9536,6 +9628,7 @@ pub const PackageManager = struct { clap.parseParam("-d, --dev Add dependency to \"devDependencies\"") catch unreachable, clap.parseParam("-D, --development") catch unreachable, clap.parseParam("--optional Add dependency to \"optionalDependencies\"") catch unreachable, + clap.parseParam("--peer Add dependency to \"peerDependencies\"") catch unreachable, clap.parseParam("-E, --exact Add the exact version instead of the ^range") catch unreachable, clap.parseParam(" ... \"name\" or \"name@version\" of package(s) to install") catch unreachable, }); @@ -9565,7 +9658,7 @@ pub const PackageManager = struct { const outdated_params: []const ParamType = &(shared_params ++ [_]ParamType{ // clap.parseParam("--json Output outdated information in JSON format") catch unreachable, - clap.parseParam("--filter ... Display outdated dependencies for each matching workspace") catch unreachable, + clap.parseParam("-F, --filter ... Display outdated dependencies for each matching workspace") catch unreachable, clap.parseParam(" ... 
Package patterns to filter by") catch unreachable, }); @@ -9586,7 +9679,7 @@ pub const PackageManager = struct { }); pub const CommandLineArguments = struct { - cache_dir: string = "", + cache_dir: ?string = null, lockfile: string = "", token: string = "", global: bool = false, @@ -9619,6 +9712,7 @@ pub const PackageManager = struct { development: bool = false, optional: bool = false, + peer: bool = false, omit: ?Omit = null, @@ -9637,6 +9731,8 @@ pub const PackageManager = struct { save_text_lockfile: ?bool = null, + lockfile_only: bool = false, + const PatchOpts = union(enum) { nothing: struct {}, patch: struct {}, @@ -9969,6 +10065,11 @@ pub const PackageManager = struct { cli.trusted = args.flag("--trust"); cli.no_summary = args.flag("--no-summary"); cli.ca = args.options("--ca"); + cli.lockfile_only = args.flag("--lockfile-only"); + + if (args.option("--cache-dir")) |cache_dir| { + cli.cache_dir = cache_dir; + } if (args.option("--cafile")) |ca_file_name| { cli.ca_file_name = ca_file_name; @@ -10088,6 +10189,7 @@ pub const PackageManager = struct { if (comptime subcommand == .add or subcommand == .install) { cli.development = args.flag("--development") or args.flag("--dev"); cli.optional = args.flag("--optional"); + cli.peer = args.flag("--peer"); cli.exact = args.flag("--exact"); } @@ -10110,7 +10212,7 @@ pub const PackageManager = struct { buf[cwd_.len] = 0; final_path = buf[0..cwd_.len :0]; } - bun.sys.chdir(final_path).unwrap() catch |err| { + bun.sys.chdir("", final_path).unwrap() catch |err| { Output.errGeneric("failed to change directory to \"{s}\": {s}\n", .{ final_path, @errorName(err) }); Global.crash(); }; @@ -10438,7 +10540,7 @@ pub const PackageManager = struct { } switch (manager.options.log_level) { - inline else => |log_level| try manager.updatePackageJSONAndInstallWithManager(ctx, log_level), + inline else => |log_level| try manager.updatePackageJSONAndInstallWithManager(ctx, original_cwd, log_level), } if (manager.options.patch_features == .patch) { @@ -10569,6 +10671,7 @@ pub const PackageManager = struct { fn updatePackageJSONAndInstallWithManager( manager: *PackageManager, ctx: Command.Context, + original_cwd: string, comptime log_level: Options.LogLevel, ) !void { var update_requests = UpdateRequest.Array.initCapacity(manager.allocator, 64) catch bun.outOfMemory(); @@ -10602,6 +10705,7 @@ pub const PackageManager = struct { ctx, updates, manager.subcommand, + original_cwd, log_level, ); } @@ -10611,6 +10715,7 @@ pub const PackageManager = struct { ctx: Command.Context, updates: []UpdateRequest, subcommand: Subcommand, + original_cwd: string, comptime log_level: Options.LogLevel, ) !void { if (manager.log.errors > 0) { @@ -10674,6 +10779,8 @@ pub const PackageManager = struct { "devDependencies" else if (manager.options.update.optional) "optionalDependencies" + else if (manager.options.update.peer) + "peerDependencies" else "dependencies"; var any_changes = false; @@ -10876,7 +10983,7 @@ pub const PackageManager = struct { break :brk .{ root_package_json.source.contents, root_package_json_path_buf[0..root_package_json_path.len :0] }; }; - try manager.installWithManager(ctx, root_package_json_source, log_level); + try manager.installWithManager(ctx, root_package_json_source, original_cwd, log_level); if (subcommand == .update or subcommand == .add or subcommand == .link) { for (updates) |request| { @@ -12145,7 +12252,7 @@ pub const PackageManager = struct { // TODO(dylan-conway): print `bun install ` or `bun add ` before logs from `init`. 
// and cleanup install/add subcommand usage - var manager, _ = try init(ctx, cli, .install); + var manager, const original_cwd = try init(ctx, cli, .install); // switch to `bun add ` if (subcommand == .add) { @@ -12155,7 +12262,7 @@ pub const PackageManager = struct { Output.flush(); } return try switch (manager.options.log_level) { - inline else => |log_level| manager.updatePackageJSONAndInstallWithManager(ctx, log_level), + inline else => |log_level| manager.updatePackageJSONAndInstallWithManager(ctx, original_cwd, log_level), }; } @@ -12173,7 +12280,7 @@ pub const PackageManager = struct { }; try switch (manager.options.log_level) { - inline else => |log_level| manager.installWithManager(ctx, package_json_contents, log_level), + inline else => |log_level| manager.installWithManager(ctx, package_json_contents, original_cwd, log_level), }; if (manager.any_failed_to_install) { @@ -13807,12 +13914,14 @@ pub const PackageManager = struct { pub fn installPackages( this: *PackageManager, ctx: Command.Context, + workspace_filters: []const WorkspaceFilter, + install_root_dependencies: bool, comptime log_level: PackageManager.Options.LogLevel, ) !PackageInstall.Summary { const original_trees = this.lockfile.buffers.trees; const original_tree_dep_ids = this.lockfile.buffers.hoisted_dependencies; - try this.lockfile.hoist(this.log, .filter, this); + try this.lockfile.filter(this.log, this, install_root_dependencies, workspace_filters); defer { this.lockfile.buffers.trees = original_trees; @@ -14276,6 +14385,7 @@ pub const PackageManager = struct { manager: *PackageManager, ctx: Command.Context, root_package_json_contents: string, + original_cwd: string, comptime log_level: Options.LogLevel, ) !void { @@ -14890,10 +15000,99 @@ pub const PackageManager = struct { const lockfile_before_install = manager.lockfile; + const save_format: Lockfile.LoadResult.LockfileFormat = if (manager.options.save_text_lockfile) + .text + else switch (load_result) { + .not_found => .binary, + .err => |err| err.format, + .ok => |ok| ok.format, + }; + + if (manager.options.lockfile_only) { + // save the lockfile and exit. make sure metahash is generated for binary lockfile + + manager.lockfile.meta_hash = try manager.lockfile.generateMetaHash( + PackageManager.verbose_install or manager.options.do.print_meta_hash_string, + packages_len_before_install, + ); + + try manager.saveLockfile(&load_result, save_format, had_any_diffs, lockfile_before_install, packages_len_before_install, log_level); + + if (manager.options.do.summary) { + // TODO(dylan-conway): packages aren't installed but we can still print + // added/removed/updated direct dependencies. 
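+ // until then, report which lockfile was written and how many packages it records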
+ Output.pretty( + \\ + \\Saved {s} ({d} package{s}) + , .{ + switch (save_format) { + .text => "bun.lock", + .binary => "bun.lockb", + }, + manager.lockfile.packages.len, + if (manager.lockfile.packages.len == 1) "" else "s", + }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + Output.pretty("\n", .{}); + } + Output.flush(); + return; + } + + var path_buf: bun.PathBuffer = undefined; + var workspace_filters: std.ArrayListUnmanaged(WorkspaceFilter) = .{}; + // only populated when subcommand is `.install` + if (manager.subcommand == .install and manager.options.filter_patterns.len > 0) { + try workspace_filters.ensureUnusedCapacity(manager.allocator, manager.options.filter_patterns.len); + for (manager.options.filter_patterns) |pattern| { + try workspace_filters.append(manager.allocator, try WorkspaceFilter.init(manager.allocator, pattern, original_cwd, &path_buf)); + } + } + defer workspace_filters.deinit(manager.allocator); + + var install_root_dependencies = workspace_filters.items.len == 0; + if (!install_root_dependencies) { + const pkg_names = manager.lockfile.packages.items(.name); + + const abs_root_path = abs_root_path: { + if (comptime !Environment.isWindows) { + break :abs_root_path strings.withoutTrailingSlash(FileSystem.instance.top_level_dir); + } + + var abs_path = Path.pathToPosixBuf(u8, FileSystem.instance.top_level_dir, &path_buf); + break :abs_root_path strings.withoutTrailingSlash(abs_path[Path.windowsVolumeNameLen(abs_path)[0]..]); + }; + + for (workspace_filters.items) |filter| { + const pattern, const path_or_name = switch (filter) { + .name => |pattern| .{ pattern, pkg_names[0].slice(manager.lockfile.buffers.string_bytes.items) }, + .path => |pattern| .{ pattern, abs_root_path }, + .all => { + install_root_dependencies = true; + continue; + }, + }; + + switch (bun.glob.walk.matchImpl(pattern, path_or_name)) { + .match, .negate_match => install_root_dependencies = true, + + .negate_no_match => { + // always skip if a pattern specifically says "!" + install_root_dependencies = false; + break; + }, + + .no_match => {}, + } + } + } + var install_summary = PackageInstall.Summary{}; if (manager.options.do.install_packages) { install_summary = try manager.installPackages( ctx, + workspace_filters.items, + install_root_dependencies, log_level, ); } @@ -14925,79 +15124,8 @@ pub const PackageManager = struct { // It's unnecessary work to re-save the lockfile if there are no changes if (manager.options.do.save_lockfile and (should_save_lockfile or manager.lockfile.isEmpty() or manager.options.enable.force_save_lockfile)) - save: { - if (manager.lockfile.isEmpty()) { - if (!manager.options.dry_run) delete: { - const delete_format = switch (load_result) { - .not_found => break :delete, - .err => |err| err.format, - .ok => |ok| ok.format, - }; - - std.fs.cwd().deleteFileZ(if (delete_format == .text) "bun.lock" else "bun.lockb") catch |err| brk: { - // we don't care - if (err == error.FileNotFound) { - if (had_any_diffs) break :save; - break :brk; - } - - if (log_level != .silent) Output.prettyErrorln("\nerror: {s} deleting empty lockfile", .{@errorName(err)}); - break :save; - }; - } - if (!manager.options.global) { - if (log_level != .silent) { - switch (manager.subcommand) { - .remove => Output.prettyErrorln("\npackage.json has no dependencies! Deleted empty lockfile", .{}), - else => Output.prettyErrorln("No packages! 
Deleted empty lockfile", .{}), - } - } - } - - break :save; - } - - var save_node: *Progress.Node = undefined; - - if (comptime log_level.showProgress()) { - manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; - save_node = manager.progress.start(ProgressStrings.save(), 0); - save_node.activate(); - - manager.progress.refresh(); - } - - const save_format: Lockfile.LoadResult.LockfileFormat = if (manager.options.save_text_lockfile) - .text - else switch (load_result) { - .not_found => .binary, - .err => |err| err.format, - .ok => |ok| ok.format, - }; - - manager.lockfile.saveToDisk(save_format, manager.options.log_level.isVerbose()); - - if (comptime Environment.allow_assert) { - if (load_result.loadedFromTextLockfile()) { - if (!try manager.lockfile.eql(lockfile_before_install, packages_len_before_install, manager.allocator)) { - Output.panic("Lockfile non-deterministic after saving", .{}); - } - } else { - if (manager.lockfile.hasMetaHashChanged(false, packages_len_before_install) catch false) { - Output.panic("Lockfile metahash non-deterministic after saving", .{}); - } - } - } - - if (comptime log_level.showProgress()) { - save_node.end(); - manager.progress.refresh(); - manager.progress.root.end(); - manager.progress = .{}; - } else if (comptime log_level != .silent) { - Output.prettyErrorln("Saved lockfile", .{}); - Output.flush(); - } + { + try manager.saveLockfile(&load_result, save_format, had_any_diffs, lockfile_before_install, packages_len_before_install, log_level); } if (needs_new_lockfile) { @@ -15024,7 +15152,7 @@ pub const PackageManager = struct { } } - if (manager.options.do.run_scripts) { + if (manager.options.do.run_scripts and install_root_dependencies) { if (manager.root_lifecycle_scripts) |scripts| { if (comptime Environment.allow_assert) { bun.assert(scripts.total > 0); @@ -15048,94 +15176,8 @@ pub const PackageManager = struct { } } - var printed_timestamp = false; if (comptime log_level != .silent) { - if (manager.options.do.summary) { - var printer = Lockfile.Printer{ - .lockfile = manager.lockfile, - .options = manager.options, - .updates = manager.update_requests, - .successfully_installed = install_summary.successfully_installed, - }; - - switch (Output.enable_ansi_colors) { - inline else => |enable_ansi_colors| { - try Lockfile.Printer.Tree.print(&printer, manager, Output.WriterType, Output.writer(), enable_ansi_colors, log_level); - }, - } - - if (!did_meta_hash_change) { - manager.summary.remove = 0; - manager.summary.add = 0; - manager.summary.update = 0; - } - - if (install_summary.success > 0) { - // it's confusing when it shows 3 packages and says it installed 1 - const pkgs_installed = @max( - install_summary.success, - @as( - u32, - @truncate(manager.update_requests.len), - ), - ); - Output.pretty("{d} package{s} installed ", .{ pkgs_installed, if (pkgs_installed == 1) "" else "s" }); - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - printed_timestamp = true; - printBlockedPackagesInfo(install_summary, manager.options.global); - - if (manager.summary.remove > 0) { - Output.pretty("Removed: {d}\n", .{manager.summary.remove}); - } - } else if (manager.summary.remove > 0) { - if (manager.subcommand == .remove) { - for (manager.update_requests) |request| { - Output.prettyln("- {s}", .{request.name}); - } - } - - Output.pretty("{d} package{s} removed ", .{ manager.summary.remove, if (manager.summary.remove == 1) "" else "s" }); - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - 
printed_timestamp = true; - printBlockedPackagesInfo(install_summary, manager.options.global); - } else if (install_summary.skipped > 0 and install_summary.fail == 0 and manager.update_requests.len == 0) { - const count = @as(PackageID, @truncate(manager.lockfile.packages.len)); - if (count != install_summary.skipped) { - Output.pretty("Checked {d} install{s} across {d} package{s} (no changes) ", .{ - install_summary.skipped, - if (install_summary.skipped == 1) "" else "s", - count, - if (count == 1) "" else "s", - }); - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - printed_timestamp = true; - printBlockedPackagesInfo(install_summary, manager.options.global); - } else { - Output.pretty("Done! Checked {d} package{s} (no changes) ", .{ - install_summary.skipped, - if (install_summary.skipped == 1) "" else "s", - }); - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - printed_timestamp = true; - printBlockedPackagesInfo(install_summary, manager.options.global); - } - } - - if (install_summary.fail > 0) { - Output.prettyln("Failed to install {d} package{s}\n", .{ install_summary.fail, if (install_summary.fail == 1) "" else "s" }); - Output.flush(); - } - } - } - - if (comptime log_level != .silent) { - if (manager.options.do.summary) { - if (!printed_timestamp) { - Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); - Output.prettyln(" done", .{}); - printed_timestamp = true; - } - } + try manager.printInstallSummary(ctx, &install_summary, did_meta_hash_change, log_level); } if (install_summary.fail > 0) { @@ -15145,7 +15187,182 @@ pub const PackageManager = struct { Output.flush(); } - fn printBlockedPackagesInfo(summary: PackageInstall.Summary, global: bool) void { + fn printInstallSummary( + this: *PackageManager, + ctx: Command.Context, + install_summary: *const PackageInstall.Summary, + did_meta_hash_change: bool, + comptime log_level: Options.LogLevel, + ) !void { + var printed_timestamp = false; + if (this.options.do.summary) { + var printer = Lockfile.Printer{ + .lockfile = this.lockfile, + .options = this.options, + .updates = this.update_requests, + .successfully_installed = install_summary.successfully_installed, + }; + + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + try Lockfile.Printer.Tree.print(&printer, this, Output.WriterType, Output.writer(), enable_ansi_colors, log_level); + }, + } + + if (!did_meta_hash_change) { + this.summary.remove = 0; + this.summary.add = 0; + this.summary.update = 0; + } + + if (install_summary.success > 0) { + // it's confusing when it shows 3 packages and says it installed 1 + const pkgs_installed = @max( + install_summary.success, + @as( + u32, + @truncate(this.update_requests.len), + ), + ); + Output.pretty("{d} package{s} installed ", .{ pkgs_installed, if (pkgs_installed == 1) "" else "s" }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + printed_timestamp = true; + printBlockedPackagesInfo(install_summary, this.options.global); + + if (this.summary.remove > 0) { + Output.pretty("Removed: {d}\n", .{this.summary.remove}); + } + } else if (this.summary.remove > 0) { + if (this.subcommand == .remove) { + for (this.update_requests) |request| { + Output.prettyln("- {s}", .{request.name}); + } + } + + Output.pretty("{d} package{s} removed ", .{ this.summary.remove, if (this.summary.remove == 1) "" else "s" }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + printed_timestamp = true; + 
printBlockedPackagesInfo(install_summary, this.options.global); + } else if (install_summary.skipped > 0 and install_summary.fail == 0 and this.update_requests.len == 0) { + const count = @as(PackageID, @truncate(this.lockfile.packages.len)); + if (count != install_summary.skipped) { + Output.pretty("Checked {d} install{s} across {d} package{s} (no changes) ", .{ + install_summary.skipped, + if (install_summary.skipped == 1) "" else "s", + count, + if (count == 1) "" else "s", + }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + printed_timestamp = true; + printBlockedPackagesInfo(install_summary, this.options.global); + } else { + Output.pretty("Done! Checked {d} package{s} (no changes) ", .{ + install_summary.skipped, + if (install_summary.skipped == 1) "" else "s", + }); + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + printed_timestamp = true; + printBlockedPackagesInfo(install_summary, this.options.global); + } + } + + if (install_summary.fail > 0) { + Output.prettyln("Failed to install {d} package{s}\n", .{ install_summary.fail, if (install_summary.fail == 1) "" else "s" }); + Output.flush(); + } + } + + if (this.options.do.summary) { + if (!printed_timestamp) { + Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); + Output.prettyln(" done", .{}); + printed_timestamp = true; + } + } + } + + fn saveLockfile( + this: *PackageManager, + load_result: *const Lockfile.LoadResult, + save_format: Lockfile.LoadResult.LockfileFormat, + had_any_diffs: bool, + // TODO(dylan-conway): this and `packages_len_before_install` can most likely be deleted + // now that git dependencies don't append to lockfile during installation. + lockfile_before_install: *const Lockfile, + packages_len_before_install: usize, + log_level: Options.LogLevel, + ) OOM!void { + if (this.lockfile.isEmpty()) { + if (!this.options.dry_run) delete: { + const delete_format = switch (load_result.*) { + .not_found => break :delete, + .err => |err| err.format, + .ok => |ok| ok.format, + }; + + std.fs.cwd().deleteFileZ(if (delete_format == .text) "bun.lock" else "bun.lockb") catch |err| brk: { + // we don't care + if (err == error.FileNotFound) { + if (had_any_diffs) return; + break :brk; + } + + if (log_level != .silent) { + Output.err(err, "failed to delete empty lockfile", .{}); + } + return; + }; + } + if (!this.options.global) { + if (log_level != .silent) { + switch (this.subcommand) { + .remove => Output.prettyErrorln("\npackage.json has no dependencies! Deleted empty lockfile", .{}), + else => Output.prettyErrorln("No packages! 
Deleted empty lockfile", .{}), + } + } + } + + return; + } + + var save_node: *Progress.Node = undefined; + + if (log_level.showProgress()) { + this.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; + save_node = this.progress.start(ProgressStrings.save(), 0); + save_node.activate(); + + this.progress.refresh(); + } + + this.lockfile.saveToDisk(save_format, this.options.log_level.isVerbose()); + + if (comptime Environment.allow_assert) { + if (load_result.* != .not_found) { + if (load_result.loadedFromTextLockfile()) { + if (!try this.lockfile.eql(lockfile_before_install, packages_len_before_install, this.allocator)) { + Output.panic("Lockfile non-deterministic after saving", .{}); + } + } else { + if (this.lockfile.hasMetaHashChanged(false, packages_len_before_install) catch false) { + Output.panic("Lockfile metahash non-deterministic after saving", .{}); + } + } + } + } + + if (log_level.showProgress()) { + save_node.end(); + this.progress.refresh(); + this.progress.root.end(); + this.progress = .{}; + } else if (log_level != .silent) { + Output.prettyErrorln("Saved lockfile", .{}); + Output.flush(); + } + } + + fn printBlockedPackagesInfo(summary: *const PackageInstall.Summary, global: bool) void { const packages_count = summary.packages_with_blocked_scripts.count(); var scripts_count: usize = 0; for (summary.packages_with_blocked_scripts.values()) |count| scripts_count += count; diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index e0a0f54a4a..dad7851954 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -15,6 +15,7 @@ const C = bun.C; const JSAst = bun.JSAst; const TextLockfile = @import("./bun.lock.zig"); const OOM = bun.OOM; +const WorkspaceFilter = PackageManager.WorkspaceFilter; const JSLexer = bun.js_lexer; const logger = bun.logger; @@ -688,6 +689,9 @@ pub const Tree = struct { lockfile: *const Lockfile, manager: if (method == .filter) *const PackageManager else void, sort_buf: std.ArrayListUnmanaged(DependencyID) = .{}, + workspace_filters: if (method == .filter) []const WorkspaceFilter else void = if (method == .filter) &.{} else {}, + install_root_dependencies: if (method == .filter) bool else void, + path_buf: []u8, pub fn maybeReportError(this: *@This(), comptime fmt: string, args: anytype) void { this.log.addErrorFmt(null, logger.Loc.Empty, this.allocator, fmt, args) catch {}; @@ -859,6 +863,79 @@ pub const Tree = struct { continue; } + + if (builder.manager.subcommand == .install) dont_skip: { + // only do this when parent is root. 
workspaces are always dependencies of the root + // package, and the root package is always called with `processSubtree` + if (parent_pkg_id == 0 and builder.workspace_filters.len > 0) { + if (!builder.dependencies[dep_id].behavior.isWorkspaceOnly()) { + if (builder.install_root_dependencies) { + break :dont_skip; + } + + continue; + } + + var match = false; + + for (builder.workspace_filters) |workspace_filter| { + const res_id = builder.resolutions[dep_id]; + + const pattern, const path_or_name = switch (workspace_filter) { + .name => |pattern| .{ pattern, pkg_names[res_id].slice(builder.buf()) }, + + .path => |pattern| path: { + const res = &pkg_resolutions[res_id]; + if (res.tag != .workspace) { + break :dont_skip; + } + const res_path = res.value.workspace.slice(builder.buf()); + + // occupy `builder.path_buf` + var abs_res_path = strings.withoutTrailingSlash(bun.path.joinAbsStringBuf( + FileSystem.instance.top_level_dir, + builder.path_buf, + &.{res_path}, + .auto, + )); + + if (comptime Environment.isWindows) { + abs_res_path = abs_res_path[Path.windowsVolumeNameLen(abs_res_path)[0]..]; + Path.dangerouslyConvertPathToPosixInPlace(u8, builder.path_buf[0..abs_res_path.len]); + } + + break :path .{ + pattern, + abs_res_path, + }; + }, + + .all => { + match = true; + continue; + }, + }; + + switch (bun.glob.walk.matchImpl(pattern, path_or_name)) { + .match, .negate_match => match = true, + + .negate_no_match => { + // always skip if a pattern specifically says "!" + match = false; + break; + }, + + .no_match => { + // keep current + }, + } + } + + if (!match) { + continue; + } + } + } } const hoisted: HoistDependencyResult = hoisted: { @@ -1478,7 +1555,7 @@ const Cloner = struct { this.manager.clearCachedItemsDependingOnLockfileBuffer(); if (this.lockfile.packages.len != 0) { - try this.lockfile.hoist(this.log, .resolvable, {}); + try this.lockfile.resolve(this.log); } // capacity is used for calculating byte size @@ -1488,15 +1565,37 @@ const Cloner = struct { } }; +pub fn resolve( + lockfile: *Lockfile, + log: *logger.Log, +) Tree.SubtreeError!void { + return lockfile.hoist(log, .resolvable, {}, {}, {}); +} + +pub fn filter( + lockfile: *Lockfile, + log: *logger.Log, + manager: *PackageManager, + install_root_dependencies: bool, + workspace_filters: []const WorkspaceFilter, +) Tree.SubtreeError!void { + return lockfile.hoist(log, .filter, manager, install_root_dependencies, workspace_filters); +} + /// Sets `buffers.trees` and `buffers.hoisted_dependencies` pub fn hoist( lockfile: *Lockfile, log: *logger.Log, comptime method: Tree.BuilderMethod, manager: if (method == .filter) *PackageManager else void, + install_root_dependencies: if (method == .filter) bool else void, + workspace_filters: if (method == .filter) []const WorkspaceFilter else void, ) Tree.SubtreeError!void { const allocator = lockfile.allocator; var slice = lockfile.packages.slice(); + + var path_buf: bun.PathBuffer = undefined; + var builder = Tree.Builder(method){ .name_hashes = slice.items(.name_hash), .queue = TreeFiller.init(allocator), @@ -1507,6 +1606,9 @@ pub fn hoist( .log = log, .lockfile = lockfile, .manager = manager, + .path_buf = &path_buf, + .install_root_dependencies = install_root_dependencies, + .workspace_filters = workspace_filters, }; try (Tree{}).processSubtree( @@ -1579,7 +1681,7 @@ pub const Printer = struct { } if (lockfile_path.len > 0 and lockfile_path[0] == std.fs.path.sep) - _ = bun.sys.chdir(std.fs.path.dirname(lockfile_path) orelse std.fs.path.sep_str); + _ = bun.sys.chdir("", 
std.fs.path.dirname(lockfile_path) orelse std.fs.path.sep_str); _ = try FileSystem.init(null); @@ -7125,7 +7227,7 @@ pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool, packag return digest; } -pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Version) ?PackageID { +pub fn resolvePackageFromNameAndVersion(this: *Lockfile, package_name: []const u8, version: Dependency.Version) ?PackageID { const name_hash = String.Builder.stringHash(package_name); const entry = this.package_index.get(name_hash) orelse return null; const buf = this.buffers.string_bytes.items; diff --git a/src/install/migration.zig b/src/install/migration.zig index 1810598c74..d19d3e44cb 100644 --- a/src/install/migration.zig +++ b/src/install/migration.zig @@ -1017,7 +1017,7 @@ pub fn migrateNPMLockfile( return error.NotAllPackagesGotResolved; } - try this.hoist(log, .resolvable, {}); + try this.resolve(log); // if (Environment.isDebug) { // const dump_file = try std.fs.cwd().createFileZ("after-clean.json", .{}); diff --git a/src/io/fifo.zig b/src/io/fifo.zig deleted file mode 100644 index e9a7ab1fab..0000000000 --- a/src/io/fifo.zig +++ /dev/null @@ -1,57 +0,0 @@ -const std = @import("std"); -const bun = @import("root").bun; -const assert = bun.assert; - -/// An intrusive first in/first out linked list. -/// The element type T must have a field called "next" of type ?*T -pub fn FIFO(comptime T: type) type { - return struct { - const Self = @This(); - - in: ?*T = null, - out: ?*T = null, - - pub fn push(self: *Self, elem: *T) void { - assert(elem.next == null); - if (self.in) |in| { - in.next = elem; - self.in = elem; - } else { - assert(self.out == null); - self.in = elem; - self.out = elem; - } - } - - pub fn pop(self: *Self) ?*T { - const ret = self.out orelse return null; - self.out = ret.next; - ret.next = null; - if (self.in == ret) self.in = null; - return ret; - } - - pub fn peek(self: Self) ?*T { - return self.out; - } - - /// Remove an element from the FIFO. Asserts that the element is - /// in the FIFO. This operation is O(N), if this is done often you - /// probably want a different data structure. 
- pub fn remove(self: *Self, to_remove: *T) void { - if (to_remove == self.out) { - _ = self.pop(); - return; - } - var it = self.out; - while (it) |elem| : (it = elem.next) { - if (to_remove == elem.next) { - if (to_remove == self.in) self.in = elem; - elem.next = to_remove.next; - to_remove.next = null; - break; - } - } else unreachable; - } - }; -} diff --git a/src/js/builtins.d.ts b/src/js/builtins.d.ts index 2c07c8aa0e..d8df1e8938 100644 --- a/src/js/builtins.d.ts +++ b/src/js/builtins.d.ts @@ -549,6 +549,22 @@ declare interface Error { */ declare function $ERR_INVALID_ARG_TYPE(argName: string, expectedType: string, actualValue: string): TypeError; declare function $ERR_INVALID_ARG_TYPE(argName: string, expectedTypes: any[], actualValue: string): TypeError; +declare function $ERR_INVALID_ARG_VALUE(name: string, value: any, reason?: string): TypeError; + +declare function $ERR_IPC_DISCONNECTED(): Error; +declare function $ERR_SERVER_NOT_RUNNING(): Error; +declare function $ERR_IPC_CHANNEL_CLOSED(): Error; +declare function $ERR_SOCKET_BAD_TYPE(): Error; +declare function $ERR_ZLIB_INITIALIZATION_FAILED(): Error; +declare function $ERR_BUFFER_OUT_OF_BOUNDS(): Error; +declare function $ERR_IPC_ONE_PIPE(): Error; +declare function $ERR_SOCKET_ALREADY_BOUND(): Error; +declare function $ERR_SOCKET_BAD_BUFFER_SIZE(): Error; +declare function $ERR_SOCKET_DGRAM_IS_CONNECTED(): Error; +declare function $ERR_SOCKET_DGRAM_NOT_CONNECTED(): Error; +declare function $ERR_SOCKET_DGRAM_NOT_RUNNING(): Error; +declare function $ERR_INVALID_CURSOR_POS(): Error; + /** * Convert a function to a class-like object. * diff --git a/src/js/builtins/BunBuiltinNames.h b/src/js/builtins/BunBuiltinNames.h index b3cb19b816..b7017f0154 100644 --- a/src/js/builtins/BunBuiltinNames.h +++ b/src/js/builtins/BunBuiltinNames.h @@ -71,6 +71,7 @@ using namespace JSC; macro(dataView) \ macro(decode) \ macro(delimiter) \ + macro(dest) \ macro(destroy) \ macro(dir) \ macro(direct) \ @@ -244,6 +245,7 @@ using namespace JSC; macro(version) \ macro(versions) \ macro(view) \ + macro(warning) \ macro(writable) \ macro(WritableStream) \ macro(WritableStreamDefaultController) \ diff --git a/src/js/builtins/ConsoleObject.ts b/src/js/builtins/ConsoleObject.ts index ee9c6c6cf7..7377e5710c 100644 --- a/src/js/builtins/ConsoleObject.ts +++ b/src/js/builtins/ConsoleObject.ts @@ -142,7 +142,7 @@ export function createConsoleConstructor(console: typeof globalThis.console) { const { inspect, formatWithOptions, stripVTControlCharacters } = require("node:util"); const { isBuffer } = require("node:buffer"); - const { validateObject, validateInteger, validateArray } = require("internal/validators"); + const { validateObject, validateInteger, validateArray, validateOneOf } = require("internal/validators"); const kMaxGroupIndentation = 1000; const StringPrototypeIncludes = String.prototype.includes; @@ -298,11 +298,7 @@ export function createConsoleConstructor(console: typeof globalThis.console) { throw $ERR_CONSOLE_WRITABLE_STREAM("stderr is not a writable stream"); } - if (typeof colorMode !== "boolean" && colorMode !== "auto") { - throw $ERR_INVALID_ARG_VALUE( - "The argument 'colorMode' must be one of: 'auto', true, false. 
Received " + inspect(colorMode), - ); - } + validateOneOf(colorMode, "colorMode", ["auto", true, false]); if (groupIndentation !== undefined) { validateInteger(groupIndentation, "groupIndentation", 0, kMaxGroupIndentation); diff --git a/src/js/bun/sql.ts b/src/js/bun/sql.ts index 8ce069cb72..abe2a973cc 100644 --- a/src/js/bun/sql.ts +++ b/src/js/bun/sql.ts @@ -65,7 +65,7 @@ function normalizeSSLMode(value: string): SSLMode { } } - throw $ERR_INVALID_ARG_VALUE(`Invalid SSL mode: ${value}`); + throw $ERR_INVALID_ARG_VALUE("sslmode", value); } class Query extends PublicPromise { @@ -454,21 +454,33 @@ function loadOptions(o) { if (idleTimeout != null) { idleTimeout = Number(idleTimeout); if (idleTimeout > 2 ** 31 || idleTimeout < 0 || idleTimeout !== idleTimeout) { - throw $ERR_INVALID_ARG_VALUE("idle_timeout must be a non-negative integer less than 2^31"); + throw $ERR_INVALID_ARG_VALUE( + "options.idle_timeout", + idleTimeout, + "must be a non-negative integer less than 2^31", + ); } } if (connectionTimeout != null) { connectionTimeout = Number(connectionTimeout); if (connectionTimeout > 2 ** 31 || connectionTimeout < 0 || connectionTimeout !== connectionTimeout) { - throw $ERR_INVALID_ARG_VALUE("connection_timeout must be a non-negative integer less than 2^31"); + throw $ERR_INVALID_ARG_VALUE( + "options.connection_timeout", + connectionTimeout, + "must be a non-negative integer less than 2^31", + ); } } if (maxLifetime != null) { maxLifetime = Number(maxLifetime); if (maxLifetime > 2 ** 31 || maxLifetime < 0 || maxLifetime !== maxLifetime) { - throw $ERR_INVALID_ARG_VALUE("max_lifetime must be a non-negative integer less than 2^31"); + throw $ERR_INVALID_ARG_VALUE( + "options.max_lifetime", + maxLifetime, + "must be a non-negative integer less than 2^31", + ); } } diff --git a/src/js/internal-for-testing.ts b/src/js/internal-for-testing.ts index 0cfaa5507e..893ff59006 100644 --- a/src/js/internal-for-testing.ts +++ b/src/js/internal-for-testing.ts @@ -149,3 +149,5 @@ export const bindgen = $zig("bindgen_test.zig", "getBindgenTestFunctions") as { add: (a: any, b: any) => number; requiredAndOptionalArg: (a: any, b?: any, c?: any, d?: any) => number; }; + +export const noOpForTesting = $cpp("NoOpForTesting.cpp", "createNoOpForTesting"); diff --git a/src/js/internal/errors.ts b/src/js/internal/errors.ts index 739958bf03..f12e6a067c 100644 --- a/src/js/internal/errors.ts +++ b/src/js/internal/errors.ts @@ -1,14 +1,7 @@ export default { - ERR_INVALID_ARG_TYPE: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_INVALID_ARG_TYPE", 3), ERR_OUT_OF_RANGE: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_OUT_OF_RANGE", 3), - ERR_IPC_DISCONNECTED: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_IPC_DISCONNECTED", 0), - ERR_SERVER_NOT_RUNNING: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_SERVER_NOT_RUNNING", 0), - ERR_IPC_CHANNEL_CLOSED: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_IPC_CHANNEL_CLOSED", 0), - ERR_SOCKET_BAD_TYPE: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_SOCKET_BAD_TYPE", 0), ERR_INVALID_PROTOCOL: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_INVALID_PROTOCOL", 0), ERR_BROTLI_INVALID_PARAM: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_BROTLI_INVALID_PARAM", 0), ERR_BUFFER_TOO_LARGE: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_BUFFER_TOO_LARGE", 0), - ERR_ZLIB_INITIALIZATION_FAILED: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_ZLIB_INITIALIZATION_FAILED", 0), - ERR_BUFFER_OUT_OF_BOUNDS: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_BUFFER_OUT_OF_BOUNDS", 
0), ERR_UNHANDLED_ERROR: $newCppFunction("ErrorCode.cpp", "jsFunction_ERR_UNHANDLED_ERROR", 0), }; diff --git a/src/js/internal/shared.ts b/src/js/internal/shared.ts index df0f652ee9..dc1dcd93e0 100644 --- a/src/js/internal/shared.ts +++ b/src/js/internal/shared.ts @@ -1,10 +1,11 @@ class NotImplementedError extends Error { code: string; - constructor(feature: string, issue?: number) { + constructor(feature: string, issue?: number, extra?: string) { super( feature + " is not yet implemented in Bun." + - (issue ? " Track the status & thumbs up the issue: https://github.com/oven-sh/bun/issues/" + issue : ""), + (issue ? " Track the status & thumbs up the issue: https://github.com/oven-sh/bun/issues/" + issue : "") + + (!!extra ? ". " + extra : ""), ); this.name = "NotImplementedError"; this.code = "ERR_NOT_IMPLEMENTED"; @@ -14,11 +15,11 @@ class NotImplementedError extends Error { } } -function throwNotImplemented(feature: string, issue?: number): never { +function throwNotImplemented(feature: string, issue?: number, extra?: string): never { // in the definition so that it isn't bundled unless used hideFromStack(throwNotImplemented); - throw new NotImplementedError(feature, issue); + throw new NotImplementedError(feature, issue, extra); } function hideFromStack(...fns) { diff --git a/src/js/internal/util/inspect.js b/src/js/internal/util/inspect.js index f98354aae0..a67c9dd49c 100644 --- a/src/js/internal/util/inspect.js +++ b/src/js/internal/util/inspect.js @@ -145,9 +145,9 @@ const ONLY_ENUMERABLE = 2; * Fast path for {@link extractedSplitNewLines} for ASCII/Latin1 strings. * @returns `value` split on newlines (newline included at end), or `undefined` * if non-ascii UTF8/UTF16. - * + * * Passing this a non-string will cause a panic. - * + * * @type {(value: string) => string[] | undefined} */ const extractedSplitNewLinesFastPathStringsOnly = $newZigFunction( @@ -457,7 +457,7 @@ const extractedSplitNewLines = value => { return extractedSplitNewLinesFastPathStringsOnly(value) || extractedSplitNewLinesSlow(value); } return extractedSplitNewLinesSlow(value); -} +}; const keyStrRegExp = /^[a-zA-Z_][a-zA-Z_0-9]*$/; const numberRegExp = /^(0|[1-9][0-9]*)$/; diff --git a/src/js/internal/validators.ts b/src/js/internal/validators.ts index a6612d6db0..2df37ba6ea 100644 --- a/src/js/internal/validators.ts +++ b/src/js/internal/validators.ts @@ -1,6 +1,10 @@ const { hideFromStack } = require("internal/shared"); -const { ArrayIsArray } = require("internal/primordials"); + const RegExpPrototypeExec = RegExp.prototype.exec; +const ArrayPrototypeIncludes = Array.prototype.includes; +const ArrayPrototypeJoin = Array.prototype.join; +const ArrayPrototypeMap = Array.prototype.map; +const ArrayIsArray = Array.isArray; const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/; /** @@ -24,7 +28,9 @@ const linkValueRegExp = /^(?:<[^>]*>)(?:\s*;\s*[^;"\s]+(?:=(")?[^;"\s]*\1)?)*$/; function validateLinkHeaderFormat(value, name) { if (typeof value === "undefined" || !RegExpPrototypeExec.$call(linkValueRegExp, value)) { throw $ERR_INVALID_ARG_VALUE( - `The arguments ${name} is invalid must be an array or string of format "; rel=preload; as=style"`, + name, + value, + `must be an array or string of format "; rel=preload; as=style"`, ); } } @@ -55,7 +61,9 @@ function validateLinkHeaderValue(hints) { } throw $ERR_INVALID_ARG_VALUE( - `The arguments hints is invalid must be an array or string of format "; rel=preload; as=style"`, + "hints", + hints, + `must be an array or string of format "; rel=preload; as=style"`, ); } 
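Several call sites in this diff (ConsoleObject, bun/sql.ts, the link-header validators here) switch from ad-hoc message strings to the three-argument `$ERR_INVALID_ARG_VALUE(name, value, reason)` builtin. Below is a minimal sketch of the message shape those call sites rely on, assuming the native builtin matches Node's formatter (the removed readable-stream polyfill later in this diff spells out the same format); `makeInvalidArgValueError` is a hypothetical stand-in, not Bun's actual implementation:

```ts
import { inspect } from "node:util";

// Hypothetical mirror of the native $ERR_INVALID_ARG_VALUE builtin, for illustration only.
function makeInvalidArgValueError(name: string, value: unknown, reason = "is invalid"): TypeError {
  let inspected = inspect(value);
  if (inspected.length > 128) inspected = inspected.slice(0, 128) + "...";
  // Dotted names ("options.encoding") report as properties, bare names as arguments.
  const kind = name.includes(".") ? "property" : "argument";
  const err = new TypeError(`The ${kind} '${name}' ${reason}. Received ${inspected}`);
  (err as TypeError & { code?: string }).code = "ERR_INVALID_ARG_VALUE";
  return err;
}

// e.g. the sql.ts call site above:
const err = makeInvalidArgValueError("options.idle_timeout", -5, "must be a non-negative integer less than 2^31");
console.log(err.message);
// -> The property 'options.idle_timeout' must be a non-negative integer less than 2^31. Received -5
```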
hideFromStack(validateLinkHeaderValue); @@ -65,6 +73,18 @@ function validateObject(value, name) { } hideFromStack(validateObject); +function validateOneOf(value, name, oneOf) { + if (!ArrayPrototypeIncludes.$call(oneOf, value)) { + const allowed = ArrayPrototypeJoin.$call( + ArrayPrototypeMap.$call(oneOf, v => (typeof v === "string" ? `'${v}'` : String(v))), + ", ", + ); + const reason = "must be one of: " + allowed; + throw $ERR_INVALID_ARG_VALUE(name, value, reason); + } +} +hideFromStack(validateOneOf); + export default { validateObject: validateObject, validateLinkHeaderValue: validateLinkHeaderValue, @@ -103,4 +123,6 @@ export default { validateUndefined: $newCppFunction("NodeValidator.cpp", "jsFunction_validateUndefined", 0), /** `(buffer, name = 'buffer')` */ validateBuffer: $newCppFunction("NodeValidator.cpp", "jsFunction_validateBuffer", 0), + /** `(value, name, oneOf)` */ + validateOneOf, }; diff --git a/src/js/node/assert.ts b/src/js/node/assert.ts index e5343bd18e..daf536b8da 100644 --- a/src/js/node/assert.ts +++ b/src/js/node/assert.ts @@ -139,19 +139,6 @@ var require_errors = __commonJS({ }, TypeError, ); - createErrorType( - "ERR_INVALID_ARG_VALUE", - function (name, value) { - var reason = arguments.length > 2 && arguments[2] !== void 0 ? arguments[2] : "is invalid"; - var inspected = util.inspect(value); - return ( - inspected.length > 128 && (inspected = "".concat(inspected.slice(0, 128), "...")), - "The argument '".concat(name, "' ").concat(reason, ". Received ").concat(inspected) - ); - }, - TypeError, - RangeError, - ); createErrorType( "ERR_INVALID_RETURN_VALUE", function (input, name, value) { @@ -835,7 +822,6 @@ var require_assert = __commonJS({ _require$codes = _require.codes, ERR_AMBIGUOUS_ARGUMENT = _require$codes.ERR_AMBIGUOUS_ARGUMENT, ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_INVALID_ARG_VALUE = _require$codes.ERR_INVALID_ARG_VALUE, ERR_INVALID_RETURN_VALUE = _require$codes.ERR_INVALID_RETURN_VALUE, ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, AssertionError = require_assertion_error(), @@ -1065,7 +1051,7 @@ var require_assert = __commonJS({ } var keys = Object.keys(expected); if (expected instanceof Error) keys.push("name", "message"); - else if (keys.length === 0) throw new ERR_INVALID_ARG_VALUE("error", expected, "may not be an empty object"); + else if (keys.length === 0) throw $ERR_INVALID_ARG_VALUE("error", expected, "may not be an empty object"); return ( keys.forEach(function (key) { return ( diff --git a/src/js/node/child_process.ts b/src/js/node/child_process.ts index 30e2077b80..fee02dcbd1 100644 --- a/src/js/node/child_process.ts +++ b/src/js/node/child_process.ts @@ -2,7 +2,6 @@ const EventEmitter = require("node:events"); const StreamModule = require("node:stream"); const OsModule = require("node:os"); -const { ERR_INVALID_ARG_TYPE, ERR_IPC_DISCONNECTED } = require("internal/errors"); const { kHandle } = require("internal/shared"); const { validateBoolean, @@ -10,6 +9,8 @@ const { validateString, validateAbortSignal, validateArray, + validateObject, + validateOneOf, } = require("internal/validators"); var NetModule; @@ -76,9 +77,7 @@ var ReadableFromWeb; // TODO: Add these params after support added in Bun.spawn // uid Sets the user identity of the process (see setuid(2)). // gid Sets the group identity of the process (see setgid(2)). -// detached Prepare child to run independently of its parent process. Specific behavior depends on the platform, see options.detached). 
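For reference, the `validateOneOf` helper moved into `internal/validators` above behaves like the following standalone sketch (an assumption-labeled equivalent: the real helper throws the native `$ERR_INVALID_ARG_VALUE`, as the diff shows, rather than building a `TypeError` by hand):

```ts
// Standalone equivalent of the shared validateOneOf helper added above.
function validateOneOf(value: unknown, name: string, oneOf: readonly unknown[]): void {
  if (!oneOf.includes(value)) {
    const allowed = oneOf.map(v => (typeof v === "string" ? `'${v}'` : String(v))).join(", ");
    const err = new TypeError(`The argument '${name}' must be one of: ${allowed}. Received ${String(value)}`);
    (err as TypeError & { code?: string }).code = "ERR_INVALID_ARG_VALUE";
    throw err;
  }
}

// The ConsoleObject change earlier in this diff reduces to:
validateOneOf("auto", "colorMode", ["auto", true, false]); // passes silently
try {
  validateOneOf("always", "colorMode", ["auto", true, false]);
} catch (e) {
  console.log((e as { code?: string }).code); // "ERR_INVALID_ARG_VALUE"
}
```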
-// TODO: Add support for ipc option, verify only one IPC channel in array // stdio | Child's stdio configuration (see options.stdio). // Support wrapped ipc types (e.g. net.Socket, dgram.Socket, TTY, etc.) // IPC FD passing support @@ -556,7 +555,7 @@ function spawnSync(file, args, options) { } else if (typeof input === "string") { bunStdio[0] = Buffer.from(input, encoding || "utf8"); } else { - throw ERR_INVALID_ARG_TYPE(`options.stdio[0]`, ["Buffer", "TypedArray", "DataView", "string"], input); + throw $ERR_INVALID_ARG_TYPE(`options.stdio[0]`, ["Buffer", "TypedArray", "DataView", "string"], input); } } @@ -699,7 +698,7 @@ function stdioStringToArray(stdio, channel) { options = [0, 1, 2]; break; default: - throw ERR_INVALID_ARG_VALUE("stdio", stdio); + throw $ERR_INVALID_ARG_VALUE("stdio", stdio); } if (channel) $arrayPush(options, channel); @@ -797,7 +796,7 @@ function sanitizeKillSignal(killSignal) { if (typeof killSignal === "string" || typeof killSignal === "number") { return convertToValidSignal(killSignal); } else if (killSignal != null) { - throw ERR_INVALID_ARG_TYPE("options.killSignal", ["string", "number"], killSignal); + throw $ERR_INVALID_ARG_TYPE("options.killSignal", ["string", "number"], killSignal); } } @@ -877,14 +876,14 @@ function normalizeSpawnArguments(file, args, options) { validateString(file, "file"); validateArgumentNullCheck(file, "file"); - if (file.length === 0) throw ERR_INVALID_ARG_VALUE("file", file, "cannot be empty"); + if (file.length === 0) throw $ERR_INVALID_ARG_VALUE("file", file, "cannot be empty"); if ($isJSArray(args)) { args = ArrayPrototypeSlice.$call(args); } else if (args == null) { args = []; } else if (typeof args !== "object") { - throw ERR_INVALID_ARG_TYPE("args", "object", args); + throw $ERR_INVALID_ARG_TYPE("args", "object", args); } else { options = args; args = []; @@ -909,17 +908,17 @@ function normalizeSpawnArguments(file, args, options) { // Validate the uid, if present. if (options.uid != null && !isInt32(options.uid)) { - throw ERR_INVALID_ARG_TYPE("options.uid", "int32", options.uid); + throw $ERR_INVALID_ARG_TYPE("options.uid", "int32", options.uid); } // Validate the gid, if present. if (options.gid != null && !isInt32(options.gid)) { - throw ERR_INVALID_ARG_TYPE("options.gid", "int32", options.gid); + throw $ERR_INVALID_ARG_TYPE("options.gid", "int32", options.gid); } // Validate the shell, if present. if (options.shell != null && typeof options.shell !== "boolean" && typeof options.shell !== "string") { - throw ERR_INVALID_ARG_TYPE("options.shell", ["boolean", "string"], options.shell); + throw $ERR_INVALID_ARG_TYPE("options.shell", ["boolean", "string"], options.shell); } // Validate argv0, if present. @@ -1340,7 +1339,7 @@ class ChildProcess extends EventEmitter { options = undefined; } else if (options !== undefined) { if (typeof options !== "object" || options === null) { - throw ERR_INVALID_ARG_TYPE("options", "object", options); + throw $ERR_INVALID_ARG_TYPE("options", "object", options); } } @@ -1373,7 +1372,7 @@ class ChildProcess extends EventEmitter { $assert(this.connected); this.#handle.disconnect(); } else if (!ok) { - this.emit("error", ERR_IPC_DISCONNECTED()); + this.emit("error", $ERR_IPC_DISCONNECTED()); return; } this.#handle.disconnect(); @@ -1432,7 +1431,7 @@ const nodeToBunLookup = { ipc: "ipc", }; -function nodeToBun(item, index) { +function nodeToBun(item: string, index: number): string | number | null { // If not defined, use the default. // For stdin/stdout/stderr, it's pipe. For others, it's ignore. 
if (item == null) { @@ -1501,6 +1500,7 @@ function fdToStdioName(fd) { function getBunStdioFromOptions(stdio) { const normalizedStdio = normalizeStdio(stdio); + if (normalizedStdio.filter(v => v === "ipc").length > 1) throw $ERR_IPC_ONE_PIPE(); // Node options: // pipe: just a pipe // ipc = can only be one in array @@ -1527,7 +1527,7 @@ function getBunStdioFromOptions(stdio) { return bunStdio; } -function normalizeStdio(stdio) { +function normalizeStdio(stdio): string[] { if (typeof stdio === "string") { switch (stdio) { case "ignore": @@ -1616,7 +1616,7 @@ function validateMaxBuffer(maxBuffer) { function validateArgumentNullCheck(arg, propName) { if (typeof arg === "string" && StringPrototypeIncludes.$call(arg, "\u0000")) { - throw ERR_INVALID_ARG_VALUE(propName, arg, "must be a string without null bytes"); + throw $ERR_INVALID_ARG_VALUE(propName, arg, "must be a string without null bytes"); } } @@ -1632,53 +1632,6 @@ function validateTimeout(timeout) { } } -/** - * @callback validateOneOf - * @template T - * @param {T} value - * @param {string} name - * @param {T[]} oneOf - */ - -/** @type {validateOneOf} */ -const validateOneOf = (value, name, oneOf) => { - // const validateOneOf = hideStackFrames((value, name, oneOf) => { - if (!ArrayPrototypeIncludes.$call(oneOf, value)) { - const allowed = ArrayPrototypeJoin.$call( - ArrayPrototypeMap.$call(oneOf, v => (typeof v === "string" ? `'${v}'` : String(v))), - ", ", - ); - const reason = "must be one of: " + allowed; - throw ERR_INVALID_ARG_VALUE(name, value, reason); - } -}; - -/** - * @callback validateObject - * @param {*} value - * @param {string} name - * @param {{ - * allowArray?: boolean, - * allowFunction?: boolean, - * nullable?: boolean - * }} [options] - */ - -/** @type {validateObject} */ -const validateObject = (value, name, options = null) => { - // const validateObject = hideStackFrames((value, name, options = null) => { - const allowArray = options?.allowArray ?? false; - const allowFunction = options?.allowFunction ?? false; - const nullable = options?.nullable ?? 
false; - if ( - (!nullable && value === null) || - (!allowArray && $isJSArray(value)) || - (typeof value !== "object" && (!allowFunction || typeof value !== "function")) - ) { - throw ERR_INVALID_ARG_TYPE(name, "object", value); - } -}; - function isInt32(value) { return value === (value | 0); } @@ -1696,7 +1649,7 @@ function nullCheck(path, propName, throwError = true) { return; } - const err = ERR_INVALID_ARG_VALUE(propName, path, "must be a string or Uint8Array without null bytes"); + const err = $ERR_INVALID_ARG_VALUE(propName, path, "must be a string or Uint8Array without null bytes"); if (throwError) { throw err; } @@ -1705,7 +1658,7 @@ function nullCheck(path, propName, throwError = true) { function validatePath(path, propName = "path") { if (typeof path !== "string" && !isUint8Array(path)) { - throw ERR_INVALID_ARG_TYPE(propName, ["string", "Buffer", "URL"], path); + throw $ERR_INVALID_ARG_TYPE(propName, ["string", "Buffer", "URL"], path); } const err = nullCheck(path, propName, false); @@ -1751,7 +1704,7 @@ class AbortError extends Error { name = "AbortError"; constructor(message = "The operation was aborted", options = undefined) { if (options !== undefined && typeof options !== "object") { - throw ERR_INVALID_ARG_TYPE("options", "object", options); + throw $ERR_INVALID_ARG_TYPE("options", "object", options); } super(message, options); } @@ -1937,12 +1890,6 @@ function ERR_INVALID_OPT_VALUE(name, value) { return err; } -function ERR_INVALID_ARG_VALUE(name, value, reason) { - const err = new Error(`The value "${value}" is invalid for argument '${name}'. Reason: ${reason}`); - err.code = "ERR_INVALID_ARG_VALUE"; - return err; -} - function ERR_CHILD_PROCESS_IPC_REQUIRED(name) { const err = new TypeError(`Forked processes must have an IPC channel, missing value 'ipc' in ${name}`); err.code = "ERR_CHILD_PROCESS_IPC_REQUIRED"; diff --git a/src/js/node/dgram.ts b/src/js/node/dgram.ts index b83c64bafd..ed652132e5 100644 --- a/src/js/node/dgram.ts +++ b/src/js/node/dgram.ts @@ -34,7 +34,6 @@ const kStateSymbol = Symbol("state symbol"); const async_id_symbol = Symbol("async_id_symbol"); const { hideFromStack, throwNotImplemented } = require("internal/shared"); -const { ERR_SOCKET_BAD_TYPE } = require("internal/errors"); const { validateString, validateNumber, @@ -54,48 +53,6 @@ const { const EventEmitter = require("node:events"); -class ERR_OUT_OF_RANGE extends Error { - constructor(argumentName, range, received) { - super(`The value of "${argumentName}" is out of range. It must be ${range}. Received ${received}`); - this.code = "ERR_OUT_OF_RANGE"; - } -} - -class ERR_BUFFER_OUT_OF_BOUNDS extends Error { - constructor() { - super("Buffer offset or length is out of bounds"); - this.code = "ERR_BUFFER_OUT_OF_BOUNDS"; - } -} - -class ERR_INVALID_ARG_TYPE extends Error { - constructor(argName, expected, actual) { - super(`The "${argName}" argument must be of type ${expected}. 
Received type ${typeof actual}`); - this.code = "ERR_INVALID_ARG_TYPE"; - } -} - -class ERR_MISSING_ARGS extends Error { - constructor(argName) { - super(`The "${argName}" argument is required`); - this.code = "ERR_MISSING_ARGS"; - } -} - -class ERR_SOCKET_ALREADY_BOUND extends Error { - constructor() { - super("Socket is already bound"); - this.code = "ERR_SOCKET_ALREADY_BOUND"; - } -} - -class ERR_SOCKET_BAD_BUFFER_SIZE extends Error { - constructor() { - super("Buffer size must be a number"); - this.code = "ERR_SOCKET_BAD_BUFFER_SIZE"; - } -} - class ERR_SOCKET_BUFFER_SIZE extends Error { constructor(ctx) { super(`Invalid buffer size: ${ctx}`); @@ -103,34 +60,6 @@ class ERR_SOCKET_BUFFER_SIZE extends Error { } } -class ERR_SOCKET_DGRAM_IS_CONNECTED extends Error { - constructor() { - super("Socket is connected"); - this.code = "ERR_SOCKET_DGRAM_IS_CONNECTED"; - } -} - -class ERR_SOCKET_DGRAM_NOT_CONNECTED extends Error { - constructor() { - super("Socket is not connected"); - this.code = "ERR_SOCKET_DGRAM_NOT_CONNECTED"; - } -} - -class ERR_SOCKET_BAD_PORT extends Error { - constructor(name, port, allowZero) { - super(`Invalid ${name}: ${port}. Ports must be >= 0 and <= 65535. ${allowZero ? "0" : ""}`); - this.code = "ERR_SOCKET_BAD_PORT"; - } -} - -class ERR_SOCKET_DGRAM_NOT_RUNNING extends Error { - constructor() { - super("Socket is not running"); - this.code = "ERR_SOCKET_DGRAM_NOT_RUNNING"; - } -} - function isInt32(value) { return value === (value | 0); } @@ -167,7 +96,7 @@ function newHandle(type, lookup) { } else if (type === "udp6") { handle.lookup = FunctionPrototypeBind(lookup6, handle, lookup); } else { - throw new ERR_SOCKET_BAD_TYPE(); + throw $ERR_SOCKET_BAD_TYPE(); } return handle; @@ -241,7 +170,7 @@ function createSocket(type, listener) { } function bufferSize(self, size, buffer) { - if (size >>> 0 !== size) throw new ERR_SOCKET_BAD_BUFFER_SIZE(); + if (size >>> 0 !== size) throw $ERR_SOCKET_BAD_BUFFER_SIZE(); const ctx = {}; // const ret = self[kStateSymbol].handle.bufferSize(size, buffer, ctx); @@ -257,7 +186,7 @@ Socket.prototype.bind = function (port_, address_ /* , callback */) { const state = this[kStateSymbol]; - if (state.bindState !== BIND_STATE_UNBOUND) throw new ERR_SOCKET_ALREADY_BOUND(); + if (state.bindState !== BIND_STATE_UNBOUND) throw $ERR_SOCKET_ALREADY_BOUND(); state.bindState = BIND_STATE_BINDING; @@ -393,7 +322,7 @@ Socket.prototype.connect = function (port, address, callback) { const state = this[kStateSymbol]; - if (state.connectState !== CONNECT_STATE_DISCONNECTED) throw new ERR_SOCKET_DGRAM_IS_CONNECTED(); + if (state.connectState !== CONNECT_STATE_DISCONNECTED) throw $ERR_SOCKET_DGRAM_IS_CONNECTED(); state.connectState = CONNECT_STATE_CONNECTING; if (state.bindState === BIND_STATE_UNBOUND) this.bind({ port: 0, exclusive: true }, null); @@ -451,7 +380,7 @@ const disconnectFn = $newZigFunction("udp_socket.zig", "UDPSocket.jsDisconnect", Socket.prototype.disconnect = function () { const state = this[kStateSymbol]; - if (state.connectState !== CONNECT_STATE_CONNECTED) throw new ERR_SOCKET_DGRAM_NOT_CONNECTED(); + if (state.connectState !== CONNECT_STATE_CONNECTED) throw $ERR_SOCKET_DGRAM_NOT_CONNECTED(); disconnectFn.$call(state.handle.socket); state.connectState = CONNECT_STATE_DISCONNECTED; @@ -471,17 +400,17 @@ function sliceBuffer(buffer, offset, length) { if (typeof buffer === "string") { buffer = Buffer.from(buffer); } else if (!ArrayBuffer.isView(buffer)) { - throw new ERR_INVALID_ARG_TYPE("buffer", ["Buffer", "TypedArray", "DataView", 
"string"], buffer); + throw $ERR_INVALID_ARG_TYPE("buffer", ["Buffer", "TypedArray", "DataView", "string"], buffer); } offset = offset >>> 0; length = length >>> 0; if (offset > buffer.byteLength) { - throw new ERR_BUFFER_OUT_OF_BOUNDS("offset"); + throw $ERR_BUFFER_OUT_OF_BOUNDS("offset"); } if (offset + length > buffer.byteLength) { - throw new ERR_BUFFER_OUT_OF_BOUNDS("length"); + throw $ERR_BUFFER_OUT_OF_BOUNDS("length"); } return Buffer.from(buffer.buffer, buffer.byteOffset + offset, length); @@ -570,19 +499,19 @@ Socket.prototype.send = function (buffer, offset, length, port, address, callbac callback = offset; } - if (port || address) throw new ERR_SOCKET_DGRAM_IS_CONNECTED(); + if (port || address) throw $ERR_SOCKET_DGRAM_IS_CONNECTED(); } if (!Array.isArray(buffer)) { if (typeof buffer === "string") { list = [Buffer.from(buffer)]; } else if (!ArrayBuffer.isView(buffer)) { - throw new ERR_INVALID_ARG_TYPE("buffer", ["Buffer", "TypedArray", "DataView", "string"], buffer); + throw $ERR_INVALID_ARG_TYPE("buffer", ["Buffer", "TypedArray", "DataView", "string"], buffer); } else { list = [buffer]; } } else if (!(list = fixBufferList(buffer))) { - throw new ERR_INVALID_ARG_TYPE("buffer list arguments", ["Buffer", "TypedArray", "DataView", "string"], buffer); + throw $ERR_INVALID_ARG_TYPE("buffer list arguments", ["Buffer", "TypedArray", "DataView", "string"], buffer); } if (!connected) port = validatePort(port, "Port", false); @@ -747,7 +676,7 @@ function socketCloseNT(self) { Socket.prototype.address = function () { const addr = this[kStateSymbol].handle.socket?.address; - if (!addr) throw new ERR_SOCKET_DGRAM_NOT_RUNNING(); + if (!addr) throw $ERR_SOCKET_DGRAM_NOT_RUNNING(); return addr; }; @@ -755,11 +684,11 @@ Socket.prototype.remoteAddress = function () { const state = this[kStateSymbol]; const socket = state.handle.socket; - if (!socket) throw new ERR_SOCKET_DGRAM_NOT_RUNNING(); + if (!socket) throw $ERR_SOCKET_DGRAM_NOT_RUNNING(); - if (state.connectState !== CONNECT_STATE_CONNECTED) throw new ERR_SOCKET_DGRAM_NOT_CONNECTED(); + if (state.connectState !== CONNECT_STATE_CONNECTED) throw $ERR_SOCKET_DGRAM_NOT_CONNECTED(); - if (!socket.remoteAddress) throw new ERR_SOCKET_DGRAM_NOT_CONNECTED(); + if (!socket.remoteAddress) throw $ERR_SOCKET_DGRAM_NOT_CONNECTED(); return socket.remoteAddress; }; diff --git a/src/js/node/diagnostics_channel.ts b/src/js/node/diagnostics_channel.ts index 57722725e7..2aa78dbb12 100644 --- a/src/js/node/diagnostics_channel.ts +++ b/src/js/node/diagnostics_channel.ts @@ -2,7 +2,6 @@ // Reference: https://github.com/nodejs/node/blob/fb47afc335ef78a8cef7eac52b8ee7f045300696/lib/diagnostics_channel.js const { validateFunction } = require("internal/validators"); -const { ERR_INVALID_ARG_TYPE } = require("internal/errors"); const SafeMap = Map; const SafeFinalizationRegistry = FinalizationRegistry; @@ -212,7 +211,7 @@ function channel(name) { if (channel) return channel; if (typeof name !== "string" && typeof name !== "symbol") { - throw ERR_INVALID_ARG_TYPE("channel", "string or symbol", name); + throw $ERR_INVALID_ARG_TYPE("channel", "string or symbol", name); } return new Channel(name); @@ -237,7 +236,7 @@ const traceEvents = ["start", "end", "asyncStart", "asyncEnd", "error"]; function assertChannel(value, name) { if (!(value instanceof Channel)) { - throw ERR_INVALID_ARG_TYPE(name, ["Channel"], value); + throw $ERR_INVALID_ARG_TYPE(name, ["Channel"], value); } } @@ -264,7 +263,7 @@ class TracingChannel { this.asyncEnd = asyncEnd; this.error = error; } else { 
- throw ERR_INVALID_ARG_TYPE("nameOrChannels", ["string, object, or Channel"], nameOrChannels); + throw $ERR_INVALID_ARG_TYPE("nameOrChannels", ["string, object, or Channel"], nameOrChannels); } } diff --git a/src/js/node/events.ts b/src/js/node/events.ts index 85a5c7707c..64a14f8edb 100644 --- a/src/js/node/events.ts +++ b/src/js/node/events.ts @@ -23,7 +23,7 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -const { ERR_INVALID_ARG_TYPE, ERR_UNHANDLED_ERROR } = require("internal/errors"); +const { ERR_UNHANDLED_ERROR } = require("internal/errors"); const { validateObject, validateInteger, @@ -55,7 +55,7 @@ const kEmptyObject = Object.freeze({ __proto__: null }); var defaultMaxListeners = 10; // EventEmitter must be a standard function because some old code will do weird tricks like `EventEmitter.$apply(this)`. -const EventEmitter = function EventEmitter(opts) { +function EventEmitter(opts) { if (this._events === undefined || this._events === this.__proto__._events) { this._events = { __proto__: null }; this._eventsCount = 0; @@ -65,13 +65,10 @@ const EventEmitter = function EventEmitter(opts) { if ((this[kCapture] = opts?.captureRejections ? Boolean(opts?.captureRejections) : EventEmitterPrototype[kCapture])) { this.emit = emitWithRejectionCapture; } -}; +} Object.defineProperty(EventEmitter, "name", { value: "EventEmitter", configurable: true }); const EventEmitterPrototype = (EventEmitter.prototype = {}); -EventEmitterPrototype._events = undefined; -EventEmitterPrototype._eventsCount = 0; -EventEmitterPrototype._maxListeners = undefined; EventEmitterPrototype.setMaxListeners = function setMaxListeners(n) { validateNumber(n, "setMaxListeners", 0); this._maxListeners = n; @@ -530,7 +527,7 @@ function on(emitter, event, options = kEmptyObject) { throw(err) { if (!err || !(err instanceof Error)) { - throw ERR_INVALID_ARG_TYPE("EventEmitter.AsyncIterator", "Error", err); + throw $ERR_INVALID_ARG_TYPE("EventEmitter.AsyncIterator", "Error", err); } errorHandler(err); }, @@ -661,7 +658,7 @@ function setMaxListeners(n = defaultMaxListeners, ...eventTargets) { } else if (typeof target.setMaxListeners === "function") { target.setMaxListeners(n); } else { - throw ERR_INVALID_ARG_TYPE("eventTargets", ["EventEmitter", "EventTarget"], target); + throw $ERR_INVALID_ARG_TYPE("eventTargets", ["EventEmitter", "EventTarget"], target); } } } @@ -689,7 +686,7 @@ function eventTargetAgnosticRemoveListener(emitter, name, listener, flags) { } else if (typeof emitter.removeEventListener === "function") { emitter.removeEventListener(name, listener, flags); } else { - throw ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter); + throw $ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter); } } @@ -703,14 +700,14 @@ function eventTargetAgnosticAddListener(emitter, name, listener, flags) { } else if (typeof emitter.addEventListener === "function") { emitter.addEventListener(name, listener, flags); } else { - throw ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter); + throw $ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter); } } class AbortError extends Error { constructor(message = "The operation was aborted", options = undefined) { if (options !== undefined && typeof options !== "object") { - throw ERR_INVALID_ARG_TYPE("options", "object", options); + throw $ERR_INVALID_ARG_TYPE("options", "object", options); } super(message, options); this.code = "ABORT_ERR"; @@ -718,12 +715,6 @@ class AbortError extends Error { } } 
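The point of replacing per-module error classes with shared coded errors, as in the `AbortError` just above (code `"ABORT_ERR"`), is that callers can branch on `err.code` instead of matching message text. A small usage sketch, assuming `events.once` rejects with this shared `AbortError` as it does in Node:

```ts
import { EventEmitter, once } from "node:events";

const ee = new EventEmitter();
const ac = new AbortController();

const pending = once(ee, "ready", { signal: ac.signal });
ac.abort();

try {
  await pending;
} catch (err) {
  const e = err as Error & { code?: string };
  // A stable code, not a bespoke message string:
  console.log(e.name, e.code); // "AbortError" "ABORT_ERR"
}
```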
-function ERR_OUT_OF_RANGE(name, range, value) { - const err = new RangeError(`The "${name}" argument is out of range. It must be ${range}. Received ${value}`); - err.code = "ERR_OUT_OF_RANGE"; - return err; -} - function checkListener(listener) { validateFunction(listener, "listener"); } @@ -741,19 +732,19 @@ function getMaxListeners(emitterOrTarget) { emitterOrTarget[kMaxEventTargetListeners] ??= defaultMaxListeners; return emitterOrTarget[kMaxEventTargetListeners]; } - throw ERR_INVALID_ARG_TYPE("emitter", ["EventEmitter", "EventTarget"], emitterOrTarget); + throw $ERR_INVALID_ARG_TYPE("emitter", ["EventEmitter", "EventTarget"], emitterOrTarget); } Object.defineProperty(getMaxListeners, "name", { value: "getMaxListeners" }); // Copy-pasta from Node.js source code function addAbortListener(signal, listener) { if (signal === undefined) { - throw ERR_INVALID_ARG_TYPE("signal", "AbortSignal", signal); + throw $ERR_INVALID_ARG_TYPE("signal", "AbortSignal", signal); } validateAbortSignal(signal, "signal"); if (typeof listener !== "function") { - throw ERR_INVALID_ARG_TYPE("listener", "function", listener); + throw $ERR_INVALID_ARG_TYPE("listener", "function", listener); } let removeEventListener; diff --git a/src/js/node/http.ts b/src/js/node/http.ts index 08cd434ade..46c29c17ba 100644 --- a/src/js/node/http.ts +++ b/src/js/node/http.ts @@ -2,7 +2,7 @@ const EventEmitter = require("node:events"); const { isTypedArray } = require("node:util/types"); const { Duplex, Readable, Writable } = require("node:stream"); -const { ERR_INVALID_ARG_TYPE, ERR_INVALID_PROTOCOL } = require("internal/errors"); +const { ERR_INVALID_PROTOCOL } = require("internal/errors"); const { isPrimary } = require("internal/cluster/isPrimary"); const { kAutoDestroyed } = require("internal/shared"); const { urlToHttpOptions } = require("internal/url"); @@ -126,7 +126,7 @@ function isValidTLSArray(obj) { function validateMsecs(numberlike: any, field: string) { if (typeof numberlike !== "number" || numberlike < 0) { - throw ERR_INVALID_ARG_TYPE(field, "number", numberlike); + throw $ERR_INVALID_ARG_TYPE(field, "number", numberlike); } return numberlike; @@ -1806,7 +1806,7 @@ class ClientRequest extends OutgoingMessage { } else if (agent == null) { agent = defaultAgent; } else if (typeof agent.addRequest !== "function") { - throw ERR_INVALID_ARG_TYPE("options.agent", "Agent-like Object, undefined, or false", agent); + throw $ERR_INVALID_ARG_TYPE("options.agent", "Agent-like Object, undefined, or false", agent); } this.#agent = agent; @@ -1852,8 +1852,7 @@ class ClientRequest extends OutgoingMessage { let method = options.method; const methodIsString = typeof method === "string"; if (method !== null && method !== undefined && !methodIsString) { - // throw ERR_INVALID_ARG_TYPE("options.method", "string", method); - throw new Error("ERR_INVALID_ARG_TYPE: options.method"); + throw $ERR_INVALID_ARG_TYPE("options.method", "string", method); } if (methodIsString && method) { @@ -2088,12 +2087,7 @@ class ClientRequest extends OutgoingMessage { function validateHost(host, name) { if (host !== null && host !== undefined && typeof host !== "string") { - // throw ERR_INVALID_ARG_TYPE( - // `options.${name}`, - // ["string", "undefined", "null"], - // host, - // ); - throw new Error("Invalid arg type in options"); + throw $ERR_INVALID_ARG_TYPE(`options.${name}`, ["string", "undefined", "null"], host); } return host; } diff --git a/src/js/node/http2.ts b/src/js/node/http2.ts index bb7544bdbc..a48c0027bc 100644 --- a/src/js/node/http2.ts +++ 
b/src/js/node/http2.ts @@ -349,8 +349,7 @@ class Http2ServerRequest extends Readable { set method(method) { validateString(method, "method"); - if (StringPrototypeTrim(method) === "") - throw $ERR_INVALID_ARG_VALUE(`The arguments method is invalid. Received ${method}`); + if (StringPrototypeTrim(method) === "") throw $ERR_INVALID_ARG_VALUE("method", method); this[kHeaders][HTTP2_HEADER_METHOD] = method; } @@ -642,7 +641,7 @@ class Http2ServerResponse extends Stream { } } else { if (headers.length % 2 !== 0) { - throw $ERR_INVALID_ARG_VALUE(`The arguments headers is invalid.`); + throw $ERR_INVALID_ARG_VALUE("headers", headers); } for (i = 0; i < headers.length; i += 2) { @@ -1637,7 +1636,7 @@ class Http2Stream extends Duplex { const sensitiveNames = {}; if (sensitives) { if (!$isJSArray(sensitives)) { - throw $ERR_INVALID_ARG_VALUE("The arguments headers[http2.neverIndex] is invalid"); + throw $ERR_INVALID_ARG_VALUE("headers[http2.neverIndex]", sensitives); } for (let i = 0; i < sensitives.length; i++) { sensitiveNames[sensitives[i]] = true; @@ -2048,7 +2047,7 @@ class ServerHttp2Stream extends Http2Stream { const sensitiveNames = {}; if (sensitives) { if (!$isArray(sensitives)) { - throw $ERR_INVALID_ARG_VALUE("The arguments headers[http2.neverIndex] is invalid."); + throw $ERR_INVALID_ARG_VALUE("headers[http2.neverIndex]", sensitives); } for (let i = 0; i < sensitives.length; i++) { sensitiveNames[sensitives[i]] = true; @@ -2099,7 +2098,7 @@ class ServerHttp2Stream extends Http2Stream { const sensitiveNames = {}; if (sensitives) { if (!$isArray(sensitives)) { - throw $ERR_INVALID_ARG_VALUE("The arguments headers[http2.neverIndex] is invalid."); + throw $ERR_INVALID_ARG_VALUE("headers[http2.neverIndex]", sensitives); } for (let i = 0; i < sensitives.length; i++) { sensitiveNames[sensitives[i]] = true; @@ -3091,7 +3090,7 @@ class ClientHttp2Session extends Http2Session { const sensitiveNames = {}; if (sensitives) { if (!$isArray(sensitives)) { - throw $ERR_INVALID_ARG_VALUE("The arguments headers[http2.neverIndex] is invalid."); + throw $ERR_INVALID_ARG_VALUE("headers[http2.neverIndex]", sensitives); } for (let i = 0; i < sensitives.length; i++) { sensitiveNames[sensitives[i]] = true; diff --git a/src/js/node/net.ts b/src/js/node/net.ts index 277900dd99..1fad065074 100644 --- a/src/js/node/net.ts +++ b/src/js/node/net.ts @@ -24,7 +24,6 @@ const { Duplex } = require("node:stream"); const EventEmitter = require("node:events"); const { addServerName, upgradeDuplexToTLS, isNamedPipeSocket } = require("../internal/net"); const { ExceptionWithHostPort } = require("internal/shared"); -const { ERR_SERVER_NOT_RUNNING } = require("internal/errors"); // IPv4 Segment const v4Seg = "(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])"; @@ -1122,7 +1121,7 @@ class Server extends EventEmitter { if (typeof callback === "function") { if (!this._handle) { this.once("close", function close() { - callback(ERR_SERVER_NOT_RUNNING()); + callback($ERR_SERVER_NOT_RUNNING()); }); } else { this.once("close", callback); diff --git a/src/js/node/path.ts b/src/js/node/path.ts index f7364a82bb..8129e60bbf 100644 --- a/src/js/node/path.ts +++ b/src/js/node/path.ts @@ -1,4 +1,6 @@ // Hardcoded module "node:path" +const { validateString } = require("internal/validators"); + const [bindingPosix, bindingWin32] = $cpp("Path.cpp", "createNodePathBinding"); const toNamespacedPathPosix = bindingPosix.toNamespacedPath.bind(bindingPosix); const toNamespacedPathWin32 = bindingWin32.toNamespacedPath.bind(bindingWin32); @@ -40,4 
+42,48 @@ const win32 = {
 };
 posix.win32 = win32.win32 = win32;
 posix.posix = posix;
+
+type Glob = import("bun").Glob;
+
+let LazyGlob: (typeof import("bun"))["Glob"] | undefined;
+function loadGlob(): void {
+  LazyGlob = require("bun").Glob;
+}
+
+// the most-recently used glob is memoized in case `matchesGlob` is called in a
+// loop with the same pattern
+let prevGlob: Glob | undefined;
+let prevPattern: string | undefined;
+function matchesGlob(isWindows, path, pattern) {
+  let glob: Glob;
+
+  validateString(path, "path");
+  if (isWindows) path = path.replaceAll("\\", "/");
+
+  if (prevGlob) {
+    $assert(prevPattern !== undefined);
+    if (prevPattern === pattern) {
+      glob = prevGlob;
+    } else {
+      if (LazyGlob === undefined) loadGlob();
+      validateString(pattern, "pattern");
+      if (isWindows) pattern = pattern.replaceAll("\\", "/");
+      glob = prevGlob = new LazyGlob!(pattern);
+      prevPattern = pattern;
+    }
+  } else {
+    loadGlob(); // no prevGlob implies LazyGlob isn't loaded
+    validateString(pattern, "pattern");
+    if (isWindows) pattern = pattern.replaceAll("\\", "/");
+    glob = prevGlob = new LazyGlob!(pattern);
+    prevPattern = pattern;
+  }
+
+  return glob.match(path);
+}
+
+// posix.matchesGlob = win32.matchesGlob = matchesGlob;
+posix.matchesGlob = matchesGlob.bind(null, false);
+win32.matchesGlob = matchesGlob.bind(null, true);
+
 export default process.platform === "win32" ? win32 : posix;
diff --git a/src/js/node/readline.ts b/src/js/node/readline.ts
index 9db2708f43..8cd6e67421 100644
--- a/src/js/node/readline.ts
+++ b/src/js/node/readline.ts
@@ -270,30 +270,6 @@ var NodeError = getNodeErrorByName("Error");
 var NodeTypeError = getNodeErrorByName("TypeError");
 var NodeRangeError = getNodeErrorByName("RangeError");

-class ERR_INVALID_ARG_VALUE extends NodeTypeError {
-  constructor(name, value, reason = "not specified") {
-    super(`The value "${String(value)}" is invalid for argument '${name}'. Reason: ${reason}`, {
-      code: "ERR_INVALID_ARG_VALUE",
-    });
-  }
-}
-
-class ERR_INVALID_CURSOR_POS extends NodeTypeError {
-  constructor() {
-    super("Cannot set cursor row without setting its column", {
-      code: "ERR_INVALID_CURSOR_POS",
-    });
-  }
-}
-
-class ERR_OUT_OF_RANGE extends NodeRangeError {
-  constructor(name, range, received) {
-    super(`The value of "${name}" is out of range. It must be ${range}. Received ${received}`, {
-      code: "ERR_OUT_OF_RANGE",
-    });
-  }
-}
-
 class ERR_USE_AFTER_CLOSE extends NodeError {
   constructor() {
     super("This socket has been ended by the other party", {
@@ -881,15 +857,15 @@ function cursorTo(stream, x, y, callback) {
     y = undefined;
   }

-  if (NumberIsNaN(x)) throw new ERR_INVALID_ARG_VALUE("x", x);
-  if (NumberIsNaN(y)) throw new ERR_INVALID_ARG_VALUE("y", y);
+  if (NumberIsNaN(x)) throw $ERR_INVALID_ARG_VALUE("x", x);
+  if (NumberIsNaN(y)) throw $ERR_INVALID_ARG_VALUE("y", y);

   if (stream == null || (typeof x !== "number" && typeof y !== "number")) {
     if (typeof callback === "function") process.nextTick(callback, null);
     return true;
   }

-  if (typeof x !== "number") throw new ERR_INVALID_CURSOR_POS();
+  if (typeof x !== "number") throw $ERR_INVALID_CURSOR_POS();

   var data = typeof y !== "number" ?
CSI`${x + 1}G` : CSI`${y + 1};${x + 1}H`; return stream.write(data, callback); @@ -1286,7 +1262,7 @@ function InterfaceConstructor(input, output, completer, terminal) { if (NumberIsFinite(inputEscapeCodeTimeout)) { this.escapeCodeTimeout = inputEscapeCodeTimeout; } else { - throw new ERR_INVALID_ARG_VALUE("input.escapeCodeTimeout", this.escapeCodeTimeout); + throw $ERR_INVALID_ARG_VALUE("input.escapeCodeTimeout", this.escapeCodeTimeout); } } @@ -1299,7 +1275,7 @@ function InterfaceConstructor(input, output, completer, terminal) { } if (completer !== undefined && typeof completer !== "function") { - throw new ERR_INVALID_ARG_VALUE("completer", completer); + throw $ERR_INVALID_ARG_VALUE("completer", completer); } if (history === undefined) { @@ -1313,7 +1289,7 @@ function InterfaceConstructor(input, output, completer, terminal) { } if (typeof historySize !== "number" || NumberIsNaN(historySize) || historySize < 0) { - throw new ERR_INVALID_ARG_VALUE("historySize", historySize); + throw $ERR_INVALID_ARG_VALUE("historySize", historySize); } // Backwards compat; check the isTTY prop of the output stream diff --git a/src/js/node/stream.ts b/src/js/node/stream.ts index 26ba3188a6..49433352a4 100644 --- a/src/js/node/stream.ts +++ b/src/js/node/stream.ts @@ -31,24 +31,13 @@ const transferToNativeReadable = $newCppFunction("ReadableStream.cpp", "jsFuncti const { kAutoDestroyed } = require("internal/shared"); const { validateBoolean, - validateString, - validateNumber, - validateSignalName, - validateEncoding, - validatePort, validateInteger, validateInt32, - validateUint32, - validateArray, - validateBuffer, validateAbortSignal, validateFunction, - validatePlainFunction, - validateUndefined, + validateObject, } = require("internal/validators"); -const ObjectSetPrototypeOf = Object.setPrototypeOf; - const ProcessNextTick = process.nextTick; const EE = require("node:events").EventEmitter; @@ -70,14 +59,6 @@ $debug("node:stream loaded"); // Node error polyfills //------------------------------------------------------------------------------ -function ERR_INVALID_ARG_TYPE(name, type, value) { - return new Error(`The argument '${name}' is invalid. Received '${value}' for type '${type}'`); -} - -function ERR_INVALID_ARG_VALUE(name, value, reason) { - return new Error(`The value '${value}' is invalid for argument '${name}'. Reason: ${reason}`); -} - // node_modules/readable-stream/lib/ours/primordials.js var require_primordials = __commonJS({ "node_modules/readable-stream/lib/ours/primordials.js"(exports, module) { @@ -516,18 +497,6 @@ var require_errors = __commonJS({ }, TypeError, ); - E( - "ERR_INVALID_ARG_VALUE", - (name, value, reason = "is invalid") => { - let inspected = inspect(value); - if (inspected.length > 128) { - inspected = inspected.slice(0, 128) + "..."; - } - const type = name.includes(".") ? "property" : "argument"; - return `The ${type} '${name}' ${reason}. 
Received ${inspected}`; - }, - TypeError, - ); E( "ERR_INVALID_RETURN_VALUE", (input, name, value) => { @@ -609,79 +578,6 @@ var require_errors = __commonJS({ }, }); -// node_modules/readable-stream/lib/internal/validators.js -var require_validators = __commonJS({ - "node_modules/readable-stream/lib/internal/validators.js"(exports, module) { - "use strict"; - var { - ArrayPrototypeIncludes, - ArrayPrototypeJoin, - ArrayPrototypeMap, - NumberParseInt, - RegExpPrototypeTest, - String: String2, - } = require_primordials(); - var { - hideStackFrames, - codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL }, - } = require_errors(); - var { normalizeEncoding } = require_util(); - var { isAsyncFunction, isArrayBufferView } = require_util().types; - var signals = {}; - function isInt32(value) { - return value === (value | 0); - } - function isUint32(value) { - return value === value >>> 0; - } - var octalReg = /^[0-7]+$/; - var modeDesc = "must be a 32-bit unsigned integer or an octal string"; - function parseFileMode(value, name, def) { - if (typeof value === "undefined") { - value = def; - } - if (typeof value === "string") { - if (!RegExpPrototypeTest(octalReg, value)) { - throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc); - } - value = NumberParseInt(value, 8); - } - validateInt32(value, name, 0, 2 ** 32 - 1); - return value; - } - var validateOneOf = hideStackFrames((value, name, oneOf) => { - if (!ArrayPrototypeIncludes(oneOf, value)) { - const allowed = ArrayPrototypeJoin( - ArrayPrototypeMap(oneOf, v => (typeof v === "string" ? `'${v}'` : String2(v))), - ", ", - ); - const reason = "must be one of: " + allowed; - throw new ERR_INVALID_ARG_VALUE(name, value, reason); - } - }); - var validateObject = hideStackFrames((value, name, options) => { - const useDefaultOptions = options == null; - const allowArray = useDefaultOptions ? false : options.allowArray; - const allowFunction = useDefaultOptions ? false : options.allowFunction; - const nullable = useDefaultOptions ? false : options.nullable; - if ( - (!nullable && value === null) || - (!allowArray && $isJSArray(value)) || - (typeof value !== "object" && (!allowFunction || typeof value !== "function")) - ) { - throw new ERR_INVALID_ARG_TYPE(name, "Object", value); - } - }); - module.exports = { - isInt32, - isUint32, - parseFileMode, - validateObject, - validateOneOf, - }; - }, -}); - // node_modules/readable-stream/lib/internal/streams/utils.js var require_utils = __commonJS({ "node_modules/readable-stream/lib/internal/streams/utils.js"(exports, module) { @@ -975,7 +871,6 @@ var require_end_of_stream = __commonJS({ var { AbortError, codes } = require_errors(); var { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes; var { once } = require_util(); - var { validateObject } = require_validators(); var { Promise: Promise2 } = require_primordials(); var { isClosed, @@ -1185,7 +1080,6 @@ var require_operators = __commonJS({ codes: { ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, AbortError, } = require_errors(); - var { validateObject } = require_validators(); var kWeakHandler = require_primordials().Symbol("kWeak"); var { finished } = require_end_of_stream(); var { @@ -2018,7 +1912,7 @@ function getHighWaterMark(state, options, duplexKey, isDuplex) { if (hwm != null) { if (!NumberIsInteger(hwm) || hwm < 0) { const name = isDuplex ? 
`options.${duplexKey}` : "options.highWaterMark";
-      throw new ERR_INVALID_ARG_VALUE(name, hwm);
+      throw $ERR_INVALID_ARG_VALUE(name, hwm);
     }
     return MathFloor(hwm);
   }
@@ -2467,7 +2361,7 @@ var require_readable = __commonJS({
       } = options;

       if (encoding !== undefined && !Buffer.isEncoding(encoding))
-        throw new ERR_INVALID_ARG_VALUE(encoding, "options.encoding");
+        throw $ERR_INVALID_ARG_VALUE("options.encoding", encoding);

       validateBoolean(objectMode, "options.objectMode");
       // validateBoolean(native, "options.native");
@@ -2592,7 +2486,6 @@ var require_readable = __commonJS({
           ERR_STREAM_UNSHIFT_AFTER_END_EVENT,
         },
       } = require_errors();
-      var { validateObject } = require_validators();
       var from = require_from();
       var nop = () => {};
       var { errorOrDestroy } = destroyImpl;
@@ -5103,10 +4996,10 @@ var require_compose = __commonJS({
         continue;
       }
       if (n < streams.length - 1 && !isReadable(streams[n])) {
-        throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], "must be readable");
+        throw $ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], "must be readable");
       }
       if (n > 0 && !isWritable(streams[n])) {
-        throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], "must be writable");
+        throw $ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], "must be writable");
       }
     }
     let ondrain;
diff --git a/src/js/node/test.ts b/src/js/node/test.ts
new file mode 100644
index 0000000000..01470f3c9c
--- /dev/null
+++ b/src/js/node/test.ts
@@ -0,0 +1,38 @@
+// Hardcoded module "node:test"
+
+const { throwNotImplemented } = require("internal/shared");
+
+function suite() {
+  throwNotImplemented("node:test", 5090, "bun:test is available in the interim.");
+}
+
+function test() {
+  throwNotImplemented("node:test", 5090, "bun:test is available in the interim.");
+}
+
+function before() {
+  throwNotImplemented("node:test", 5090, "bun:test is available in the interim.");
+}
+
+function after() {
+  throwNotImplemented("node:test", 5090, "bun:test is available in the interim.");
+}
+
+function beforeEach() {
+  throwNotImplemented("node:test", 5090, "bun:test is available in the interim.");
+}
+
+function afterEach() {
+  throwNotImplemented("node:test", 5090, "bun:test is available in the interim.");
+}
+
+export default {
+  suite,
+  test,
+  describe: suite,
+  it: test,
+  before,
+  after,
+  beforeEach,
+  afterEach,
+};
diff --git a/src/js/node/timers.promises.ts b/src/js/node/timers.promises.ts
index 68ac1fa3f6..6d011ec78a 100644
--- a/src/js/node/timers.promises.ts
+++ b/src/js/node/timers.promises.ts
@@ -1,17 +1,10 @@
 // Hardcoded module "node:timers/promises"
 // https://github.com/niksy/isomorphic-timers-promises/blob/master/index.js
-const { validateBoolean, validateAbortSignal } = require("internal/validators");
+const { validateBoolean, validateAbortSignal, validateObject } = require("internal/validators");

 const symbolAsyncIterator = Symbol.asyncIterator;

-class ERR_INVALID_ARG_TYPE extends Error {
-  constructor(name, expected, actual) {
-    super(`${name} must be ${expected}, ${typeof actual} given`);
-    this.code = "ERR_INVALID_ARG_TYPE";
-  }
-}
-
 class AbortError extends Error {
   constructor() {
     super("The operation was aborted");
@@ -19,12 +12,6 @@
   }
 }

-function validateObject(object, name) {
-  if (object === null || typeof object !== "object") {
-    throw new ERR_INVALID_ARG_TYPE(name, "Object", object);
-  }
-}
-
 function asyncIterator({ next: nextFunction, return: returnFunction }) {
   const result = {};
   if (typeof nextFunction === "function") {
diff --git 
a/src/js/node/util.ts b/src/js/node/util.ts index 562aad9d15..a9ddd44135 100644 --- a/src/js/node/util.ts +++ b/src/js/node/util.ts @@ -2,12 +2,14 @@ const types = require("node:util/types"); /** @type {import('node-inspect-extracted')} */ const utl = require("internal/util/inspect"); -const { ERR_INVALID_ARG_TYPE, ERR_OUT_OF_RANGE } = require("internal/errors"); +const { ERR_OUT_OF_RANGE } = require("internal/errors"); const { promisify } = require("internal/promisify"); +const { validateString, validateOneOf } = require("internal/validators"); const internalErrorName = $newZigFunction("node_util_binding.zig", "internalErrorName", 1); const NumberIsSafeInteger = Number.isSafeInteger; +const ObjectKeys = Object.keys; var cjs_exports; @@ -137,15 +139,15 @@ var log = function log() { }; var inherits = function inherits(ctor, superCtor) { if (ctor === undefined || ctor === null) { - throw ERR_INVALID_ARG_TYPE("ctor", "function", ctor); + throw $ERR_INVALID_ARG_TYPE("ctor", "function", ctor); } if (superCtor === undefined || superCtor === null) { - throw ERR_INVALID_ARG_TYPE("superCtor", "function", superCtor); + throw $ERR_INVALID_ARG_TYPE("superCtor", "function", superCtor); } if (superCtor.prototype === undefined) { - throw ERR_INVALID_ARG_TYPE("superCtor.prototype", "object", superCtor.prototype); + throw $ERR_INVALID_ARG_TYPE("superCtor.prototype", "object", superCtor.prototype); } ctor.super_ = superCtor; Object.setPrototypeOf(ctor.prototype, superCtor.prototype); @@ -201,11 +203,7 @@ var toUSVString = input => { }; function styleText(format, text) { - if (typeof text !== "string") { - const e = new Error(`The text argument must be of type string. Received type ${typeof text}`); - e.code = "ERR_INVALID_ARG_TYPE"; - throw e; - } + validateString(text, "text"); if ($isJSArray(format)) { let left = ""; @@ -213,11 +211,7 @@ function styleText(format, text) { for (const key of format) { const formatCodes = inspect.colors[key]; if (formatCodes == null) { - const e = new Error( - `The value "${typeof key === "symbol" ? key.description : key}" is invalid for argument 'format'. Reason: must be one of: ${Object.keys(inspect.colors).join(", ")}`, - ); - e.code = "ERR_INVALID_ARG_VALUE"; - throw e; + validateOneOf(key, "format", ObjectKeys(inspect.colors)); } left += `\u001b[${formatCodes[0]}m`; right = `\u001b[${formatCodes[1]}m${right}`; @@ -229,17 +223,13 @@ function styleText(format, text) { let formatCodes = inspect.colors[format]; if (formatCodes == null) { - const e = new Error( - `The value "${typeof format === "symbol" ? format.description : format}" is invalid for argument 'format'. 
Reason: must be one of: ${Object.keys(inspect.colors).join(", ")}`,
-    );
-    e.code = "ERR_INVALID_ARG_VALUE";
-    throw e;
+    validateOneOf(format, "format", ObjectKeys(inspect.colors));
   }

   return `\u001b[${formatCodes[0]}m${text}\u001b[${formatCodes[1]}m`;
 }

 function getSystemErrorName(err: any) {
-  if (typeof err !== "number") throw ERR_INVALID_ARG_TYPE("err", "number", err);
+  if (typeof err !== "number") throw $ERR_INVALID_ARG_TYPE("err", "number", err);
   if (err >= 0 || !NumberIsSafeInteger(err)) throw ERR_OUT_OF_RANGE("err", "a negative integer", err);
   return internalErrorName(err);
 }
@@ -256,11 +246,11 @@ function onAbortedCallback(resolveFn: Function) {

 function aborted(signal: AbortSignal, resource: object) {
   if (!$isObject(signal) || !(signal instanceof AbortSignal)) {
-    throw ERR_INVALID_ARG_TYPE("signal", "AbortSignal", signal);
+    throw $ERR_INVALID_ARG_TYPE("signal", "AbortSignal", signal);
   }

   if (!$isObject(resource)) {
-    throw ERR_INVALID_ARG_TYPE("resource", "object", resource);
+    throw $ERR_INVALID_ARG_TYPE("resource", "object", resource);
   }

   if (signal.aborted) {
diff --git a/src/js/node/v8.ts b/src/js/node/v8.ts
index ada998d747..082fbfe1dc 100644
--- a/src/js/node/v8.ts
+++ b/src/js/node/v8.ts
@@ -1,4 +1,5 @@
 // Hardcoded module "node:v8"
+
 // This is a stub! None of this is actually implemented yet.
 const { hideFromStack, throwNotImplemented } = require("internal/shared");
 const jsc: typeof import("bun:jsc") = require("bun:jsc");
@@ -28,8 +29,21 @@ class GCProfiler {
 function cachedDataVersionTag() {
   notimpl("cachedDataVersionTag");
 }
+var HeapSnapshotReadable_;
 function getHeapSnapshot() {
-  notimpl("getHeapSnapshot");
+  if (!HeapSnapshotReadable_) {
+    const Readable = require("node:stream").Readable;
+    class HeapSnapshotReadable extends Readable {
+      constructor() {
+        super();
+        this.push(Bun.generateHeapSnapshot("v8"));
+        this.push(null);
+      }
+    }
+    HeapSnapshotReadable_ = HeapSnapshotReadable;
+  }
+
+  return new HeapSnapshotReadable_();
 }

 let totalmem_ = -1;
@@ -92,8 +106,45 @@ function stopCoverage() {
 function serialize(arg1) {
   return jsc.serialize(arg1, { binaryType: "nodebuffer" });
 }
-function writeHeapSnapshot() {
-  notimpl("writeHeapSnapshot");
+
+function getDefaultHeapSnapshotPath() {
+  const date = new Date();
+
+  const worker_threads = require("node:worker_threads");
+  const thread_id = worker_threads.threadId;
+
+  const yyyy = date.getFullYear();
+  const mm = (date.getMonth() + 1).toString().padStart(2, "0"); // getMonth() is zero-based
+  const dd = date.getDate().toString().padStart(2, "0");
+  const hh = date.getHours().toString().padStart(2, "0");
+  const MM = date.getMinutes().toString().padStart(2, "0");
+  const ss = date.getSeconds().toString().padStart(2, "0");
+
+  // 'Heap-${yyyymmdd}-${hhmmss}-${pid}-${thread_id}.heapsnapshot'
+  return `Heap-${yyyy}${mm}${dd}-${hh}${MM}${ss}-${process.pid}-${thread_id}.heapsnapshot`;
+}
+
+let fs;
+
+function writeHeapSnapshot(path, options) {
+  if (path !== undefined) {
+    if (typeof path !== "string") {
+      throw $ERR_INVALID_ARG_TYPE("path", "string", path);
+    }
+
+    if (!path) {
+      throw $ERR_INVALID_ARG_VALUE("path", path, "must be a non-empty string");
+    }
+  } else {
+    path = getDefaultHeapSnapshotPath();
+  }
+
+  if (!fs) {
+    fs = require("node:fs");
+  }
+  fs.writeFileSync(path, Bun.generateHeapSnapshot("v8"), "utf-8");
+
+  return path;
 }
 function setHeapSnapshotNearHeapLimit() {
   notimpl("setHeapSnapshotNearHeapLimit");
diff --git a/src/js/node/zlib.ts b/src/js/node/zlib.ts
index 77651013eb..b8bd4feadc 100644
--- a/src/js/node/zlib.ts
+++ 
b/src/js/node/zlib.ts @@ -24,13 +24,7 @@ const isArrayBufferView = ArrayBufferIsView; const isAnyArrayBuffer = b => b instanceof ArrayBuffer || b instanceof SharedArrayBuffer; const kMaxLength = $requireMap.$get("buffer")?.exports.kMaxLength ?? BufferModule.kMaxLength; -const { - ERR_BROTLI_INVALID_PARAM, - ERR_BUFFER_TOO_LARGE, - ERR_INVALID_ARG_TYPE, - ERR_OUT_OF_RANGE, - ERR_ZLIB_INITIALIZATION_FAILED, -} = require("internal/errors"); +const { ERR_BROTLI_INVALID_PARAM, ERR_BUFFER_TOO_LARGE, ERR_OUT_OF_RANGE } = require("internal/errors"); const { Transform, finished } = require("node:stream"); const owner_symbol = Symbol("owner_symbol"); const { @@ -126,7 +120,7 @@ function zlibBufferSync(engine, buffer) { if (isAnyArrayBuffer(buffer)) { buffer = Buffer.from(buffer); } else { - throw ERR_INVALID_ARG_TYPE("buffer", "string, Buffer, TypedArray, DataView, or ArrayBuffer", buffer); + throw $ERR_INVALID_ARG_TYPE("buffer", "string, Buffer, TypedArray, DataView, or ArrayBuffer", buffer); } } buffer = processChunkSync(engine, buffer, engine._finishFlushFlag); @@ -562,7 +556,7 @@ function Zlib(opts, mode) { if (isAnyArrayBuffer(dictionary)) { dictionary = Buffer.from(dictionary); } else { - throw ERR_INVALID_ARG_TYPE("options.dictionary", "Buffer, TypedArray, DataView, or ArrayBuffer", dictionary); + throw $ERR_INVALID_ARG_TYPE("options.dictionary", "Buffer, TypedArray, DataView, or ArrayBuffer", dictionary); } } } @@ -686,7 +680,7 @@ function Brotli(opts, mode) { const value = opts.params[origKey]; if (typeof value !== "number" && typeof value !== "boolean") { - throw ERR_INVALID_ARG_TYPE("options.params[key]", "number", opts.params[origKey]); + throw $ERR_INVALID_ARG_TYPE("options.params[key]", "number", opts.params[origKey]); } brotliInitParamsArray[key] = value; }); @@ -696,7 +690,7 @@ function Brotli(opts, mode) { this._writeState = new Uint32Array(2); if (!handle.init(brotliInitParamsArray, this._writeState, processCallback)) { - throw ERR_ZLIB_INITIALIZATION_FAILED(); + throw $ERR_ZLIB_INITIALIZATION_FAILED(); } ZlibBase.$apply(this, [opts, mode, handle, brotliDefaultOpts]); diff --git a/src/js_ast.zig b/src/js_ast.zig index bcc97689c0..71489b13b9 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -8220,7 +8220,7 @@ pub const Macro = struct { threadlocal var args_buf: [3]js.JSObjectRef = undefined; threadlocal var exception_holder: Zig.ZigException.Holder = undefined; - pub const MacroError = error{ MacroFailed, OutOfMemory } || ToJSError; + pub const MacroError = error{ MacroFailed, OutOfMemory } || ToJSError || bun.JSError; pub const Run = struct { caller: Expr, @@ -8435,7 +8435,7 @@ pub const Macro = struct { return _entry.value_ptr.*; } - var object_iter = JSC.JSPropertyIterator(.{ + var object_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(this.global, value); @@ -8452,7 +8452,7 @@ pub const Macro = struct { ); _entry.value_ptr.* = out; - while (object_iter.next()) |prop| { + while (try object_iter.next()) |prop| { properties[object_iter.i] = G.Property{ .key = Expr.init(E.String, E.String.init(prop.toOwnedSlice(this.allocator) catch unreachable), this.caller.loc), .value = try this.run(object_iter.value), diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index b801b6c11c..c3435c06cb 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -864,7 +864,7 @@ pub const PackageJSON = struct { pm, )) |dependency_version| { if (dependency_version.value.npm.version.isExact()) { - if 
(pm.lockfile.resolve(package_json.name, dependency_version)) |resolved| { + if (pm.lockfile.resolvePackageFromNameAndVersion(package_json.name, dependency_version)) |resolved| { package_json.package_manager_package_id = resolved; if (resolved > 0) { break :update_dependencies; diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig index f073394c1f..46f2dcd03a 100644 --- a/src/resolver/resolve_path.zig +++ b/src/resolver/resolve_path.zig @@ -583,10 +583,10 @@ pub fn relativeAlloc(allocator: std.mem.Allocator, from: []const u8, to: []const // This function is based on Go's volumeNameLen function // https://cs.opensource.google/go/go/+/refs/tags/go1.17.6:src/path/filepath/path_windows.go;l=57 // volumeNameLen returns length of the leading volume name on Windows. -fn windowsVolumeNameLen(path: []const u8) struct { usize, usize } { +pub fn windowsVolumeNameLen(path: []const u8) struct { usize, usize } { return windowsVolumeNameLenT(u8, path); } -fn windowsVolumeNameLenT(comptime T: type, path: []const T) struct { usize, usize } { +pub fn windowsVolumeNameLenT(comptime T: type, path: []const T) struct { usize, usize } { if (path.len < 2) return .{ 0, 0 }; // with drive letter const c = path[0]; @@ -1252,9 +1252,11 @@ pub fn joinAbs(cwd: []const u8, comptime _platform: Platform, part: []const u8) return joinAbsString(cwd, &.{part}, _platform); } -// Convert parts of potentially invalid file paths into a single valid filpeath -// without querying the filesystem -// This is the equivalent of path.resolve +/// Convert parts of potentially invalid file paths into a single valid filepath +/// without querying the filesystem +/// This is the equivalent of path.resolve +/// +/// Returned path is stored in a temporary buffer. It must be copied if it needs to be stored. pub fn joinAbsString(_cwd: []const u8, parts: anytype, comptime _platform: Platform) []const u8 { return joinAbsStringBuf( _cwd, @@ -1264,6 +1266,11 @@ pub fn joinAbsString(_cwd: []const u8, parts: anytype, comptime _platform: Platf ); } +/// Convert parts of potentially invalid file paths into a single valid filepath +/// without querying the filesystem +/// This is the equivalent of path.resolve +/// +/// Returned path is stored in a temporary buffer. It must be copied if it needs to be stored.
pub fn joinAbsStringZ(_cwd: []const u8, parts: anytype, comptime _platform: Platform) [:0]const u8 { return joinAbsStringBufZ( _cwd, diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 3ef06e90ee..d737f4af3d 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -1877,7 +1877,7 @@ pub const Resolver = struct { ) orelse break :load_module_from_cache; } - if (manager.lockfile.resolve(esm.name, dependency_version)) |id| { + if (manager.lockfile.resolvePackageFromNameAndVersion(esm.name, dependency_version)) |id| { resolved_package_id = id; } } @@ -2186,7 +2186,7 @@ pub const Resolver = struct { var pm = r.getPackageManager(); if (comptime Environment.allow_assert) { // we should never be trying to resolve a dependency that is already resolved - assert(pm.lockfile.resolve(esm.name, version) == null); + assert(pm.lockfile.resolvePackageFromNameAndVersion(esm.name, version) == null); } // Add the containing package to the lockfile diff --git a/src/s3.zig b/src/s3.zig deleted file mode 100644 index aa12dadd49..0000000000 --- a/src/s3.zig +++ /dev/null @@ -1,2182 +0,0 @@ -const bun = @import("root").bun; -const picohttp = bun.picohttp; -const std = @import("std"); -const DotEnv = @import("./env_loader.zig"); -pub const RareData = @import("./bun.js/rare_data.zig"); - -const JSC = bun.JSC; -const strings = bun.strings; - -pub const AWSCredentials = struct { - accessKeyId: []const u8, - secretAccessKey: []const u8, - region: []const u8, - endpoint: []const u8, - bucket: []const u8, - - ref_count: u32 = 1, - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); - - pub fn estimatedSize(this: *const @This()) usize { - return @sizeOf(AWSCredentials) + this.accessKeyId.len + this.region.len + this.secretAccessKey.len + this.endpoint.len + this.bucket.len; - } - - pub const AWSCredentialsWithOptions = struct { - credentials: AWSCredentials, - options: MultiPartUpload.MultiPartUploadOptions = .{}, - - _accessKeyIdSlice: ?JSC.ZigString.Slice = null, - _secretAccessKeySlice: ?JSC.ZigString.Slice = null, - _regionSlice: ?JSC.ZigString.Slice = null, - _endpointSlice: ?JSC.ZigString.Slice = null, - _bucketSlice: ?JSC.ZigString.Slice = null, - - pub fn deinit(this: *@This()) void { - if (this._accessKeyIdSlice) |slice| slice.deinit(); - if (this._secretAccessKeySlice) |slice| slice.deinit(); - if (this._regionSlice) |slice| slice.deinit(); - if (this._endpointSlice) |slice| slice.deinit(); - if (this._bucketSlice) |slice| slice.deinit(); - } - }; - pub fn getCredentialsWithOptions(this: AWSCredentials, options: ?JSC.JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!AWSCredentialsWithOptions { - // get ENV config - var new_credentials = AWSCredentialsWithOptions{ - .credentials = this, - .options = .{}, - }; - errdefer { - new_credentials.deinit(); - } - - if (options) |opts| { - if (opts.isObject()) { - if (try opts.getTruthyComptime(globalObject, "accessKeyId")) |js_value| { - if (!js_value.isEmptyOrUndefinedOrNull()) { - if (js_value.isString()) { - const str = bun.String.fromJS(js_value, globalObject); - defer str.deref(); - if (str.tag != .Empty and str.tag != .Dead) { - new_credentials._accessKeyIdSlice = str.toUTF8(bun.default_allocator); - new_credentials.credentials.accessKeyId = new_credentials._accessKeyIdSlice.?.slice(); - } - } else { - return globalObject.throwInvalidArgumentTypeValue("accessKeyId", "string", js_value); - } - } - } - if (try opts.getTruthyComptime(globalObject, "secretAccessKey")) |js_value| { - if 
(!js_value.isEmptyOrUndefinedOrNull()) { - if (js_value.isString()) { - const str = bun.String.fromJS(js_value, globalObject); - defer str.deref(); - if (str.tag != .Empty and str.tag != .Dead) { - new_credentials._secretAccessKeySlice = str.toUTF8(bun.default_allocator); - new_credentials.credentials.secretAccessKey = new_credentials._secretAccessKeySlice.?.slice(); - } - } else { - return globalObject.throwInvalidArgumentTypeValue("secretAccessKey", "string", js_value); - } - } - } - if (try opts.getTruthyComptime(globalObject, "region")) |js_value| { - if (!js_value.isEmptyOrUndefinedOrNull()) { - if (js_value.isString()) { - const str = bun.String.fromJS(js_value, globalObject); - defer str.deref(); - if (str.tag != .Empty and str.tag != .Dead) { - new_credentials._regionSlice = str.toUTF8(bun.default_allocator); - new_credentials.credentials.region = new_credentials._regionSlice.?.slice(); - } - } else { - return globalObject.throwInvalidArgumentTypeValue("region", "string", js_value); - } - } - } - if (try opts.getTruthyComptime(globalObject, "endpoint")) |js_value| { - if (!js_value.isEmptyOrUndefinedOrNull()) { - if (js_value.isString()) { - const str = bun.String.fromJS(js_value, globalObject); - defer str.deref(); - if (str.tag != .Empty and str.tag != .Dead) { - new_credentials._endpointSlice = str.toUTF8(bun.default_allocator); - const normalized_endpoint = bun.URL.parse(new_credentials._endpointSlice.?.slice()).host; - if (normalized_endpoint.len > 0) { - new_credentials.credentials.endpoint = normalized_endpoint; - } - } - } else { - return globalObject.throwInvalidArgumentTypeValue("endpoint", "string", js_value); - } - } - } - if (try opts.getTruthyComptime(globalObject, "bucket")) |js_value| { - if (!js_value.isEmptyOrUndefinedOrNull()) { - if (js_value.isString()) { - const str = bun.String.fromJS(js_value, globalObject); - defer str.deref(); - if (str.tag != .Empty and str.tag != .Dead) { - new_credentials._bucketSlice = str.toUTF8(bun.default_allocator); - new_credentials.credentials.bucket = new_credentials._bucketSlice.?.slice(); - } - } else { - return globalObject.throwInvalidArgumentTypeValue("bucket", "string", js_value); - } - } - } - - if (try opts.getOptional(globalObject, "pageSize", i32)) |pageSize| { - if (pageSize < MultiPartUpload.MIN_SINGLE_UPLOAD_SIZE_IN_MiB and pageSize > MultiPartUpload.MAX_SINGLE_UPLOAD_SIZE_IN_MiB) { - return globalObject.throwRangeError(pageSize, .{ - .min = @intCast(MultiPartUpload.MIN_SINGLE_UPLOAD_SIZE_IN_MiB), - .max = @intCast(MultiPartUpload.MAX_SINGLE_UPLOAD_SIZE_IN_MiB), - .field_name = "pageSize", - }); - } else { - new_credentials.options.partSize = @intCast(pageSize); - } - } - - if (try opts.getOptional(globalObject, "queueSize", i32)) |queueSize| { - if (queueSize < 1) { - return globalObject.throwRangeError(queueSize, .{ - .min = 1, - .field_name = "queueSize", - }); - } else { - new_credentials.options.queueSize = @intCast(@max(queueSize, std.math.maxInt(u8))); - } - } - } - } - return new_credentials; - } - pub fn dupe(this: *const @This()) *AWSCredentials { - return AWSCredentials.new(.{ - .accessKeyId = if (this.accessKeyId.len > 0) - bun.default_allocator.dupe(u8, this.accessKeyId) catch bun.outOfMemory() - else - "", - - .secretAccessKey = if (this.secretAccessKey.len > 0) - bun.default_allocator.dupe(u8, this.secretAccessKey) catch bun.outOfMemory() - else - "", - - .region = if (this.region.len > 0) - bun.default_allocator.dupe(u8, this.region) catch bun.outOfMemory() - else - "", - - .endpoint = if 
(this.endpoint.len > 0) - bun.default_allocator.dupe(u8, this.endpoint) catch bun.outOfMemory() - else - "", - - .bucket = if (this.bucket.len > 0) - bun.default_allocator.dupe(u8, this.bucket) catch bun.outOfMemory() - else - "", - }); - } - pub fn deinit(this: *@This()) void { - if (this.accessKeyId.len > 0) { - bun.default_allocator.free(this.accessKeyId); - } - if (this.secretAccessKey.len > 0) { - bun.default_allocator.free(this.secretAccessKey); - } - if (this.region.len > 0) { - bun.default_allocator.free(this.region); - } - if (this.endpoint.len > 0) { - bun.default_allocator.free(this.endpoint); - } - if (this.bucket.len > 0) { - bun.default_allocator.free(this.bucket); - } - this.destroy(); - } - - const log = bun.Output.scoped(.AWS, false); - - const DateResult = struct { - // numeric representation of year, month and day (excluding time components) - numeric_day: u64, - date: []const u8, - }; - - fn getAMZDate(allocator: std.mem.Allocator) DateResult { - // We can also use Date.now() but would be slower and would add JSC dependency - // var buffer: [28]u8 = undefined; - // the code bellow is the same as new Date(Date.now()).toISOString() - // JSC.JSValue.getDateNowISOString(globalObject, &buffer); - - // Create UTC timestamp - const secs: u64 = @intCast(@divFloor(std.time.milliTimestamp(), 1000)); - const utc_seconds = std.time.epoch.EpochSeconds{ .secs = secs }; - const utc_day = utc_seconds.getEpochDay(); - const year_and_day = utc_day.calculateYearDay(); - const month_and_day = year_and_day.calculateMonthDay(); - // Get UTC date components - const year = year_and_day.year; - const day = @as(u32, month_and_day.day_index) + 1; // this starts in 0 - const month = month_and_day.month.numeric(); // starts in 1 - - // Get UTC time components - const time = utc_seconds.getDaySeconds(); - const hours = time.getHoursIntoDay(); - const minutes = time.getMinutesIntoHour(); - const seconds = time.getSecondsIntoMinute(); - - // Format the date - return .{ - .numeric_day = secs - time.secs, - .date = std.fmt.allocPrint(allocator, "{d:0>4}{d:0>2}{d:0>2}T{d:0>2}{d:0>2}{d:0>2}Z", .{ - year, - month, - day, - hours, - minutes, - seconds, - }) catch bun.outOfMemory(), - }; - } - - const DIGESTED_HMAC_256_LEN = 32; - pub const SignResult = struct { - amz_date: []const u8, - host: []const u8, - authorization: []const u8, - url: []const u8, - - content_disposition: []const u8, - _headers: [5]picohttp.Header, - _headers_len: u8 = 4, - - pub fn headers(this: *const @This()) []const picohttp.Header { - return this._headers[0..this._headers_len]; - } - - pub fn deinit(this: *const @This()) void { - if (this.amz_date.len > 0) { - bun.default_allocator.free(this.amz_date); - } - - if (this.content_disposition.len > 0) { - bun.default_allocator.free(this.content_disposition); - } - - if (this.host.len > 0) { - bun.default_allocator.free(this.host); - } - - if (this.authorization.len > 0) { - bun.default_allocator.free(this.authorization); - } - - if (this.url.len > 0) { - bun.default_allocator.free(this.url); - } - } - }; - - pub const SignQueryOptions = struct { - expires: usize = 86400, - }; - - pub const SignOptions = struct { - path: []const u8, - method: bun.http.Method, - content_hash: ?[]const u8 = null, - search_params: ?[]const u8 = null, - content_disposition: ?[]const u8 = null, - }; - fn guessRegion(endpoint: []const u8) []const u8 { - if (endpoint.len > 0) { - if (strings.endsWith(endpoint, ".r2.cloudflarestorage.com")) return "auto"; - if (strings.indexOf(endpoint, ".amazonaws.com")) |end| 
{ - if (strings.indexOf(endpoint, "s3.")) |start| { - return endpoint[start + 3 .. end]; - } - } - } - return "us-east-1"; - } - fn toHexChar(value: u8) !u8 { - return switch (value) { - 0...9 => value + '0', - 10...15 => (value - 10) + 'A', - else => error.InvalidHexChar, - }; - } - fn encodeURIComponent(input: []const u8, buffer: []u8) ![]const u8 { - var written: usize = 0; - - for (input) |c| { - switch (c) { - // RFC 3986 Unreserved Characters (do not encode) - 'A'...'Z', 'a'...'z', '0'...'9', '-', '_', '.', '~' => { - if (written >= buffer.len) return error.BufferTooSmall; - buffer[written] = c; - written += 1; - }, - // All other characters need to be percent-encoded - else => { - if (written + 3 > buffer.len) return error.BufferTooSmall; - buffer[written] = '%'; - // Convert byte to hex - const high_nibble: u8 = (c >> 4) & 0xF; - const low_nibble: u8 = c & 0xF; - buffer[written + 1] = try toHexChar(high_nibble); - buffer[written + 2] = try toHexChar(low_nibble); - written += 3; - }, - } - } - - return buffer[0..written]; - } - - const ErrorCodeAndMessage = struct { - code: []const u8, - message: []const u8, - }; - fn getSignErrorMessage(comptime err: anyerror) [:0]const u8 { - return switch (err) { - error.MissingCredentials => return "missing s3 credentials", - error.InvalidMethod => return "method must be GET, PUT, DELETE or HEAD when using s3 protocol", - error.InvalidPath => return "invalid s3 bucket, key combination", - error.InvalidEndpoint => return "invalid s3 endpoint", - else => return "failed to retrieve s3 content check your credentials", - }; - } - pub fn getJSSignError(err: anyerror, globalThis: *JSC.JSGlobalObject) JSC.JSValue { - return switch (err) { - error.MissingCredentials => return globalThis.ERR_AWS_MISSING_CREDENTIALS(getSignErrorMessage(error.MissingCredentials), .{}).toJS(), - error.InvalidMethod => return globalThis.ERR_AWS_INVALID_METHOD(getSignErrorMessage(error.InvalidMethod), .{}).toJS(), - error.InvalidPath => return globalThis.ERR_AWS_INVALID_PATH(getSignErrorMessage(error.InvalidPath), .{}).toJS(), - error.InvalidEndpoint => return globalThis.ERR_AWS_INVALID_ENDPOINT(getSignErrorMessage(error.InvalidEndpoint), .{}).toJS(), - else => return globalThis.ERR_AWS_INVALID_SIGNATURE(getSignErrorMessage(error.SignError), .{}).toJS(), - }; - } - pub fn throwSignError(err: anyerror, globalThis: *JSC.JSGlobalObject) bun.JSError { - return switch (err) { - error.MissingCredentials => globalThis.ERR_AWS_MISSING_CREDENTIALS(getSignErrorMessage(error.MissingCredentials), .{}).throw(), - error.InvalidMethod => globalThis.ERR_AWS_INVALID_METHOD(getSignErrorMessage(error.InvalidMethod), .{}).throw(), - error.InvalidPath => globalThis.ERR_AWS_INVALID_PATH(getSignErrorMessage(error.InvalidPath), .{}).throw(), - error.InvalidEndpoint => globalThis.ERR_AWS_INVALID_ENDPOINT(getSignErrorMessage(error.InvalidEndpoint), .{}).throw(), - else => globalThis.ERR_AWS_INVALID_SIGNATURE(getSignErrorMessage(error.SignError), .{}).throw(), - }; - } - pub fn getSignErrorCodeAndMessage(err: anyerror) ErrorCodeAndMessage { - return switch (err) { - error.MissingCredentials => .{ .code = "MissingCredentials", .message = getSignErrorMessage(error.MissingCredentials) }, - error.InvalidMethod => .{ .code = "InvalidMethod", .message = getSignErrorMessage(error.InvalidMethod) }, - error.InvalidPath => .{ .code = "InvalidPath", .message = getSignErrorMessage(error.InvalidPath) }, - error.InvalidEndpoint => .{ .code = "InvalidEndpoint", .message = getSignErrorMessage(error.InvalidEndpoint) }, 
- else => .{ .code = "SignError", .message = getSignErrorMessage(error.SignError) }, - }; - } - pub fn signRequest(this: *const @This(), signOptions: SignOptions, signQueryOption: ?SignQueryOptions) !SignResult { - const method = signOptions.method; - const request_path = signOptions.path; - const content_hash = signOptions.content_hash; - const search_params = signOptions.search_params; - - var content_disposition = signOptions.content_disposition; - if (content_disposition != null and content_disposition.?.len == 0) { - content_disposition = null; - } - - if (this.accessKeyId.len == 0 or this.secretAccessKey.len == 0) return error.MissingCredentials; - const signQuery = signQueryOption != null; - const expires = if (signQueryOption) |options| options.expires else 0; - const method_name = switch (method) { - .GET => "GET", - .POST => "POST", - .PUT => "PUT", - .DELETE => "DELETE", - .HEAD => "HEAD", - else => return error.InvalidMethod, - }; - - const region = if (this.region.len > 0) this.region else guessRegion(this.endpoint); - var full_path = request_path; - if (strings.startsWith(full_path, "/")) { - full_path = full_path[1..]; - } - var path: []const u8 = full_path; - var bucket: []const u8 = this.bucket; - - if (bucket.len == 0) { - //TODO: r2 supports bucket in the endpoint - - // guess bucket using path - if (strings.indexOf(full_path, "/")) |end| { - bucket = full_path[0..end]; - path = full_path[end + 1 ..]; - } else { - return error.InvalidPath; - } - } - if (strings.endsWith(path, "/")) { - path = path[0..path.len]; - } - if (strings.startsWith(path, "/")) { - path = path[1..]; - } - - // if we allow path.len == 0 it will list the bucket for now we disallow - if (path.len == 0) return error.InvalidPath; - - var path_buffer: [1024 + 63 + 2]u8 = undefined; // 1024 max key size and 63 max bucket name - - const normalizedPath = std.fmt.bufPrint(&path_buffer, "/{s}/{s}", .{ bucket, path }) catch return error.InvalidPath; - - const date_result = getAMZDate(bun.default_allocator); - const amz_date = date_result.date; - errdefer bun.default_allocator.free(amz_date); - - const amz_day = amz_date[0..8]; - const signed_headers = if (signQuery) "host" else brk: { - if (content_disposition != null) { - break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date"; - } else { - break :brk "host;x-amz-content-sha256;x-amz-date"; - } - }; - // detect service name and host from region or endpoint - var encoded_host_buffer: [512]u8 = undefined; - var encoded_host: []const u8 = ""; - const host = brk_host: { - if (this.endpoint.len > 0) { - encoded_host = encodeURIComponent(this.endpoint, &encoded_host_buffer) catch return error.InvalidEndpoint; - break :brk_host try bun.default_allocator.dupe(u8, this.endpoint); - } else { - break :brk_host try std.fmt.allocPrint(bun.default_allocator, "s3.{s}.amazonaws.com", .{region}); - } - }; - const service_name = "s3"; - - errdefer bun.default_allocator.free(host); - - const aws_content_hash = if (content_hash) |hash| hash else ("UNSIGNED-PAYLOAD"); - var tmp_buffer: [2048]u8 = undefined; - - const authorization = brk: { - // we hash the hash so we need 2 buffers - var hmac_sig_service: [bun.BoringSSL.EVP_MAX_MD_SIZE]u8 = undefined; - var hmac_sig_service2: [bun.BoringSSL.EVP_MAX_MD_SIZE]u8 = undefined; - - const sigDateRegionServiceReq = brk_sign: { - const key = try std.fmt.bufPrint(&tmp_buffer, "{s}{s}{s}", .{ region, service_name, this.secretAccessKey }); - var cache = (JSC.VirtualMachine.getMainThreadVM() orelse 
JSC.VirtualMachine.get()).rareData().awsCache(); - if (cache.get(date_result.numeric_day, key)) |cached| { - break :brk_sign cached; - } - // not cached yet lets generate a new one - const sigDate = bun.hmac.generate(try std.fmt.bufPrint(&tmp_buffer, "AWS4{s}", .{this.secretAccessKey}), amz_day, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature; - const sigDateRegion = bun.hmac.generate(sigDate, region, .sha256, &hmac_sig_service2) orelse return error.FailedToGenerateSignature; - const sigDateRegionService = bun.hmac.generate(sigDateRegion, service_name, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature; - const result = bun.hmac.generate(sigDateRegionService, "aws4_request", .sha256, &hmac_sig_service2) orelse return error.FailedToGenerateSignature; - - cache.set(date_result.numeric_day, key, hmac_sig_service2[0..DIGESTED_HMAC_256_LEN].*); - break :brk_sign result; - }; - if (signQuery) { - const canonical = try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, if (encoded_host.len > 0) encoded_host else host, signed_headers, aws_content_hash }); - var sha_digest = std.mem.zeroes(bun.sha.SHA256.Digest); - bun.sha.SHA256.hash(canonical, &sha_digest, JSC.VirtualMachine.get().rareData().boringEngine()); - - const signValue = try std.fmt.bufPrint(&tmp_buffer, "AWS4-HMAC-SHA256\n{s}\n{s}/{s}/{s}/aws4_request\n{s}", .{ amz_date, amz_day, region, service_name, bun.fmt.bytesToHex(sha_digest[0..bun.sha.SHA256.digest], .lower) }); - - const signature = bun.hmac.generate(sigDateRegionServiceReq, signValue, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature; - break :brk try std.fmt.allocPrint( - bun.default_allocator, - "https://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", - .{ host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, - ); - } else { - var encoded_content_disposition_buffer: [255]u8 = undefined; - const encoded_content_disposition: []const u8 = if (content_disposition) |cd| encodeURIComponent(cd, &encoded_content_disposition_buffer) catch return error.ContentTypeIsTooLong else ""; - const canonical = brk_canonical: { - if (content_disposition != null) { - break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, if (encoded_host.len > 0) encoded_host else host, aws_content_hash, amz_date, signed_headers, aws_content_hash }); - } else { - break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] 
else "", if (encoded_host.len > 0) encoded_host else host, aws_content_hash, amz_date, signed_headers, aws_content_hash }); - } - }; - var sha_digest = std.mem.zeroes(bun.sha.SHA256.Digest); - bun.sha.SHA256.hash(canonical, &sha_digest, JSC.VirtualMachine.get().rareData().boringEngine()); - - const signValue = try std.fmt.bufPrint(&tmp_buffer, "AWS4-HMAC-SHA256\n{s}\n{s}/{s}/{s}/aws4_request\n{s}", .{ amz_date, amz_day, region, service_name, bun.fmt.bytesToHex(sha_digest[0..bun.sha.SHA256.digest], .lower) }); - - const signature = bun.hmac.generate(sigDateRegionServiceReq, signValue, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature; - - break :brk try std.fmt.allocPrint( - bun.default_allocator, - "AWS4-HMAC-SHA256 Credential={s}/{s}/{s}/{s}/aws4_request, SignedHeaders={s}, Signature={s}", - .{ this.accessKeyId, amz_day, region, service_name, signed_headers, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, - ); - } - }; - errdefer bun.default_allocator.free(authorization); - - if (signQuery) { - defer bun.default_allocator.free(host); - defer bun.default_allocator.free(amz_date); - - return SignResult{ - .amz_date = "", - .host = "", - .authorization = "", - .url = authorization, - .content_disposition = "", - ._headers = .{ - .{ .name = "", .value = "" }, - .{ .name = "", .value = "" }, - .{ .name = "", .value = "" }, - .{ .name = "", .value = "" }, - .{ .name = "", .value = "" }, - }, - ._headers_len = 0, - }; - } - - if (content_disposition) |cd| { - const content_disposition_value = bun.default_allocator.dupe(u8, cd) catch bun.outOfMemory(); - return SignResult{ - .amz_date = amz_date, - .host = host, - .authorization = authorization, - .url = try std.fmt.allocPrint(bun.default_allocator, "https://{s}{s}{s}", .{ host, normalizedPath, if (search_params) |s| s else "" }), - .content_disposition = content_disposition_value, - ._headers = .{ - .{ .name = "x-amz-content-sha256", .value = aws_content_hash }, - .{ .name = "x-amz-date", .value = amz_date }, - .{ .name = "Authorization", .value = authorization[0..] }, - .{ .name = "Host", .value = host }, - .{ .name = "Content-Disposition", .value = content_disposition_value }, - }, - ._headers_len = 5, - }; - } - return SignResult{ - .amz_date = amz_date, - .host = host, - .authorization = authorization, - .url = try std.fmt.allocPrint(bun.default_allocator, "https://{s}{s}{s}", .{ host, normalizedPath, if (search_params) |s| s else "" }), - .content_disposition = "", - ._headers = .{ - .{ .name = "x-amz-content-sha256", .value = aws_content_hash }, - .{ .name = "x-amz-date", .value = amz_date }, - .{ .name = "Authorization", .value = authorization[0..] 
}, - .{ .name = "Host", .value = host }, - .{ .name = "", .value = "" }, - }, - ._headers_len = 4, - }; - } - pub const S3Error = struct { - code: []const u8, - message: []const u8, - - pub fn toJS(err: *const @This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { - const js_err = globalObject.createErrorInstance("{s}", .{err.message}); - js_err.put(globalObject, JSC.ZigString.static("code"), JSC.ZigString.init(err.code).toJS(globalObject)); - return js_err; - } - }; - pub const S3StatResult = union(enum) { - success: struct { - size: usize = 0, - /// etag is not owned and need to be copied if used after this callback - etag: []const u8 = "", - }, - not_found: void, - - /// failure error is not owned and need to be copied if used after this callback - failure: S3Error, - }; - pub const S3DownloadResult = union(enum) { - success: struct { - /// etag is not owned and need to be copied if used after this callback - etag: []const u8 = "", - /// body is owned and dont need to be copied, but dont forget to free it - body: bun.MutableString, - }, - not_found: void, - /// failure error is not owned and need to be copied if used after this callback - failure: S3Error, - }; - pub const S3UploadResult = union(enum) { - success: void, - /// failure error is not owned and need to be copied if used after this callback - failure: S3Error, - }; - pub const S3DeleteResult = union(enum) { - success: void, - not_found: void, - - /// failure error is not owned and need to be copied if used after this callback - failure: S3Error, - }; - // commit result also fails if status 200 but with body containing an Error - pub const S3CommitResult = union(enum) { - success: void, - /// failure error is not owned and need to be copied if used after this callback - failure: S3Error, - }; - // commit result also fails if status 200 but with body containing an Error - pub const S3PartResult = union(enum) { - etag: []const u8, - /// failure error is not owned and need to be copied if used after this callback - failure: S3Error, - }; - pub const S3HttpSimpleTask = struct { - http: bun.http.AsyncHTTP, - vm: *JSC.VirtualMachine, - sign_result: SignResult, - headers: JSC.WebCore.Headers, - callback_context: *anyopaque, - callback: Callback, - response_buffer: bun.MutableString = .{ - .allocator = bun.default_allocator, - .list = .{ - .items = &.{}, - .capacity = 0, - }, - }, - result: bun.http.HTTPClientResult = .{}, - concurrent_task: JSC.ConcurrentTask = .{}, - range: ?[]const u8, - poll_ref: bun.Async.KeepAlive = bun.Async.KeepAlive.init(), - - usingnamespace bun.New(@This()); - pub const Callback = union(enum) { - stat: *const fn (S3StatResult, *anyopaque) void, - download: *const fn (S3DownloadResult, *anyopaque) void, - upload: *const fn (S3UploadResult, *anyopaque) void, - delete: *const fn (S3DeleteResult, *anyopaque) void, - commit: *const fn (S3CommitResult, *anyopaque) void, - part: *const fn (S3PartResult, *anyopaque) void, - - pub fn fail(this: @This(), code: []const u8, message: []const u8, context: *anyopaque) void { - switch (this) { - inline .upload, - .download, - .stat, - .delete, - .commit, - .part, - => |callback| callback(.{ - .failure = .{ - .code = code, - .message = message, - }, - }, context), - } - } - }; - pub fn deinit(this: *@This()) void { - if (this.result.certificate_info) |*certificate| { - certificate.deinit(bun.default_allocator); - } - this.poll_ref.unref(this.vm); - this.response_buffer.deinit(); - this.headers.deinit(); - this.sign_result.deinit(); - this.http.clearData(); - if 
(this.range) |range| { - bun.default_allocator.free(range); - } - if (this.result.metadata) |*metadata| { - metadata.deinit(bun.default_allocator); - } - this.destroy(); - } - - fn fail(this: *@This()) void { - var code: []const u8 = "UnknownError"; - var message: []const u8 = "an unexpected error has occurred"; - if (this.result.fail) |err| { - code = @errorName(err); - } else if (this.result.body) |body| { - const bytes = body.list.items; - if (bytes.len > 0) { - message = bytes[0..]; - if (strings.indexOf(bytes, "<Code>")) |start| { - if (strings.indexOf(bytes, "</Code>")) |end| { - code = bytes[start + "<Code>".len .. end]; - } - } - if (strings.indexOf(bytes, "<Message>")) |start| { - if (strings.indexOf(bytes, "</Message>")) |end| { - message = bytes[start + "<Message>".len .. end]; - } - } - } - } - this.callback.fail(code, message, this.callback_context); - } - - fn failIfContainsError(this: *@This(), status: u32) bool { - var code: []const u8 = "UnknownError"; - var message: []const u8 = "an unexpected error has occurred"; - - if (this.result.fail) |err| { - code = @errorName(err); - } else if (this.result.body) |body| { - const bytes = body.list.items; - var has_error = false; - if (bytes.len > 0) { - message = bytes[0..]; - if (strings.indexOf(bytes, "<Error>") != null) { - has_error = true; - if (strings.indexOf(bytes, "<Code>")) |start| { - if (strings.indexOf(bytes, "</Code>")) |end| { - code = bytes[start + "<Code>".len .. end]; - } - } - if (strings.indexOf(bytes, "<Message>")) |start| { - if (strings.indexOf(bytes, "</Message>")) |end| { - message = bytes[start + "<Message>".len .. end]; - } - } - } - } - if (!has_error and status == 200 or status == 206) { - return false; - } - } else if (status == 200 or status == 206) { - return false; - } - this.callback.fail(code, message, this.callback_context); - return true; - } - - pub fn onResponse(this: *@This()) void { - defer this.deinit(); - if (!this.result.isSuccess()) { - this.fail(); - return; - } - bun.assert(this.result.metadata != null); - const response = this.result.metadata.?.response; - switch (this.callback) { - .stat => |callback| { - switch (response.status_code) { - 404 => { - callback(.{ .not_found = {} }, this.callback_context); - }, - 200 => { - callback(.{ - .success = .{ - .etag = response.headers.get("etag") orelse "", - .size = if (response.headers.get("content-length")) |content_len| (std.fmt.parseInt(usize, content_len, 10) catch 0) else 0, - }, - }, this.callback_context); - }, - else => { - this.fail(); - }, - } - }, - .delete => |callback| { - switch (response.status_code) { - 404 => { - callback(.{ .not_found = {} }, this.callback_context); - }, - 200, 204 => { - callback(.{ .success = {} }, this.callback_context); - }, - else => { - this.fail(); - }, - } - }, - .upload => |callback| { - switch (response.status_code) { - 200 => { - callback(.{ .success = {} }, this.callback_context); - }, - else => { - this.fail(); - }, - } - }, - .download => |callback| { - switch (response.status_code) { - 404 => { - callback(.{ .not_found = {} }, this.callback_context); - }, - 200, 204, 206 => { - const body = this.response_buffer; - this.response_buffer = .{ - .allocator = bun.default_allocator, - .list = .{ - .items = &.{}, - .capacity = 0, - }, - }; - callback(.{ - .success = .{ - .etag = response.headers.get("etag") orelse "", - .body = body, - }, - }, this.callback_context); - }, - else => { - //error - this.fail(); - }, - } - }, - .commit => |callback| { - // commit multipart upload can fail with status 200 - if (!this.failIfContainsError(response.status_code)) { - callback(.{ .success = {} },
this.callback_context); - } - }, - .part => |callback| { - if (!this.failIfContainsError(response.status_code)) { - if (response.headers.get("etag")) |etag| { - callback(.{ .etag = etag }, this.callback_context); - } else { - this.fail(); - } - } - }, - } - } - - pub fn http_callback(this: *@This(), async_http: *bun.http.AsyncHTTP, result: bun.http.HTTPClientResult) void { - const is_done = !result.has_more; - this.result = result; - this.http = async_http.*; - this.response_buffer = async_http.response_buffer.*; - if (is_done) { - this.vm.eventLoop().enqueueTaskConcurrent(this.concurrent_task.from(this, .manual_deinit)); - } - } - }; - - pub const S3HttpDownloadStreamingTask = struct { - http: bun.http.AsyncHTTP, - vm: *JSC.VirtualMachine, - sign_result: SignResult, - headers: JSC.WebCore.Headers, - callback_context: *anyopaque, - // this transfers ownership from the chunk - callback: *const fn (chunk: bun.MutableString, has_more: bool, err: ?S3Error, *anyopaque) void, - has_schedule_callback: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), - signal_store: bun.http.Signals.Store = .{}, - signals: bun.http.Signals = .{}, - poll_ref: bun.Async.KeepAlive = bun.Async.KeepAlive.init(), - - response_buffer: bun.MutableString = .{ - .allocator = bun.default_allocator, - .list = .{ - .items = &.{}, - .capacity = 0, - }, - }, - reported_response_lock: bun.Lock = .{}, - reported_response_buffer: bun.MutableString = .{ - .allocator = bun.default_allocator, - .list = .{ - .items = &.{}, - .capacity = 0, - }, - }, - state: State.AtomicType = State.AtomicType.init(0), - - concurrent_task: JSC.ConcurrentTask = .{}, - range: ?[]const u8, - proxy_url: []const u8, - - usingnamespace bun.New(@This()); - pub const State = packed struct(u64) { - pub const AtomicType = std.atomic.Value(u64); - status_code: u32 = 0, - request_error: u16 = 0, - has_more: bool = false, - _reserved: u15 = 0, - }; - - pub fn getState(this: @This()) State { - const state: State = @bitCast(this.state.load(.acquire)); - return state; - } - - pub fn setState(this: *@This(), state: State) void { - this.state.store(@bitCast(state), .monotonic); - } - - pub fn deinit(this: *@This()) void { - this.poll_ref.unref(this.vm); - this.response_buffer.deinit(); - this.reported_response_buffer.deinit(); - this.headers.deinit(); - this.sign_result.deinit(); - this.http.clearData(); - if (this.range) |range| { - bun.default_allocator.free(range); - } - if (this.proxy_url.len > 0) { - bun.default_allocator.free(this.proxy_url); - } - - this.destroy(); - } - - fn reportProgress(this: *@This()) bool { - var has_more = true; - var err: ?S3Error = null; - var failed = false; - this.reported_response_lock.lock(); - defer this.reported_response_lock.unlock(); - const chunk = brk: { - const state = this.getState(); - has_more = state.has_more; - switch (state.status_code) { - 200, 204, 206 => { - failed = state.request_error != 0; - }, - else => { - failed = true; - }, - } - if (failed) { - if (!has_more) { - var has_body_code = false; - var has_body_message = false; - - var code: []const u8 = "UnknownError"; - var message: []const u8 = "an unexpected error has occurred"; - if (state.request_error != 0) { - const req_err = @errorFromInt(state.request_error); - code = @errorName(req_err); - has_body_code = true; - } else { - const bytes = this.reported_response_buffer.list.items; - if (bytes.len > 0) { - message = bytes[0..]; - - if (strings.indexOf(bytes, "<Code>")) |start| { - if (strings.indexOf(bytes, "</Code>")) |end| { - code = bytes[start + "<Code>".len .. 
end]; - has_body_code = true; - } - } - if (strings.indexOf(bytes, "<Message>")) |start| { - if (strings.indexOf(bytes, "</Message>")) |end| { - message = bytes[start + "<Message>".len .. end]; - has_body_message = true; - } - } - } - } - if (state.status_code == 404) { - if (!has_body_code) { - code = "FileNotFound"; - } - if (!has_body_message) { - message = "File not found"; - } - } - err = .{ - .code = code, - .message = message, - }; - } - break :brk bun.MutableString{ .allocator = bun.default_allocator, .list = .{} }; - } else { - const buffer = this.reported_response_buffer; - break :brk buffer; - } - }; - log("reportProgres failed: {} has_more: {} len: {d}", .{ failed, has_more, chunk.list.items.len }); - if (failed) { - if (!has_more) { - this.callback(chunk, false, err, this.callback_context); - } - } else { - // dont report empty chunks if we have more data to read - if (!has_more or chunk.list.items.len > 0) { - this.callback(chunk, has_more, null, this.callback_context); - this.reported_response_buffer.reset(); - } - } - - return has_more; - } - - pub fn onResponse(this: *@This()) void { - this.has_schedule_callback.store(false, .monotonic); - const has_more = this.reportProgress(); - if (!has_more) this.deinit(); - } - - pub fn http_callback(this: *@This(), async_http: *bun.http.AsyncHTTP, result: bun.http.HTTPClientResult) void { - const is_done = !result.has_more; - var state = this.getState(); - - var wait_until_done = false; - { - state.has_more = !is_done; - - state.request_error = if (result.fail) |err| @intFromError(err) else 0; - if (state.status_code == 0) { - if (result.certificate_info) |*certificate| { - certificate.deinit(bun.default_allocator); - } - if (result.metadata) |m| { - var metadata = m; - state.status_code = metadata.response.status_code; - metadata.deinit(bun.default_allocator); - } - } - switch (state.status_code) { - 200, 204, 206 => wait_until_done = state.request_error != 0, - else => wait_until_done = true, - } - this.setState(state); - this.http = async_http.*; - } - // if we got a error or fail wait until we are done buffering the response body to report - const should_enqueue = !wait_until_done or is_done; - log("state err: {} status_code: {} has_more: {} should_enqueue: {}", .{ state.request_error, state.status_code, state.has_more, should_enqueue }); - if (should_enqueue) { - if (result.body) |body| { - this.reported_response_lock.lock(); - defer this.reported_response_lock.unlock(); - this.response_buffer = body.*; - if (body.list.items.len > 0) { - _ = this.reported_response_buffer.write(body.list.items) catch bun.outOfMemory(); - } - this.response_buffer.reset(); - if (this.reported_response_buffer.list.items.len == 0 and !is_done) { - return; - } - } else if (!is_done) { - return; - } - if (this.has_schedule_callback.cmpxchgStrong(false, true, .acquire, .monotonic)) |has_schedule_callback| { - if (has_schedule_callback) { - return; - } - } - this.vm.eventLoop().enqueueTaskConcurrent(this.concurrent_task.from(this, .manual_deinit)); - } - } - }; - - pub const S3SimpleRequestOptions = struct { - // signing options - path: []const u8, - method: bun.http.Method, - search_params: ?[]const u8 = null, - content_type: ?[]const u8 = null, - content_disposition: ?[]const u8 = null, - - // http request options - body: []const u8, - proxy_url: ?[]const u8 = null, - range: ?[]const u8 = null, - }; - - pub fn executeSimpleS3Request( - this: *const @This(), - options: S3SimpleRequestOptions, - callback: S3HttpSimpleTask.Callback, - callback_context: *anyopaque, - ) void { - var
result = this.signRequest(.{ - .path = options.path, - .method = options.method, - .search_params = options.search_params, - .content_disposition = options.content_disposition, - }, null) catch |sign_err| { - if (options.range) |range_| bun.default_allocator.free(range_); - const error_code_and_message = getSignErrorCodeAndMessage(sign_err); - callback.fail(error_code_and_message.code, error_code_and_message.message, callback_context); - return; - }; - - const headers = brk: { - if (options.range) |range_| { - const _headers = result.headers(); - var headersWithRange: [5]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - .{ .name = "range", .value = range_ }, - }; - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(&headersWithRange, bun.default_allocator) catch bun.outOfMemory(); - } else { - if (options.content_type) |content_type| { - if (content_type.len > 0) { - const _headers = result.headers(); - if (_headers.len > 4) { - var headersWithContentType: [6]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - _headers[4], - .{ .name = "Content-Type", .value = content_type }, - }; - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(&headersWithContentType, bun.default_allocator) catch bun.outOfMemory(); - } - - var headersWithContentType: [5]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - .{ .name = "Content-Type", .value = content_type }, - }; - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(&headersWithContentType, bun.default_allocator) catch bun.outOfMemory(); - } - } - - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); - } - }; - const task = S3HttpSimpleTask.new(.{ - .http = undefined, - .sign_result = result, - .callback_context = callback_context, - .callback = callback, - .range = options.range, - .headers = headers, - .vm = JSC.VirtualMachine.get(), - }); - task.poll_ref.ref(task.vm); - - const url = bun.URL.parse(result.url); - const proxy = options.proxy_url orelse ""; - task.http = bun.http.AsyncHTTP.init( - bun.default_allocator, - options.method, - url, - task.headers.entries, - task.headers.buf.items, - &task.response_buffer, - options.body, - bun.http.HTTPClientResult.Callback.New( - *S3HttpSimpleTask, - S3HttpSimpleTask.http_callback, - ).init(task), - .follow, - .{ - .http_proxy = if (proxy.len > 0) bun.URL.parse(proxy) else null, - .verbose = .none, - .reject_unauthorized = task.vm.getTLSRejectUnauthorized(), - }, - ); - // queue http request - bun.http.HTTPThread.init(&.{}); - var batch = bun.ThreadPool.Batch{}; - task.http.schedule(bun.default_allocator, &batch); - bun.http.http_thread.schedule(batch); - } - - pub fn s3Stat(this: *const @This(), path: []const u8, callback: *const fn (S3StatResult, *anyopaque) void, callback_context: *anyopaque, proxy_url: ?[]const u8) void { - this.executeSimpleS3Request(.{ - .path = path, - .method = .HEAD, - .proxy_url = proxy_url, - .body = "", - }, .{ .stat = callback }, callback_context); - } - - pub fn s3Download(this: *const @This(), path: []const u8, callback: *const fn (S3DownloadResult, *anyopaque) void, callback_context: *anyopaque, proxy_url: ?[]const u8) void { - this.executeSimpleS3Request(.{ - .path = path, - .method = .GET, - .proxy_url = proxy_url, - .body = "", - }, .{ .download = callback }, callback_context); - } - - pub fn s3DownloadSlice(this: *const @This(), path: []const u8, offset: usize, size: ?usize, callback: *const fn (S3DownloadResult, 
*anyopaque) void, callback_context: *anyopaque, proxy_url: ?[]const u8) void { - const range = brk: { - if (size) |size_| { - if (offset == 0) break :brk null; - - var end = (offset + size_); - if (size_ > 0) { - end -= 1; - } - break :brk std.fmt.allocPrint(bun.default_allocator, "bytes={}-{}", .{ offset, end }) catch bun.outOfMemory(); - } - if (offset == 0) break :brk null; - break :brk std.fmt.allocPrint(bun.default_allocator, "bytes={}-", .{offset}) catch bun.outOfMemory(); - }; - - this.executeSimpleS3Request(.{ - .path = path, - .method = .GET, - .proxy_url = proxy_url, - .body = "", - .range = range, - }, .{ .download = callback }, callback_context); - } - - pub fn s3StreamDownload(this: *@This(), path: []const u8, offset: usize, size: ?usize, proxy_url: ?[]const u8, callback: *const fn (chunk: bun.MutableString, has_more: bool, err: ?S3Error, *anyopaque) void, callback_context: *anyopaque) void { - const range = brk: { - if (size) |size_| { - if (offset == 0) break :brk null; - - var end = (offset + size_); - if (size_ > 0) { - end -= 1; - } - break :brk std.fmt.allocPrint(bun.default_allocator, "bytes={}-{}", .{ offset, end }) catch bun.outOfMemory(); - } - if (offset == 0) break :brk null; - break :brk std.fmt.allocPrint(bun.default_allocator, "bytes={}-", .{offset}) catch bun.outOfMemory(); - }; - - var result = this.signRequest(.{ - .path = path, - .method = .GET, - }, null) catch |sign_err| { - if (range) |range_| bun.default_allocator.free(range_); - const error_code_and_message = getSignErrorCodeAndMessage(sign_err); - callback(.{ .allocator = bun.default_allocator, .list = .{} }, false, .{ .code = error_code_and_message.code, .message = error_code_and_message.message }, callback_context); - return; - }; - - const headers = brk: { - if (range) |range_| { - const _headers = result.headers(); - var headersWithRange: [5]picohttp.Header = .{ - _headers[0], - _headers[1], - _headers[2], - _headers[3], - .{ .name = "range", .value = range_ }, - }; - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(&headersWithRange, bun.default_allocator) catch bun.outOfMemory(); - } else { - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); - } - }; - const proxy = proxy_url orelse ""; - const owned_proxy = if (proxy.len > 0) bun.default_allocator.dupe(u8, proxy) catch bun.outOfMemory() else ""; - const task = S3HttpDownloadStreamingTask.new(.{ - .http = undefined, - .sign_result = result, - .proxy_url = owned_proxy, - .callback_context = callback_context, - .callback = callback, - .range = range, - .headers = headers, - .vm = JSC.VirtualMachine.get(), - }); - task.poll_ref.ref(task.vm); - - const url = bun.URL.parse(result.url); - - task.signals = task.signal_store.to(); - - task.http = bun.http.AsyncHTTP.init( - bun.default_allocator, - .GET, - url, - task.headers.entries, - task.headers.buf.items, - &task.response_buffer, - "", - bun.http.HTTPClientResult.Callback.New( - *S3HttpDownloadStreamingTask, - S3HttpDownloadStreamingTask.http_callback, - ).init(task), - .follow, - .{ - .http_proxy = if (owned_proxy.len > 0) bun.URL.parse(owned_proxy) else null, - .verbose = .none, - .signals = task.signals, - .reject_unauthorized = task.vm.getTLSRejectUnauthorized(), - }, - ); - // enable streaming - task.http.enableBodyStreaming(); - // queue http request - bun.http.HTTPThread.init(&.{}); - var batch = bun.ThreadPool.Batch{}; - task.http.schedule(bun.default_allocator, &batch); - bun.http.http_thread.schedule(batch); - } - - pub fn 
s3ReadableStream(this: *@This(), path: []const u8, offset: usize, size: ?usize, proxy_url: ?[]const u8, globalThis: *JSC.JSGlobalObject) JSC.JSValue { - var reader = JSC.WebCore.ByteStream.Source.new(.{ - .context = undefined, - .globalThis = globalThis, - }); - - reader.context.setup(); - const readable_value = reader.toReadableStream(globalThis); - - this.s3StreamDownload(path, offset, size, proxy_url, @ptrCast(&S3DownloadStreamWrapper.callback), S3DownloadStreamWrapper.new(.{ - .readable_stream_ref = JSC.WebCore.ReadableStream.Strong.init(.{ - .ptr = .{ .Bytes = &reader.context }, - .value = readable_value, - }, globalThis), - })); - return readable_value; - } - - const S3DownloadStreamWrapper = struct { - readable_stream_ref: JSC.WebCore.ReadableStream.Strong, - pub usingnamespace bun.New(@This()); - - pub fn callback(chunk: bun.MutableString, has_more: bool, request_err: ?S3Error, this: *@This()) void { - defer if (!has_more) this.deinit(); - - if (this.readable_stream_ref.get()) |readable| { - if (readable.ptr == .Bytes) { - const globalThis = this.readable_stream_ref.globalThis().?; - - if (request_err) |err| { - log("S3DownloadStreamWrapper.callback .temporary", .{}); - - readable.ptr.Bytes.onData( - .{ - .err = .{ .JSValue = err.toJS(globalThis) }, - }, - bun.default_allocator, - ); - return; - } - if (has_more) { - log("S3DownloadStreamWrapper.callback .temporary", .{}); - - readable.ptr.Bytes.onData( - .{ - .temporary = bun.ByteList.initConst(chunk.list.items), - }, - bun.default_allocator, - ); - return; - } - log("S3DownloadStreamWrapper.callback .temporary_and_done", .{}); - - readable.ptr.Bytes.onData( - .{ - .temporary_and_done = bun.ByteList.initConst(chunk.list.items), - }, - bun.default_allocator, - ); - return; - } - } - log("S3DownloadStreamWrapper.callback invalid readable stream", .{}); - } - - pub fn deinit(this: *@This()) void { - this.readable_stream_ref.deinit(); - this.destroy(); - } - }; - - pub fn s3Delete(this: *const @This(), path: []const u8, callback: *const fn (S3DeleteResult, *anyopaque) void, callback_context: *anyopaque, proxy_url: ?[]const u8) void { - this.executeSimpleS3Request(.{ - .path = path, - .method = .DELETE, - .proxy_url = proxy_url, - .body = "", - }, .{ .delete = callback }, callback_context); - } - - pub fn s3Upload(this: *const @This(), path: []const u8, content: []const u8, content_type: ?[]const u8, proxy_url: ?[]const u8, callback: *const fn (S3UploadResult, *anyopaque) void, callback_context: *anyopaque) void { - this.executeSimpleS3Request(.{ - .path = path, - .method = .PUT, - .proxy_url = proxy_url, - .body = content, - .content_type = content_type, - }, .{ .upload = callback }, callback_context); - } - - const S3UploadStreamWrapper = struct { - readable_stream_ref: JSC.WebCore.ReadableStream.Strong, - sink: *JSC.WebCore.FetchTaskletChunkedRequestSink, - callback: ?*const fn (S3UploadResult, *anyopaque) void, - callback_context: *anyopaque, - ref_count: u32 = 1, - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); - pub fn resolve(result: S3UploadResult, self: *@This()) void { - const sink = self.sink; - defer self.deref(); - - if (sink.endPromise.globalObject()) |globalObject| { - switch (result) { - .success => sink.endPromise.resolve(globalObject, JSC.jsNumber(0)), - .failure => |err| { - if (!sink.done) { - sink.abort(); - return; - } - sink.endPromise.rejectOnNextTick(globalObject, err.toJS(globalObject)); - }, - } - } - if (self.callback) |callback| { - callback(result, self.callback_context); - } - } - - pub fn 
deinit(self: *@This()) void { - self.readable_stream_ref.deinit(); - self.sink.finalize(); - self.sink.destroy(); - self.destroy(); - } - }; - pub fn onUploadStreamResolveRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - var args = callframe.arguments_old(2); - var this = args.ptr[args.len - 1].asPromisePtr(S3UploadStreamWrapper); - defer this.deref(); - if (this.sink.endPromise.hasValue()) { - this.sink.endPromise.resolve(globalThis, JSC.jsNumber(0)); - } - if (this.readable_stream_ref.get()) |stream| { - stream.done(globalThis); - } - this.readable_stream_ref.deinit(); - - return .undefined; - } - - pub fn onUploadStreamRejectRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const args = callframe.arguments_old(2); - var this = args.ptr[args.len - 1].asPromisePtr(S3UploadStreamWrapper); - defer this.deref(); - const err = args.ptr[0]; - if (this.sink.endPromise.hasValue()) { - this.sink.endPromise.rejectOnNextTick(globalThis, err); - } - - if (this.readable_stream_ref.get()) |stream| { - stream.cancel(globalThis); - this.readable_stream_ref.deinit(); - } - if (this.sink.task) |task| { - if (task == .s3_upload) { - task.s3_upload.fail(.{ - .code = "UnknownError", - .message = "ReadableStream ended with an error", - }); - } - } - return .undefined; - } - pub const shim = JSC.Shimmer("Bun", "S3UploadStream", @This()); - - pub const Export = shim.exportFunctions(.{ - .onResolveRequestStream = onUploadStreamResolveRequestStream, - .onRejectRequestStream = onUploadStreamRejectRequestStream, - }); - comptime { - const jsonResolveRequestStream = JSC.toJSHostFunction(onUploadStreamResolveRequestStream); - @export(jsonResolveRequestStream, .{ .name = Export[0].symbol_name }); - const jsonRejectRequestStream = JSC.toJSHostFunction(onUploadStreamRejectRequestStream); - @export(jsonRejectRequestStream, .{ .name = Export[1].symbol_name }); - } - - /// consumes the readable stream and upload to s3 - pub fn s3UploadStream(this: *@This(), path: []const u8, readable_stream: JSC.WebCore.ReadableStream, globalThis: *JSC.JSGlobalObject, options: MultiPartUpload.MultiPartUploadOptions, content_type: ?[]const u8, proxy: ?[]const u8, callback: ?*const fn (S3UploadResult, *anyopaque) void, callback_context: *anyopaque) JSC.JSValue { - this.ref(); // ref the credentials - const proxy_url = (proxy orelse ""); - - const task = MultiPartUpload.new(.{ - .credentials = this, - .path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(), - .proxy = if (proxy_url.len > 0) bun.default_allocator.dupe(u8, proxy_url) catch bun.outOfMemory() else "", - .content_type = if (content_type) |ct| bun.default_allocator.dupe(u8, ct) catch bun.outOfMemory() else null, - .callback = @ptrCast(&S3UploadStreamWrapper.resolve), - .callback_context = undefined, - .globalThis = globalThis, - .options = options, - .vm = JSC.VirtualMachine.get(), - }); - - task.poll_ref.ref(task.vm); - - task.ref(); // + 1 for the stream - - var response_stream = JSC.WebCore.FetchTaskletChunkedRequestSink.new(.{ - .task = .{ .s3_upload = task }, - .buffer = .{}, - .globalThis = globalThis, - .encoded = false, - .endPromise = JSC.JSPromise.Strong.init(globalThis), - }).toSink(); - const endPromise = response_stream.sink.endPromise.value(); - const ctx = S3UploadStreamWrapper.new(.{ - .readable_stream_ref = JSC.WebCore.ReadableStream.Strong.init(readable_stream, globalThis), - .sink = &response_stream.sink, - .callback = callback, - .callback_context 
= callback_context, - }); - task.callback_context = @ptrCast(ctx); - var signal = &response_stream.sink.signal; - - signal.* = JSC.WebCore.FetchTaskletChunkedRequestSink.JSSink.SinkSignal.init(.zero); - - // explicitly set it to a dead pointer - // we use this memory address to disable signals being sent - signal.clear(); - bun.assert(signal.isDead()); - - // We are already corked! - const assignment_result: JSC.JSValue = JSC.WebCore.FetchTaskletChunkedRequestSink.JSSink.assignToStream( - globalThis, - readable_stream.value, - response_stream, - @as(**anyopaque, @ptrCast(&signal.ptr)), - ); - - assignment_result.ensureStillAlive(); - - // assert that it was updated - bun.assert(!signal.isDead()); - - if (assignment_result.toError()) |err| { - readable_stream.cancel(globalThis); - if (response_stream.sink.endPromise.hasValue()) { - response_stream.sink.endPromise.rejectOnNextTick(globalThis, err); - } - task.fail(.{ - .code = "UnknownError", - .message = "ReadableStream ended with an error", - }); - return endPromise; - } - - if (!assignment_result.isEmptyOrUndefinedOrNull()) { - task.vm.drainMicrotasks(); - - assignment_result.ensureStillAlive(); - // it returns a Promise when it goes through ReadableStreamDefaultReader - if (assignment_result.asAnyPromise()) |promise| { - switch (promise.status(globalThis.vm())) { - .pending => { - ctx.ref(); - assignment_result.then( - globalThis, - task.callback_context, - onUploadStreamResolveRequestStream, - onUploadStreamRejectRequestStream, - ); - }, - .fulfilled => { - readable_stream.done(globalThis); - if (response_stream.sink.endPromise.hasValue()) { - response_stream.sink.endPromise.resolve(globalThis, JSC.jsNumber(0)); - } - }, - .rejected => { - readable_stream.cancel(globalThis); - if (response_stream.sink.endPromise.hasValue()) { - response_stream.sink.endPromise.rejectOnNextTick(globalThis, promise.result(globalThis.vm())); - } - task.fail(.{ - .code = "UnknownError", - .message = "ReadableStream ended with an error", - }); - }, - } - } else { - readable_stream.cancel(globalThis); - if (response_stream.sink.endPromise.hasValue()) { - response_stream.sink.endPromise.rejectOnNextTick(globalThis, assignment_result); - } - task.fail(.{ - .code = "UnknownError", - .message = "ReadableStream ended with an error", - }); - } - } - return endPromise; - } - /// returns a writable stream that writes to the s3 path - pub fn s3WritableStream(this: *@This(), path: []const u8, globalThis: *JSC.JSGlobalObject, options: MultiPartUpload.MultiPartUploadOptions, content_type: ?[]const u8, proxy: ?[]const u8) bun.JSError!JSC.JSValue { - const Wrapper = struct { - pub fn callback(result: S3UploadResult, sink: *JSC.WebCore.FetchTaskletChunkedRequestSink) void { - if (sink.endPromise.globalObject()) |globalObject| { - const event_loop = globalObject.bunVM().eventLoop(); - event_loop.enter(); - defer event_loop.exit(); - switch (result) { - .success => { - sink.endPromise.resolve(globalObject, JSC.jsNumber(0)); - }, - .failure => |err| { - if (!sink.done) { - sink.abort(); - return; - } - - sink.endPromise.rejectOnNextTick(globalObject, err.toJS(globalObject)); - }, - } - } - sink.finalize(); - } - }; - const proxy_url = (proxy orelse ""); - this.ref(); // ref the credentials - const task = MultiPartUpload.new(.{ - .credentials = this, - .path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(), - .proxy = if (proxy_url.len > 0) bun.default_allocator.dupe(u8, proxy_url) catch bun.outOfMemory() else "", - .content_type = if (content_type) |ct| 
bun.default_allocator.dupe(u8, ct) catch bun.outOfMemory() else null, - - .callback = @ptrCast(&Wrapper.callback), - .callback_context = undefined, - .globalThis = globalThis, - .options = options, - .vm = JSC.VirtualMachine.get(), - }); - - task.poll_ref.ref(task.vm); - - task.ref(); // + 1 for the stream - var response_stream = JSC.WebCore.FetchTaskletChunkedRequestSink.new(.{ - .task = .{ .s3_upload = task }, - .buffer = .{}, - .globalThis = globalThis, - .encoded = false, - .endPromise = JSC.JSPromise.Strong.init(globalThis), - }).toSink(); - - task.callback_context = @ptrCast(response_stream); - var signal = &response_stream.sink.signal; - - signal.* = JSC.WebCore.FetchTaskletChunkedRequestSink.JSSink.SinkSignal.init(.zero); - - // explicitly set it to a dead pointer - // we use this memory address to disable signals being sent - signal.clear(); - bun.assert(signal.isDead()); - return response_stream.sink.toJS(globalThis); - } -}; - -pub const MultiPartUpload = struct { - pub const OneMiB: usize = 1048576; - pub const MAX_SINGLE_UPLOAD_SIZE_IN_MiB: usize = 5120; // we limit to 5 GiB - pub const MAX_SINGLE_UPLOAD_SIZE: usize = MAX_SINGLE_UPLOAD_SIZE_IN_MiB * OneMiB; // we limit to 5 GiB - pub const MIN_SINGLE_UPLOAD_SIZE_IN_MiB: usize = 5; - pub const DefaultPartSize = OneMiB * MIN_SINGLE_UPLOAD_SIZE_IN_MiB; - const MAX_QUEUE_SIZE = 64; // dont make sense more than this because we use fetch anything greater will be 64 - const AWS = AWSCredentials; - queue: std.ArrayListUnmanaged(UploadPart) = .{}, - available: bun.bit_set.IntegerBitSet(MAX_QUEUE_SIZE) = bun.bit_set.IntegerBitSet(MAX_QUEUE_SIZE).initFull(), - - currentPartNumber: u16 = 1, - ref_count: u16 = 1, - ended: bool = false, - - options: MultiPartUploadOptions = .{}, - credentials: *AWSCredentials, - poll_ref: bun.Async.KeepAlive = bun.Async.KeepAlive.init(), - vm: *JSC.VirtualMachine, - globalThis: *JSC.JSGlobalObject, - - buffered: std.ArrayListUnmanaged(u8) = .{}, - offset: usize = 0, - - path: []const u8, - proxy: []const u8, - content_type: ?[]const u8 = null, - upload_id: []const u8 = "", - uploadid_buffer: bun.MutableString = .{ .allocator = bun.default_allocator, .list = .{} }, - - multipart_etags: std.ArrayListUnmanaged(UploadPart.UploadPartResult) = .{}, - multipart_upload_list: bun.ByteList = .{}, - - state: enum { - not_started, - multipart_started, - multipart_completed, - singlefile_started, - finished, - } = .not_started, - - callback: *const fn (AWS.S3UploadResult, *anyopaque) void, - callback_context: *anyopaque, - - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); - - const log = bun.Output.scoped(.S3MultiPartUpload, true); - pub const MultiPartUploadOptions = struct { - /// more than 255 dont make sense http thread cannot handle more than that - queueSize: u8 = 5, - /// in s3 client sdk they set it in bytes but the min is still 5 MiB - /// var params = {Bucket: 'bucket', Key: 'key', Body: stream}; - /// var options = {partSize: 10 * 1024 * 1024, queueSize: 1}; - /// s3.upload(params, options, function(err, data) { - /// console.log(err, data); - /// }); - /// See. 
https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property - /// The value is in MiB min is 5 and max 5120 (but we limit to 4 GiB aka 4096) - partSize: u16 = 5, - /// default is 3 max 255 - retry: u8 = 3, - }; - - pub const UploadPart = struct { - data: []const u8, - state: enum { - pending, - started, - completed, - canceled, - }, - owns_data: bool, - partNumber: u16, // max is 10,000 - retry: u8, // auto retry, decrement until 0 and fail after this - index: u8, - ctx: *MultiPartUpload, - - pub const UploadPartResult = struct { - number: u16, - etag: []const u8, - }; - fn sortEtags(_: *MultiPartUpload, a: UploadPart.UploadPartResult, b: UploadPart.UploadPartResult) bool { - return a.number < b.number; - } - - pub fn onPartResponse(result: AWS.S3PartResult, this: *@This()) void { - if (this.state == .canceled) { - log("onPartResponse {} canceled", .{this.partNumber}); - if (this.owns_data) bun.default_allocator.free(this.data); - this.ctx.deref(); - return; - } - - this.state = .completed; - - switch (result) { - .failure => |err| { - if (this.retry > 0) { - log("onPartResponse {} retry", .{this.partNumber}); - this.retry -= 1; - // retry failed - this.perform(); - return; - } else { - log("onPartResponse {} failed", .{this.partNumber}); - if (this.owns_data) bun.default_allocator.free(this.data); - defer this.ctx.deref(); - return this.ctx.fail(err); - } - }, - .etag => |etag| { - log("onPartResponse {} success", .{this.partNumber}); - - if (this.owns_data) bun.default_allocator.free(this.data); - // we will need to order this - this.ctx.multipart_etags.append(bun.default_allocator, .{ - .number = this.partNumber, - .etag = bun.default_allocator.dupe(u8, etag) catch bun.outOfMemory(), - }) catch bun.outOfMemory(); - - defer this.ctx.deref(); - // mark as available - this.ctx.available.set(this.index); - // drain more - this.ctx.drainEnqueuedParts(); - }, - } - } - - fn perform(this: *@This()) void { - var params_buffer: [2048]u8 = undefined; - const search_params = std.fmt.bufPrint(&params_buffer, "?partNumber={}&uploadId={s}&x-id=UploadPart", .{ - this.partNumber, - this.ctx.upload_id, - }) catch unreachable; - this.ctx.credentials.executeSimpleS3Request(.{ - .path = this.ctx.path, - .method = .PUT, - .proxy_url = this.ctx.proxyUrl(), - .body = this.data, - .search_params = search_params, - }, .{ .part = @ptrCast(&onPartResponse) }, this); - } - pub fn start(this: *@This()) void { - if (this.state != .pending or this.ctx.state != .multipart_completed) return; - this.ctx.ref(); - this.state = .started; - this.perform(); - } - pub fn cancel(this: *@This()) void { - const state = this.state; - this.state = .canceled; - - switch (state) { - .pending => { - if (this.owns_data) bun.default_allocator.free(this.data); - }, - // if is not pending we will free later or is already freed - else => {}, - } - } - }; - - fn deinit(this: *@This()) void { - log("deinit", .{}); - if (this.queue.capacity > 0) - this.queue.deinit(bun.default_allocator); - this.poll_ref.unref(this.vm); - bun.default_allocator.free(this.path); - if (this.proxy.len > 0) { - bun.default_allocator.free(this.proxy); - } - if (this.content_type) |ct| { - if (ct.len > 0) { - bun.default_allocator.free(ct); - } - } - this.credentials.deref(); - this.uploadid_buffer.deinit(); - for (this.multipart_etags.items) |tag| { - bun.default_allocator.free(tag.etag); - } - if (this.multipart_etags.capacity > 0) - this.multipart_etags.deinit(bun.default_allocator); - if (this.multipart_upload_list.cap > 0) -
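The options above encode S3's multipart limits: parts are sized in MiB with a 5 MiB floor, a 5120 MiB (5 GiB) per-part ceiling, and at most 10,000 parts per upload, so a very large payload forces a larger part size. A minimal, std-only sketch of that arithmetic; the function name and the clamping helper are illustrative, not part of this patch:

const std = @import("std");

const one_mib: usize = 1024 * 1024;
const min_part_mib: usize = 5; // S3 floor (all parts except the last)
const max_part_mib: usize = 5120; // S3 ceiling, 5 GiB per part
const max_parts: usize = 10_000; // S3 hard limit on part count

// Clamp a requested part size into S3's window; if the payload length is
// known, grow the part size so the whole upload fits in 10,000 parts.
fn effectivePartSize(requested_mib: usize, total_len: ?usize) usize {
    var part = std.math.clamp(requested_mib, min_part_mib, max_part_mib) * one_mib;
    if (total_len) |len| {
        const smallest_that_fits = std.math.divCeil(usize, len, max_parts) catch unreachable;
        if (smallest_that_fits > part) part = smallest_that_fits;
    }
    return part;
}

test "default 5 MiB parts cover a 1 GiB body in 205 parts" {
    const part = effectivePartSize(5, 1024 * one_mib);
    try std.testing.expectEqual(@as(usize, 5 * one_mib), part);
    const parts = std.math.divCeil(usize, 1024 * one_mib, part) catch unreachable;
    try std.testing.expectEqual(@as(usize, 205), parts);
}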
this.multipart_upload_list.deinitWithAllocator(bun.default_allocator); - this.destroy(); - } - - pub fn singleSendUploadResponse(result: AWS.S3UploadResult, this: *@This()) void { - switch (result) { - .failure => |err| { - if (this.options.retry > 0) { - log("singleSendUploadResponse {} retry", .{this.options.retry}); - this.options.retry -= 1; - // retry failed - this.credentials.executeSimpleS3Request(.{ - .path = this.path, - .method = .PUT, - .proxy_url = this.proxyUrl(), - .body = this.buffered.items, - .content_type = this.content_type, - }, .{ .upload = @ptrCast(&singleSendUploadResponse) }, this); - - return; - } else { - log("singleSendUploadResponse failed", .{}); - return this.fail(err); - } - }, - .success => { - log("singleSendUploadResponse success", .{}); - this.done(); - }, - } - } - - fn getCreatePart(this: *@This(), chunk: []const u8, owns_data: bool) ?*UploadPart { - const index = this.available.findFirstSet() orelse { - // this means that the queue is full and we cannot flush it - return null; - }; - - if (index >= this.options.queueSize) { - // ops too much concurrency wait more - return null; - } - this.available.unset(index); - defer this.currentPartNumber += 1; - - if (this.queue.items.len <= index) { - this.queue.append(bun.default_allocator, .{ - .data = chunk, - .partNumber = this.currentPartNumber, - .owns_data = owns_data, - .ctx = this, - .index = @truncate(index), - .retry = this.options.retry, - .state = .pending, - }) catch bun.outOfMemory(); - return &this.queue.items[index]; - } - this.queue.items[index] = .{ - .data = chunk, - .partNumber = this.currentPartNumber, - .owns_data = owns_data, - .ctx = this, - .index = @truncate(index), - .retry = this.options.retry, - .state = .pending, - }; - return &this.queue.items[index]; - } - - fn drainEnqueuedParts(this: *@This()) void { - // check pending to start or transformed buffered ones into tasks - if (this.state == .multipart_completed) { - for (this.queue.items) |*part| { - if (part.state == .pending) { - // lets start the part request - part.start(); - } - } - } - const partSize = this.partSizeInBytes(); - if (this.ended or this.buffered.items.len >= partSize) { - this.processMultiPart(partSize); - } - - if (this.ended and this.available.mask == std.bit_set.IntegerBitSet(MAX_QUEUE_SIZE).initFull().mask) { - // we are done - this.done(); - } - } - pub fn fail(this: *@This(), _err: AWS.S3Error) void { - log("fail {s}:{s}", .{ _err.code, _err.message }); - for (this.queue.items) |*task| { - task.cancel(); - } - if (this.state != .finished) { - this.callback(.{ .failure = _err }, this.callback_context); - this.state = .finished; - if (this.state == .multipart_completed) { - // will deref after rollback - this.rollbackMultiPartRequest(); - } else { - this.deref(); - } - } - } - - fn done(this: *@This()) void { - if (this.state == .multipart_completed) { - this.state = .finished; - - std.sort.block(UploadPart.UploadPartResult, this.multipart_etags.items, this, UploadPart.sortEtags); - this.multipart_upload_list.append(bun.default_allocator, "<CompleteMultipartUpload>") catch bun.outOfMemory(); - for (this.multipart_etags.items) |tag| { - this.multipart_upload_list.appendFmt(bun.default_allocator, "<Part><PartNumber>{}</PartNumber><ETag>{s}</ETag></Part>", .{ tag.number, tag.etag }) catch bun.outOfMemory(); - - bun.default_allocator.free(tag.etag); - } - this.multipart_etags.deinit(bun.default_allocator); - this.multipart_etags = .{}; - this.multipart_upload_list.append(bun.default_allocator, "</CompleteMultipartUpload>") catch bun.outOfMemory(); - // will deref and ends after commit -
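getCreatePart above treats a fixed-width IntegerBitSet as a pool of in-flight part slots: a set bit means "slot free", findFirstSet() hands out the lowest free index, and the done check compares the mask against a full set. A reduced sketch of that bookkeeping using std.bit_set directly; Slots and its method names are hypothetical (the count() check is equivalent to the mask comparison above):

const std = @import("std");

const max_queue = 64;

const Slots = struct {
    available: std.bit_set.IntegerBitSet(max_queue) = std.bit_set.IntegerBitSet(max_queue).initFull(),

    fn acquire(self: *@This(), limit: usize) ?usize {
        const index = self.available.findFirstSet() orelse return null; // all busy
        if (index >= limit) return null; // respect the configured queueSize
        self.available.unset(index); // mark busy
        return index;
    }

    fn release(self: *@This(), index: usize) void {
        self.available.set(index); // mark free again
    }

    fn allFree(self: *const @This()) bool {
        return self.available.count() == max_queue;
    }
};

test "slots are handed out lowest-first and can be reused" {
    var slots = Slots{};
    try std.testing.expectEqual(@as(?usize, 0), slots.acquire(4));
    try std.testing.expectEqual(@as(?usize, 1), slots.acquire(4));
    slots.release(0);
    try std.testing.expectEqual(@as(?usize, 0), slots.acquire(4));
    try std.testing.expect(!slots.allFree());
}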
this.commitMultiPartRequest(); - } else { - this.callback(.{ .success = {} }, this.callback_context); - this.state = .finished; - this.deref(); - } - } - pub fn startMultiPartRequestResult(result: AWS.S3DownloadResult, this: *@This()) void { - switch (result) { - .failure => |err| { - log("startMultiPartRequestResult {s} failed {s}: {s}", .{ this.path, err.message, err.message }); - this.fail(err); - }, - .success => |response| { - const slice = response.body.list.items; - this.uploadid_buffer = result.success.body; - - if (strings.indexOf(slice, "<UploadId>")) |start| { - if (strings.indexOf(slice, "</UploadId>")) |end| { - this.upload_id = slice[start + 10 .. end]; - } - } - if (this.upload_id.len == 0) { - // Unknown type of response error from AWS - log("startMultiPartRequestResult {s} failed invalid id", .{this.path}); - this.fail(.{ - .code = "UnknownError", - .message = "Failed to initiate multipart upload", - }); - return; - } - log("startMultiPartRequestResult {s} success id: {s}", .{ this.path, this.upload_id }); - this.state = .multipart_completed; - this.drainEnqueuedParts(); - }, - // this is "unreachable" but we cover in case AWS returns 404 - .not_found => this.fail(.{ - .code = "UnknownError", - .message = "Failed to initiate multipart upload", - }), - } - } - - pub fn onCommitMultiPartRequest(result: AWS.S3CommitResult, this: *@This()) void { - log("onCommitMultiPartRequest {s}", .{this.upload_id}); - switch (result) { - .failure => |err| { - if (this.options.retry > 0) { - this.options.retry -= 1; - // retry commit - this.commitMultiPartRequest(); - return; - } - this.callback(.{ .failure = err }, this.callback_context); - this.deref(); - }, - .success => { - this.callback(.{ .success = {} }, this.callback_context); - this.state = .finished; - this.deref(); - }, - } - } - - pub fn onRollbackMultiPartRequest(result: AWS.S3UploadResult, this: *@This()) void { - log("onRollbackMultiPartRequest {s}", .{this.upload_id}); - switch (result) { - .failure => { - if (this.options.retry > 0) { - this.options.retry -= 1; - // retry rollback - this.rollbackMultiPartRequest(); - return; - } - this.deref(); - }, - .success => { - this.deref(); - }, - } - } - - fn commitMultiPartRequest(this: *@This()) void { - log("commitMultiPartRequest {s}", .{this.upload_id}); - var params_buffer: [2048]u8 = undefined; - const searchParams = std.fmt.bufPrint(&params_buffer, "?uploadId={s}", .{ - this.upload_id, - }) catch unreachable; - - this.credentials.executeSimpleS3Request(.{ - .path = this.path, - .method = .POST, - .proxy_url = this.proxyUrl(), - .body = this.multipart_upload_list.slice(), - .search_params = searchParams, - }, .{ .commit = @ptrCast(&onCommitMultiPartRequest) }, this); - } - fn rollbackMultiPartRequest(this: *@This()) void { - log("rollbackMultiPartRequest {s}", .{this.upload_id}); - var params_buffer: [2048]u8 = undefined; - const search_params = std.fmt.bufPrint(&params_buffer, "?uploadId={s}", .{ - this.upload_id, - }) catch unreachable; - - this.credentials.executeSimpleS3Request(.{ - .path = this.path, - .method = .DELETE, - .proxy_url = this.proxyUrl(), - .body = "", - .search_params = search_params, - }, .{ .upload = @ptrCast(&onRollbackMultiPartRequest) }, this); - } - fn enqueuePart(this: *@This(), chunk: []const u8, owns_data: bool) bool { - const part = this.getCreatePart(chunk, owns_data) orelse return false; - - if (this.state == .not_started) { - // will auto start later - this.state = .multipart_started; - this.credentials.executeSimpleS3Request(.{ - .path = this.path, - .method = .POST, -
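Two plain-text details are worth calling out here: done() assembles the CompleteMultipartUpload body with parts sorted by part number, and startMultiPartRequestResult() pulls the upload id out of the InitiateMultipartUpload response by slicing between the <UploadId> tags (the `start + 10` is the length of "<UploadId>"), with no real XML parser. A self-contained sketch of both steps; the helper names are illustrative:

const std = @import("std");

const Part = struct { number: u16, etag: []const u8 };

fn partLessThan(_: void, a: Part, b: Part) bool {
    return a.number < b.number;
}

// Build the body the commit POST sends, parts ordered by part number.
fn buildCommitBody(allocator: std.mem.Allocator, parts: []Part) ![]u8 {
    std.sort.block(Part, parts, {}, partLessThan);
    var body = std.ArrayList(u8).init(allocator);
    errdefer body.deinit();
    try body.appendSlice("<CompleteMultipartUpload>");
    for (parts) |p| {
        try body.writer().print("<Part><PartNumber>{d}</PartNumber><ETag>{s}</ETag></Part>", .{ p.number, p.etag });
    }
    try body.appendSlice("</CompleteMultipartUpload>");
    return body.toOwnedSlice();
}

// Naive tag extraction in the spirit of the <UploadId> parsing above:
// slice between the opening and closing tags, no XML parsing.
fn extractUploadId(xml: []const u8) ?[]const u8 {
    const open = "<UploadId>";
    const start = std.mem.indexOf(u8, xml, open) orelse return null;
    const end = std.mem.indexOf(u8, xml, "</UploadId>") orelse return null;
    if (end < start + open.len) return null;
    return xml[start + open.len .. end];
}

test "commit body and upload id round trip" {
    var parts = [_]Part{ .{ .number = 2, .etag = "b" }, .{ .number = 1, .etag = "a" } };
    const body = try buildCommitBody(std.testing.allocator, &parts);
    defer std.testing.allocator.free(body);
    try std.testing.expect(std.mem.startsWith(u8, body, "<CompleteMultipartUpload><Part><PartNumber>1</PartNumber>"));
    try std.testing.expectEqualStrings("abc123", extractUploadId("<UploadId>abc123</UploadId>").?);
}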
.proxy_url = this.proxyUrl(), - .body = "", - .search_params = "?uploads=", - .content_type = this.content_type, - }, .{ .download = @ptrCast(&startMultiPartRequestResult) }, this); - } else if (this.state == .multipart_completed) { - part.start(); - } - return true; - } - - fn processMultiPart(this: *@This(), part_size: usize) void { - // need to split in multiple parts because of the size - var buffer = this.buffered.items[this.offset..]; - var queue_full = false; - defer if (!this.ended and queue_full == false) { - this.buffered = .{}; - this.offset = 0; - }; - - while (buffer.len > 0) { - const len = @min(part_size, buffer.len); - const slice = buffer[0..len]; - buffer = buffer[len..]; - // its one big buffer lets free after we are done with everything, part dont own the data - if (this.enqueuePart(slice, this.ended)) { - this.offset += len; - } else { - queue_full = true; - break; - } - } - } - - pub fn proxyUrl(this: *@This()) ?[]const u8 { - return this.proxy; - } - fn processBuffered(this: *@This(), part_size: usize) void { - if (this.ended and this.buffered.items.len < this.partSizeInBytes() and this.state == .not_started) { - log("processBuffered {s} singlefile_started", .{this.path}); - this.state = .singlefile_started; - // we can do only 1 request - this.credentials.executeSimpleS3Request(.{ - .path = this.path, - .method = .PUT, - .proxy_url = this.proxyUrl(), - .body = this.buffered.items, - .content_type = this.content_type, - }, .{ .upload = @ptrCast(&singleSendUploadResponse) }, this); - } else { - // we need to split - this.processMultiPart(part_size); - } - } - - pub fn partSizeInBytes(this: *@This()) usize { - return this.options.partSize * OneMiB; - } - - pub fn sendRequestData(this: *@This(), chunk: []const u8, is_last: bool) void { - if (this.ended) return; - - if (is_last) { - this.ended = true; - if (chunk.len > 0) { - this.buffered.appendSlice(bun.default_allocator, chunk) catch bun.outOfMemory(); - } - this.processBuffered(this.partSizeInBytes()); - } else { - // still have more data and receive empty, nothing todo here - if (chunk.len == 0) return; - this.buffered.appendSlice(bun.default_allocator, chunk) catch bun.outOfMemory(); - const partSize = this.partSizeInBytes(); - if (this.buffered.items.len >= partSize) { - // send the part we have enough data - this.processBuffered(partSize); - return; - } - - // wait for more - } - } -}; diff --git a/src/s3/acl.zig b/src/s3/acl.zig new file mode 100644 index 0000000000..2d69bed30d --- /dev/null +++ b/src/s3/acl.zig @@ -0,0 +1,43 @@ +const bun = @import("root").bun; + +pub const ACL = enum { + /// Owner gets FULL_CONTROL. No one else has access rights (default). + private, + /// Owner gets FULL_CONTROL. The AllUsers group (see Who is a grantee?) gets READ access. + public_read, + /// Owner gets FULL_CONTROL. The AllUsers group gets READ and WRITE access. Granting this on a bucket is generally not recommended. + public_read_write, + /// Owner gets FULL_CONTROL. Amazon EC2 gets READ access to GET an Amazon Machine Image (AMI) bundle from Amazon S3. + aws_exec_read, + /// Owner gets FULL_CONTROL. The AuthenticatedUsers group gets READ access. + authenticated_read, + /// Object owner gets FULL_CONTROL. Bucket owner gets READ access. If you specify this canned ACL when creating a bucket, Amazon S3 ignores it. + bucket_owner_read, + /// Both the object owner and the bucket owner get FULL_CONTROL over the object. If you specify this canned ACL when creating a bucket, Amazon S3 ignores it. 
+ bucket_owner_full_control, + log_delivery_write, + + pub fn toString(this: @This()) []const u8 { + return switch (this) { + .private => "private", + .public_read => "public-read", + .public_read_write => "public-read-write", + .aws_exec_read => "aws-exec-read", + .authenticated_read => "authenticated-read", + .bucket_owner_read => "bucket-owner-read", + .bucket_owner_full_control => "bucket-owner-full-control", + .log_delivery_write => "log-delivery-write", + }; + } + + pub const Map = bun.ComptimeStringMap(ACL, .{ + .{ "private", .private }, + .{ "public-read", .public_read }, + .{ "public-read-write", .public_read_write }, + .{ "aws-exec-read", .aws_exec_read }, + .{ "authenticated-read", .authenticated_read }, + .{ "bucket-owner-read", .bucket_owner_read }, + .{ "bucket-owner-full-control", .bucket_owner_full_control }, + .{ "log-delivery-write", .log_delivery_write }, + }); +}; diff --git a/src/s3/client.zig b/src/s3/client.zig new file mode 100644 index 0000000000..684ae76ca4 --- /dev/null +++ b/src/s3/client.zig @@ -0,0 +1,629 @@ +const std = @import("std"); +const bun = @import("root").bun; +const JSC = bun.JSC; +const picohttp = bun.picohttp; + +pub const ACL = @import("./acl.zig").ACL; +pub const S3HttpDownloadStreamingTask = @import("./download_stream.zig").S3HttpDownloadStreamingTask; +pub const MultiPartUploadOptions = @import("./multipart_options.zig").MultiPartUploadOptions; +pub const MultiPartUpload = @import("./multipart.zig").MultiPartUpload; + +pub const Error = @import("./error.zig"); +pub const throwSignError = Error.throwSignError; +pub const getJSSignError = Error.getJSSignError; + +const Credentials = @import("./credentials.zig"); +pub const S3Credentials = Credentials.S3Credentials; +pub const S3CredentialsWithOptions = Credentials.S3CredentialsWithOptions; + +const S3SimpleRequest = @import("./simple_request.zig"); +pub const S3HttpSimpleTask = S3SimpleRequest.S3HttpSimpleTask; +pub const S3UploadResult = S3SimpleRequest.S3UploadResult; +pub const S3StatResult = S3SimpleRequest.S3StatResult; +pub const S3DownloadResult = S3SimpleRequest.S3DownloadResult; +pub const S3DeleteResult = S3SimpleRequest.S3DeleteResult; + +pub fn stat( + this: *S3Credentials, + path: []const u8, + callback: *const fn (S3StatResult, *anyopaque) void, + callback_context: *anyopaque, + proxy_url: ?[]const u8, +) void { + S3SimpleRequest.executeSimpleS3Request(this, .{ + .path = path, + .method = .HEAD, + .proxy_url = proxy_url, + .body = "", + }, .{ .stat = callback }, callback_context); +} + +pub fn download( + this: *S3Credentials, + path: []const u8, + callback: *const fn (S3DownloadResult, *anyopaque) void, + callback_context: *anyopaque, + proxy_url: ?[]const u8, +) void { + S3SimpleRequest.executeSimpleS3Request(this, .{ + .path = path, + .method = .GET, + .proxy_url = proxy_url, + .body = "", + }, .{ .download = callback }, callback_context); +} + +pub fn downloadSlice( + this: *S3Credentials, + path: []const u8, + offset: usize, + size: ?usize, + callback: *const fn (S3DownloadResult, *anyopaque) void, + callback_context: *anyopaque, + proxy_url: ?[]const u8, +) void { + const range = brk: { + if (size) |size_| { + if (offset == 0) break :brk null; + + var end = (offset + size_); + if (size_ > 0) { + end -= 1; + } + break :brk std.fmt.allocPrint(bun.default_allocator, "bytes={}-{}", .{ offset, end }) catch bun.outOfMemory(); + } + if (offset == 0) break :brk null; + break :brk std.fmt.allocPrint(bun.default_allocator, "bytes={}-", .{offset}) catch bun.outOfMemory(); + }; + + 
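The range computation just above builds an HTTP Range header, and HTTP byte ranges are inclusive on both ends, hence the `end - 1` when a byte count is given; at offset 0 the code sends no header at all and fetches the whole object. A standalone sketch of that arithmetic (the function name is illustrative, and it mirrors the offset-0 behavior shown above):

const std = @import("std");

fn formatRange(allocator: std.mem.Allocator, offset: usize, size: ?usize) !?[]u8 {
    // mirror the code above: at offset 0 no Range header is sent at all
    if (offset == 0) return null;
    if (size) |s| {
        // inclusive end: bytes=10-14 covers exactly 5 bytes
        const end = if (s > 0) offset + s - 1 else offset;
        return try std.fmt.allocPrint(allocator, "bytes={d}-{d}", .{ offset, end });
    }
    // open-ended: everything from offset to the end of the object
    return try std.fmt.allocPrint(allocator, "bytes={d}-", .{offset});
}

test "inclusive end" {
    const a = std.testing.allocator;
    const r = (try formatRange(a, 10, 5)).?;
    defer a.free(r);
    try std.testing.expectEqualStrings("bytes=10-14", r);
    try std.testing.expect((try formatRange(a, 0, 100)) == null);
}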
S3SimpleRequest.executeSimpleS3Request(this, .{ + .path = path, + .method = .GET, + .proxy_url = proxy_url, + .body = "", + .range = range, + }, .{ .download = callback }, callback_context); +} + +pub fn delete( + this: *S3Credentials, + path: []const u8, + callback: *const fn (S3DeleteResult, *anyopaque) void, + callback_context: *anyopaque, + proxy_url: ?[]const u8, +) void { + S3SimpleRequest.executeSimpleS3Request(this, .{ + .path = path, + .method = .DELETE, + .proxy_url = proxy_url, + .body = "", + }, .{ .delete = callback }, callback_context); +} + +pub fn upload( + this: *S3Credentials, + path: []const u8, + content: []const u8, + content_type: ?[]const u8, + acl: ?ACL, + proxy_url: ?[]const u8, + callback: *const fn (S3UploadResult, *anyopaque) void, + callback_context: *anyopaque, +) void { + S3SimpleRequest.executeSimpleS3Request(this, .{ + .path = path, + .method = .PUT, + .proxy_url = proxy_url, + .body = content, + .content_type = content_type, + .acl = acl, + }, .{ .upload = callback }, callback_context); +} +/// returns a writable stream that writes to the s3 path +pub fn writableStream( + this: *S3Credentials, + path: []const u8, + globalThis: *JSC.JSGlobalObject, + options: MultiPartUploadOptions, + content_type: ?[]const u8, + proxy: ?[]const u8, +) bun.JSError!JSC.JSValue { + const Wrapper = struct { + pub fn callback(result: S3UploadResult, sink: *JSC.WebCore.NetworkSink) void { + if (sink.endPromise.hasValue()) { + if (sink.endPromise.globalObject()) |globalObject| { + const event_loop = globalObject.bunVM().eventLoop(); + event_loop.enter(); + defer event_loop.exit(); + switch (result) { + .success => { + sink.endPromise.resolve(globalObject, JSC.jsNumber(0)); + }, + .failure => |err| { + if (!sink.done) { + sink.abort(); + return; + } + + sink.endPromise.reject(globalObject, err.toJS(globalObject, sink.path())); + }, + } + } + } + sink.finalize(); + } + }; + const proxy_url = (proxy orelse ""); + this.ref(); // ref the credentials + const task = MultiPartUpload.new(.{ + .credentials = this, + .path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(), + .proxy = if (proxy_url.len > 0) bun.default_allocator.dupe(u8, proxy_url) catch bun.outOfMemory() else "", + .content_type = if (content_type) |ct| bun.default_allocator.dupe(u8, ct) catch bun.outOfMemory() else null, + + .callback = @ptrCast(&Wrapper.callback), + .callback_context = undefined, + .globalThis = globalThis, + .options = options, + .vm = JSC.VirtualMachine.get(), + }); + + task.poll_ref.ref(task.vm); + + task.ref(); // + 1 for the stream + var response_stream = JSC.WebCore.NetworkSink.new(.{ + .task = .{ .s3_upload = task }, + .buffer = .{}, + .globalThis = globalThis, + .encoded = false, + .endPromise = JSC.JSPromise.Strong.init(globalThis), + }).toSink(); + + task.callback_context = @ptrCast(response_stream); + var signal = &response_stream.sink.signal; + + signal.* = JSC.WebCore.NetworkSink.JSSink.SinkSignal.init(.zero); + + // explicitly set it to a dead pointer + // we use this memory address to disable signals being sent + signal.clear(); + bun.assert(signal.isDead()); + return response_stream.sink.toJS(globalThis); +} + +const S3UploadStreamWrapper = struct { + readable_stream_ref: JSC.WebCore.ReadableStream.Strong, + sink: *JSC.WebCore.NetworkSink, + task: *MultiPartUpload, + callback: ?*const fn (S3UploadResult, *anyopaque) void, + callback_context: *anyopaque, + ref_count: u32 = 1, + path: []const u8, // this is owned by the task not by the wrapper + pub usingnamespace 
bun.NewRefCounted(@This(), @This().deinit); + pub fn resolve(result: S3UploadResult, self: *@This()) void { + const sink = self.sink; + defer self.deref(); + if (sink.endPromise.hasValue()) { + if (sink.endPromise.globalObject()) |globalObject| { + switch (result) { + .success => sink.endPromise.resolve(globalObject, JSC.jsNumber(0)), + .failure => |err| { + if (!sink.done) { + sink.abort(); + return; + } + sink.endPromise.reject(globalObject, err.toJS(globalObject, self.path)); + }, + } + } + } + if (self.callback) |callback| { + callback(result, self.callback_context); + } + } + + pub fn deinit(self: *@This()) void { + self.readable_stream_ref.deinit(); + self.sink.finalize(); + self.sink.destroy(); + self.task.deref(); + self.destroy(); + } +}; + +pub fn onUploadStreamResolveRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + var args = callframe.arguments_old(2); + var this = args.ptr[args.len - 1].asPromisePtr(S3UploadStreamWrapper); + defer this.deref(); + + if (this.readable_stream_ref.get()) |stream| { + stream.done(globalThis); + } + this.readable_stream_ref.deinit(); + this.task.continueStream(); + + return .undefined; +} + +pub fn onUploadStreamRejectRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const args = callframe.arguments_old(2); + var this = args.ptr[args.len - 1].asPromisePtr(S3UploadStreamWrapper); + defer this.deref(); + + const err = args.ptr[0]; + if (this.sink.endPromise.hasValue()) { + this.sink.endPromise.reject(globalThis, err); + } + + if (this.readable_stream_ref.get()) |stream| { + stream.cancel(globalThis); + this.readable_stream_ref.deinit(); + } + if (this.sink.task) |task| { + if (task == .s3_upload) { + task.s3_upload.fail(.{ + .code = "UnknownError", + .message = "ReadableStream ended with an error", + }); + } + } + this.task.continueStream(); + + return .undefined; +} +pub const shim = JSC.Shimmer("Bun", "S3UploadStream", @This()); + +pub const Export = shim.exportFunctions(.{ + .onResolveRequestStream = onUploadStreamResolveRequestStream, + .onRejectRequestStream = onUploadStreamRejectRequestStream, +}); +comptime { + const jsonResolveRequestStream = JSC.toJSHostFunction(onUploadStreamResolveRequestStream); + @export(jsonResolveRequestStream, .{ .name = Export[0].symbol_name }); + const jsonRejectRequestStream = JSC.toJSHostFunction(onUploadStreamRejectRequestStream); + @export(jsonRejectRequestStream, .{ .name = Export[1].symbol_name }); +} + +/// consumes the readable stream and upload to s3 +pub fn uploadStream( + this: *S3Credentials, + path: []const u8, + readable_stream: JSC.WebCore.ReadableStream, + globalThis: *JSC.JSGlobalObject, + options: MultiPartUploadOptions, + acl: ?ACL, + content_type: ?[]const u8, + proxy: ?[]const u8, + callback: ?*const fn (S3UploadResult, *anyopaque) void, + callback_context: *anyopaque, +) JSC.JSValue { + this.ref(); // ref the credentials + const proxy_url = (proxy orelse ""); + + if (readable_stream.isDisturbed(globalThis)) { + return JSC.JSPromise.rejectedPromiseValue(globalThis, bun.String.static("ReadableStream is already disturbed").toErrorInstance(globalThis)); + } + + switch (readable_stream.ptr) { + .Invalid => { + return JSC.JSPromise.rejectedPromiseValue(globalThis, bun.String.static("ReadableStream is invalid").toErrorInstance(globalThis)); + }, + inline .File, .Bytes => |stream| { + if (stream.pending.result == .err) { + // we got an error, fail early + const err = stream.pending.result.err; + 
stream.pending = .{ .result = .{ .done = {} } }; + const js_err, const was_strong = err.toJSWeak(globalThis); + if (was_strong == .Strong) { + js_err.unprotect(); + } + js_err.ensureStillAlive(); + return JSC.JSPromise.rejectedPromise(globalThis, js_err).asValue(globalThis); + } + }, + else => {}, + } + + const task = MultiPartUpload.new(.{ + .credentials = this, + .path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(), + .proxy = if (proxy_url.len > 0) bun.default_allocator.dupe(u8, proxy_url) catch bun.outOfMemory() else "", + .content_type = if (content_type) |ct| bun.default_allocator.dupe(u8, ct) catch bun.outOfMemory() else null, + .callback = @ptrCast(&S3UploadStreamWrapper.resolve), + .callback_context = undefined, + .globalThis = globalThis, + .state = .wait_stream_check, + .options = options, + .acl = acl, + .vm = JSC.VirtualMachine.get(), + }); + + task.poll_ref.ref(task.vm); + + task.ref(); // + 1 for the stream sink + + var response_stream = JSC.WebCore.NetworkSink.new(.{ + .task = .{ .s3_upload = task }, + .buffer = .{}, + .globalThis = globalThis, + .encoded = false, + .endPromise = JSC.JSPromise.Strong.init(globalThis), + }).toSink(); + task.ref(); // + 1 for the stream wrapper + + const endPromise = response_stream.sink.endPromise.value(); + const ctx = S3UploadStreamWrapper.new(.{ + .readable_stream_ref = JSC.WebCore.ReadableStream.Strong.init(readable_stream, globalThis), + .sink = &response_stream.sink, + .callback = callback, + .callback_context = callback_context, + .path = task.path, + .task = task, + }); + task.callback_context = @ptrCast(ctx); + // keep the task alive until we are done configuring the signal + task.ref(); + defer task.deref(); + + var signal = &response_stream.sink.signal; + + signal.* = JSC.WebCore.NetworkSink.JSSink.SinkSignal.init(.zero); + + // explicitly set it to a dead pointer + // we use this memory address to disable signals being sent + signal.clear(); + bun.assert(signal.isDead()); + + // We are already corked! 
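The signal setup here relies on a "dead pointer": a known sentinel address stored where a real receiver would go, so later code can cheaply test isDead() instead of carrying a separate flag. A toy illustration of the pattern with an arbitrary sentinel address; Signal and its methods here are a hypothetical reduction, not Bun's actual SinkSignal:

const std = @import("std");

const Signal = struct {
    // any fixed, never-dereferenced address works as the sentinel
    const dead: *const anyopaque = @ptrFromInt(0xaaaa_0000);

    ptr: *const anyopaque = dead,

    fn clear(self: *@This()) void {
        self.ptr = dead; // disable delivery
    }

    fn isDead(self: *const @This()) bool {
        return self.ptr == dead;
    }
};

test "sentinel disables delivery" {
    var s = Signal{};
    try std.testing.expect(s.isDead());
    const receiver: u32 = 0;
    s.ptr = @ptrCast(&receiver);
    try std.testing.expect(!s.isDead());
    s.clear();
    try std.testing.expect(s.isDead());
}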
+ const assignment_result: JSC.JSValue = JSC.WebCore.NetworkSink.JSSink.assignToStream( + globalThis, + readable_stream.value, + response_stream, + @as(**anyopaque, @ptrCast(&signal.ptr)), + ); + + assignment_result.ensureStillAlive(); + + // assert that it was updated + bun.assert(!signal.isDead()); + + if (assignment_result.toError()) |err| { + if (response_stream.sink.endPromise.hasValue()) { + response_stream.sink.endPromise.reject(globalThis, err); + } + + task.fail(.{ + .code = "UnknownError", + .message = "ReadableStream ended with an error", + }); + readable_stream.cancel(globalThis); + return endPromise; + } + + if (!assignment_result.isEmptyOrUndefinedOrNull()) { + assignment_result.ensureStillAlive(); + // it returns a Promise when it goes through ReadableStreamDefaultReader + if (assignment_result.asAnyPromise()) |promise| { + switch (promise.status(globalThis.vm())) { + .pending => { + // if we ended and it's not canceled, the promise is the endPromise + // because assignToStream can return the sink.end() promise + // we set the endPromise in the NetworkSink so we need to resolve it + if (response_stream.sink.ended and !response_stream.sink.cancel) { + task.continueStream(); + + readable_stream.done(globalThis); + return endPromise; + } + ctx.ref(); + + assignment_result.then( + globalThis, + task.callback_context, + onUploadStreamResolveRequestStream, + onUploadStreamRejectRequestStream, + ); + // we need to wait for the promise to resolve because it can be an error/cancel here + if (!task.ended) + task.continueStream(); + }, + .fulfilled => { + task.continueStream(); + + readable_stream.done(globalThis); + }, + .rejected => { + if (response_stream.sink.endPromise.hasValue()) { + response_stream.sink.endPromise.reject(globalThis, promise.result(globalThis.vm())); + } + + task.fail(.{ + .code = "UnknownError", + .message = "ReadableStream ended with an error", + }); + readable_stream.cancel(globalThis); + }, + } + } else { + if (response_stream.sink.endPromise.hasValue()) { + response_stream.sink.endPromise.reject(globalThis, assignment_result); + } + + task.fail(.{ + .code = "UnknownError", + .message = "ReadableStream ended with an error", + }); + readable_stream.cancel(globalThis); + } + } + return endPromise; +} + +/// download a file from s3 chunk by chunk aka streaming (used on readableStream) +pub fn downloadStream( + this: *S3Credentials, + path: []const u8, + offset: usize, + size: ?usize, + proxy_url: ?[]const u8, + callback: *const fn (chunk: bun.MutableString, has_more: bool, err: ?Error.S3Error, *anyopaque) void, + callback_context: *anyopaque, +) void { + const range = brk: { + if (size) |size_| { + if (offset == 0) break :brk null; + + var end = (offset + size_); + if (size_ > 0) { + end -= 1; + } + break :brk std.fmt.allocPrint(bun.default_allocator, "bytes={}-{}", .{ offset, end }) catch bun.outOfMemory(); + } + if (offset == 0) break :brk null; + break :brk std.fmt.allocPrint(bun.default_allocator, "bytes={}-", .{offset}) catch bun.outOfMemory(); + }; + + var result = this.signRequest(.{ + .path = path, + .method = .GET, + }, null) catch |sign_err| { + if (range) |range_| bun.default_allocator.free(range_); + const error_code_and_message = Error.getSignErrorCodeAndMessage(sign_err); + callback(.{ .allocator = bun.default_allocator, .list = .{} }, false, .{ + .code = error_code_and_message.code, + .message = error_code_and_message.message, + }, callback_context); + return; + }; + + var header_buffer: [10]picohttp.Header = undefined; + const headers = brk: { + if
(range) |range_| { + const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ }); + break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); + } else { + break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); + } + }; + const proxy = proxy_url orelse ""; + const owned_proxy = if (proxy.len > 0) bun.default_allocator.dupe(u8, proxy) catch bun.outOfMemory() else ""; + const task = S3HttpDownloadStreamingTask.new(.{ + .http = undefined, + .sign_result = result, + .proxy_url = owned_proxy, + .callback_context = callback_context, + .callback = callback, + .range = range, + .headers = headers, + .vm = JSC.VirtualMachine.get(), + }); + task.poll_ref.ref(task.vm); + + const url = bun.URL.parse(result.url); + + task.signals = task.signal_store.to(); + + task.http = bun.http.AsyncHTTP.init( + bun.default_allocator, + .GET, + url, + task.headers.entries, + task.headers.buf.items, + &task.response_buffer, + "", + bun.http.HTTPClientResult.Callback.New( + *S3HttpDownloadStreamingTask, + S3HttpDownloadStreamingTask.httpCallback, + ).init(task), + .follow, + .{ + .http_proxy = if (owned_proxy.len > 0) bun.URL.parse(owned_proxy) else null, + .verbose = task.vm.getVerboseFetch(), + .signals = task.signals, + .reject_unauthorized = task.vm.getTLSRejectUnauthorized(), + }, + ); + // enable streaming + task.http.enableBodyStreaming(); + // queue http request + bun.http.HTTPThread.init(&.{}); + var batch = bun.ThreadPool.Batch{}; + task.http.schedule(bun.default_allocator, &batch); + bun.http.http_thread.schedule(batch); +} + +/// returns a readable stream that reads from the s3 path +pub fn readableStream( + this: *S3Credentials, + path: []const u8, + offset: usize, + size: ?usize, + proxy_url: ?[]const u8, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + var reader = JSC.WebCore.ByteStream.Source.new(.{ + .context = undefined, + .globalThis = globalThis, + }); + + reader.context.setup(); + const readable_value = reader.toReadableStream(globalThis); + + const S3DownloadStreamWrapper = struct { + readable_stream_ref: JSC.WebCore.ReadableStream.Strong, + path: []const u8, + pub usingnamespace bun.New(@This()); + + pub fn callback(chunk: bun.MutableString, has_more: bool, request_err: ?Error.S3Error, self: *@This()) void { + defer if (!has_more) self.deinit(); + + if (self.readable_stream_ref.get()) |readable| { + if (readable.ptr == .Bytes) { + if (request_err) |err| { + readable.ptr.Bytes.onData( + .{ + .err = .{ + .JSValue = err.toJS(self.readable_stream_ref.globalThis().?, self.path), + }, + }, + bun.default_allocator, + ); + return; + } + if (has_more) { + readable.ptr.Bytes.onData( + .{ + .temporary = bun.ByteList.initConst(chunk.list.items), + }, + bun.default_allocator, + ); + return; + } + + readable.ptr.Bytes.onData( + .{ + .temporary_and_done = bun.ByteList.initConst(chunk.list.items), + }, + bun.default_allocator, + ); + return; + } + } + } + + pub fn deinit(self: *@This()) void { + self.readable_stream_ref.deinit(); + bun.default_allocator.free(self.path); + self.destroy(); + } + }; + + downloadStream(this, path, offset, size, proxy_url, @ptrCast(&S3DownloadStreamWrapper.callback), S3DownloadStreamWrapper.new(.{ + .readable_stream_ref = JSC.WebCore.ReadableStream.Strong.init(.{ + .ptr = .{ .Bytes = &reader.context }, + .value = readable_value, + }, globalThis), + .path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(), + })); + return 
readable_value; +} diff --git a/src/s3/credentials.zig b/src/s3/credentials.zig new file mode 100644 index 0000000000..d467373e32 --- /dev/null +++ b/src/s3/credentials.zig @@ -0,0 +1,774 @@ +const bun = @import("root").bun; +const picohttp = bun.picohttp; +const std = @import("std"); + +const MultiPartUploadOptions = @import("./multipart_options.zig").MultiPartUploadOptions; +const ACL = @import("./acl.zig").ACL; +const JSC = bun.JSC; +const RareData = JSC.RareData; +const strings = bun.strings; +const DotEnv = bun.DotEnv; + +pub const S3Credentials = struct { + accessKeyId: []const u8, + secretAccessKey: []const u8, + region: []const u8, + endpoint: []const u8, + bucket: []const u8, + sessionToken: []const u8, + + /// Important for MinIO support. + insecure_http: bool = false, + + ref_count: u32 = 1, + pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); + + pub fn estimatedSize(this: *const @This()) usize { + return @sizeOf(S3Credentials) + this.accessKeyId.len + this.region.len + this.secretAccessKey.len + this.endpoint.len + this.bucket.len; + } + + fn hashConst(acl: []const u8) u64 { + var hasher = std.hash.Wyhash.init(0); + var remain = acl; + + var buf: [@sizeOf(@TypeOf(hasher.buf))]u8 = undefined; + + while (remain.len > 0) { + const end = @min(hasher.buf.len, remain.len); + + hasher.update(strings.copyLowercaseIfNeeded(remain[0..end], &buf)); + remain = remain[end..]; + } + + return hasher.final(); + } + pub fn getCredentialsWithOptions(this: S3Credentials, default_options: MultiPartUploadOptions, options: ?JSC.JSValue, default_acl: ?ACL, globalObject: *JSC.JSGlobalObject) bun.JSError!S3CredentialsWithOptions { + // get ENV config + var new_credentials = S3CredentialsWithOptions{ + .credentials = this, + .options = default_options, + .acl = default_acl, + }; + errdefer { + new_credentials.deinit(); + } + + if (options) |opts| { + if (opts.isObject()) { + if (try opts.getTruthyComptime(globalObject, "accessKeyId")) |js_value| { + if (!js_value.isEmptyOrUndefinedOrNull()) { + if (js_value.isString()) { + const str = bun.String.fromJS(js_value, globalObject); + defer str.deref(); + if (str.tag != .Empty and str.tag != .Dead) { + new_credentials._accessKeyIdSlice = str.toUTF8(bun.default_allocator); + new_credentials.credentials.accessKeyId = new_credentials._accessKeyIdSlice.?.slice(); + new_credentials.changed_credentials = true; + } + } else { + return globalObject.throwInvalidArgumentTypeValue("accessKeyId", "string", js_value); + } + } + } + if (try opts.getTruthyComptime(globalObject, "secretAccessKey")) |js_value| { + if (!js_value.isEmptyOrUndefinedOrNull()) { + if (js_value.isString()) { + const str = bun.String.fromJS(js_value, globalObject); + defer str.deref(); + if (str.tag != .Empty and str.tag != .Dead) { + new_credentials._secretAccessKeySlice = str.toUTF8(bun.default_allocator); + new_credentials.credentials.secretAccessKey = new_credentials._secretAccessKeySlice.?.slice(); + new_credentials.changed_credentials = true; + } + } else { + return globalObject.throwInvalidArgumentTypeValue("secretAccessKey", "string", js_value); + } + } + } + if (try opts.getTruthyComptime(globalObject, "region")) |js_value| { + if (!js_value.isEmptyOrUndefinedOrNull()) { + if (js_value.isString()) { + const str = bun.String.fromJS(js_value, globalObject); + defer str.deref(); + if (str.tag != .Empty and str.tag != .Dead) { + new_credentials._regionSlice = str.toUTF8(bun.default_allocator); + new_credentials.credentials.region = new_credentials._regionSlice.?.slice(); + 
new_credentials.changed_credentials = true; + } + } else { + return globalObject.throwInvalidArgumentTypeValue("region", "string", js_value); + } + } + } + if (try opts.getTruthyComptime(globalObject, "endpoint")) |js_value| { + if (!js_value.isEmptyOrUndefinedOrNull()) { + if (js_value.isString()) { + const str = bun.String.fromJS(js_value, globalObject); + defer str.deref(); + if (str.tag != .Empty and str.tag != .Dead) { + new_credentials._endpointSlice = str.toUTF8(bun.default_allocator); + const endpoint = new_credentials._endpointSlice.?.slice(); + const url = bun.URL.parse(endpoint); + const normalized_endpoint = url.host; + if (normalized_endpoint.len > 0) { + new_credentials.credentials.endpoint = normalized_endpoint; + + // Default to https:// + // Only use http:// if the endpoint specifically starts with 'http://' + new_credentials.credentials.insecure_http = url.isHTTP(); + + new_credentials.changed_credentials = true; + } else if (endpoint.len > 0) { + // endpoint is not a valid URL + return globalObject.throwInvalidArgumentTypeValue("endpoint", "string", js_value); + } + } + } else { + return globalObject.throwInvalidArgumentTypeValue("endpoint", "string", js_value); + } + } + } + if (try opts.getTruthyComptime(globalObject, "bucket")) |js_value| { + if (!js_value.isEmptyOrUndefinedOrNull()) { + if (js_value.isString()) { + const str = bun.String.fromJS(js_value, globalObject); + defer str.deref(); + if (str.tag != .Empty and str.tag != .Dead) { + new_credentials._bucketSlice = str.toUTF8(bun.default_allocator); + new_credentials.credentials.bucket = new_credentials._bucketSlice.?.slice(); + new_credentials.changed_credentials = true; + } + } else { + return globalObject.throwInvalidArgumentTypeValue("bucket", "string", js_value); + } + } + } + + if (try opts.getTruthyComptime(globalObject, "sessionToken")) |js_value| { + if (!js_value.isEmptyOrUndefinedOrNull()) { + if (js_value.isString()) { + const str = bun.String.fromJS(js_value, globalObject); + defer str.deref(); + if (str.tag != .Empty and str.tag != .Dead) { + new_credentials._sessionTokenSlice = str.toUTF8(bun.default_allocator); + new_credentials.credentials.sessionToken = new_credentials._sessionTokenSlice.?.slice(); + new_credentials.changed_credentials = true; + } + } else { + return globalObject.throwInvalidArgumentTypeValue("bucket", "string", js_value); + } + } + } + + if (try opts.getOptional(globalObject, "pageSize", i64)) |pageSize| { + if (pageSize < MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE and pageSize > MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE) { + return globalObject.throwRangeError(pageSize, .{ + .min = @intCast(MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE), + .max = @intCast(MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE), + .field_name = "pageSize", + }); + } else { + new_credentials.options.partSize = @intCast(pageSize); + } + } + + if (try opts.getOptional(globalObject, "queueSize", i32)) |queueSize| { + if (queueSize < 1) { + return globalObject.throwRangeError(queueSize, .{ + .min = 1, + .field_name = "queueSize", + }); + } else { + new_credentials.options.queueSize = @intCast(@max(queueSize, std.math.maxInt(u8))); + } + } + + if (try opts.getOptional(globalObject, "retry", i32)) |retry| { + if (retry < 0 and retry > 255) { + return globalObject.throwRangeError(retry, .{ + .min = 0, + .max = 255, + .field_name = "retry", + }); + } else { + new_credentials.options.retry = @intCast(retry); + } + } + if (try opts.getOptionalEnum(globalObject, "acl", ACL)) |acl| { + new_credentials.acl = 
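These option checks are reject-outside-range validations: a value is invalid when it is below the minimum or above the maximum (an `and` there would make the check unsatisfiable), and queueSize is saturated into a u8 with @min rather than wrapped. A standalone sketch with illustrative limits and error names, not the actual MultiPartUploadOptions bounds:

const std = @import("std");

const min_part_mib: i64 = 5; // illustrative floor, in MiB
const max_part_mib: i64 = 5120; // illustrative ceiling, in MiB

fn validatePartSize(page_size: i64) error{OutOfRange}!u16 {
    // invalid when below the floor OR above the ceiling
    if (page_size < min_part_mib or page_size > max_part_mib) return error.OutOfRange;
    return @intCast(page_size);
}

fn validateQueueSize(queue_size: i32) error{OutOfRange}!u8 {
    if (queue_size < 1) return error.OutOfRange;
    // saturate into u8 instead of wrapping
    return @intCast(@min(queue_size, std.math.maxInt(u8)));
}

fn validateRetry(retry: i32) error{OutOfRange}!u8 {
    if (retry < 0 or retry > 255) return error.OutOfRange;
    return @intCast(retry);
}

test "bounds behave as reject-outside-range checks" {
    try std.testing.expectError(error.OutOfRange, validatePartSize(4));
    try std.testing.expectEqual(@as(u16, 5), try validatePartSize(5));
    try std.testing.expectEqual(@as(u8, 255), try validateQueueSize(1000));
    try std.testing.expectError(error.OutOfRange, validateRetry(-1));
}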
acl; + } + } + } + return new_credentials; + } + pub fn dupe(this: *const @This()) *S3Credentials { + return S3Credentials.new(.{ + .accessKeyId = if (this.accessKeyId.len > 0) + bun.default_allocator.dupe(u8, this.accessKeyId) catch bun.outOfMemory() + else + "", + + .secretAccessKey = if (this.secretAccessKey.len > 0) + bun.default_allocator.dupe(u8, this.secretAccessKey) catch bun.outOfMemory() + else + "", + + .region = if (this.region.len > 0) + bun.default_allocator.dupe(u8, this.region) catch bun.outOfMemory() + else + "", + + .endpoint = if (this.endpoint.len > 0) + bun.default_allocator.dupe(u8, this.endpoint) catch bun.outOfMemory() + else + "", + + .bucket = if (this.bucket.len > 0) + bun.default_allocator.dupe(u8, this.bucket) catch bun.outOfMemory() + else + "", + + .sessionToken = if (this.sessionToken.len > 0) + bun.default_allocator.dupe(u8, this.sessionToken) catch bun.outOfMemory() + else + "", + + .insecure_http = this.insecure_http, + }); + } + pub fn deinit(this: *@This()) void { + if (this.accessKeyId.len > 0) { + bun.default_allocator.free(this.accessKeyId); + } + if (this.secretAccessKey.len > 0) { + bun.default_allocator.free(this.secretAccessKey); + } + if (this.region.len > 0) { + bun.default_allocator.free(this.region); + } + if (this.endpoint.len > 0) { + bun.default_allocator.free(this.endpoint); + } + if (this.bucket.len > 0) { + bun.default_allocator.free(this.bucket); + } + if (this.sessionToken.len > 0) { + bun.default_allocator.free(this.sessionToken); + } + this.destroy(); + } + + const log = bun.Output.scoped(.AWS, false); + + const DateResult = struct { + // numeric representation of year, month and day (excluding time components) + numeric_day: u64, + date: []const u8, + }; + + fn getAMZDate(allocator: std.mem.Allocator) DateResult { + // We could also use Date.now() but it would be slower and add a JSC dependency + // var buffer: [28]u8 = undefined; + // the code below is the same as new Date(Date.now()).toISOString() + // JSC.JSValue.getDateNowISOString(globalObject, &buffer); + + // Create UTC timestamp + const secs: u64 = @intCast(@divFloor(std.time.milliTimestamp(), 1000)); + const utc_seconds = std.time.epoch.EpochSeconds{ .secs = secs }; + const utc_day = utc_seconds.getEpochDay(); + const year_and_day = utc_day.calculateYearDay(); + const month_and_day = year_and_day.calculateMonthDay(); + // Get UTC date components + const year = year_and_day.year; + const day = @as(u32, month_and_day.day_index) + 1; // day_index starts at 0 + const month = month_and_day.month.numeric(); // months start at 1 + + // Get UTC time components + const time = utc_seconds.getDaySeconds(); + const hours = time.getHoursIntoDay(); + const minutes = time.getMinutesIntoHour(); + const seconds = time.getSecondsIntoMinute(); + + // Format the date + return .{ + .numeric_day = secs - time.secs, + .date = std.fmt.allocPrint(allocator, "{d:0>4}{d:0>2}{d:0>2}T{d:0>2}{d:0>2}{d:0>2}Z", .{ + year, + month, + day, + hours, + minutes, + seconds, + }) catch bun.outOfMemory(), + }; + } + + const DIGESTED_HMAC_256_LEN = 32; + pub const SignResult = struct { + amz_date: []const u8, + host: []const u8, + authorization: []const u8, + url: []const u8, + + content_disposition: []const u8 = "", + session_token: []const u8 = "", + acl: ?ACL = null, + _headers: [7]picohttp.Header = .{ + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = ""
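getAMZDate formats the current UTC time as ISO 8601 "basic" (yyyymmddThhmmssZ) directly from epoch seconds via std.time.epoch, avoiding a JS Date round trip. A freestanding version of the same conversion that takes the seconds as a parameter so it can be tested; the function name is illustrative:

const std = @import("std");

fn amzDate(buf: []u8, secs: u64) ![]const u8 {
    const es = std.time.epoch.EpochSeconds{ .secs = secs };
    const year_day = es.getEpochDay().calculateYearDay();
    const month_day = year_day.calculateMonthDay();
    const time = es.getDaySeconds();
    return std.fmt.bufPrint(buf, "{d:0>4}{d:0>2}{d:0>2}T{d:0>2}{d:0>2}{d:0>2}Z", .{
        year_day.year,
        month_day.month.numeric(), // months are 1-based
        @as(u32, month_day.day_index) + 1, // day_index is 0-based
        time.getHoursIntoDay(),
        time.getMinutesIntoHour(),
        time.getSecondsIntoMinute(),
    });
}

test "epoch zero formats as the Unix epoch in basic ISO 8601" {
    var buf: [17]u8 = undefined;
    try std.testing.expectEqualStrings("19700101T000000Z", try amzDate(&buf, 0));
}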
}, + }, + _headers_len: u8 = 0, + + pub fn headers(this: *const @This()) []const picohttp.Header { + return this._headers[0..this._headers_len]; + } + + pub fn mixWithHeader(this: *const @This(), headers_buffer: []picohttp.Header, header: picohttp.Header) []const picohttp.Header { + // copy the headers to buffer + const len = this._headers_len; + for (this._headers[0..len], 0..len) |existing_header, i| { + headers_buffer[i] = existing_header; + } + headers_buffer[len] = header; + return headers_buffer[0 .. len + 1]; + } + + pub fn deinit(this: *const @This()) void { + if (this.amz_date.len > 0) { + bun.default_allocator.free(this.amz_date); + } + + if (this.session_token.len > 0) { + bun.default_allocator.free(this.session_token); + } + + if (this.content_disposition.len > 0) { + bun.default_allocator.free(this.content_disposition); + } + + if (this.host.len > 0) { + bun.default_allocator.free(this.host); + } + + if (this.authorization.len > 0) { + bun.default_allocator.free(this.authorization); + } + + if (this.url.len > 0) { + bun.default_allocator.free(this.url); + } + } + }; + + pub const SignQueryOptions = struct { + expires: usize = 86400, + }; + pub const SignOptions = struct { + path: []const u8, + method: bun.http.Method, + content_hash: ?[]const u8 = null, + search_params: ?[]const u8 = null, + content_disposition: ?[]const u8 = null, + acl: ?ACL = null, + }; + + pub fn guessRegion(endpoint: []const u8) []const u8 { + if (endpoint.len > 0) { + if (strings.endsWith(endpoint, ".r2.cloudflarestorage.com")) return "auto"; + if (strings.indexOf(endpoint, ".amazonaws.com")) |end| { + if (strings.indexOf(endpoint, "s3.")) |start| { + return endpoint[start + 3 .. end]; + } + } + // endpoint is informed but is not s3 so auto detect + return "auto"; + } + + // no endpoint so we default to us-east-1 because s3.us-east-1.amazonaws.com is the default endpoint + return "us-east-1"; + } + fn toHexChar(value: u8) !u8 { + return switch (value) { + 0...9 => value + '0', + 10...15 => (value - 10) + 'A', + else => error.InvalidHexChar, + }; + } + fn encodeURIComponent(input: []const u8, buffer: []u8, comptime encode_slash: bool) ![]const u8 { + var written: usize = 0; + + for (input) |c| { + switch (c) { + // RFC 3986 Unreserved Characters (do not encode) + 'A'...'Z', 'a'...'z', '0'...'9', '-', '_', '.', '~' => { + if (written >= buffer.len) return error.BufferTooSmall; + buffer[written] = c; + written += 1; + }, + // All other characters need to be percent-encoded + else => { + if (!encode_slash and (c == '/' or c == '\\')) { + if (written >= buffer.len) return error.BufferTooSmall; + buffer[written] = if (c == '\\') '/' else c; + written += 1; + continue; + } + if (written + 3 > buffer.len) return error.BufferTooSmall; + buffer[written] = '%'; + // Convert byte to hex + const high_nibble: u8 = (c >> 4) & 0xF; + const low_nibble: u8 = c & 0xF; + buffer[written + 1] = try toHexChar(high_nibble); + buffer[written + 2] = try toHexChar(low_nibble); + written += 3; + }, + } + } + + return buffer[0..written]; + } + + pub fn signRequest(this: *const @This(), signOptions: SignOptions, signQueryOption: ?SignQueryOptions) !SignResult { + const method = signOptions.method; + const request_path = signOptions.path; + const content_hash = signOptions.content_hash; + + const search_params = signOptions.search_params; + + var content_disposition = signOptions.content_disposition; + if (content_disposition != null and content_disposition.?.len == 0) { + content_disposition = null; + } + const session_token: 
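encodeURIComponent above is RFC 3986 percent-encoding with one twist: encode_slash is false for object keys and bucket names, so path separators survive, and true for values embedded in the query string, such as session tokens. A condensed standalone variant with a usage test; it omits the backslash-to-slash normalization of the original and uses std.fmt.digitToChar for the hex digits:

const std = @import("std");

fn percentEncode(input: []const u8, buffer: []u8, comptime encode_slash: bool) ![]const u8 {
    var written: usize = 0;
    for (input) |c| {
        switch (c) {
            // RFC 3986 unreserved characters pass through untouched
            'A'...'Z', 'a'...'z', '0'...'9', '-', '_', '.', '~' => {
                if (written >= buffer.len) return error.BufferTooSmall;
                buffer[written] = c;
                written += 1;
            },
            else => {
                if (!encode_slash and c == '/') {
                    if (written >= buffer.len) return error.BufferTooSmall;
                    buffer[written] = c;
                    written += 1;
                    continue;
                }
                // everything else becomes %XX, uppercase hex
                if (written + 3 > buffer.len) return error.BufferTooSmall;
                buffer[written] = '%';
                buffer[written + 1] = std.fmt.digitToChar(c >> 4, .upper);
                buffer[written + 2] = std.fmt.digitToChar(c & 0xF, .upper);
                written += 3;
            },
        }
    }
    return buffer[0..written];
}

test "slashes survive in keys but not in query values" {
    var key_buf: [64]u8 = undefined;
    try std.testing.expectEqualStrings("my-bucket/a%20key.txt", try percentEncode("my-bucket/a key.txt", &key_buf, false));
    var token_buf: [64]u8 = undefined;
    try std.testing.expectEqualStrings("a%2Fb", try percentEncode("a/b", &token_buf, true));
}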
?[]const u8 = if (this.sessionToken.len == 0) null else this.sessionToken; + + const acl: ?[]const u8 = if (signOptions.acl) |acl_value| acl_value.toString() else null; + + if (this.accessKeyId.len == 0 or this.secretAccessKey.len == 0) return error.MissingCredentials; + const signQuery = signQueryOption != null; + const expires = if (signQueryOption) |options| options.expires else 0; + const method_name = switch (method) { + .GET => "GET", + .POST => "POST", + .PUT => "PUT", + .DELETE => "DELETE", + .HEAD => "HEAD", + else => return error.InvalidMethod, + }; + + const region = if (this.region.len > 0) this.region else guessRegion(this.endpoint); + var full_path = request_path; + // handle \\ on bucket name + if (strings.startsWith(full_path, "/")) { + full_path = full_path[1..]; + } else if (strings.startsWith(full_path, "\\")) { + full_path = full_path[1..]; + } + + var path: []const u8 = full_path; + var bucket: []const u8 = this.bucket; + + if (bucket.len == 0) { + //TODO: r2 supports bucket in the endpoint + + // guess bucket using path + if (strings.indexOf(full_path, "/")) |end| { + if (strings.indexOf(full_path, "\\")) |backslash_index| { + if (backslash_index < end) { + bucket = full_path[0..backslash_index]; + path = full_path[backslash_index + 1 ..]; + } + } + bucket = full_path[0..end]; + path = full_path[end + 1 ..]; + } else if (strings.indexOf(full_path, "\\")) |backslash_index| { + bucket = full_path[0..backslash_index]; + path = full_path[backslash_index + 1 ..]; + } else { + return error.InvalidPath; + } + } + if (strings.endsWith(path, "/")) { + path = path[0 .. path.len - 1]; + } else if (strings.endsWith(path, "\\")) { + path = path[0 .. path.len - 1]; + } + if (strings.startsWith(path, "/")) { + path = path[1..]; + } else if (strings.startsWith(path, "\\")) { + path = path[1..]; + } + + // if we allowed path.len == 0 it would list the bucket; disallow it for now + if (path.len == 0) return error.InvalidPath; + + var normalized_path_buffer: [1024 + 63 + 2]u8 = undefined; // 1024 max key size and 63 max bucket name + var path_buffer: [1024]u8 = undefined; + var bucket_buffer: [63]u8 = undefined; + bucket = encodeURIComponent(bucket, &bucket_buffer, false) catch return error.InvalidPath; + path = encodeURIComponent(path, &path_buffer, false) catch return error.InvalidPath; + const normalizedPath = std.fmt.bufPrint(&normalized_path_buffer, "/{s}/{s}", .{ bucket, path }) catch return error.InvalidPath; + + const date_result = getAMZDate(bun.default_allocator); + const amz_date = date_result.date; + errdefer bun.default_allocator.free(amz_date); + + const amz_day = amz_date[0..8]; + const signed_headers = if (signQuery) "host" else brk: { + if (acl != null) { + if (content_disposition != null) { + if (session_token != null) { + break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-security-token"; + } else { + break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date"; + } + } else { + if (session_token != null) { + break :brk "host;x-amz-content-sha256;x-amz-date;x-amz-security-token"; + } else { + break :brk "host;x-amz-content-sha256;x-amz-date"; + } + } + } else { + if (content_disposition != null) { + if (session_token != null) { + break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date;x-amz-security-token"; + } else { + break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date"; + } + } else { + if (session_token != null) { + break :brk "host;x-amz-content-sha256;x-amz-date;x-amz-security-token"; + }
else { + break :brk "host;x-amz-content-sha256;x-amz-date"; + } + } + } + }; + + // Default to https. Only use http if they explicitly pass "http://" as the endpoint. + const protocol = if (this.insecure_http) "http" else "https"; + + // detect service name and host from region or endpoint + const host = brk_host: { + if (this.endpoint.len > 0) { + if (this.endpoint.len >= 512) return error.InvalidEndpoint; + break :brk_host try bun.default_allocator.dupe(u8, this.endpoint); + } else { + break :brk_host try std.fmt.allocPrint(bun.default_allocator, "s3.{s}.amazonaws.com", .{region}); + } + }; + const service_name = "s3"; + + errdefer bun.default_allocator.free(host); + + const aws_content_hash = if (content_hash) |hash| hash else ("UNSIGNED-PAYLOAD"); + var tmp_buffer: [4096]u8 = undefined; + + const authorization = brk: { + // we hash the hash so we need 2 buffers + var hmac_sig_service: [bun.BoringSSL.EVP_MAX_MD_SIZE]u8 = undefined; + var hmac_sig_service2: [bun.BoringSSL.EVP_MAX_MD_SIZE]u8 = undefined; + + const sigDateRegionServiceReq = brk_sign: { + const key = try std.fmt.bufPrint(&tmp_buffer, "{s}{s}{s}", .{ region, service_name, this.secretAccessKey }); + var cache = (JSC.VirtualMachine.getMainThreadVM() orelse JSC.VirtualMachine.get()).rareData().awsCache(); + if (cache.get(date_result.numeric_day, key)) |cached| { + break :brk_sign cached; + } + // not cached yet, let's generate a new one + const sigDate = bun.hmac.generate(try std.fmt.bufPrint(&tmp_buffer, "AWS4{s}", .{this.secretAccessKey}), amz_day, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature; + const sigDateRegion = bun.hmac.generate(sigDate, region, .sha256, &hmac_sig_service2) orelse return error.FailedToGenerateSignature; + const sigDateRegionService = bun.hmac.generate(sigDateRegion, service_name, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature; + const result = bun.hmac.generate(sigDateRegionService, "aws4_request", .sha256, &hmac_sig_service2) orelse return error.FailedToGenerateSignature; + + cache.set(date_result.numeric_day, key, hmac_sig_service2[0..DIGESTED_HMAC_256_LEN].*); + break :brk_sign result; + }; + if (signQuery) { + var token_encoded_buffer: [2048]u8 = undefined; // the token is normally around 600-700 bytes but can be up to 2k + var encoded_session_token: ?[]const u8 = null; + if (session_token) |token| { + encoded_session_token = encodeURIComponent(token, &token_encoded_buffer, true) catch return error.InvalidSessionToken; + } + const canonical = brk_canonical: { + if (acl) |acl_value| { + if (encoded_session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, host, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, host, signed_headers, aws_content_hash }); + } + } else { + if (encoded_session_token) |token| { + break :brk_canonical try 
std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, host, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, host, signed_headers, aws_content_hash }); + } + } + }; + var sha_digest = std.mem.zeroes(bun.sha.SHA256.Digest); + bun.sha.SHA256.hash(canonical, &sha_digest, JSC.VirtualMachine.get().rareData().boringEngine()); + + const signValue = try std.fmt.bufPrint(&tmp_buffer, "AWS4-HMAC-SHA256\n{s}\n{s}/{s}/{s}/aws4_request\n{s}", .{ amz_date, amz_day, region, service_name, bun.fmt.bytesToHex(sha_digest[0..bun.sha.SHA256.digest], .lower) }); + + const signature = bun.hmac.generate(sigDateRegionServiceReq, signValue, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature; + if (acl) |acl_value| { + if (encoded_session_token) |token| { + break :brk try std.fmt.allocPrint( + bun.default_allocator, + "{s}://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", + .{ protocol, host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, + ); + } else { + break :brk try std.fmt.allocPrint( + bun.default_allocator, + "{s}://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", + .{ protocol, host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, + ); + } + } else { + if (encoded_session_token) |token| { + break :brk try std.fmt.allocPrint( + bun.default_allocator, + "{s}://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", + .{ protocol, host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, + ); + } else { + break :brk try std.fmt.allocPrint( + bun.default_allocator, + "{s}://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}", + .{ protocol, host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, + ); + } + } + } else { + var encoded_content_disposition_buffer: [255]u8 = undefined; + const encoded_content_disposition: []const u8 = if (content_disposition) |cd| encodeURIComponent(cd, &encoded_content_disposition_buffer, true) catch return error.ContentTypeIsTooLong else ""; + const canonical = 
brk_canonical: { + if (acl) |acl_value| { + if (content_disposition != null) { + if (session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, acl_value, aws_content_hash, amz_date, token, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, acl_value, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + } + } else { + if (session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, acl_value, aws_content_hash, amz_date, token, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, acl_value, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + } + } + } else { + if (content_disposition != null) { + if (session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, aws_content_hash, amz_date, token, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + } + } else { + if (session_token) |token| { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, aws_content_hash, amz_date, token, signed_headers, aws_content_hash }); + } else { + break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] 
else "", host, aws_content_hash, amz_date, signed_headers, aws_content_hash }); + } + } + } + }; + var sha_digest = std.mem.zeroes(bun.sha.SHA256.Digest); + bun.sha.SHA256.hash(canonical, &sha_digest, JSC.VirtualMachine.get().rareData().boringEngine()); + + const signValue = try std.fmt.bufPrint(&tmp_buffer, "AWS4-HMAC-SHA256\n{s}\n{s}/{s}/{s}/aws4_request\n{s}", .{ amz_date, amz_day, region, service_name, bun.fmt.bytesToHex(sha_digest[0..bun.sha.SHA256.digest], .lower) }); + + const signature = bun.hmac.generate(sigDateRegionServiceReq, signValue, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature; + + break :brk try std.fmt.allocPrint( + bun.default_allocator, + "AWS4-HMAC-SHA256 Credential={s}/{s}/{s}/{s}/aws4_request, SignedHeaders={s}, Signature={s}", + .{ this.accessKeyId, amz_day, region, service_name, signed_headers, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) }, + ); + } + }; + errdefer bun.default_allocator.free(authorization); + + if (signQuery) { + defer bun.default_allocator.free(host); + defer bun.default_allocator.free(amz_date); + + return SignResult{ + .amz_date = "", + .host = "", + .authorization = "", + .acl = signOptions.acl, + .url = authorization, + }; + } + + var result = SignResult{ + .amz_date = amz_date, + .host = host, + .authorization = authorization, + .acl = signOptions.acl, + .url = try std.fmt.allocPrint(bun.default_allocator, "{s}://{s}{s}{s}", .{ protocol, host, normalizedPath, if (search_params) |s| s else "" }), + ._headers = [_]picohttp.Header{ + .{ .name = "x-amz-content-sha256", .value = aws_content_hash }, + .{ .name = "x-amz-date", .value = amz_date }, + .{ .name = "Host", .value = host }, + .{ .name = "Authorization", .value = authorization[0..] }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + .{ .name = "", .value = "" }, + }, + ._headers_len = 4, + }; + + if (acl) |acl_value| { + result._headers[result._headers_len] = .{ .name = "x-amz-acl", .value = acl_value }; + result._headers_len += 1; + } + + if (session_token) |token| { + const session_token_value = bun.default_allocator.dupe(u8, token) catch bun.outOfMemory(); + result.session_token = session_token_value; + result._headers[result._headers_len] = .{ .name = "x-amz-security-token", .value = session_token_value }; + result._headers_len += 1; + } + + if (content_disposition) |cd| { + const content_disposition_value = bun.default_allocator.dupe(u8, cd) catch bun.outOfMemory(); + result.content_disposition = content_disposition_value; + result._headers[result._headers_len] = .{ .name = "Content-Disposition", .value = content_disposition_value }; + result._headers_len += 1; + } + + return result; + } +}; + +pub const S3CredentialsWithOptions = struct { + credentials: S3Credentials, + options: MultiPartUploadOptions = .{}, + acl: ?ACL = null, + /// indicates if the credentials have changed + changed_credentials: bool = false, + + _accessKeyIdSlice: ?JSC.ZigString.Slice = null, + _secretAccessKeySlice: ?JSC.ZigString.Slice = null, + _regionSlice: ?JSC.ZigString.Slice = null, + _endpointSlice: ?JSC.ZigString.Slice = null, + _bucketSlice: ?JSC.ZigString.Slice = null, + _sessionTokenSlice: ?JSC.ZigString.Slice = null, + + pub fn deinit(this: *@This()) void { + if (this._accessKeyIdSlice) |slice| slice.deinit(); + if (this._secretAccessKeySlice) |slice| slice.deinit(); + if (this._regionSlice) |slice| slice.deinit(); + if (this._endpointSlice) |slice| slice.deinit(); + if (this._bucketSlice) |slice| slice.deinit(); + if 
(this._sessionTokenSlice) |slice| slice.deinit(); + } +}; diff --git a/src/s3/download_stream.zig b/src/s3/download_stream.zig new file mode 100644 index 0000000000..9adfbb65af --- /dev/null +++ b/src/s3/download_stream.zig @@ -0,0 +1,242 @@ +const std = @import("std"); +const bun = @import("root").bun; +const JSC = bun.JSC; +const picohttp = JSC.WebCore.picohttp; +const S3Error = @import("./error.zig").S3Error; +const S3Credentials = @import("./credentials.zig").S3Credentials; +const SignResult = S3Credentials.SignResult; +const strings = bun.strings; +const log = bun.Output.scoped(.S3, true); +pub const S3HttpDownloadStreamingTask = struct { + http: bun.http.AsyncHTTP, + vm: *JSC.VirtualMachine, + sign_result: SignResult, + headers: JSC.WebCore.Headers, + callback_context: *anyopaque, + // the callback takes ownership of the chunk + callback: *const fn (chunk: bun.MutableString, has_more: bool, err: ?S3Error, *anyopaque) void, + has_schedule_callback: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), + signal_store: bun.http.Signals.Store = .{}, + signals: bun.http.Signals = .{}, + poll_ref: bun.Async.KeepAlive = bun.Async.KeepAlive.init(), + + response_buffer: bun.MutableString = .{ + .allocator = bun.default_allocator, + .list = .{ + .items = &.{}, + .capacity = 0, + }, + }, + mutex: bun.Lock = .{}, + reported_response_buffer: bun.MutableString = .{ + .allocator = bun.default_allocator, + .list = .{ + .items = &.{}, + .capacity = 0, + }, + }, + state: State.AtomicType = State.AtomicType.init(@bitCast(State{})), + + concurrent_task: JSC.ConcurrentTask = .{}, + range: ?[]const u8, + proxy_url: []const u8, + + pub usingnamespace bun.New(@This()); + pub const State = packed struct(u64) { + pub const AtomicType = std.atomic.Value(u64); + status_code: u32 = 0, + request_error: u16 = 0, + has_more: bool = true, + _reserved: u15 = 0, + }; + + pub fn getState(this: @This()) State { + const state: State = @bitCast(this.state.load(.acquire)); + return state; + } + + pub fn setState(this: *@This(), state: State) void { + this.state.store(@bitCast(state), .monotonic); + } + + pub fn deinit(this: *@This()) void { + this.poll_ref.unref(this.vm); + this.response_buffer.deinit(); + this.reported_response_buffer.deinit(); + this.headers.deinit(); + this.sign_result.deinit(); + this.http.clearData(); + if (this.range) |range| { + bun.default_allocator.free(range); + } + if (this.proxy_url.len > 0) { + bun.default_allocator.free(this.proxy_url); + } + + this.destroy(); + } + + fn reportProgress(this: *@This(), state: State) void { + const has_more = state.has_more; + var err: ?S3Error = null; + var failed = false; + + const chunk = brk: { + switch (state.status_code) { + 200, 204, 206 => { + failed = state.request_error != 0; + }, + else => { + failed = true; + }, + } + if (failed) { + if (!has_more) { + var has_body_code = false; + var has_body_message = false; + + var code: []const u8 = "UnknownError"; + var message: []const u8 = "an unexpected error has occurred"; + if (state.request_error != 0) { + const req_err = @errorFromInt(state.request_error); + code = @errorName(req_err); + has_body_code = true; + } else { + const bytes = this.reported_response_buffer.list.items; + if (bytes.len > 0) { + message = bytes[0..]; + + if (strings.indexOf(bytes, "<Code>")) |start| { + if (strings.indexOf(bytes, "</Code>")) |end| { + code = bytes[start + "<Code>".len .. end]; + has_body_code = true; + } + } + if (strings.indexOf(bytes, "<Message>")) |start| { + if (strings.indexOf(bytes, "</Message>")) |end| { + message = bytes[start + "<Message>".len .. 
end]; + has_body_message = true; + } + } + } + } + + err = .{ + .code = code, + .message = message, + }; + } + break :brk bun.MutableString{ .allocator = bun.default_allocator, .list = .{} }; + } else { + const buffer = this.reported_response_buffer; + break :brk buffer; + } + }; + log("reportProgress failed: {} has_more: {} len: {d}", .{ failed, has_more, chunk.list.items.len }); + if (failed) { + if (!has_more) { + this.callback(chunk, false, err, this.callback_context); + } + } else { + // dont report empty chunks if we have more data to read + if (!has_more or chunk.list.items.len > 0) { + this.callback(chunk, has_more, null, this.callback_context); + this.reported_response_buffer.reset(); + } + } + } + /// this is the task callback from the last task result and is always in the main thread + pub fn onResponse(this: *@This()) void { + // lets lock and unlock the reported response buffer + this.mutex.lock(); + // the state is atomic, let's load it once + const state = this.getState(); + const has_more = state.has_more; + defer { + // always unlock when done + this.mutex.unlock(); + // if we dont have more we should deinit at the end of the function + if (!has_more) this.deinit(); + } + + // there is no reason to set has_schedule_callback to true if we dont have more data to read + if (has_more) this.has_schedule_callback.store(false, .monotonic); + this.reportProgress(state); + } + + /// this function is only called from the http callback in the HTTPThread and returns true if we should wait until we are done buffering the response body to report + /// should only be called when already locked + fn updateState(this: *@This(), async_http: *bun.http.AsyncHTTP, result: bun.http.HTTPClientResult, state: *State) bool { + const is_done = !result.has_more; + // if we got an error or failed, wait until we are done buffering the response body to report + var wait_until_done = false; + { + state.has_more = !is_done; + + state.request_error = if (result.fail) |err| @intFromError(err) else 0; + if (state.status_code == 0) { + if (result.certificate_info) |*certificate| { + certificate.deinit(bun.default_allocator); + } + if (result.metadata) |m| { + var metadata = m; + state.status_code = metadata.response.status_code; + metadata.deinit(bun.default_allocator); + } + } + switch (state.status_code) { + 200, 204, 206 => wait_until_done = state.request_error != 0, + else => wait_until_done = true, + } + // store the new state + this.setState(state.*); + this.http = async_http.*; + } + return wait_until_done; + } + + /// this function is only called from the http callback in the HTTPThread and returns true if we should enqueue another task + fn processHttpCallback(this: *@This(), async_http: *bun.http.AsyncHTTP, result: bun.http.HTTPClientResult) bool { + // lets lock and unlock to be safe, so we know the state is not in the middle of a callback while locked + this.mutex.lock(); + defer this.mutex.unlock(); + + // remember the state is atomic: load it once, and store it again + var state = this.getState(); + // the old state should have more, otherwise it's an http.zig bug + bun.assert(state.has_more); + const is_done = !result.has_more; + const wait_until_done = updateState(this, async_http, result, &state); + const should_enqueue = !wait_until_done or is_done; + log("state err: {} status_code: {} has_more: {} should_enqueue: {}", .{ state.request_error, state.status_code, state.has_more, should_enqueue }); + + if (should_enqueue) { + if (result.body) |body| { + this.response_buffer = body.*; + if (body.list.items.len > 
0) { + _ = this.reported_response_buffer.write(body.list.items) catch bun.outOfMemory(); + } + this.response_buffer.reset(); + if (this.reported_response_buffer.list.items.len == 0 and !is_done) { + return false; + } + } else if (!is_done) { + return false; + } + if (this.has_schedule_callback.cmpxchgStrong(false, true, .acquire, .monotonic)) |has_schedule_callback| { + if (has_schedule_callback) { + return false; + } + } + return true; + } + return false; + } + /// this is the callback from the http.zig AsyncHTTP; it is always called from the HTTPThread + pub fn httpCallback(this: *@This(), async_http: *bun.http.AsyncHTTP, result: bun.http.HTTPClientResult) void { + if (processHttpCallback(this, async_http, result)) { + // we are always unlocked here and it's safe to enqueue + this.vm.eventLoop().enqueueTaskConcurrent(this.concurrent_task.from(this, .manual_deinit)); + } + } +}; diff --git a/src/s3/error.zig b/src/s3/error.zig new file mode 100644 index 0000000000..fd8f732e84 --- /dev/null +++ b/src/s3/error.zig @@ -0,0 +1,86 @@ +const bun = @import("root").bun; +const JSC = bun.JSC; +pub const ErrorCodeAndMessage = struct { + code: []const u8, + message: []const u8, +}; +pub fn getSignErrorMessage(comptime err: anyerror) [:0]const u8 { + return switch (err) { + error.MissingCredentials => return "Missing S3 credentials. 'accessKeyId', 'secretAccessKey', 'bucket', and 'endpoint' are required", + error.InvalidMethod => return "Method must be GET, PUT, DELETE or HEAD when using s3:// protocol", + error.InvalidPath => return "Invalid S3 bucket, key combination", + error.InvalidEndpoint => return "Invalid S3 endpoint", + error.InvalidSessionToken => return "Invalid session token", + else => return "Failed to retrieve S3 content. Are the credentials correct?", + }; +} +pub fn getJSSignError(err: anyerror, globalThis: *JSC.JSGlobalObject) JSC.JSValue { + return switch (err) { + error.MissingCredentials => return globalThis.ERR_S3_MISSING_CREDENTIALS(getSignErrorMessage(error.MissingCredentials), .{}).toJS(), + error.InvalidMethod => return globalThis.ERR_S3_INVALID_METHOD(getSignErrorMessage(error.InvalidMethod), .{}).toJS(), + error.InvalidPath => return globalThis.ERR_S3_INVALID_PATH(getSignErrorMessage(error.InvalidPath), .{}).toJS(), + error.InvalidEndpoint => return globalThis.ERR_S3_INVALID_ENDPOINT(getSignErrorMessage(error.InvalidEndpoint), .{}).toJS(), + error.InvalidSessionToken => return globalThis.ERR_S3_INVALID_SESSION_TOKEN(getSignErrorMessage(error.InvalidSessionToken), .{}).toJS(), + else => return globalThis.ERR_S3_INVALID_SIGNATURE(getSignErrorMessage(error.SignError), .{}).toJS(), + }; +} +pub fn throwSignError(err: anyerror, globalThis: *JSC.JSGlobalObject) bun.JSError { + return switch (err) { + error.MissingCredentials => globalThis.ERR_S3_MISSING_CREDENTIALS(getSignErrorMessage(error.MissingCredentials), .{}).throw(), + error.InvalidMethod => globalThis.ERR_S3_INVALID_METHOD(getSignErrorMessage(error.InvalidMethod), .{}).throw(), + error.InvalidPath => globalThis.ERR_S3_INVALID_PATH(getSignErrorMessage(error.InvalidPath), .{}).throw(), + error.InvalidEndpoint => globalThis.ERR_S3_INVALID_ENDPOINT(getSignErrorMessage(error.InvalidEndpoint), .{}).throw(), + error.InvalidSessionToken => globalThis.ERR_S3_INVALID_SESSION_TOKEN(getSignErrorMessage(error.InvalidSessionToken), .{}).throw(), + else => globalThis.ERR_S3_INVALID_SIGNATURE(getSignErrorMessage(error.SignError), .{}).throw(), + }; +} +pub fn getSignErrorCodeAndMessage(err: anyerror) ErrorCodeAndMessage { + // keep error 
codes consistent for internal errors + return switch (err) { + error.MissingCredentials => .{ .code = "ERR_S3_MISSING_CREDENTIALS", .message = getSignErrorMessage(error.MissingCredentials) }, + error.InvalidMethod => .{ .code = "ERR_S3_INVALID_METHOD", .message = getSignErrorMessage(error.InvalidMethod) }, + error.InvalidPath => .{ .code = "ERR_S3_INVALID_PATH", .message = getSignErrorMessage(error.InvalidPath) }, + error.InvalidEndpoint => .{ .code = "ERR_S3_INVALID_ENDPOINT", .message = getSignErrorMessage(error.InvalidEndpoint) }, + error.InvalidSessionToken => .{ .code = "ERR_S3_INVALID_SESSION_TOKEN", .message = getSignErrorMessage(error.InvalidSessionToken) }, + else => .{ .code = "ERR_S3_INVALID_SIGNATURE", .message = getSignErrorMessage(error.SignError) }, + }; +} + +const JSS3Error = extern struct { + code: bun.String = bun.String.empty, + message: bun.String = bun.String.empty, + path: bun.String = bun.String.empty, + + pub fn init(code: []const u8, message: []const u8, path: ?[]const u8) @This() { + return .{ + // let's make sure we can reuse code and message and keep it service independent + .code = bun.String.createAtomIfPossible(code), + .message = bun.String.createAtomIfPossible(message), + .path = if (path) |p| bun.String.init(p) else bun.String.empty, + }; + } + + pub fn deinit(this: *const @This()) void { + this.path.deref(); + this.code.deref(); + this.message.deref(); + } + + pub fn toErrorInstance(this: *const @This(), global: *JSC.JSGlobalObject) JSC.JSValue { + defer this.deinit(); + + return S3Error__toErrorInstance(this, global); + } + extern fn S3Error__toErrorInstance(this: *const @This(), global: *JSC.JSGlobalObject) callconv(JSC.conv) JSC.JSValue; +}; + +pub const S3Error = struct { + code: []const u8, + message: []const u8, + + pub fn toJS(err: *const @This(), globalObject: *JSC.JSGlobalObject, path: ?[]const u8) JSC.JSValue { + const value = JSS3Error.init(err.code, err.message, path).toErrorInstance(globalObject); + bun.assert(!globalObject.hasException()); + return value; + } +}; diff --git a/src/s3/multipart.zig b/src/s3/multipart.zig new file mode 100644 index 0000000000..af5a37ebcb --- /dev/null +++ b/src/s3/multipart.zig @@ -0,0 +1,537 @@ +const std = @import("std"); +const bun = @import("root").bun; +const strings = bun.strings; +const S3Credentials = @import("./credentials.zig").S3Credentials; +const ACL = @import("./acl.zig").ACL; +const JSC = bun.JSC; +const MultiPartUploadOptions = @import("./multipart_options.zig").MultiPartUploadOptions; +const S3SimpleRequest = @import("./simple_request.zig"); +const executeSimpleS3Request = S3SimpleRequest.executeSimpleS3Request; +const S3Error = @import("./error.zig").S3Error; + +pub const MultiPartUpload = struct { + const OneMiB: usize = MultiPartUploadOptions.OneMiB; + const MAX_SINGLE_UPLOAD_SIZE: usize = MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE; // we limit to 5 GiB + const MIN_SINGLE_UPLOAD_SIZE: usize = MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE; + const DefaultPartSize = MultiPartUploadOptions.DefaultPartSize; + const MAX_QUEUE_SIZE = MultiPartUploadOptions.MAX_QUEUE_SIZE; + const AWS = S3Credentials; + queue: std.ArrayListUnmanaged(UploadPart) = .{}, + available: bun.bit_set.IntegerBitSet(MAX_QUEUE_SIZE) = bun.bit_set.IntegerBitSet(MAX_QUEUE_SIZE).initFull(), + + currentPartNumber: u16 = 1, + ref_count: u16 = 1, + ended: bool = false, + + options: MultiPartUploadOptions = .{}, + acl: ?ACL = null, + credentials: *S3Credentials, + poll_ref: bun.Async.KeepAlive = bun.Async.KeepAlive.init(), + vm: 
*JSC.VirtualMachine, + globalThis: *JSC.JSGlobalObject, + + buffered: std.ArrayListUnmanaged(u8) = .{}, + offset: usize = 0, + + path: []const u8, + proxy: []const u8, + content_type: ?[]const u8 = null, + upload_id: []const u8 = "", + uploadid_buffer: bun.MutableString = .{ .allocator = bun.default_allocator, .list = .{} }, + + multipart_etags: std.ArrayListUnmanaged(UploadPart.UploadPartResult) = .{}, + multipart_upload_list: bun.ByteList = .{}, + + state: enum { + wait_stream_check, + not_started, + multipart_started, + multipart_completed, + singlefile_started, + finished, + } = .not_started, + + callback: *const fn (S3SimpleRequest.S3UploadResult, *anyopaque) void, + callback_context: *anyopaque, + + pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); + + const log = bun.Output.scoped(.S3MultiPartUpload, true); + + pub const UploadPart = struct { + data: []const u8, + state: enum { + pending, + started, + completed, + canceled, + }, + owns_data: bool, + partNumber: u16, // max is 10,000 + retry: u8, // auto retry, decrement until 0 and fail after this + index: u8, + ctx: *MultiPartUpload, + + pub const UploadPartResult = struct { + number: u16, + etag: []const u8, + }; + fn sortEtags(_: *MultiPartUpload, a: UploadPart.UploadPartResult, b: UploadPart.UploadPartResult) bool { + return a.number < b.number; + } + + pub fn onPartResponse(result: S3SimpleRequest.S3PartResult, this: *@This()) void { + if (this.state == .canceled or this.ctx.state == .finished) { + log("onPartResponse {} canceled", .{this.partNumber}); + if (this.owns_data) bun.default_allocator.free(this.data); + this.ctx.deref(); + return; + } + + this.state = .completed; + + switch (result) { + .failure => |err| { + if (this.retry > 0) { + log("onPartResponse {} retry", .{this.partNumber}); + this.retry -= 1; + // retry failed + this.perform(); + return; + } else { + log("onPartResponse {} failed", .{this.partNumber}); + if (this.owns_data) bun.default_allocator.free(this.data); + defer this.ctx.deref(); + return this.ctx.fail(err); + } + }, + .etag => |etag| { + log("onPartResponse {} success", .{this.partNumber}); + + if (this.owns_data) bun.default_allocator.free(this.data); + // we will need to order this + this.ctx.multipart_etags.append(bun.default_allocator, .{ + .number = this.partNumber, + .etag = bun.default_allocator.dupe(u8, etag) catch bun.outOfMemory(), + }) catch bun.outOfMemory(); + + defer this.ctx.deref(); + // mark as available + this.ctx.available.set(this.index); + // drain more + this.ctx.drainEnqueuedParts(); + }, + } + } + + fn perform(this: *@This()) void { + var params_buffer: [2048]u8 = undefined; + const search_params = std.fmt.bufPrint(&params_buffer, "?partNumber={}&uploadId={s}&x-id=UploadPart", .{ + this.partNumber, + this.ctx.upload_id, + }) catch unreachable; + executeSimpleS3Request(this.ctx.credentials, .{ + .path = this.ctx.path, + .method = .PUT, + .proxy_url = this.ctx.proxyUrl(), + .body = this.data, + .search_params = search_params, + }, .{ .part = @ptrCast(&onPartResponse) }, this); + } + pub fn start(this: *@This()) void { + if (this.state != .pending or this.ctx.state != .multipart_completed or this.ctx.state == .finished) return; + this.ctx.ref(); + this.state = .started; + this.perform(); + } + pub fn cancel(this: *@This()) void { + const state = this.state; + this.state = .canceled; + + switch (state) { + .pending => { + if (this.owns_data) bun.default_allocator.free(this.data); + }, + // if it is not pending we will free it later, or it is already freed + else => {}, + } + } + 
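+ // Part lifecycle (summary of the methods above, not normative): a part
+ // is created .pending; start() moves it to .started and perform() issues
+ // the PUT with ?partNumber={n}&uploadId={id}; onPartResponse() marks it
+ // .completed and records the returned ETag for the final commit; each
+ // failure decrements `retry`, and once it reaches zero the whole upload
+ // fails.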
}; + + fn deinit(this: *@This()) void { + log("deinit", .{}); + if (this.queue.capacity > 0) + this.queue.deinit(bun.default_allocator); + this.poll_ref.unref(this.vm); + bun.default_allocator.free(this.path); + if (this.proxy.len > 0) { + bun.default_allocator.free(this.proxy); + } + if (this.content_type) |ct| { + if (ct.len > 0) { + bun.default_allocator.free(ct); + } + } + this.credentials.deref(); + this.uploadid_buffer.deinit(); + for (this.multipart_etags.items) |tag| { + bun.default_allocator.free(tag.etag); + } + if (this.multipart_etags.capacity > 0) + this.multipart_etags.deinit(bun.default_allocator); + if (this.multipart_upload_list.cap > 0) + this.multipart_upload_list.deinitWithAllocator(bun.default_allocator); + this.destroy(); + } + + pub fn singleSendUploadResponse(result: S3SimpleRequest.S3UploadResult, this: *@This()) void { + defer this.deref(); + if (this.state == .finished) return; + switch (result) { + .failure => |err| { + if (this.options.retry > 0) { + log("singleSendUploadResponse {} retry", .{this.options.retry}); + this.options.retry -= 1; + this.ref(); + // retry failed + executeSimpleS3Request(this.credentials, .{ + .path = this.path, + .method = .PUT, + .proxy_url = this.proxyUrl(), + .body = this.buffered.items, + .content_type = this.content_type, + .acl = this.acl, + }, .{ .upload = @ptrCast(&singleSendUploadResponse) }, this); + + return; + } else { + log("singleSendUploadResponse failed", .{}); + return this.fail(err); + } + }, + .success => { + log("singleSendUploadResponse success", .{}); + this.done(); + }, + } + } + + fn getCreatePart(this: *@This(), chunk: []const u8, owns_data: bool) ?*UploadPart { + const index = this.available.findFirstSet() orelse { + // this means that the queue is full and we cannot flush it + return null; + }; + + if (index >= this.options.queueSize) { + // oops, too much concurrency; wait for a slot to free up + return null; + } + this.available.unset(index); + defer this.currentPartNumber += 1; + + if (this.queue.items.len <= index) { + this.queue.append(bun.default_allocator, .{ + .data = chunk, + .partNumber = this.currentPartNumber, + .owns_data = owns_data, + .ctx = this, + .index = @truncate(index), + .retry = this.options.retry, + .state = .pending, + }) catch bun.outOfMemory(); + return &this.queue.items[index]; + } + this.queue.items[index] = .{ + .data = chunk, + .partNumber = this.currentPartNumber, + .owns_data = owns_data, + .ctx = this, + .index = @truncate(index), + .retry = this.options.retry, + .state = .pending, + }; + return &this.queue.items[index]; + } + + fn drainEnqueuedParts(this: *@This()) void { + if (this.state == .finished) { + return; + } + // start pending parts, or turn buffered data into tasks + if (this.state == .multipart_completed) { + for (this.queue.items) |*part| { + if (part.state == .pending) { + // lets start the part request + part.start(); + } + } + } + const partSize = this.partSizeInBytes(); + if (this.ended or this.buffered.items.len >= partSize) { + this.processMultiPart(partSize); + } + + if (this.ended and this.available.mask == std.bit_set.IntegerBitSet(MAX_QUEUE_SIZE).initFull().mask) { + // we are done + this.done(); + } + } + pub fn fail(this: *@This(), _err: S3Error) void { + log("fail {s}:{s}", .{ _err.code, _err.message }); + this.ended = true; + for (this.queue.items) |*task| { + task.cancel(); + } + if (this.state != .finished) { + const old_state = this.state; + this.state = .finished; + this.callback(.{ .failure = _err }, this.callback_context); + + if (old_state == 
.multipart_completed) { + // will deref after rollback + this.rollbackMultiPartRequest(); + } else { + this.deref(); + } + } + } + + fn done(this: *@This()) void { + if (this.state == .multipart_completed) { + this.state = .finished; + + std.sort.block(UploadPart.UploadPartResult, this.multipart_etags.items, this, UploadPart.sortEtags); + this.multipart_upload_list.append(bun.default_allocator, "<CompleteMultipartUpload>") catch bun.outOfMemory(); + for (this.multipart_etags.items) |tag| { + this.multipart_upload_list.appendFmt(bun.default_allocator, "<Part><PartNumber>{}</PartNumber><ETag>{s}</ETag></Part>", .{ tag.number, tag.etag }) catch bun.outOfMemory(); + + bun.default_allocator.free(tag.etag); + } + this.multipart_etags.deinit(bun.default_allocator); + this.multipart_etags = .{}; + this.multipart_upload_list.append(bun.default_allocator, "</CompleteMultipartUpload>") catch bun.outOfMemory(); + // will deref and end after commit + this.commitMultiPartRequest(); + } else { + this.callback(.{ .success = {} }, this.callback_context); + this.state = .finished; + this.deref(); + } + } + pub fn startMultiPartRequestResult(result: S3SimpleRequest.S3DownloadResult, this: *@This()) void { + defer this.deref(); + if (this.state == .finished) return; + switch (result) { + .failure => |err| { + log("startMultiPartRequestResult {s} failed {s}: {s}", .{ this.path, err.code, err.message }); + this.fail(err); + }, + .success => |response| { + const slice = response.body.list.items; + this.uploadid_buffer = result.success.body; + + if (strings.indexOf(slice, "<UploadId>")) |start| { + if (strings.indexOf(slice, "</UploadId>")) |end| { + this.upload_id = slice[start + 10 .. end]; + } + } + if (this.upload_id.len == 0) { + // Unknown type of response error from AWS + log("startMultiPartRequestResult {s} failed invalid id", .{this.path}); + this.fail(.{ + .code = "UnknownError", + .message = "Failed to initiate multipart upload", + }); + return; + } + log("startMultiPartRequestResult {s} success id: {s}", .{ this.path, this.upload_id }); + this.state = .multipart_completed; + this.drainEnqueuedParts(); + }, + // this is "unreachable" but we handle it in case AWS returns 404 + .not_found => this.fail(.{ + .code = "UnknownError", + .message = "Failed to initiate multipart upload", + }), + } + } + + pub fn onCommitMultiPartRequest(result: S3SimpleRequest.S3CommitResult, this: *@This()) void { + log("onCommitMultiPartRequest {s}", .{this.upload_id}); + + switch (result) { + .failure => |err| { + if (this.options.retry > 0) { + this.options.retry -= 1; + // retry commit + this.commitMultiPartRequest(); + return; + } + this.callback(.{ .failure = err }, this.callback_context); + this.deref(); + }, + .success => { + this.callback(.{ .success = {} }, this.callback_context); + this.state = .finished; + this.deref(); + }, + } + } + + pub fn onRollbackMultiPartRequest(result: S3SimpleRequest.S3UploadResult, this: *@This()) void { + log("onRollbackMultiPartRequest {s}", .{this.upload_id}); + switch (result) { + .failure => { + if (this.options.retry > 0) { + this.options.retry -= 1; + // retry rollback + this.rollbackMultiPartRequest(); + return; + } + this.deref(); + }, + .success => { + this.deref(); + }, + } + } + + fn commitMultiPartRequest(this: *@This()) void { + log("commitMultiPartRequest {s}", .{this.upload_id}); + var params_buffer: [2048]u8 = undefined; + const searchParams = std.fmt.bufPrint(&params_buffer, "?uploadId={s}", .{ + this.upload_id, + }) catch unreachable; + + executeSimpleS3Request(this.credentials, .{ + .path = this.path, + .method = .POST, + .proxy_url = this.proxyUrl(), + .body = 
this.multipart_upload_list.slice(), + .search_params = searchParams, + }, .{ .commit = @ptrCast(&onCommitMultiPartRequest) }, this); + } + fn rollbackMultiPartRequest(this: *@This()) void { + log("rollbackMultiPartRequest {s}", .{this.upload_id}); + var params_buffer: [2048]u8 = undefined; + const search_params = std.fmt.bufPrint(&params_buffer, "?uploadId={s}", .{ + this.upload_id, + }) catch unreachable; + + executeSimpleS3Request(this.credentials, .{ + .path = this.path, + .method = .DELETE, + .proxy_url = this.proxyUrl(), + .body = "", + .search_params = search_params, + }, .{ .upload = @ptrCast(&onRollbackMultiPartRequest) }, this); + } + fn enqueuePart(this: *@This(), chunk: []const u8, owns_data: bool) bool { + const part = this.getCreatePart(chunk, owns_data) orelse return false; + + if (this.state == .not_started) { + // will auto start later + this.state = .multipart_started; + this.ref(); + executeSimpleS3Request(this.credentials, .{ + .path = this.path, + .method = .POST, + .proxy_url = this.proxyUrl(), + .body = "", + .search_params = "?uploads=", + .content_type = this.content_type, + .acl = this.acl, + }, .{ .download = @ptrCast(&startMultiPartRequestResult) }, this); + } else if (this.state == .multipart_completed) { + part.start(); + } + return true; + } + + fn processMultiPart(this: *@This(), part_size: usize) void { + // we need to split the buffer into multiple parts because of its size + var buffer = this.buffered.items[this.offset..]; + var queue_full = false; + defer if (!this.ended and queue_full == false) { + this.buffered = .{}; + this.offset = 0; + }; + + while (buffer.len > 0) { + const len = @min(part_size, buffer.len); + const slice = buffer[0..len]; + buffer = buffer[len..]; + // it's one big buffer, so we free it after we are done with everything; the parts don't own the data + if (this.enqueuePart(slice, this.ended)) { + this.offset += len; + } else { + queue_full = true; + break; + } + } + } + + pub fn proxyUrl(this: *@This()) ?[]const u8 { + return this.proxy; + } + fn processBuffered(this: *@This(), part_size: usize) void { + if (this.ended and this.buffered.items.len < this.partSizeInBytes() and this.state == .not_started) { + log("processBuffered {s} singlefile_started", .{this.path}); + this.state = .singlefile_started; + this.ref(); + // we can do it in a single request + executeSimpleS3Request(this.credentials, .{ + .path = this.path, + .method = .PUT, + .proxy_url = this.proxyUrl(), + .body = this.buffered.items, + .content_type = this.content_type, + .acl = this.acl, + }, .{ .upload = @ptrCast(&singleSendUploadResponse) }, this); + } else { + // we need to split + this.processMultiPart(part_size); + } + } + + pub fn partSizeInBytes(this: *@This()) usize { + return this.options.partSize; + } + + pub fn continueStream(this: *@This()) void { + if (this.state == .wait_stream_check) { + this.state = .not_started; + if (this.ended) { + this.processBuffered(this.partSizeInBytes()); + } + } + } + + pub fn sendRequestData(this: *@This(), chunk: []const u8, is_last: bool) void { + if (this.ended) return; + if (this.state == .wait_stream_check and chunk.len == 0 and is_last) { + // we do this because the stream will close if the file doesn't exist, and we don't want to send an empty part in that case + this.ended = true; + return; + } + if (is_last) { + this.ended = true; + if (chunk.len > 0) { + this.buffered.appendSlice(bun.default_allocator, chunk) catch bun.outOfMemory(); + } + this.processBuffered(this.partSizeInBytes()); + } else { + // we still have more data and received an empty chunk; nothing to do here + if 
(chunk.len == 0) return; + this.buffered.appendSlice(bun.default_allocator, chunk) catch bun.outOfMemory(); + const partSize = this.partSizeInBytes(); + if (this.buffered.items.len >= partSize) { + // we have enough data; send the part + this.processBuffered(partSize); + return; + } + + // wait for more + } + } +}; diff --git a/src/s3/multipart_options.zig b/src/s3/multipart_options.zig new file mode 100644 index 0000000000..e84aa59b6b --- /dev/null +++ b/src/s3/multipart_options.zig @@ -0,0 +1,22 @@ +pub const MultiPartUploadOptions = struct { + pub const OneMiB: usize = 1048576; + pub const MAX_SINGLE_UPLOAD_SIZE: usize = 5120 * OneMiB; // we limit to 5 GiB + pub const MIN_SINGLE_UPLOAD_SIZE: usize = 5 * OneMiB; + + pub const DefaultPartSize = MIN_SINGLE_UPLOAD_SIZE; + pub const MAX_QUEUE_SIZE = 64; // more than this makes no sense; we use fetch, so anything greater would still be capped at 64 + + /// more than 255 makes no sense; the http thread cannot handle more than that + queueSize: u8 = 5, + /// in the s3 client sdk they set it in bytes but the min is still 5 MiB + /// var params = {Bucket: 'bucket', Key: 'key', Body: stream}; + /// var options = {partSize: 10 * 1024 * 1024, queueSize: 1}; + /// s3.upload(params, options, function(err, data) { + /// console.log(err, data); + /// }); + /// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property + /// Here the value is in bytes; the min is 5 MiB (MIN_SINGLE_UPLOAD_SIZE) and the max 5 GiB (MAX_SINGLE_UPLOAD_SIZE) + partSize: u64 = DefaultPartSize, + /// default is 3, max 255 + retry: u8 = 3, +}; diff --git a/src/s3/simple_request.zig b/src/s3/simple_request.zig new file mode 100644 index 0000000000..d9a6891b7b --- /dev/null +++ b/src/s3/simple_request.zig @@ -0,0 +1,410 @@ +const std = @import("std"); +const bun = @import("root").bun; +const JSC = bun.JSC; +const strings = bun.strings; +const SignResult = @import("./credentials.zig").S3Credentials.SignResult; +const S3Error = @import("./error.zig").S3Error; +const getSignErrorCodeAndMessage = @import("./error.zig").getSignErrorCodeAndMessage; +const S3Credentials = @import("./credentials.zig").S3Credentials; +const picohttp = bun.picohttp; +const ACL = @import("./acl.zig").ACL; +pub const S3StatResult = union(enum) { + success: struct { + size: usize = 0, + /// etag is not owned and needs to be copied if used after this callback + etag: []const u8 = "", + /// format: Mon, 06 Jan 2025 22:40:57 GMT; lastModified is not owned and needs to be copied if used after this callback + lastModified: []const u8 = "", + /// format: text/plain; contentType is not owned and needs to be copied if used after this callback + contentType: []const u8 = "", + }, + not_found: S3Error, + + /// the failure error is not owned and needs to be copied if used after this callback + failure: S3Error, +}; +pub const S3DownloadResult = union(enum) { + success: struct { + /// etag is not owned and needs to be copied if used after this callback + etag: []const u8 = "", + /// body is owned and doesn't need to be copied, but don't forget to free it + body: bun.MutableString, + }, + not_found: S3Error, + /// the failure error is not owned and needs to be copied if used after this callback + failure: S3Error, +}; +pub const S3UploadResult = union(enum) { + success: void, + /// the failure error is not owned and needs to be copied if used after this callback + failure: S3Error, +}; +pub const S3DeleteResult = union(enum) { + success: void, + not_found: S3Error, + + /// the failure error is not owned and needs to be copied if used after this callback + failure: S3Error, +}; +// 
the commit result also fails with status 200 if the body contains an Error +pub const S3CommitResult = union(enum) { + success: void, + /// the failure error is not owned and needs to be copied if used after this callback + failure: S3Error, +}; +// a part result also fails with status 200 if the body contains an Error +pub const S3PartResult = union(enum) { + etag: []const u8, + /// the failure error is not owned and needs to be copied if used after this callback + failure: S3Error, +}; + +pub const S3HttpSimpleTask = struct { + http: bun.http.AsyncHTTP, + vm: *JSC.VirtualMachine, + sign_result: SignResult, + headers: JSC.WebCore.Headers, + callback_context: *anyopaque, + callback: Callback, + response_buffer: bun.MutableString = .{ + .allocator = bun.default_allocator, + .list = .{ + .items = &.{}, + .capacity = 0, + }, + }, + result: bun.http.HTTPClientResult = .{}, + concurrent_task: JSC.ConcurrentTask = .{}, + range: ?[]const u8, + poll_ref: bun.Async.KeepAlive = bun.Async.KeepAlive.init(), + + usingnamespace bun.New(@This()); + pub const Callback = union(enum) { + stat: *const fn (S3StatResult, *anyopaque) void, + download: *const fn (S3DownloadResult, *anyopaque) void, + upload: *const fn (S3UploadResult, *anyopaque) void, + delete: *const fn (S3DeleteResult, *anyopaque) void, + commit: *const fn (S3CommitResult, *anyopaque) void, + part: *const fn (S3PartResult, *anyopaque) void, + + pub fn fail(this: @This(), code: []const u8, message: []const u8, context: *anyopaque) void { + switch (this) { + inline .upload, + .download, + .stat, + .delete, + .commit, + .part, + => |callback| callback(.{ + .failure = .{ + .code = code, + .message = message, + }, + }, context), + } + } + pub fn notFound(this: @This(), code: []const u8, message: []const u8, context: *anyopaque) void { + switch (this) { + inline .download, + .stat, + .delete, + => |callback| callback(.{ + .not_found = .{ + .code = code, + .message = message, + }, + }, context), + else => this.fail(code, message, context), + } + } + }; + pub fn deinit(this: *@This()) void { + if (this.result.certificate_info) |*certificate| { + certificate.deinit(bun.default_allocator); + } + this.poll_ref.unref(this.vm); + this.response_buffer.deinit(); + this.headers.deinit(); + this.sign_result.deinit(); + this.http.clearData(); + if (this.range) |range| { + bun.default_allocator.free(range); + } + if (this.result.metadata) |*metadata| { + metadata.deinit(bun.default_allocator); + } + this.destroy(); + } + + const ErrorType = enum { + not_found, + failure, + }; + fn errorWithBody(this: @This(), comptime error_type: ErrorType) void { + var code: []const u8 = "UnknownError"; + var message: []const u8 = "an unexpected error has occurred"; + var has_error_code = false; + if (this.result.fail) |err| { + code = @errorName(err); + has_error_code = true; + } else if (this.result.body) |body| { + const bytes = body.list.items; + if (bytes.len > 0) { + message = bytes[0..]; + if (strings.indexOf(bytes, "<Code>")) |start| { + if (strings.indexOf(bytes, "</Code>")) |end| { + code = bytes[start + "<Code>".len .. end]; + has_error_code = true; + } + } + if (strings.indexOf(bytes, "<Message>")) |start| { + if (strings.indexOf(bytes, "</Message>")) |end| { + message = bytes[start + "<Message>".len .. 
end]; + } + } + } + } + + if (error_type == .not_found) { + if (!has_error_code) { + code = "NoSuchKey"; + message = "The specified key does not exist."; + } + this.callback.notFound(code, message, this.callback_context); + } else { + this.callback.fail(code, message, this.callback_context); + } + } + + fn failIfContainsError(this: *@This(), status: u32) bool { + var code: []const u8 = "UnknownError"; + var message: []const u8 = "an unexpected error has occurred"; + + if (this.result.fail) |err| { + code = @errorName(err); + } else if (this.result.body) |body| { + const bytes = body.list.items; + var has_error = false; + if (bytes.len > 0) { + message = bytes[0..]; + if (strings.indexOf(bytes, "<Error>") != null) { + has_error = true; + if (strings.indexOf(bytes, "<Code>")) |start| { + if (strings.indexOf(bytes, "</Code>")) |end| { + code = bytes[start + "<Code>".len .. end]; + } + } + if (strings.indexOf(bytes, "<Message>")) |start| { + if (strings.indexOf(bytes, "</Message>")) |end| { + message = bytes[start + "<Message>".len .. end]; + } + } + } + } + if (!has_error and (status == 200 or status == 206)) { + return false; + } + } else if (status == 200 or status == 206) { + return false; + } + this.callback.fail(code, message, this.callback_context); + return true; + } + /// this is the task callback from the last task result and is always in the main thread + pub fn onResponse(this: *@This()) void { + defer this.deinit(); + if (!this.result.isSuccess()) { + this.errorWithBody(.failure); + return; + } + bun.assert(this.result.metadata != null); + const response = this.result.metadata.?.response; + switch (this.callback) { + .stat => |callback| { + switch (response.status_code) { + 200 => { + callback(.{ + .success = .{ + .etag = response.headers.get("etag") orelse "", + .lastModified = response.headers.get("last-modified") orelse "", + .contentType = response.headers.get("content-type") orelse "", + .size = if (response.headers.get("content-length")) |content_len| (std.fmt.parseInt(usize, content_len, 10) catch 0) else 0, + }, + }, this.callback_context); + }, + 404 => { + this.errorWithBody(.not_found); + }, + else => { + this.errorWithBody(.failure); + }, + } + }, + .delete => |callback| { + switch (response.status_code) { + 200, 204 => { + callback(.{ .success = {} }, this.callback_context); + }, + 404 => { + this.errorWithBody(.not_found); + }, + else => { + this.errorWithBody(.failure); + }, + } + }, + .upload => |callback| { + switch (response.status_code) { + 200 => { + callback(.{ .success = {} }, this.callback_context); + }, + else => { + this.errorWithBody(.failure); + }, + } + }, + .download => |callback| { + switch (response.status_code) { + 200, 204, 206 => { + const body = this.response_buffer; + this.response_buffer = .{ + .allocator = bun.default_allocator, + .list = .{ + .items = &.{}, + .capacity = 0, + }, + }; + callback(.{ + .success = .{ + .etag = response.headers.get("etag") orelse "", + .body = body, + }, + }, this.callback_context); + }, + 404 => { + this.errorWithBody(.not_found); + }, + else => { + // error + this.errorWithBody(.failure); + }, + } + }, + .commit => |callback| { + // commit multipart upload can fail with status 200 + if (!this.failIfContainsError(response.status_code)) { + callback(.{ .success = {} }, this.callback_context); + } + }, + .part => |callback| { + if (!this.failIfContainsError(response.status_code)) { + if (response.headers.get("etag")) |etag| { + callback(.{ .etag = etag }, this.callback_context); + } else { + this.errorWithBody(.failure); + } + } + }, + } + } + + /// this is the callback 
from the http.zig AsyncHTTP is always called from the HTTPThread + pub fn httpCallback(this: *@This(), async_http: *bun.http.AsyncHTTP, result: bun.http.HTTPClientResult) void { + const is_done = !result.has_more; + this.result = result; + this.http = async_http.*; + this.response_buffer = async_http.response_buffer.*; + if (is_done) { + this.vm.eventLoop().enqueueTaskConcurrent(this.concurrent_task.from(this, .manual_deinit)); + } + } +}; + +pub const S3SimpleRequestOptions = struct { + // signing options + path: []const u8, + method: bun.http.Method, + search_params: ?[]const u8 = null, + content_type: ?[]const u8 = null, + content_disposition: ?[]const u8 = null, + + // http request options + body: []const u8, + proxy_url: ?[]const u8 = null, + range: ?[]const u8 = null, + acl: ?ACL = null, +}; + +pub fn executeSimpleS3Request( + this: *const S3Credentials, + options: S3SimpleRequestOptions, + callback: S3HttpSimpleTask.Callback, + callback_context: *anyopaque, +) void { + var result = this.signRequest(.{ + .path = options.path, + .method = options.method, + .search_params = options.search_params, + .content_disposition = options.content_disposition, + .acl = options.acl, + }, null) catch |sign_err| { + if (options.range) |range_| bun.default_allocator.free(range_); + const error_code_and_message = getSignErrorCodeAndMessage(sign_err); + callback.fail(error_code_and_message.code, error_code_and_message.message, callback_context); + return; + }; + + const headers = brk: { + var header_buffer: [10]picohttp.Header = undefined; + if (options.range) |range_| { + const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ }); + break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); + } else { + if (options.content_type) |content_type| { + if (content_type.len > 0) { + const _headers = result.mixWithHeader(&header_buffer, .{ .name = "Content-Type", .value = content_type }); + break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); + } + } + + break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); + } + }; + const task = S3HttpSimpleTask.new(.{ + .http = undefined, + .sign_result = result, + .callback_context = callback_context, + .callback = callback, + .range = options.range, + .headers = headers, + .vm = JSC.VirtualMachine.get(), + }); + task.poll_ref.ref(task.vm); + + const url = bun.URL.parse(result.url); + const proxy = options.proxy_url orelse ""; + task.http = bun.http.AsyncHTTP.init( + bun.default_allocator, + options.method, + url, + task.headers.entries, + task.headers.buf.items, + &task.response_buffer, + options.body, + bun.http.HTTPClientResult.Callback.New( + *S3HttpSimpleTask, + S3HttpSimpleTask.httpCallback, + ).init(task), + .follow, + .{ + .http_proxy = if (proxy.len > 0) bun.URL.parse(proxy) else null, + .verbose = task.vm.getVerboseFetch(), + .reject_unauthorized = task.vm.getTLSRejectUnauthorized(), + }, + ); + // queue http request + bun.http.HTTPThread.init(&.{}); + var batch = bun.ThreadPool.Batch{}; + task.http.schedule(bun.default_allocator, &batch); + bun.http.http_thread.schedule(batch); +} diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index 58a3a915f7..4fbb9a564b 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -721,31 +721,24 @@ pub const ParsedShellScript = struct { } pub fn setEnv(this: *ParsedShellScript, globalThis: 
*JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - var env = - if (this.export_env) |*env| - brk: { - env.clearRetainingCapacity(); - break :brk env.*; - } else EnvMap.init(bun.default_allocator); - defer this.export_env = env; - const value1 = callframe.argument(0); if (!value1.isObject()) { return globalThis.throwInvalidArguments("env must be an object", .{}); } - var object_iter = JSC.JSPropertyIterator(.{ + var object_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true, }).init(globalThis, value1); defer object_iter.deinit(); + var env: EnvMap = EnvMap.init(bun.default_allocator); env.ensureTotalCapacity(object_iter.len); // If the env object does not include a $PATH, it must disable path lookup for argv[0] // PATH = ""; - while (object_iter.next()) |key| { + while (try object_iter.next()) |key| { const keyslice = key.toOwnedSlice(bun.default_allocator) catch bun.outOfMemory(); var value = object_iter.value; if (value == .undefined) continue; @@ -759,7 +752,10 @@ pub const ParsedShellScript = struct { env.insert(keyref, valueref); } - + if (this.export_env) |*previous| { + previous.deinit(); + } + this.export_env = env; return .undefined; } diff --git a/src/sql/postgres.zig b/src/sql/postgres.zig index 296ce7ee1c..c0f2bbef84 100644 --- a/src/sql/postgres.zig +++ b/src/sql/postgres.zig @@ -1398,7 +1398,9 @@ pub const PostgresSQLConnection = struct { pub fn onClose(this: *PostgresSQLConnection) void { var vm = this.globalObject.bunVM(); - defer vm.drainMicrotasks(); + const loop = vm.eventLoop(); + loop.enter(); + defer loop.exit(); this.fail("Connection closed", error.ConnectionClosed); } @@ -1987,6 +1989,8 @@ pub const PostgresSQLConnection = struct { value: Value, free_value: u8 = 0, + isIndexedColumn: u8 = 0, + index: u32 = 0, pub const Tag = enum(u8) { null = 0, @@ -2280,6 +2284,13 @@ pub const PostgresSQLConnection = struct { } } + pub const Flags = packed struct(u32) { + has_indexed_columns: bool = false, + has_named_columns: bool = false, + has_duplicate_columns: bool = false, + _: u29 = 0, + }; + pub const Putter = struct { list: []DataCell, fields: []const protocol.FieldDescription, @@ -2287,16 +2298,25 @@ pub const PostgresSQLConnection = struct { count: usize = 0, globalObject: *JSC.JSGlobalObject, - extern fn JSC__constructObjectFromDataCell(*JSC.JSGlobalObject, JSValue, JSValue, [*]DataCell, u32) JSValue; - pub fn toJS(this: *Putter, globalObject: *JSC.JSGlobalObject, array: JSValue, structure: JSValue) JSValue { - return JSC__constructObjectFromDataCell(globalObject, array, structure, this.list.ptr, @truncate(this.fields.len)); + extern fn JSC__constructObjectFromDataCell( + *JSC.JSGlobalObject, + JSValue, + JSValue, + [*]DataCell, + u32, + Flags, + ) JSValue; + + pub fn toJS(this: *Putter, globalObject: *JSC.JSGlobalObject, array: JSValue, structure: JSValue, flags: Flags) JSValue { + return JSC__constructObjectFromDataCell(globalObject, array, structure, this.list.ptr, @truncate(this.fields.len), flags); } pub fn put(this: *Putter, index: u32, optional_bytes: ?*Data) !bool { - const oid = this.fields[index].type_oid; + const field = &this.fields[index]; + const oid = field.type_oid; debug("index: {d}, oid: {d}", .{ index, oid }); - - this.list[index] = if (optional_bytes) |data| + const cell: *DataCell = &this.list[index]; + cell.* = if (optional_bytes) |data| try DataCell.fromBytes(this.binary, oid, data.slice(), this.globalObject) else DataCell{ @@ -2306,6 +2326,21 @@ pub const PostgresSQLConnection = struct { }, }; 
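+ // The values assigned below mirror protocol.ColumnIdentifier:
+ // isIndexedColumn is 0 for a named column, 1 for a numeric ("indexed")
+ // name, and 2 for a duplicate. For example (illustrative),
+ // `SELECT 1 AS "0", 2 AS "0"` would yield one indexed cell and one
+ // duplicate cell; the native JSC__constructObjectFromDataCell consumes
+ // index/isIndexedColumn to decide where each value lands on the row.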
this.count += 1; + cell.index = switch (field.name_or_index) { + // The indexed columns can be out of order. + .index => |i| i, + + else => @intCast(index), + }; + + // TODO: when duplicate and we know the result will be an object + // and not a .values() array, we can discard the data + // immediately. + cell.isIndexedColumn = switch (field.name_or_index) { + .duplicate => 2, + .index => 1, + .name => 0, + }; return true; } }; @@ -2380,6 +2415,7 @@ pub const PostgresSQLConnection = struct { .DataRow => { const request = this.current() orelse return error.ExpectedRequest; var statement = request.statement orelse return error.ExpectedStatement; + statement.checkForDuplicateFields(); var putter = DataCell.Putter{ .list = &.{}, @@ -2413,7 +2449,7 @@ pub const PostgresSQLConnection = struct { const pending_value = PostgresSQLQuery.pendingValueGetCached(request.thisValue) orelse .zero; pending_value.ensureStillAlive(); - const result = putter.toJS(this.globalObject, pending_value, statement.structure(this.js_value, this.globalObject)); + const result = putter.toJS(this.globalObject, pending_value, statement.structure(this.js_value, this.globalObject), statement.fields_flags); if (pending_value == .zero) { PostgresSQLQuery.pendingValueSetCached(request.thisValue, this.globalObject, result); @@ -2802,11 +2838,13 @@ pub const PostgresSQLConnection = struct { pub const PostgresSQLStatement = struct { cached_structure: JSC.Strong = .{}, ref_count: u32 = 1, - fields: []const protocol.FieldDescription = &[_]protocol.FieldDescription{}, + fields: []protocol.FieldDescription = &[_]protocol.FieldDescription{}, parameters: []const int4 = &[_]int4{}, signature: Signature, status: Status = Status.parsing, error_response: protocol.ErrorResponse = .{}, + needs_duplicate_check: bool = true, + fields_flags: PostgresSQLConnection.DataCell.Flags = .{}, pub const Status = enum { parsing, @@ -2827,13 +2865,58 @@ pub const PostgresSQLStatement = struct { } } + pub fn checkForDuplicateFields(this: *PostgresSQLStatement) void { + if (!this.needs_duplicate_check) return; + this.needs_duplicate_check = false; + + var seen_numbers = std.ArrayList(u32).init(bun.default_allocator); + defer seen_numbers.deinit(); + var seen_fields = bun.StringHashMap(void).init(bun.default_allocator); + seen_fields.ensureUnusedCapacity(@intCast(this.fields.len)) catch bun.outOfMemory(); + defer seen_fields.deinit(); + + // iterate backwards + var remaining = this.fields.len; + var flags: PostgresSQLConnection.DataCell.Flags = .{}; + while (remaining > 0) { + remaining -= 1; + const field: *protocol.FieldDescription = &this.fields[remaining]; + switch (field.name_or_index) { + .name => |*name| { + const seen = seen_fields.getOrPut(name.slice()) catch unreachable; + if (seen.found_existing) { + field.name_or_index = .duplicate; + flags.has_duplicate_columns = true; + } + + flags.has_named_columns = true; + }, + .index => |index| { + if (std.mem.indexOfScalar(u32, seen_numbers.items, index) != null) { + field.name_or_index = .duplicate; + flags.has_duplicate_columns = true; + } else { + seen_numbers.append(index) catch bun.outOfMemory(); + } + + flags.has_indexed_columns = true; + }, + .duplicate => { + flags.has_duplicate_columns = true; + }, + } + } + + this.fields_flags = flags; + } + pub fn deinit(this: *PostgresSQLStatement) void { debug("PostgresSQLStatement deinit", .{}); bun.assert(this.ref_count == 0); for (this.fields) |*field| { - @constCast(field).deinit(); + field.deinit(); } bun.default_allocator.free(this.fields); 
bun.default_allocator.free(this.parameters); @@ -2845,21 +2928,37 @@ pub const PostgresSQLStatement = struct { pub fn structure(this: *PostgresSQLStatement, owner: JSValue, globalObject: *JSC.JSGlobalObject) JSValue { return this.cached_structure.get() orelse { - const names = bun.default_allocator.alloc(bun.String, this.fields.len) catch return .undefined; + const ids = bun.default_allocator.alloc(JSC.JSObject.ExternColumnIdentifier, this.fields.len) catch return .undefined; + this.checkForDuplicateFields(); defer { - for (names) |*name| { - name.deref(); + for (ids) |*name| { + name.deinit(); } - bun.default_allocator.free(names); + bun.default_allocator.free(ids); } - for (this.fields, names) |*field, *name| { - name.* = String.fromUTF8(field.name.slice()); + + for (this.fields, ids) |*field, *id| { + id.tag = switch (field.name_or_index) { + .name => 2, + .index => 1, + .duplicate => 0, + }; + switch (field.name_or_index) { + .name => |name| { + id.value.name = String.createUTF8(name.slice()); + }, + .index => |index| { + id.value.index = index; + }, + .duplicate => {}, + } } const structure_ = JSC.JSObject.createStructure( globalObject, owner, - @truncate(this.fields.len), - names.ptr, + @truncate(ids.len), + ids.ptr, + @bitCast(this.fields_flags), ); this.cached_structure.set(globalObject, structure_); return structure_; diff --git a/src/sql/postgres/postgres_protocol.zig b/src/sql/postgres/postgres_protocol.zig index 60eeaf9f9d..2abeff0787 100644 --- a/src/sql/postgres/postgres_protocol.zig +++ b/src/sql/postgres/postgres_protocol.zig @@ -963,8 +963,47 @@ pub const DataRow = struct { pub const BindComplete = [_]u8{'2'} ++ toBytes(Int32(4)); +pub const ColumnIdentifier = union(enum) { + name: Data, + index: u32, + duplicate: void, + + pub fn init(name: Data) !@This() { + if (switch (name.slice().len) { + 1..."4294967295".len => true, + 0 => return .{ .name = .{ .empty = {} } }, + else => false, + }) might_be_int: { + // use a u64 to avoid overflow + var int: u64 = 0; + for (name.slice()) |byte| { + int = int * 10 + switch (byte) { + '0'...'9' => @as(u64, byte - '0'), + else => break :might_be_int, + }; + } + + // JSC only supports indexed property names up to 2^32 + if (int < std.math.maxInt(u32)) + return .{ .index = @intCast(int) }; + } + + return .{ .name = .{ .owned = try name.toOwned() } }; + } + + pub fn deinit(this: *@This()) void { + switch (this.*) { + .name => |*name| name.deinit(), + else => {}, + } + } +}; pub const FieldDescription = struct { - name: Data = .{ .empty = {} }, + /// JavaScriptCore treats numeric property names differently from string property names, + /// so we do the work to figure out if the property name is a number ahead of time.
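+    /// For example, a column aliased as "0" (`SELECT x AS "0"`) decodes to
+    /// `.{ .index = 0 }` and becomes an indexed property on the row object,
+    /// while "id" stays a `.name` string key; repeated identifiers are later
+    /// marked `.duplicate` by checkForDuplicateFields.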
+ name_or_index: ColumnIdentifier = .{ + .name = .{ .empty = {} }, + }, table_oid: int4 = 0, column_index: short = 0, type_oid: int4 = 0, @@ -974,7 +1013,7 @@ pub const FieldDescription = struct { } pub fn deinit(this: *@This()) void { - this.name.deinit(); + this.name_or_index.deinit(); } pub fn decodeInternal(this: *@This(), comptime Container: type, reader: NewReader(Container)) AnyPostgresError!void { @@ -997,7 +1036,7 @@ pub const FieldDescription = struct { .table_oid = try reader.int4(), .column_index = try reader.short(), .type_oid = try reader.int4(), - .name = .{ .owned = try name.toOwned() }, + .name_or_index = try ColumnIdentifier.init(name), }; try reader.skip(2 + 4 + 2); @@ -1007,10 +1046,10 @@ pub const FieldDescription = struct { }; pub const RowDescription = struct { - fields: []const FieldDescription = &[_]FieldDescription{}, + fields: []FieldDescription = &[_]FieldDescription{}, pub fn deinit(this: *@This()) void { for (this.fields) |*field| { - @constCast(field).deinit(); + field.deinit(); } bun.default_allocator.free(this.fields); diff --git a/src/sys.zig b/src/sys.zig index 2327c3bc7e..fa7d22c4df 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -296,10 +296,12 @@ pub const Error = struct { from_libuv: if (Environment.isWindows) bool else void = if (Environment.isWindows) false else undefined, path: []const u8 = "", syscall: Syscall.Tag = Syscall.Tag.TODO, + dest: []const u8 = "", pub fn clone(this: *const Error, allocator: std.mem.Allocator) !Error { var copy = this.*; copy.path = try allocator.dupe(u8, copy.path); + copy.dest = try allocator.dupe(u8, copy.dest); return copy; } @@ -426,6 +428,10 @@ pub const Error = struct { err.path = bun.String.createUTF8(this.path); } + if (this.dest.len > 0) { + err.dest = bun.String.createUTF8(this.dest); + } + if (this.fd != bun.invalid_fd) { err.fd = this.fd; } @@ -469,7 +475,7 @@ pub fn getcwdZ(buf: *bun.PathBuffer) Maybe([:0]const u8) { var wbuf = bun.WPathBufferPool.get(); defer bun.WPathBufferPool.put(wbuf); const len: windows.DWORD = kernel32.GetCurrentDirectoryW(wbuf.len, wbuf); - if (Result.errnoSys(len, .getcwd)) |err| return err; + if (Result.errnoSysP(len, .getcwd, buf)) |err| return err; return Result{ .result = bun.strings.fromWPath(buf, wbuf[0..len]) }; } @@ -477,7 +483,7 @@ pub fn getcwdZ(buf: *bun.PathBuffer) Maybe([:0]const u8) { return if (rc != null) Result{ .result = rc.?[0..std.mem.len(rc.?) 
:0] } else - Result.errnoSys(@as(c_int, 0), .getcwd).?; + Result.errnoSysP(@as(c_int, 0), .getcwd, buf).?; } pub fn fchmod(fd: bun.FileDescriptor, mode: bun.Mode) Maybe(void) { @@ -485,14 +491,14 @@ pub fn fchmod(fd: bun.FileDescriptor, mode: bun.Mode) Maybe(void) { return sys_uv.fchmod(fd, mode); } - return Maybe(void).errnoSys(C.fchmod(fd.cast(), mode), .fchmod) orelse + return Maybe(void).errnoSysFd(C.fchmod(fd.cast(), mode), .fchmod, fd) orelse Maybe(void).success; } pub fn fchmodat(fd: bun.FileDescriptor, path: [:0]const u8, mode: bun.Mode, flags: i32) Maybe(void) { if (comptime Environment.isWindows) @compileError("Use fchmod instead"); - return Maybe(void).errnoSys(C.fchmodat(fd.cast(), path.ptr, mode, flags), .fchmodat) orelse + return Maybe(void).errnoSysFd(C.fchmodat(fd.cast(), path.ptr, mode, flags), .fchmodat, fd) orelse Maybe(void).success; } @@ -505,19 +511,21 @@ pub fn chmod(path: [:0]const u8, mode: bun.Mode) Maybe(void) { Maybe(void).success; } -pub fn chdirOSPath(destination: bun.OSPathSliceZ) Maybe(void) { +pub fn chdirOSPath(path: bun.stringZ, destination: if (Environment.isPosix) bun.stringZ else bun.string) Maybe(void) { if (comptime Environment.isPosix) { const rc = syscall.chdir(destination); - return Maybe(void).errnoSys(rc, .chdir) orelse Maybe(void).success; + return Maybe(void).errnoSysPD(rc, .chdir, path, destination) orelse Maybe(void).success; } if (comptime Environment.isWindows) { - if (kernel32.SetCurrentDirectory(destination) == windows.FALSE) { - log("SetCurrentDirectory({}) = {d}", .{ bun.fmt.utf16(destination), kernel32.GetLastError() }); - return Maybe(void).errnoSys(0, .chdir) orelse Maybe(void).success; + const wbuf = bun.WPathBufferPool.get(); + defer bun.WPathBufferPool.put(wbuf); + if (kernel32.SetCurrentDirectory(bun.strings.toWDirPath(wbuf, destination)) == windows.FALSE) { + log("SetCurrentDirectory({s}) = {d}", .{ destination, kernel32.GetLastError() }); + return Maybe(void).errnoSysPD(0, .chdir, path, destination) orelse Maybe(void).success; } - log("SetCurrentDirectory({}) = {d}", .{ bun.fmt.utf16(destination), 0 }); + log("SetCurrentDirectory({s}) = {d}", .{ destination, 0 }); return Maybe(void).success; } @@ -525,12 +533,16 @@ pub fn chdirOSPath(destination: bun.OSPathSliceZ) Maybe(void) { @compileError("Not implemented yet"); } -pub fn chdir(destination: anytype) Maybe(void) { +pub fn chdir(path: anytype, destination: anytype) Maybe(void) { const Type = @TypeOf(destination); if (comptime Environment.isPosix) { if (comptime Type == []u8 or Type == []const u8) { return chdirOSPath( + &(std.posix.toPosixPath(path) catch return .{ .err = .{ + .errno = @intFromEnum(bun.C.SystemErrno.EINVAL), + .syscall = .chdir, + } }), &(std.posix.toPosixPath(destination) catch return .{ .err = .{ .errno = @intFromEnum(bun.C.SystemErrno.EINVAL), .syscall = .chdir, @@ -538,25 +550,23 @@ pub fn chdir(destination: anytype) Maybe(void) { ); } - return chdirOSPath(destination); + return chdirOSPath(path, destination); } if (comptime Environment.isWindows) { if (comptime Type == *[*:0]u16) { if (kernel32.SetCurrentDirectory(destination) != 0) { - return Maybe(void).errnoSys(0, .chdir) orelse Maybe(void).success; + return Maybe(void).errnoSysPD(0, .chdir, path, destination) orelse Maybe(void).success; } return Maybe(void).success; } if (comptime Type == bun.OSPathSliceZ or Type == [:0]u16) { - return chdirOSPath(@as(bun.OSPathSliceZ, destination)); + return chdirOSPath(path, @as(bun.OSPathSliceZ, destination)); } - const wbuf = bun.WPathBufferPool.get(); - defer 
bun.WPathBufferPool.put(wbuf); - return chdirOSPath(bun.strings.toWDirPath(wbuf, destination)); + return chdirOSPath(path, destination); } return Maybe(void).todo(); @@ -590,7 +600,7 @@ pub fn stat(path: [:0]const u8) Maybe(bun.Stat) { if (comptime Environment.allow_assert) log("stat({s}) = {d}", .{ bun.asByteSlice(path), rc }); - if (Maybe(bun.Stat).errnoSys(rc, .stat)) |err| return err; + if (Maybe(bun.Stat).errnoSysP(rc, .stat, path)) |err| return err; return Maybe(bun.Stat){ .result = stat_ }; } } @@ -600,7 +610,7 @@ pub fn lstat(path: [:0]const u8) Maybe(bun.Stat) { return sys_uv.lstat(path); } else { var stat_ = mem.zeroes(bun.Stat); - if (Maybe(bun.Stat).errnoSys(C.lstat(path, &stat_), .lstat)) |err| return err; + if (Maybe(bun.Stat).errnoSysP(C.lstat(path, &stat_), .lstat, path)) |err| return err; return Maybe(bun.Stat){ .result = stat_ }; } } @@ -621,7 +631,7 @@ pub fn fstat(fd: bun.FileDescriptor) Maybe(bun.Stat) { if (comptime Environment.allow_assert) log("fstat({}) = {d}", .{ fd, rc }); - if (Maybe(bun.Stat).errnoSys(rc, .fstat)) |err| return err; + if (Maybe(bun.Stat).errnoSysFd(rc, .fstat, fd)) |err| return err; return Maybe(bun.Stat){ .result = stat_ }; } @@ -674,7 +684,7 @@ pub fn fstatat(fd: bun.FileDescriptor, path: [:0]const u8) Maybe(bun.Stat) { }; } var stat_ = mem.zeroes(bun.Stat); - if (Maybe(bun.Stat).errnoSys(syscall.fstatat(fd.int(), path, &stat_, 0), .fstatat)) |err| { + if (Maybe(bun.Stat).errnoSysFP(syscall.fstatat(fd.int(), path, &stat_, 0), .fstatat, fd, path)) |err| { log("fstatat({}, {s}) = {s}", .{ fd, path, @tagName(err.getErrno()) }); return err; } @@ -758,7 +768,7 @@ const fnctl_int = if (Environment.isLinux) usize else c_int; pub fn fcntl(fd: bun.FileDescriptor, cmd: i32, arg: fnctl_int) Maybe(fnctl_int) { while (true) { const result = fcntl_symbol(fd.cast(), cmd, arg); - if (Maybe(fnctl_int).errnoSys(result, .fcntl)) |err| { + if (Maybe(fnctl_int).errnoSysFd(result, .fcntl, fd)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -1278,7 +1288,7 @@ pub fn openatOSPath(dirfd: bun.FileDescriptor, file_path: bun.OSPathSliceZ, flag if (comptime Environment.allow_assert) log("openat({}, {s}) = {d}", .{ dirfd, bun.sliceTo(file_path, 0), rc }); - return Maybe(bun.FileDescriptor).errnoSys(rc, .open) orelse .{ .result = bun.toFD(rc) }; + return Maybe(bun.FileDescriptor).errnoSysFP(rc, .open, dirfd, file_path) orelse .{ .result = bun.toFD(rc) }; } else if (comptime Environment.isWindows) { return openatWindowsT(bun.OSPathChar, dirfd, file_path, flags); } @@ -1620,7 +1630,7 @@ pub fn pread(fd: bun.FileDescriptor, buf: []u8, offset: i64) Maybe(usize) { const ioffset = @as(i64, @bitCast(offset)); // the OS treats this as unsigned while (true) { const rc = pread_sym(fd.cast(), buf.ptr, adjusted_len, ioffset); - if (Maybe(usize).errnoSys(rc, .pread)) |err| { + if (Maybe(usize).errnoSysFd(rc, .pread, fd)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -1732,7 +1742,7 @@ pub fn recv(fd: bun.FileDescriptor, buf: []u8, flag: u32) Maybe(usize) { if (comptime Environment.isMac) { const rc = syscall.@"recvfrom$NOCANCEL"(fd.cast(), buf.ptr, adjusted_len, flag, null, null); - if (Maybe(usize).errnoSys(rc, .recv)) |err| { + if (Maybe(usize).errnoSysFd(rc, .recv, fd)) |err| { log("recv({}, {d}) = {s} {}", .{ fd, adjusted_len, err.err.name(), debug_timer }); return err; } @@ -1763,7 +1773,7 @@ pub fn send(fd: bun.FileDescriptor, buf: []const u8, flag: u32) Maybe(usize) { if (comptime Environment.isMac) { const rc = 
syscall.@"sendto$NOCANCEL"(fd.cast(), buf.ptr, buf.len, flag, null, 0); - if (Maybe(usize).errnoSys(rc, .send)) |err| { + if (Maybe(usize).errnoSysFd(rc, .send, fd)) |err| { syslog("send({}, {d}) = {s}", .{ fd, buf.len, err.err.name() }); return err; } @@ -1775,7 +1785,7 @@ pub fn send(fd: bun.FileDescriptor, buf: []const u8, flag: u32) Maybe(usize) { while (true) { const rc = linux.sendto(fd.cast(), buf.ptr, buf.len, flag, null, 0); - if (Maybe(usize).errnoSys(rc, .send)) |err| { + if (Maybe(usize).errnoSysFd(rc, .send, fd)) |err| { if (err.getErrno() == .INTR) continue; syslog("send({}, {d}) = {s}", .{ fd, buf.len, err.err.name() }); return err; @@ -1790,7 +1800,7 @@ pub fn send(fd: bun.FileDescriptor, buf: []const u8, flag: u32) Maybe(usize) { pub fn lseek(fd: bun.FileDescriptor, offset: i64, whence: usize) Maybe(usize) { while (true) { const rc = syscall.lseek(fd.cast(), offset, whence); - if (Maybe(usize).errnoSys(rc, .lseek)) |err| { + if (Maybe(usize).errnoSysFd(rc, .lseek, fd)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -1807,7 +1817,7 @@ pub fn readlink(in: [:0]const u8, buf: []u8) Maybe([:0]u8) { while (true) { const rc = syscall.readlink(in, buf.ptr, buf.len); - if (Maybe([:0]u8).errnoSys(rc, .readlink)) |err| { + if (Maybe([:0]u8).errnoSysP(rc, .readlink, in)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -1820,7 +1830,7 @@ pub fn readlinkat(fd: bun.FileDescriptor, in: [:0]const u8, buf: []u8) Maybe([:0 while (true) { const rc = syscall.readlinkat(fd.cast(), in, buf.ptr, buf.len); - if (Maybe([:0]u8).errnoSys(rc, .readlink)) |err| { + if (Maybe([:0]u8).errnoSysFP(rc, .readlink, fd, in)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -1832,14 +1842,14 @@ pub fn readlinkat(fd: bun.FileDescriptor, in: [:0]const u8, buf: []u8) Maybe([:0 pub fn ftruncate(fd: bun.FileDescriptor, size: isize) Maybe(void) { if (comptime Environment.isWindows) { if (kernel32.SetFileValidData(fd.cast(), size) == 0) { - return Maybe(void).errnoSys(0, .ftruncate) orelse Maybe(void).success; + return Maybe(void).errnoSysFd(0, .ftruncate, fd) orelse Maybe(void).success; } return Maybe(void).success; } return while (true) { - if (Maybe(void).errnoSys(syscall.ftruncate(fd.cast(), size), .ftruncate)) |err| { + if (Maybe(void).errnoSysFd(syscall.ftruncate(fd.cast(), size), .ftruncate, fd)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -2013,7 +2023,7 @@ pub fn renameat(from_dir: bun.FileDescriptor, from: [:0]const u8, to_dir: bun.Fi pub fn chown(path: [:0]const u8, uid: posix.uid_t, gid: posix.gid_t) Maybe(void) { while (true) { - if (Maybe(void).errnoSys(C.chown(path, uid, gid), .chown)) |err| { + if (Maybe(void).errnoSysP(C.chown(path, uid, gid), .chown, path)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -2023,7 +2033,7 @@ pub fn chown(path: [:0]const u8, uid: posix.uid_t, gid: posix.gid_t) Maybe(void) pub fn symlink(target: [:0]const u8, dest: [:0]const u8) Maybe(void) { while (true) { - if (Maybe(void).errnoSys(syscall.symlink(target, dest), .symlink)) |err| { + if (Maybe(void).errnoSysPD(syscall.symlink(target, dest), .symlink, target, dest)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -2184,7 +2194,7 @@ pub fn unlink(from: [:0]const u8) Maybe(void) { } while (true) { - if (Maybe(void).errnoSys(syscall.unlink(from), .unlink)) |err| { + if (Maybe(void).errnoSysP(syscall.unlink(from), .unlink, from)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -2213,7 +2223,7 @@ pub fn 
unlinkatWithFlags(dirfd: bun.FileDescriptor, to: anytype, flags: c_uint) } while (true) { - if (Maybe(void).errnoSys(syscall.unlinkat(dirfd.cast(), to, flags), .unlink)) |err| { + if (Maybe(void).errnoSysFP(syscall.unlinkat(dirfd.cast(), to, flags), .unlink, dirfd, to)) |err| { if (err.getErrno() == .INTR) continue; if (comptime Environment.allow_assert) log("unlinkat({}, {s}) = {d}", .{ dirfd, bun.sliceTo(to, 0), @intFromEnum(err.getErrno()) }); @@ -2231,7 +2241,7 @@ pub fn unlinkat(dirfd: bun.FileDescriptor, to: anytype) Maybe(void) { return unlinkatWithFlags(dirfd, to, 0); } while (true) { - if (Maybe(void).errnoSys(syscall.unlinkat(dirfd.cast(), to, 0), .unlink)) |err| { + if (Maybe(void).errnoSysFP(syscall.unlinkat(dirfd.cast(), to, 0), .unlink, dirfd, to)) |err| { if (err.getErrno() == .INTR) continue; if (comptime Environment.allow_assert) log("unlinkat({}, {s}) = {d}", .{ dirfd, bun.sliceTo(to, 0), @intFromEnum(err.getErrno()) }); @@ -2351,7 +2361,7 @@ pub fn setCloseOnExec(fd: bun.FileDescriptor) Maybe(void) { pub fn setsockopt(fd: bun.FileDescriptor, level: c_int, optname: u32, value: i32) Maybe(i32) { while (true) { const rc = syscall.setsockopt(fd.cast(), level, optname, &value, @sizeOf(i32)); - if (Maybe(i32).errnoSys(rc, .setsockopt)) |err| { + if (Maybe(i32).errnoSysFd(rc, .setsockopt, fd)) |err| { if (err.getErrno() == .INTR) continue; log("setsockopt() = {d} {s}", .{ err.err.errno, err.err.name() }); return err; @@ -2497,12 +2507,12 @@ pub fn setPipeCapacityOnLinux(fd: bun.FileDescriptor, capacity: usize) Maybe(usi // We don't use glibc here // It didn't work. Always returned 0. const pipe_len = std.os.linux.fcntl(fd.cast(), F_GETPIPE_SZ, 0); - if (Maybe(usize).errnoSys(pipe_len, .fcntl)) |err| return err; + if (Maybe(usize).errnoSysFd(pipe_len, .fcntl, fd)) |err| return err; if (pipe_len == 0) return Maybe(usize){ .result = 0 }; if (pipe_len >= capacity) return Maybe(usize){ .result = pipe_len }; const new_pipe_len = std.os.linux.fcntl(fd.cast(), F_SETPIPE_SZ, capacity); - if (Maybe(usize).errnoSys(new_pipe_len, .fcntl)) |err| return err; + if (Maybe(usize).errnoSysFd(new_pipe_len, .fcntl, fd)) |err| return err; return Maybe(usize){ .result = new_pipe_len }; } @@ -2892,7 +2902,7 @@ pub fn setFileOffset(fd: bun.FileDescriptor, offset: usize) Maybe(void) { windows.FILE_BEGIN, ); if (rc == windows.FALSE) { - return Maybe(void).errnoSys(0, .lseek) orelse Maybe(void).success; + return Maybe(void).errnoSysFd(0, .lseek, fd) orelse Maybe(void).success; } return Maybe(void).success; } @@ -2903,7 +2913,7 @@ pub fn setFileOffsetToEndWindows(fd: bun.FileDescriptor) Maybe(usize) { var new_ptr: std.os.windows.LARGE_INTEGER = undefined; const rc = kernel32.SetFilePointerEx(fd.cast(), 0, &new_ptr, windows.FILE_END); if (rc == windows.FALSE) { - return Maybe(usize).errnoSys(0, .lseek) orelse Maybe(usize){ .result = 0 }; + return Maybe(usize).errnoSysFd(0, .lseek, fd) orelse Maybe(usize){ .result = 0 }; } return Maybe(usize){ .result = @intCast(new_ptr) }; } @@ -2922,10 +2932,7 @@ pub fn pipe() Maybe([2]bun.FileDescriptor) { var fds: [2]i32 = undefined; const rc = syscall.pipe(&fds); - if (Maybe([2]bun.FileDescriptor).errnoSys( - rc, - .pipe, - )) |err| { + if (Maybe([2]bun.FileDescriptor).errnoSys(rc, .pipe)) |err| { return err; } log("pipe() = [{d}, {d}]", .{ fds[0], fds[1] }); diff --git a/test/bun.lockb b/test/bun.lockb index 5d98fa3753..091b3c31f8 100755 Binary files a/test/bun.lockb and b/test/bun.lockb differ diff --git a/test/bundler/bundler_compile.test.ts 
b/test/bundler/bundler_compile.test.ts index d4c3610527..3949d409ea 100644 --- a/test/bundler/bundler_compile.test.ts +++ b/test/bundler/bundler_compile.test.ts @@ -73,7 +73,7 @@ describe.todoIf(isFlaky && isWindows)("bundler", () => { import {rmSync} from 'fs'; // Verify we're not just importing from the filesystem rmSync("./worker.ts", {force: true}); - + console.log("Hello, world!"); new Worker("./worker"); `, diff --git a/test/bundler/bundler_html.test.ts b/test/bundler/bundler_html.test.ts index 29ac02c90f..6314971925 100644 --- a/test/bundler/bundler_html.test.ts +++ b/test/bundler/bundler_html.test.ts @@ -33,6 +33,38 @@ describe("bundler", () => { }, }); + // Test relative paths without "./" in script src + itBundled("html/implicit-relative-paths", { + outdir: "out/", + files: { + "/src/index.html": ` + + + + + + + +
+      <!DOCTYPE html>
+      <html>
+        <head>
+          <link rel="stylesheet" href="styles.css" />
+        </head>
+        <body>
+          <div>Hello World</div>
+          <script src="script.js"></script>
+        </body>
+      </html>
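+      <!-- asset paths above intentionally omit the leading "./" -->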
+ +`, + "/src/styles.css": "body { background-color: red; }", + "/src/script.js": "console.log('Hello World')", + }, + experimentalHtml: true, + experimentalCss: true, + root: "/src", + entryPoints: ["/src/index.html"], + + onAfterBundle(api) { + // Check that output HTML references hashed filenames + api.expectFile("out/index.html").not.toContain("styles.css"); + api.expectFile("out/index.html").not.toContain("script.js"); + api.expectFile("out/index.html").toMatch(/href=".*\.css"/); + api.expectFile("out/index.html").toMatch(/src=".*\.js"/); + }, + }); + // Test multiple script and style bundling itBundled("html/multiple-assets", { outdir: "out/", @@ -721,4 +753,163 @@ body { expect(cssBundle).toContain("box-sizing: border-box"); }, }); + + // Test absolute paths in HTML + itBundled("html/absolute-paths", { + outdir: "out/", + files: { + "/index.html": ` + + + + + + + +
+      <!DOCTYPE html>
+      <html>
+        <head>
+          <link rel="stylesheet" href="/styles/main.css" />
+        </head>
+        <body>
+          <h1>Absolute Paths</h1>
+          <img src="/images/logo.png" />
+          <script src="/scripts/app.js"></script>
+        </body>
+      </html>
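+      <!-- all asset URLs are root-absolute; the bundler rewrites them to hashed outputs -->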
+ + +`, + "/styles/main.css": "body { margin: 0; }", + "/scripts/app.js": "console.log('App loaded')", + "/images/logo.png": "fake image content", + }, + experimentalHtml: true, + experimentalCss: true, + entryPoints: ["/index.html"], + onAfterBundle(api) { + // Check that absolute paths are handled correctly + const htmlBundle = api.readFile("out/index.html"); + + // CSS should be bundled and hashed + api.expectFile("out/index.html").not.toContain("/styles/main.css"); + api.expectFile("out/index.html").toMatch(/href=".*\.css"/); + + // JS should be bundled and hashed + api.expectFile("out/index.html").not.toContain("/scripts/app.js"); + api.expectFile("out/index.html").toMatch(/src=".*\.js"/); + + // Image should be hashed + api.expectFile("out/index.html").not.toContain("/images/logo.png"); + api.expectFile("out/index.html").toMatch(/src=".*\.png"/); + + // Get the bundled files and verify their contents + const cssMatch = htmlBundle.match(/href="(.*\.css)"/); + const jsMatch = htmlBundle.match(/src="(.*\.js)"/); + const imgMatch = htmlBundle.match(/src="(.*\.png)"/); + + expect(cssMatch).not.toBeNull(); + expect(jsMatch).not.toBeNull(); + expect(imgMatch).not.toBeNull(); + + const cssBundle = api.readFile("out/" + cssMatch![1]); + const jsBundle = api.readFile("out/" + jsMatch![1]); + + expect(cssBundle).toContain("margin: 0"); + expect(jsBundle).toContain("App loaded"); + }, + }); + + // Test that sourcemap comments are not included in HTML and CSS files + itBundled("html/no-sourcemap-comments", { + outdir: "out/", + sourceMap: "linked", + files: { + "/index.html": ` + + + + + + + +
+      <!DOCTYPE html>
+      <html>
+        <head>
+          <link rel="stylesheet" href="styles.css" />
+        </head>
+        <body>
+          <h1>No Sourcemap Comments</h1>
+          <script src="script.js"></script>
+        </body>
+      </html>
+ +`, + "/styles.css": ` +body { + background-color: red; +} +/* This is a comment */`, + "/script.js": "console.log('Hello World')", + }, + experimentalHtml: true, + experimentalCss: true, + sourceMap: "linked", + entryPoints: ["/index.html"], + onAfterBundle(api) { + // Check HTML file doesn't contain sourcemap comments + const htmlContent = api.readFile("out/index.html"); + api.expectFile("out/index.html").not.toContain("sourceMappingURL"); + api.expectFile("out/index.html").not.toContain("debugId"); + + // Get the CSS filename from the HTML + const cssMatch = htmlContent.match(/href="(.*\.css)"/); + expect(cssMatch).not.toBeNull(); + const cssFile = cssMatch![1]; + + // Check CSS file doesn't contain sourcemap comments + api.expectFile("out/" + cssFile).not.toContain("sourceMappingURL"); + api.expectFile("out/" + cssFile).not.toContain("debugId"); + + // Get the JS filename from the HTML + const jsMatch = htmlContent.match(/src="(.*\.js)"/); + expect(jsMatch).not.toBeNull(); + const jsFile = jsMatch![1]; + + // JS file SHOULD contain sourcemap comment since it's supported + api.expectFile("out/" + jsFile).toContain("sourceMappingURL"); + }, + }); + + // Also test with inline sourcemaps + itBundled("html/no-sourcemap-comments-inline", { + outdir: "out/", + files: { + "/index.html": ` + + + + + + + +
+      <!DOCTYPE html>
+      <html>
+        <head>
+          <link rel="stylesheet" href="styles.css" />
+        </head>
+        <body>
+          <h1>No Sourcemap Comments</h1>
+          <script src="script.js"></script>
+        </body>
+      </html>
+ +`, + "/styles.css": ` +body { + background-color: red; +} +/* This is a comment */`, + "/script.js": "console.log('Hello World')", + }, + experimentalHtml: true, + experimentalCss: true, + sourceMap: "inline", + entryPoints: ["/index.html"], + onAfterBundle(api) { + // Check HTML file doesn't contain sourcemap comments + const htmlContent = api.readFile("out/index.html"); + api.expectFile("out/index.html").not.toContain("sourceMappingURL"); + api.expectFile("out/index.html").not.toContain("debugId"); + + // Get the CSS filename from the HTML + const cssMatch = htmlContent.match(/href="(.*\.css)"/); + expect(cssMatch).not.toBeNull(); + const cssFile = cssMatch![1]; + + // Check CSS file doesn't contain sourcemap comments + api.expectFile("out/" + cssFile).not.toContain("sourceMappingURL"); + api.expectFile("out/" + cssFile).not.toContain("debugId"); + + // Get the JS filename from the HTML + const jsMatch = htmlContent.match(/src="(.*\.js)"/); + expect(jsMatch).not.toBeNull(); + const jsFile = jsMatch![1]; + + // JS file SHOULD contain sourcemap comment since it's supported + api.expectFile("out/" + jsFile).toContain("sourceMappingURL"); + }, + }); }); diff --git a/test/cli/hot/hot.test.ts b/test/cli/hot/hot.test.ts index 9b53bb733c..8453a87dde 100644 --- a/test/cli/hot/hot.test.ts +++ b/test/cli/hot/hot.test.ts @@ -1,4 +1,4 @@ -import { spawn } from "bun"; +import { spawn, stderr } from "bun"; import { beforeEach, expect, it } from "bun:test"; import { copyFileSync, cpSync, readFileSync, renameSync, rmSync, unlinkSync, writeFileSync } from "fs"; import { bunEnv, bunExe, isDebug, tmpdirSync, waitForFileToExist } from "harness"; @@ -450,7 +450,7 @@ ${" ".repeat(reloadCounter * 2)}throw new Error(${reloadCounter});`, let it = str.split("\n"); let line; while ((line = it.shift())) { - if (!line.includes("error")) continue; + if (!line.includes("error:")) continue; str = ""; if (reloadCounter === 50) { @@ -530,7 +530,7 @@ ${" ".repeat(reloadCounter * 2)}throw new Error(${reloadCounter});`, let it = str.split("\n"); let line; while ((line = it.shift())) { - if (!line.includes("error")) continue; + if (!line.includes("error:")) continue; str = ""; if (reloadCounter === 50) { diff --git a/test/cli/install/registry/__snapshots__/bun-install-registry.test.ts.snap b/test/cli/install/__snapshots__/bun-install-registry.test.ts.snap similarity index 99% rename from test/cli/install/registry/__snapshots__/bun-install-registry.test.ts.snap rename to test/cli/install/__snapshots__/bun-install-registry.test.ts.snap index 9d2bebfe0c..c7af63e377 100644 --- a/test/cli/install/registry/__snapshots__/bun-install-registry.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-install-registry.test.ts.snap @@ -140,6 +140,7 @@ exports[`text lockfile workspace sorting 1`] = ` "lockfileVersion": 0, "workspaces": { "": { + "name": "foo", "dependencies": { "no-deps": "1.0.0", }, @@ -173,6 +174,7 @@ exports[`text lockfile workspace sorting 2`] = ` "lockfileVersion": 0, "workspaces": { "": { + "name": "foo", "dependencies": { "no-deps": "1.0.0", }, @@ -214,6 +216,7 @@ exports[`text lockfile --frozen-lockfile 1`] = ` "lockfileVersion": 0, "workspaces": { "": { + "name": "foo", "dependencies": { "a-dep": "^1.0.2", "no-deps": "^1.0.0", @@ -244,6 +247,7 @@ exports[`binaries each type of binary serializes correctly to text lockfile 1`] "lockfileVersion": 0, "workspaces": { "": { + "name": "foo", "dependencies": { "dir-bin": "./dir-bin", "file-bin": "./file-bin", @@ -270,6 +274,7 @@ exports[`binaries root resolution bins 1`] 
= ` "lockfileVersion": 0, "workspaces": { "": { + "name": "fooooo", "dependencies": { "fooooo": ".", "no-deps": "1.0.0", @@ -290,6 +295,7 @@ exports[`hoisting text lockfile is hoisted 1`] = ` "lockfileVersion": 0, "workspaces": { "": { + "name": "foo", "dependencies": { "hoist-lockfile-1": "1.0.0", "hoist-lockfile-2": "1.0.0", @@ -317,6 +323,7 @@ exports[`it should ignore peerDependencies within workspaces 1`] = ` "lockfileVersion": 0, "workspaces": { "": { + "name": "foo", "peerDependencies": { "no-deps": ">=1.0.0", }, diff --git a/test/cli/install/__snapshots__/bun-install.test.ts.snap b/test/cli/install/__snapshots__/bun-install.test.ts.snap index 96d0b00550..593bd7b0dd 100644 --- a/test/cli/install/__snapshots__/bun-install.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-install.test.ts.snap @@ -61,7 +61,9 @@ exports[`should read install.saveTextLockfile from bunfig.toml 1`] = ` "{ "lockfileVersion": 0, "workspaces": { - "": {}, + "": { + "name": "foo", + }, "packages/pkg1": { "name": "pkg-one", "version": "1.0.0", diff --git a/test/cli/install/__snapshots__/bun-lock.test.ts.snap b/test/cli/install/__snapshots__/bun-lock.test.ts.snap new file mode 100644 index 0000000000..4f15c6c30c --- /dev/null +++ b/test/cli/install/__snapshots__/bun-lock.test.ts.snap @@ -0,0 +1,45 @@ +// Bun Snapshot v1, https://goo.gl/fbAQLP + +exports[`should escape names 1`] = ` +"{ + "lockfileVersion": 0, + "workspaces": { + "": { + "name": "quote-in-dependency-name", + }, + "packages/\\"": { + "name": "\\"", + }, + "packages/pkg1": { + "name": "pkg1", + "dependencies": { + "\\"": "*", + }, + }, + }, + "packages": { + "\\"": ["\\"@workspace:packages/\\"", {}], + + "pkg1": ["pkg1@workspace:packages/pkg1", { "dependencies": { "\\"": "*" } }], + } +} +" +`; + +exports[`should write plaintext lockfiles 1`] = ` +"{ + "lockfileVersion": 0, + "workspaces": { + "": { + "name": "test-package", + "dependencies": { + "dummy-package": "file:./bar-0.0.2.tgz", + }, + }, + }, + "packages": { + "dummy-package": ["bar@./bar-0.0.2.tgz", {}], + } +} +" +`; diff --git a/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap b/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap index 423a2b49ae..b419206fba 100644 --- a/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap @@ -1,2177 +1,2177 @@ // Bun Snapshot v1, https://goo.gl/fbAQLP exports[`dependency on workspace without version in package.json: version: * 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "*", - "name": "lodash", - "npm": { - "name": "lodash", - "version": ">=0.0.0", - }, - "package_id": 1, - }, - ], +"{ "format": "v2", - "meta_hash": "1e2d5fa6591f007aa6674495d1022868fc3b60325c4a1555315ca0e16ef31c4e", + "meta_hash": "a5d5a45555763c1040428cd33363c16438c75b23d8961e7458abe2d985fa08d1", "package_index": { - "bar": 2, + "no-deps": 1, "foo": 0, - "lodash": 1, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", 
- "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, + "package_id": 1 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true }, - "depth": 0, - "id": 0, - "path": "node_modules", + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "*", + "npm": { + "name": "no-deps", + "version": ">=0.0.0" + }, + "package_id": 1, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: *.*.* 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "*.*.*", - "name": "lodash", - "npm": { - "name": "lodash", - "version": ">=0.0.0", - }, - "package_id": 1, - }, - ], +"{ "format": "v2", - "meta_hash": "1e2d5fa6591f007aa6674495d1022868fc3b60325c4a1555315ca0e16ef31c4e", + "meta_hash": "a5d5a45555763c1040428cd33363c16438c75b23d8961e7458abe2d985fa08d1", "package_index": { - "bar": 2, + 
"no-deps": 1, "foo": 0, - "lodash": 1, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, + "package_id": 1 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true }, - "depth": 0, - "id": 0, - "path": "node_modules", + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "*.*.*", + "npm": { + "name": "no-deps", + "version": ">=0.0.0" + }, + "package_id": 1, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: =* 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "=*", - "name": "lodash", - "npm": { - "name": "lodash", - 
"version": ">=0.0.0", - }, - "package_id": 1, - }, - ], +"{ "format": "v2", - "meta_hash": "1e2d5fa6591f007aa6674495d1022868fc3b60325c4a1555315ca0e16ef31c4e", + "meta_hash": "a5d5a45555763c1040428cd33363c16438c75b23d8961e7458abe2d985fa08d1", "package_index": { - "bar": 2, + "no-deps": 1, "foo": 0, - "lodash": 1, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, + "package_id": 1 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true }, - "depth": 0, - "id": 0, - "path": "node_modules", + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "=*", + "npm": { + "name": "no-deps", + "version": ">=0.0.0" + }, + "package_id": 1, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: kjwoehcojrgjoj 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - 
"workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "dist_tag": { - "name": "lodash", - "tag": "lodash", - }, - "id": 2, - "literal": "kjwoehcojrgjoj", - "name": "lodash", - "package_id": 1, - }, - ], +"{ "format": "v2", - "meta_hash": "1e2d5fa6591f007aa6674495d1022868fc3b60325c4a1555315ca0e16ef31c4e", + "meta_hash": "a5d5a45555763c1040428cd33363c16438c75b23d8961e7458abe2d985fa08d1", "package_index": { - "bar": 2, + "no-deps": 1, "foo": 0, - "lodash": 1, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, + "package_id": 1 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true }, - "depth": 0, - "id": 0, - "path": "node_modules", + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "kjwoehcojrgjoj", + "dist_tag": { + "name": "no-deps", + "tag": "no-deps" + }, + "package_id": 1, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + 
} +}" `; exports[`dependency on workspace without version in package.json: version: *.1.* 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "*.1.*", - "name": "lodash", - "npm": { - "name": "lodash", - "version": ">=0.0.0", - }, - "package_id": 1, - }, - ], +"{ "format": "v2", - "meta_hash": "1e2d5fa6591f007aa6674495d1022868fc3b60325c4a1555315ca0e16ef31c4e", + "meta_hash": "a5d5a45555763c1040428cd33363c16438c75b23d8961e7458abe2d985fa08d1", "package_index": { - "bar": 2, + "no-deps": 1, "foo": 0, - "lodash": 1, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, + "package_id": 1 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true }, - "depth": 0, - "id": 0, - "path": "node_modules", + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "*.1.*", + "npm": { + "name": "no-deps", + "version": ">=0.0.0" + }, + "package_id": 1, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + 
"bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: *-pre 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "*-pre", - "name": "lodash", - "npm": { - "name": "lodash", - "version": ">=0.0.0", - }, - "package_id": 1, - }, - ], +"{ "format": "v2", - "meta_hash": "1e2d5fa6591f007aa6674495d1022868fc3b60325c4a1555315ca0e16ef31c4e", + "meta_hash": "a5d5a45555763c1040428cd33363c16438c75b23d8961e7458abe2d985fa08d1", "package_index": { - "bar": 2, + "no-deps": 1, "foo": 0, - "lodash": 1, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, + "package_id": 1 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true }, - "depth": 0, - "id": 0, - "path": "node_modules", + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "*-pre", + "npm": { + "name": "no-deps", + "version": ">=0.0.0" + }, + "package_id": 1, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + 
}, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: 1 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "1", - "name": "lodash", - "npm": { - "name": "lodash", - "version": "<2.0.0 >=1.0.0", - }, - "package_id": 3, - }, - ], +"{ "format": "v2", - "meta_hash": "56c714bc8ac0cdbf731de74d216134f3ce156ab45adda065fa84e4b2ce349f4b", + "meta_hash": "80ecab0f58b4fb37bae1983a06ebd81b6573433d7f92e938ffa7854f8ff15e7c", "package_index": { - "bar": 2, - "foo": 0, - "lodash": [ + "no-deps": [ 1, - 3, + 3 ], + "foo": 0, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 3, - "integrity": "sha512-F7AB8u+6d00CCgnbjWzq9fFLpzOMCgq6mPjOW4+8+dYbrnc0obRrC+IHctzfZ1KKTQxX0xo/punrlpOWcf4gpw==", - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "https://registry.npmjs.org/lodash/-/lodash-1.3.1.tgz", - "tag": "npm", - "value": "1.3.1", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, - }, - "depth": 0, - "id": 0, - "path": "node_modules", + "package_id": 1 + } + } }, { - "dependencies": { - "lodash": { - "id": 2, - "package_id": 3, - }, - }, - "depth": 1, "id": 1, "path": "node_modules/bar/node_modules", + "depth": 1, + "dependencies": { + "no-deps": { + "id": 2, + "package_id": 3 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true + }, + "id": 0 }, + { + 
"name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "1", + "npm": { + "name": "no-deps", + "version": "<2.0.0 >=1.0.0" + }, + "package_id": 3, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 3, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "npm", + "value": "1.1.0", + "resolved": "http://localhost:1234/no-deps/-/no-deps-1.1.0.tgz" + }, + "dependencies": [], + "integrity": "sha512-ebG2pipYAKINcNI3YxdsiAgFvNGp2gdRwxAKN2LYBm9+YxuH/lHH2sl+GKQTuGiNfCfNZRMHUyyLPEJD6HWm7w==", + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: 1.* 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "1.*", - "name": "lodash", - "npm": { - "name": "lodash", - "version": "<2.0.0 >=1.0.0", - }, - "package_id": 3, - }, - ], +"{ "format": "v2", - "meta_hash": "56c714bc8ac0cdbf731de74d216134f3ce156ab45adda065fa84e4b2ce349f4b", + "meta_hash": "80ecab0f58b4fb37bae1983a06ebd81b6573433d7f92e938ffa7854f8ff15e7c", "package_index": { - "bar": 2, - "foo": 0, - "lodash": [ + "no-deps": [ 1, - 3, + 3 ], + "foo": 0, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - 
"integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 3, - "integrity": "sha512-F7AB8u+6d00CCgnbjWzq9fFLpzOMCgq6mPjOW4+8+dYbrnc0obRrC+IHctzfZ1KKTQxX0xo/punrlpOWcf4gpw==", - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "https://registry.npmjs.org/lodash/-/lodash-1.3.1.tgz", - "tag": "npm", - "value": "1.3.1", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, - }, - "depth": 0, - "id": 0, - "path": "node_modules", + "package_id": 1 + } + } }, { - "dependencies": { - "lodash": { - "id": 2, - "package_id": 3, - }, - }, - "depth": 1, "id": 1, "path": "node_modules/bar/node_modules", + "depth": 1, + "dependencies": { + "no-deps": { + "id": 2, + "package_id": 3 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true + }, + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "1.*", + "npm": { + "name": "no-deps", + "version": "<2.0.0 >=1.0.0" + }, + "package_id": 3, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 3, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "npm", + "value": "1.1.0", + "resolved": "http://localhost:1234/no-deps/-/no-deps-1.1.0.tgz" + }, + "dependencies": [], + "integrity": "sha512-ebG2pipYAKINcNI3YxdsiAgFvNGp2gdRwxAKN2LYBm9+YxuH/lHH2sl+GKQTuGiNfCfNZRMHUyyLPEJD6HWm7w==", + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: 1.1.* 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, 
- "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "1.1.*", - "name": "lodash", - "npm": { - "name": "lodash", - "version": "<1.2.0 >=1.1.0", - }, - "package_id": 3, - }, - ], +"{ "format": "v2", - "meta_hash": "56ec928a6d5f1d18236abc348bc711d6cfd08ca0a068bfc9fda24e7b22bed046", + "meta_hash": "80ecab0f58b4fb37bae1983a06ebd81b6573433d7f92e938ffa7854f8ff15e7c", "package_index": { - "bar": 2, - "foo": 0, - "lodash": [ + "no-deps": [ 1, - 3, + 3 ], + "foo": 0, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 3, - "integrity": "sha512-SFeNKyKPh4kvYv0yd95fwLKw4JXM45PJLsPRdA8v7/q0lBzFeK6XS8xJTl6mlhb8PbAzioMkHli1W/1g0y4XQQ==", - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "https://registry.npmjs.org/lodash/-/lodash-1.1.1.tgz", - "tag": "npm", - "value": "1.1.1", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, - }, - "depth": 0, - "id": 0, - "path": "node_modules", + "package_id": 1 + } + } }, { - "dependencies": { - "lodash": { - "id": 2, - "package_id": 3, - }, - }, - "depth": 1, "id": 1, "path": "node_modules/bar/node_modules", + "depth": 1, + "dependencies": { + "no-deps": { + "id": 2, + "package_id": 3 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true + }, + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "1.1.*", + "npm": { + "name": "no-deps", + "version": "<1.2.0 >=1.1.0" + }, + "package_id": 3, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", 
+ "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 3, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "npm", + "value": "1.1.0", + "resolved": "http://localhost:1234/no-deps/-/no-deps-1.1.0.tgz" + }, + "dependencies": [], + "integrity": "sha512-ebG2pipYAKINcNI3YxdsiAgFvNGp2gdRwxAKN2LYBm9+YxuH/lHH2sl+GKQTuGiNfCfNZRMHUyyLPEJD6HWm7w==", + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; -exports[`dependency on workspace without version in package.json: version: 1.1.1 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "1.1.1", - "name": "lodash", - "npm": { - "name": "lodash", - "version": "==1.1.1", - }, - "package_id": 3, - }, - ], +exports[`dependency on workspace without version in package.json: version: 1.1.0 1`] = ` +"{ "format": "v2", - "meta_hash": "56ec928a6d5f1d18236abc348bc711d6cfd08ca0a068bfc9fda24e7b22bed046", + "meta_hash": "80ecab0f58b4fb37bae1983a06ebd81b6573433d7f92e938ffa7854f8ff15e7c", "package_index": { - "bar": 2, - "foo": 0, - "lodash": [ + "no-deps": [ 1, - 3, + 3 ], + "foo": 0, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 3, - "integrity": "sha512-SFeNKyKPh4kvYv0yd95fwLKw4JXM45PJLsPRdA8v7/q0lBzFeK6XS8xJTl6mlhb8PbAzioMkHli1W/1g0y4XQQ==", - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "https://registry.npmjs.org/lodash/-/lodash-1.1.1.tgz", - "tag": "npm", - "value": "1.1.1", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": 
"node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, - }, - "depth": 0, - "id": 0, - "path": "node_modules", + "package_id": 1 + } + } }, { - "dependencies": { - "lodash": { - "id": 2, - "package_id": 3, - }, - }, - "depth": 1, "id": 1, "path": "node_modules/bar/node_modules", + "depth": 1, + "dependencies": { + "no-deps": { + "id": 2, + "package_id": 3 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true + }, + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "1.1.0", + "npm": { + "name": "no-deps", + "version": "==1.1.0" + }, + "package_id": 3, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 3, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "npm", + "value": "1.1.0", + "resolved": "http://localhost:1234/no-deps/-/no-deps-1.1.0.tgz" + }, + "dependencies": [], + "integrity": "sha512-ebG2pipYAKINcNI3YxdsiAgFvNGp2gdRwxAKN2LYBm9+YxuH/lHH2sl+GKQTuGiNfCfNZRMHUyyLPEJD6HWm7w==", + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: *-pre+build 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "*-pre+build", - "name": "lodash", - "npm": { - "name": "lodash", - "version": ">=0.0.0", - }, - "package_id": 3, - }, - ], +"{ "format": "v2", - "meta_hash": "13e05e9c7522649464f47891db2c094497e8827d4a1f6784db8ef6c066211846", + "meta_hash": "c881b2c8cf6783504861587208d2b08d131130ff006987d527987075b04aa921", "package_index": { - "bar": 2, - "foo": 0, - "lodash": [ + "no-deps": [ 1, - 3, + 3 ], + "foo": 0, + "bar": 2 
}, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 3, - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "tag": "npm", - "value": "4.17.21", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, - }, - "depth": 0, - "id": 0, - "path": "node_modules", + "package_id": 1 + } + } }, { - "dependencies": { - "lodash": { - "id": 2, - "package_id": 3, - }, - }, - "depth": 1, "id": 1, "path": "node_modules/bar/node_modules", + "depth": 1, + "dependencies": { + "no-deps": { + "id": 2, + "package_id": 3 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true + }, + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "*-pre+build", + "npm": { + "name": "no-deps", + "version": ">=0.0.0" + }, + "package_id": 3, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 3, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "npm", + "value": "2.0.0", + "resolved": "http://localhost:1234/no-deps/-/no-deps-2.0.0.tgz" + }, + "dependencies": [], + 
"integrity": "sha512-W3duJKZPcMIG5rA1io5cSK/bhW9rWFz+jFxZsKS/3suK4qHDkQNxUTEXee9/hTaAoDCeHWQqogukWYKzfr6X4g==", + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: *+build 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "id": 2, - "literal": "*+build", - "name": "lodash", - "npm": { - "name": "lodash", - "version": ">=0.0.0", - }, - "package_id": 3, - }, - ], +"{ "format": "v2", - "meta_hash": "13e05e9c7522649464f47891db2c094497e8827d4a1f6784db8ef6c066211846", + "meta_hash": "c881b2c8cf6783504861587208d2b08d131130ff006987d527987075b04aa921", "package_index": { - "bar": 2, - "foo": 0, - "lodash": [ + "no-deps": [ 1, - 3, + 3 ], + "foo": 0, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 3, - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "tag": "npm", - "value": "4.17.21", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, - }, - "depth": 0, - "id": 0, - "path": "node_modules", + "package_id": 1 + } + } }, { - "dependencies": { - "lodash": { - "id": 2, - "package_id": 3, - }, - }, - "depth": 1, "id": 1, "path": "node_modules/bar/node_modules", + "depth": 1, + "dependencies": { + "no-deps": { + "id": 2, + "package_id": 3 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true + }, + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + 
"workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "*+build", + "npm": { + "name": "no-deps", + "version": ">=0.0.0" + }, + "package_id": 3, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 3, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "npm", + "value": "2.0.0", + "resolved": "http://localhost:1234/no-deps/-/no-deps-2.0.0.tgz" + }, + "dependencies": [], + "integrity": "sha512-W3duJKZPcMIG5rA1io5cSK/bhW9rWFz+jFxZsKS/3suK4qHDkQNxUTEXee9/hTaAoDCeHWQqogukWYKzfr6X4g==", + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: latest 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "dist_tag": { - "name": "lodash", - "tag": "lodash", - }, - "id": 2, - "literal": "latest", - "name": "lodash", - "package_id": 3, - }, - ], +"{ "format": "v2", - "meta_hash": "13e05e9c7522649464f47891db2c094497e8827d4a1f6784db8ef6c066211846", + "meta_hash": "c881b2c8cf6783504861587208d2b08d131130ff006987d527987075b04aa921", "package_index": { - "bar": 2, - "foo": 0, - "lodash": [ + "no-deps": [ 1, - 3, + 3 ], + "foo": 0, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - 
"resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 3, - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "tag": "npm", - "value": "4.17.21", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, - }, - "depth": 0, - "id": 0, - "path": "node_modules", + "package_id": 1 + } + } }, { - "dependencies": { - "lodash": { - "id": 2, - "package_id": 3, - }, - }, - "depth": 1, "id": 1, "path": "node_modules/bar/node_modules", + "depth": 1, + "dependencies": { + "no-deps": { + "id": 2, + "package_id": 3 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true + }, + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "latest", + "dist_tag": { + "name": "no-deps", + "tag": "no-deps" + }, + "package_id": 3, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 3, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "npm", + "value": "2.0.0", + "resolved": "http://localhost:1234/no-deps/-/no-deps-2.0.0.tgz" + }, + "dependencies": [], + "integrity": "sha512-W3duJKZPcMIG5rA1io5cSK/bhW9rWFz+jFxZsKS/3suK4qHDkQNxUTEXee9/hTaAoDCeHWQqogukWYKzfr6X4g==", + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on workspace without version in package.json: version: 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": 
"packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "dist_tag": { - "name": "lodash", - "tag": "lodash", - }, - "id": 2, - "literal": "", - "name": "lodash", - "package_id": 3, - }, - ], +"{ "format": "v2", - "meta_hash": "13e05e9c7522649464f47891db2c094497e8827d4a1f6784db8ef6c066211846", + "meta_hash": "c881b2c8cf6783504861587208d2b08d131130ff006987d527987075b04aa921", "package_index": { - "bar": 2, - "foo": 0, - "lodash": [ + "no-deps": [ 1, - 3, + 3 ], + "foo": 0, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 3, - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "tag": "npm", - "value": "4.17.21", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, - }, - "depth": 0, - "id": 0, - "path": "node_modules", + "package_id": 1 + } + } }, { - "dependencies": { - "lodash": { - "id": 2, - "package_id": 3, - }, - }, - "depth": 1, "id": 1, "path": "node_modules/bar/node_modules", + "depth": 1, + "dependencies": { + "no-deps": { + "id": 2, + "package_id": 3 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true + }, + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "", + "dist_tag": { + "name": "no-deps", + "tag": "no-deps" + }, + "package_id": 3, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", 
+ "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 3, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "npm", + "value": "2.0.0", + "resolved": "http://localhost:1234/no-deps/-/no-deps-2.0.0.tgz" + }, + "dependencies": [], + "integrity": "sha512-W3duJKZPcMIG5rA1io5cSK/bhW9rWFz+jFxZsKS/3suK4qHDkQNxUTEXee9/hTaAoDCeHWQqogukWYKzfr6X4g==", + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { - "11592711315645265694": "1.0.0", - }, -} + "11592711315645265694": "1.0.0" + } +}" `; exports[`dependency on same name as workspace and dist-tag: with version 1`] = ` -{ - "dependencies": [ - { - "behavior": { - "workspace": true, - }, - "id": 0, - "literal": "packages/bar", - "name": "bar", - "package_id": 2, - "workspace": "packages/bar", - }, - { - "behavior": { - "workspace": true, - }, - "id": 1, - "literal": "packages/mono", - "name": "lodash", - "package_id": 1, - "workspace": "packages/mono", - }, - { - "behavior": { - "prod": true, - "workspace": true, - }, - "dist_tag": { - "name": "lodash", - "tag": "lodash", - }, - "id": 2, - "literal": "latest", - "name": "lodash", - "package_id": 3, - }, - ], +"{ "format": "v2", - "meta_hash": "13e05e9c7522649464f47891db2c094497e8827d4a1f6784db8ef6c066211846", + "meta_hash": "c881b2c8cf6783504861587208d2b08d131130ff006987d527987075b04aa921", "package_index": { - "bar": 2, - "foo": 0, - "lodash": [ + "no-deps": [ 1, - 3, + 3 ], + "foo": 0, + "bar": 2 }, - "packages": [ - { - "bin": null, - "dependencies": [ - 0, - 1, - ], - "id": 0, - "integrity": null, - "man_dir": "", - "name": "foo", - "name_hash": "14841791273925386894", - "origin": "local", - "resolution": { - "resolved": "", - "tag": "root", - "value": "", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 1, - "integrity": null, - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/mono", - "tag": "workspace", - "value": "workspace:packages/mono", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [ - 2, - ], - "id": 2, - "integrity": null, - "man_dir": "", - "name": "bar", - "name_hash": "11592711315645265694", - "origin": "npm", - "resolution": { - "resolved": "workspace:packages/bar", - "tag": "workspace", - "value": "workspace:packages/bar", - }, - "scripts": {}, - }, - { - "bin": null, - "dependencies": [], - "id": 3, - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "man_dir": "", - "name": "lodash", - "name_hash": "15298228331728003776", - "origin": "npm", - "resolution": { - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "tag": "npm", - "value": "4.17.21", - }, - "scripts": {}, - }, - ], "trees": [ { + "id": 0, + "path": "node_modules", + "depth": 0, "dependencies": { "bar": { "id": 0, - "package_id": 2, + "package_id": 2 }, - "lodash": { + "no-deps": { "id": 1, - "package_id": 1, - }, - }, - "depth": 0, - "id": 0, - "path": "node_modules", + 
"package_id": 1 + } + } }, { - "dependencies": { - "lodash": { - "id": 2, - "package_id": 3, - }, - }, - "depth": 1, "id": 1, "path": "node_modules/bar/node_modules", + "depth": 1, + "dependencies": { + "no-deps": { + "id": 2, + "package_id": 3 + } + } + } + ], + "dependencies": [ + { + "name": "bar", + "literal": "packages/bar", + "workspace": "packages/bar", + "package_id": 2, + "behavior": { + "workspace": true + }, + "id": 0 }, + { + "name": "no-deps", + "literal": "packages/mono", + "workspace": "packages/mono", + "package_id": 1, + "behavior": { + "workspace": true + }, + "id": 1 + }, + { + "name": "no-deps", + "literal": "latest", + "dist_tag": { + "name": "no-deps", + "tag": "no-deps" + }, + "package_id": 3, + "behavior": { + "prod": true, + "workspace": true + }, + "id": 2 + } + ], + "packages": [ + { + "id": 0, + "name": "foo", + "name_hash": "14841791273925386894", + "resolution": { + "tag": "root", + "value": "", + "resolved": "" + }, + "dependencies": [ + 0, + 1 + ], + "integrity": null, + "man_dir": "", + "origin": "local", + "bin": null, + "scripts": {} + }, + { + "id": 1, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/mono", + "resolved": "workspace:packages/mono" + }, + "dependencies": [], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 2, + "name": "bar", + "name_hash": "11592711315645265694", + "resolution": { + "tag": "workspace", + "value": "workspace:packages/bar", + "resolved": "workspace:packages/bar" + }, + "dependencies": [ + 2 + ], + "integrity": null, + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + }, + { + "id": 3, + "name": "no-deps", + "name_hash": "5128161233225832376", + "resolution": { + "tag": "npm", + "value": "2.0.0", + "resolved": "http://localhost:1234/no-deps/-/no-deps-2.0.0.tgz" + }, + "dependencies": [], + "integrity": "sha512-W3duJKZPcMIG5rA1io5cSK/bhW9rWFz+jFxZsKS/3suK4qHDkQNxUTEXee9/hTaAoDCeHWQqogukWYKzfr6X4g==", + "man_dir": "", + "origin": "npm", + "bin": null, + "scripts": {} + } ], "workspace_paths": { "11592711315645265694": "packages/bar", - "15298228331728003776": "packages/mono", + "5128161233225832376": "packages/mono" }, "workspace_versions": { "11592711315645265694": "1.0.0", - "15298228331728003776": "4.17.21", - }, -} + "5128161233225832376": "4.17.21" + } +}" `; diff --git a/test/cli/install/bun-add.test.ts b/test/cli/install/bun-add.test.ts index 89c4a9e312..d2deadd510 100644 --- a/test/cli/install/bun-add.test.ts +++ b/test/cli/install/bun-add.test.ts @@ -1,7 +1,7 @@ import { file, spawn } from "bun"; import { afterAll, afterEach, beforeAll, beforeEach, expect, it, setDefaultTimeout } from "bun:test"; import { access, appendFile, copyFile, mkdir, readlink, rm, writeFile } from "fs/promises"; -import { bunExe, bunEnv as env, tmpdirSync, toBeValidBin, toBeWorkspaceLink, toHaveBins } from "harness"; +import { bunExe, bunEnv as env, tmpdirSync, toBeValidBin, toBeWorkspaceLink, toHaveBins, readdirSorted } from "harness"; import { join, relative, resolve } from "path"; import { check_npm_auth_type, @@ -11,7 +11,6 @@ import { dummyBeforeEach, dummyRegistry, package_dir, - readdirSorted, requested, root_url, setHandler, @@ -404,6 +403,165 @@ it("should add exact version with --exact", async () => { ); await access(join(package_dir, "bun.lockb")); }); +it("should add to devDependencies with --dev", async () => { + const urls: string[] = []; + setHandler(dummyRegistry(urls)); + 
await writeFile( + join(package_dir, "package.json"), + JSON.stringify({ + name: "foo", + version: "0.0.1", + }), + ); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "add", "--dev", "BaR"], + cwd: package_dir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); + expect(err).toContain("Saved lockfile"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed BaR@0.0.2", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + expect(urls.sort()).toEqual([`${root_url}/BaR`, `${root_url}/BaR-0.0.2.tgz`]); + expect(requested).toBe(2); + expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "BaR"]); + expect(await readdirSorted(join(package_dir, "node_modules", "BaR"))).toEqual(["package.json"]); + expect(await file(join(package_dir, "node_modules", "BaR", "package.json")).json()).toEqual({ + name: "bar", + version: "0.0.2", + }); + expect(await file(join(package_dir, "package.json")).text()).toEqual( + JSON.stringify( + { + name: "foo", + version: "0.0.1", + devDependencies: { + BaR: "^0.0.2", + }, + }, + null, + 2, + ), + ); + await access(join(package_dir, "bun.lockb")); +}); +it("should add to optionalDependencies with --optional", async () => { + const urls: string[] = []; + setHandler(dummyRegistry(urls)); + await writeFile( + join(package_dir, "package.json"), + JSON.stringify({ + name: "foo", + version: "0.0.1", + }), + ); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "add", "--optional", "BaR"], + cwd: package_dir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); + expect(err).toContain("Saved lockfile"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed BaR@0.0.2", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + expect(urls.sort()).toEqual([`${root_url}/BaR`, `${root_url}/BaR-0.0.2.tgz`]); + expect(requested).toBe(2); + expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "BaR"]); + expect(await readdirSorted(join(package_dir, "node_modules", "BaR"))).toEqual(["package.json"]); + expect(await file(join(package_dir, "node_modules", "BaR", "package.json")).json()).toEqual({ + name: "bar", + version: "0.0.2", + }); + expect(await file(join(package_dir, "package.json")).text()).toEqual( + JSON.stringify( + { + name: "foo", + version: "0.0.1", + optionalDependencies: { + BaR: "^0.0.2", + }, + }, + null, + 2, + ), + ); + await access(join(package_dir, "bun.lockb")); +}); +it("should add to peerDependencies with --peer", async () => { + const urls: string[] = []; + setHandler(dummyRegistry(urls)); + await writeFile( + join(package_dir, "package.json"), + JSON.stringify({ + name: "foo", + version: "0.0.1", + }), + ); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "add", "--peer", "BaR"], + cwd: package_dir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); + expect(err).toContain("Saved lockfile"); + const out = await new Response(stdout).text(); + 
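+ // Strip the trailing timing suffix (e.g. "[5ms]") from the install summary so the line-by-line comparison below stays deterministic across runs.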
expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed BaR@0.0.2", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + expect(urls.sort()).toEqual([`${root_url}/BaR`, `${root_url}/BaR-0.0.2.tgz`]); + expect(requested).toBe(2); + expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "BaR"]); + expect(await readdirSorted(join(package_dir, "node_modules", "BaR"))).toEqual(["package.json"]); + expect(await file(join(package_dir, "node_modules", "BaR", "package.json")).json()).toEqual({ + name: "bar", + version: "0.0.2", + }); + expect(await file(join(package_dir, "package.json")).text()).toEqual( + JSON.stringify( + { + name: "foo", + version: "0.0.1", + peerDependencies: { + BaR: "^0.0.2", + }, + }, + null, + 2, + ), + ); + await access(join(package_dir, "bun.lockb")); +}); it("should add exact version with install.exact", async () => { const urls: string[] = []; diff --git a/test/cli/install/bun-install-lifecycle-scripts.test.ts b/test/cli/install/bun-install-lifecycle-scripts.test.ts new file mode 100644 index 0000000000..44bcf1bb5c --- /dev/null +++ b/test/cli/install/bun-install-lifecycle-scripts.test.ts @@ -0,0 +1,2910 @@ +import { + VerdaccioRegistry, + isLinux, + bunEnv as env, + bunExe, + assertManifestsPopulated, + readdirSorted, + isWindows, + stderrForInstall, + runBunInstall, +} from "harness"; +import { beforeAll, afterAll, beforeEach, test, expect, describe, setDefaultTimeout } from "bun:test"; +import { writeFile, exists, rm, mkdir } from "fs/promises"; +import { join, sep } from "path"; +import { spawn, file, write } from "bun"; + +var verdaccio = new VerdaccioRegistry(); +var packageDir: string; +var packageJson: string; + +beforeAll(async () => { + setDefaultTimeout(1000 * 60 * 5); + await verdaccio.start(); +}); + +afterAll(() => { + verdaccio.stop(); +}); + +beforeEach(async () => { + ({ packageDir, packageJson } = await verdaccio.createTestDir()); + env.BUN_INSTALL_CACHE_DIR = join(packageDir, ".bun-cache"); + env.BUN_TMPDIR = env.TMPDIR = env.TEMP = join(packageDir, ".bun-tmp"); +}); + +// waiter thread is only a thing on Linux. +for (const forceWaiterThread of isLinux ? [false, true] : [false]) { + describe("lifecycle scripts" + (forceWaiterThread ? " (waiter thread)" : ""), async () => { + test("root package with all lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + const writeScript = async (name: string) => { + const contents = ` + import { writeFileSync, existsSync, rmSync } from "fs"; + import { join } from "path"; + + const file = join(import.meta.dir, "${name}.txt"); + + if (existsSync(file)) { + rmSync(file); + writeFileSync(file, "${name} exists!"); + } else { + writeFileSync(file, "${name}!"); + } + `; + await writeFile(join(packageDir, `${name}.js`), contents); + }; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + preinstall: `${bunExe()} preinstall.js`, + install: `${bunExe()} install.js`, + postinstall: `${bunExe()} postinstall.js`, + preprepare: `${bunExe()} preprepare.js`, + prepare: `${bunExe()} prepare.js`, + postprepare: `${bunExe()} postprepare.js`, + }, + }), + ); + + await writeScript("preinstall"); + await writeScript("install"); + await writeScript("postinstall"); + await writeScript("preprepare"); + await writeScript("prepare"); + await writeScript("postprepare"); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "preprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "postprepare.txt"))).toBeTrue(); + expect(await file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(packageDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(packageDir, "preprepare.txt")).text()).toBe("preprepare!"); + expect(await file(join(packageDir, "prepare.txt")).text()).toBe("prepare!"); + expect(await file(join(packageDir, "postprepare.txt")).text()).toBe("postprepare!"); + + // add a dependency with all lifecycle scripts + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + preinstall: `${bunExe()} preinstall.js`, + install: `${bunExe()} install.js`, + postinstall: `${bunExe()} postinstall.js`, + preprepare: `${bunExe()} preprepare.js`, + prepare: `${bunExe()} prepare.js`, + postprepare: `${bunExe()} postprepare.js`, + }, + dependencies: { + "all-lifecycle-scripts": "1.0.0", + }, + trustedDependencies: ["all-lifecycle-scripts"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ all-lifecycle-scripts@1.0.0", + "", + "1 package installed", + ]); + 
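+ // Each generated script writes "<name>!" on a fresh run and "<name> exists!" when its marker file is already present, so the "exists!" assertions that follow prove the root lifecycle scripts were executed a second time.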
expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall exists!"); + expect(await file(join(packageDir, "install.txt")).text()).toBe("install exists!"); + expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall exists!"); + expect(await file(join(packageDir, "preprepare.txt")).text()).toBe("preprepare exists!"); + expect(await file(join(packageDir, "prepare.txt")).text()).toBe("prepare exists!"); + expect(await file(join(packageDir, "postprepare.txt")).text()).toBe("postprepare exists!"); + + const depDir = join(packageDir, "node_modules", "all-lifecycle-scripts"); + + expect(await exists(join(depDir, "preinstall.txt"))).toBeTrue(); + expect(await exists(join(depDir, "install.txt"))).toBeTrue(); + expect(await exists(join(depDir, "postinstall.txt"))).toBeTrue(); + expect(await exists(join(depDir, "preprepare.txt"))).toBeFalse(); + expect(await exists(join(depDir, "prepare.txt"))).toBeTrue(); + expect(await exists(join(depDir, "postprepare.txt"))).toBeFalse(); + + expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); + + await rm(join(packageDir, "preinstall.txt")); + await rm(join(packageDir, "install.txt")); + await rm(join(packageDir, "postinstall.txt")); + await rm(join(packageDir, "preprepare.txt")); + await rm(join(packageDir, "prepare.txt")); + await rm(join(packageDir, "postprepare.txt")); + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + // all at once + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ all-lifecycle-scripts@1.0.0", + "", + "1 package installed", + ]); + + expect(await file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(packageDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(packageDir, "preprepare.txt")).text()).toBe("preprepare!"); + expect(await file(join(packageDir, "prepare.txt")).text()).toBe("prepare!"); + expect(await file(join(packageDir, "postprepare.txt")).text()).toBe("postprepare!"); + + expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); + }); + + test("workspace lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + workspaces: ["packages/*"], + scripts: { + preinstall: `touch preinstall.txt`, + install: `touch install.txt`, + postinstall: `touch postinstall.txt`, + preprepare: `touch preprepare.txt`, + prepare: `touch prepare.txt`, + postprepare: `touch postprepare.txt`, + }, + }), + ); + + await mkdir(join(packageDir, "packages", "pkg1"), { recursive: true }); + await writeFile( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + version: "1.0.0", + scripts: { + preinstall: `touch preinstall.txt`, + install: `touch install.txt`, + postinstall: `touch postinstall.txt`, + preprepare: `touch preprepare.txt`, + prepare: `touch prepare.txt`, + postprepare: `touch postprepare.txt`, + }, + }), + ); + + await mkdir(join(packageDir, "packages", "pkg2"), { recursive: true }); + await writeFile( + join(packageDir, "packages", "pkg2", "package.json"), + JSON.stringify({ + name: "pkg2", + version: "1.0.0", + scripts: { + preinstall: `touch preinstall.txt`, + install: `touch install.txt`, + postinstall: `touch postinstall.txt`, + preprepare: `touch preprepare.txt`, + prepare: `touch prepare.txt`, + postprepare: `touch postprepare.txt`, + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + var err = await new Response(stderr).text(); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).toContain("Saved lockfile"); + var out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "2 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "preprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "postprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "postinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "preprepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "packages", "pkg1", "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "postprepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "packages", "pkg2", "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg2", "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg2", "postinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg2", "preprepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "packages", "pkg2", "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg2", "postprepare.txt"))).toBeFalse(); + }); + + test("dependency 
lifecycle scripts run before root lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + const script = '[[ -f "./node_modules/uses-what-bin-slow/what-bin.txt" ]]'; + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "uses-what-bin-slow": "1.0.0", + }, + trustedDependencies: ["uses-what-bin-slow"], + scripts: { + install: script, + postinstall: script, + preinstall: script, + prepare: script, + postprepare: script, + preprepare: script, + }, + }), + ); + + // uses-what-bin-slow will wait one second then write a file to disk. The root package should wait + for this to happen before running its lifecycle scripts. + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("install a dependency with lifecycle scripts, then add to trusted dependencies and install again", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "all-lifecycle-scripts": "1.0.0", + }, + trustedDependencies: [], + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ all-lifecycle-scripts@1.0.0", + "", + "1 package installed", + "", + "Blocked 3 postinstalls.
Run `bun pm untrusted` for details.", + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + const depDir = join(packageDir, "node_modules", "all-lifecycle-scripts"); + expect(await exists(join(depDir, "preinstall.txt"))).toBeFalse(); + expect(await exists(join(depDir, "install.txt"))).toBeFalse(); + expect(await exists(join(depDir, "postinstall.txt"))).toBeFalse(); + expect(await exists(join(depDir, "preprepare.txt"))).toBeFalse(); + expect(await exists(join(depDir, "prepare.txt"))).toBeTrue(); + expect(await exists(join(depDir, "postprepare.txt"))).toBeFalse(); + expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); + + // add to trusted dependencies + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "all-lifecycle-scripts": "1.0.0", + }, + trustedDependencies: ["all-lifecycle-scripts"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("Checked 1 install across 2 packages (no changes)"), + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); + expect(await exists(join(depDir, "preprepare.txt"))).toBeFalse(); + expect(await exists(join(depDir, "postprepare.txt"))).toBeFalse(); + }); + + test("adding a package without scripts to trustedDependencies", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "what-bin": "1.0.0", + }, + trustedDependencies: ["what-bin"], + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("+ what-bin@1.0.0"), + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); + const what_bin_bins = !isWindows ? 
["what-bin"] : ["what-bin.bunx", "what-bin.exe"]; + // prettier-ignore + expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { "what-bin": "1.0.0" }, + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("+ what-bin@1.0.0"), + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); + expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); + expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); + + // add it to trusted dependencies + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "what-bin": "1.0.0", + }, + trustedDependencies: ["what-bin"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, 
"").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); + expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); + }); + + test("lifecycle scripts run if node_modules is deleted", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "lifecycle-postinstall": "1.0.0", + }, + trustedDependencies: ["lifecycle-postinstall"], + }), + ); + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ lifecycle-postinstall@1.0.0", + "", + // @ts-ignore + "1 package installed", + ]); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(await exists(join(packageDir, "node_modules", "lifecycle-postinstall", "postinstall.txt"))).toBeTrue(); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + await rm(join(packageDir, "node_modules"), { force: true, recursive: true }); + await rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }); + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ lifecycle-postinstall@1.0.0", + "", + "1 package installed", + ]); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(await exists(join(packageDir, "node_modules", "lifecycle-postinstall", "postinstall.txt"))).toBeTrue(); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("INIT_CWD is set to the correct directory", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + install: "bun install.js", + }, + dependencies: { + "lifecycle-init-cwd": "1.0.0", + "another-init-cwd": "npm:lifecycle-init-cwd@1.0.0", + }, + trustedDependencies: ["lifecycle-init-cwd", "another-init-cwd"], + }), + ); + + await writeFile( + join(packageDir, "install.js"), + ` + const fs = require("fs"); + const path = require("path"); + + fs.writeFileSync( + path.join(__dirname, "test.txt"), + process.env.INIT_CWD || "does not exist" + ); + `, + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + const out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ another-init-cwd@1.0.0", + "+ lifecycle-init-cwd@1.0.0", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "test.txt")).text()).toBe(packageDir); + expect(await file(join(packageDir, "node_modules/lifecycle-init-cwd/test.txt")).text()).toBe(packageDir); + expect(await file(join(packageDir, "node_modules/another-init-cwd/test.txt")).text()).toBe(packageDir); + }); + + test("failing lifecycle script should print output", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "lifecycle-failing-postinstall": "1.0.0", + }, + trustedDependencies: ["lifecycle-failing-postinstall"], + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("hello"); + expect(await exited).toBe(1); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + const out = await new Response(stdout).text(); + expect(out).toEqual(expect.stringContaining("bun install v1.")); + }); + + test("failing root lifecycle script should print output correctly", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "fooooooooo", + version: "1.0.0", + scripts: { + preinstall: `${bunExe()} -e "throw new Error('Oops!')"`, + }, + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + }); + + expect(await exited).toBe(1); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await Bun.readableStreamToText(stdout)).toEqual(expect.stringContaining("bun install v1.")); + const err = await Bun.readableStreamToText(stderr); + expect(err).toContain("error: Oops!"); + expect(err).toContain('error: preinstall script from "fooooooooo" exited with 1'); + }); + + test("exit 0 in lifecycle scripts works", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + postinstall: "exit 0", + prepare: "exit 0", + postprepare: "exit 0", + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("No packages! Deleted empty lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("done"), + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("--ignore-scripts should skip lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "lifecycle-failing-postinstall": "1.0.0", + }, + trustedDependencies: ["lifecycle-failing-postinstall"], + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--ignore-scripts"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("hello"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ lifecycle-failing-postinstall@1.0.0", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("it should add `node-gyp rebuild` as the `install` script when `install` and `postinstall` don't exist and `binding.gyp` exists in the root of the package", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "binding-gyp-scripts": "1.5.0", + }, + trustedDependencies: ["binding-gyp-scripts"], + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ binding-gyp-scripts@1.5.0", + "", + "2 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules/binding-gyp-scripts/build.node"))).toBeTrue(); + }); + + test("automatic node-gyp scripts should not run for untrusted dependencies, and should run after adding to `trustedDependencies`", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + const packageJSON: any = { + name: "foo", + version: "1.0.0", + dependencies: { + "binding-gyp-scripts": "1.5.0", + }, + }; + await writeFile(packageJson, JSON.stringify(packageJSON)); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + let err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ binding-gyp-scripts@1.5.0", + "", + "2 packages installed", + "", + "Blocked 1 postinstall. Run `bun pm untrusted` for details.", + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "binding-gyp-scripts", "build.node"))).toBeFalse(); + + packageJSON.trustedDependencies = ["binding-gyp-scripts"]; + await writeFile(packageJson, JSON.stringify(packageJSON)); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "binding-gyp-scripts", "build.node"))).toBeTrue(); + }); + + test("automatic node-gyp scripts work in package root", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "node-gyp": "1.5.0", + }, + }), + ); + + await writeFile(join(packageDir, "binding.gyp"), ""); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ node-gyp@1.5.0", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "build.node"))).toBeTrue(); + + await rm(join(packageDir, "build.node")); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "build.node"))).toBeTrue(); + }); + + test("auto node-gyp scripts work when scripts other than `install` and `preinstall` exist", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "node-gyp": "1.5.0", + }, + scripts: { + postinstall: "exit 0", + prepare: "exit 0", + postprepare: "exit 0", + }, + }), + ); + + await writeFile(join(packageDir, "binding.gyp"), ""); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ node-gyp@1.5.0", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "build.node"))).toBeTrue(); + }); + + for (const script of ["install", "preinstall"]) { + test(`does not add auto node-gyp script when ${script} script exists`, async () => { + const testEnv = forceWaiterThread ?
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + const packageJSON: any = { + name: "foo", + version: "1.0.0", + dependencies: { + "node-gyp": "1.5.0", + }, + scripts: { + [script]: "exit 0", + }, + }; + await writeFile(packageJson, JSON.stringify(packageJSON)); + await writeFile(join(packageDir, "binding.gyp"), ""); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ node-gyp@1.5.0", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "build.node"))).toBeFalse(); + }); + } + + test("git dependencies also run `preprepare`, `prepare`, and `postprepare` scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "lifecycle-install-test": "dylan-conway/lifecycle-install-test#3ba6af5b64f2d27456e08df21d750072dffd3eee", + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + let err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ lifecycle-install-test@github:dylan-conway/lifecycle-install-test#3ba6af5", + "", + "1 package installed", + "", + "Blocked 6 postinstalls. 
Run `bun pm untrusted` for details.", + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preprepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "prepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postprepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preinstall.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "install.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postinstall.txt"))).toBeFalse(); + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "lifecycle-install-test": "dylan-conway/lifecycle-install-test#3ba6af5b64f2d27456e08df21d750072dffd3eee", + }, + trustedDependencies: ["lifecycle-install-test"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postinstall.txt"))).toBeTrue(); + }); + + test("root lifecycle scripts should wait for dependency lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "uses-what-bin-slow": "1.0.0", + }, + trustedDependencies: ["uses-what-bin-slow"], + scripts: { + install: '[[ -f "./node_modules/uses-what-bin-slow/what-bin.txt" ]]', + }, + }), + ); + + // Package `uses-what-bin-slow` has an install script that will sleep for 1 second + // before writing `what-bin.txt` to disk. The root package has an install script that + // checks if this file exists. If the root package install script does not wait for + // the other to finish, it will fail. 
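+ // As an illustration only (hypothetical; the real `uses-what-bin-slow` package is served by the test registry), + // its install script could be something like "sleep 1 && what-bin > what-bin.txt". The root package's + // `[[ -f ... ]]` install script can then only succeed if bun ran the dependency's script to completion first.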
+ + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ uses-what-bin-slow@1.0.0", + "", + "2 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + async function createPackagesWithScripts( + packagesCount: number, + scripts: Record<string, string>, + ): Promise<string[]> { + const dependencies: Record<string, string> = {}; + const dependenciesList: string[] = []; + + for (let i = 0; i < packagesCount; i++) { + const packageName: string = "stress-test-package-" + i; + const packageVersion = "1.0." + i; + + dependencies[packageName] = "file:./" + packageName; + dependenciesList[i] = packageName; + + const packagePath = join(packageDir, packageName); + await mkdir(packagePath); + await writeFile( + join(packagePath, "package.json"), + JSON.stringify({ + name: packageName, + version: packageVersion, + scripts, + }), + ); + } + + await writeFile( + packageJson, + JSON.stringify({ + name: "stress-test", + version: "1.0.0", + dependencies, + trustedDependencies: dependenciesList, + }), + ); + + return dependenciesList; + } + + test("reach max concurrent scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + const scripts = { + "preinstall": `${bunExe()} -e 'Bun.sleepSync(500)'`, + }; + + const dependenciesList = await createPackagesWithScripts(4, scripts); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--concurrent-scripts=2"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await Bun.readableStreamToText(stdout); + expect(out).not.toContain("Blocked"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + ...dependenciesList.map(dep => `+ ${dep}@${dep}`), + "", + "4 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("stress test", async () => { + const testEnv = forceWaiterThread ?
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + const dependenciesList = await createPackagesWithScripts(500, { + "postinstall": `${bunExe()} --version`, + }); + + // the script is quick, default number for max concurrent scripts + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await Bun.readableStreamToText(stdout); + expect(out).not.toContain("Blocked"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + ...dependenciesList.map(dep => `+ ${dep}@${dep}`).sort((a, b) => a.localeCompare(b)), + "", + "500 packages installed", + ]); + + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("it should install and use correct binary version", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + // this should install `what-bin` in two places: + // + // - node_modules/.bin/what-bin@1.5.0 + // - node_modules/uses-what-bin/node_modules/.bin/what-bin@1.0.0 + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "uses-what-bin": "1.0.0", + "what-bin": "1.5.0", + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + var err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + var out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("+ uses-what-bin@1.0.0"), + "+ what-bin@1.5.0", + "", + "3 packages installed", + "", + "Blocked 1 postinstall. 
Run `bun pm untrusted` for details.", + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "node_modules", "what-bin", "what-bin.js")).text()).toContain( + "what-bin@1.5.0", + ); + expect( + await file(join(packageDir, "node_modules", "uses-what-bin", "node_modules", "what-bin", "what-bin.js")).text(), + ).toContain("what-bin@1.0.0"); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "uses-what-bin": "1.5.0", + "what-bin": "1.0.0", + }, + scripts: { + install: "what-bin", + }, + trustedDependencies: ["uses-what-bin"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "node_modules", "what-bin", "what-bin.js")).text()).toContain( + "what-bin@1.0.0", + ); + expect( + await file(join(packageDir, "node_modules", "uses-what-bin", "node_modules", "what-bin", "what-bin.js")).text(), + ).toContain("what-bin@1.5.0"); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + out = await new Response(stdout).text(); + err = await new Response(stderr).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("+ uses-what-bin@1.5.0"), + expect.stringContaining("+ what-bin@1.0.0"), + "", + "3 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("node-gyp should always be available for lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + install: "node-gyp --version", + }, + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + + // if node-gyp isn't available, it would return a non-zero exit code + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + // if this test fails, `electron` might be removed from the default list + test("default trusted dependencies should work", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + "electron": "1.0.0", + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ electron@1.0.0", + "", + "1 package installed", + ]); + expect(out).not.toContain("Blocked"); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("default trusted dependencies should not be used if trustedDependencies is populated", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + "uses-what-bin": "1.0.0", + // fake electron package because it's in the default trustedDependencies list + "electron": "1.0.0", + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + // electron lifecycle scripts should run, uses-what-bin scripts should not run + var err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + var out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ electron@1.0.0", + expect.stringContaining("+ uses-what-bin@1.0.0"), + "", + "3 packages installed", + "", + "Blocked 1 postinstall.
Run `bun pm untrusted` for details.", + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + "uses-what-bin": "1.0.0", + "electron": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }), + ); + + // now uses-what-bin scripts should run and electron scripts should not run. + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ electron@1.0.0", + expect.stringContaining("+ uses-what-bin@1.0.0"), + "", + "3 packages installed", + "", + "Blocked 1 postinstall. Run `bun pm untrusted` for details.", + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); + }); + + test("does not run any scripts if trustedDependencies is an empty list", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + "uses-what-bin": "1.0.0", + "electron": "1.0.0", + }, + trustedDependencies: [], + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await Bun.readableStreamToText(stderr); + const out = await Bun.readableStreamToText(stdout); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ electron@1.0.0", + expect.stringContaining("+ uses-what-bin@1.0.0"), + "", + "3 packages installed", + "", + "Blocked 2 postinstalls. Run `bun pm untrusted` for details.", + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); + }); + + test("will run default trustedDependencies after install that didn't include them", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + electron: "1.0.0", + }, + trustedDependencies: ["blah"], + }), + ); + + // first install does not run electron scripts + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + var err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + var out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ electron@1.0.0", + "", + "1 package installed", + "", + "Blocked 1 postinstall. Run `bun pm untrusted` for details.", + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + electron: "1.0.0", + }, + }), + ); + + // The electron scripts should run now because it's in default trusted dependencies. + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); + }); + + describe("--trust", async () => { + test("unhoisted untrusted scripts, none at root node_modules", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + // prevents real `uses-what-bin` from hoisting to root + "uses-what-bin": "npm:a-dep@1.0.3", + }, + workspaces: ["pkg1"], + }), + ), + write( + join(packageDir, "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + dependencies: { + "uses-what-bin": "1.0.0", + }, + }), + ), + ]); + + await runBunInstall(testEnv, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + const results = await Promise.all([ + exists(join(packageDir, "node_modules", "pkg1", "node_modules", "uses-what-bin")), + exists(join(packageDir, "node_modules", "pkg1", "node_modules", "uses-what-bin", "what-bin.txt")), + ]); + + expect(results).toEqual([true, false]); + + const { stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "trust", "--all"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env: testEnv, + }); + + const err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("error:"); + + expect(await exited).toBe(0); + + expect( + await exists(join(packageDir, "node_modules", "pkg1", "node_modules", "uses-what-bin", "what-bin.txt")), + ).toBeTrue(); + }); + const trustTests = [ + { + label: "only name", + packageJson: { + name: "foo", + }, + }, + { + label: "empty dependencies", + packageJson: { + name: "foo", + dependencies: {}, + }, + }, + { + label: "populated dependencies", + packageJson: { + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + }, + }, + + { + label: "empty trustedDependencies", + packageJson: { + name: "foo", + trustedDependencies: [], + }, + }, + + { + label: "populated dependencies, empty trustedDependencies", + packageJson: { + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: [], + }, + }, + + { + label: "populated dependencies and trustedDependencies", + packageJson: { + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }, + }, + + { + label: "empty dependencies and trustedDependencies", + packageJson: { + name: "foo", + dependencies: {}, + trustedDependencies: [], + }, + }, + ]; + for (const { label, packageJson } of trustTests) { + test(label, async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile(join(packageDir, "package.json"), JSON.stringify(packageJson)); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i", "--trust", "uses-what-bin@1.0.0"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + }); + + let err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed uses-what-bin@1.0.0", + "", + "2 packages installed", + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }); + + // another install should not error with json SyntaxError + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 2 installs across 3 packages (no changes)", + ]); + expect(await exited).toBe(0); + }); + } + describe("packages without lifecycle scripts", async () => { + test("initial install", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i", "--trust", "no-deps@1.0.0"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + }); + + const err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + const out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed no-deps@1.0.0", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeTrue(); + expect(await file(packageJson).json()).toEqual({ + name: "foo", + dependencies: { + "no-deps": "1.0.0", + }, + }); + }); + test("already installed", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + }), + ); + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i", "no-deps"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + }); + + let err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed no-deps@2.0.0", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeTrue(); + expect(await file(packageJson).json()).toEqual({ + name: "foo", + dependencies: { + "no-deps": "^2.0.0", + }, + }); + + // oops, I wanted to run the lifecycle scripts for no-deps, I'll install + // again with --trust. + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i", "--trust", "no-deps"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + })); + + // oh, I didn't realize no-deps doesn't have + // any lifecycle scripts. It shouldn't automatically add to + // trustedDependencies. + + err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed no-deps@2.0.0", + "", + expect.stringContaining("done"), + "", + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeTrue(); + expect(await file(packageJson).json()).toEqual({ + name: "foo", + dependencies: { + "no-deps": "^2.0.0", + }, + }); + }); + }); + }); + + describe("updating trustedDependencies", async () => { + test("existing trustedDependencies, unchanged trustedDependencies", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + trustedDependencies: ["uses-what-bin"], + dependencies: { + "uses-what-bin": "1.0.0", + }, + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + }); + + let err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("+ uses-what-bin@1.0.0"), + "", + "2 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + expect(await file(packageJson).json()).toEqual({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }); + + // no changes, lockfile shouldn't be saved + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 2 installs across 3 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("existing trustedDependencies, removing trustedDependencies", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + trustedDependencies: ["uses-what-bin"], + dependencies: { + "uses-what-bin": "1.0.0", + }, + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + }); + + let err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("+ uses-what-bin@1.0.0"), + "", + "2 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + expect(await file(packageJson).json()).toEqual({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }); + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + }), + ); + + // this script should not run because uses-what-bin is no longer in trustedDependencies + await rm(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"), { force: true }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 2 installs across 3 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(packageJson).json()).toEqual({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + }); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); + }); + + test("non-existent trustedDependencies, then adding it", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "electron": "1.0.0", + }, + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + }); + + let err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ electron@1.0.0", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); + expect(await file(packageJson).json()).toEqual({ + name: "foo", + dependencies: { + "electron": "1.0.0", + }, + }); + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + trustedDependencies: ["electron"], + dependencies: { + "electron": "1.0.0", + }, + }), + ); + + await rm(join(packageDir, "node_modules", "electron", "preinstall.txt"), { force: true }); + + // lockfile should save even though there are no changes to trustedDependencies due to + // the default list + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); + }); + }); +
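+ // The next two tests empty PATH before running the install to verify that lifecycle + // scripts can still find `node` and `node-gyp` even when the environment provides neither.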
Deleted empty lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); + }); + + test("ensureTempNodeGypScript works", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + preinstall: "node-gyp --version", + }, + }), + ); + + const originalPath = env.PATH; + env.PATH = ""; + + let { stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "ignore", + env, + }); + + env.PATH = originalPath; + + let err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("No packages! Deleted empty lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("bun pm trust and untrusted on missing package", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "uses-what-bin": "1.5.0", + }, + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + }); + + let err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("+ uses-what-bin@1.5.0"), + "", + "2 packages installed", + "", + "Blocked 1 postinstall. 
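+ // uses-what-bin@1.5.0 has a postinstall script, so without a trustedDependencies entry + // the install should block it and point the user at `bun pm untrusted`.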
Run `bun pm untrusted` for details.", + "", + ]); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + // remove uses-what-bin from node_modules, bun pm trust and untrusted should handle missing package + await rm(join(packageDir, "node_modules", "uses-what-bin"), { recursive: true, force: true }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "untrusted"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("bun pm untrusted"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out).toContain("Found 0 untrusted dependencies with scripts"); + expect(await exited).toBe(0); + + ({ stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "trust", "uses-what-bin"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + })); + + expect(await exited).toBe(1); + + err = await Bun.readableStreamToText(stderr); + expect(err).toContain("bun pm trust"); + expect(err).toContain("0 scripts ran"); + expect(err).toContain("uses-what-bin"); + }); + + describe("add trusted, delete, then add again", async () => { + // when we change bun install to delete dependencies from node_modules + // for both cases, we need to update this test + for (const withRm of [true, false]) { + test(withRm ? "withRm" : "withoutRm", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "no-deps": "1.0.0", + "uses-what-bin": "1.0.0", + }, + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + }); + + let err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + expect.stringContaining("+ no-deps@1.0.0"), + expect.stringContaining("+ uses-what-bin@1.0.0"), + "", + "3 packages installed", + "", + "Blocked 1 postinstall. 
Run `bun pm untrusted` for details.", + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "trust", "uses-what-bin"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out).toContain("1 script ran across 1 package"); + expect(await exited).toBe(0); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + expect(await file(packageJson).json()).toEqual({ + name: "foo", + dependencies: { + "no-deps": "1.0.0", + "uses-what-bin": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }); + + // now remove and install again + if (withRm) { + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "rm", "uses-what-bin"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out).toContain("1 package removed"); + expect(out).toContain("uses-what-bin"); + expect(await exited).toBe(0); + } + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "no-deps": "1.0.0", + }, + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + let expected = withRm + ? ["", "Checked 1 install across 2 packages (no changes)"] + : ["", expect.stringContaining("1 package removed")]; + expected = [expect.stringContaining("bun install v1."), ...expected]; + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(expected); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin"))).toBe(!withRm); + + // add again, bun pm untrusted should report it as untrusted + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "no-deps": "1.0.0", + "uses-what-bin": "1.0.0", + }, + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expected = withRm + ? [ + "", + expect.stringContaining("+ uses-what-bin@1.0.0"), + "", + "1 package installed", + "", + "Blocked 1 postinstall. 
Run `bun pm untrusted` for details.", + "", + ] + : ["", expect.stringContaining("Checked 3 installs across 4 packages (no changes)"), ""]; + expected = [expect.stringContaining("bun install v1."), ...expected]; + expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual(expected); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "untrusted"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out).toContain("./node_modules/uses-what-bin @1.0.0".replaceAll("/", sep)); + expect(await exited).toBe(0); + }); + } + }); + + describe.if(!forceWaiterThread || process.platform === "linux")("does not use 100% cpu", async () => { + test("install", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + preinstall: `${bunExe()} -e 'Bun.sleepSync(1000)'`, + }, + }), + ); + + const proc = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + stdin: "ignore", + env: testEnv, + }); + + expect(await proc.exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(proc.resourceUsage()?.cpuTime.total).toBeLessThan(750_000); + }); + + // https://github.com/oven-sh/bun/issues/11252 + test.todoIf(isWindows)("bun pm trust", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + const dep = isWindows ? "uses-what-bin-slow-window" : "uses-what-bin-slow"; + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + [dep]: "1.0.0", + }, + }), + ); + + var { exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env: testEnv, + }); + + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", dep, "what-bin.txt"))).toBeFalse(); + + const proc = spawn({ + cmd: [bunExe(), "pm", "trust", "--all"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env: testEnv, + }); + + expect(await proc.exited).toBe(0); + + expect(await exists(join(packageDir, "node_modules", dep, "what-bin.txt"))).toBeTrue(); + + expect(proc.resourceUsage()?.cpuTime.total).toBeLessThan(750_000 * (isWindows ? 5 : 1)); + }); + }); + }); + + describe("stdout/stderr is inherited from root scripts during install", async () => { + test("without packages", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + const exe = bunExe().replace(/\\/g, "\\\\"); + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.2.3", + scripts: { + "preinstall": `${exe} -e 'process.stderr.write("preinstall stderr 🍦\\n")'`, + "install": `${exe} -e 'process.stdout.write("install stdout 🚀\\n")'`, + "prepare": `${exe} -e 'Bun.sleepSync(200); process.stdout.write("prepare stdout done ✅\\n")'`, + }, + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + expect(err.split(/\r?\n/)).toEqual([ + "No packages! Deleted empty lockfile", + "", + `$ ${exe} -e 'process.stderr.write("preinstall stderr 🍦\\n")'`, + "preinstall stderr 🍦", + `$ ${exe} -e 'process.stdout.write("install stdout 🚀\\n")'`, + `$ ${exe} -e 'Bun.sleepSync(200); process.stdout.write("prepare stdout done ✅\\n")'`, + "", + ]); + const out = await Bun.readableStreamToText(stdout); + expect(out.split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "install stdout 🚀", + "prepare stdout done ✅", + "", + expect.stringContaining("done"), + "", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + + test("with a package", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + + const exe = bunExe().replace(/\\/g, "\\\\"); + await writeFile( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.2.3", + scripts: { + "preinstall": `${exe} -e 'process.stderr.write("preinstall stderr 🍦\\n")'`, + "install": `${exe} -e 'process.stdout.write("install stdout 🚀\\n")'`, + "prepare": `${exe} -e 'Bun.sleepSync(200); process.stdout.write("prepare stdout done ✅\\n")'`, + }, + dependencies: { + "no-deps": "1.0.0", + }, + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = stderrForInstall(await Bun.readableStreamToText(stderr)); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + expect(err.split(/\r?\n/)).toEqual([ + "Resolving dependencies", + expect.stringContaining("Resolved, downloaded and extracted "), + "Saved lockfile", + "", + `$ ${exe} -e 'process.stderr.write("preinstall stderr 🍦\\n")'`, + "preinstall stderr 🍦", + `$ ${exe} -e 'process.stdout.write("install stdout 🚀\\n")'`, + `$ ${exe} -e 'Bun.sleepSync(200); process.stdout.write("prepare stdout done ✅\\n")'`, + "", + ]); + const out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "install stdout 🚀", + "prepare stdout done ✅", + "", + expect.stringContaining("+ no-deps@1.0.0"), + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + }); +} diff --git a/test/cli/install/registry/bun-install-registry.test.ts b/test/cli/install/bun-install-registry.test.ts similarity index 70% rename from test/cli/install/registry/bun-install-registry.test.ts rename to test/cli/install/bun-install-registry.test.ts index 846635dfd4..da074098f7 100644 --- 
a/test/cli/install/registry/bun-install-registry.test.ts +++ b/test/cli/install/bun-install-registry.test.ts @@ -1,17 +1,14 @@ import { file, spawn, write } from "bun"; import { install_test_helpers } from "bun:internal-for-testing"; import { afterAll, beforeAll, beforeEach, describe, expect, it, setDefaultTimeout, test } from "bun:test"; -import { ChildProcess, fork } from "child_process"; import { copyFileSync, mkdirSync } from "fs"; import { cp, exists, mkdir, readlink, rm, writeFile } from "fs/promises"; import { assertManifestsPopulated, bunExe, bunEnv as env, - isLinux, isWindows, mergeWindowEnvs, - randomPort, runBunInstall, runBunUpdate, pack, @@ -25,9 +22,10 @@ import { tls, isFlaky, isMacOS, + readdirSorted, + VerdaccioRegistry, } from "harness"; -import { join, resolve, sep } from "path"; -import { readdirSorted } from "../dummy.registry"; +import { join, resolve } from "path"; const { parseLockfile } = install_test_helpers; const { iniInternals } = require("bun:internal-for-testing"); const { loadNpmrc } = iniInternals; @@ -38,8 +36,8 @@ expect.extend({ toMatchNodeModulesAt, }); -var verdaccioServer: ChildProcess; -var port: number = randomPort(); +var verdaccio: VerdaccioRegistry; +var port: number; var packageDir: string; /** packageJson = join(packageDir, "package.json"); */ var packageJson: string; @@ -47,69 +45,28 @@ var packageJson: string; let users: Record<string, string> = {}; beforeAll(async () => { - console.log("STARTING VERDACCIO"); setDefaultTimeout(1000 * 60 * 5); - verdaccioServer = fork( - require.resolve("verdaccio/bin/verdaccio"), - ["-c", join(import.meta.dir, "verdaccio.yaml"), "-l", `${port}`], - { - silent: true, - // Prefer using a release build of Bun since it's faster - execPath: Bun.which("bun") || bunExe(), - }, - ); - - verdaccioServer.stderr?.on("data", data => { - console.error(`Error: ${data}`); - }); - - verdaccioServer.on("error", error => { - console.error(`Failed to start child process: ${error}`); - }); - - verdaccioServer.on("exit", (code, signal) => { - if (code !== 0) { - console.error(`Child process exited with code ${code} and signal ${signal}`); - } else { - console.log("Child process exited successfully"); - } - }); - - await new Promise(done => { - verdaccioServer.on("message", (msg: { verdaccio_started: boolean }) => { - if (msg.verdaccio_started) { - console.log("Verdaccio started"); - done(); - } - }); - }); + verdaccio = new VerdaccioRegistry(); + port = verdaccio.port; + await verdaccio.start(); }); afterAll(async () => { await Bun.$`rm -f ${import.meta.dir}/htpasswd`.throws(false); - if (verdaccioServer) verdaccioServer.kill(); + verdaccio.stop(); }); beforeEach(async () => { - packageDir = tmpdirSync(); - packageJson = join(packageDir, "package.json"); + ({ packageDir, packageJson } = await verdaccio.createTestDir()); await Bun.$`rm -f ${import.meta.dir}/htpasswd`.throws(false); await Bun.$`rm -rf ${import.meta.dir}/packages/private-pkg-dont-touch`.throws(false); users = {}; env.BUN_INSTALL_CACHE_DIR = join(packageDir, ".bun-cache"); env.BUN_TMPDIR = env.TMPDIR = env.TEMP = join(packageDir, ".bun-tmp"); - await writeFile( - join(packageDir, "bunfig.toml"), - ` -[install] -cache = "${join(packageDir, ".bun-cache")}" -registry = "http://localhost:${port}/" -`, - ); }); function registryUrl() { - return `http://localhost:${port}/`; + return verdaccio.registryUrl(); } /** @@ -549,7 +506,7 @@ describe("certificate authority", () => { const mockRegistryFetch = function (opts?: any): (req: Request) => Promise<Response> { return async function (req: Request) {
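+ // requests mentioning no-deps get the fixture tarball; everything else gets a plain 200 "OK"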
if (req.url.includes("no-deps")) { - return new Response(Bun.file(join(import.meta.dir, "packages", "no-deps", "no-deps-1.0.0.tgz"))); + return new Response(Bun.file(join(import.meta.dir, "registry", "packages", "no-deps", "no-deps-1.0.0.tgz"))); } return new Response("OK", { status: 200 }); }; @@ -1036,7 +993,7 @@ describe("publish", async () => { cache = false registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; await Promise.all([ - rm(join(import.meta.dir, "packages", "otp-pkg-1"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "otp-pkg-1"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write( packageJson, @@ -1068,7 +1025,7 @@ describe("publish", async () => { registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; await Promise.all([ - rm(join(import.meta.dir, "packages", "otp-pkg-2"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "otp-pkg-2"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write( packageJson, @@ -1106,7 +1063,7 @@ describe("publish", async () => { registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; await Promise.all([ - rm(join(import.meta.dir, "packages", "otp-pkg-3"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "otp-pkg-3"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write( packageJson, @@ -1149,7 +1106,7 @@ describe("publish", async () => { registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; await Promise.all([ - rm(join(import.meta.dir, "packages", "otp-pkg-4"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "otp-pkg-4"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write( packageJson, @@ -1175,7 +1132,7 @@ describe("publish", async () => { test("can publish a package then install it", async () => { const bunfig = await authBunfig("basic"); await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-1"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-1"), { recursive: true, force: true }), write( packageJson, JSON.stringify({ @@ -1207,7 +1164,7 @@ describe("publish", async () => { }, }; await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-2"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-2"), { recursive: true, force: true }), write(packageJson, JSON.stringify(json)), write(join(packageDir, "bunfig.toml"), bunfig), ]); @@ -1223,7 +1180,7 @@ describe("publish", async () => { expect(await exists(join(packageDir, "node_modules", "publish-pkg-2", "package.json"))).toBeTrue(); await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-2"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-2"), { recursive: true, force: true }), rm(join(packageDir, "bun.lockb"), { recursive: true, force: true }), rm(join(packageDir, "node_modules"), { recursive: true, force: true }), ]); @@ -1249,7 +1206,7 @@ describe("publish", async () => { console.log({ packageDir, publishDir }); await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-bins"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-bins"), { recursive: true, force: true }), write( join(publishDir, "package.json"), JSON.stringify({ @@ -1313,7 +1270,7 @@ describe("publish", 
async () => { const publishDir = tmpdirSync(); const bunfig = await authBunfig("manydeps"); await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-deps"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-deps"), { recursive: true, force: true }), write( join(publishDir, "package.json"), JSON.stringify( @@ -1373,7 +1330,7 @@ describe("publish", async () => { }, }; await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-3"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-3"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write( packageJson, @@ -1398,7 +1355,7 @@ describe("publish", async () => { test("does not publish", async () => { const bunfig = await authBunfig("dryrun"); await Promise.all([ - rm(join(import.meta.dir, "packages", "dry-run-1"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "dry-run-1"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write( packageJson, @@ -1415,12 +1372,12 @@ describe("publish", async () => { const { out, err, exitCode } = await publish(env, packageDir, "--dry-run"); expect(exitCode).toBe(0); - expect(await exists(join(import.meta.dir, "packages", "dry-run-1"))).toBeFalse(); + expect(await exists(join(verdaccio.packagesPath, "dry-run-1"))).toBeFalse(); }); test("does not publish from tarball path", async () => { const bunfig = await authBunfig("dryruntarball"); await Promise.all([ - rm(join(import.meta.dir, "packages", "dry-run-2"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "dry-run-2"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write( packageJson, @@ -1439,7 +1396,7 @@ describe("publish", async () => { const { out, err, exitCode } = await publish(env, packageDir, "./dry-run-2-2.2.2.tgz", "--dry-run"); expect(exitCode).toBe(0); - expect(await exists(join(import.meta.dir, "packages", "dry-run-2"))).toBeFalse(); + expect(await exists(join(verdaccio.packagesPath, "dry-run-2"))).toBeFalse(); }); }); @@ -1473,7 +1430,7 @@ postpack: \${fs.existsSync("postpack.txt")}\`)`; test(`should run in order${arg ? " (--dry-run)" : ""}`, async () => { const bunfig = await authBunfig("lifecycle" + (arg ? 
"dry" : "")); await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-4"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-4"), { recursive: true, force: true }), write(packageJson, JSON.stringify(json)), write(join(packageDir, "script.js"), script), write(join(packageDir, "bunfig.toml"), bunfig), @@ -1505,7 +1462,7 @@ postpack: \${fs.existsSync("postpack.txt")}\`)`; test("--ignore-scripts", async () => { const bunfig = await authBunfig("ignorescripts"); await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-5"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-5"), { recursive: true, force: true }), write(packageJson, JSON.stringify(json)), write(join(packageDir, "script.js"), script), write(join(packageDir, "bunfig.toml"), bunfig), @@ -1530,7 +1487,7 @@ postpack: \${fs.existsSync("postpack.txt")}\`)`; test("attempting to publish a private package should fail", async () => { const bunfig = await authBunfig("privatepackage"); await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-6"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-6"), { recursive: true, force: true }), write( packageJson, JSON.stringify({ @@ -1549,7 +1506,7 @@ postpack: \${fs.existsSync("postpack.txt")}\`)`; let { out, err, exitCode } = await publish(env, packageDir); expect(exitCode).toBe(1); expect(err).toContain("error: attempted to publish a private package"); - expect(await exists(join(import.meta.dir, "packages", "publish-pkg-6-6.6.6.tgz"))).toBeFalse(); + expect(await exists(join(verdaccio.packagesPath, "publish-pkg-6-6.6.6.tgz"))).toBeFalse(); // try tarball await pack(packageDir, env); @@ -1563,7 +1520,7 @@ postpack: \${fs.existsSync("postpack.txt")}\`)`; test("--access", async () => { const bunfig = await authBunfig("accessflag"); await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-7"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-7"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write( packageJson, @@ -1582,7 +1539,7 @@ postpack: \${fs.existsSync("postpack.txt")}\`)`; ({ out, err, exitCode } = await publish(env, packageDir, "--access", "public")); expect(exitCode).toBe(0); - expect(await exists(join(import.meta.dir, "packages", "publish-pkg-7"))).toBeTrue(); + expect(await exists(join(verdaccio.packagesPath, "publish-pkg-7"))).toBeTrue(); }); for (const access of ["restricted", "public"]) { @@ -1601,7 +1558,7 @@ postpack: \${fs.existsSync("postpack.txt")}\`)`; }; await Promise.all([ - rm(join(import.meta.dir, "packages", "@secret", "publish-pkg-8"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "@secret", "publish-pkg-8"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write(packageJson, JSON.stringify(pkgJson)), ]); @@ -1629,7 +1586,7 @@ postpack: \${fs.existsSync("postpack.txt")}\`)`; }, }; await Promise.all([ - rm(join(import.meta.dir, "packages", "publish-pkg-9"), { recursive: true, force: true }), + rm(join(verdaccio.packagesPath, "publish-pkg-9"), { recursive: true, force: true }), write(join(packageDir, "bunfig.toml"), bunfig), write(packageJson, JSON.stringify(pkgJson)), ]); @@ -1896,6 +1853,121 @@ describe("text lockfile", () => { ); }); } + + test("optionalPeers", async () => { + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "foo", + workspaces: ["packages/*"], + dependencies: 
{ + "a-dep": "1.0.1", + }, + }), + ), + write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + peerDependencies: { + "no-deps": "1.0.0", + }, + peerDependenciesMeta: { + "no-deps": { + optional: true, + }, + }, + }), + ), + ]); + + let { exited } = spawn({ + cmd: [bunExe(), "install", "--save-text-lockfile"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env, + }); + expect(await exited).toBe(0); + + expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeFalse(); + const firstLockfile = (await Bun.file(join(packageDir, "bun.lock")).text()).replaceAll( + /localhost:\d+/g, + "localhost:1234", + ); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + + // another install should recognize the peer dependency as `"optional": true` + ({ exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env, + })); + expect(await exited).toBe(0); + + expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeFalse(); + expect((await Bun.file(join(packageDir, "bun.lock")).text()).replaceAll(/localhost:\d+/g, "localhost:1234")).toBe( + firstLockfile, + ); + }); +}); + +test("--lockfile-only", async () => { + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "foo", + workspaces: ["packages/*"], + dependencies: { + "no-deps": "^1.0.0", + }, + }), + ), + write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "package1", + dependencies: { + "two-range-deps": "1.0.0", + }, + }), + ), + ]); + + let { exited } = spawn({ + cmd: [bunExe(), "install", "--save-text-lockfile", "--lockfile-only"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env, + }); + + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules"))).toBeFalse(); + const firstLockfile = (await Bun.file(join(packageDir, "bun.lock")).text()).replaceAll( + /localhost:\d+/g, + "localhost:1234", + ); + + // nothing changes with another --lockfile-only + ({ exited } = spawn({ + cmd: [bunExe(), "install", "--lockfile-only"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env, + })); + + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules"))).toBeFalse(); + expect((await Bun.file(join(packageDir, "bun.lock")).text()).replaceAll(/localhost:\d+/g, "localhost:1234")).toBe( + firstLockfile, + ); }); describe("bundledDependencies", () => { @@ -3953,6 +4025,7 @@ describe("binaries", () => { "lockfileVersion": 0, "workspaces": { "": { + "name": "fooooo", "dependencies": { "fooooo": ".", // out of date, no no-deps @@ -5271,616 +5344,6 @@ describe("hoisting", async () => { }); }); -describe("workspaces", async () => { - test("adding packages in a subdirectory of a workspace", async () => { - await writeFile( - packageJson, - JSON.stringify({ - name: "root", - workspaces: ["foo"], - }), - ); - - await mkdir(join(packageDir, "folder1")); - await mkdir(join(packageDir, "foo", "folder2"), { recursive: true }); - await writeFile( - join(packageDir, "foo", "package.json"), - JSON.stringify({ - name: "foo", - }), - ); - - // add package to root workspace from `folder1` - let { stdout, exited } = spawn({ - cmd: [bunExe(), "add", "no-deps"], - cwd: join(packageDir, "folder1"), - stdout: "pipe", - stderr: "inherit", - env, - }); - let out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - 
expect.stringContaining("bun add v1."), - "", - "installed no-deps@2.0.0", - "", - "2 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(packageJson).json()).toEqual({ - name: "root", - workspaces: ["foo"], - dependencies: { - "no-deps": "^2.0.0", - }, - }); - - // add package to foo from `folder2` - ({ stdout, exited } = spawn({ - cmd: [bunExe(), "add", "what-bin"], - cwd: join(packageDir, "foo", "folder2"), - stdout: "pipe", - stderr: "inherit", - env, - })); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun add v1."), - "", - "installed what-bin@1.5.0 with binaries:", - " - what-bin", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "foo", "package.json")).json()).toEqual({ - name: "foo", - dependencies: { - "what-bin": "^1.5.0", - }, - }); - - // now delete node_modules and bun.lockb and install - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb")); - - ({ stdout, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: join(packageDir, "folder1"), - stdout: "pipe", - stderr: "inherit", - env, - })); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ no-deps@2.0.0", - "", - "3 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "foo", "no-deps", "what-bin"]); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb")); - - ({ stdout, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: join(packageDir, "foo", "folder2"), - stdout: "pipe", - stderr: "inherit", - env, - })); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ what-bin@1.5.0", - "", - "3 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "foo", "no-deps", "what-bin"]); - }); - test("adding packages in workspaces", async () => { - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - workspaces: ["packages/*"], - dependencies: { - "bar": "workspace:*", - }, - }), - ); - - await mkdir(join(packageDir, "packages", "bar"), { recursive: true }); - await mkdir(join(packageDir, "packages", "boba")); - await mkdir(join(packageDir, "packages", "pkg5")); - - await writeFile(join(packageDir, "packages", "bar", "package.json"), JSON.stringify({ name: "bar" })); - await writeFile( - join(packageDir, "packages", "boba", "package.json"), - JSON.stringify({ name: "boba", version: "1.0.0", dependencies: { "pkg5": "*" } }), - ); - await writeFile( - join(packageDir, "packages", "pkg5", "package.json"), - JSON.stringify({ - name: "pkg5", - version: "1.2.3", - dependencies: { - "bar": "workspace:*", - }, - }), - ); - - let { stdout, exited } = spawn({ - cmd: [bunExe(), 
"install"], - cwd: packageDir, - stdout: "pipe", - stderr: "inherit", - env, - }); - - let out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ bar@workspace:packages/bar", - "", - "3 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "bar"))).toBeTrue(); - expect(await exists(join(packageDir, "node_modules", "boba"))).toBeTrue(); - expect(await exists(join(packageDir, "node_modules", "pkg5"))).toBeTrue(); - - // add a package to the root workspace - ({ stdout, exited } = spawn({ - cmd: [bunExe(), "add", "no-deps"], - cwd: packageDir, - stdout: "pipe", - stderr: "inherit", - env, - })); - - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun add v1."), - "", - "installed no-deps@2.0.0", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(packageJson).json()).toEqual({ - name: "foo", - workspaces: ["packages/*"], - dependencies: { - bar: "workspace:*", - "no-deps": "^2.0.0", - }, - }); - - // add a package in a workspace - ({ stdout, exited } = spawn({ - cmd: [bunExe(), "add", "two-range-deps"], - cwd: join(packageDir, "packages", "boba"), - stdout: "pipe", - stderr: "inherit", - env, - })); - - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun add v1."), - "", - "installed two-range-deps@1.0.0", - "", - "3 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "packages", "boba", "package.json")).json()).toEqual({ - name: "boba", - version: "1.0.0", - dependencies: { - "pkg5": "*", - "two-range-deps": "^1.0.0", - }, - }); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - "@types", - "bar", - "boba", - "no-deps", - "pkg5", - "two-range-deps", - ]); - - // add a dependency to a workspace with the same name as another workspace - ({ stdout, exited } = spawn({ - cmd: [bunExe(), "add", "bar@0.0.7"], - cwd: join(packageDir, "packages", "boba"), - stdout: "pipe", - stderr: "inherit", - env, - })); - - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun add v1."), - "", - "installed bar@0.0.7", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "packages", "boba", "package.json")).json()).toEqual({ - name: "boba", - version: "1.0.0", - dependencies: { - "pkg5": "*", - "two-range-deps": "^1.0.0", - "bar": "0.0.7", - }, - }); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - "@types", - "bar", - "boba", - "no-deps", - "pkg5", - "two-range-deps", - ]); - expect(await file(join(packageDir, "node_modules", "boba", "node_modules", "bar", "package.json")).json()).toEqual({ - name: "bar", - version: "0.0.7", - description: "not a workspace", - }); - }); - test("it should detect duplicate workspace dependencies", async () => { - await 
writeFile( - packageJson, - JSON.stringify({ - name: "foo", - workspaces: ["packages/*"], - }), - ); - - await mkdir(join(packageDir, "packages", "pkg1"), { recursive: true }); - await writeFile(join(packageDir, "packages", "pkg1", "package.json"), JSON.stringify({ name: "pkg1" })); - await mkdir(join(packageDir, "packages", "pkg2"), { recursive: true }); - await writeFile(join(packageDir, "packages", "pkg2", "package.json"), JSON.stringify({ name: "pkg1" })); - - var { stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - }); - - var err = await new Response(stderr).text(); - expect(err).toContain('Workspace name "pkg1" already exists'); - expect(await exited).toBe(1); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb"), { force: true }); - - ({ stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: join(packageDir, "packages", "pkg1"), - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - err = await new Response(stderr).text(); - expect(err).toContain('Workspace name "pkg1" already exists'); - expect(await exited).toBe(1); - }); - - const versions = ["workspace:1.0.0", "workspace:*", "workspace:^1.0.0", "1.0.0", "*"]; - - for (const rootVersion of versions) { - for (const packageVersion of versions) { - test(`it should allow duplicates, root@${rootVersion}, package@${packageVersion}`, async () => { - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - workspaces: ["packages/*"], - dependencies: { - pkg2: rootVersion, - }, - }), - ); - - await mkdir(join(packageDir, "packages", "pkg1"), { recursive: true }); - await writeFile( - join(packageDir, "packages", "pkg1", "package.json"), - JSON.stringify({ - name: "pkg1", - version: "1.0.0", - dependencies: { - pkg2: packageVersion, - }, - }), - ); - - await mkdir(join(packageDir, "packages", "pkg2"), { recursive: true }); - await writeFile( - join(packageDir, "packages", "pkg2", "package.json"), - JSON.stringify({ name: "pkg2", version: "1.0.0" }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - }); - - var err = await new Response(stderr).text(); - var out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - `+ pkg2@workspace:packages/pkg2`, - "", - "2 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: join(packageDir, "packages", "pkg1"), - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 2 installs across 3 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - await 
rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb"), { recursive: true, force: true }); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: join(packageDir, "packages", "pkg1"), - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - `+ pkg2@workspace:packages/pkg2`, - "", - "2 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 2 installs across 3 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - } - } - - for (const version of versions) { - test(`it should allow listing workspace as dependency of the root package version ${version}`, async () => { - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - workspaces: ["packages/*"], - dependencies: { - "workspace-1": version, - }, - }), - ); - - await mkdir(join(packageDir, "packages", "workspace-1"), { recursive: true }); - await writeFile( - join(packageDir, "packages", "workspace-1", "package.json"), - JSON.stringify({ - name: "workspace-1", - version: "1.0.0", - }), - ); - // install first from the root, the workspace package - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - }); - - var err = await new Response(stderr).text(); - var out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("already exists"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("Duplicate dependency"); - expect(err).not.toContain('workspace dependency "workspace-1" not found'); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - `+ workspace-1@workspace:packages/workspace-1`, - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ - name: "workspace-1", - version: "1.0.0", - }); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: join(packageDir, "packages", "workspace-1"), - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - 
expect(err).not.toContain("already exists"); - expect(err).not.toContain("Duplicate dependency"); - expect(err).not.toContain('workspace dependency "workspace-1" not found'); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 1 install across 2 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ - name: "workspace-1", - version: "1.0.0", - }); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb"), { recursive: true, force: true }); - - // install from workspace package then from root - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: join(packageDir, "packages", "workspace-1"), - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("already exists"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("Duplicate dependency"); - expect(err).not.toContain('workspace dependency "workspace-1" not found'); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ - name: "workspace-1", - version: "1.0.0", - }); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("already exists"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("Duplicate dependency"); - expect(err).not.toContain('workspace dependency "workspace-1" not found'); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 1 install across 2 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ - name: "workspace-1", - version: "1.0.0", - }); - }); - } -}); - describe("transitive file dependencies", () => { async function checkHoistedFiles() { const aliasedFileDepFilesPackageJson = join( @@ -7758,2882 +7221,6 @@ test("missing package on reinstall, some with binaries", async () => { ).toBe(join(packageDir, "node_modules", "uses-what-bin", "node_modules", ".bin", bin)); }); -// waiter thread is only a thing on Linux. -for (const forceWaiterThread of isLinux ? [false, true] : [false]) { - describe("lifecycle scripts" + (forceWaiterThread ? " (waiter thread)" : ""), async () => { - test("root package with all lifecycle scripts", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - const writeScript = async (name: string) => { - const contents = ` - import { writeFileSync, existsSync, rmSync } from "fs"; - import { join } from "path"; - - const file = join(import.meta.dir, "${name}.txt"); - - if (existsSync(file)) { - rmSync(file); - writeFileSync(file, "${name} exists!"); - } else { - writeFileSync(file, "${name}!"); - } - `; - await writeFile(join(packageDir, `${name}.js`), contents); - }; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - scripts: { - preinstall: `${bunExe()} preinstall.js`, - install: `${bunExe()} install.js`, - postinstall: `${bunExe()} postinstall.js`, - preprepare: `${bunExe()} preprepare.js`, - prepare: `${bunExe()} prepare.js`, - postprepare: `${bunExe()} postprepare.js`, - }, - }), - ); - - await writeScript("preinstall"); - await writeScript("install"); - await writeScript("postinstall"); - await writeScript("preprepare"); - await writeScript("prepare"); - await writeScript("postprepare"); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - var err = await new Response(stderr).text(); - var out = await new Response(stdout).text(); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "preinstall.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "install.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "preprepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "prepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "postprepare.txt"))).toBeTrue(); - expect(await file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall!"); - expect(await file(join(packageDir, "install.txt")).text()).toBe("install!"); - expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall!"); - expect(await file(join(packageDir, "preprepare.txt")).text()).toBe("preprepare!"); - expect(await file(join(packageDir, "prepare.txt")).text()).toBe("prepare!"); - expect(await file(join(packageDir, "postprepare.txt")).text()).toBe("postprepare!"); - - // add a dependency with all lifecycle scripts - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - scripts: { - preinstall: `${bunExe()} preinstall.js`, - install: `${bunExe()} install.js`, - postinstall: `${bunExe()} postinstall.js`, - preprepare: `${bunExe()} preprepare.js`, - prepare: `${bunExe()} prepare.js`, - postprepare: `${bunExe()} postprepare.js`, - }, - dependencies: { - "all-lifecycle-scripts": "1.0.0", - }, - trustedDependencies: ["all-lifecycle-scripts"], - }), - ); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ all-lifecycle-scripts@1.0.0", - "", - "1 package installed", - ]); - 
expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall exists!"); - expect(await file(join(packageDir, "install.txt")).text()).toBe("install exists!"); - expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall exists!"); - expect(await file(join(packageDir, "preprepare.txt")).text()).toBe("preprepare exists!"); - expect(await file(join(packageDir, "prepare.txt")).text()).toBe("prepare exists!"); - expect(await file(join(packageDir, "postprepare.txt")).text()).toBe("postprepare exists!"); - - const depDir = join(packageDir, "node_modules", "all-lifecycle-scripts"); - - expect(await exists(join(depDir, "preinstall.txt"))).toBeTrue(); - expect(await exists(join(depDir, "install.txt"))).toBeTrue(); - expect(await exists(join(depDir, "postinstall.txt"))).toBeTrue(); - expect(await exists(join(depDir, "preprepare.txt"))).toBeFalse(); - expect(await exists(join(depDir, "prepare.txt"))).toBeTrue(); - expect(await exists(join(depDir, "postprepare.txt"))).toBeFalse(); - - expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); - expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); - expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); - expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); - - await rm(join(packageDir, "preinstall.txt")); - await rm(join(packageDir, "install.txt")); - await rm(join(packageDir, "postinstall.txt")); - await rm(join(packageDir, "preprepare.txt")); - await rm(join(packageDir, "prepare.txt")); - await rm(join(packageDir, "postprepare.txt")); - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb")); - - // all at once - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ all-lifecycle-scripts@1.0.0", - "", - "1 package installed", - ]); - - expect(await file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall!"); - expect(await file(join(packageDir, "install.txt")).text()).toBe("install!"); - expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall!"); - expect(await file(join(packageDir, "preprepare.txt")).text()).toBe("preprepare!"); - expect(await file(join(packageDir, "prepare.txt")).text()).toBe("prepare!"); - expect(await file(join(packageDir, "postprepare.txt")).text()).toBe("postprepare!"); - - expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); - expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); - expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); - expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); - }); - - test("workspace lifecycle scripts", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - workspaces: ["packages/*"], - scripts: { - preinstall: `touch preinstall.txt`, - install: `touch install.txt`, - postinstall: `touch postinstall.txt`, - preprepare: `touch preprepare.txt`, - prepare: `touch prepare.txt`, - postprepare: `touch postprepare.txt`, - }, - }), - ); - - await mkdir(join(packageDir, "packages", "pkg1"), { recursive: true }); - await writeFile( - join(packageDir, "packages", "pkg1", "package.json"), - JSON.stringify({ - name: "pkg1", - version: "1.0.0", - scripts: { - preinstall: `touch preinstall.txt`, - install: `touch install.txt`, - postinstall: `touch postinstall.txt`, - preprepare: `touch preprepare.txt`, - prepare: `touch prepare.txt`, - postprepare: `touch postprepare.txt`, - }, - }), - ); - - await mkdir(join(packageDir, "packages", "pkg2"), { recursive: true }); - await writeFile( - join(packageDir, "packages", "pkg2", "package.json"), - JSON.stringify({ - name: "pkg2", - version: "1.0.0", - scripts: { - preinstall: `touch preinstall.txt`, - install: `touch install.txt`, - postinstall: `touch postinstall.txt`, - preprepare: `touch preprepare.txt`, - prepare: `touch prepare.txt`, - postprepare: `touch postprepare.txt`, - }, - }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - var err = await new Response(stderr).text(); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).toContain("Saved lockfile"); - var out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "2 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "preinstall.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "install.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "preprepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "prepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "postprepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "packages", "pkg1", "preinstall.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "packages", "pkg1", "install.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "packages", "pkg1", "postinstall.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "packages", "pkg1", "preprepare.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "packages", "pkg1", "prepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "packages", "pkg1", "postprepare.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "packages", "pkg2", "preinstall.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "packages", "pkg2", "install.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "packages", "pkg2", "postinstall.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "packages", "pkg2", "preprepare.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "packages", "pkg2", "prepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "packages", "pkg2", "postprepare.txt"))).toBeFalse(); - }); - - test("dependency lifecycle scripts 
run before root lifecycle scripts", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - const script = '[[ -f "./node_modules/uses-what-bin-slow/what-bin.txt" ]]'; - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "uses-what-bin-slow": "1.0.0", - }, - trustedDependencies: ["uses-what-bin-slow"], - scripts: { - install: script, - postinstall: script, - preinstall: script, - prepare: script, - postprepare: script, - preprepare: script, - }, - }), - ); - - // uses-what-bin-slow will wait one second then write a file to disk. The root package should wait for - // this to happen before running its lifecycle scripts. - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - var err = await new Response(stderr).text(); - var out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("install a dependency with lifecycle scripts, then add to trusted dependencies and install again", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "all-lifecycle-scripts": "1.0.0", - }, - trustedDependencies: [], - }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - var err = await new Response(stderr).text(); - var out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ all-lifecycle-scripts@1.0.0", - "", - "1 package installed", - "", - "Blocked 3 postinstalls. 
Run `bun pm untrusted` for details.", - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - const depDir = join(packageDir, "node_modules", "all-lifecycle-scripts"); - expect(await exists(join(depDir, "preinstall.txt"))).toBeFalse(); - expect(await exists(join(depDir, "install.txt"))).toBeFalse(); - expect(await exists(join(depDir, "postinstall.txt"))).toBeFalse(); - expect(await exists(join(depDir, "preprepare.txt"))).toBeFalse(); - expect(await exists(join(depDir, "prepare.txt"))).toBeTrue(); - expect(await exists(join(depDir, "postprepare.txt"))).toBeFalse(); - expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); - - // add to trusted dependencies - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "all-lifecycle-scripts": "1.0.0", - }, - trustedDependencies: ["all-lifecycle-scripts"], - }), - ); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("Checked 1 install across 2 packages (no changes)"), - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); - expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); - expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); - expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); - expect(await exists(join(depDir, "preprepare.txt"))).toBeFalse(); - expect(await exists(join(depDir, "postprepare.txt"))).toBeFalse(); - }); - - test("adding a package without scripts to trustedDependencies", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "what-bin": "1.0.0", - }, - trustedDependencies: ["what-bin"], - }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - var err = await new Response(stderr).text(); - var out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("+ what-bin@1.0.0"), - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); - const what_bin_bins = !isWindows ? 
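- // On Windows, bins are materialized as a .exe shim plus a .bunx file instead of a symlink, hence two entries per bin.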
["what-bin"] : ["what-bin.bunx", "what-bin.exe"]; - // prettier-ignore - expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 1 install across 2 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb")); - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { "what-bin": "1.0.0" }, - }), - ); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("+ what-bin@1.0.0"), - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); - expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 1 install across 2 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); - expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); - - // add it to trusted dependencies - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "what-bin": "1.0.0", - }, - trustedDependencies: ["what-bin"], - }), - ); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - 
expect.stringContaining("bun install v1."), - "", - "Checked 1 install across 2 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); - expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); - }); - - test("lifecycle scripts run if node_modules is deleted", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "lifecycle-postinstall": "1.0.0", - }, - trustedDependencies: ["lifecycle-postinstall"], - }), - ); - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - var err = await new Response(stderr).text(); - var out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ lifecycle-postinstall@1.0.0", - "", - // @ts-ignore - "1 package installed", - ]); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(await exists(join(packageDir, "node_modules", "lifecycle-postinstall", "postinstall.txt"))).toBeTrue(); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - await rm(join(packageDir, "node_modules"), { force: true, recursive: true }); - await rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }); - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ lifecycle-postinstall@1.0.0", - "", - "1 package installed", - ]); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(await exists(join(packageDir, "node_modules", "lifecycle-postinstall", "postinstall.txt"))).toBeTrue(); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("INIT_CWD is set to the correct directory", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - scripts: { - install: "bun install.js", - }, - dependencies: { - "lifecycle-init-cwd": "1.0.0", - "another-init-cwd": "npm:lifecycle-init-cwd@1.0.0", - }, - trustedDependencies: ["lifecycle-init-cwd", "another-init-cwd"], - }), - ); - - await writeFile( - join(packageDir, "install.js"), - ` - const fs = require("fs"); - const path = require("path"); - - fs.writeFileSync( - path.join(__dirname, "test.txt"), - process.env.INIT_CWD || "does not exist" - ); - `, - ); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - const out = await new Response(stdout).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ another-init-cwd@1.0.0", - "+ lifecycle-init-cwd@1.0.0", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "test.txt")).text()).toBe(packageDir); - expect(await file(join(packageDir, "node_modules/lifecycle-init-cwd/test.txt")).text()).toBe(packageDir); - expect(await file(join(packageDir, "node_modules/another-init-cwd/test.txt")).text()).toBe(packageDir); - }); - - test("failing lifecycle script should print output", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "lifecycle-failing-postinstall": "1.0.0", - }, - trustedDependencies: ["lifecycle-failing-postinstall"], - }), - ); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).toContain("hello"); - expect(await exited).toBe(1); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - const out = await new Response(stdout).text(); - expect(out).toEqual(expect.stringContaining("bun install v1.")); - }); - - test("failing root lifecycle script should print output correctly", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "fooooooooo", - version: "1.0.0", - scripts: { - preinstall: `${bunExe()} -e "throw new Error('Oops!')"`, - }, - }), - ); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - }); - - expect(await exited).toBe(1); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await Bun.readableStreamToText(stdout)).toEqual(expect.stringContaining("bun install v1.")); - const err = await Bun.readableStreamToText(stderr); - expect(err).toContain("error: Oops!"); - expect(err).toContain('error: preinstall script from "fooooooooo" exited with 1'); - }); - - test("exit 0 in lifecycle scripts works", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - scripts: { - postinstall: "exit 0", - prepare: "exit 0", - postprepare: "exit 0", - }, - }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).toContain("No packages! Deleted empty lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("done"), - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("--ignore-scripts should skip lifecycle scripts", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - "lifecycle-failing-postinstall": "1.0.0", - }, - trustedDependencies: ["lifecycle-failing-postinstall"], - }), - ); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install", "--ignore-scripts"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("hello"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ lifecycle-failing-postinstall@1.0.0", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("it should add `node-gyp rebuild` as the `install` script when `install` and `postinstall` don't exist and `binding.gyp` exists in the root of the package", async () => { - const testEnv = forceWaiterThread ? 
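- // A package that ships a binding.gyp but declares no install/preinstall script is treated as if it had install: "node-gyp rebuild"; binding-gyp-scripts@1.5.0 relies on that default.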
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "binding-gyp-scripts": "1.5.0", - }, - trustedDependencies: ["binding-gyp-scripts"], - }), - ); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ binding-gyp-scripts@1.5.0", - "", - "2 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules/binding-gyp-scripts/build.node"))).toBeTrue(); - }); - - test("automatic node-gyp scripts should not run for untrusted dependencies, and should run after adding to `trustedDependencies`", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - const packageJSON: any = { - name: "foo", - version: "1.0.0", - dependencies: { - "binding-gyp-scripts": "1.5.0", - }, - }; - await writeFile(packageJson, JSON.stringify(packageJSON)); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - let err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ binding-gyp-scripts@1.5.0", - "", - "2 packages installed", - "", - "Blocked 1 postinstall. Run `bun pm untrusted` for details.", - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "binding-gyp-scripts", "build.node"))).toBeFalse(); - - packageJSON.trustedDependencies = ["binding-gyp-scripts"]; - await writeFile(packageJson, JSON.stringify(packageJSON)); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "binding-gyp-scripts", "build.node"))).toBeTrue(); - }); - - test("automatic node-gyp scripts work in package root", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "node-gyp": "1.5.0", - }, - }), - ); - - await writeFile(join(packageDir, "binding.gyp"), ""); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ node-gyp@1.5.0", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "build.node"))).toBeTrue(); - - await rm(join(packageDir, "build.node")); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "build.node"))).toBeTrue(); - }); - - test("auto node-gyp scripts work when scripts exist other than `install` and `preinstall`", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "node-gyp": "1.5.0", - }, - scripts: { - postinstall: "exit 0", - prepare: "exit 0", - postprepare: "exit 0", - }, - }), - ); - - await writeFile(join(packageDir, "binding.gyp"), ""); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ node-gyp@1.5.0", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "build.node"))).toBeTrue(); - }); - - for (const script of ["install", "preinstall"]) { - test(`does not add auto node-gyp script when ${script} script exists`, async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - const packageJSON: any = { - name: "foo", - version: "1.0.0", - dependencies: { - "node-gyp": "1.5.0", - }, - scripts: { - [script]: "exit 0", - }, - }; - await writeFile(packageJson, JSON.stringify(packageJSON)); - await writeFile(join(packageDir, "binding.gyp"), ""); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ node-gyp@1.5.0", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "build.node"))).toBeFalse(); - }); - } - - test("git dependencies also run `preprepare`, `prepare`, and `postprepare` scripts", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "lifecycle-install-test": "dylan-conway/lifecycle-install-test#3ba6af5b64f2d27456e08df21d750072dffd3eee", - }, - }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - let err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ lifecycle-install-test@github:dylan-conway/lifecycle-install-test#3ba6af5", - "", - "1 package installed", - "", - "Blocked 6 postinstalls. 
Run `bun pm untrusted` for details.", - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preprepare.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "prepare.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postprepare.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preinstall.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "install.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postinstall.txt"))).toBeFalse(); - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "lifecycle-install-test": "dylan-conway/lifecycle-install-test#3ba6af5b64f2d27456e08df21d750072dffd3eee", - }, - trustedDependencies: ["lifecycle-install-test"], - }), - ); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preprepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "prepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postprepare.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preinstall.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "install.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postinstall.txt"))).toBeTrue(); - }); - - test("root lifecycle scripts should wait for dependency lifecycle scripts", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "uses-what-bin-slow": "1.0.0", - }, - trustedDependencies: ["uses-what-bin-slow"], - scripts: { - install: '[[ -f "./node_modules/uses-what-bin-slow/what-bin.txt" ]]', - }, - }), - ); - - // Package `uses-what-bin-slow` has an install script that will sleep for 1 second - // before writing `what-bin.txt` to disk. The root package has an install script that - // checks if this file exists. If the root package install script does not wait for - // the other to finish, it will fail. 
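- // ([[ -f ... ]] is a bash conditional: it exits with a non-zero code unless the file exists, so the zero exit asserted below proves the ordering.)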
- - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ uses-what-bin-slow@1.0.0", - "", - "2 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - async function createPackagesWithScripts( - packagesCount: number, - scripts: Record<string, string>, - ): Promise<string[]> { - const dependencies: Record<string, string> = {}; - const dependenciesList = []; - - for (let i = 0; i < packagesCount; i++) { - const packageName: string = "stress-test-package-" + i; - const packageVersion = "1.0." + i; - - dependencies[packageName] = "file:./" + packageName; - dependenciesList[i] = packageName; - - const packagePath = join(packageDir, packageName); - await mkdir(packagePath); - await writeFile( - join(packagePath, "package.json"), - JSON.stringify({ - name: packageName, - version: packageVersion, - scripts, - }), - ); - } - - await writeFile( - packageJson, - JSON.stringify({ - name: "stress-test", - version: "1.0.0", - dependencies, - trustedDependencies: dependenciesList, - }), - ); - - return dependenciesList; - } - - test("reach max concurrent scripts", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - const scripts = { - "preinstall": `${bunExe()} -e 'Bun.sleepSync(500)'`, - }; - - const dependenciesList = await createPackagesWithScripts(4, scripts); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install", "--concurrent-scripts=2"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await Bun.readableStreamToText(stderr); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await Bun.readableStreamToText(stdout); - expect(out).not.toContain("Blocked"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - ...dependenciesList.map(dep => `+ ${dep}@${dep}`), - "", - "4 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("stress test", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - const dependenciesList = await createPackagesWithScripts(500, { - "postinstall": `${bunExe()} --version`, - }); - - // the script is quick, so the default max concurrent scripts is enough - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await Bun.readableStreamToText(stderr); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await Bun.readableStreamToText(stdout); - expect(out).not.toContain("Blocked"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - ...dependenciesList.map(dep => `+ ${dep}@${dep}`).sort((a, b) => a.localeCompare(b)), - "", - "500 packages installed", - ]); - - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("it should install and use the correct binary version", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - // this should install `what-bin` in two places: - // - // - node_modules/.bin/what-bin@1.5.0 - // - node_modules/uses-what-bin/node_modules/.bin/what-bin@1.0.0 - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "uses-what-bin": "1.0.0", - "what-bin": "1.5.0", - }, - }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - var err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - var out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("+ uses-what-bin@1.0.0"), - "+ what-bin@1.5.0", - "", - "3 packages installed", - "", - "Blocked 1 postinstall. 
Run `bun pm untrusted` for details.", - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "node_modules", "what-bin", "what-bin.js")).text()).toContain( - "what-bin@1.5.0", - ); - expect( - await file(join(packageDir, "node_modules", "uses-what-bin", "node_modules", "what-bin", "what-bin.js")).text(), - ).toContain("what-bin@1.0.0"); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb")); - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "uses-what-bin": "1.5.0", - "what-bin": "1.0.0", - }, - scripts: { - install: "what-bin", - }, - trustedDependencies: ["uses-what-bin"], - }), - ); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(join(packageDir, "node_modules", "what-bin", "what-bin.js")).text()).toContain( - "what-bin@1.0.0", - ); - expect( - await file(join(packageDir, "node_modules", "uses-what-bin", "node_modules", "what-bin", "what-bin.js")).text(), - ).toContain("what-bin@1.5.0"); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - out = await new Response(stdout).text(); - err = await new Response(stderr).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("+ uses-what-bin@1.5.0"), - expect.stringContaining("+ what-bin@1.0.0"), - "", - "3 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("node-gyp should always be available for lifecycle scripts", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - scripts: { - install: "node-gyp --version", - }, - }), - ); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - - // if node-gyp isn't available, it would return a non-zero exit code - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - // if this test fails, `electron` may have been removed from the default list - test("default trusted dependencies should work", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.2.3", - dependencies: { - "electron": "1.0.0", - }, - }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - const out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ electron@1.0.0", - "", - "1 package installed", - ]); - expect(out).not.toContain("Blocked"); - expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("default trusted dependencies should not be used if trustedDependencies is populated", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.2.3", - dependencies: { - "uses-what-bin": "1.0.0", - // fake electron package because it's in the default trustedDependencies list - "electron": "1.0.0", - }, - }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - // electron lifecycle scripts should run, uses-what-bin scripts should not run - var err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - var out = await new Response(stdout).text(); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ electron@1.0.0", - expect.stringContaining("+ uses-what-bin@1.0.0"), - "", - "3 packages installed", - "", - "Blocked 1 postinstall. 
Run `bun pm untrusted` for details.", - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb")); - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.2.3", - dependencies: { - "uses-what-bin": "1.0.0", - "electron": "1.0.0", - }, - trustedDependencies: ["uses-what-bin"], - }), - ); - - // now uses-what-bin scripts should run and electron scripts should not run. - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = await Bun.readableStreamToText(stderr); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ electron@1.0.0", - expect.stringContaining("+ uses-what-bin@1.0.0"), - "", - "3 packages installed", - "", - "Blocked 1 postinstall. Run `bun pm untrusted` for details.", - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); - expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); - }); - - test("does not run any scripts if trustedDependencies is an empty list", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.2.3", - dependencies: { - "uses-what-bin": "1.0.0", - "electron": "1.0.0", - }, - trustedDependencies: [], - }), - ); - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = await Bun.readableStreamToText(stderr); - const out = await Bun.readableStreamToText(stdout); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ electron@1.0.0", - expect.stringContaining("+ uses-what-bin@1.0.0"), - "", - "3 packages installed", - "", - "Blocked 2 postinstalls. Run `bun pm untrusted` for details.", - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); - expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); - }); - - test("will run default trustedDependencies after install that didn't include them", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.2.3", - dependencies: { - electron: "1.0.0", - }, - trustedDependencies: ["blah"], - }), - ); - - // first install does not run electron scripts - - var { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - var err = await Bun.readableStreamToText(stderr); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - var out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ electron@1.0.0", - "", - "1 package installed", - "", - "Blocked 1 postinstall. Run `bun pm untrusted` for details.", - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.2.3", - dependencies: { - electron: "1.0.0", - }, - }), - ); - - // The electron scripts should run now because it's in default trusted dependencies. - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = await Bun.readableStreamToText(stderr); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 1 install across 2 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); - }); - - describe("--trust", async () => { - test("unhoisted untrusted scripts, none at root node_modules", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await Promise.all([ - write( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - // prevents real `uses-what-bin` from hoisting to root - "uses-what-bin": "npm:a-dep@1.0.3", - }, - workspaces: ["pkg1"], - }), - ), - write( - join(packageDir, "pkg1", "package.json"), - JSON.stringify({ - name: "pkg1", - dependencies: { - "uses-what-bin": "1.0.0", - }, - }), - ), - ]); - - await runBunInstall(testEnv, packageDir); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - const results = await Promise.all([ - exists(join(packageDir, "node_modules", "pkg1", "node_modules", "uses-what-bin")), - exists(join(packageDir, "node_modules", "pkg1", "node_modules", "uses-what-bin", "what-bin.txt")), - ]); - - expect(results).toEqual([true, false]); - - const { stderr, exited } = spawn({ - cmd: [bunExe(), "pm", "trust", "--all"], - cwd: packageDir, - stdout: "ignore", - stderr: "pipe", - env: testEnv, - }); - - const err = await Bun.readableStreamToText(stderr); - expect(err).not.toContain("error:"); - - expect(await exited).toBe(0); - - expect( - await exists(join(packageDir, "node_modules", "pkg1", "node_modules", "uses-what-bin", "what-bin.txt")), - ).toBeTrue(); - }); - const trustTests = [ - { - label: "only name", - packageJson: { - name: "foo", - }, - }, - { - label: "empty dependencies", - packageJson: { - name: "foo", - dependencies: {}, - }, - }, - { - label: "populated dependencies", - packageJson: { - name: "foo", - dependencies: { - "uses-what-bin": "1.0.0", - }, - }, - }, - - { - label: "empty trustedDependencies", - packageJson: { - name: "foo", - trustedDependencies: [], - }, - }, - - { - label: "populated dependencies, empty trustedDependencies", - packageJson: { - name: "foo", - dependencies: { - "uses-what-bin": "1.0.0", - }, - trustedDependencies: [], - }, - }, - - { - label: "populated dependencies and trustedDependencies", - packageJson: { - name: "foo", - dependencies: { - "uses-what-bin": "1.0.0", - }, - trustedDependencies: ["uses-what-bin"], - }, - }, - - { - label: "empty dependencies and trustedDependencies", - packageJson: { - name: "foo", - dependencies: {}, - trustedDependencies: [], - }, - }, - ]; - for (const { label, packageJson } of trustTests) { - test(label, async () => { - const testEnv = forceWaiterThread ? 
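- // Whatever shape package.json starts in, `bun i --trust <pkg>` should run the package's scripts and persist it to trustedDependencies (verified below).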
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile(join(packageDir, "package.json"), JSON.stringify(packageJson)); - - let { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i", "--trust", "uses-what-bin@1.0.0"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - }); - - let err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - let out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun add v1."), - "", - "installed uses-what-bin@1.0.0", - "", - "2 packages installed", - ]); - expect(await exited).toBe(0); - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); - expect(await file(join(packageDir, "package.json")).json()).toEqual({ - name: "foo", - dependencies: { - "uses-what-bin": "1.0.0", - }, - trustedDependencies: ["uses-what-bin"], - }); - - // another install should not error with json SyntaxError - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 2 installs across 3 packages (no changes)", - ]); - expect(await exited).toBe(0); - }); - } - describe("packages without lifecycle scripts", async () => { - test("initial install", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - }), - ); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i", "--trust", "no-deps@1.0.0"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - }); - - const err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - const out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun add v1."), - "", - "installed no-deps@1.0.0", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeTrue(); - expect(await file(packageJson).json()).toEqual({ - name: "foo", - dependencies: { - "no-deps": "1.0.0", - }, - }); - }); - test("already installed", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - }), - ); - let { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i", "no-deps"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - }); - - let err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - let out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun add v1."), - "", - "installed no-deps@2.0.0", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeTrue(); - expect(await file(packageJson).json()).toEqual({ - name: "foo", - dependencies: { - "no-deps": "^2.0.0", - }, - }); - - // oops, I wanted to run the lifecycle scripts for no-deps, I'll install - // again with --trust. - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i", "--trust", "no-deps"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - })); - - // oh, I didn't realize no-deps doesn't have - // any lifecycle scripts. It shouldn't automatically add to - // trustedDependencies. - - err = await Bun.readableStreamToText(stderr); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun add v1."), - "", - "installed no-deps@2.0.0", - "", - expect.stringContaining("done"), - "", - ]); - expect(await exited).toBe(0); - expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeTrue(); - expect(await file(packageJson).json()).toEqual({ - name: "foo", - dependencies: { - "no-deps": "^2.0.0", - }, - }); - }); - }); - }); - - describe("updating trustedDependencies", async () => { - test("existing trustedDependencies, unchanged trustedDependencies", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - trustedDependencies: ["uses-what-bin"], - dependencies: { - "uses-what-bin": "1.0.0", - }, - }), - ); - - let { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - }); - - let err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - let out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("+ uses-what-bin@1.0.0"), - "", - "2 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); - expect(await file(packageJson).json()).toEqual({ - name: "foo", - dependencies: { - "uses-what-bin": "1.0.0", - }, - trustedDependencies: ["uses-what-bin"], - }); - - // no changes, lockfile shouldn't be saved - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 2 installs across 3 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("existing trustedDependencies, removing trustedDependencies", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - trustedDependencies: ["uses-what-bin"], - dependencies: { - "uses-what-bin": "1.0.0", - }, - }), - ); - - let { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - }); - - let err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - let out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("+ uses-what-bin@1.0.0"), - "", - "2 packages installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); - expect(await file(packageJson).json()).toEqual({ - name: "foo", - dependencies: { - "uses-what-bin": "1.0.0", - }, - trustedDependencies: ["uses-what-bin"], - }); - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - "uses-what-bin": "1.0.0", - }, - }), - ); - - // this script should not run because uses-what-bin is no longer in trustedDependencies - await rm(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"), { force: true }); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 2 installs across 3 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await file(packageJson).json()).toEqual({ - name: "foo", - dependencies: { - "uses-what-bin": "1.0.0", - }, - }); - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); - }); - - test("non-existent trustedDependencies, then adding it", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - "electron": "1.0.0", - }, - }), - ); - - let { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - }); - - let err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - let out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "+ electron@1.0.0", - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); - expect(await file(packageJson).json()).toEqual({ - name: "foo", - dependencies: { - "electron": "1.0.0", - }, - }); - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - trustedDependencies: ["electron"], - dependencies: { - "electron": "1.0.0", - }, - }), - ); - - await rm(join(packageDir, "node_modules", "electron", "preinstall.txt"), { force: true }); - - // lockfile should save even though there are no changes to trustedDependencies, because - // electron is in the default trusted list - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - "Checked 1 install across 2 packages (no changes)", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); - }); - }); - - test("node -p should work in postinstall scripts", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - scripts: { - postinstall: `node -p "require('fs').writeFileSync('postinstall.txt', 'postinstall')"`, - }, - }), - ); - - const originalPath = env.PATH; - env.PATH = ""; - - let { stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env: testEnv, - }); - - env.PATH = originalPath; - - let err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("No packages! Deleted empty lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); - }); - - test("ensureTempNodeGypScript works", async () => { - const testEnv = forceWaiterThread ?
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - scripts: { - preinstall: "node-gyp --version", - }, - }), - ); - - const originalPath = env.PATH; - env.PATH = ""; - - let { stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - stdin: "ignore", - env: testEnv, - }); - - env.PATH = originalPath; - - let err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("No packages! Deleted empty lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("bun pm trust and untrusted on missing package", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - "uses-what-bin": "1.5.0", - }, - }), - ); - - let { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - }); - - let err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - let out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("+ uses-what-bin@1.5.0"), - "", - "2 packages installed", - "", - "Blocked 1 postinstall. Run `bun pm untrusted` for details.", - "", - ]); - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - // remove uses-what-bin from node_modules, bun pm trust and untrusted should handle missing package - await rm(join(packageDir, "node_modules", "uses-what-bin"), { recursive: true, force: true }); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "pm", "untrusted"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("bun pm untrusted"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - expect(out).toContain("Found 0 untrusted dependencies with scripts"); - expect(await exited).toBe(0); - - ({ stderr, exited } = spawn({ - cmd: [bunExe(), "pm", "trust", "uses-what-bin"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - })); - - expect(await exited).toBe(1); - - err = await Bun.readableStreamToText(stderr); - expect(err).toContain("bun pm trust"); - expect(err).toContain("0 scripts ran"); - expect(err).toContain("uses-what-bin"); - }); - - describe("add trusted, delete, then add again", async () => { - // if bun install is ever changed to delete removed dependencies from node_modules, - // this test will need updating for both cases - for (const withRm of [true, false]) { - test(withRm ? "withRm" : "withoutRm", async () => { - const testEnv = forceWaiterThread ?
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - "no-deps": "1.0.0", - "uses-what-bin": "1.0.0", - }, - }), - ); - - let { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - }); - - let err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - let out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "", - expect.stringContaining("+ no-deps@1.0.0"), - expect.stringContaining("+ uses-what-bin@1.0.0"), - "", - "3 packages installed", - "", - "Blocked 1 postinstall. Run `bun pm untrusted` for details.", - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "pm", "trust", "uses-what-bin"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - expect(out).toContain("1 script ran across 1 package"); - expect(await exited).toBe(0); - - expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); - expect(await file(packageJson).json()).toEqual({ - name: "foo", - dependencies: { - "no-deps": "1.0.0", - "uses-what-bin": "1.0.0", - }, - trustedDependencies: ["uses-what-bin"], - }); - - // now remove and install again - if (withRm) { - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "rm", "uses-what-bin"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - expect(out).toContain("1 package removed"); - expect(out).toContain("uses-what-bin"); - expect(await exited).toBe(0); - } - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - "no-deps": "1.0.0", - }, - }), - ); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - let expected = withRm - ? 
["", "Checked 1 install across 2 packages (no changes)"] - : ["", expect.stringContaining("1 package removed")]; - expected = [expect.stringContaining("bun install v1."), ...expected]; - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(expected); - expect(await exited).toBe(0); - expect(await exists(join(packageDir, "node_modules", "uses-what-bin"))).toBe(!withRm); - - // add again, bun pm untrusted should report it as untrusted - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - "no-deps": "1.0.0", - "uses-what-bin": "1.0.0", - }, - }), - ); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "i"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - expected = withRm - ? [ - "", - expect.stringContaining("+ uses-what-bin@1.0.0"), - "", - "1 package installed", - "", - "Blocked 1 postinstall. Run `bun pm untrusted` for details.", - "", - ] - : ["", expect.stringContaining("Checked 3 installs across 4 packages (no changes)"), ""]; - expected = [expect.stringContaining("bun install v1."), ...expected]; - expect(out.replace(/\s*\[[0-9\.]+m?s\]$/m, "").split(/\r?\n/)).toEqual(expected); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "pm", "untrusted"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - })); - - err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - out = await Bun.readableStreamToText(stdout); - expect(out).toContain("./node_modules/uses-what-bin @1.0.0".replaceAll("/", sep)); - expect(await exited).toBe(0); - }); - } - }); - - describe.if(!forceWaiterThread || process.platform === "linux")("does not use 100% cpu", async () => { - test("install", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - scripts: { - preinstall: `${bunExe()} -e 'Bun.sleepSync(1000)'`, - }, - }), - ); - - const proc = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "ignore", - stderr: "ignore", - stdin: "ignore", - env: testEnv, - }); - - expect(await proc.exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(proc.resourceUsage()?.cpuTime.total).toBeLessThan(750_000); - }); - - // https://github.com/oven-sh/bun/issues/11252 - test.todoIf(isWindows)("bun pm trust", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - const dep = isWindows ? 
"uses-what-bin-slow-window" : "uses-what-bin-slow"; - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - [dep]: "1.0.0", - }, - }), - ); - - var { exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "ignore", - stderr: "ignore", - env: testEnv, - }); - - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - - expect(await exists(join(packageDir, "node_modules", dep, "what-bin.txt"))).toBeFalse(); - - const proc = spawn({ - cmd: [bunExe(), "pm", "trust", "--all"], - cwd: packageDir, - stdout: "ignore", - stderr: "ignore", - env: testEnv, - }); - - expect(await proc.exited).toBe(0); - - expect(await exists(join(packageDir, "node_modules", dep, "what-bin.txt"))).toBeTrue(); - - expect(proc.resourceUsage()?.cpuTime.total).toBeLessThan(750_000 * (isWindows ? 5 : 1)); - }); - }); - }); - - describe("stdout/stderr is inherited from root scripts during install", async () => { - test("without packages", async () => { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - const exe = bunExe().replace(/\\/g, "\\\\"); - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.2.3", - scripts: { - "preinstall": `${exe} -e 'process.stderr.write("preinstall stderr 🍦\\n")'`, - "install": `${exe} -e 'process.stdout.write("install stdout 🚀\\n")'`, - "prepare": `${exe} -e 'Bun.sleepSync(200); process.stdout.write("prepare stdout done ✅\\n")'`, - }, - }), - ); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - expect(err.split(/\r?\n/)).toEqual([ - "No packages! Deleted empty lockfile", - "", - `$ ${exe} -e 'process.stderr.write("preinstall stderr 🍦\\n")'`, - "preinstall stderr 🍦", - `$ ${exe} -e 'process.stdout.write("install stdout 🚀\\n")'`, - `$ ${exe} -e 'Bun.sleepSync(200); process.stdout.write("prepare stdout done ✅\\n")'`, - "", - ]); - const out = await Bun.readableStreamToText(stdout); - expect(out.split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "install stdout 🚀", - "prepare stdout done ✅", - "", - expect.stringContaining("done"), - "", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - - test("with a package", async () => { - const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; - - const exe = bunExe().replace(/\\/g, "\\\\"); - await writeFile( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.2.3", - scripts: { - "preinstall": `${exe} -e 'process.stderr.write("preinstall stderr 🍦\\n")'`, - "install": `${exe} -e 'process.stdout.write("install stdout 🚀\\n")'`, - "prepare": `${exe} -e 'Bun.sleepSync(200); process.stdout.write("prepare stdout done ✅\\n")'`, - }, - dependencies: { - "no-deps": "1.0.0", - }, - }), - ); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stderr: "pipe", - env: testEnv, - }); - - const err = stderrForInstall(await Bun.readableStreamToText(stderr)); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - expect(err.split(/\r?\n/)).toEqual([ - "Resolving dependencies", - expect.stringContaining("Resolved, downloaded and extracted "), - "Saved lockfile", - "", - `$ ${exe} -e 'process.stderr.write("preinstall stderr 🍦\\n")'`, - "preinstall stderr 🍦", - `$ ${exe} -e 'process.stdout.write("install stdout 🚀\\n")'`, - `$ ${exe} -e 'Bun.sleepSync(200); process.stdout.write("prepare stdout done ✅\\n")'`, - "", - ]); - const out = await Bun.readableStreamToText(stdout); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - expect.stringContaining("bun install v1."), - "install stdout 🚀", - "prepare stdout done ✅", - "", - expect.stringContaining("+ no-deps@1.0.0"), - "", - "1 package installed", - ]); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); - }); - }); -} - describe("pm trust", async () => { test("--default", async () => { await writeFile( @@ -13086,7 +9673,7 @@ it("$npm_command is accurate during publish", async () => { }), ); await write(join(packageDir, "bunfig.toml"), await authBunfig("npm_command")); - await rm(join(import.meta.dir, "packages", "publish-pkg-10"), { recursive: true, force: true }); + await rm(join(verdaccio.packagesPath, "publish-pkg-10"), { recursive: true, force: true }); let { out, err, exitCode } = await publish(env, packageDir, "--tag", "simpletag"); expect(err).toBe(`$ echo $npm_command\n`); expect(out.split("\n")).toEqual([ @@ -13125,7 +9712,7 @@ it("$npm_lifecycle_event is accurate during publish", async () => { `, ); await write(join(packageDir, "bunfig.toml"), await authBunfig("npm_lifecycle_event")); - await rm(join(import.meta.dir, "packages", "publish-pkg-11"), { recursive: true, force: true }); + await rm(join(verdaccio.packagesPath, "publish-pkg-11"), { recursive: true, force: true }); let { out, err, exitCode } = await publish(env, packageDir, "--tag", "simpletag"); expect(err).toBe(`$ echo 2 $npm_lifecycle_event\n$ echo 3 $npm_lifecycle_event\n`); expect(out.split("\n")).toEqual([ diff --git a/test/cli/install/bun-install-retry.test.ts b/test/cli/install/bun-install-retry.test.ts index 842691bb0b..cbba8e2b37 100644 --- a/test/cli/install/bun-install-retry.test.ts +++ b/test/cli/install/bun-install-retry.test.ts @@ -1,7 +1,7 @@ import { file, spawn } from "bun"; import { afterAll, afterEach, beforeAll, beforeEach, expect, it, setDefaultTimeout } from "bun:test"; import { access, writeFile } from "fs/promises"; -import { bunExe, bunEnv as env, tmpdirSync, toBeValidBin, toBeWorkspaceLink, toHaveBins } from "harness"; +import { bunExe, bunEnv as env, tmpdirSync, toBeValidBin, toBeWorkspaceLink, toHaveBins, readdirSorted } from "harness"; import { join } from "path"; 
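These import changes, here and in the other install test files below, come from one refactor: `readdirSorted` moves out of `test/cli/install/dummy.registry.ts` into the shared `test/harness.ts`, so tests import it from `harness` alongside the other helpers. For reference, a minimal sketch of the helper as this diff defines it (the exact implementation is visible in the dummy.registry.ts hunk further down):

import { readdir } from "fs/promises";
import type { PathLike } from "node:fs";

// Read a directory and return its entries sorted lexicographically, so that
// assertions on node_modules listings are deterministic across platforms.
export async function readdirSorted(path: PathLike): Promise<string[]> {
  const results = await readdir(path);
  results.sort();
  return results;
}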
import { dummyAfterAll, @@ -10,7 +10,6 @@ import { dummyBeforeEach, dummyRegistry, package_dir, - readdirSorted, requested, root_url, setHandler, diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts index 70addfbf37..fb4a1d7c40 100644 --- a/test/cli/install/bun-install.test.ts +++ b/test/cli/install/bun-install.test.ts @@ -23,6 +23,7 @@ import { runBunInstall, isWindows, textLockfile, + readdirSorted, } from "harness"; import { join, sep, resolve } from "path"; import { @@ -32,7 +33,6 @@ import { dummyBeforeEach, dummyRegistry, package_dir, - readdirSorted, requested, root_url, setHandler, diff --git a/test/cli/install/bun-link.test.ts b/test/cli/install/bun-link.test.ts index 20d2be6439..68f5160faa 100644 --- a/test/cli/install/bun-link.test.ts +++ b/test/cli/install/bun-link.test.ts @@ -1,16 +1,18 @@ import { file, spawn } from "bun"; import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test"; import { access, mkdir, writeFile } from "fs/promises"; -import { bunExe, bunEnv as env, runBunInstall, tmpdirSync, toBeValidBin, toHaveBins, stderrForInstall } from "harness"; -import { basename, join } from "path"; import { - dummyAfterAll, - dummyAfterEach, - dummyBeforeAll, - dummyBeforeEach, - package_dir, + bunExe, + bunEnv as env, + runBunInstall, + tmpdirSync, + toBeValidBin, + toHaveBins, + stderrForInstall, readdirSorted, -} from "./dummy.registry"; +} from "harness"; +import { basename, join } from "path"; +import { dummyAfterAll, dummyAfterEach, dummyBeforeAll, dummyBeforeEach, package_dir } from "./dummy.registry"; beforeAll(dummyBeforeAll); afterAll(dummyAfterAll); diff --git a/test/cli/install/bun-lock.test.ts b/test/cli/install/bun-lock.test.ts index d5d6ce6c3c..f60725be5f 100644 --- a/test/cli/install/bun-lock.test.ts +++ b/test/cli/install/bun-lock.test.ts @@ -1,4 +1,4 @@ -import { spawn } from "bun"; +import { spawn, write, file } from "bun"; import { expect, it } from "bun:test"; import { access, copyFile, open, writeFile } from "fs/promises"; import { bunExe, bunEnv as env, isWindows, tmpdirSync } from "harness"; @@ -45,7 +45,41 @@ it("should write plaintext lockfiles", async () => { } expect(stat.mode).toBe(mode); - expect(await file.readFile({ encoding: "utf8" })).toEqual( - `{\n \"lockfileVersion\": 0,\n \"workspaces\": {\n \"\": {\n \"dependencies\": {\n \"dummy-package\": \"file:./bar-0.0.2.tgz\",\n },\n },\n },\n \"packages\": {\n \"dummy-package\": [\"bar@./bar-0.0.2.tgz\", {}],\n }\n}\n`, - ); + expect(await file.readFile({ encoding: "utf8" })).toMatchSnapshot(); +}); + +// won't work on windows, " is not a valid character in a filename +it.skipIf(isWindows)("should escape names", async () => { + const packageDir = tmpdirSync(); + await Promise.all([ + write( + join(packageDir, "package.json"), + JSON.stringify({ + name: "quote-in-dependency-name", + workspaces: ["packages/*"], + }), + ), + write(join(packageDir, "packages", '"', "package.json"), JSON.stringify({ name: '"' })), + write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + dependencies: { + '"': "*", + }, + }), + ), + ]); + + const { exited } = spawn({ + cmd: [bunExe(), "install", "--save-text-lockfile"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env, + }); + + expect(await exited).toBe(0); + + expect(await file(join(packageDir, "bun.lock")).text()).toMatchSnapshot(); }); diff --git a/test/cli/install/bun-pm.test.ts b/test/cli/install/bun-pm.test.ts index d1a6042f96..8a49dcaf16 100644 --- 
a/test/cli/install/bun-pm.test.ts +++ b/test/cli/install/bun-pm.test.ts @@ -1,7 +1,7 @@ import { spawn } from "bun"; import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test"; import { exists, mkdir, writeFile } from "fs/promises"; -import { bunEnv, bunExe, bunEnv as env, tmpdirSync } from "harness"; +import { bunEnv, bunExe, bunEnv as env, tmpdirSync, readdirSorted } from "harness"; import { cpSync } from "node:fs"; import { join } from "path"; import { @@ -11,7 +11,6 @@ import { dummyBeforeEach, dummyRegistry, package_dir, - readdirSorted, requested, root_url, setHandler, diff --git a/test/cli/install/bun-run.test.ts b/test/cli/install/bun-run.test.ts index 99293d6013..3b363ba90f 100644 --- a/test/cli/install/bun-run.test.ts +++ b/test/cli/install/bun-run.test.ts @@ -1,9 +1,17 @@ import { file, spawn, spawnSync } from "bun"; import { beforeEach, describe, expect, it } from "bun:test"; import { exists, mkdir, rm, writeFile } from "fs/promises"; -import { bunEnv, bunExe, bunEnv as env, isWindows, tempDirWithFiles, tmpdirSync, stderrForInstall } from "harness"; +import { + bunEnv, + bunExe, + bunEnv as env, + isWindows, + tempDirWithFiles, + tmpdirSync, + stderrForInstall, + readdirSorted, +} from "harness"; import { join } from "path"; -import { readdirSorted } from "./dummy.registry"; let run_dir: string; @@ -591,22 +599,53 @@ it("should pass arguments correctly in scripts", async () => { } }); -it("should run with bun instead of npm even with leading spaces", async () => { - const dir = tempDirWithFiles("test", { - "package.json": JSON.stringify({ - workspaces: ["a", "b"], - scripts: { "root_script": " npm run other_script ", "other_script": " echo hi " }, - }), - }); - { - const { stdout, stderr, exitCode } = spawnSync({ - cmd: [bunExe(), "run", "root_script"], - cwd: dir, - env: bunEnv, - }); +const cases = [ + ["yarn run", "run"], + ["yarn add", "passthrough"], + ["yarn audit", "passthrough"], + ["yarn -abcd run", "passthrough"], + ["yarn info", "passthrough"], + ["yarn generate-lock-entry", "passthrough"], + ["yarn", "run"], + ["npm run", "run"], + ["npx", "x"], + ["pnpm run", "run"], + ["pnpm dlx", "x"], + ["pnpx", "x"], +]; +describe("should handle run case", () => { + for (const ccase of cases) { + it(ccase[0], async () => { + const dir = tempDirWithFiles("test", { + "package.json": JSON.stringify({ + scripts: { + "root_script": ` ${ccase[0]} target_script% `, + "target_script%": " echo target_script ", + }, + }), + }); + { + const { stdout, stderr, exitCode } = spawnSync({ + cmd: [bunExe(), "root_script"], + cwd: dir, + env: bunEnv, + }); - expect(stderr.toString()).toMatch(/\$ bun(-debug)? run other_script \n\$ echo hi \n/); - expect(stdout.toString()).toEndWith("hi\n"); - expect(exitCode).toBe(0); + if (ccase[1] === "run") { + expect(stderr.toString()).toMatch( + /^\$ bun(-debug)? run target_script% \n\$ echo target_script \n/, + ); + expect(stdout.toString()).toEndWith("target_script\n"); + expect(exitCode).toBe(0); + } else if (ccase[1] === "x") { + expect(stderr.toString()).toMatch( + /^\$ bun(-debug)? 
x target_script% \nerror: unrecognised dependency format: target_script%/, + ); + expect(exitCode).toBe(1); + } else { + expect(stderr.toString()).toStartWith(`$ ${ccase[0]} target_script% \n`); + } + } + }); } }); diff --git a/test/cli/install/bun-update.test.ts b/test/cli/install/bun-update.test.ts index 2ecbbb9daa..e81f65184a 100644 --- a/test/cli/install/bun-update.test.ts +++ b/test/cli/install/bun-update.test.ts @@ -1,7 +1,7 @@ import { file, spawn } from "bun"; import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test"; import { access, readFile, rm, writeFile } from "fs/promises"; -import { bunExe, bunEnv as env, toBeValidBin, toHaveBins } from "harness"; +import { bunExe, bunEnv as env, toBeValidBin, toHaveBins, readdirSorted } from "harness"; import { join } from "path"; import { dummyAfterAll, @@ -10,7 +10,6 @@ import { dummyBeforeEach, dummyRegistry, package_dir, - readdirSorted, requested, root_url, setHandler, diff --git a/test/cli/install/bun-workspaces.test.ts b/test/cli/install/bun-workspaces.test.ts index 7cf4d49d37..a72ece2280 100644 --- a/test/cli/install/bun-workspaces.test.ts +++ b/test/cli/install/bun-workspaces.test.ts @@ -1,31 +1,41 @@ -import { file, write } from "bun"; +import { file, write, spawn } from "bun"; import { install_test_helpers } from "bun:internal-for-testing"; -import { beforeEach, describe, expect, test } from "bun:test"; +import { beforeEach, describe, expect, test, beforeAll, afterAll } from "bun:test"; import { mkdirSync, rmSync, writeFileSync } from "fs"; -import { cp } from "fs/promises"; -import { bunExe, bunEnv as env, runBunInstall, tmpdirSync, toMatchNodeModulesAt } from "harness"; +import { cp, mkdir, rm, exists } from "fs/promises"; +import { + bunExe, + bunEnv as env, + runBunInstall, + toMatchNodeModulesAt, + assertManifestsPopulated, + VerdaccioRegistry, + readdirSorted, +} from "harness"; import { join } from "path"; const { parseLockfile } = install_test_helpers; expect.extend({ toMatchNodeModulesAt }); -var testCounter: number = 0; - // not necessary, but verdaccio will be added to this file in the near future -var port: number = 4873; -var packageDir: string; -beforeEach(() => { - packageDir = tmpdirSync(); +var verdaccio: VerdaccioRegistry; +var packageDir: string; +var packageJson: string; + +beforeAll(async () => { + verdaccio = new VerdaccioRegistry(); + await verdaccio.start(); +}); + +afterAll(() => { + verdaccio.stop(); +}); + +beforeEach(async () => { + ({ packageDir, packageJson } = await verdaccio.createTestDir()); env.BUN_INSTALL_CACHE_DIR = join(packageDir, ".bun-cache"); env.BUN_TMPDIR = env.TMPDIR = env.TEMP = join(packageDir, ".bun-tmp"); - writeFileSync( - join(packageDir, "bunfig.toml"), - ` -[install] -cache = false -`, - ); }); test("dependency on workspace without version in package.json", async () => { @@ -41,7 +51,7 @@ test("dependency on workspace without version in package.json", async () => { write( join(packageDir, "packages", "mono", "package.json"), JSON.stringify({ - name: "lodash", + name: "no-deps", }), ), ]); @@ -60,7 +70,7 @@ test("dependency on workspace without version in package.json", async () => { "1", "1.*", "1.1.*", - "1.1.1", + "1.1.0", "*-pre+build", "*+build", "latest", // dist-tag exists, should choose package from npm @@ -74,7 +84,7 @@ test("dependency on workspace without version in package.json", async () => { name: "bar", version: "1.0.0", dependencies: { - lodash: version, + "no-deps": version, }, }), ); @@ -82,7 +92,9 @@ test("dependency on workspace without 
version in package.json", async () => { const { out } = await runBunInstall(env, packageDir); const lockfile = parseLockfile(packageDir); expect(lockfile).toMatchNodeModulesAt(packageDir); - expect(lockfile).toMatchSnapshot(`version: ${version}`); + expect( + JSON.stringify(lockfile, null, 2).replaceAll(/http:\/\/localhost:\d+/g, "http://localhost:1234"), + ).toMatchSnapshot(`version: ${version}`); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ expect.stringContaining("bun install v1."), "", @@ -101,7 +113,7 @@ test("dependency on workspace without version in package.json", async () => { name: "bar", version: "1.0.0", dependencies: { - lodash: version, + "no-deps": version, }, }), ); @@ -109,7 +121,9 @@ test("dependency on workspace without version in package.json", async () => { const { out } = await runBunInstall(env, packageDir); const lockfile = parseLockfile(packageDir); expect(lockfile).toMatchNodeModulesAt(packageDir); - expect(lockfile).toMatchSnapshot(`version: ${version}`); + expect( + JSON.stringify(lockfile, null, 2).replaceAll(/http:\/\/localhost:\d+/g, "http://localhost:1234"), + ).toMatchSnapshot(`version: ${version}`); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ expect.stringContaining("bun install v1."), "", @@ -134,7 +148,7 @@ test("dependency on same name as workspace and dist-tag", async () => { write( join(packageDir, "packages", "mono", "package.json"), JSON.stringify({ - name: "lodash", + name: "no-deps", version: "4.17.21", }), ), @@ -145,7 +159,7 @@ test("dependency on same name as workspace and dist-tag", async () => { name: "bar", version: "1.0.0", dependencies: { - lodash: "latest", + "no-deps": "latest", }, }), ), @@ -153,7 +167,9 @@ test("dependency on same name as workspace and dist-tag", async () => { const { out } = await runBunInstall(env, packageDir); const lockfile = parseLockfile(packageDir); - expect(lockfile).toMatchSnapshot("with version"); + expect( + JSON.stringify(lockfile, null, 2).replaceAll(/http:\/\/localhost:\d+/g, "http://localhost:1234"), + ).toMatchSnapshot("with version"); expect(lockfile).toMatchNodeModulesAt(packageDir); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ expect.stringContaining("bun install v1."), @@ -365,8 +381,6 @@ describe("workspace aliases", async () => { ), ]); - console.log({ packageDir }); - await runBunInstall(env, packageDir); const files = await Promise.all( ["a0", "a1", "a2", "a3", "a4", "a5"].map(name => @@ -658,3 +672,957 @@ test("$npm_package_config_ works in root in subpackage", async () => { expect(await new Response(p.stderr).text()).toBe(`$ echo $npm_package_config_foo $npm_package_config_qux\n`); expect(await new Response(p.stdout).text()).toBe(`tab\n`); }); + +test("adding packages in a subdirectory of a workspace", async () => { + await write( + packageJson, + JSON.stringify({ + name: "root", + workspaces: ["foo"], + }), + ); + + await mkdir(join(packageDir, "folder1")); + await mkdir(join(packageDir, "foo", "folder2"), { recursive: true }); + await write( + join(packageDir, "foo", "package.json"), + JSON.stringify({ + name: "foo", + }), + ); + + // add package to root workspace from `folder1` + let { stdout, exited } = spawn({ + cmd: [bunExe(), "add", "no-deps"], + cwd: join(packageDir, "folder1"), + stdout: "pipe", + stderr: "inherit", + env, + }); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add 
v1."), + "", + "installed no-deps@2.0.0", + "", + "2 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(packageJson).json()).toEqual({ + name: "root", + workspaces: ["foo"], + dependencies: { + "no-deps": "^2.0.0", + }, + }); + + // add package to foo from `folder2` + ({ stdout, exited } = spawn({ + cmd: [bunExe(), "add", "what-bin"], + cwd: join(packageDir, "foo", "folder2"), + stdout: "pipe", + stderr: "inherit", + env, + })); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed what-bin@1.5.0 with binaries:", + " - what-bin", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "foo", "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "what-bin": "^1.5.0", + }, + }); + + // now delete node_modules and bun.lockb and install + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + ({ stdout, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: join(packageDir, "folder1"), + stdout: "pipe", + stderr: "inherit", + env, + })); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ no-deps@2.0.0", + "", + "3 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "foo", "no-deps", "what-bin"]); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + ({ stdout, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: join(packageDir, "foo", "folder2"), + stdout: "pipe", + stderr: "inherit", + env, + })); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ what-bin@1.5.0", + "", + "3 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "foo", "no-deps", "what-bin"]); +}); +test("adding packages in workspaces", async () => { + await write( + packageJson, + JSON.stringify({ + name: "foo", + workspaces: ["packages/*"], + dependencies: { + "bar": "workspace:*", + }, + }), + ); + + await mkdir(join(packageDir, "packages", "bar"), { recursive: true }); + await mkdir(join(packageDir, "packages", "boba")); + await mkdir(join(packageDir, "packages", "pkg5")); + + await write(join(packageDir, "packages", "bar", "package.json"), JSON.stringify({ name: "bar" })); + await write( + join(packageDir, "packages", "boba", "package.json"), + JSON.stringify({ name: "boba", version: "1.0.0", dependencies: { "pkg5": "*" } }), + ); + await write( + join(packageDir, "packages", "pkg5", "package.json"), + JSON.stringify({ + name: "pkg5", + version: "1.2.3", + dependencies: { + "bar": "workspace:*", + }, + }), + ); + + let { stdout, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: 
packageDir, + stdout: "pipe", + stderr: "inherit", + env, + }); + + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "+ bar@workspace:packages/bar", + "", + "3 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await exists(join(packageDir, "node_modules", "bar"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "boba"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "pkg5"))).toBeTrue(); + + // add a package to the root workspace + ({ stdout, exited } = spawn({ + cmd: [bunExe(), "add", "no-deps"], + cwd: packageDir, + stdout: "pipe", + stderr: "inherit", + env, + })); + + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed no-deps@2.0.0", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(packageJson).json()).toEqual({ + name: "foo", + workspaces: ["packages/*"], + dependencies: { + bar: "workspace:*", + "no-deps": "^2.0.0", + }, + }); + + // add a package in a workspace + ({ stdout, exited } = spawn({ + cmd: [bunExe(), "add", "two-range-deps"], + cwd: join(packageDir, "packages", "boba"), + stdout: "pipe", + stderr: "inherit", + env, + })); + + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed two-range-deps@1.0.0", + "", + "3 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "packages", "boba", "package.json")).json()).toEqual({ + name: "boba", + version: "1.0.0", + dependencies: { + "pkg5": "*", + "two-range-deps": "^1.0.0", + }, + }); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ + "@types", + "bar", + "boba", + "no-deps", + "pkg5", + "two-range-deps", + ]); + + // add a dependency to a workspace with the same name as another workspace + ({ stdout, exited } = spawn({ + cmd: [bunExe(), "add", "bar@0.0.7"], + cwd: join(packageDir, "packages", "boba"), + stdout: "pipe", + stderr: "inherit", + env, + })); + + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun add v1."), + "", + "installed bar@0.0.7", + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "packages", "boba", "package.json")).json()).toEqual({ + name: "boba", + version: "1.0.0", + dependencies: { + "pkg5": "*", + "two-range-deps": "^1.0.0", + "bar": "0.0.7", + }, + }); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ + "@types", + "bar", + "boba", + "no-deps", + "pkg5", + "two-range-deps", + ]); + expect(await file(join(packageDir, "node_modules", "boba", "node_modules", "bar", "package.json")).json()).toEqual({ + name: "bar", + version: "0.0.7", + description: "not a workspace", + }); +}); +test("it should detect duplicate workspace dependencies", 
async () => { + await write( + packageJson, + JSON.stringify({ + name: "foo", + workspaces: ["packages/*"], + }), + ); + + await mkdir(join(packageDir, "packages", "pkg1"), { recursive: true }); + await write(join(packageDir, "packages", "pkg1", "package.json"), JSON.stringify({ name: "pkg1" })); + await mkdir(join(packageDir, "packages", "pkg2"), { recursive: true }); + await write(join(packageDir, "packages", "pkg2", "package.json"), JSON.stringify({ name: "pkg1" })); + + var { stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + + var err = await new Response(stderr).text(); + expect(err).toContain('Workspace name "pkg1" already exists'); + expect(await exited).toBe(1); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb"), { force: true }); + + ({ stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: join(packageDir, "packages", "pkg1"), + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + expect(err).toContain('Workspace name "pkg1" already exists'); + expect(await exited).toBe(1); +}); + +const versions = ["workspace:1.0.0", "workspace:*", "workspace:^1.0.0", "1.0.0", "*"]; + +for (const rootVersion of versions) { + for (const packageVersion of versions) { + test(`it should allow duplicates, root@${rootVersion}, package@${packageVersion}`, async () => { + await write( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", + workspaces: ["packages/*"], + dependencies: { + pkg2: rootVersion, + }, + }), + ); + + await mkdir(join(packageDir, "packages", "pkg1"), { recursive: true }); + await write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + version: "1.0.0", + dependencies: { + pkg2: packageVersion, + }, + }), + ); + + await mkdir(join(packageDir, "packages", "pkg2"), { recursive: true }); + await write( + join(packageDir, "packages", "pkg2", "package.json"), + JSON.stringify({ name: "pkg2", version: "1.0.0" }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + `+ pkg2@workspace:packages/pkg2`, + "", + "2 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: join(packageDir, "packages", "pkg1"), + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 2 installs across 3 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + 
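+ // wipe node_modules and the lockfile, then reinstall from inside the workspace package: + // resolution should be identical and the lockfile regenerated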
+ await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb"), { recursive: true, force: true }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: join(packageDir, "packages", "pkg1"), + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + `+ pkg2@workspace:packages/pkg2`, + "", + "2 packages installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 2 installs across 3 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + }); + } +} + +for (const version of versions) { + test(`it should allow listing workspace as dependency of the root package version ${version}`, async () => { + await write( + packageJson, + JSON.stringify({ + name: "foo", + workspaces: ["packages/*"], + dependencies: { + "workspace-1": version, + }, + }), + ); + + await mkdir(join(packageDir, "packages", "workspace-1"), { recursive: true }); + await write( + join(packageDir, "packages", "workspace-1", "package.json"), + JSON.stringify({ + name: "workspace-1", + version: "1.0.0", + }), + ); + // install first from the root, then from the workspace package + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("already exists"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("Duplicate dependency"); + expect(err).not.toContain('workspace dependency "workspace-1" not found'); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + `+ workspace-1@workspace:packages/workspace-1`, + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ + name: "workspace-1", + version: "1.0.0", + }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: join(packageDir, "packages", "workspace-1"), + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); +
expect(err).not.toContain("not found"); + expect(err).not.toContain("already exists"); + expect(err).not.toContain("Duplicate dependency"); + expect(err).not.toContain('workspace dependency "workspace-1" not found'); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ + name: "workspace-1", + version: "1.0.0", + }); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb"), { recursive: true, force: true }); + + // install from workspace package then from root + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: join(packageDir, "packages", "workspace-1"), + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("already exists"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("Duplicate dependency"); + expect(err).not.toContain('workspace dependency "workspace-1" not found'); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "1 package installed", + ]); + expect(await exited).toBe(0); + expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ + name: "workspace-1", + version: "1.0.0", + }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("already exists"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("Duplicate dependency"); + expect(err).not.toContain('workspace dependency "workspace-1" not found'); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + expect.stringContaining("bun install v1."), + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), verdaccio.registryUrl()); + + expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ + name: "workspace-1", + version: "1.0.0", + }); + }); +} + +describe("install --filter", () => { + test("does not run root scripts if root is filtered out", async () => { + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "root", + workspaces: ["packages/*"], + scripts: { + postinstall: `${bunExe()} root.js`, + }, + }), + ), + write(join(packageDir, "root.js"), `require("fs").writeFileSync("root.txt", "")`), + write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + scripts: { + postinstall: `${bunExe()} pkg1.js`, + }, + }), + ), + write(join(packageDir, "packages", "pkg1", "pkg1.js"), `require("fs").writeFileSync("pkg1.txt", "")`), + ]); + + var { exited } = spawn({ 
+ cmd: [bunExe(), "install", "--filter", "pkg1"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env, + }); + + expect(await exited).toBe(0); + + expect(await exists(join(packageDir, "root.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "packages", "pkg1", "pkg1.txt"))).toBeTrue(); + + await rm(join(packageDir, "packages", "pkg1", "pkg1.txt")); + + ({ exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "root"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env, + })); + + expect(await exited).toBe(0); + + expect(await exists(join(packageDir, "root.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1.txt"))).toBeFalse(); + }); + + test("basic", async () => { + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "root", + workspaces: ["packages/*"], + dependencies: { + "a-dep": "1.0.1", + }, + }), + ), + ]); + + var { exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "pkg1"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + }); + + expect(await exited).toBe(0); + expect( + await Promise.all([ + exists(join(packageDir, "node_modules", "a-dep")), + exists(join(packageDir, "node_modules", "no-deps")), + ]), + ).toEqual([false, false]); + + // add workspace + await write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + version: "1.0.0", + dependencies: { + "no-deps": "2.0.0", + }, + }), + ); + + ({ exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "pkg1"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + })); + + expect(await exited).toBe(0); + expect( + await Promise.all([ + exists(join(packageDir, "node_modules", "a-dep")), + exists(join(packageDir, "node_modules", "no-deps")), + ]), + ).toEqual([false, true]); + }); + + test("all but one or two", async () => { + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "root", + workspaces: ["packages/*"], + dependencies: { + "a-dep": "1.0.1", + }, + }), + ), + write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + version: "1.0.0", + dependencies: { + "no-deps": "2.0.0", + }, + }), + ), + write( + join(packageDir, "packages", "pkg2", "package.json"), + JSON.stringify({ + name: "pkg2", + dependencies: { + "no-deps": "1.0.0", + }, + }), + ), + ]); + + var { exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "!pkg2", "--save-text-lockfile"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + }); + + expect(await exited).toBe(0); + expect( + await Promise.all([ + exists(join(packageDir, "node_modules", "a-dep")), + file(join(packageDir, "node_modules", "no-deps", "package.json")).json(), + exists(join(packageDir, "node_modules", "pkg2")), + ]), + ).toEqual([true, { name: "no-deps", version: "2.0.0" }, false]); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + + // exclude the root by name + ({ exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "!root"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + })); + + expect(await exited).toBe(0); + expect( + await Promise.all([ + exists(join(packageDir, "node_modules", "a-dep")), + exists(join(packageDir, "node_modules", "no-deps")), + exists(join(packageDir, "node_modules", "pkg1")), + exists(join(packageDir, "node_modules", "pkg2")), + ]), + ).toEqual([false, true, true, true]); + }); + + test("matched workspace depends on filtered workspace", async () => { + await 
Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "root", + workspaces: ["packages/*"], + dependencies: { + "a-dep": "1.0.1", + }, + }), + ), + write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + version: "1.0.0", + dependencies: { + "no-deps": "2.0.0", + }, + }), + ), + write( + join(packageDir, "packages", "pkg2", "package.json"), + JSON.stringify({ + name: "pkg2", + dependencies: { + "pkg1": "1.0.0", + }, + }), + ), + ]); + + var { exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "!pkg1"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + }); + + expect(await exited).toBe(0); + expect( + await Promise.all([ + exists(join(packageDir, "node_modules", "a-dep")), + file(join(packageDir, "node_modules", "no-deps", "package.json")).json(), + exists(join(packageDir, "node_modules", "pkg1")), + exists(join(packageDir, "node_modules", "pkg2")), + ]), + ).toEqual([true, { name: "no-deps", version: "2.0.0" }, true, true]); + }); + + test("filter with a path", async () => { + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "path-pattern", + workspaces: ["packages/*"], + dependencies: { + "a-dep": "1.0.1", + }, + }), + ), + write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + dependencies: { + "no-deps": "2.0.0", + }, + }), + ), + ]); + + async function checkRoot() { + expect( + await Promise.all([ + exists(join(packageDir, "node_modules", "a-dep")), + exists(join(packageDir, "node_modules", "no-deps", "package.json")), + exists(join(packageDir, "node_modules", "pkg1")), + ]), + ).toEqual([true, false, false]); + } + + async function checkWorkspace() { + expect( + await Promise.all([ + exists(join(packageDir, "node_modules", "a-dep")), + file(join(packageDir, "node_modules", "no-deps", "package.json")).json(), + exists(join(packageDir, "node_modules", "pkg1")), + ]), + ).toEqual([false, { name: "no-deps", version: "2.0.0" }, true]); + } + + var { exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "./packages/pkg1"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + }); + + expect(await exited).toBe(0); + await checkWorkspace(); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + + ({ exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "./packages/*"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + })); + + expect(await exited).toBe(0); + await checkWorkspace(); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + + ({ exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "!./packages/pkg1"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + })); + + expect(await exited).toBe(0); + await checkRoot(); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + + ({ exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "!./packages/*"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + })); + + expect(await exited).toBe(0); + await checkRoot(); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + + ({ exited } = spawn({ + cmd: [bunExe(), "install", "--filter", "!./"], + cwd: packageDir, + stdout: "ignore", + stderr: "pipe", + env, + })); + + expect(await exited).toBe(0); + await checkWorkspace(); + }); +}); diff --git a/test/cli/install/bunx.test.ts b/test/cli/install/bunx.test.ts index 87a26b0c7b..81efa8318a 100644 --- 
a/test/cli/install/bunx.test.ts +++ b/test/cli/install/bunx.test.ts @@ -1,11 +1,10 @@ import { spawn } from "bun"; import { beforeAll, beforeEach, expect, it, setDefaultTimeout } from "bun:test"; import { rm, writeFile } from "fs/promises"; -import { bunEnv, bunExe, isWindows, tmpdirSync } from "harness"; +import { bunEnv, bunExe, isWindows, tmpdirSync, readdirSorted } from "harness"; import { readdirSync } from "node:fs"; import { tmpdir } from "os"; import { join, resolve } from "path"; -import { readdirSorted } from "./dummy.registry"; let x_dir: string; let current_tmpdir: string; diff --git a/test/cli/install/dummy.registry.ts b/test/cli/install/dummy.registry.ts index 060b50a0fe..f83f719542 100644 --- a/test/cli/install/dummy.registry.ts +++ b/test/cli/install/dummy.registry.ts @@ -87,12 +87,6 @@ export function dummyRegistry(urls: string[], info: any = { "0.0.2": {} }, numbe return _handler; } -export async function readdirSorted(path: PathLike): Promise<string[]> { - const results = await readdir(path); - results.sort(); - return results; -} - export function setHandler(newHandler: Handler) { handler = newHandler; } diff --git a/test/cli/install/registry/missing-directory-bin-1.1.1.tgz b/test/cli/install/missing-directory-bin-1.1.1.tgz similarity index 100% rename from test/cli/install/registry/missing-directory-bin-1.1.1.tgz rename to test/cli/install/missing-directory-bin-1.1.1.tgz diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index 2d5b4f4fe9..8a6b064a77 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -110,7 +110,7 @@ function runInCwdSuccess({ cmd.push("--filter", p); } } else { - cmd.push("--filter", pattern); + cmd.push("-F", pattern); } for (const c of command) { diff --git a/test/harness.ts b/test/harness.ts index bbdc48006f..5fe4cf1a18 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -1,8 +1,9 @@ -import { gc as bunGC, sleepSync, spawnSync, unsafe, which } from "bun"; +import { gc as bunGC, sleepSync, spawnSync, unsafe, which, write } from "bun"; import { heapStats } from "bun:jsc"; +import { fork, ChildProcess } from "child_process"; import { afterAll, beforeAll, describe, expect, test } from "bun:test"; -import { readFile, readlink, writeFile } from "fs/promises"; -import fs, { closeSync, openSync } from "node:fs"; +import { readFile, readlink, writeFile, readdir, rm } from "fs/promises"; +import fs, { closeSync, openSync, rmSync } from "node:fs"; import os from "node:os"; import { dirname, isAbsolute, join } from "path"; import detectLibc from "detect-libc"; @@ -1388,9 +1389,9 @@ Object.defineProperty(globalThis, "gc", { configurable: true, }); -export function waitForFileToExist(path: string, interval: number) { +export function waitForFileToExist(path: string, interval_ms: number) { while (!fs.existsSync(path)) { - sleepSync(interval); + sleepSync(interval_ms); } } @@ -1434,3 +1435,82 @@ export function textLockfile(version: number, pkgs: any): string { ...pkgs, }); } + +export class VerdaccioRegistry { + port: number; + process: ChildProcess | undefined; + configPath: string; + packagesPath: string; + + constructor(opts?: { configPath?: string; packagesPath?: string; verbose?: boolean }) { + this.port = randomPort(); + this.configPath = opts?.configPath ?? join(import.meta.dir, "cli", "install", "registry", "verdaccio.yaml"); + this.packagesPath = opts?.packagesPath ??
join(import.meta.dir, "cli", "install", "registry", "packages"); + } + + async start(silent: boolean = true) { + await rm(join(dirname(this.configPath), "htpasswd"), { force: true }); + this.process = fork(require.resolve("verdaccio/bin/verdaccio"), ["-c", this.configPath, "-l", `${this.port}`], { + silent, + // Prefer using a release build of Bun since it's faster + execPath: Bun.which("bun") || bunExe(), + }); + + this.process.stderr?.on("data", data => { + console.error(`[verdaccio] stderr: ${data}`); + }); + + const started = Promise.withResolvers(); + + this.process.on("error", error => { + console.error(`Failed to start verdaccio: ${error}`); + started.reject(error); + }); + + this.process.on("exit", (code, signal) => { + if (code !== 0) { + console.error(`Verdaccio exited with code ${code} and signal ${signal}`); + } else { + console.log("Verdaccio exited successfully"); + } + }); + + this.process.on("message", (message: { verdaccio_started: boolean }) => { + if (message.verdaccio_started) { + started.resolve(); + } + }); + + await started.promise; + } + + registryUrl() { + return `http://localhost:${this.port}/`; + } + + stop() { + rmSync(join(dirname(this.configPath), "htpasswd"), { force: true }); + this.process?.kill(); + } + + async createTestDir() { + const packageDir = tmpdirSync(); + const packageJson = join(packageDir, "package.json"); + await write( + join(packageDir, "bunfig.toml"), + ` + [install] + cache = "${join(packageDir, ".bun-cache")}" + registry = "${this.registryUrl()}" + `, + ); + + return { packageDir, packageJson }; + } +} + +export async function readdirSorted(path: string): Promise { + const results = await readdir(path); + results.sort(); + return results; +} diff --git a/test/js/bun/glob/match.test.ts b/test/js/bun/glob/match.test.ts index c09f8b7cd0..9a98d44c40 100644 --- a/test/js/bun/glob/match.test.ts +++ b/test/js/bun/glob/match.test.ts @@ -634,6 +634,13 @@ describe("Glob.match", () => { expect(new Glob("[^a-c]*").match("BewAre")).toBeTrue(); }); + test("square braces", () => { + expect(new Glob("src/*.[tj]s").match("src/foo.js")).toBeTrue(); + expect(new Glob("src/*.[tj]s").match("src/foo.ts")).toBeTrue(); + expect(new Glob("foo/ba[rz].md").match("foo/bar.md")).toBeTrue(); + expect(new Glob("foo/ba[rz].md").match("foo/baz.md")).toBeTrue(); + }); + test("bash wildmatch", () => { expect(new Glob("a[]-]b").match("aab")).toBeFalse(); expect(new Glob("[ten]").match("ten")).toBeFalse(); diff --git a/test/js/bun/http/bun-server.test.ts b/test/js/bun/http/bun-server.test.ts index 771debc7f1..1c49bc8bff 100644 --- a/test/js/bun/http/bun-server.test.ts +++ b/test/js/bun/http/bun-server.test.ts @@ -895,6 +895,74 @@ describe("HEAD requests #15355", () => { } }); + test("should fallback to the body if content-length is missing in the headers", async () => { + using server = Bun.serve({ + port: 0, + fetch(req) { + if (req.url.endsWith("/content-length")) { + return new Response("Hello World", { + headers: { + "Content-Type": "text/plain", + "X-Bun-Test": "1", + }, + }); + } + + if (req.url.endsWith("/chunked")) { + return new Response( + async function* () { + yield "Hello"; + await Bun.sleep(1); + yield " "; + await Bun.sleep(1); + yield "World"; + }, + { + headers: { + "Content-Type": "text/plain", + "X-Bun-Test": "1", + }, + }, + ); + } + + return new Response(null, { + headers: { + "Content-Type": "text/plain", + "X-Bun-Test": "1", + }, + }); + }, + }); + { + const response = await fetch(server.url + "/content-length", { + method: "HEAD", + }); + 
expect(response.status).toBe(200); + expect(response.headers.get("content-length")).toBe("11"); + expect(response.headers.get("x-bun-test")).toBe("1"); + expect(await response.text()).toBe(""); + } + { + const response = await fetch(server.url + "/chunked", { + method: "HEAD", + }); + expect(response.status).toBe(200); + expect(response.headers.get("transfer-encoding")).toBe("chunked"); + expect(response.headers.get("x-bun-test")).toBe("1"); + expect(await response.text()).toBe(""); + } + { + const response = await fetch(server.url + "/null", { + method: "HEAD", + }); + expect(response.status).toBe(200); + expect(response.headers.get("content-length")).toBe("0"); + expect(response.headers.get("x-bun-test")).toBe("1"); + expect(await response.text()).toBe(""); + } + }); + test("HEAD requests should not have body", async () => { const dir = tempDirWithFiles("fsr", { "hello": "Hello World", diff --git a/test/js/bun/http/serve-body-leak.test.ts b/test/js/bun/http/serve-body-leak.test.ts index ed40ed810d..510a00a078 100644 --- a/test/js/bun/http/serve-body-leak.test.ts +++ b/test/js/bun/http/serve-body-leak.test.ts @@ -1,6 +1,6 @@ import type { Subprocess } from "bun"; import { afterEach, beforeEach, expect, it } from "bun:test"; -import { bunEnv, bunExe, isDebug, isFlaky, isLinux } from "harness"; +import { bunEnv, bunExe, isDebug, isFlaky, isLinux, isWindows } from "harness"; import { join } from "path"; const payload = Buffer.alloc(512 * 1024, "1").toString("utf-8"); // decent size payload to test memory leak @@ -149,7 +149,7 @@ for (const test_info of [ ["should not leak memory when streaming the body and echoing it back", callStreamingEcho, false, 64], ] as const) { const [testName, fn, skip, maxMemoryGrowth] = test_info; - it.todoIf(skip)( + it.todoIf(skip || isFlaky && isWindows)( testName, async () => { const { url, process } = await getURL(); diff --git a/test/js/bun/net/socket.test.ts b/test/js/bun/net/socket.test.ts index e3735148f3..693ce9e808 100644 --- a/test/js/bun/net/socket.test.ts +++ b/test/js/bun/net/socket.test.ts @@ -220,7 +220,8 @@ it("should reject on connection error, calling both connectError() and rejecting expect(socket).toBeDefined(); expect(socket.data).toBe(data); expect(error).toBeDefined(); - expect(error.name).toBe("ECONNREFUSED"); + expect(error.name).toBe("Error"); + expect(error.code).toBe("ECONNREFUSED"); expect(error.message).toBe("Failed to connect"); }, data() { @@ -246,7 +247,8 @@ it("should reject on connection error, calling both connectError() and rejecting () => done(new Error("Promise should reject instead")), err => { expect(err).toBeDefined(); - expect(err.name).toBe("ECONNREFUSED"); + expect(err.name).toBe("Error"); + expect(err.code).toBe("ECONNREFUSED"); expect(err.message).toBe("Failed to connect"); done(); @@ -293,7 +295,7 @@ it("should handle connection error", done => { expect(socket).toBeDefined(); expect(socket.data).toBe(data); expect(error).toBeDefined(); - expect(error.name).toBe("ECONNREFUSED"); + expect(error.name).toBe("Error"); expect(error.message).toBe("Failed to connect"); expect((error as any).code).toBe("ECONNREFUSED"); done(); @@ -595,6 +597,7 @@ it("should not call drain before handshake", async () => { }); it("upgradeTLS handles errors", async () => { using server = Bun.serve({ + port: 0, tls, async fetch(req) { return new Response("Hello World"); @@ -699,6 +702,7 @@ it("upgradeTLS handles errors", async () => { }); it("should be able to upgrade to TLS", async () => { using server = Bun.serve({ + port: 0, tls, async 
fetch(req) { return new Response("Hello World"); diff --git a/test/js/bun/s3/s3-insecure.test.ts b/test/js/bun/s3/s3-insecure.test.ts new file mode 100644 index 0000000000..d757fff77b --- /dev/null +++ b/test/js/bun/s3/s3-insecure.test.ts @@ -0,0 +1,35 @@ +import { describe, it, expect } from "bun:test"; +import { S3Client } from "bun"; + +describe("s3", async () => { + it("should not fail to connect when endpoint is http and not https", async () => { + using server = Bun.serve({ + port: 0, + async fetch(req) { + return new Response("<>lol!", { + headers: { + "Content-Type": "text/plain", + }, + status: 400, + }); + }, + }); + + const s3 = new S3Client({ + accessKeyId: "test", + secretAccessKey: "test", + endpoint: server.url.href, + bucket: "test", + }); + + const file = s3.file("hello.txt"); + let err; + try { + await file.text(); + } catch (e) { + err = e; + } + // Test we don't get ConnectionRefused + expect(err.code!).toBe("UnknownError"); + }); +}); diff --git a/test/js/bun/s3/s3.leak.test.ts b/test/js/bun/s3/s3.leak.test.ts index 9b25c622cb..4f81470722 100644 --- a/test/js/bun/s3/s3.leak.test.ts +++ b/test/js/bun/s3/s3.leak.test.ts @@ -1,8 +1,8 @@ import { describe, expect, it } from "bun:test"; import { bunExe, bunEnv, getSecret, tempDirWithFiles } from "harness"; -import type { S3FileOptions } from "bun"; +import type { S3Options } from "bun"; import path from "path"; -const s3Options: S3FileOptions = { +const s3Options: S3Options = { accessKeyId: getSecret("S3_R2_ACCESS_KEY"), secretAccessKey: getSecret("S3_R2_SECRET_KEY"), endpoint: getSecret("S3_R2_ENDPOINT"), diff --git a/test/js/bun/s3/s3.test.ts b/test/js/bun/s3/s3.test.ts index 7ac336fc6c..a991031980 100644 --- a/test/js/bun/s3/s3.test.ts +++ b/test/js/bun/s3/s3.test.ts @@ -1,617 +1,1060 @@ import { describe, expect, it, beforeAll, afterAll } from "bun:test"; -import { bunExe, bunEnv, getSecret, tempDirWithFiles } from "harness"; +import { bunExe, bunEnv, getSecret, tempDirWithFiles, isLinux } from "harness"; import { randomUUID } from "crypto"; -import { S3, s3, file } from "bun"; -import type { S3File, S3FileOptions } from "bun"; +import { S3Client, s3, file, which } from "bun"; +const S3 = (...args) => new S3Client(...args); +import child_process from "child_process"; +import type { S3Options } from "bun"; import path from "path"; -const s3Options: S3FileOptions = { - accessKeyId: getSecret("S3_R2_ACCESS_KEY"), - secretAccessKey: getSecret("S3_R2_SECRET_KEY"), - endpoint: getSecret("S3_R2_ENDPOINT"), -}; -const S3Bucket = getSecret("S3_R2_BUCKET"); - -function makePayLoadFrom(text: string, size: number): string { - while (Buffer.byteLength(text) < size) { - text += text; +const dockerCLI = which("docker") as string; +function isDockerEnabled(): boolean { + if (!dockerCLI) { + return false; + } + + try { + const info = child_process.execSync(`${dockerCLI} info`, { stdio: ["ignore", "pipe", "inherit"] }); + return info.toString().indexOf("Server Version:") !== -1; + } catch (error) { + return false; } - return text.slice(0, size); } -// 10 MiB big enough to Multipart upload in more than one part -const bigPayload = makePayLoadFrom("Bun is the best runtime ever", 10 * 1024 * 1024); -const bigishPayload = makePayLoadFrom("Bun is the best runtime ever", 1 * 1024 * 1024); +const allCredentials = [ + { + accessKeyId: getSecret("S3_R2_ACCESS_KEY"), + secretAccessKey: getSecret("S3_R2_SECRET_KEY"), + endpoint: getSecret("S3_R2_ENDPOINT"), + bucket: getSecret("S3_R2_BUCKET"), + service: "R2" as string, + }, +]; 
-describe.skipIf(!s3Options.accessKeyId)("s3", () => { - for (let bucketInName of [true, false]) { - describe("fetch", () => { - describe(bucketInName ? "bucket in path" : "bucket in options", () => { - var tmp_filename: string; - const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket }; - beforeAll(async () => { - tmp_filename = bucketInName ? `s3://${S3Bucket}/${randomUUID()}` : `s3://${randomUUID()}`; - const result = await fetch(tmp_filename, { - method: "PUT", - body: "Hello Bun!", - s3: options, - }); - expect(result.status).toBe(200); - }); +// TODO: figure out why minio is not creating a bucket on Linux, works on macOS and Windows +if (isDockerEnabled() && !isLinux) { + const minio_dir = tempDirWithFiles("minio", {}); + const result = child_process.spawnSync( + "docker", + [ + "run", + "-d", + "--name", + "minio", + "-p", + "9000:9000", + "-p", + "9001:9001", + "-e", + "MINIO_ROOT_USER=minioadmin", + "-e", + "MINIO_ROOT_PASSWORD=minioadmin", + "-v", + `${minio_dir}:/data`, + "minio/minio", + "server", + "--console-address", + ":9001", + "/data", + ], + { + stdio: ["ignore", "pipe", "pipe"], + }, + ); - afterAll(async () => { - const result = await fetch(tmp_filename, { - method: "DELETE", - s3: options, - }); - expect(result.status).toBe(204); - }); + if (result.error) { + if (!result.error.message.includes('The container name "/minio" is already in use by container')) + throw result.error; + } + // wait for minio to be ready + await Bun.sleep(1_000); - it("should download file via fetch GET", async () => { - const result = await fetch(tmp_filename, { s3: options }); - expect(result.status).toBe(200); - expect(await result.text()).toBe("Hello Bun!"); - }); + // create a bucket + child_process.spawnSync(dockerCLI, [`exec`, `minio`, `mc`, `mb`, `http://localhost:9000/buntest`], { + stdio: "ignore", + }); - it("should download range", async () => { - const result = await fetch(tmp_filename, { - headers: { "range": "bytes=6-10" }, - s3: options, - }); - expect(result.status).toBe(206); - expect(await result.text()).toBe("Bun!"); - }); + allCredentials.push({ + endpoint: "http://localhost:9000", // MinIO endpoint + accessKeyId: "minioadmin", + secretAccessKey: "minioadmin", + bucket: "buntest", + service: "MinIO" as string, + }); +} +for (let credentials of allCredentials) { + describe(`${credentials.service}`, () => { + const s3Options: S3Options = { + accessKeyId: credentials.accessKeyId, + secretAccessKey: credentials.secretAccessKey, + endpoint: credentials.endpoint, + }; - it("should check if a key exists or content-length", async () => { - const result = await fetch(tmp_filename, { - method: "HEAD", - s3: options, - }); - expect(result.status).toBe(200); // 404 if do not exists - expect(result.headers.get("content-length")).toBe("10"); // content-length - }); + const S3Bucket = credentials.bucket; - it("should check if a key does not exist", async () => { - const result = await fetch(tmp_filename + "-does-not-exist", { s3: options }); - expect(result.status).toBe(404); - }); + function makePayLoadFrom(text: string, size: number): string { + while (Buffer.byteLength(text) < size) { + text += text; + } + return text.slice(0, size); + } - it("should be able to set content-type", async () => { - { - const result = await fetch(tmp_filename, { - method: "PUT", - body: "Hello Bun!", - headers: { - "Content-Type": "application/json", - }, - s3: options, - }); - expect(result.status).toBe(200); - const response = await fetch(tmp_filename, { s3: options }); -
expect(response.headers.get("content-type")).toStartWith("application/json"); - } - { - const result = await fetch(tmp_filename, { - method: "PUT", - body: "Hello Bun!", - headers: { - "Content-Type": "text/plain", - }, - s3: options, - }); - expect(result.status).toBe(200); - const response = await fetch(tmp_filename, { s3: options }); - expect(response.headers.get("content-type")).toStartWith("text/plain"); - } - }); + // 10 MiB big enough to Multipart upload in more than one part + const bigPayload = makePayLoadFrom("Bun is the best runtime ever", 10 * 1024 * 1024); + const bigishPayload = makePayLoadFrom("Bun is the best runtime ever", 1 * 1024 * 1024); - it("should be able to upload large files", async () => { - // 10 MiB big enough to Multipart upload in more than one part - const buffer = Buffer.alloc(1 * 1024 * 1024, "a"); - { - await fetch(tmp_filename, { - method: "PUT", - body: async function* () { - for (let i = 0; i < 10; i++) { - await Bun.sleep(10); - yield buffer; - } - }, - s3: options, - }).then(res => res.text()); - - const result = await fetch(tmp_filename, { method: "HEAD", s3: options }); - expect(result.status).toBe(200); - expect(result.headers.get("content-length")).toBe((buffer.byteLength * 10).toString()); - } - }, 10_000); - }); - }); - - describe("Bun.S3", () => { - describe(bucketInName ? "bucket in path" : "bucket in options", () => { - const tmp_filename = bucketInName ? `${S3Bucket}/${randomUUID()}` : `${randomUUID()}`; - const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket }; - beforeAll(async () => { - const file = new S3(tmp_filename, options); - await file.write("Hello Bun!"); - }); - - afterAll(async () => { - const file = new S3(tmp_filename, options); - await file.unlink(); - }); - - it("should download file via Bun.s3().text()", async () => { - const file = new S3(tmp_filename, options); - const text = await file.text(); - expect(text).toBe("Hello Bun!"); - }); - - it("should download range", async () => { - const file = new S3(tmp_filename, options); - const text = await file.slice(6, 10).text(); - expect(text).toBe("Bun!"); - }); - - it("should check if a key exists or content-length", async () => { - const file = new S3(tmp_filename, options); - const exists = await file.exists(); - expect(exists).toBe(true); - const contentLength = await file.size; - expect(contentLength).toBe(10); - }); - - it("should check if a key does not exist", async () => { - const file = new S3(tmp_filename + "-does-not-exist", options); - const exists = await file.exists(); - expect(exists).toBe(false); - }); - - it("should be able to set content-type", async () => { - { - const s3file = new S3(tmp_filename, { ...options, type: "text/css" }); - await s3file.write("Hello Bun!"); - const response = await fetch(s3file.presign()); - expect(response.headers.get("content-type")).toStartWith("text/css"); - } - { - const s3file = new S3(tmp_filename, options); - await s3file.write("Hello Bun!", { type: "text/plain" }); - const response = await fetch(s3file.presign()); - expect(response.headers.get("content-type")).toStartWith("text/plain"); - } - - { - const s3file = new S3(tmp_filename, options); - const writer = s3file.writer({ type: "application/json" }); - writer.write("Hello Bun!"); - await writer.end(); - const response = await fetch(s3file.presign()); - expect(response.headers.get("content-type")).toStartWith("application/json"); - } - - { - await S3.upload(tmp_filename, "Hello Bun!", { ...options, type: "application/xml" }); - const response = 
await fetch(s3(tmp_filename, options).presign()); - expect(response.headers.get("content-type")).toStartWith("application/xml"); - } - }); - - it("should be able to upload large files using S3.upload + readable Request", async () => { - { - await S3.upload( - tmp_filename, - new Request("https://example.com", { + describe.skipIf(!s3Options.accessKeyId)("s3", () => { + for (let bucketInName of [true, false]) { + describe("fetch", () => { + describe(bucketInName ? "bucket in path" : "bucket in options", () => { + var tmp_filename: string; + const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket }; + beforeAll(async () => { + tmp_filename = bucketInName ? `s3://${S3Bucket}/${randomUUID()}` : `s3://${randomUUID()}`; + const result = await fetch(tmp_filename, { method: "PUT", - body: async function* () { - for (let i = 0; i < 10; i++) { - if (i % 5 === 0) { + body: "Hello Bun!", + s3: options, + }); + expect(result.status).toBe(200); + }); + + afterAll(async () => { + const result = await fetch(tmp_filename, { + method: "DELETE", + s3: options, + }); + expect(result.status).toBe(204); + }); + + it("should download file via fetch GET", async () => { + const result = await fetch(tmp_filename, { s3: options }); + expect(result.status).toBe(200); + expect(await result.text()).toBe("Hello Bun!"); + }); + + it("should download range", async () => { + const result = await fetch(tmp_filename, { + headers: { "range": "bytes=6-10" }, + s3: options, + }); + expect(result.status).toBe(206); + expect(await result.text()).toBe("Bun!"); + }); + + it("should check if a key exists or content-length", async () => { + const result = await fetch(tmp_filename, { + method: "HEAD", + s3: options, + }); + expect(result.status).toBe(200); // 404 if it does not exist + expect(result.headers.get("content-length")).toBe("10"); // content-length + }); + + it("should check if a key does not exist", async () => { + const result = await fetch(tmp_filename + "-does-not-exist", { s3: options }); + expect(result.status).toBe(404); + }); + + it("should be able to set content-type", async () => { + { + const result = await fetch(tmp_filename, { + method: "PUT", + body: "Hello Bun!", + headers: { + "Content-Type": "application/json", + }, + s3: options, + }); + expect(result.status).toBe(200); + const response = await fetch(tmp_filename, { s3: options }); + expect(response.headers.get("content-type")).toStartWith("application/json"); + } + { + const result = await fetch(tmp_filename, { + method: "PUT", + body: "Hello Bun!", + headers: { + "Content-Type": "text/plain", + }, + s3: options, + }); + expect(result.status).toBe(200); + const response = await fetch(tmp_filename, { s3: options }); + expect(response.headers.get("content-type")).toStartWith("text/plain"); + } + }); + + it("should be able to upload large files", async () => { + // 10 MiB big enough to Multipart upload in more than one part + const buffer = Buffer.alloc(1 * 1024 * 1024, "a"); + { + await fetch(tmp_filename, { + method: "PUT", + body: async function* () { + for (let i = 0; i < 10; i++) { await Bun.sleep(10); + yield buffer; } - yield bigishPayload; - } - }, - }), - options, - ); - expect(await S3.size(tmp_filename, options)).toBe(Buffer.byteLength(bigishPayload) * 10); - } - }, 10_000); + }, + s3: options, + }).then(res => res.text()); - it("should be able to upload large files in one go using S3.upload", async () => { - { - await S3.upload(tmp_filename, bigPayload, options); - expect(await S3.size(tmp_filename,
options)).toBe(Buffer.byteLength(bigPayload)); - expect(await new S3(tmp_filename, options).text()).toBe(bigPayload); - } - }, 10_000); + const result = await fetch(tmp_filename, { method: "HEAD", s3: options }); + expect(result.status).toBe(200); + expect(result.headers.get("content-length")).toBe((buffer.byteLength * 10).toString()); + } + }, 20_000); + }); + }); - it("should be able to upload large files in one go using S3File.write", async () => { - { - const s3File = new S3(tmp_filename, options); - await s3File.write(bigPayload); - expect(await s3File.size).toBe(Buffer.byteLength(bigPayload)); - expect(await s3File.text()).toBe(bigPayload); - } - }, 10_000); - }); - }); + describe("Bun.S3Client", () => { + describe(bucketInName ? "bucket in path" : "bucket in options", () => { + const tmp_filename = bucketInName ? `${S3Bucket}/${randomUUID()}` : `${randomUUID()}`; + const options = bucketInName ? null : { bucket: S3Bucket }; - describe("Bun.file", () => { - describe(bucketInName ? "bucket in path" : "bucket in options", () => { - const tmp_filename = bucketInName ? `s3://${S3Bucket}/${randomUUID()}` : `s3://${randomUUID()}`; - const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket }; - beforeAll(async () => { - const s3file = file(tmp_filename, options); + var bucket = S3(s3Options); + beforeAll(async () => { + const file = bucket.file(tmp_filename, options); + await file.write("Hello Bun!"); + }); + + afterAll(async () => { + const file = bucket.file(tmp_filename, options); + await file.unlink(); + }); + + it("should download file via Bun.s3().text()", async () => { + const file = bucket.file(tmp_filename, options); + const text = await file.text(); + expect(text).toBe("Hello Bun!"); + }); + + it("should download range", async () => { + const file = bucket.file(tmp_filename, options); + const text = await file.slice(6, 10).text(); + expect(text).toBe("Bun!"); + }); + + it("should check if a key exists or content-length", async () => { + const file = bucket.file(tmp_filename, options); + const exists = await file.exists(); + expect(exists).toBe(true); + const stat = await file.stat(); + expect(stat.size).toBe(10); + }); + + it("should check if a key does not exist", async () => { + const file = bucket.file(tmp_filename + "-does-not-exist", options); + const exists = await file.exists(); + expect(exists).toBe(false); + }); + + it("should be able to set content-type", async () => { + { + const s3file = bucket.file(tmp_filename, options); + await s3file.write("Hello Bun!", { type: "text/css" }); + const response = await fetch(s3file.presign()); + expect(response.headers.get("content-type")).toStartWith("text/css"); + } + { + const s3file = bucket.file(tmp_filename, options); + await s3file.write("Hello Bun!", { type: "text/plain" }); + const response = await fetch(s3file.presign()); + expect(response.headers.get("content-type")).toStartWith("text/plain"); + } + + { + const s3file = bucket.file(tmp_filename, options); + const writer = s3file.writer({ type: "application/json" }); + writer.write("Hello Bun!"); + await writer.end(); + const response = await fetch(s3file.presign()); + expect(response.headers.get("content-type")).toStartWith("application/json"); + } + + { + await bucket.write(tmp_filename, "Hello Bun!", { ...options, type: "application/xml" }); + const response = await fetch(bucket.file(tmp_filename, options).presign()); + expect(response.headers.get("content-type")).toStartWith("application/xml"); + } + }); + + it("should be able to upload large files 
using bucket.write + readable Request", async () => { + { + await bucket.write( + tmp_filename, + new Request("https://example.com", { + method: "PUT", + body: async function* () { + for (let i = 0; i < 10; i++) { + if (i % 5 === 0) { + await Bun.sleep(10); + } + yield bigishPayload; + } + }, + }), + options, + ); + expect(await bucket.size(tmp_filename, options)).toBe(Buffer.byteLength(bigishPayload) * 10); + } + }, 10_000); + + it("should be able to upload large files in one go using bucket.write", async () => { + { + await bucket.write(tmp_filename, bigPayload, options); + expect(await bucket.size(tmp_filename, options)).toBe(Buffer.byteLength(bigPayload)); + expect(await bucket.file(tmp_filename, options).text()).toBe(bigPayload); + } + }, 10_000); + + it("should be able to upload large files in one go using S3File.write", async () => { + { + const s3File = bucket.file(tmp_filename, options); + await s3File.write(bigPayload); + const stat = await s3File.stat(); + expect(stat.size).toBe(Buffer.byteLength(bigPayload)); + expect(await s3File.text()).toBe(bigPayload); + } + }, 10_000); + }); + }); + + describe("Bun.file", () => { + describe(bucketInName ? "bucket in path" : "bucket in options", () => { + const tmp_filename = bucketInName ? `s3://${S3Bucket}/${randomUUID()}` : `s3://${randomUUID()}`; + const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket }; + beforeAll(async () => { + const s3file = file(tmp_filename, options); + await s3file.write("Hello Bun!"); + }); + + afterAll(async () => { + const s3file = file(tmp_filename, options); + await s3file.unlink(); + }); + + it("should download file via Bun.file().text()", async () => { + const s3file = file(tmp_filename, options); + const text = await s3file.text(); + expect(text).toBe("Hello Bun!"); + }); + + it("should download range", async () => { + const s3file = file(tmp_filename, options); + const text = await s3file.slice(6, 10).text(); + expect(text).toBe("Bun!"); + }); + + it("should check if a key exists or content-length", async () => { + const s3file = file(tmp_filename, options); + const exists = await s3file.exists(); + expect(exists).toBe(true); + const stat = await s3file.stat(); + expect(stat.size).toBe(10); + }); + + it("should check if a key does not exist", async () => { + const s3file = file(tmp_filename + "-does-not-exist", options); + const exists = await s3file.exists(); + expect(exists).toBe(false); + }); + + it("should be able to set content-type", async () => { + { + const s3file = file(tmp_filename, { ...options, type: "text/css" }); + await s3file.write("Hello Bun!"); + const response = await fetch(s3file.presign()); + expect(response.headers.get("content-type")).toStartWith("text/css"); + } + { + const s3file = file(tmp_filename, options); + await s3file.write("Hello Bun!", { type: "text/plain" }); + const response = await fetch(s3file.presign()); + expect(response.headers.get("content-type")).toStartWith("text/plain"); + } + + { + const s3file = file(tmp_filename, options); + const writer = s3file.writer({ type: "application/json" }); + writer.write("Hello Bun!"); + await writer.end(); + const response = await fetch(s3file.presign()); + expect(response.headers.get("content-type")).toStartWith("application/json"); + } + }); + + it("should be able to upload large files in one go using Bun.write", async () => { + { + await Bun.write(file(tmp_filename, options), bigPayload); + expect(await S3Client.size(tmp_filename, options)).toBe(Buffer.byteLength(bigPayload)); + expect(await 
file(tmp_filename, options).text()).toEqual(bigPayload); + } + }, 15_000); + + it("should be able to upload large files in one go using S3File.write", async () => { + { + const s3File = file(tmp_filename, options); + await s3File.write(bigPayload); + expect(s3File.size).toBeNaN(); + expect(await s3File.text()).toBe(bigPayload); + } + }, 10_000); + }); + }); + + describe("Bun.s3", () => { + describe(bucketInName ? "bucket in path" : "bucket in options", () => { + const tmp_filename = bucketInName ? `${S3Bucket}/${randomUUID()}` : `${randomUUID()}`; + const options = bucketInName ? s3Options : { ...s3Options, bucket: S3Bucket }; + beforeAll(async () => { + const s3file = s3(tmp_filename, options); + await s3file.write("Hello Bun!"); + }); + + afterAll(async () => { + const s3file = s3(tmp_filename, options); + await s3file.unlink(); + }); + + it("should download file via Bun.s3().text()", async () => { + const s3file = s3(tmp_filename, options); + const text = await s3file.text(); + expect(text).toBe("Hello Bun!"); + }); + + it("should download range", async () => { + const s3file = s3(tmp_filename, options); + const text = await s3file.slice(6, 10).text(); + expect(text).toBe("Bun!"); + }); + + it("should check if a key exists or content-length", async () => { + const s3file = s3(tmp_filename, options); + const exists = await s3file.exists(); + expect(exists).toBe(true); + expect(s3file.size).toBeNaN(); + const stat = await s3file.stat(); + expect(stat.size).toBe(10); + expect(stat.etag).toBeDefined(); + + expect(stat.lastModified).toBeDefined(); + }); + + it("should check if a key does not exist", async () => { + const s3file = s3(tmp_filename + "-does-not-exist", options); + const exists = await s3file.exists(); + expect(exists).toBe(false); + }); + + it("presign url", async () => { + const s3file = s3(tmp_filename, options); + const response = await fetch(s3file.presign()); + expect(response.status).toBe(200); + expect(await response.text()).toBe("Hello Bun!"); + }); + + it("should be able to set content-type", async () => { + { + const s3file = s3(tmp_filename, { ...options, type: "text/css" }); + await s3file.write("Hello Bun!"); + const response = await fetch(s3file.presign()); + expect(response.headers.get("content-type")).toStartWith("text/css"); + } + { + const s3file = s3(tmp_filename, options); + await s3file.write("Hello Bun!", { type: "text/plain" }); + const response = await fetch(s3file.presign()); + expect(response.headers.get("content-type")).toStartWith("text/plain"); + } + + { + const s3file = s3(tmp_filename, options); + const writer = s3file.writer({ type: "application/json" }); + writer.write("Hello Bun!"); + await writer.end(); + const response = await fetch(s3file.presign()); + expect(response.headers.get("content-type")).toStartWith("application/json"); + } + }); + + it("should be able to upload large files in one go using Bun.write", async () => { + { + const s3file = s3(tmp_filename, options); + await Bun.write(s3file, bigPayload); + const stat = await s3file.stat(); + expect(stat.size).toBe(Buffer.byteLength(bigPayload)); + expect(stat.etag).toBeDefined(); + + expect(stat.lastModified).toBeDefined(); + expect(await s3file.text()).toBe(bigPayload); + } + }, 10_000); + + it("should be able to upload large files in one go using S3File.write", async () => { + { + const s3File = s3(tmp_filename, options); + await s3File.write(bigPayload); + const stat = await s3File.stat(); + expect(stat.size).toBe(Buffer.byteLength(bigPayload)); + expect(stat.etag).toBeDefined(); + + 
expect(stat.lastModified).toBeDefined(); + + expect(await s3File.text()).toBe(bigPayload); + } + }, 10_000); + + describe("readable stream", () => { + afterAll(async () => { + await Promise.all([ + s3(tmp_filename + "-readable-stream", options).unlink(), + s3(tmp_filename + "-readable-stream-big", options).unlink(), + ]); + }); + it("should work with small files", async () => { + const s3file = s3(tmp_filename + "-readable-stream", options); + await s3file.write("Hello Bun!"); + const stream = s3file.stream(); + const reader = stream.getReader(); + let bytes = 0; + let chunks: Array<Buffer> = []; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + bytes += value?.length ?? 0; + + if (value) chunks.push(value as Buffer); + } + expect(bytes).toBe(10); + expect(Buffer.concat(chunks)).toEqual(Buffer.from("Hello Bun!")); + }); + it("should work with large files", async () => { + const s3file = s3(tmp_filename + "-readable-stream-big", options); + await s3file.write(bigishPayload); + const stream = s3file.stream(); + const reader = stream.getReader(); + let bytes = 0; + let chunks: Array<Buffer> = []; + while (true) { + const { done, value } = await reader.read(); + if (done) break; + bytes += value?.length ?? 0; + if (value) chunks.push(value as Buffer); + } + expect(bytes).toBe(Buffer.byteLength(bigishPayload)); + expect(Buffer.concat(chunks).toString()).toBe(bigishPayload); + }, 30_000); + }); + }); + }); + } + describe("special characters", () => { + it("should allow special characters in the path", async () => { + const options = { ...s3Options, bucket: S3Bucket }; + const s3file = s3(`🌈🦄${randomUUID()}.txt`, options); await s3file.write("Hello Bun!"); - }); - - afterAll(async () => { - const s3file = file(tmp_filename, options); + await s3file.exists(); await s3file.unlink(); + expect().pass(); }); - - it("should download file via Bun.file().text()", async () => { - const s3file = file(tmp_filename, options); - const text = await s3file.text(); - expect(text).toBe("Hello Bun!"); + it("should allow forward slashes in the path", async () => { + const options = { ...s3Options, bucket: S3Bucket }; + const s3file = s3(`${randomUUID()}/test.txt`, options); + await s3file.write("Hello Bun!"); + await s3file.exists(); + await s3file.unlink(); + expect().pass(); }); - - it("should download range", async () => { - const s3file = file(tmp_filename, options); - const text = await s3file.slice(6, 10).text(); - expect(text).toBe("Bun!"); + it("should allow backslashes in the path", async () => { + const options = { ...s3Options, bucket: S3Bucket }; + const s3file = s3(`${randomUUID()}\\test.txt`, options); + await s3file.write("Hello Bun!"); + await s3file.exists(); + await s3file.unlink(); + expect().pass(); }); - - it("should check if a key exists or content-length", async () => { - const s3file = file(tmp_filename, options); - const exists = await s3file.exists(); - expect(exists).toBe(true); - const contentLength = await s3file.size; - expect(contentLength).toBe(10); - }); - - it("should check if a key does not exist", async () => { - const s3file = file(tmp_filename + "-does-not-exist", options); - const exists = await s3file.exists(); - expect(exists).toBe(false); - }); - - it("should be able to set content-type", async () => { + it("should allow starting with slashes and backslashes", async () => { + const options = { ...s3Options, bucket: S3Bucket }; { - const s3file = file(tmp_filename, { ...options, type: "text/css" }); + const s3file = s3(`/${randomUUID()}test.txt`,
options); await s3file.write("Hello Bun!"); - const response = await fetch(s3file.presign()); - expect(response.headers.get("content-type")).toStartWith("text/css"); + await s3file.unlink(); } { - const s3file = file(tmp_filename, options); - await s3file.write("Hello Bun!", { type: "text/plain" }); - const response = await fetch(s3file.presign()); - expect(response.headers.get("content-type")).toStartWith("text/plain"); - } - - { - const s3file = file(tmp_filename, options); - const writer = s3file.writer({ type: "application/json" }); - writer.write("Hello Bun!"); - await writer.end(); - const response = await fetch(s3file.presign()); - expect(response.headers.get("content-type")).toStartWith("application/json"); + const s3file = s3(`\\${randomUUID()}test.txt`, options); + await s3file.write("Hello Bun!"); + await s3file.unlink(); } + expect().pass(); }); - it("should be able to upload large files in one go using Bun.write", async () => { + it("should allow ending with slashs and backslashes", async () => { + const options = { ...s3Options, bucket: S3Bucket }; { - await Bun.write(file(tmp_filename, options), bigPayload); - expect(await S3.size(tmp_filename, options)).toBe(Buffer.byteLength(bigPayload)); - expect(await file(tmp_filename, options).text()).toEqual(bigPayload); + const s3file = s3(`${randomUUID()}/`, options); + await s3file.write("Hello Bun!"); + await s3file.unlink(); } - }, 15_000); - - it("should be able to upload large files in one go using S3File.write", async () => { { - const s3File = file(tmp_filename, options); - await s3File.write(bigPayload); - expect(await s3File.size).toBe(Buffer.byteLength(bigPayload)); - expect(await s3File.text()).toBe(bigPayload); + const s3file = s3(`${randomUUID()}\\`, options); + await s3file.write("Hello Bun!"); + await s3file.unlink(); } - }, 10_000); + expect().pass(); + }); }); - }); - describe("Bun.s3", () => { - describe(bucketInName ? "bucket in path" : "bucket in options", () => { - const tmp_filename = bucketInName ? `${S3Bucket}/${randomUUID()}` : `${randomUUID()}`; - const options = bucketInName ? 
s3Options : { ...s3Options, bucket: S3Bucket }; - beforeAll(async () => { - const s3file = s3(tmp_filename, options); - await s3file.write("Hello Bun!"); + describe("static methods", () => { + it("it's defined", () => { + expect(S3Client).toBeDefined(); + expect(S3Client.write).toBeDefined(); + expect(S3Client.file).toBeDefined(); + expect(S3Client.stat).toBeDefined(); + expect(S3Client.unlink).toBeDefined(); + expect(S3Client.exists).toBeDefined(); + expect(S3Client.presign).toBeDefined(); + expect(S3Client.size).toBeDefined(); + expect(S3Client.delete).toBeDefined(); }); - - afterAll(async () => { - const s3file = s3(tmp_filename, options); - await s3file.unlink(); - }); - - it("should download file via Bun.s3().text()", async () => { - const s3file = s3(tmp_filename, options); - const text = await s3file.text(); - expect(text).toBe("Hello Bun!"); - }); - - it("should download range", async () => { - const s3file = s3(tmp_filename, options); - const text = await s3file.slice(6, 10).text(); - expect(text).toBe("Bun!"); - }); - - it("should check if a key exists or content-length", async () => { - const s3file = s3(tmp_filename, options); - const exists = await s3file.exists(); - expect(exists).toBe(true); - const contentLength = await s3file.size; - expect(contentLength).toBe(10); - }); - - it("should check if a key does not exist", async () => { - const s3file = s3(tmp_filename + "-does-not-exist", options); - const exists = await s3file.exists(); - expect(exists).toBe(false); - }); - - it("presign url", async () => { - const s3file = s3(tmp_filename, options); - const response = await fetch(s3file.presign()); + it("should work", async () => { + const filename = randomUUID() + ".txt"; + await S3Client.write(filename, "Hello Bun!", { ...s3Options, bucket: S3Bucket }); + expect(await S3Client.file(filename, { ...s3Options, bucket: S3Bucket }).text()).toBe("Hello Bun!"); + const stat = await S3Client.stat(filename, { ...s3Options, bucket: S3Bucket }); + expect(stat.size).toBe(10); + expect(stat.etag).toBeString(); + expect(stat.lastModified).toBeValidDate(); + expect(stat.type).toBe("text/plain;charset=utf-8"); + const url = S3Client.presign(filename, { ...s3Options, bucket: S3Bucket }); + expect(url).toBeDefined(); + const response = await fetch(url); expect(response.status).toBe(200); expect(await response.text()).toBe("Hello Bun!"); + await S3Client.unlink(filename, { ...s3Options, bucket: S3Bucket }); + expect().pass(); }); - - it("should be able to set content-type", async () => { - { - const s3file = s3(tmp_filename, { ...options, type: "text/css" }); - await s3file.write("Hello Bun!"); - const response = await fetch(s3file.presign()); - expect(response.headers.get("content-type")).toStartWith("text/css"); - } - { - const s3file = s3(tmp_filename, options); - await s3file.write("Hello Bun!", { type: "text/plain" }); - const response = await fetch(s3file.presign()); - expect(response.headers.get("content-type")).toStartWith("text/plain"); - } - - { - const s3file = s3(tmp_filename, options); - const writer = s3file.writer({ type: "application/json" }); - writer.write("Hello Bun!"); - await writer.end(); - const response = await fetch(s3file.presign()); - expect(response.headers.get("content-type")).toStartWith("application/json"); + }); + describe("errors", () => { + it("Bun.write(s3file, file) should throw if the file does not exist", async () => { + try { + await Bun.write(s3("test.txt", { ...s3Options, bucket: S3Bucket }), file("./do-not-exist.txt")); + expect.unreachable(); + }
catch (e: any) { + expect(e?.code).toBe("ENOENT"); + expect(e?.path).toBe("./do-not-exist.txt"); + expect(e?.syscall).toBe("open"); } }); + it("Bun.write(s3file, file) should work with empty file", async () => { + const dir = tempDirWithFiles("fsr", { + "hello.txt": "", + }); + await Bun.write(s3("test.txt", { ...s3Options, bucket: S3Bucket }), file(path.join(dir, "hello.txt"))); + }); + it("Bun.write(s3file, file) should throw if the file does not exist", async () => { + try { + await Bun.write( + s3("test.txt", { ...s3Options, bucket: S3Bucket }), + s3("do-not-exist.txt", { ...s3Options, bucket: S3Bucket }), + ); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("NoSuchKey"); + expect(e?.path).toBe("do-not-exist.txt"); + expect(e?.name).toBe("S3Error"); + } + }); + it("Bun.write(s3file, file) should throw if the bucket does not exist", async () => { + try { + await Bun.write( + s3("test.txt", { ...s3Options, bucket: S3Bucket }), + s3("do-not-exist.txt", { ...s3Options, bucket: "does-not-exists" }), + ); + expect.unreachable(); + } catch (e: any) { + expect(["AccessDenied", "NoSuchBucket"]).toContain(e?.code); + expect(e?.path).toBe("do-not-exist.txt"); + expect(e?.name).toBe("S3Error"); + } + }); + it("should error if bucket is missing", async () => { + try { + await Bun.write(s3("test.txt", s3Options), "Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_PATH"); + expect(e?.name).toBe("S3Error"); + } + }); - while (true) { - const { done, value } = await reader.read(); - if (done) break; - bytes += value?.length ??
0; + it("should error if bucket is missing on payload", async () => { + try { + await Bun.write(s3("test.txt", { ...s3Options, bucket: S3Bucket }), s3("test2.txt", s3Options)); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_PATH"); + expect(e?.path).toBe("test2.txt"); + expect(e?.name).toBe("S3Error"); + } + }); - if (value) chunks.push(value as Buffer); - } - expect(bytes).toBe(10); - expect(Buffer.concat(chunks)).toEqual(Buffer.from("Hello Bun!")); + it("should error when invalid method", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path)].map(async fn => { + const s3file = fn("method-test", { + ...s3Options, + bucket: S3Bucket, + }); + + try { + await s3file.presign({ method: "OPTIONS" }); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_METHOD"); + } + }), + ); + }); + + it("should error when path is too long", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path)].map(async fn => { + try { + const s3file = fn("test" + "a".repeat(4096), { + ...s3Options, + bucket: S3Bucket, + }); + + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(["ENAMETOOLONG", "ERR_S3_INVALID_PATH"]).toContain(e?.code); + } + }), + ); + }); + }); + describe("credentials", () => { + it("should error with invalid access key id", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path), file].map(async fn => { + const s3file = fn("s3://bucket/credentials-test", { + ...s3Options, + accessKeyId: "invalid", + }); + + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(["InvalidAccessKeyId", "InvalidArgument"]).toContain(e?.code); + } + }), + ); + }); + it("should error with invalid secret key id", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path), file].map(async fn => { + const s3file = fn("s3://bucket/credentials-test", { + ...s3Options, + secretAccessKey: "invalid", + }); + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(["SignatureDoesNotMatch", "AccessDenied"]).toContain(e?.code); + } + }), + ); + }); + + it("should error with invalid endpoint", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path), file].map(async fn => { + try { + const s3file = fn("s3://bucket/credentials-test", { + ...s3Options, + endpoint: "🙂.🥯", + }); + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_INVALID_ARG_TYPE"); + } + }), + ); + }); + it("should error with invalid endpoint", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path), file].map(async fn => { + try { + const s3file = fn("s3://bucket/credentials-test", { + ...s3Options, // credentials and endpoint don't match + endpoint: "s3.us-west-1.amazonaws.com", + }); + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("PermanentRedirect"); + } + }), + ); + }); + it("should error with invalid endpoint", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path), file].map(async fn => { + try { + const s3file = fn("s3://bucket/credentials-test", { + ...s3Options, + endpoint: "..asd.@%&&&%%", + }); + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_INVALID_ARG_TYPE"); + } + }), + ); + }); + + it("should error with
invalid bucket", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path), file].map(async fn => { + const s3file = fn("s3://credentials-test", { + ...s3Options, + bucket: "invalid", + }); + + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(["AccessDenied", "NoSuchBucket"]).toContain(e?.code); + expect(e?.name).toBe("S3Error"); + } + }), + ); + }); + + it("should error when missing credentials", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path), file].map(async fn => { + const s3file = fn("s3://credentials-test", { + bucket: "invalid", + }); + + try { + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_MISSING_CREDENTIALS"); + } + }), + ); + }); + it("should error when presign missing credentials", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path)].map(async fn => { + const s3file = fn("method-test", { + bucket: S3Bucket, + }); + + try { + await s3file.presign(); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_MISSING_CREDENTIALS"); + } + }), + ); + }); + + it("should error when presign with invalid endpoint", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path)].map(async fn => { + let options = { ...s3Options, bucket: S3Bucket }; + options.endpoint = Buffer.alloc(1024, "a").toString(); + + try { + const s3file = fn(randomUUID(), options); + + await s3file.write("Hello Bun!"); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_ENDPOINT"); + } + }), + ); + }); + it("should error when presign with invalid token", async () => { + await Promise.all( + [s3, (path, ...args) => S3(...args).file(path)].map(async fn => { + let options = { ...s3Options, bucket: S3Bucket }; + options.sessionToken = Buffer.alloc(4096, "a").toString(); + + try { + const s3file = fn(randomUUID(), options); + await s3file.presign(); + expect.unreachable(); + } catch (e: any) { + expect(e?.code).toBe("ERR_S3_INVALID_SESSION_TOKEN"); + } + }), + ); + }); + }); + + describe("S3 static methods", () => { + describe("presign", () => { + it("should work", async () => { + const s3file = s3("s3://bucket/credentials-test", s3Options); + const url = s3file.presign(); + expect(url).toBeDefined(); + expect(url.includes("X-Amz-Expires=86400")).toBe(true); + expect(url.includes("X-Amz-Date")).toBe(true); + expect(url.includes("X-Amz-Signature")).toBe(true); + expect(url.includes("X-Amz-Credential")).toBe(true); + expect(url.includes("X-Amz-Algorithm")).toBe(true); + expect(url.includes("X-Amz-SignedHeaders")).toBe(true); + }); + it("default endpoint and region should work", async () => { + let options = { ...s3Options }; + options.endpoint = undefined; + options.region = undefined; + const s3file = s3("s3://bucket/credentials-test", options); + const url = s3file.presign(); + expect(url).toBeDefined(); + expect(url.includes("https://s3.us-east-1.amazonaws.com")).toBe(true); + expect(url.includes("X-Amz-Expires=86400")).toBe(true); + expect(url.includes("X-Amz-Date")).toBe(true); + expect(url.includes("X-Amz-Signature")).toBe(true); + expect(url.includes("X-Amz-Credential")).toBe(true); + expect(url.includes("X-Amz-Algorithm")).toBe(true); + expect(url.includes("X-Amz-SignedHeaders")).toBe(true); + }); + it("default endpoint + region should work", async () => { + let options = { ...s3Options }; + options.endpoint = undefined; + options.region = 
"us-west-1"; + const s3file = s3("s3://bucket/credentials-test", options); + const url = s3file.presign(); + expect(url).toBeDefined(); + expect(url.includes("https://s3.us-west-1.amazonaws.com")).toBe(true); + expect(url.includes("X-Amz-Expires=86400")).toBe(true); + expect(url.includes("X-Amz-Date")).toBe(true); + expect(url.includes("X-Amz-Signature")).toBe(true); + expect(url.includes("X-Amz-Credential")).toBe(true); + expect(url.includes("X-Amz-Algorithm")).toBe(true); + expect(url.includes("X-Amz-SignedHeaders")).toBe(true); + }); + it("should work with expires", async () => { + const s3file = s3("s3://bucket/credentials-test", s3Options); + const url = s3file.presign({ + expiresIn: 10, + }); + expect(url).toBeDefined(); + expect(url.includes("X-Amz-Expires=10")).toBe(true); + expect(url.includes("X-Amz-Date")).toBe(true); + expect(url.includes("X-Amz-Signature")).toBe(true); + expect(url.includes("X-Amz-Credential")).toBe(true); + expect(url.includes("X-Amz-Algorithm")).toBe(true); + expect(url.includes("X-Amz-SignedHeaders")).toBe(true); + }); + it("should work with acl", async () => { + const s3file = s3("s3://bucket/credentials-test", s3Options); + const url = s3file.presign({ + expiresIn: 10, + acl: "public-read", + }); + expect(url).toBeDefined(); + expect(url.includes("X-Amz-Expires=10")).toBe(true); + expect(url.includes("X-Amz-Acl=public-read")).toBe(true); + expect(url.includes("X-Amz-Date")).toBe(true); + expect(url.includes("X-Amz-Signature")).toBe(true); + expect(url.includes("X-Amz-Credential")).toBe(true); + expect(url.includes("X-Amz-Algorithm")).toBe(true); + expect(url.includes("X-Amz-SignedHeaders")).toBe(true); }); - it("should work with large files ", async () => { - const s3file = s3(tmp_filename + "-readable-stream-big", options); - await s3file.write(bigishPayload); - const stream = s3file.stream(); - const reader = stream.getReader(); - let bytes = 0; - let chunks: Array = []; - while (true) { - const { done, value } = await reader.read(); - if (done) break; - bytes += value?.length ?? 
0; - if (value) chunks.push(value as Buffer); - } - expect(bytes).toBe(Buffer.byteLength(bigishPayload)); - expect(Buffer.concat(chunks).toString()).toBe(bigishPayload); - }, 30_000); - }); - }); - }); - } - describe("credentials", () => { - it("should error with invalid access key id", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://bucket/credentials-test", { - ...s3Options, - accessKeyId: "invalid", - }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); - }); - }); - it("should error with invalid secret key id", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://bucket/credentials-test", { - ...s3Options, - secretAccessKey: "invalid", - }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); - }); - }); + it("s3().presign() should work", async () => { + const url = s3("s3://bucket/credentials-test", s3Options).presign({ + expiresIn: 10, + }); + expect(url).toBeDefined(); + expect(url.includes("X-Amz-Expires=10")).toBe(true); + expect(url.includes("X-Amz-Date")).toBe(true); + expect(url.includes("X-Amz-Signature")).toBe(true); + expect(url.includes("X-Amz-Credential")).toBe(true); + expect(url.includes("X-Amz-Algorithm")).toBe(true); + expect(url.includes("X-Amz-SignedHeaders")).toBe(true); + }); - it("should error with invalid endpoint", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://bucket/credentials-test", { - ...s3Options, - endpoint: "🙂.🥯", - }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); - }); - }); + it("s3().presign() endpoint should work", async () => { + const url = s3("s3://bucket/credentials-test", s3Options).presign({ + expiresIn: 10, + endpoint: "https://s3.bun.sh", + }); + expect(url).toBeDefined(); + expect(url.includes("https://s3.bun.sh")).toBe(true); + expect(url.includes("X-Amz-Expires=10")).toBe(true); + expect(url.includes("X-Amz-Date")).toBe(true); + expect(url.includes("X-Amz-Signature")).toBe(true); + expect(url.includes("X-Amz-Credential")).toBe(true); + expect(url.includes("X-Amz-Algorithm")).toBe(true); + expect(url.includes("X-Amz-SignedHeaders")).toBe(true); + }); - it("should error with invalid endpoint", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://bucket/credentials-test", { - ...s3Options, - endpoint: "..asd.@%&&&%%", + it("s3().presign() bucket should work", async () => { + const url = s3("s3://folder/credentials-test", s3Options).presign({ + expiresIn: 10, + bucket: "my-bucket", + }); + expect(url).toBeDefined(); + expect(url.includes("my-bucket")).toBe(true); + expect(url.includes("X-Amz-Expires=10")).toBe(true); + expect(url.includes("X-Amz-Date")).toBe(true); + expect(url.includes("X-Amz-Signature")).toBe(true); + expect(url.includes("X-Amz-Credential")).toBe(true); + expect(url.includes("X-Amz-Algorithm")).toBe(true); + expect(url.includes("X-Amz-SignedHeaders")).toBe(true); + }); }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); - }); - }); - it("should error with invalid bucket", async () => { - [s3, (...args) => new S3(...args), file].forEach(fn => { - const s3file = fn("s3://credentials-test", { - ...s3Options, - bucket: "invalid", + it("exists, write, size, unlink should work", async () => { + const fullPath = randomUUID(); + const bucket = S3({ + ...s3Options, + bucket: S3Bucket, + }); + expect(await bucket.exists(fullPath)).toBe(false); + + await bucket.write(fullPath, "bun"); + expect(await 
bucket.exists(fullPath)).toBe(true); + expect(await bucket.size(fullPath)).toBe(3); + await bucket.unlink(fullPath); + expect(await bucket.exists(fullPath)).toBe(false); + }); + + it("should be able to upload a slice", async () => { + const filename = randomUUID(); + const fullPath = `s3://${S3Bucket}/${filename}`; + const s3file = s3(fullPath, s3Options); + await s3file.write("Hello Bun!"); + const slice = s3file.slice(6, 10); + expect(await slice.text()).toBe("Bun!"); + expect(await s3file.text()).toBe("Hello Bun!"); + + await s3file.write(slice); + const text = await s3file.text(); + expect(text).toBe("Bun!"); + await s3file.unlink(); }); - expect(s3file.write("Hello Bun!")).rejects.toThrow(); }); }); }); - - describe("S3 static methods", () => { - describe("presign", () => { - it("should work", async () => { - const s3file = s3("s3://bucket/credentials-test", s3Options); - const url = s3file.presign(); - expect(url).toBeDefined(); - expect(url.includes("X-Amz-Expires=86400")).toBe(true); - expect(url.includes("X-Amz-Date")).toBe(true); - expect(url.includes("X-Amz-Signature")).toBe(true); - expect(url.includes("X-Amz-Credential")).toBe(true); - expect(url.includes("X-Amz-Algorithm")).toBe(true); - expect(url.includes("X-Amz-SignedHeaders")).toBe(true); - }); - it("should work with expires", async () => { - const s3file = s3("s3://bucket/credentials-test", s3Options); - const url = s3file.presign({ - expiresIn: 10, - }); - expect(url).toBeDefined(); - expect(url.includes("X-Amz-Expires=10")).toBe(true); - expect(url.includes("X-Amz-Date")).toBe(true); - expect(url.includes("X-Amz-Signature")).toBe(true); - expect(url.includes("X-Amz-Credential")).toBe(true); - expect(url.includes("X-Amz-Algorithm")).toBe(true); - expect(url.includes("X-Amz-SignedHeaders")).toBe(true); - }); - - it("S3.presign should work", async () => { - const url = S3.presign("s3://bucket/credentials-test", { - ...s3Options, - expiresIn: 10, - }); - expect(url).toBeDefined(); - expect(url.includes("X-Amz-Expires=10")).toBe(true); - expect(url.includes("X-Amz-Date")).toBe(true); - expect(url.includes("X-Amz-Signature")).toBe(true); - expect(url.includes("X-Amz-Credential")).toBe(true); - expect(url.includes("X-Amz-Algorithm")).toBe(true); - expect(url.includes("X-Amz-SignedHeaders")).toBe(true); - }); - - it("S3.presign endpoint should work", async () => { - const url = S3.presign("s3://bucket/credentials-test", { - ...s3Options, - expiresIn: 10, - endpoint: "https://s3.bun.sh", - }); - expect(url).toBeDefined(); - expect(url.includes("https://s3.bun.sh")).toBe(true); - expect(url.includes("X-Amz-Expires=10")).toBe(true); - expect(url.includes("X-Amz-Date")).toBe(true); - expect(url.includes("X-Amz-Signature")).toBe(true); - expect(url.includes("X-Amz-Credential")).toBe(true); - expect(url.includes("X-Amz-Algorithm")).toBe(true); - expect(url.includes("X-Amz-SignedHeaders")).toBe(true); - }); - - it("S3.presign endpoint should work", async () => { - const url = S3.presign("s3://folder/credentials-test", { - ...s3Options, - expiresIn: 10, - bucket: "my-bucket", - }); - expect(url).toBeDefined(); - expect(url.includes("my-bucket")).toBe(true); - expect(url.includes("X-Amz-Expires=10")).toBe(true); - expect(url.includes("X-Amz-Date")).toBe(true); - expect(url.includes("X-Amz-Signature")).toBe(true); - expect(url.includes("X-Amz-Credential")).toBe(true); - expect(url.includes("X-Amz-Algorithm")).toBe(true); - expect(url.includes("X-Amz-SignedHeaders")).toBe(true); - }); - }); - - it("exists, upload, size, unlink should 
work", async () => { - const filename = randomUUID(); - const fullPath = `s3://${S3Bucket}/${filename}`; - expect(await S3.exists(fullPath, s3Options)).toBe(false); - - await S3.upload(fullPath, "bun", s3Options); - expect(await S3.exists(fullPath, s3Options)).toBe(true); - expect(await S3.size(fullPath, s3Options)).toBe(3); - await S3.unlink(fullPath, s3Options); - expect(await S3.exists(fullPath, s3Options)).toBe(false); - }); - - it("should be able to upload a slice", async () => { - const filename = randomUUID(); - const fullPath = `s3://${S3Bucket}/${filename}`; - const s3file = s3(fullPath, s3Options); - await s3file.write("Hello Bun!"); - const slice = s3file.slice(6, 10); - expect(await slice.text()).toBe("Bun!"); - expect(await s3file.text()).toBe("Hello Bun!"); - - await S3.upload(fullPath, slice, s3Options); - const text = await s3file.text(); - expect(text).toBe("Bun!"); - await s3file.unlink(); - }); - }); -}); +} diff --git a/test/js/bun/spawn/spawn-env.test.ts b/test/js/bun/spawn/spawn-env.test.ts new file mode 100644 index 0000000000..5d2e34cc0e --- /dev/null +++ b/test/js/bun/spawn/spawn-env.test.ts @@ -0,0 +1,24 @@ +import { test, expect } from "bun:test"; +import { spawn } from "bun"; +import { bunExe } from "harness"; + +test("spawn env", async () => { + const env = {}; + Object.defineProperty(env, "LOL", { + get() { + throw new Error("Bad!!"); + }, + configurable: false, + enumerable: true, + }); + + // This was the minimum to reliably cause a crash in Bun < v1.1.42 + for (let i = 0; i < 1024 * 10; i++) { + try { + const result = spawn({ + env, + cmd: [bunExe(), "-e", "console.log(process.env.LOL)"], + }); + } catch (e) {} + } +}); diff --git a/test/js/bun/spawn/spawn-path.test.ts b/test/js/bun/spawn/spawn-path.test.ts new file mode 100644 index 0000000000..d47876c33e --- /dev/null +++ b/test/js/bun/spawn/spawn-path.test.ts @@ -0,0 +1,26 @@ +import { test, expect } from "bun:test"; +import { chmodSync } from "fs"; +import { isWindows, tempDirWithFiles, bunEnv } from "harness"; +import path from "path"; + +test.skipIf(isWindows)("spawn uses PATH from env if present", async () => { + const tmpDir = await tempDirWithFiles("spawn-path", { + "test-script": `#!/usr/bin/env bash +echo "hello from script"`, + }); + + chmodSync(path.join(tmpDir, "test-script"), 0o777); + + const proc = Bun.spawn(["test-script"], { + env: { + ...bunEnv, + PATH: tmpDir + ":" + bunEnv.PATH, + }, + }); + + const output = await new Response(proc.stdout).text(); + expect(output.trim()).toBe("hello from script"); + + const status = await proc.exited; + expect(status).toBe(0); +}); diff --git a/test/js/bun/test/snapshot-tests/snapshots/snapshot.test.ts b/test/js/bun/test/snapshot-tests/snapshots/snapshot.test.ts index fc014b9faf..d211fd4c19 100644 --- a/test/js/bun/test/snapshot-tests/snapshots/snapshot.test.ts +++ b/test/js/bun/test/snapshot-tests/snapshots/snapshot.test.ts @@ -533,7 +533,7 @@ describe("inline snapshots", () => { (\r ${v("", bad, '`"12"`')})\r ; - expect("13").toMatchInlineSnapshot(${v("", bad, '`"13"`')}); expect("14").toMatchInlineSnapshot(${v("", bad, '`"14"`')}); expect("15").toMatchInlineSnapshot(${v("", bad, '`"15"`')}); + expect("13").toMatchInlineSnapshot(${v("", bad, '`"13"`')}); expect("14").toMatchInlineSnapshot(${v("", bad, '`"14"`')}); expect("15").toMatchInlineSnapshot(${v("", bad, '`"15"`')}); expect({a: new Date()}).toMatchInlineSnapshot({a: expect.any(Date)}${v("", ', "bad"', ', `\n{\n "a": Any,\n}\n`')}); expect({a: new Date()}).toMatchInlineSnapshot({a: 
expect.any(Date)}${v(",", ', "bad"', ', `\n{\n "a": Any,\n}\n`')}); expect({a: new Date()}).toMatchInlineSnapshot({a: expect.any(Date)\n}${v("", ', "bad"', ', `\n{\n "a": Any,\n}\n`')}); diff --git a/test/js/bun/test/stack.test.ts b/test/js/bun/test/stack.test.ts index dd32267d49..3b0c3a6061 100644 --- a/test/js/bun/test/stack.test.ts +++ b/test/js/bun/test/stack.test.ts @@ -113,7 +113,8 @@ test("throwing inside an error suppresses the error and continues printing prope const { stderr, exitCode } = result; - expect(stderr.toString().trim()).toStartWith(`ENOENT: No such file or directory + expect(stderr.toString().trim()).toStartWith(`error: No such file or directory + code: "ENOENT", path: "this-file-path-is-bad", syscall: "open", errno: -2, diff --git a/test/js/bun/util/__snapshots__/inspect-error.test.js.snap b/test/js/bun/util/__snapshots__/inspect-error.test.js.snap index a6a949433d..eff7103964 100644 --- a/test/js/bun/util/__snapshots__/inspect-error.test.js.snap +++ b/test/js/bun/util/__snapshots__/inspect-error.test.js.snap @@ -2,7 +2,7 @@ exports[`error.cause 1`] = ` "1 | import { expect, test } from "bun:test"; -2 | +2 | 3 | test("error.cause", () => { 4 | const err = new Error("error 1"); 5 | const err2 = new Error("error 2", { cause: err }); @@ -11,7 +11,7 @@ error: error 2 at [dir]/inspect-error.test.js:5:16 1 | import { expect, test } from "bun:test"; -2 | +2 | 3 | test("error.cause", () => { 4 | const err = new Error("error 1"); ^ @@ -24,7 +24,7 @@ exports[`Error 1`] = ` " 9 | .replaceAll("//", "/"), 10 | ).toMatchSnapshot(); 11 | }); -12 | +12 | 13 | test("Error", () => { 14 | const err = new Error("my message"); ^ @@ -65,7 +65,7 @@ exports[`Error inside minified file (color) 1`] = ` 23 | arguments);c=b;c.s=1;return c.v=g}catch(h){throw g=b,g.s=2,g.v=h,h;}}}; 24 | exports.cloneElement=function(a,b,c){if(null===a||void 0===a)throw Error("React.cloneElement(...): The argument must be a React element, but you passed "+a+".");var f=C({},a.props),d=a.key,e=a.ref,g=a._owner;if(null!=b){void 0!==b.ref&&(e=b.ref,g=K.current);void 0!==b.key&&(d=""+b.key);if(a.type&&a.type.defaultProps)var h=a.type.defaultProps;for(k in b)J.call(b,k)&&!L.hasOwnProperty(k)&&(f[k]=void 0===b[k]&&void 0!==h?h[k]:b[k])}var k=arguments.length-2;if(1===k)f.children=c;else if(1 { + const snapshot = Bun.generateHeapSnapshot("v8"); + // Sanity check: run the validations from this library + const parsed = await v8HeapSnapshot.parseSnapshot(JSON.parse(snapshot)); + + // Loop over all edges and nodes as another sanity check. 
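+ // If v8HeapSnapshot.parseSnapshot resolved the snapshot's flat node/edge + // index arrays correctly, every edge should reference a destination node + // and no node slot should be empty.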
+ for (const edge of parsed.edges) { + if (!edge.to) { + throw new Error("Edge has no 'to' property"); + } + } + for (const node of parsed.nodes) { + if (!node) { + throw new Error("Node is undefined"); + } + } + + expect(parsed.nodes.length).toBeGreaterThan(0); + expect(parsed.edges.length).toBeGreaterThan(0); +}); + +test("v8.getHeapSnapshot()", async () => { + const snapshot = v8.getHeapSnapshot(); + let chunks = []; + for await (const chunk of snapshot) { + expect(chunk.byteLength).toBeGreaterThan(0); + chunks.push(chunk); + } + expect(chunks.length).toBeGreaterThan(0); +}); + +test("v8.writeHeapSnapshot()", async () => { + const path = v8.writeHeapSnapshot(); + expect(path).toBeDefined(); + expect(path).toContain("Heap-"); + + const snapshot = await Bun.file(path).json(); + expect(await v8HeapSnapshot.parseSnapshot(snapshot)).toBeDefined(); +}); + +test("v8.writeHeapSnapshot() with path", async () => { + const dir = tempDirWithFiles("v8-heap-snapshot", { + "test.heapsnapshot": "", + }); + + const path = join(dir, "test.heapsnapshot"); + v8.writeHeapSnapshot(path); + + const snapshot = await Bun.file(path).json(); + expect(await v8HeapSnapshot.parseSnapshot(snapshot)).toBeDefined(); +}); diff --git a/test/js/node/child_process/child_process.test.ts b/test/js/node/child_process/child_process.test.ts index a259c6897d..961f9634d3 100644 --- a/test/js/node/child_process/child_process.test.ts +++ b/test/js/node/child_process/child_process.test.ts @@ -195,8 +195,8 @@ describe("spawn()", () => { it("should allow us to set env", async () => { async function getChildEnv(env: any): Promise<string> { const result: string = await new Promise(resolve => { - const child = spawn(bunExe(), ["-e", "process.stdout.write(JSON.stringify(process.env))"], { env }); - child.stdout.on("data", data => { + const child = spawn(bunExe(), ["-e", "process.stderr.write(JSON.stringify(process.env))"], { env }); + child.stderr.on("data", data => { resolve(data.toString()); }); }); @@ -231,6 +231,7 @@ describe("spawn()", () => { { argv0: bun, stdio: ["inherit", "pipe", "inherit"], + env: bunEnv, }, ); delete process.env.NO_COLOR; diff --git a/test/js/node/fs/fs.test.ts b/test/js/node/fs/fs.test.ts index d4f52071e5..86fc06de30 100644 --- a/test/js/node/fs/fs.test.ts +++ b/test/js/node/fs/fs.test.ts @@ -1115,7 +1115,8 @@ it("readdirSync throws when given a file path", () => { readdirSync(import.meta.path); throw new Error("should not get here"); } catch (exception: any) { - expect(exception.name).toBe("ENOTDIR"); + expect(exception.name).toBe("Error"); + expect(exception.code).toBe("ENOTDIR"); } }); @@ -1126,7 +1127,8 @@ it("readdirSync throws when given a path that doesn't exist", () => { } catch (exception: any) { // the correct error to return in this case is actually ENOENT (which we do on windows), // but on posix we return ENOTDIR - expect(exception.name).toMatch(/ENOTDIR|ENOENT/); + expect(exception.name).toBe("Error"); + expect(exception.code).toMatch(/ENOTDIR|ENOENT/); } }); @@ -1135,7 +1137,8 @@ it("readdirSync throws when given a file path with trailing slash", () => { readdirSync(import.meta.path + "/"); throw new Error("should not get here"); } catch (exception: any) { - expect(exception.name).toBe("ENOTDIR"); + expect(exception.name).toBe("Error"); + expect(exception.code).toBe("ENOTDIR"); } }); diff --git a/test/js/node/net/node-net-server.test.ts b/test/js/node/net/node-net-server.test.ts index 70034749ed..0572567901 100644 --- a/test/js/node/net/node-net-server.test.ts +++ 
b/test/js/node/net/node-net-server.test.ts @@ -285,7 +285,8 @@ describe("net.createServer listen", () => { expect(err).not.toBeNull(); expect(err!.message).toBe("Failed to connect"); - expect(err!.name).toBe("ECONNREFUSED"); + expect(err!.name).toBe("Error"); + expect(err!.code).toBe("ECONNREFUSED"); server.close(); done(); diff --git a/test/js/node/path/matches-glob.test.ts b/test/js/node/path/matches-glob.test.ts new file mode 100644 index 0000000000..8802be251b --- /dev/null +++ b/test/js/node/path/matches-glob.test.ts @@ -0,0 +1,78 @@ +import path from "path"; + +describe("path.matchesGlob(path, glob)", () => { + const stringLikeObject = { + toString() { + return "hi"; + }, + }; + + it.each([ + // line break + null, + undefined, + 123, + stringLikeObject, + Symbol("hi"), + ])("throws if `path` is not a string", (notAString: any) => { + expect(() => path.matchesGlob(notAString, "*")).toThrow(TypeError); + }); + + it.each([ + // line break + null, + undefined, + 123, + stringLikeObject, + Symbol("hi"), + ])("throws if `glob` is not a string", (notAString: any) => { + expect(() => path.matchesGlob("hi", notAString)).toThrow(TypeError); + }); +}); + +describe("path.posix.matchesGlob(path, glob)", () => { + it.each([ + // line break + ["foo.js", "*.js"], + ["foo.js", "*.[tj]s"], + ["foo.ts", "*.[tj]s"], + ["foo.js", "**/*.js"], + ["src/bar/foo.js", "**/*.js"], + ["foo/bar/baz", "foo/[bcr]ar/baz"], + ])("path '%s' matches pattern '%s'", (pathname, glob) => { + expect(path.posix.matchesGlob(pathname, glob)).toBeTrue(); + }); + it.each([ + // line break + ["foo.js", "*.ts"], + ["src/foo.js", "*.js"], + ["foo.js", "src/*.js"], + ["foo/bar", "*"], + ])("path '%s' does not match pattern '%s'", (pathname, glob) => { + expect(path.posix.matchesGlob(pathname, glob)).toBeFalse(); + }); +}); + +describe("path.win32.matchesGlob(path, glob)", () => { + it.each([ + // line break + ["foo.js", "*.js"], + ["foo.js", "*.[tj]s"], + ["foo.ts", "*.[tj]s"], + ["foo.js", "**\\*.js"], + ["src\\bar\\foo.js", "**\\*.js"], + ["src\\bar\\foo.js", "**/*.js"], + ["foo\\bar\\baz", "foo\\[bcr]ar\\baz"], + ["foo\\bar\\baz", "foo/[bcr]ar/baz"], + ])("path '%s' matches pattern '%s'", (pathname, glob) => { + expect(path.win32.matchesGlob(pathname, glob)).toBeTrue(); + }); + it.each([ + // line break + ["foo.js", "*.ts"], + ["foo.js", "src\\*.js"], + ["foo/bar", "*"], + ])("path '%s' does not match pattern '%s'", (pathname, glob) => { + expect(path.win32.matchesGlob(pathname, glob)).toBeFalse(); + }); }); diff --git a/test/js/node/process/call-constructor.test.js b/test/js/node/process/call-constructor.test.js new file mode 100644 index 0000000000..7522966572 --- /dev/null +++ b/test/js/node/process/call-constructor.test.js @@ -0,0 +1,11 @@ +import { expect, test } from "bun:test"; +import process from "process"; + +test("the constructor of process can be called", () => { + let obj = process.constructor.call({ ...process }); + expect(Object.getPrototypeOf(obj)).toEqual(Object.getPrototypeOf(process)); +}); + +test("#14346", () => { + process.__proto__.constructor.call({}); +}); diff --git a/test/js/node/process/process.test.js b/test/js/node/process/process.test.js index 7055550847..965105f56b 100644 --- a/test/js/node/process/process.test.js +++ b/test/js/node/process/process.test.js @@ -2,7 +2,7 @@ import { spawnSync, which } from "bun"; import { describe, expect, it } from "bun:test"; import { existsSync, readFileSync, writeFileSync } from "fs"; import { bunEnv, bunExe, isWindows, tmpdirSync } from "harness"; -import { 
basename, join, resolve } from "path"; +import path, { basename, join, resolve } from "path"; import { familySync } from "detect-libc"; expect.extend({ @@ -236,12 +236,16 @@ it("process.uptime()", () => { }); it("process.umask()", () => { - let notNumbers = [265n, "string", true, false, null, {}, [], () => {}, Symbol("symbol"), BigInt(1)]; - for (let notNumber of notNumbers) { - expect(() => { - process.umask(notNumber); - }).toThrow('The "mask" argument must be of type number'); - } + expect(() => process.umask(265n)).toThrow('The "mask" argument must be of type number. Received type bigint (265n)'); + expect(() => process.umask("string")).toThrow(`The argument 'mask' must be a 32-bit unsigned integer or an octal string. Received "string"`); // prettier-ignore + expect(() => process.umask(true)).toThrow('The "mask" argument must be of type number. Received type boolean (true)'); + expect(() => process.umask(false)).toThrow('The "mask" argument must be of type number. Received type boolean (false)'); // prettier-ignore + expect(() => process.umask(null)).toThrow('The "mask" argument must be of type number. Received null'); + expect(() => process.umask({})).toThrow('The "mask" argument must be of type number. Received an instance of Object'); + expect(() => process.umask([])).toThrow('The "mask" argument must be of type number. Received an instance of Array'); + expect(() => process.umask(() => {})).toThrow('The "mask" argument must be of type number. Received function '); + expect(() => process.umask(Symbol("symbol"))).toThrow('The "mask" argument must be of type number. Received type symbol (Symbol(symbol))'); // prettier-ignore + expect(() => process.umask(BigInt(1))).toThrow('The "mask" argument must be of type number. Received type bigint (1n)'); // prettier-ignore let rangeErrors = [NaN, -1.4, Infinity, -Infinity, -1, 1.3, 4294967296]; for (let rangeError of rangeErrors) { @@ -310,20 +314,6 @@ it("process.config", () => { }); }); -it("process.emitWarning", () => { - process.emitWarning("-- Testing process.emitWarning --"); - var called = 0; - process.on("warning", err => { - called++; - expect(err.message).toBe("-- Testing process.on('warning') --"); - }); - process.emitWarning("-- Testing process.on('warning') --"); - expect(called).toBe(1); - expect(process.off("warning")).toBe(process); - process.emitWarning("-- Testing process.on('warning') --"); - expect(called).toBe(1); -}); - it("process.execArgv", () => { expect(process.execArgv instanceof Array).toBe(true); }); @@ -342,11 +332,21 @@ it("process.argv in testing", () => { describe("process.exitCode", () => { it("validates int", () => { - expect(() => (process.exitCode = "potato")).toThrow(`exitCode must be an integer`); - expect(() => (process.exitCode = 1.2)).toThrow("exitCode must be an integer"); - expect(() => (process.exitCode = NaN)).toThrow("exitCode must be an integer"); - expect(() => (process.exitCode = Infinity)).toThrow("exitCode must be an integer"); - expect(() => (process.exitCode = -Infinity)).toThrow("exitCode must be an integer"); + expect(() => (process.exitCode = "potato")).toThrow( + `The "code" argument must be of type number. Received type string ("potato")`, + ); + expect(() => (process.exitCode = 1.2)).toThrow( + `The value of \"code\" is out of range. It must be an integer. Received 1.2`, + ); + expect(() => (process.exitCode = NaN)).toThrow( + `The value of \"code\" is out of range. It must be an integer. 
Received NaN`, + ); + expect(() => (process.exitCode = Infinity)).toThrow( + `The value of \"code\" is out of range. It must be an integer. Received Infinity`, + ); + expect(() => (process.exitCode = -Infinity)).toThrow( + `The value of \"code\" is out of range. It must be an integer. Received -Infinity`, + ); }); it("works with implicit process.exit", () => { @@ -458,13 +458,13 @@ describe("process.cpuUsage", () => { user: -1, system: 100, }), - ).toThrow("The 'user' property must be a number between 0 and 2^53"); + ).toThrow("The property 'prevValue.user' is invalid. Received -1"); expect(() => process.cpuUsage({ user: 100, system: -1, }), - ).toThrow("The 'system' property must be a number between 0 and 2^53"); + ).toThrow("The property 'prevValue.system' is invalid. Received -1"); }); // Skipped on Windows because it seems UV returns { user: 15000, system: 0 } constantly @@ -684,13 +684,7 @@ it("dlopen accepts file: URLs", () => { }); it("process.constrainedMemory()", () => { - if (process.platform === "linux") { - // On Linux, it returns 0 if the kernel doesn't support it - expect(process.constrainedMemory() >= 0).toBe(true); - } else { - // On unsupported platforms, it returns undefined - expect(process.constrainedMemory()).toBeUndefined(); - } + expect(process.constrainedMemory() >= 0).toBe(true); }); it("process.report", () => { diff --git a/test/js/node/readline/readline.node.test.ts b/test/js/node/readline/readline.node.test.ts index caa38dcfa5..fecce0f34d 100644 --- a/test/js/node/readline/readline.node.test.ts +++ b/test/js/node/readline/readline.node.test.ts @@ -306,15 +306,15 @@ describe("readline.cursorTo()", () => { // Verify that cursorTo() throws if x or y is NaN. assert.throws(() => { readline.cursorTo(writable, NaN); - }, /ERR_INVALID_ARG_VALUE/); + }, "ERR_INVALID_ARG_VALUE"); assert.throws(() => { readline.cursorTo(writable, 1, NaN); - }, /ERR_INVALID_ARG_VALUE/); + }, "ERR_INVALID_ARG_VALUE"); assert.throws(() => { readline.cursorTo(writable, NaN, NaN); - }, /ERR_INVALID_ARG_VALUE/); + }, "ERR_INVALID_ARG_VALUE"); }); }); diff --git a/test/js/node/test/common/index.js b/test/js/node/test/common/index.js index 6b5d1079ff..40d0639a00 100644 --- a/test/js/node/test/common/index.js +++ b/test/js/node/test/common/index.js @@ -132,11 +132,9 @@ if (process.argv.length === 2 && const options = { encoding: 'utf8', stdio: 'inherit' }; const result = spawnSync(process.execPath, args, options); if (result.signal) { - process.kill(0, result.signal); + process.kill(process.pid, result.signal); } else { - // Ensure we don't call the "exit" callbacks, as that will cause the - // test to fail when it may have passed in the child process. 
- process.kill(process.pid, result.status); + process.exit(result.status); } } } @@ -900,6 +898,7 @@ function invalidArgTypeHelper(input) { let inspected = inspect(input, { colors: false }); if (inspected.length > 28) { inspected = `${inspected.slice(inspected, 0, 25)}...`; } + if (inspected.startsWith("'") && inspected.endsWith("'")) inspected = `"${inspected.slice(1, inspected.length - 1)}"`; // BUN: util.inspect uses ' but bun uses " for strings return ` Received type ${typeof input} (${inspected})`; } @@ -1218,5 +1217,3 @@ module.exports = new Proxy(common, { return obj[prop]; }, }); - - diff --git a/test/js/node/test/parallel/test-child-process-stdio.js b/test/js/node/test/parallel/test-child-process-stdio.js new file mode 100644 index 0000000000..15c2770aa2 --- /dev/null +++ b/test/js/node/test/parallel/test-child-process-stdio.js @@ -0,0 +1,77 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const { spawn } = require('child_process'); + +// Test stdio piping. +{ + const child = spawn(...common.pwdCommand, { stdio: ['pipe'] }); + assert.notStrictEqual(child.stdout, null); + assert.notStrictEqual(child.stderr, null); +} + +// Test stdio ignoring. +{ + const child = spawn(...common.pwdCommand, { stdio: 'ignore' }); + assert.strictEqual(child.stdout, null); + assert.strictEqual(child.stderr, null); +} + +// Assert options invariance. +{ + const options = { stdio: 'ignore' }; + spawn(...common.pwdCommand, options); + assert.deepStrictEqual(options, { stdio: 'ignore' }); +} + +// Test stdout buffering. +{ + let output = ''; + const child = spawn(...common.pwdCommand); + + child.stdout.setEncoding('utf8'); + child.stdout.on('data', function(s) { + output += s; + }); + + child.on('exit', common.mustCall(function(code) { + assert.strictEqual(code, 0); + })); + + child.on('close', common.mustCall(function() { + assert.strictEqual(output.length > 1, true); + assert.strictEqual(output[output.length - 1], '\n'); + })); +} + +// Assert only one IPC pipe allowed. 
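+// Node permits at most one 'ipc' entry in the stdio array; a second 'ipc' +// must be rejected synchronously with ERR_IPC_ONE_PIPE, before any child +// process is spawned.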
+assert.throws( + () => { + spawn( + ...common.pwdCommand, + { stdio: ['pipe', 'pipe', 'pipe', 'ipc', 'ipc'] } + ); + }, + { code: 'ERR_IPC_ONE_PIPE', name: 'Error' } +); diff --git a/test/js/node/test/parallel/test-console-tty-colors.js b/test/js/node/test/parallel/test-console-tty-colors.js index 969fb53a23..63ff42935b 100644 --- a/test/js/node/test/parallel/test-console-tty-colors.js +++ b/test/js/node/test/parallel/test-console-tty-colors.js @@ -60,7 +60,21 @@ check(false, false, false); write: common.mustNotCall() }); - [0, 'true', null, {}, [], () => {}].forEach((colorMode) => { + assert.throws( + () => { + new Console({ + stdout: stream, + ignoreErrors: false, + colorMode: 'true' + }); + }, + { + message: `The argument 'colorMode' must be one of: 'auto', true, false. Received "true"`, + code: 'ERR_INVALID_ARG_VALUE' + } + ); + + [0, null, {}, [], () => {}].forEach((colorMode) => { const received = util.inspect(colorMode); assert.throws( () => { diff --git a/test/js/node/test/parallel/test-path-glob.js b/test/js/node/test/parallel/test-path-glob.js new file mode 100644 index 0000000000..47647e1278 --- /dev/null +++ b/test/js/node/test/parallel/test-path-glob.js @@ -0,0 +1,44 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); +const path = require('path'); + +const globs = { + win32: [ + ['foo\\bar\\baz', 'foo\\[bcr]ar\\baz', true], // Matches 'bar' or 'car' in 'foo\\bar' + ['foo\\bar\\baz', 'foo\\[!bcr]ar\\baz', false], // Matches anything except 'bar' or 'car' in 'foo\\bar' + ['foo\\bar\\baz', 'foo\\[bc-r]ar\\baz', true], // Matches 'bar' or 'car' using range in 'foo\\bar' + ['foo\\bar\\baz', 'foo\\*\\!bar\\*\\baz', false], // Matches anything with 'foo' and 'baz' but not 'bar' in between + ['foo\\bar1\\baz', 'foo\\bar[0-9]\\baz', true], // Matches 'bar' followed by any digit in 'foo\\bar1' + ['foo\\bar5\\baz', 'foo\\bar[0-9]\\baz', true], // Matches 'bar' followed by any digit in 'foo\\bar5' + ['foo\\barx\\baz', 'foo\\bar[a-z]\\baz', true], // Matches 'bar' followed by any lowercase letter in 'foo\\barx' + ['foo\\bar\\baz\\boo', 'foo\\[bc-r]ar\\baz\\*', true], // Matches 'bar' or 'car' in 'foo\\bar' + ['foo\\bar\\baz', 'foo/**', true], // Matches anything in 'foo' + ['foo\\bar\\baz', '*', false], // No match + ], + posix: [ + ['foo/bar/baz', 'foo/[bcr]ar/baz', true], // Matches 'bar' or 'car' in 'foo/bar' + ['foo/bar/baz', 'foo/[!bcr]ar/baz', false], // Matches anything except 'bar' or 'car' in 'foo/bar' + ['foo/bar/baz', 'foo/[bc-r]ar/baz', true], // Matches 'bar' or 'car' using range in 'foo/bar' + ['foo/bar/baz', 'foo/*/!bar/*/baz', false], // Matches anything with 'foo' and 'baz' but not 'bar' in between + ['foo/bar1/baz', 'foo/bar[0-9]/baz', true], // Matches 'bar' followed by any digit in 'foo/bar1' + ['foo/bar5/baz', 'foo/bar[0-9]/baz', true], // Matches 'bar' followed by any digit in 'foo/bar5' + ['foo/barx/baz', 'foo/bar[a-z]/baz', true], // Matches 'bar' followed by any lowercase letter in 'foo/barx' + ['foo/bar/baz/boo', 'foo/[bc-r]ar/baz/*', true], // Matches 'bar' or 'car' in 'foo/bar' + ['foo/bar/baz', 'foo/**', true], // Matches anything in 'foo' + ['foo/bar/baz', '*', false], // No match + ], +}; + + +for (const [platform, platformGlobs] of Object.entries(globs)) { + for (const [pathStr, glob, expected] of platformGlobs) { + const actual = path[platform].matchesGlob(pathStr, glob); + assert.strictEqual(actual, expected, `Expected ${pathStr} to ` + (expected ? 
'' : 'not ') + `match ${glob} on ${platform}`); + } +} + +// Test for non-string input +assert.throws(() => path.matchesGlob(123, 'foo/bar/baz'), /.*must be of type string.*/); +assert.throws(() => path.matchesGlob('foo/bar/baz', 123), /.*must be of type string.*/); diff --git a/test/js/node/test/parallel/test-process-assert.js b/test/js/node/test/parallel/test-process-assert.js new file mode 100644 index 0000000000..f740d3d70c --- /dev/null +++ b/test/js/node/test/parallel/test-process-assert.js @@ -0,0 +1,19 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); + +assert.strictEqual(process.assert(1, 'error'), undefined); +assert.throws(() => { + process.assert(undefined, 'errorMessage'); +}, { + code: 'ERR_ASSERTION', + name: 'Error', + message: 'errorMessage' +}); +assert.throws(() => { + process.assert(false); +}, { + code: 'ERR_ASSERTION', + name: 'Error', + message: 'assertion error' +}); diff --git a/test/js/node/test/parallel/test-process-available-memory.js b/test/js/node/test/parallel/test-process-available-memory.js new file mode 100644 index 0000000000..67de5b5e0b --- /dev/null +++ b/test/js/node/test/parallel/test-process-available-memory.js @@ -0,0 +1,5 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const availableMemory = process.availableMemory(); +assert.strictEqual(typeof availableMemory, 'number'); diff --git a/test/js/node/test/parallel/test-process-beforeexit-throw-exit.js b/test/js/node/test/parallel/test-process-beforeexit-throw-exit.js new file mode 100644 index 0000000000..6e9d764be9 --- /dev/null +++ b/test/js/node/test/parallel/test-process-beforeexit-throw-exit.js @@ -0,0 +1,12 @@ +'use strict'; +const common = require('../common'); +common.skipIfWorker(); + +// Test that 'exit' is emitted if 'beforeExit' throws. + +process.on('exit', common.mustCall(() => { + process.exitCode = 0; +})); +process.on('beforeExit', common.mustCall(() => { + throw new Error(); +})); diff --git a/test/js/node/test/parallel/test-process-beforeexit.js b/test/js/node/test/parallel/test-process-beforeexit.js new file mode 100644 index 0000000000..e04b756cad --- /dev/null +++ b/test/js/node/test/parallel/test-process-beforeexit.js @@ -0,0 +1,81 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
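+ +// The handlers below re-arm 'beforeExit' from inside one another: an +// immediate, then a timer, then a listening server, then repeated timers, +// and finally process.nextTick, checking that each mechanism can keep the +// event loop alive long enough for the next handler to be registered.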
+ +'use strict'; +const common = require('../common'); +const net = require('net'); + +process.once('beforeExit', common.mustCall(tryImmediate)); + +function tryImmediate() { + setImmediate(common.mustCall(() => { + process.once('beforeExit', common.mustCall(tryTimer)); + })); +} + +function tryTimer() { + setTimeout(common.mustCall(() => { + process.once('beforeExit', common.mustCall(tryListen)); + }), 1); +} + +function tryListen() { + net.createServer() + .listen(0) + .on('listening', common.mustCall(function() { + this.close(); + process.once('beforeExit', common.mustCall(tryRepeatedTimer)); + })); +} + +// Test that a function invoked from the beforeExit handler can use a timer +// to keep the event loop open, which can use another timer to keep the event +// loop open, etc. +// +// After N times, call function `tryNextTick` to test behaviors of the +// `process.nextTick`. +function tryRepeatedTimer() { + const N = 5; + let n = 0; + const repeatedTimer = common.mustCall(function() { + if (++n < N) + setTimeout(repeatedTimer, 1); + else // n == N + process.once('beforeExit', common.mustCall(tryNextTickSetImmediate)); + }, N); + setTimeout(repeatedTimer, 1); +} + +// Test if the callback of `process.nextTick` can be invoked. +function tryNextTickSetImmediate() { + process.nextTick(common.mustCall(function() { + setImmediate(common.mustCall(() => { + process.once('beforeExit', common.mustCall(tryNextTick)); + })); + })); +} + +// Test that `process.nextTick` won't keep the event loop running by itself. +function tryNextTick() { + process.nextTick(common.mustCall(function() { + process.once('beforeExit', common.mustNotCall()); + })); +} diff --git a/test/js/node/test/parallel/test-process-binding-util.js b/test/js/node/test/parallel/test-process-binding-util.js new file mode 100644 index 0000000000..a834676e05 --- /dev/null +++ b/test/js/node/test/parallel/test-process-binding-util.js @@ -0,0 +1,58 @@ +'use strict'; +require('../common'); +const assert = require('assert'); +const util = require('util'); + +const utilBinding = process.binding('util'); +assert.deepStrictEqual( + Object.keys(utilBinding).sort(), + [ + 'isAnyArrayBuffer', + 'isArgumentsObject', + 'isArrayBuffer', + 'isArrayBufferView', + 'isAsyncFunction', + 'isBigInt64Array', + 'isBigIntObject', + 'isBigUint64Array', + 'isBooleanObject', + 'isBoxedPrimitive', + 'isCryptoKey', + 'isDataView', + 'isDate', + 'isEventTarget', + 'isExternal', + 'isFloat16Array', + 'isFloat32Array', + 'isFloat64Array', + 'isGeneratorFunction', + 'isGeneratorObject', + 'isInt16Array', + 'isInt32Array', + 'isInt8Array', + 'isKeyObject', + 'isMap', + 'isMapIterator', + 'isModuleNamespaceObject', + 'isNativeError', + 'isNumberObject', + 'isPromise', + 'isProxy', + 'isRegExp', + 'isSet', + 'isSetIterator', + 'isSharedArrayBuffer', + 'isStringObject', + 'isSymbolObject', + 'isTypedArray', + 'isUint16Array', + 'isUint32Array', + 'isUint8Array', + 'isUint8ClampedArray', + 'isWeakMap', + 'isWeakSet', + ]); + +for (const k of Object.keys(utilBinding)) { + assert.strictEqual(utilBinding[k], util.types[k]); +} diff --git a/test/js/node/test/parallel/test-process-chdir-errormessage.js b/test/js/node/test/parallel/test-process-chdir-errormessage.js new file mode 100644 index 0000000000..16cdf4aa1d --- /dev/null +++ b/test/js/node/test/parallel/test-process-chdir-errormessage.js @@ -0,0 +1,20 @@ +'use strict'; + +const common = require('../common'); +if (!common.isMainThread) + common.skip('process.chdir is not available in Workers'); +const assert = 
require('assert'); + +assert.throws( + () => { + process.chdir('does-not-exist'); + }, + { + name: 'Error', + code: 'ENOENT', + // message: /ENOENT: No such file or directory, chdir .+ -> 'does-not-exist'/, + path: process.cwd(), + syscall: 'chdir', + dest: 'does-not-exist' + } +); diff --git a/test/js/node/test/parallel/test-process-chdir.js b/test/js/node/test/parallel/test-process-chdir.js new file mode 100644 index 0000000000..ee59df853b --- /dev/null +++ b/test/js/node/test/parallel/test-process-chdir.js @@ -0,0 +1,44 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const fs = require('fs'); +const path = require('path'); + +if (!common.isMainThread) + common.skip('process.chdir is not available in Workers'); + +const tmpdir = require('../common/tmpdir'); + +process.chdir('..'); +assert.notStrictEqual(process.cwd(), __dirname); +process.chdir(__dirname); +assert.strictEqual(process.cwd(), __dirname); + +let dirName; +if (process.versions.icu) { + // ICU is available, use characters that could possibly be decomposed + dirName = 'weird \uc3a4\uc3ab\uc3af characters \u00e1\u00e2\u00e3'; +} else { + // ICU is unavailable, use characters that can't be decomposed + dirName = 'weird \ud83d\udc04 characters \ud83d\udc05'; +} +const dir = tmpdir.resolve(dirName); + +// Make sure that the tmp directory is clean +tmpdir.refresh(); + +fs.mkdirSync(dir); +process.chdir(dir); +assert.strictEqual(process.cwd().normalize(), dir.normalize()); + +process.chdir('..'); +assert.strictEqual(process.cwd().normalize(), + path.resolve(tmpdir.path).normalize()); + +const err = { + code: 'ERR_INVALID_ARG_TYPE', + message: /The "directory" argument must be of type string/ +}; +assert.throws(function() { process.chdir({}); }, err); +assert.throws(function() { process.chdir(); }, err); diff --git a/test/js/node/test/parallel/test-process-config.js b/test/js/node/test/parallel/test-process-config.js new file mode 100644 index 0000000000..20ebc36a99 --- /dev/null +++ b/test/js/node/test/parallel/test-process-config.js @@ -0,0 +1,69 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +const common = require('../common'); + +// Checks that the internal process.config is equivalent to the config.gypi file +// created when we run configure. 
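+// Note: when no config.gypi is shipped alongside the binary, the deep +// comparison below is skipped and only the shape and immutability checks run.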
+ +const assert = require('assert'); +const fs = require('fs'); +const path = require('path'); + +// Check for existence of `process.config`. +assert(Object.hasOwn(process, 'config')); + +// Ensure that `process.config` is an Object. +assert.strictEqual(Object(process.config), process.config); + +// Ensure that you can't change config values +assert.throws(() => { process.config.variables = 42; }, TypeError); + +const configPath = path.resolve(__dirname, '..', '..', 'config.gypi'); + +if (!fs.existsSync(configPath)) { + common.skip('config.gypi does not exist.'); +} + +let config = fs.readFileSync(configPath, 'utf8'); + +// Clean up comment at the first line. +config = config.split('\n').slice(1).join('\n'); +config = config.replace(/"/g, '\\"'); +config = config.replace(/'/g, '"'); +config = JSON.parse(config, (key, value) => { + if (value === 'true') return true; + if (value === 'false') return false; + return value; +}); + +try { + assert.deepStrictEqual(config, process.config); +} catch (e) { + // If the assert fails, it only shows 3 lines. We need all the output to + // compare. + console.log('config:', config); + console.log('process.config:', process.config); + + throw e; +} diff --git a/test/js/node/test/parallel/test-process-constrained-memory.js b/test/js/node/test/parallel/test-process-constrained-memory.js new file mode 100644 index 0000000000..03f99b166f --- /dev/null +++ b/test/js/node/test/parallel/test-process-constrained-memory.js @@ -0,0 +1,6 @@ +'use strict'; +require('../common'); +const assert = require('assert'); + +const constrainedMemory = process.constrainedMemory(); +assert.strictEqual(typeof constrainedMemory, 'number'); diff --git a/test/js/node/test/parallel/test-process-cpuUsage.js b/test/js/node/test/parallel/test-process-cpuUsage.js new file mode 100644 index 0000000000..f1580d5f09 --- /dev/null +++ b/test/js/node/test/parallel/test-process-cpuUsage.js @@ -0,0 +1,118 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const result = process.cpuUsage(); + +// Validate the result of calling with no previous value argument. +validateResult(result); + +// Validate the result of calling with a previous value argument. +validateResult(process.cpuUsage(result)); + +// Ensure the results are >= the previous. +let thisUsage; +let lastUsage = process.cpuUsage(); +for (let i = 0; i < 10; i++) { + thisUsage = process.cpuUsage(); + validateResult(thisUsage); + assert(thisUsage.user >= lastUsage.user); + assert(thisUsage.system >= lastUsage.system); + lastUsage = thisUsage; +} + +// Ensure that the diffs are >= 0. +let startUsage; +let diffUsage; +for (let i = 0; i < 10; i++) { + startUsage = process.cpuUsage(); + diffUsage = process.cpuUsage(startUsage); + validateResult(startUsage); + validateResult(diffUsage); + assert(diffUsage.user >= 0); + assert(diffUsage.system >= 0); +} + +// Ensure that an invalid shape for the previous value argument throws an error. +assert.throws( + () => process.cpuUsage(1), + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "prevValue" argument must be of type object. ' + + 'Received type number (1)' + } +); + +// Check invalid types. +[ + {}, + { user: 'a' }, + { user: null, system: 'c' }, +].forEach((value) => { + assert.throws( + () => process.cpuUsage(value), + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "prevValue.user" property must be of type number.' 
+ + common.invalidArgTypeHelper(value.user) + } + ); +}); + +[ + { user: 3, system: 'b' }, + { user: 3, system: null }, +].forEach((value) => { + assert.throws( + () => process.cpuUsage(value), + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "prevValue.system" property must be of type number.' + + common.invalidArgTypeHelper(value.system) + } + ); +}); + +// Check invalid values. +[ + { user: -1, system: 2 }, + { user: Number.POSITIVE_INFINITY, system: 4 }, +].forEach((value) => { + assert.throws( + () => process.cpuUsage(value), + { + code: 'ERR_INVALID_ARG_VALUE', + name: 'RangeError', + message: "The property 'prevValue.user' is invalid. " + + `Received ${value.user}`, + } + ); +}); + +[ + { user: 3, system: -2 }, + { user: 5, system: Number.NEGATIVE_INFINITY }, +].forEach((value) => { + assert.throws( + () => process.cpuUsage(value), + { + code: 'ERR_INVALID_ARG_VALUE', + name: 'RangeError', + message: "The property 'prevValue.system' is invalid. " + + `Received ${value.system}`, + } + ); +}); + +// Ensure that the return value is the expected shape. +function validateResult(result) { + assert.notStrictEqual(result, null); + + assert(Number.isFinite(result.user)); + assert(Number.isFinite(result.system)); + + assert(result.user >= 0); + assert(result.system >= 0); +} diff --git a/test/js/node/test/parallel/test-process-dlopen-error-message-crash.js b/test/js/node/test/parallel/test-process-dlopen-error-message-crash.js new file mode 100644 index 0000000000..cc93e01abd --- /dev/null +++ b/test/js/node/test/parallel/test-process-dlopen-error-message-crash.js @@ -0,0 +1,47 @@ +'use strict'; + +// This is a regression test for some scenarios in which node would pass +// unsanitized user input to a printf-like formatting function when dlopen +// fails, potentially crashing the process. + +const common = require('../common'); +if (common.isWindows) return; // TODO: BUN +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); + +const assert = require('assert'); +const fs = require('fs'); + +// This error message should not be passed to a printf-like function. +assert.throws(() => { + process.dlopen({ exports: {} }, 'foo-%s.node'); +}, ({ name, code, message }) => { + assert.strictEqual(name, 'Error'); + assert.strictEqual(code, 'ERR_DLOPEN_FAILED'); + if (!common.isAIX && !common.isIBMi) { + assert.match(message, /foo-%s\.node/); + } + return true; +}); + +const notBindingDir = 'test/addons/not-a-binding'; +const notBindingPath = `${notBindingDir}/build/Release/binding.node`; +const strangeBindingPath = `${tmpdir.path}/binding-%s.node`; +// Ensure that the addon directory exists, but skip the remainder of the test if +// the addon has not been compiled. +// fs.accessSync(notBindingDir); +// try { +// fs.copyFileSync(notBindingPath, strangeBindingPath); +// } catch (err) { +// if (err.code !== 'ENOENT') throw err; +// common.skip(`addon not found: ${notBindingPath}`); +// } + +// This error message should also not be passed to a printf-like function. 
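+// strangeBindingPath still contains a literal %s. The copy step above is +// commented out here, so the file usually does not exist; dlopen must still +// fail with ERR_DLOPEN_FAILED and preserve the %s in its message.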
+assert.throws(() => { + process.dlopen({ exports: {} }, strangeBindingPath); +}, { + name: 'Error', + code: 'ERR_DLOPEN_FAILED', + message: /binding-%s\.node/ +}); diff --git a/test/js/node/test/parallel/test-process-emitwarning.js b/test/js/node/test/parallel/test-process-emitwarning.js new file mode 100644 index 0000000000..e1c7473f8a --- /dev/null +++ b/test/js/node/test/parallel/test-process-emitwarning.js @@ -0,0 +1,81 @@ +// Flags: --no-warnings +// The flag suppresses stderr output but the warning event will still emit +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const testMsg = 'A Warning'; +const testCode = 'CODE001'; +const testDetail = 'Some detail'; +const testType = 'CustomWarning'; + +process.on('warning', common.mustCall((warning) => { + assert(warning); + assert.match(warning.name, /^(?:Warning|CustomWarning)/); + assert.strictEqual(warning.message, testMsg); + if (warning.code) assert.strictEqual(warning.code, testCode); + if (warning.detail) assert.strictEqual(warning.detail, testDetail); +}, 15)); + +class CustomWarning extends Error { + constructor() { + super(); + this.name = testType; + this.message = testMsg; + this.code = testCode; + Error.captureStackTrace(this, CustomWarning); + } +} + +[ + [testMsg], + [testMsg, testType], + [testMsg, CustomWarning], + [testMsg, testType, CustomWarning], + [testMsg, testType, testCode], + [testMsg, { type: testType }], + [testMsg, { type: testType, code: testCode }], + [testMsg, { type: testType, code: testCode, detail: testDetail }], + [new CustomWarning()], + // Detail will be ignored for the following. No errors thrown + [testMsg, { type: testType, code: testCode, detail: true }], + [testMsg, { type: testType, code: testCode, detail: [] }], + [testMsg, { type: testType, code: testCode, detail: null }], + [testMsg, { type: testType, code: testCode, detail: 1 }], +].forEach((args) => { + process.emitWarning(...args); +}); + +const warningNoToString = new CustomWarning(); +warningNoToString.toString = null; +process.emitWarning(warningNoToString); + +const warningThrowToString = new CustomWarning(); +warningThrowToString.toString = function() { + throw new Error('invalid toString'); +}; +process.emitWarning(warningThrowToString); + +// TypeError is thrown on invalid input +[ + [1], + [{}], + [true], + [[]], + ['', '', {}], + ['', 1], + ['', '', 1], + ['', true], + ['', '', true], + ['', []], + ['', '', []], + [], + [undefined, 'foo', 'bar'], + [undefined], +].forEach((args) => { + assert.throws( + () => process.emitWarning(...args), + { code: 'ERR_INVALID_ARG_TYPE', name: 'TypeError' } + ); +}); diff --git a/test/js/node/test/parallel/test-process-euid-egid.js b/test/js/node/test/parallel/test-process-euid-egid.js new file mode 100644 index 0000000000..06854ba3f5 --- /dev/null +++ b/test/js/node/test/parallel/test-process-euid-egid.js @@ -0,0 +1,70 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +if (common.isWindows) { + assert.strictEqual(process.geteuid, undefined); + assert.strictEqual(process.getegid, undefined); + assert.strictEqual(process.seteuid, undefined); + assert.strictEqual(process.setegid, undefined); + return; +} + +if (!common.isMainThread) + return; + +assert.throws(() => { + process.seteuid({}); +}, { + code: 'ERR_INVALID_ARG_TYPE', + message: 'The "id" argument must be of type number or string. 
' + + 'Received an instance of Object' +}); + +assert.throws(() => { + process.seteuid('fhqwhgadshgnsdhjsdbkhsdabkfabkveyb'); +}, { + code: 'ERR_UNKNOWN_CREDENTIAL', + message: 'User identifier does not exist: fhqwhgadshgnsdhjsdbkhsdabkfabkveyb' +}); + +// IBMi does not support below operations. +if (common.isIBMi) + return; + +// If we're not running as super user... +if (process.getuid() !== 0) { + // Should not throw. + process.getegid(); + process.geteuid(); + + assert.throws(() => { + process.setegid('nobody'); + }, /(?:EPERM: .+|Group identifier does not exist: nobody)$/); + + assert.throws(() => { + process.seteuid('nobody'); + }, /(?:EPERM: .+|User identifier does not exist: nobody)$/); + + return; +} + +// If we are running as super user... +const oldgid = process.getegid(); +try { + process.setegid('nobody'); +} catch (err) { + if (err.message !== 'Group identifier does not exist: nobody') { + throw err; + } else { + process.setegid('nogroup'); + } +} +const newgid = process.getegid(); +assert.notStrictEqual(newgid, oldgid); + +const olduid = process.geteuid(); +process.seteuid('nobody'); +const newuid = process.geteuid(); +assert.notStrictEqual(newuid, olduid); diff --git a/test/js/node/test/parallel/test-process-exception-capture-errors.js b/test/js/node/test/parallel/test-process-exception-capture-errors.js new file mode 100644 index 0000000000..8eb825267c --- /dev/null +++ b/test/js/node/test/parallel/test-process-exception-capture-errors.js @@ -0,0 +1,24 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); + +assert.throws( + () => process.setUncaughtExceptionCaptureCallback(42), + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "fn" argument must be of type function or null. 
' + 'Received type number (42)' } ); + +process.setUncaughtExceptionCaptureCallback(common.mustNotCall()); + +assert.throws( + () => process.setUncaughtExceptionCaptureCallback(common.mustNotCall()), + { + code: 'ERR_UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET', + name: 'Error', + message: /setupUncaughtExceptionCapture.*called while a capture callback/ + } +); diff --git a/test/js/node/test/parallel/test-process-exit-code-validation.js b/test/js/node/test/parallel/test-process-exit-code-validation.js new file mode 100644 index 0000000000..59934fa31d --- /dev/null +++ b/test/js/node/test/parallel/test-process-exit-code-validation.js @@ -0,0 +1,145 @@ +'use strict'; + +require('../common'); + +const invalids = [ + { + code: '', + expected: 1, + pattern: 'Received type string \\(""\\)$', + }, + { + code: '1 one', + expected: 1, + pattern: 'Received type string \\("1 one"\\)$', + }, + { + code: 'two', + expected: 1, + pattern: 'Received type string \\("two"\\)$', + }, + { + code: {}, + expected: 1, + pattern: 'Received an instance of Object$', + }, + { + code: [], + expected: 1, + pattern: 'Received an instance of Array$', + }, + { + code: true, + expected: 1, + pattern: 'Received type boolean \\(true\\)$', + }, + { + code: false, + expected: 1, + pattern: 'Received type boolean \\(false\\)$', + }, + { + code: 2n, + expected: 1, + pattern: 'Received type bigint \\(2n\\)$', + }, + { + code: 2.1, + expected: 1, + pattern: 'Received 2.1$', + }, + { + code: Infinity, + expected: 1, + pattern: 'Received Infinity$', + }, + { + code: NaN, + expected: 1, + pattern: 'Received NaN$', + }, +]; +const valids = [ + { + code: 1, + expected: 1, + }, + { + code: '2', + expected: 2, + }, + { + code: undefined, + expected: 0, + }, + { + code: null, + expected: 0, + }, + { + code: 0, + expected: 0, + }, + { + code: '0', + expected: 0, + }, +]; +const args = [...invalids, ...valids]; + +if (process.argv[2] === undefined) { + const { spawnSync } = require('node:child_process'); + const { inspect, debuglog } = require('node:util'); + const { throws, strictEqual } = require('node:assert'); + + const debug = debuglog('test'); + const node = process.execPath; + const test = (index, useProcessExitCode) => { + const { status: code } = spawnSync(node, [ + __filename, + index, + useProcessExitCode, + ]); + console.log(`actual: ${code}, ${args[index].expected} ${index} ${!!useProcessExitCode} ${args[index].code}`); + debug(`actual: ${code}, ${inspect(args[index])} ${!!useProcessExitCode}`); + strictEqual( + code, + args[index].expected, + `actual: ${code}, ${inspect(args[index])}` + ); + }; + + // Check process.exitCode + for (const arg of invalids) { + debug(`invalid code: ${inspect(arg.code)}`); + throws(() => (process.exitCode = arg.code), new RegExp(arg.pattern)); + } + for (const arg of valids) { + debug(`valid code: ${inspect(arg.code)}`); + process.exitCode = arg.code; + } + + throws(() => { + delete process.exitCode; + // }, /Cannot delete property 'exitCode' of #/); + }, /Unable to delete property./); + process.exitCode = 0; + + // Check process.exit([code]) + for (const index of args.keys()) { + test(index); + test(index, true); + } +} else { + const index = parseInt(process.argv[2]); + const useProcessExitCode = process.argv[3] !== 'undefined'; + if (Number.isNaN(index)) { + return process.exit(100); + } + + if (useProcessExitCode) { + process.exitCode = args[index].code; + } else { + process.exit(args[index].code); + } +} diff --git a/test/js/node/test/parallel/test-process-hrtime.js
b/test/js/node/test/parallel/test-process-hrtime.js new file mode 100644 index 0000000000..34ef514aac --- /dev/null +++ b/test/js/node/test/parallel/test-process-hrtime.js @@ -0,0 +1,74 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; +require('../common'); +const assert = require('assert'); + +// The default behavior, return an Array "tuple" of numbers +const tuple = process.hrtime(); + +// Validate the default behavior +validateTuple(tuple); + +// Validate that passing an existing tuple returns another valid tuple +validateTuple(process.hrtime(tuple)); + +// Test that only an Array may be passed to process.hrtime() +assert.throws(() => { + process.hrtime(1); +}, { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "time" argument must be an instance of Array. Received type ' + + 'number (1)' +}); +assert.throws(() => { + process.hrtime([]); +}, { + code: 'ERR_OUT_OF_RANGE', + name: 'RangeError', + message: 'The value of "time" is out of range. It must be 2. Received 0' +}); +assert.throws(() => { + process.hrtime([1]); +}, { + code: 'ERR_OUT_OF_RANGE', + name: 'RangeError', + message: 'The value of "time" is out of range. It must be 2. Received 1' +}); +assert.throws(() => { + process.hrtime([1, 2, 3]); +}, { + code: 'ERR_OUT_OF_RANGE', + name: 'RangeError', + message: 'The value of "time" is out of range. It must be 2. Received 3' +}); + +function validateTuple(tuple) { + assert(Array.isArray(tuple)); + assert.strictEqual(tuple.length, 2); + assert(Number.isInteger(tuple[0])); + assert(Number.isInteger(tuple[1])); +} + +const diff = process.hrtime([0, 1e9 - 1]); +assert(diff[1] >= 0); // https://github.com/nodejs/node/issues/4751 diff --git a/test/js/node/test/parallel/test-process-kill-pid.js b/test/js/node/test/parallel/test-process-kill-pid.js new file mode 100644 index 0000000000..1fa1d6c2ab --- /dev/null +++ b/test/js/node/test/parallel/test-process-kill-pid.js @@ -0,0 +1,116 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; +const common = require('../common'); +if (common.isWindows) return; // TODO: BUN +const assert = require('assert'); + +// Test variants of pid +// +// null: TypeError +// undefined: TypeError +// +// 'SIGTERM': TypeError +// +// String(process.pid): TypeError +// +// NaN, Infinity, -Infinity: TypeError +// +// 0, String(0): our group process +// +// process.pid, String(process.pid): ourself + +assert.throws(() => process.kill('SIGTERM'), { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "pid" argument must be of type number. Received type string ("SIGTERM")' +}); + +[null, undefined, NaN, Infinity, -Infinity].forEach((val) => { + assert.throws(() => process.kill(val), { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "pid" argument must be of type number.' + + common.invalidArgTypeHelper(val) + }); +}); + +// Test that kill throws an error for unknown signal names +assert.throws(() => process.kill(0, 'test'), { + code: 'ERR_UNKNOWN_SIGNAL', + name: 'TypeError', + message: 'Unknown signal: test' +}); + +// Test that kill throws an error for invalid signal numbers +assert.throws(() => process.kill(0, 987), { + code: 'EINVAL', + name: 'SystemError', + message: 'kill() failed: EINVAL: Invalid argument' +}); + +// Test kill argument processing in valid cases. +// +// Monkey patch _kill so that we don't actually send any signals, particularly +// that we don't kill our process group, or try to actually send ANY signals on +// windows, which doesn't support them. +function kill(tryPid, trySig, expectPid, expectSig) { + let getPid; + let getSig; + const origKill = process._kill; + process._kill = function(pid, sig) { + getPid = pid; + getSig = sig; + + // un-monkey patch process._kill + process._kill = origKill; + }; + + process.kill(tryPid, trySig); + + assert.strictEqual(getPid.toString(), expectPid.toString()); + assert.strictEqual(getSig, expectSig); +} + +// Note that SIGHUP and SIGTERM map to 1 and 15 respectively, even on Windows +// (for Windows, libuv maps 1 and 15 to the correct behavior).
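+// Each kill() call below passes its first two arguments to process.kill and +// asserts that the monkey-patched process._kill observed the third and fourth +// (the expected numeric pid and signal).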
+ +kill(0, 'SIGHUP', 0, 1); +kill(0, undefined, 0, 15); +kill('0', 'SIGHUP', 0, 1); +kill('0', undefined, 0, 15); + +// Confirm that numeric signal arguments are supported + +kill(0, 1, 0, 1); +kill(0, 15, 0, 15); + +// Negative numbers are meaningful on unix +kill(-1, 'SIGHUP', -1, 1); +kill(-1, undefined, -1, 15); +kill('-1', 'SIGHUP', -1, 1); +kill('-1', undefined, -1, 15); + +kill(process.pid, 'SIGHUP', process.pid, 1); +kill(process.pid, undefined, process.pid, 15); +kill(String(process.pid), 'SIGHUP', process.pid, 1); +kill(String(process.pid), undefined, process.pid, 15); diff --git a/test/js/node/test/parallel/test-process-no-deprecation.js b/test/js/node/test/parallel/test-process-no-deprecation.js new file mode 100644 index 0000000000..bcda99de25 --- /dev/null +++ b/test/js/node/test/parallel/test-process-no-deprecation.js @@ -0,0 +1,32 @@ +'use strict'; +// Flags: --no-warnings + +// The --no-warnings flag only suppresses writing the warning to stderr, not the +// emission of the corresponding event. This test file can be run without it. + +const common = require('../common'); +process.noDeprecation = true; + +const assert = require('assert'); + +function listener() { + assert.fail('received unexpected warning'); +} + +process.addListener('warning', listener); + +process.emitWarning('Something is deprecated.', 'DeprecationWarning'); + +// The warning would be emitted in the next tick, so continue after that. +process.nextTick(common.mustCall(() => { + // Check that deprecations can be re-enabled. + process.noDeprecation = false; + process.removeListener('warning', listener); + + process.addListener('warning', common.mustCall((warning) => { + assert.strictEqual(warning.name, 'DeprecationWarning'); + assert.strictEqual(warning.message, 'Something else is deprecated.'); + })); + + process.emitWarning('Something else is deprecated.', 'DeprecationWarning'); +})); diff --git a/test/js/node/test/parallel/test-process-really-exit.js b/test/js/node/test/parallel/test-process-really-exit.js new file mode 100644 index 0000000000..8445d220ca --- /dev/null +++ b/test/js/node/test/parallel/test-process-really-exit.js @@ -0,0 +1,17 @@ +'use strict'; +require('../common'); +const assert = require('assert'); + +// Ensure that the reallyExit hook is executed. 
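+// process.exit() funnels through process.reallyExit, so replacing it lets the +// subprocess observe the exit path without actually terminating there.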
+// see: https://github.com/nodejs/node/issues/25650 +if (process.argv[2] === 'subprocess') { + process.reallyExit = function() { + console.info('really exited'); + }; + process.exit(); +} else { + const { spawnSync } = require('child_process'); + const out = spawnSync(process.execPath, [__filename, 'subprocess']); + const observed = out.output[1].toString('utf8').trim(); + assert.strictEqual(observed, 'really exited'); +} diff --git a/test/js/node/test/parallel/test-process-release.js b/test/js/node/test/parallel/test-process-release.js new file mode 100644 index 0000000000..98a089a8f9 --- /dev/null +++ b/test/js/node/test/parallel/test-process-release.js @@ -0,0 +1,32 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const versionParts = process.versions.node.split('.'); + +assert.strictEqual(process.release.name, 'node'); + +// It's expected that future LTS release lines will have additional +// branches in here +if (versionParts[0] === '4' && versionParts[1] >= 2) { + assert.strictEqual(process.release.lts, 'Argon'); +} else if (versionParts[0] === '6' && versionParts[1] >= 9) { + assert.strictEqual(process.release.lts, 'Boron'); +} else if (versionParts[0] === '8' && versionParts[1] >= 9) { + assert.strictEqual(process.release.lts, 'Carbon'); +} else if (versionParts[0] === '10' && versionParts[1] >= 13) { + assert.strictEqual(process.release.lts, 'Dubnium'); +} else if (versionParts[0] === '12' && versionParts[1] >= 13) { + assert.strictEqual(process.release.lts, 'Erbium'); +} else if (versionParts[0] === '14' && versionParts[1] >= 15) { + assert.strictEqual(process.release.lts, 'Fermium'); +} else if (versionParts[0] === '16' && versionParts[1] >= 13) { + assert.strictEqual(process.release.lts, 'Gallium'); +} else if (versionParts[0] === '18' && versionParts[1] >= 12) { + assert.strictEqual(process.release.lts, 'Hydrogen'); +} else if (versionParts[0] === '20' && versionParts[1] >= 9) { + assert.strictEqual(process.release.lts, 'Iron'); +} else { + assert.strictEqual(process.release.lts, undefined); +} diff --git a/test/js/node/test/parallel/test-process-setgroups.js b/test/js/node/test/parallel/test-process-setgroups.js new file mode 100644 index 0000000000..c26b5dbaf1 --- /dev/null +++ b/test/js/node/test/parallel/test-process-setgroups.js @@ -0,0 +1,55 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); + +if (common.isWindows) { + assert.strictEqual(process.setgroups, undefined); + return; +} + +if (!common.isMainThread) + return; + +assert.throws( + () => { + process.setgroups(); + }, + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "groups" argument must be an instance of Array. ' + + 'Received undefined' + } +); + +assert.throws( + () => { + process.setgroups([1, -1]); + }, + { + code: 'ERR_OUT_OF_RANGE', + name: 'RangeError', + } +); + +[undefined, null, true, {}, [], () => {}].forEach((val) => { + assert.throws( + () => { + process.setgroups([val]); + }, + { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "groups[0]" argument must be ' + + 'of type number or string.' 
+ + common.invalidArgTypeHelper(val) + } + ); +}); + +assert.throws(() => { + process.setgroups([1, 'fhqwhgadshgnsdhjsdbkhsdabkfabkveyb']); +}, { + code: 'ERR_UNKNOWN_CREDENTIAL', + message: 'Group identifier does not exist: fhqwhgadshgnsdhjsdbkhsdabkfabkveyb' +}); diff --git a/test/js/node/test/parallel/test-process-title-cli.js b/test/js/node/test/parallel/test-process-title-cli.js new file mode 100644 index 0000000000..98b3da003f --- /dev/null +++ b/test/js/node/test/parallel/test-process-title-cli.js @@ -0,0 +1,17 @@ +// Flags: --title=foo +'use strict'; + +const common = require('../common'); +if (common.isWindows) return; // TODO: BUN + +if (common.isSunOS) + common.skip(`Unsupported platform [${process.platform}]`); + +if (common.isIBMi) + common.skip('Unsupported platform IBMi'); + +const assert = require('assert'); + +// Verifies that the --title=foo command line flag sets the process +// title on startup. +assert.strictEqual(process.title, 'foo'); diff --git a/test/js/node/test/parallel/test-process-uid-gid.js b/test/js/node/test/parallel/test-process-uid-gid.js new file mode 100644 index 0000000000..0e8e0e89a0 --- /dev/null +++ b/test/js/node/test/parallel/test-process-uid-gid.js @@ -0,0 +1,100 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; +const common = require('../common'); + +const assert = require('assert'); + +if (common.isWindows) { + // uid/gid functions are POSIX only. + assert.strictEqual(process.getuid, undefined); + assert.strictEqual(process.getgid, undefined); + assert.strictEqual(process.setuid, undefined); + assert.strictEqual(process.setgid, undefined); + return; +} + +if (!common.isMainThread) + return; + +assert.throws(() => { + process.setuid({}); +}, { + code: 'ERR_INVALID_ARG_TYPE', + message: 'The "id" argument must be of type ' + + 'number or string. Received an instance of Object' +}); + +assert.throws(() => { + process.setuid('fhqwhgadshgnsdhjsdbkhsdabkfabkveyb'); +}, { + code: 'ERR_UNKNOWN_CREDENTIAL', + message: 'User identifier does not exist: fhqwhgadshgnsdhjsdbkhsdabkfabkveyb' +}); + +// Passing -0 shouldn't crash the process +// Refs: https://github.com/nodejs/node/issues/32750 +// And neither should values exceeding 2 ** 31 - 1. +for (const id of [-0, 2 ** 31, 2 ** 32 - 1]) { + for (const fn of [process.setuid, process.seteuid, process.setgid, process.setegid]) { + try { fn(id); } catch { + // Continue regardless of error.
+ } + } +} + +// If we're not running as super user... +if (process.getuid() !== 0) { + // Should not throw. + process.getgid(); + process.getuid(); + + assert.throws( + () => { process.setgid('nobody'); }, + /(?:EPERM: .+|Group identifier does not exist: nobody)$/ + ); + + assert.throws( + () => { process.setuid('nobody'); }, + /(?:EPERM: .+|User identifier does not exist: nobody)$/ + ); + return; +} + +// If we are running as super user... +const oldgid = process.getgid(); +try { + process.setgid('nobody'); +} catch (err) { + if (err.code !== 'ERR_UNKNOWN_CREDENTIAL') { + throw err; + } + process.setgid('nogroup'); +} + +const newgid = process.getgid(); +assert.notStrictEqual(newgid, oldgid); + +const olduid = process.getuid(); +process.setuid('nobody'); +const newuid = process.getuid(); +assert.notStrictEqual(newuid, olduid); diff --git a/test/js/node/test/parallel/test-process-umask-mask.js b/test/js/node/test/parallel/test-process-umask-mask.js new file mode 100644 index 0000000000..d599379761 --- /dev/null +++ b/test/js/node/test/parallel/test-process-umask-mask.js @@ -0,0 +1,32 @@ +'use strict'; + +// This tests that the lower bits of mode > 0o777 still works in +// process.umask() + +const common = require('../common'); +const assert = require('assert'); + +if (!common.isMainThread) + common.skip('Setting process.umask is not supported in Workers'); + +let mask; + +if (common.isWindows) { + mask = 0o600; +} else { + mask = 0o664; +} + +const maskToIgnore = 0o10000; + +const old = process.umask(); + +function test(input, output) { + process.umask(input); + assert.strictEqual(process.umask(), output); + + process.umask(old); +} + +test(mask | maskToIgnore, mask); +test((mask | maskToIgnore).toString(8), mask); diff --git a/test/js/node/test/parallel/test-process-umask.js b/test/js/node/test/parallel/test-process-umask.js new file mode 100644 index 0000000000..e90955f394 --- /dev/null +++ b/test/js/node/test/parallel/test-process-umask.js @@ -0,0 +1,65 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; +const common = require('../common'); +const assert = require('assert'); + +if (!common.isMainThread) { + assert.strictEqual(typeof process.umask(), 'number'); + assert.throws(() => { + process.umask('0664'); + }, { code: 'ERR_WORKER_UNSUPPORTED_OPERATION' }); + + common.skip('Setting process.umask is not supported in Workers'); +} + +// Note in Windows one can only set the "user" bits. 
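+// Hence the Windows branch below round-trips a user-bits-only mask ('0600'), +// while POSIX platforms also exercise the group bits ('0664').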
+let mask; +if (common.isWindows) { + mask = '0600'; +} else { + mask = '0664'; +} + +const old = process.umask(mask); + +assert.strictEqual(process.umask(old), parseInt(mask, 8)); + +// Confirm reading the umask does not modify it. +// 1. If the test fails, this call will succeed, but the mask will be set to 0 +assert.strictEqual(process.umask(), old); +// 2. If the test fails, process.umask() will return 0 +assert.strictEqual(process.umask(), old); + +assert.throws(() => { + process.umask({}); +}, { + code: 'ERR_INVALID_ARG_TYPE', +}); + +['123x', 'abc', '999'].forEach((value) => { + assert.throws(() => { + process.umask(value); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); diff --git a/test/js/node/test/parallel/test-process-warning.js b/test/js/node/test/parallel/test-process-warning.js new file mode 100644 index 0000000000..c1fbbf775f --- /dev/null +++ b/test/js/node/test/parallel/test-process-warning.js @@ -0,0 +1,68 @@ +'use strict'; + +const common = require('../common'); +const { + hijackStderr, + restoreStderr +} = require('../common/hijackstdio'); +const assert = require('assert'); + +function test1() { + // Output is skipped if the argument to the 'warning' event is + // not an Error object. + hijackStderr(common.mustNotCall('stderr.write must not be called')); + process.emit('warning', 'test'); + setImmediate(test2); +} + +function test2() { + // Output is skipped if it's a deprecation warning and + // process.noDeprecation = true + process.noDeprecation = true; + process.emitWarning('test', 'DeprecationWarning'); + process.noDeprecation = false; + setImmediate(test3); +} + +function test3() { + restoreStderr(); + // Type defaults to warning when the second argument is an object + process.emitWarning('test', {}); + process.once('warning', common.mustCall((warning) => { + assert.strictEqual(warning.name, 'Warning'); + })); + setImmediate(test4); +} + +function test4() { + // process.emitWarning will throw when process.throwDeprecation is true + // and type is `DeprecationWarning`. + process.throwDeprecation = true; + process.once('uncaughtException', (err) => { + assert.match(err.toString(), /^DeprecationWarning: test$/); + }); + try { + process.emitWarning('test', 'DeprecationWarning'); + } catch { + assert.fail('Unreachable'); + } + process.throwDeprecation = false; + setImmediate(test5); +} + +function test5() { + // Setting toString to a non-function should not cause an error + const err = new Error('test'); + err.toString = 1; + process.emitWarning(err); + setImmediate(test6); +} + +function test6() { + process.emitWarning('test', { detail: 'foo' }); + process.on('warning', (warning) => { + assert.strictEqual(warning.detail, 'foo'); + }); +} + +test1(); diff --git a/test/js/node/test/parallel/test-queue-microtask-uncaught-asynchooks.js b/test/js/node/test/parallel/test-queue-microtask-uncaught-asynchooks.js deleted file mode 100644 index 35b3d9fa30..0000000000 --- a/test/js/node/test/parallel/test-queue-microtask-uncaught-asynchooks.js +++ /dev/null @@ -1,36 +0,0 @@ -'use strict'; -const common = require('../common'); -const assert = require('assert'); -const async_hooks = require('async_hooks'); - -// Regression test for https://github.com/nodejs/node/issues/30080: -// An uncaught exception inside a queueMicrotask callback should not lead -// to multiple after() calls for it. 
- -let µtaskId; -const events = []; - -async_hooks.createHook({ - init(id, type, triggerId, resource) { - if (type === 'Microtask') { - µtaskId = id; - events.push('init'); - } - }, - before(id) { - if (id === µtaskId) events.push('before'); - }, - after(id) { - if (id === µtaskId) events.push('after'); - }, - destroy(id) { - if (id === µtaskId) events.push('destroy'); - } -}).enable(); - -queueMicrotask(() => { throw new Error(); }); - -process.on('uncaughtException', common.mustCall()); -process.on('exit', () => { - assert.deepStrictEqual(events, ['init', 'after', 'before', 'destroy']); -}); diff --git a/test/js/node/v8/capture-stack-trace.test.js b/test/js/node/v8/capture-stack-trace.test.js index 69dcf9307f..814aee3ab3 100644 --- a/test/js/node/v8/capture-stack-trace.test.js +++ b/test/js/node/v8/capture-stack-trace.test.js @@ -1,6 +1,6 @@ import { nativeFrameForTesting } from "bun:internal-for-testing"; -import { afterEach, expect, test } from "bun:test"; import { noInline } from "bun:jsc"; +import { afterEach, expect, mock, test } from "bun:test"; const origPrepareStackTrace = Error.prepareStackTrace; afterEach(() => { Error.prepareStackTrace = origPrepareStackTrace; @@ -697,3 +697,30 @@ test("Error.prepareStackTrace propagates exceptions", () => { ]), ).toThrow("hi"); }); + +test("CallFrame.p.getScriptNameOrSourceURL inside eval", () => { + let prevPrepareStackTrace = Error.prepareStackTrace; + const prepare = mock((e, s) => { + expect(s[0].getScriptNameOrSourceURL()).toBe("https://zombo.com/welcome-to-zombo.js"); + expect(s[1].getScriptNameOrSourceURL()).toBe("https://zombo.com/welcome-to-zombo.js"); + expect(s[2].getScriptNameOrSourceURL()).toBe("[native code]"); + expect(s[3].getScriptNameOrSourceURL()).toBe(import.meta.path); + expect(s[4].getScriptNameOrSourceURL()).toBe(import.meta.path); + }); + Error.prepareStackTrace = prepare; + let evalScript = `(function() { + throw new Error("bad error!"); + })() //# sourceURL=https://zombo.com/welcome-to-zombo.js`; + + try { + function insideAFunction() { + eval(evalScript); + } + insideAFunction(); + } catch (e) { + e.stack; + } + Error.prepareStackTrace = prevPrepareStackTrace; + + expect(prepare).toHaveBeenCalledTimes(1); +}); diff --git a/test/js/sql/sql.test.ts b/test/js/sql/sql.test.ts index 92fd82931b..4f16d46073 100644 --- a/test/js/sql/sql.test.ts +++ b/test/js/sql/sql.test.ts @@ -157,6 +157,62 @@ if (!isCI) { expect(error.code).toBe(`ERR_POSTGRES_LIFETIME_TIMEOUT`); }); + // Last one wins. + test("Handles duplicate string column names", async () => { + const result = await sql`select 1 as x, 2 as x, 3 as x`; + expect(result).toEqual([{ x: 3 }]); + }); + + test("Handles numeric column names", async () => { + // deliberately out of order + const result = await sql`select 1 as "1", 2 as "2", 3 as "3", 0 as "0"`; + expect(result).toEqual([{ "1": 1, "2": 2, "3": 3, "0": 0 }]); + + expect(Object.keys(result[0])).toEqual(["0", "1", "2", "3"]); + // Sanity check: ensure iterating through the properties doesn't crash. + Bun.inspect(result); + }); + + // Last one wins. + test("Handles duplicate numeric column names", async () => { + const result = await sql`select 1 as "1", 2 as "1", 3 as "1"`; + expect(result).toEqual([{ "1": 3 }]); + // Sanity check: ensure iterating through the properties doesn't crash. 
+ Bun.inspect(result); + }); + + test("Handles mixed column names", async () => { + const result = await sql`select 1 as "1", 2 as "2", 3 as "3", 4 as x`; + expect(result).toEqual([{ "1": 1, "2": 2, "3": 3, x: 4 }]); + // Sanity check: ensure iterating through the properties doesn't crash. + Bun.inspect(result); + }); + + test("Handles mixed column names with duplicates", async () => { + const result = await sql`select 1 as "1", 2 as "2", 3 as "3", 4 as "1", 1 as x, 2 as x`; + expect(result).toEqual([{ "1": 4, "2": 2, "3": 3, x: 2 }]); + // Sanity check: ensure iterating through the properties doesn't crash. + Bun.inspect(result); + + // Named columns are inserted first, but they appear from JS as last. + expect(Object.keys(result[0])).toEqual(["1", "2", "3", "x"]); + }); + + test("Handles mixed column names with duplicates at the end", async () => { + const result = await sql`select 1 as "1", 2 as "2", 3 as "3", 4 as "1", 1 as x, 2 as x, 3 as x, 4 as "y"`; + expect(result).toEqual([{ "1": 4, "2": 2, "3": 3, x: 3, y: 4 }]); + + // Sanity check: ensure iterating through the properties doesn't crash. + Bun.inspect(result); + }); + + test("Handles mixed column names with duplicates at the start", async () => { + const result = await sql`select 1 as "1", 2 as "1", 3 as "2", 4 as "3", 1 as x, 2 as x, 3 as x`; + expect(result).toEqual([{ "1": 2, "2": 3, "3": 4, x: 3 }]); + // Sanity check: ensure iterating through the properties doesn't crash. + Bun.inspect(result); + }); + test("Uses default database without slash", async () => { const sql = postgres("postgres://localhost"); expect(sql.options.username).toBe(sql.options.database); diff --git a/test/js/third_party/@electric-sql/pglite/pglite.test.ts b/test/js/third_party/@electric-sql/pglite/pglite.test.ts new file mode 100644 index 0000000000..45423d5a74 --- /dev/null +++ b/test/js/third_party/@electric-sql/pglite/pglite.test.ts @@ -0,0 +1,19 @@ +import { PGlite } from "@electric-sql/pglite"; + +describe("pglite", () => { + it("can initialize successfully", async () => { + const db = new PGlite(); + expect(await db.query("SELECT version()")).toEqual({ + rows: [ + { + version: + // since pglite is wasm, there is only one binary for all platforms. it always thinks it + // is x86_64-pc-linux-gnu. + "PostgreSQL 16.4 on x86_64-pc-linux-gnu, compiled by emcc (Emscripten gcc/clang-like replacement + linker emulating GNU ld) 3.1.72 (437140d149d9c977ffc8b09dbaf9b0f5a02db190), 32-bit", + }, + ], + fields: [{ name: "version", dataTypeID: 25 }], + affectedRows: 0, + }); + }); +}); diff --git a/test/js/web/fetch/fetch.stream.test.ts b/test/js/web/fetch/fetch.stream.test.ts index 21a72ede53..b3414f453b 100644 --- a/test/js/web/fetch/fetch.stream.test.ts +++ b/test/js/web/fetch/fetch.stream.test.ts @@ -1209,12 +1209,15 @@ describe("fetch() with streaming", () => { expect(buffer.toString("utf8")).toBe("unreachable"); } catch (err) { if (compression === "br") { - expect((err as Error).name).toBe("BrotliDecompressionError"); + expect((err as Error).name).toBe("Error"); + expect((err as Error).code).toBe("BrotliDecompressionError"); } else if (compression === "deflate-libdeflate") { // Since the compressed data is different, the error ends up different. 
- expect((err as Error).name).toBe("ShortRead"); + expect((err as Error).name).toBe("Error"); + expect((err as Error).code).toBe("ShortRead"); } else { - expect((err as Error).name).toBe("ZlibError"); + expect((err as Error).name).toBe("Error"); + expect((err as Error).code).toBe("ZlibError"); } } } @@ -1306,7 +1309,8 @@ describe("fetch() with streaming", () => { gcTick(false); expect(buffer.toString("utf8")).toBe("unreachable"); } catch (err) { - expect((err as Error).name).toBe("ConnectionClosed"); + expect((err as Error).name).toBe("Error"); + expect((err as Error).code).toBe("ConnectionClosed"); } } }); diff --git a/test/js/web/streams/streams.test.js b/test/js/web/streams/streams.test.js index 1caa6eb7ae..b8636ccf9f 100644 --- a/test/js/web/streams/streams.test.js +++ b/test/js/web/streams/streams.test.js @@ -7,7 +7,7 @@ import { readableStreamToText, } from "bun"; import { describe, expect, it, test } from "bun:test"; -import { tmpdirSync, isWindows, isMacOS } from "harness"; +import { tmpdirSync, isWindows, isMacOS, bunEnv } from "harness"; import { mkfifo } from "mkfifo"; import { createReadStream, realpathSync, unlinkSync, writeFileSync } from "node:fs"; import { join } from "node:path"; @@ -445,6 +445,7 @@ it.todoIf(isWindows || isMacOS)("Bun.file() read text from pipe", async () => { stdout: "pipe", stdin: null, env: { + ...bunEnv, FIFO_TEST: large, }, }); diff --git a/test/package.json b/test/package.json index efe5c43799..03fe468beb 100644 --- a/test/package.json +++ b/test/package.json @@ -11,6 +11,7 @@ "dependencies": { "@azure/service-bus": "7.9.4", "@duckdb/node-api": "1.1.3-alpha.7", + "@electric-sql/pglite": "0.2.15", "@grpc/grpc-js": "1.12.0", "@grpc/proto-loader": "0.7.10", "@napi-rs/canvas": "0.1.65", @@ -67,6 +68,7 @@ "svelte": "5.4.0", "typescript": "5.0.2", "undici": "5.20.0", + "v8-heapsnapshot": "1.3.1", "verdaccio": "6.0.0", "vitest": "0.32.2", "webpack": "5.88.0", diff --git a/test/regression/issue/08093.test.ts b/test/regression/issue/08093.test.ts index 280d0ec4df..4d32dab6b7 100644 --- a/test/regression/issue/08093.test.ts +++ b/test/regression/issue/08093.test.ts @@ -1,7 +1,7 @@ import { file, spawn } from "bun"; import { afterAll, afterEach, beforeAll, beforeEach, expect, it } from "bun:test"; import { access, writeFile } from "fs/promises"; -import { bunExe, bunEnv as env } from "harness"; +import { bunExe, bunEnv as env, readdirSorted } from "harness"; import { join } from "path"; import { dummyAfterAll, @@ -10,7 +10,6 @@ import { dummyBeforeEach, dummyRegistry, package_dir, - readdirSorted, requested, root_url, setHandler, diff --git a/test/v8/v8.test.ts b/test/v8/v8.test.ts index 1f4ff131ea..d2a3a468ba 100644 --- a/test/v8/v8.test.ts +++ b/test/v8/v8.test.ts @@ -1,6 +1,6 @@ import { spawn, spawnSync } from "bun"; import { beforeAll, describe, expect, it } from "bun:test"; -import { bunEnv, bunExe, tmpdirSync, isWindows, isMusl, isBroken } from "harness"; +import { bunEnv, bunExe, tmpdirSync, isWindows, isMusl, isBroken, nodeExe } from "harness"; import assert from "node:assert"; import fs from "node:fs/promises"; import { join, basename } from "path"; @@ -38,7 +38,7 @@ const directories = { }; async function install(srcDir: string, tmpDir: string, runtime: Runtime): Promise { - await fs.cp(srcDir, tmpDir, { recursive: true }); + await fs.cp(srcDir, tmpDir, { recursive: true, force: true }); const install = spawn({ cmd: [bunExe(), "install", "--ignore-scripts"], cwd: tmpDir, @@ -47,9 +47,9 @@ async function install(srcDir: string, tmpDir: string, 
runtime: Runtime): Promis stdout: "inherit", stderr: "inherit", }); - await install.exited; - if (install.exitCode != 0) { - throw new Error("build failed"); + const exitCode = await install.exited; + if (exitCode !== 0) { + throw new Error(`install failed: ${exitCode}`); } } @@ -63,20 +63,24 @@ async function build( cmd: runtime == Runtime.bun ? [bunExe(), "x", "--bun", "node-gyp", "rebuild", buildMode == BuildMode.debug ? "--debug" : "--release"] - : ["npx", "node-gyp", "rebuild", "--release"], // for node.js we don't bother with debug mode + : [bunExe(), "x", "node-gyp", "rebuild", "--release"], // for node.js we don't bother with debug mode cwd: tmpDir, env: bunEnv, stdin: "inherit", stdout: "pipe", stderr: "pipe", }); - await build.exited; - const out = await new Response(build.stdout).text(); - const err = await new Response(build.stderr).text(); - if (build.exitCode != 0) { + const [exitCode, out, err] = await Promise.all([ + build.exited, + new Response(build.stdout).text(), + new Response(build.stderr).text(), + ]); + if (exitCode !== 0) { console.error(err); - throw new Error("build failed"); + console.log(out); + throw new Error(`build failed: ${exitCode}`); } + return { out, err, @@ -112,89 +116,89 @@ describe.todoIf(isBroken && isMusl)("node:v8", () => { }); describe("module lifecycle", () => { - it("can call a basic native function", () => { - checkSameOutput("test_v8_native_call", []); + it("can call a basic native function", async () => { + await checkSameOutput("test_v8_native_call", []); }); }); describe("primitives", () => { - it("can create and distinguish between null, undefined, true, and false", () => { - checkSameOutput("test_v8_primitives", []); + it("can create and distinguish between null, undefined, true, and false", async () => { + await checkSameOutput("test_v8_primitives", []); }); }); describe("Number", () => { - it("can create small integer", () => { - checkSameOutput("test_v8_number_int", []); + it("can create small integer", async () => { + await checkSameOutput("test_v8_number_int", []); }); // non-i32 v8::Number is not implemented yet - it("can create large integer", () => { - checkSameOutput("test_v8_number_large_int", []); + it("can create large integer", async () => { + await checkSameOutput("test_v8_number_large_int", []); }); - it("can create fraction", () => { - checkSameOutput("test_v8_number_fraction", []); + it("can create fraction", async () => { + await checkSameOutput("test_v8_number_fraction", []); }); }); describe("String", () => { - it("can create and read back strings with only ASCII characters", () => { - checkSameOutput("test_v8_string_ascii", []); + it("can create and read back strings with only ASCII characters", async () => { + await checkSameOutput("test_v8_string_ascii", []); }); // non-ASCII strings are not implemented yet - it("can create and read back strings with UTF-8 characters", () => { - checkSameOutput("test_v8_string_utf8", []); + it("can create and read back strings with UTF-8 characters", async () => { + await checkSameOutput("test_v8_string_utf8", []); }); - it("handles replacement correctly in strings with invalid UTF-8 sequences", () => { - checkSameOutput("test_v8_string_invalid_utf8", []); + it("handles replacement correctly in strings with invalid UTF-8 sequences", async () => { + await checkSameOutput("test_v8_string_invalid_utf8", []); }); - it("can create strings from null-terminated Latin-1 data", () => { - checkSameOutput("test_v8_string_latin1", []); + it("can create strings from null-terminated Latin-1 
data", async () => { + await checkSameOutput("test_v8_string_latin1", []); }); describe("WriteUtf8", () => { - it("truncates the string correctly", () => { - checkSameOutput("test_v8_string_write_utf8", []); + it("truncates the string correctly", async () => { + await checkSameOutput("test_v8_string_write_utf8", []); }); }); }); describe("External", () => { - it("can create an external and read back the correct value", () => { - checkSameOutput("test_v8_external", []); + it("can create an external and read back the correct value", async () => { + await checkSameOutput("test_v8_external", []); }); }); describe("Object", () => { - it("can create an object and set properties", () => { - checkSameOutput("test_v8_object", []); + it("can create an object and set properties", async () => { + await checkSameOutput("test_v8_object", []); }); }); describe("Array", () => { // v8::Array::New is broken as it still tries to reinterpret locals as JSValues - it.skip("can create an array from a C array of Locals", () => { - checkSameOutput("test_v8_array_new", []); + it.skip("can create an array from a C array of Locals", async () => { + await checkSameOutput("test_v8_array_new", []); }); }); describe("ObjectTemplate", () => { - it("creates objects with internal fields", () => { - checkSameOutput("test_v8_object_template", []); + it("creates objects with internal fields", async () => { + await checkSameOutput("test_v8_object_template", []); }); }); describe("FunctionTemplate", () => { - it("keeps the data parameter alive", () => { - checkSameOutput("test_v8_function_template", []); + it("keeps the data parameter alive", async () => { + await checkSameOutput("test_v8_function_template", []); }); }); describe("Function", () => { - it("correctly receives all its arguments from JS", () => { - checkSameOutput("print_values_from_js", [5.0, true, null, false, "meow", {}]); - checkSameOutput("print_native_function", []); + it("correctly receives all its arguments from JS", async () => { + await checkSameOutput("print_values_from_js", [5.0, true, null, false, "async meow", {}]); + await checkSameOutput("print_native_function", []); }); - it("correctly receives the this value from JS", () => { - checkSameOutput("call_function_with_weird_this_values", []); + it("correctly receives the this value from JS", async () => { + await checkSameOutput("call_function_with_weird_this_values", []); }); }); @@ -213,44 +217,56 @@ describe.todoIf(isBroken && isMusl)("node:v8", () => { }); describe("Global", () => { - it("can create, modify, and read the value from global handles", () => { - checkSameOutput("test_v8_global", []); + it("can create, modify, and read the value from global handles", async () => { + await checkSameOutput("test_v8_global", []); }); }); describe("HandleScope", () => { - it("can hold a lot of locals", () => { - checkSameOutput("test_many_v8_locals", []); + it("can hold a lot of locals", async () => { + await checkSameOutput("test_many_v8_locals", []); }); - it("keeps GC objects alive", () => { - checkSameOutput("test_handle_scope_gc", []); + it("keeps GC objects alive", async () => { + await checkSameOutput("test_handle_scope_gc", []); }, 10000); }); describe("EscapableHandleScope", () => { - it("keeps handles alive in the outer scope", () => { - checkSameOutput("test_v8_escapable_handle_scope", []); + it("keeps handles alive in the outer scope", async () => { + await checkSameOutput("test_v8_escapable_handle_scope", []); }); }); describe("uv_os_getpid", () => { - it.skipIf(isWindows)("returns the same 
result as getpid on POSIX", () => { - checkSameOutput("test_uv_os_getpid", []); + it.skipIf(isWindows)("returns the same result as getpid on POSIX", async () => { + await checkSameOutput("test_uv_os_getpid", []); }); }); describe("uv_os_getppid", () => { - it.skipIf(isWindows)("returns the same result as getppid on POSIX", () => { - checkSameOutput("test_uv_os_getppid", []); + it.skipIf(isWindows)("returns the same result as getppid on POSIX", async () => { + await checkSameOutput("test_uv_os_getppid", []); }); }); }); -function checkSameOutput(testName: string, args: any[], thisValue?: any) { - const nodeResult = runOn(Runtime.node, BuildMode.release, testName, args, thisValue).trim(); - let bunReleaseResult = runOn(Runtime.bun, BuildMode.release, testName, args, thisValue); - let bunDebugResult = runOn(Runtime.bun, BuildMode.debug, testName, args, thisValue); - +async function checkSameOutput(testName: string, args: any[], thisValue?: any) { + const [nodeResultResolution, bunReleaseResultResolution, bunDebugResultResolution] = await Promise.allSettled([ + runOn(Runtime.node, BuildMode.release, testName, args, thisValue), + runOn(Runtime.bun, BuildMode.release, testName, args, thisValue), + runOn(Runtime.bun, BuildMode.debug, testName, args, thisValue), + ]); + const errors = [nodeResultResolution, bunReleaseResultResolution, bunDebugResultResolution] + .filter(r => r.status === "rejected") + .map(r => r.reason); + if (errors.length > 0) { + throw new AggregateError(errors); + } + let [nodeResult, bunReleaseResult, bunDebugResult] = [ + nodeResultResolution, + bunReleaseResultResolution, + bunDebugResultResolution, + ].map(r => (r as any).value); // remove all debug logs bunReleaseResult = bunReleaseResult.replaceAll(/^\[\w+\].+$/gm, "").trim(); bunDebugResult = bunDebugResult.replaceAll(/^\[\w+\].+$/gm, "").trim(); @@ -262,7 +278,7 @@ function checkSameOutput(testName: string, args: any[], thisValue?: any) { return nodeResult; } -function runOn(runtime: Runtime, buildMode: BuildMode, testName: string, jsArgs: any[], thisValue?: any) { +async function runOn(runtime: Runtime, buildMode: BuildMode, testName: string, jsArgs: any[], thisValue?: any) { if (runtime == Runtime.node) { assert(buildMode == BuildMode.release); } @@ -272,7 +288,7 @@ function runOn(runtime: Runtime, buildMode: BuildMode, testName: string, jsArgs: : buildMode == BuildMode.debug ? directories.bunDebug : directories.bunRelease; - const exe = runtime == Runtime.node ? "node" : bunExe(); + const exe = runtime == Runtime.node ? (nodeExe() ?? "node") : bunExe(); const cmd = [ exe, @@ -286,16 +302,21 @@ function runOn(runtime: Runtime, buildMode: BuildMode, testName: string, jsArgs: cmd.push("debug"); } - const exec = spawnSync({ + const proc = spawn({ cmd, cwd: baseDir, env: bunEnv, + stdio: ["inherit", "pipe", "pipe"], }); - const errs = exec.stderr.toString(); + const [exitCode, out, err] = await Promise.all([ + proc.exited, + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + ]); const crashMsg = `test ${testName} crashed under ${Runtime[runtime]} in ${BuildMode[buildMode]} mode`; - if (errs !== "") { - throw new Error(`${crashMsg}: ${errs}`); + if (exitCode !== 0) { + throw new Error(`${crashMsg}: ${err}\n${out}`.trim()); } - expect(exec.success, crashMsg).toBeTrue(); - return exec.stdout.toString(); + expect(exitCode, crashMsg).toBe(0); + return out.trim(); }
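The refactored runOn above illustrates a general pattern for piped child processes: await the exit code while draining stdout and stderr concurrently, so a child that fills a pipe buffer can never deadlock against a parent still blocked on `exited`. A minimal standalone sketch of that pattern, using only the Bun.spawn calls already shown in this diff (the `run` helper name and its error message are illustrative, not part of this change):

// Sketch only: mirrors the runOn pattern above. `run` is a hypothetical helper.
async function run(cmd: string[], cwd: string): Promise<string> {
  const proc = Bun.spawn({ cmd, cwd, stdio: ["inherit", "pipe", "pipe"] });
  // Collect the exit code and both pipes concurrently rather than sequentially.
  const [exitCode, out, err] = await Promise.all([
    proc.exited,
    new Response(proc.stdout).text(),
    new Response(proc.stderr).text(),
  ]);
  if (exitCode !== 0) {
    throw new Error(`command failed (${exitCode}): ${err}`.trim());
  }
  return out.trim();
}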