mirror of
https://github.com/oven-sh/bun
synced 2026-02-25 02:57:27 +01:00
Compare commits
5 Commits
jarred/uti
...
don/build/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ac7ff8cba5 | ||
|
|
d98c847027 | ||
|
|
b09430c11c | ||
|
|
30371a69a7 | ||
|
|
8c1a4973e8 |
26
.github/workflows/lint.yml
vendored
26
.github/workflows/lint.yml
vendored
@@ -1,26 +0,0 @@
|
||||
name: Lint
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.1.38"
|
||||
OXLINT_VERSION: "0.15.0"
|
||||
|
||||
jobs:
|
||||
lint-js:
|
||||
name: "Lint JavaScript"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Lint
|
||||
run: bunx oxlint --config oxlint.json --quiet --format github
|
||||
|
||||
|
||||
|
||||
|
||||
19
.github/workflows/typos.yml
vendored
19
.github/workflows/typos.yml
vendored
@@ -1,19 +0,0 @@
|
||||
name: Typos
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
docs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Spellcheck
|
||||
uses: crate-ci/typos@v1.29.4
|
||||
with:
|
||||
files: docs/**/*
|
||||
@@ -1,4 +1,4 @@
|
||||
# command script import vendor/zig/tools/lldb_pretty_printers.py
|
||||
command script import vendor/zig/tools/lldb_pretty_printers.py
|
||||
command script import vendor/WebKit/Tools/lldb/lldb_webkit.py
|
||||
|
||||
# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"
|
||||
|
||||
2
.mailmap
2
.mailmap
@@ -1,2 +0,0 @@
|
||||
# To learn more about git's mailmap: https://ntietz.com/blog/git-mailmap-for-name-changes
|
||||
chloe caruso <git@paperclover.net> <me@paperdave.net>
|
||||
@@ -1,2 +0,0 @@
|
||||
[type.md]
|
||||
extend-ignore-words-re = ["^ba"]
|
||||
@@ -1,6 +1,6 @@
|
||||
Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts.
|
||||
|
||||
If you are using Windows, please refer to [this guide](https://bun.sh/docs/project/building-windows)
|
||||
If you are using Windows, please refer to [this guide](/docs/project/building-windows.md)
|
||||
|
||||
## Install Dependencies
|
||||
|
||||
@@ -63,7 +63,7 @@ $ brew install llvm@18
|
||||
|
||||
```bash#Ubuntu/Debian
|
||||
$ # LLVM has an automatic installation script that is compatible with all versions of Ubuntu
|
||||
$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 18 all
|
||||
$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 16 all
|
||||
```
|
||||
|
||||
```bash#Arch
|
||||
@@ -71,21 +71,23 @@ $ sudo pacman -S llvm clang lld
|
||||
```
|
||||
|
||||
```bash#Fedora
|
||||
$ sudo dnf install llvm18 clang18 lld18-devel
|
||||
$ sudo dnf install 'dnf-command(copr)'
|
||||
$ sudo dnf copr enable -y @fedora-llvm-team/llvm17
|
||||
$ sudo dnf install llvm16 clang16 lld16-devel
|
||||
```
|
||||
|
||||
```bash#openSUSE Tumbleweed
|
||||
$ sudo zypper install clang18 lld18 llvm18
|
||||
$ sudo zypper install clang16 lld16 llvm16
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-18.1.8).
|
||||
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-16.0.6).
|
||||
|
||||
Make sure Clang/LLVM 18 is in your path:
|
||||
|
||||
```bash
|
||||
$ which clang-18
|
||||
$ which clang-16
|
||||
```
|
||||
|
||||
If not, run this to manually add it:
|
||||
@@ -94,13 +96,13 @@ If not, run this to manually add it:
|
||||
|
||||
```bash#macOS (Homebrew)
|
||||
# use fish_add_path if you're using fish
|
||||
# use path+="$(brew --prefix llvm@18)/bin" if you are using zsh
|
||||
$ export PATH="$(brew --prefix llvm@18)/bin:$PATH"
|
||||
# use path+="$(brew --prefix llvm@16)/bin" if you are using zsh
|
||||
$ export PATH="$(brew --prefix llvm@16)/bin:$PATH"
|
||||
```
|
||||
|
||||
```bash#Arch
|
||||
# use fish_add_path if you're using fish
|
||||
$ export PATH="$PATH:/usr/lib/llvm18/bin"
|
||||
$ export PATH="$PATH:/usr/lib/llvm16/bin"
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
@@ -161,7 +163,7 @@ The binary will be located at `./build/release/bun` and `./build/release/bun-pro
|
||||
|
||||
### Download release build from pull requests
|
||||
|
||||
To save you time spent building a release build locally, we provide a way to run release builds from pull requests. This is useful for manually testing changes in a release build before they are merged.
|
||||
To save you time spent building a release build locally, we provide a way to run release builds from pull requests. This is useful for manully testing changes in a release build before they are merged.
|
||||
|
||||
To run a release build from a pull request, you can use the `bun-pr` npm package:
|
||||
|
||||
@@ -207,7 +209,7 @@ $ git clone https://github.com/oven-sh/WebKit vendor/WebKit
|
||||
|
||||
# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
|
||||
# Optionally, you can use `make jsc` for a release build
|
||||
$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
|
||||
$ make jsc-debug
|
||||
|
||||
# Build bun with the local JSC build
|
||||
$ bun run build:local
|
||||
@@ -238,7 +240,7 @@ The issue may manifest when initially running `bun setup` as Clang being unable
|
||||
```
|
||||
The C++ compiler
|
||||
|
||||
"/usr/bin/clang++-18"
|
||||
"/usr/bin/clang++-16"
|
||||
|
||||
is not able to compile a simple test program.
|
||||
```
|
||||
|
||||
BIN
bench/bun.lockb
BIN
bench/bun.lockb
Binary file not shown.
@@ -1,7 +1,7 @@
|
||||
import ReactDOM from "react-dom";
|
||||
import { Main } from "./main";
|
||||
|
||||
const Base = () => {
|
||||
const Base = ({}) => {
|
||||
const name = typeof location !== "undefined" ? decodeURIComponent(location.search.substring(1)) : null;
|
||||
return <Main productName={name} />;
|
||||
};
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"execa": "^8.0.1",
|
||||
"fast-glob": "3.3.1",
|
||||
"fdir": "^6.1.0",
|
||||
"mitata": "^1.0.25",
|
||||
"mitata": "^1.0.10",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"string-width": "7.1.0",
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
import { Buffer } from "node:buffer";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const variations = [
|
||||
["latin1", "hello world"],
|
||||
["utf16", "hello emoji 🤔"],
|
||||
];
|
||||
|
||||
for (const [label, string] of variations) {
|
||||
const big = Buffer.alloc(1000000, string).toString();
|
||||
const small = Buffer.from(string).toString();
|
||||
const substring = big.slice(0, big.length - 2);
|
||||
|
||||
bench(`${substring.length}`, () => {
|
||||
return Buffer.byteLength(substring, "utf8");
|
||||
});
|
||||
|
||||
bench(`${small.length}`, () => {
|
||||
return Buffer.byteLength(small);
|
||||
});
|
||||
|
||||
bench(`${big.length}`, () => {
|
||||
return Buffer.byteLength(big);
|
||||
});
|
||||
}
|
||||
|
||||
await run();
|
||||
@@ -1,14 +1,20 @@
|
||||
import { noOpForTesting as noop } from "bun:internal-for-testing";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
// These are no-op C++ functions that are exported to JS.
|
||||
const lazy = globalThis[Symbol.for("Bun.lazy")];
|
||||
const noop = lazy("noop");
|
||||
const fn = noop.function;
|
||||
const regular = noop.functionRegular;
|
||||
const callback = noop.callback;
|
||||
|
||||
bench("C++ callback into JS", () => {
|
||||
callback(() => {});
|
||||
});
|
||||
|
||||
bench("C++ fn regular", () => {
|
||||
regular();
|
||||
});
|
||||
|
||||
bench("C++ fn", () => {
|
||||
fn();
|
||||
});
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
import { bench, run } from "../runner.mjs";
|
||||
import { brotliCompress, brotliDecompress, createBrotliCompress, createBrotliDecompress } from "node:zlib";
|
||||
import { promisify } from "node:util";
|
||||
import { pipeline } from "node:stream/promises";
|
||||
import { Readable } from "node:stream";
|
||||
import { readFileSync } from "node:fs";
|
||||
|
||||
const brotliCompressAsync = promisify(brotliCompress);
|
||||
const brotliDecompressAsync = promisify(brotliDecompress);
|
||||
|
||||
const testData =
|
||||
process.argv.length > 2
|
||||
? readFileSync(process.argv[2])
|
||||
: Buffer.alloc(1024 * 1024 * 16, "abcdefghijklmnopqrstuvwxyz");
|
||||
let compressed;
|
||||
|
||||
bench("brotli compress", async () => {
|
||||
compressed = await brotliCompressAsync(testData);
|
||||
});
|
||||
|
||||
bench("brotli decompress", async () => {
|
||||
await brotliDecompressAsync(compressed);
|
||||
});
|
||||
|
||||
bench("brotli compress stream", async () => {
|
||||
const source = Readable.from([testData]);
|
||||
const compress = createBrotliCompress();
|
||||
await pipeline(source, compress);
|
||||
});
|
||||
|
||||
bench("brotli decompress stream", async () => {
|
||||
const source = Readable.from([compressed]);
|
||||
const decompress = createBrotliDecompress();
|
||||
await pipeline(source, decompress);
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -10,6 +10,7 @@ bench("new URLSearchParams(obj)", () => {
|
||||
"Content-Length": "123",
|
||||
"User-Agent": "node-fetch/1.0",
|
||||
"Accept-Encoding": "gzip,deflate",
|
||||
"Content-Length": "0",
|
||||
"Content-Range": "bytes 0-9/10",
|
||||
});
|
||||
});
|
||||
|
||||
19
build.zig
19
build.zig
@@ -346,6 +346,20 @@ pub fn build(b: *Build) !void {
|
||||
// const run = b.addRunArtifact(exe);
|
||||
// step.dependOn(&run.step);
|
||||
}
|
||||
|
||||
// zig build test
|
||||
{
|
||||
const unit_tests = b.addTest(.{
|
||||
.name = "zig-unit-tests",
|
||||
.root_source_file = b.path("src/unit_test.zig"),
|
||||
.optimize = build_options.optimize,
|
||||
.target = build_options.target,
|
||||
.use_llvm = if (build_options.no_llvm) false else null,
|
||||
.use_lld = if (build_options.os == .mac) false else !build_options.no_llvm,
|
||||
});
|
||||
const test_step = b.step("test", "Run Zig-only unit tests");
|
||||
test_step.dependOn(&unit_tests.step);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn addMultiCheck(
|
||||
@@ -470,11 +484,6 @@ pub fn addInstallObjectFile(
|
||||
name: []const u8,
|
||||
out_mode: ObjectFormat,
|
||||
) *Step {
|
||||
if (@import("builtin").os.tag != .windows and std.posix.getenvZ("COMPILE_ERRORS_ONLY") != null) {
|
||||
const failstep = b.addSystemCommand(&.{"COMPILE_ERRORS_ONLY set but there were no compile errors"});
|
||||
failstep.step.dependOn(&compile.step);
|
||||
return &failstep.step;
|
||||
}
|
||||
// bin always needed to be computed or else the compilation will do nothing. zig build system bug?
|
||||
const bin = compile.getEmittedBin();
|
||||
return &b.addInstallFile(switch (out_mode) {
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
# FIXME: move this back to test/js/node
|
||||
# https://github.com/oven-sh/bun/issues/16289
|
||||
[test]
|
||||
preload = ["./test/js/node/harness.ts", "./test/preload.ts"]
|
||||
@@ -600,6 +600,7 @@ file(GLOB BUN_C_SOURCES ${CONFIGURE_DEPENDS}
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
list(APPEND BUN_C_SOURCES ${CWD}/src/bun.js/bindings/windows/musl-memmem.c)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
|
||||
endif()
|
||||
|
||||
@@ -2,15 +2,13 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION e1a802a2287edfe7f4046a9dd8307c8b59f5d816)
|
||||
set(WEBKIT_VERSION 00e2b186fd25e79cea4cb4d63d9fd388192327f6)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
|
||||
if(WEBKIT_LOCAL)
|
||||
set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${CMAKE_BUILD_TYPE})
|
||||
else()
|
||||
set(DEFAULT_WEBKIT_PATH ${CACHE_PATH}/webkit-${WEBKIT_VERSION_PREFIX})
|
||||
set(DEFAULT_WEBKIT_PATH ${CACHE_PATH}/webkit-${WEBKIT_VERSION})
|
||||
endif()
|
||||
|
||||
option(WEBKIT_PATH "The path to the WebKit directory")
|
||||
@@ -30,8 +28,6 @@ if(WEBKIT_LOCAL)
|
||||
${WEBKIT_PATH}
|
||||
${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
|
||||
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
|
||||
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
|
||||
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
|
||||
${WEBKIT_PATH}/bmalloc/Headers
|
||||
${WEBKIT_PATH}/WTF/Headers
|
||||
)
|
||||
|
||||
@@ -87,7 +87,7 @@ _bun_completions() {
|
||||
GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --env-file --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
|
||||
GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p";
|
||||
|
||||
PACKAGE_OPTIONS[ADD_OPTIONS_LONG]="--development --optional --peer";
|
||||
PACKAGE_OPTIONS[ADD_OPTIONS_LONG]="--development --optional";
|
||||
PACKAGE_OPTIONS[ADD_OPTIONS_SHORT]="-d";
|
||||
PACKAGE_OPTIONS[REMOVE_OPTIONS_LONG]="";
|
||||
PACKAGE_OPTIONS[REMOVE_OPTIONS_SHORT]="";
|
||||
|
||||
@@ -35,7 +35,6 @@ _bun_add_completion() {
|
||||
'-D[]' \
|
||||
'--development[]' \
|
||||
'--optional[Add dependency to "optionalDependencies]' \
|
||||
'--peer[Add dependency to "peerDependencies]' \
|
||||
'--exact[Add the exact version instead of the ^range]' &&
|
||||
ret=0
|
||||
|
||||
@@ -340,7 +339,6 @@ _bun_install_completion() {
|
||||
'--development[]' \
|
||||
'-D[]' \
|
||||
'--optional[Add dependency to "optionalDependencies]' \
|
||||
'--peer[Add dependency to "peerDependencies]' \
|
||||
'--exact[Add the exact version instead of the ^range]' &&
|
||||
ret=0
|
||||
|
||||
|
||||
@@ -179,16 +179,16 @@ type Flags = string | string[];
|
||||
|
||||
These are flags like `-I` for include directories and `-D` for preprocessor definitions.
|
||||
|
||||
#### `define: Record<string, string>`
|
||||
#### `defines: Record<string, string>`
|
||||
|
||||
The `define` is an optional object that should be passed to the TinyCC compiler.
|
||||
The `defines` is an optional object that should be passed to the TinyCC compiler.
|
||||
|
||||
```ts
|
||||
type Defines = Record<string, string>;
|
||||
|
||||
cc({
|
||||
source: "hello.c",
|
||||
define: {
|
||||
defines: {
|
||||
"NDEBUG": "1",
|
||||
},
|
||||
});
|
||||
|
||||
@@ -297,20 +297,6 @@ setTimeout(() => {
|
||||
|
||||
When you're done with a JSCallback, you should call `close()` to free the memory.
|
||||
|
||||
### Experimental thread-safe callbacks
|
||||
`JSCallback` has experimental support for thread-safe callbacks. This will be needed if you pass a callback function into a different thread from it's instantiation context. You can enable it with the optional `threadsafe` option flag.
|
||||
```ts
|
||||
const searchIterator = new JSCallback(
|
||||
(ptr, length) => /hello/.test(new CString(ptr, length)),
|
||||
{
|
||||
returns: "bool",
|
||||
args: ["ptr", "usize"],
|
||||
threadsafe: true, // Optional. Defaults to `false`
|
||||
},
|
||||
);
|
||||
```
|
||||
Be aware that there are still cases where this does not 100% work.
|
||||
|
||||
{% callout %}
|
||||
|
||||
**⚡️ Performance tip** — For a slight performance boost, directly pass `JSCallback.prototype.ptr` instead of the `JSCallback` object:
|
||||
|
||||
@@ -62,14 +62,6 @@ Bun.stdout;
|
||||
Bun.stderr;
|
||||
```
|
||||
|
||||
### Deleting files (`file.delete()`)
|
||||
|
||||
You can delete a file by calling the `.delete()` function.
|
||||
|
||||
```ts
|
||||
await Bun.file("logs.json").delete()
|
||||
```
|
||||
|
||||
## Writing files (`Bun.write()`)
|
||||
|
||||
`Bun.write(destination, data): Promise<number>`
|
||||
|
||||
678
docs/api/s3.md
678
docs/api/s3.md
@@ -1,678 +0,0 @@
|
||||
Production servers often read, upload, and write files to S3-compatible object storage services instead of the local filesystem. Historically, that means local filesystem APIs you use in development can't be used in production. When you use Bun, things are different.
|
||||
|
||||
Bun provides fast, native bindings for interacting with S3-compatible object storage services. Bun's S3 API is designed to be simple and feel similar to fetch's `Response` and `Blob` APIs (like Bun's local filesystem APIs).
|
||||
|
||||
```ts
|
||||
import { s3, write, S3Client } from "bun";
|
||||
|
||||
// Bun.s3 reads environment variables for credentials
|
||||
// file() returns a lazy reference to a file on S3
|
||||
const metadata = s3.file("123.json");
|
||||
|
||||
// Download from S3 as JSON
|
||||
const data = await metadata.json();
|
||||
|
||||
// Upload to S3
|
||||
await write(metadata, JSON.stringify({ name: "John", age: 30 }));
|
||||
|
||||
// Presign a URL (synchronous - no network request needed)
|
||||
const url = metadata.presign({
|
||||
acl: "public-read",
|
||||
expiresIn: 60 * 60 * 24, // 1 day
|
||||
});
|
||||
|
||||
// Delete the file
|
||||
await metadata.delete();
|
||||
```
|
||||
|
||||
S3 is the [de facto standard](https://en.wikipedia.org/wiki/De_facto_standard) internet filesystem. Bun's S3 API works with S3-compatible storage services like:
|
||||
|
||||
- AWS S3
|
||||
- Cloudflare R2
|
||||
- DigitalOcean Spaces
|
||||
- MinIO
|
||||
- Backblaze B2
|
||||
- ...and any other S3-compatible storage service
|
||||
|
||||
## Basic Usage
|
||||
|
||||
There are several ways to interact with Bun's S3 API.
|
||||
|
||||
### `Bun.S3Client` & `Bun.s3`
|
||||
|
||||
`Bun.s3` is equivalent to `new Bun.S3Client()`, relying on environment variables for credentials.
|
||||
|
||||
To explicitly set credentials, pass them to the `Bun.S3Client` constructor.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
const client = new S3Client({
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
bucket: "my-bucket",
|
||||
// sessionToken: "..."
|
||||
// acl: "public-read",
|
||||
// endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
|
||||
// endpoint: "https://<region>.digitaloceanspaces.com", // DigitalOcean Spaces
|
||||
// endpoint: "http://localhost:9000", // MinIO
|
||||
});
|
||||
|
||||
// Bun.s3 is a global singleton that is equivalent to `new Bun.S3Client()`
|
||||
Bun.s3 = client;
|
||||
```
|
||||
|
||||
### Working with S3 Files
|
||||
|
||||
The **`file`** method in `S3Client` returns a **lazy reference to a file on S3**.
|
||||
|
||||
```ts
|
||||
// A lazy reference to a file on S3
|
||||
const s3file: S3File = client.file("123.json");
|
||||
```
|
||||
|
||||
Like `Bun.file(path)`, the `S3Client`'s `file` method is synchronous. It does zero network requests until you call a method that depends on a network request.
|
||||
|
||||
### Reading files from S3
|
||||
|
||||
If you've used the `fetch` API, you're familiar with the `Response` and `Blob` APIs. `S3File` extends `Blob`. The same methods that work on `Blob` also work on `S3File`.
|
||||
|
||||
```ts
|
||||
// Read an S3File as text
|
||||
const text = await s3file.text();
|
||||
|
||||
// Read an S3File as JSON
|
||||
const json = await s3file.json();
|
||||
|
||||
// Read an S3File as an ArrayBuffer
|
||||
const buffer = await s3file.arrayBuffer();
|
||||
|
||||
// Get only the first 1024 bytes
|
||||
const partial = await s3file.slice(0, 1024).text();
|
||||
|
||||
// Stream the file
|
||||
const stream = s3file.stream();
|
||||
for await (const chunk of stream) {
|
||||
console.log(chunk);
|
||||
}
|
||||
```
|
||||
|
||||
#### Memory optimization
|
||||
|
||||
Methods like `text()`, `json()`, `bytes()`, or `arrayBuffer()` avoid duplicating the string or bytes in memory when possible.
|
||||
|
||||
If the text happens to be ASCII, Bun directly transfers the string to JavaScriptCore (the engine) without transcoding and without duplicating the string in memory. When you use `.bytes()` or `.arrayBuffer()`, it will also avoid duplicating the bytes in memory.
|
||||
|
||||
These helper methods not only simplify the API, they also make it faster.
|
||||
|
||||
### Writing & uploading files to S3
|
||||
|
||||
Writing to S3 is just as simple.
|
||||
|
||||
```ts
|
||||
// Write a string (replacing the file)
|
||||
await s3file.write("Hello World!");
|
||||
|
||||
// Write a Buffer (replacing the file)
|
||||
await s3file.write(Buffer.from("Hello World!"));
|
||||
|
||||
// Write a Response (replacing the file)
|
||||
await s3file.write(new Response("Hello World!"));
|
||||
|
||||
// Write with content type
|
||||
await s3file.write(JSON.stringify({ name: "John", age: 30 }), {
|
||||
type: "application/json",
|
||||
});
|
||||
|
||||
// Write using a writer (streaming)
|
||||
const writer = s3file.writer({ type: "application/json" });
|
||||
writer.write("Hello");
|
||||
writer.write(" World!");
|
||||
await writer.end();
|
||||
|
||||
// Write using Bun.write
|
||||
await Bun.write(s3file, "Hello World!");
|
||||
```
|
||||
|
||||
### Working with large files (streams)
|
||||
|
||||
Bun automatically handles multipart uploads for large files and provides streaming capabilities. The same API that works for local files also works for S3 files.
|
||||
|
||||
```ts
|
||||
// Write a large file
|
||||
const bigFile = Buffer.alloc(10 * 1024 * 1024); // 10MB
|
||||
const writer = s3file.writer({
|
||||
// Automatically retry on network errors up to 3 times
|
||||
retry: 3,
|
||||
|
||||
// Queue up to 10 requests at a time
|
||||
queueSize: 10,
|
||||
|
||||
// Upload in 5 MB chunks
|
||||
partSize: 5 * 1024 * 1024,
|
||||
});
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await writer.write(bigFile);
|
||||
}
|
||||
await writer.end();
|
||||
```
|
||||
|
||||
## Presigning URLs
|
||||
|
||||
When your production service needs to let users upload files to your server, it's often more reliable for the user to upload directly to S3 instead of your server acting as an intermediary.
|
||||
|
||||
To facilitate this, you can presign URLs for S3 files. This generates a URL with a signature that allows a user to securely upload that specific file to S3, without exposing your credentials or granting them unnecessary access to your bucket.
|
||||
|
||||
```ts
|
||||
import { s3 } from "bun";
|
||||
|
||||
// Generate a presigned URL that expires in 24 hours (default)
|
||||
const url = s3.presign("my-file.txt", {
|
||||
expiresIn: 3600, // 1 hour
|
||||
});
|
||||
```
|
||||
|
||||
### Setting ACLs
|
||||
|
||||
To set an ACL (access control list) on a presigned URL, pass the `acl` option:
|
||||
|
||||
```ts
|
||||
const url = s3file.presign({
|
||||
acl: "public-read",
|
||||
expiresIn: 3600,
|
||||
});
|
||||
```
|
||||
|
||||
You can pass any of the following ACLs:
|
||||
|
||||
| ACL | Explanation |
|
||||
| ----------------------------- | ------------------------------------------------------------------- |
|
||||
| `"public-read"` | The object is readable by the public. |
|
||||
| `"private"` | The object is readable only by the bucket owner. |
|
||||
| `"public-read-write"` | The object is readable and writable by the public. |
|
||||
| `"authenticated-read"` | The object is readable by the bucket owner and authenticated users. |
|
||||
| `"aws-exec-read"` | The object is readable by the AWS account that made the request. |
|
||||
| `"bucket-owner-read"` | The object is readable by the bucket owner. |
|
||||
| `"bucket-owner-full-control"` | The object is readable and writable by the bucket owner. |
|
||||
| `"log-delivery-write"` | The object is writable by AWS services used for log delivery. |
|
||||
|
||||
### Expiring URLs
|
||||
|
||||
To set an expiration time for a presigned URL, pass the `expiresIn` option.
|
||||
|
||||
```ts
|
||||
const url = s3file.presign({
|
||||
// Seconds
|
||||
expiresIn: 3600, // 1 hour
|
||||
|
||||
// access control list
|
||||
acl: "public-read",
|
||||
|
||||
// HTTP method
|
||||
method: "PUT",
|
||||
});
|
||||
```
|
||||
|
||||
### `method`
|
||||
|
||||
To set the HTTP method for a presigned URL, pass the `method` option.
|
||||
|
||||
```ts
|
||||
const url = s3file.presign({
|
||||
method: "PUT",
|
||||
// method: "DELETE",
|
||||
// method: "GET",
|
||||
// method: "HEAD",
|
||||
// method: "POST",
|
||||
// method: "PUT",
|
||||
});
|
||||
```
|
||||
|
||||
### `new Response(S3File)`
|
||||
|
||||
To quickly redirect users to a presigned URL for an S3 file, pass an `S3File` instance to a `Response` object as the body.
|
||||
|
||||
```ts
|
||||
const response = new Response(s3file);
|
||||
console.log(response);
|
||||
```
|
||||
|
||||
This will automatically redirect the user to the presigned URL for the S3 file, saving you the memory, time, and bandwidth cost of downloading the file to your server and sending it back to the user.
|
||||
|
||||
```ts
|
||||
Response (0 KB) {
|
||||
ok: false,
|
||||
url: "",
|
||||
status: 302,
|
||||
statusText: "",
|
||||
headers: Headers {
|
||||
"location": "https://<account-id>.r2.cloudflarestorage.com/...",
|
||||
},
|
||||
redirected: true,
|
||||
bodyUsed: false
|
||||
}
|
||||
```
|
||||
|
||||
## Support for S3-Compatible Services
|
||||
|
||||
Bun's S3 implementation works with any S3-compatible storage service. Just specify the appropriate endpoint:
|
||||
|
||||
### Using Bun's S3Client with AWS S3
|
||||
|
||||
AWS S3 is the default. You can also pass a `region` option instead of an `endpoint` option for AWS S3.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
// AWS S3
|
||||
const s3 = new S3Client({
|
||||
accessKeyId: "access-key",
|
||||
secretAccessKey: "secret-key",
|
||||
bucket: "my-bucket",
|
||||
// endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
// region: "us-east-1",
|
||||
});
|
||||
```
|
||||
|
||||
### Using Bun's S3Client with Google Cloud Storage
|
||||
|
||||
To use Bun's S3 client with [Google Cloud Storage](https://cloud.google.com/storage), set `endpoint` to `"https://storage.googleapis.com"` in the `S3Client` constructor.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
// Google Cloud Storage
|
||||
const gcs = new S3Client({
|
||||
accessKeyId: "access-key",
|
||||
secretAccessKey: "secret-key",
|
||||
bucket: "my-bucket",
|
||||
endpoint: "https://storage.googleapis.com",
|
||||
});
|
||||
```
|
||||
|
||||
### Using Bun's S3Client with Cloudflare R2
|
||||
|
||||
To use Bun's S3 client with [Cloudflare R2](https://developers.cloudflare.com/r2/), set `endpoint` to the R2 endpoint in the `S3Client` constructor. The R2 endpoint includes your account ID.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
// CloudFlare R2
|
||||
const r2 = new S3Client({
|
||||
accessKeyId: "access-key",
|
||||
secretAccessKey: "secret-key",
|
||||
bucket: "my-bucket",
|
||||
endpoint: "https://<account-id>.r2.cloudflarestorage.com",
|
||||
});
|
||||
```
|
||||
|
||||
### Using Bun's S3Client with DigitalOcean Spaces
|
||||
|
||||
To use Bun's S3 client with [DigitalOcean Spaces](https://www.digitalocean.com/products/spaces/), set `endpoint` to the DigitalOcean Spaces endpoint in the `S3Client` constructor.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
const spaces = new S3Client({
|
||||
accessKeyId: "access-key",
|
||||
secretAccessKey: "secret-key",
|
||||
bucket: "my-bucket",
|
||||
// region: "nyc3",
|
||||
endpoint: "https://<region>.digitaloceanspaces.com",
|
||||
});
|
||||
```
|
||||
|
||||
### Using Bun's S3Client with MinIO
|
||||
|
||||
To use Bun's S3 client with [MinIO](https://min.io/), set `endpoint` to the URL that MinIO is running on in the `S3Client` constructor.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
const minio = new S3Client({
|
||||
accessKeyId: "access-key",
|
||||
secretAccessKey: "secret-key",
|
||||
bucket: "my-bucket",
|
||||
|
||||
// Make sure to use the correct endpoint URL
|
||||
// It might not be localhost in production!
|
||||
endpoint: "http://localhost:9000",
|
||||
});
|
||||
```
|
||||
|
||||
## Credentials
|
||||
|
||||
Credentials are one of the hardest parts of using S3, and we've tried to make it as easy as possible. By default, Bun reads the following environment variables for credentials.
|
||||
|
||||
| Option name | Environment variable |
|
||||
| ----------------- | ---------------------- |
|
||||
| `accessKeyId` | `S3_ACCESS_KEY_ID` |
|
||||
| `secretAccessKey` | `S3_SECRET_ACCESS_KEY` |
|
||||
| `region` | `S3_REGION` |
|
||||
| `endpoint` | `S3_ENDPOINT` |
|
||||
| `bucket` | `S3_BUCKET` |
|
||||
| `sessionToken` | `S3_SESSION_TOKEN` |
|
||||
|
||||
If the `S3_*` environment variable is not set, Bun will also check for the `AWS_*` environment variable, for each of the above options.
|
||||
|
||||
| Option name | Fallback environment variable |
|
||||
| ----------------- | ----------------------------- |
|
||||
| `accessKeyId` | `AWS_ACCESS_KEY_ID` |
|
||||
| `secretAccessKey` | `AWS_SECRET_ACCESS_KEY` |
|
||||
| `region` | `AWS_REGION` |
|
||||
| `endpoint` | `AWS_ENDPOINT` |
|
||||
| `bucket` | `AWS_BUCKET` |
|
||||
| `sessionToken` | `AWS_SESSION_TOKEN` |
|
||||
|
||||
These environment variables are read from [`.env` files](/docs/runtime/env) or from the process environment at initialization time (`process.env` is not used for this).
|
||||
|
||||
These defaults are overridden by the options you pass to `s3(credentials)`, `new Bun.S3Client(credentials)`, or any of the methods that accept credentials. So if, for example, you use the same credentials for different buckets, you can set the credentials once in your `.env` file and then pass `bucket: "my-bucket"` to the `s3()` helper function without having to specify all the credentials again.
|
||||
|
||||
### `S3Client` objects
|
||||
|
||||
When you're not using environment variables or using multiple buckets, you can create a `S3Client` object to explicitly set credentials.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
const client = new S3Client({
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
bucket: "my-bucket",
|
||||
// sessionToken: "..."
|
||||
endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
|
||||
// endpoint: "http://localhost:9000", // MinIO
|
||||
});
|
||||
|
||||
// Write using a Response
|
||||
await file.write(new Response("Hello World!"));
|
||||
|
||||
// Presign a URL
|
||||
const url = file.presign({
|
||||
expiresIn: 60 * 60 * 24, // 1 day
|
||||
acl: "public-read",
|
||||
});
|
||||
|
||||
// Delete the file
|
||||
await file.delete();
|
||||
```
|
||||
|
||||
### `S3Client.prototype.write`
|
||||
|
||||
To upload or write a file to S3, call `write` on the `S3Client` instance.
|
||||
|
||||
```ts
|
||||
const client = new Bun.S3Client({
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
bucket: "my-bucket",
|
||||
});
|
||||
await client.write("my-file.txt", "Hello World!");
|
||||
await client.write("my-file.txt", new Response("Hello World!"));
|
||||
|
||||
// equivalent to
|
||||
// await client.file("my-file.txt").write("Hello World!");
|
||||
```
|
||||
|
||||
### `S3Client.prototype.delete`
|
||||
|
||||
To delete a file from S3, call `delete` on the `S3Client` instance.
|
||||
|
||||
```ts
|
||||
const client = new Bun.S3Client({
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
bucket: "my-bucket",
|
||||
});
|
||||
|
||||
await client.delete("my-file.txt");
|
||||
// equivalent to
|
||||
// await client.file("my-file.txt").delete();
|
||||
```
|
||||
|
||||
### `S3Client.prototype.exists`
|
||||
|
||||
To check if a file exists in S3, call `exists` on the `S3Client` instance.
|
||||
|
||||
```ts
|
||||
const client = new Bun.S3Client({
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
bucket: "my-bucket",
|
||||
});
|
||||
|
||||
const exists = await client.exists("my-file.txt");
|
||||
// equivalent to
|
||||
// const exists = await client.file("my-file.txt").exists();
|
||||
```
|
||||
|
||||
## `S3File`
|
||||
|
||||
`S3File` instances are created by calling the `S3` instance method or the `s3()` helper function. Like `Bun.file()`, `S3File` instances are lazy. They don't refer to something that necessarily exists at the time of creation. That's why all the methods that don't involve network requests are fully synchronous.
|
||||
|
||||
```ts
|
||||
interface S3File extends Blob {
|
||||
slice(start: number, end?: number): S3File;
|
||||
exists(): Promise<boolean>;
|
||||
unlink(): Promise<void>;
|
||||
presign(options: S3Options): string;
|
||||
text(): Promise<string>;
|
||||
json(): Promise<any>;
|
||||
bytes(): Promise<Uint8Array>;
|
||||
arrayBuffer(): Promise<ArrayBuffer>;
|
||||
stream(options: S3Options): ReadableStream;
|
||||
write(
|
||||
data:
|
||||
| string
|
||||
| Uint8Array
|
||||
| ArrayBuffer
|
||||
| Blob
|
||||
| ReadableStream
|
||||
| Response
|
||||
| Request,
|
||||
options?: BlobPropertyBag,
|
||||
): Promise<void>;
|
||||
|
||||
exists(options?: S3Options): Promise<boolean>;
|
||||
unlink(options?: S3Options): Promise<void>;
|
||||
delete(options?: S3Options): Promise<void>;
|
||||
presign(options?: S3Options): string;
|
||||
|
||||
stat(options?: S3Options): Promise<S3Stat>;
|
||||
/**
|
||||
* Size is not synchronously available because it requires a network request.
|
||||
*
|
||||
* @deprecated Use `stat()` instead.
|
||||
*/
|
||||
size: NaN;
|
||||
|
||||
// ... more omitted for brevity
|
||||
}
|
||||
```
|
||||
|
||||
Like `Bun.file()`, `S3File` extends [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob), so all the methods that are available on `Blob` are also available on `S3File`. The same API for reading data from a local file is also available for reading data from S3.
|
||||
|
||||
| Method | Output |
|
||||
| ---------------------------- | ---------------- |
|
||||
| `await s3File.text()` | `string` |
|
||||
| `await s3File.bytes()` | `Uint8Array` |
|
||||
| `await s3File.json()` | `JSON` |
|
||||
| `await s3File.stream()` | `ReadableStream` |
|
||||
| `await s3File.arrayBuffer()` | `ArrayBuffer` |
|
||||
|
||||
That means using `S3File` instances with `fetch()`, `Response`, and other web APIs that accept `Blob` instances just works.
|
||||
|
||||
### Partial reads with `slice`
|
||||
|
||||
To read a partial range of a file, you can use the `slice` method.
|
||||
|
||||
```ts
|
||||
const partial = s3file.slice(0, 1024);
|
||||
|
||||
// Read the partial range as a Uint8Array
|
||||
const bytes = await partial.bytes();
|
||||
|
||||
// Read the partial range as a string
|
||||
const text = await partial.text();
|
||||
```
|
||||
|
||||
Internally, this works by using the HTTP `Range` header to request only the bytes you want. This `slice` method is the same as [`Blob.prototype.slice`](https://developer.mozilla.org/en-US/docs/Web/API/Blob/slice).
|
||||
|
||||
### Deleting files from S3
|
||||
|
||||
To delete a file from S3, you can use the `delete` method.
|
||||
|
||||
```ts
|
||||
await s3file.delete();
|
||||
// await s3File.unlink();
|
||||
```
|
||||
|
||||
`delete` is the same as `unlink`.
|
||||
|
||||
## Error codes
|
||||
|
||||
When Bun's S3 API throws an error, it will have a `code` property that matches one of the following values:
|
||||
|
||||
- `ERR_S3_MISSING_CREDENTIALS`
|
||||
- `ERR_S3_INVALID_METHOD`
|
||||
- `ERR_S3_INVALID_PATH`
|
||||
- `ERR_S3_INVALID_ENDPOINT`
|
||||
- `ERR_S3_INVALID_SIGNATURE`
|
||||
- `ERR_S3_INVALID_SESSION_TOKEN`
|
||||
|
||||
When the S3 Object Storage service returns an error (that is, not Bun), it will be an `S3Error` instance (an `Error` instance with the name `"S3Error"`).
|
||||
|
||||
## `S3Client` static methods
|
||||
|
||||
The `S3Client` class provides several static methods for interacting with S3.
|
||||
|
||||
### `S3Client.presign` (static)
|
||||
|
||||
To generate a presigned URL for an S3 file, you can use the `S3Client.presign` static method.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
const credentials = {
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
bucket: "my-bucket",
|
||||
// endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
|
||||
};
|
||||
|
||||
const url = S3Client.presign("my-file.txt", {
|
||||
...credentials,
|
||||
expiresIn: 3600,
|
||||
});
|
||||
```
|
||||
|
||||
This is equivalent to calling `new S3Client(credentials).presign("my-file.txt", { expiresIn: 3600 })`.
|
||||
|
||||
### `S3Client.exists` (static)
|
||||
|
||||
To check if an S3 file exists, you can use the `S3Client.exists` static method.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
const credentials = {
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
bucket: "my-bucket",
|
||||
// endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
};
|
||||
|
||||
const exists = await S3Client.exists("my-file.txt", credentials);
|
||||
```
|
||||
|
||||
The same method also works on `S3File` instances.
|
||||
|
||||
```ts
|
||||
const s3file = Bun.s3("my-file.txt", {
|
||||
...credentials,
|
||||
});
|
||||
const exists = await s3file.exists();
|
||||
```
|
||||
|
||||
### `S3Client.stat` (static)
|
||||
|
||||
To get the size, etag, and other metadata of an S3 file, you can use the `S3Client.stat` static method.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
const credentials = {
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
bucket: "my-bucket",
|
||||
// endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
};
|
||||
|
||||
const stat = await S3Client.stat("my-file.txt", credentials);
|
||||
// {
|
||||
// etag: "\"7a30b741503c0b461cc14157e2df4ad8\"",
|
||||
// lastModified: 2025-01-07T00:19:10.000Z,
|
||||
// size: 1024,
|
||||
// type: "text/plain;charset=utf-8",
|
||||
// }
|
||||
```
|
||||
|
||||
### `S3Client.delete` (static)
|
||||
|
||||
To delete an S3 file, you can use the `S3Client.delete` static method.
|
||||
|
||||
```ts
|
||||
import { S3Client } from "bun";
|
||||
|
||||
const credentials = {
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
bucket: "my-bucket",
|
||||
// endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
};
|
||||
|
||||
await S3Client.delete("my-file.txt", credentials);
|
||||
// equivalent to
|
||||
// await new S3Client(credentials).delete("my-file.txt");
|
||||
|
||||
// S3Client.unlink is alias of S3Client.delete
|
||||
await S3Client.unlink("my-file.txt", credentials);
|
||||
```
|
||||
|
||||
## s3:// protocol
|
||||
|
||||
To make it easier to use the same code for local files and S3 files, the `s3://` protocol is supported in `fetch` and `Bun.file()`.
|
||||
|
||||
```ts
|
||||
const response = await fetch("s3://my-bucket/my-file.txt");
|
||||
const file = Bun.file("s3://my-bucket/my-file.txt");
|
||||
```
|
||||
|
||||
You can additionally pass `s3` options to the `fetch` and `Bun.file` functions.
|
||||
|
||||
```ts
|
||||
const response = await fetch("s3://my-bucket/my-file.txt", {
|
||||
s3: {
|
||||
accessKeyId: "your-access-key",
|
||||
secretAccessKey: "your-secret-key",
|
||||
endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
},
|
||||
headers: {
|
||||
"range": "bytes=0-1023",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### UTF-8, UTF-16, and BOM (byte order mark)
|
||||
|
||||
Like `Response` and `Blob`, `S3File` assumes UTF-8 encoding by default.
|
||||
|
||||
When calling one of the `text()` or `json()` methods on an `S3File`:
|
||||
|
||||
- When a UTF-16 byte order mark (BOM) is detected, it will be treated as UTF-16. JavaScriptCore natively supports UTF-16, so it skips the UTF-8 transcoding process (and strips the BOM). This is mostly good, but it does mean if you have invalid surrogate pairs characters in your UTF-16 string, they will be passed through to JavaScriptCore (same as source code).
|
||||
- When a UTF-8 BOM is detected, it gets stripped before the string is passed to JavaScriptCore and invalid UTF-8 codepoints are replaced with the Unicode replacement character (`\uFFFD`).
|
||||
- UTF-32 is not supported.
|
||||
@@ -82,7 +82,7 @@ const strict = new Database(
|
||||
// throws error because of the typo:
|
||||
const query = strict
|
||||
.query("SELECT $message;")
|
||||
.all({ messag: "Hello world" });
|
||||
|
||||
const notStrict = new Database(
|
||||
":memory:"
|
||||
@@ -90,7 +90,7 @@ const notStrict = new Database(
|
||||
// does not throw error:
|
||||
notStrict
|
||||
.query("SELECT $message;")
|
||||
.all({ messag: "Hello world" });
|
||||
```
|
||||
|
||||
### Load via ES module import
|
||||
|
||||
@@ -121,7 +121,7 @@ const id = randomUUIDv7();
|
||||
|
||||
A UUID v7 is a 128-bit value that encodes the current timestamp, a random value, and a counter. The timestamp is encoded using the lowest 48 bits, and the random value and counter are encoded using the remaining bits.
|
||||
|
||||
The `timestamp` parameter defaults to the current time in milliseconds. When the timestamp changes, the counter is reset to a pseudo-random integer wrapped to 4096. This counter is atomic and threadsafe, meaning that using `Bun.randomUUIDv7()` in many Workers within the same process running at the same timestamp will not have colliding counter values.
|
||||
|
||||
The final 8 bytes of the UUID are a cryptographically secure random value. It uses the same random number generator used by `crypto.randomUUID()` (which comes from BoringSSL, which in turn comes from the platform-specific system random number generator usually provided by the underlying hardware).
|
||||
|
||||
|
||||
@@ -69,7 +69,7 @@ await Bun.build({
|
||||
|
||||
### Namespaces
|
||||
|
||||
`onLoad` and `onResolve` accept an optional `namespace` string. What is a namespace?
|
||||
|
||||
Every module has a namespace. Namespaces are used to prefix the import in transpiled code; for instance, a loader with a `filter: /\.yaml$/` and `namespace: "yaml:"` will transform an import from `./myfile.yaml` into `yaml:./myfile.yaml`.
|
||||
|
||||
@@ -239,7 +239,7 @@ One of the arguments passed to the `onLoad` callback is a `defer` function. This
|
||||
|
||||
This allows you to delay execution of the `onLoad` callback until all other modules have been loaded.
|
||||
|
||||
This is useful for returning contents of a module that depends on other modules.
|
||||
|
||||
##### Example: tracking and reporting unused exports
|
||||
|
||||
|
||||
@@ -33,14 +33,6 @@ To add a package as an optional dependency (`"optionalDependencies"`):
|
||||
$ bun add --optional lodash
|
||||
```
|
||||
|
||||
## `--peer`
|
||||
|
||||
To add a package as a peer dependency (`"peerDependencies"`):
|
||||
|
||||
```bash
|
||||
$ bun add --peer @types/bun
|
||||
```
|
||||
|
||||
## `--exact`
|
||||
|
||||
{% callout %}
|
||||
|
||||
@@ -1,51 +1,4 @@
|
||||
The `--filter` (or `-F`) flag is used for selecting packages by pattern in a monorepo. Patterns can be used to match package names or package paths, with full glob syntax support.
|
||||
|
||||
Currently `--filter` is supported by `bun install` and `bun outdated`, and can also be used to run scripts for multiple packages at once.
|
||||
|
||||
## Matching
|
||||
|
||||
### Package Name `--filter <pattern>`
|
||||
|
||||
Name patterns select packages based on the package name, as specified in `package.json`. For example, if you have packages `pkg-a`, `pkg-b` and `other`, you can match all packages with `*`, only `pkg-a` and `pkg-b` with `pkg*`, and a specific package by providing the full name of the package.
|
||||
|
||||
### Package Path `--filter ./<glob>`
|
||||
|
||||
Path patterns are specified by starting the pattern with `./`, and will select all packages in directories that match the pattern. For example, to match all packages in subdirectories of `packages`, you can use `--filter './packages/**'`. To match a package located in `packages/foo`, use `--filter ./packages/foo`.
|
||||
|
||||
## `bun install` and `bun outdated`
|
||||
|
||||
Both `bun install` and `bun outdated` support the `--filter` flag.
|
||||
|
||||
`bun install` by default will install dependencies for all packages in the monorepo. To install dependencies for specific packages, use `--filter`.
|
||||
|
||||
Given a monorepo with workspaces `pkg-a`, `pkg-b`, and `pkg-c` under `./packages`:
|
||||
|
||||
```bash
|
||||
# Install dependencies for all workspaces except `pkg-c`
|
||||
$ bun install --filter '!pkg-c'
|
||||
|
||||
# Install dependencies for packages in `./packages` (`pkg-a`, `pkg-b`, `pkg-c`)
|
||||
$ bun install --filter './packages/*'
|
||||
|
||||
# Same as above, but exclude the root package.json
|
||||
$ bun install --filter '!./' --filter './packages/*'
|
||||
```
|
||||
|
||||
Similarly, `bun outdated` will display outdated dependencies for all packages in the monorepo, and `--filter` can be used to restrict the command to a subset of the packages:
|
||||
|
||||
```bash
|
||||
# Display outdated dependencies for workspaces starting with `pkg-`
|
||||
$ bun outdated --filter 'pkg-*'
|
||||
|
||||
# Display outdated dependencies for only the root package.json
|
||||
$ bun outdated --filter './'
|
||||
```
|
||||
|
||||
For more information on both these commands, see [`bun install`](https://bun.sh/docs/cli/install) and [`bun outdated`](https://bun.sh/docs/cli/outdated).
|
||||
|
||||
## Running scripts with `--filter`
|
||||
|
||||
Use the `--filter` flag to execute scripts in multiple packages at once:
|
||||
|
||||
```bash
|
||||
bun --filter <pattern> <script>
|
||||
@@ -71,7 +24,19 @@ bun --filter '*' dev
|
||||
Both commands will be run in parallel, and you will see a nice terminal UI showing their respective outputs:
|
||||

|
||||
|
||||
### Running scripts in workspaces
|
||||
|
||||
Filters respect your [workspace configuration](https://bun.sh/docs/install/workspaces): If you have a `package.json` file that specifies which packages are part of the workspace,
|
||||
`--filter` will be restricted to only these packages. Also, in a workspace you can use `--filter` to run scripts in packages that are located anywhere in the workspace:
|
||||
@@ -85,6 +50,8 @@ Filters respect your [workspace configuration](https://bun.sh/docs/install/works
|
||||
bun run --filter foo myscript
|
||||
```
|
||||
|
||||
### Dependency Order
|
||||
|
||||
Bun will respect package dependency order when running scripts. Say you have a package `foo` that depends on another package `bar` in your workspace, and both packages have a `build` script. When you run `bun --filter '*' build`, you will notice that `foo` will only start running once `bar` is done.
|
||||
|
||||
### Cyclic Dependencies
|
||||
|
||||
@@ -15,10 +15,10 @@ If you're using Ubuntu 20.04, here's how to install a [newer kernel](https://wik
|
||||
|
||||
```bash
|
||||
# If this returns a version >= 5.6, you don't need to do anything
|
||||
$ uname -r
|
||||
|
||||
# Install the official Ubuntu hardware enablement kernel
|
||||
$ sudo apt install --install-recommends linux-generic-hwe-20.04
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
@@ -81,20 +81,6 @@ Bun supports `"workspaces"` in package.json. For complete documentation refer to
|
||||
}
|
||||
```
|
||||
|
||||
## Installing dependencies for specific packages
|
||||
|
||||
In a monorepo, you can install the dependencies for a subset of packages using the `--filter` flag.
|
||||
|
||||
```bash
|
||||
# Install dependencies for all workspaces except `pkg-c`
|
||||
$ bun install --filter '!pkg-c'
|
||||
|
||||
# Install dependencies for only `pkg-a` in `./packages/pkg-a`
|
||||
$ bun install --filter './packages/pkg-a'
|
||||
```
|
||||
|
||||
For more information on filtering with `bun install`, refer to [Package Manager > Filtering](https://bun.sh/docs/cli/install#bun-install-and-bun-outdated)
|
||||
|
||||
## Overrides and resolutions
|
||||
|
||||
Bun supports npm's `"overrides"` and Yarn's `"resolutions"` in `package.json`. These are mechanisms for specifying a version range for _metadependencies_—the dependencies of your dependencies. Refer to [Package manager > Overrides and resolutions](https://bun.sh/docs/install/overrides) for complete documentation.
|
||||
@@ -229,7 +215,7 @@ jobs:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
- name: Install dependencies
|
||||
run: bun install
|
||||
- name: Build app
|
||||
|
||||
@@ -59,5 +59,3 @@ If you want to do the same, but exclude the `./apps/api` workspace:
|
||||
```sh
|
||||
$ bun outdated --filter './apps/*' --filter '!./apps/api'
|
||||
```
|
||||
|
||||
Refer to [Package Manager > Filtering](https://bun.sh/docs/cli/filter#bun-install-and-bun-outdated) for more information on `--filter`.
|
||||
|
||||
@@ -153,7 +153,7 @@ $ bun run --bun vite
|
||||
|
||||
### Filtering
|
||||
|
||||
In monorepos containing multiple packages, you can use the `--filter` argument to execute scripts in many packages at once.
|
||||
|
||||
Use `bun run --filter <name_pattern> <script>` to execute `<script>` in all packages whose name matches `<name_pattern>`.
|
||||
For example, if you have subdirectories containing packages named `foo`, `bar` and `baz`, running
|
||||
@@ -164,7 +164,7 @@ bun run --filter 'ba*' <script>
|
||||
|
||||
will execute `<script>` in both `bar` and `baz`, but not in `foo`.
|
||||
|
||||
Find more details in the docs page for [filter](https://bun.sh/docs/cli/filter#running-scripts-with-filter).
|
||||
|
||||
## `bun run -` to pipe code from stdin
|
||||
|
||||
|
||||
@@ -4,18 +4,18 @@ name: Create a Discord bot
|
||||
|
||||
Discord.js works out of the box with Bun. Let's write a simple bot. First create a directory and initialize it with `bun init`.
|
||||
|
||||
```sh
$ mkdir my-bot
$ cd my-bot
$ bun init
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Now install Discord.js.
|
||||
|
||||
```sh
$ bun add discord.js
|
||||
```
|
||||
|
||||
---
|
||||
@@ -67,7 +67,7 @@ client.login(process.env.DISCORD_TOKEN);
|
||||
|
||||
Now we can run our bot with `bun run`. It may take a several seconds for the client to initialize the first time you run the file.
|
||||
|
||||
```sh
|
||||
$ bun run bot.ts
|
||||
Ready! Logged in as my-bot#1234
|
||||
```
|
||||
|
||||
@@ -17,7 +17,7 @@ export default app;
|
||||
|
||||
Use `create-hono` to get started with one of Hono's project templates. Select `bun` when prompted for a template.
|
||||
|
||||
```sh
|
||||
$ bun create hono myapp
|
||||
✔ Which template do you want to use? › bun
|
||||
cloned honojs/starter#main to /path/to/myapp
|
||||
@@ -30,7 +30,7 @@ $ bun install
|
||||
|
||||
Then start the dev server and visit [localhost:3000](http://localhost:3000).
|
||||
|
||||
```sh
|
||||
$ bun run dev
|
||||
```
|
||||
|
||||
|
||||
@@ -8,18 +8,18 @@ MongoDB and Mongoose work out of the box with Bun. This guide assumes you've alr
|
||||
|
||||
Once MongoDB is running, create a directory and initialize it with `bun init`.
|
||||
|
||||
```sh
$ mkdir mongoose-app
$ cd mongoose-app
$ bun init
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then add Mongoose as a dependency.
|
||||
|
||||
```sh
$ bun add mongoose
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -17,16 +17,6 @@ Creating a new Next.js app in /path/to/my-app.
|
||||
|
||||
---
|
||||
|
||||
You can specify a starter template using the `--example` flag.
|
||||
|
||||
```sh
|
||||
$ bun create next-app --example with-supabase
|
||||
✔ What is your project named? … my-app
|
||||
...
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
To start the dev server with Bun, run `bun --bun run dev` from the project root.
|
||||
|
||||
```sh
|
||||
|
||||
@@ -16,17 +16,17 @@ As an example, let's deploy a simple Express HTTP server to Render.
|
||||
|
||||
Create a new GitHub repo named `myapp`. Git clone it locally.
|
||||
|
||||
```sh
$ git clone git@github.com:my-github-username/myapp.git
$ cd myapp
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Add the Express library.
|
||||
|
||||
```sh
$ bun add express
|
||||
```
|
||||
|
||||
---
|
||||
@@ -52,10 +52,10 @@ app.listen(port, () => {
|
||||
|
||||
Commit your changes and push to GitHub.
|
||||
|
||||
```sh
$ git add app.ts bun.lockb package.json
$ git commit -m "Create simple Express app"
$ git push origin main
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -10,8 +10,8 @@ Don't already have an account and Sentry project established? Head over to [sent
|
||||
|
||||
To start using Sentry with Bun, first install the Sentry Bun SDK.
|
||||
|
||||
```sh
$ bun add @sentry/bun
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -5,22 +5,18 @@ name: Hot reload an HTTP server
|
||||
Bun supports the [`--hot`](https://bun.sh/docs/runtime/hot#hot-mode) flag to run a file with hot reloading enabled. When any module or file changes, Bun re-runs the file.
|
||||
|
||||
```sh
|
||||
$ bun --hot run index.ts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Bun detects when you are running an HTTP server with `Bun.serve()`. It reloads your fetch handler when source files change, _without_ restarting the `bun` process. This makes hot reloads nearly instantaneous.
|
||||
|
||||
{% callout %}
|
||||
Note that this doesn't reload the page on your browser.
|
||||
{% /callout %}
|
||||
|
||||
```ts
|
||||
Bun.serve({
|
||||
port: 3000,
|
||||
fetch(req) {
|
||||
return new Response("Hello world");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
name: Add an optional dependency
|
||||
---
|
||||
|
||||
To add an npm package as an optional dependency, use the `--optional` flag.
|
||||
|
||||
```sh
|
||||
$ bun add zod --optional
|
||||
|
||||
@@ -2,43 +2,16 @@
|
||||
name: Add a peer dependency
|
||||
---
|
||||
|
||||
|
||||
To add an npm package as a peer dependency, use the `--peer` flag.
|
||||
|
||||
```sh
|
||||
$ bun add @types/bun --peer
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
This will add the package to `peerDependencies` in `package.json`.
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"peerDependencies": {
|
||||
+ "@types/bun": "^1.0.0"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Running `bun install` will install peer dependencies by default, unless marked optional in `peerDependenciesMeta`.
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"peerDependencies": {
|
||||
"@types/bun": "^1.0.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
+ "@types/bun": {
|
||||
+ "optional": true
|
||||
+ }
|
||||
}
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Package manager](https://bun.sh/docs/cli/install) for complete documentation of Bun's package manager.
|
||||
|
||||
@@ -22,7 +22,7 @@ This will add the package to `dependencies` in `package.json`. By default, the `
|
||||
|
||||
---
|
||||
|
||||
To "pin" to an exact version of the package, use `--exact`. This will add the package to `dependencies` without the `^`, pinning your project to the exact version you installed.
|
||||
|
||||
```sh
|
||||
$ bun add zod --exact
|
||||
|
||||
@@ -13,7 +13,7 @@ jobs:
|
||||
steps:
|
||||
# ...
|
||||
- uses: actions/checkout@v4
|
||||
+ - uses: oven-sh/setup-bun@v2
|
||||
|
||||
# run any `bun` or `bunx` command
|
||||
+ - run: bun install
|
||||
@@ -31,9 +31,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# ...
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
+ with:
|
||||
+ version: "latest" # or "canary"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -2,12 +2,6 @@
|
||||
name: Configure git to diff Bun's lockb lockfile
|
||||
---
|
||||
|
||||
{% callout %}
|
||||
Bun v1.1.39 introduced `bun.lock`, a JSONC formatted lockfile. `bun.lock` is human-readable and git-diffable without configuration, at no cost to performance. [**Learn more.**](https://bun.sh/docs/install/lockfile#text-based-lockfile)
|
||||
{% /callout %}
|
||||
|
||||
---
|
||||
|
||||
To teach `git` how to generate a human-readable diff of Bun's binary lockfile format (`.lockb`), add the following to your local or global `.gitattributes` file:
|
||||
|
||||
```js
|
||||
|
||||
@@ -23,6 +23,8 @@ To allow Bun to execute lifecycle scripts for a specific package, add the packag
|
||||
Note that this only allows lifecycle scripts for the specific package listed in `trustedDependencies`, _not_ the dependencies of that dependency!
|
||||
{% /callout %}
|
||||
|
||||
<!-- Bun maintains an allow-list of popular packages containing `postinstall` scripts that are known to be safe. To run lifecycle scripts for packages that aren't on this list, add the package to `trustedDependencies` in your package.json. -->
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"name": "my-app",
|
||||
|
||||
@@ -2,12 +2,6 @@
|
||||
name: Generate a human-readable lockfile
|
||||
---
|
||||
|
||||
{% callout %}
|
||||
Bun v1.1.39 introduced `bun.lock`, a JSONC formatted lockfile. `bun.lock` is human-readable and git-diffable without configuration, at no cost to performance. [**Learn more.**](https://bun.sh/docs/install/lockfile#text-based-lockfile)
|
||||
{% /callout %}
|
||||
|
||||
---
|
||||
|
||||
By default Bun generates a binary `bun.lockb` file when you run `bun install`. In some cases, it's preferable to generate a human-readable lockfile instead.
|
||||
|
||||
---
|
||||
|
||||
@@ -44,11 +44,9 @@ console.log(values);
|
||||
console.log(positionals);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
then it outputs
|
||||
|
||||
```sh
|
||||
$ bun run cli.ts --flag1 --flag2 value
|
||||
{
|
||||
flag1: true,
|
||||
|
||||
@@ -4,9 +4,9 @@ name: Define and replace static globals & constants
|
||||
|
||||
The `--define` flag lets you declare statically-analyzable constants and globals. It replace all usages of an identifier or property in a JavaScript or TypeScript file with a constant value. This feature is supported at runtime and also in `bun build`. This is sort of similar to `#define` in C/C++, except for JavaScript.
|
||||
|
||||
```sh
$ bun --define process.env.NODE_ENV="'production'" src/index.ts # Runtime
$ bun build --define process.env.NODE_ENV="'production'" src/index.ts # Build
|
||||
```
|
||||
|
||||
---
|
||||
@@ -25,12 +25,12 @@ if (process.env.NODE_ENV === "production") {
|
||||
|
||||
Before the code reaches the JavaScript engine, Bun replaces `process.env.NODE_ENV` with `"production"`.
|
||||
|
||||
```ts-diff
+ if ("production" === "production") {
console.log("Production mode");
} else {
console.log("Development mode");
}
|
||||
```
|
||||
|
||||
---
|
||||
@@ -39,12 +39,12 @@ It doesn't stop there. Bun's optimizing transpiler is smart enough to do some ba
|
||||
|
||||
Since `"production" === "production"` is always `true`, Bun replaces the entire expression with the `true` value.
|
||||
|
||||
```ts-diff
+ if (true) {
console.log("Production mode");
} else {
console.log("Development mode");
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -2,15 +2,22 @@
|
||||
name: Delete a file
|
||||
---
|
||||
|
||||
The `Bun.file()` function accepts a path and returns a `BunFile` instance. Use the `.delete()` method to delete the file.
|
||||
To synchronously delete a file with Bun, use the `unlinkSync` function from the [`node:fs`](https://nodejs.org/api/fs.html#fs_fs_unlink_path_callback) module. (Currently, there is no `Bun` API for deleting files.)
|
||||
|
||||
```ts
|
||||
const path = "/path/to/file.txt";
|
||||
const file = Bun.file(path);
|
||||
import { unlinkSync } from "node:fs";
|
||||
|
||||
await file.delete();
|
||||
const path = "/path/to/file.txt";
|
||||
unlinkSync(path);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > File I/O](https://bun.sh/docs/api/file-io#reading-files-bun-file) for complete documentation of `Bun.file()`.
|
||||
To remove a file asynchronously, use the `unlink` function from the [`node:fs/promises`](https://nodejs.org/api/fs.html#fs_fspromises_unlink_path) module.
|
||||
|
||||
```ts
|
||||
import { unlink } from "node:fs/promises";
|
||||
|
||||
const path = "/path/to/file.txt";
|
||||
await unlink(path);
|
||||
```
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`, or the path defined by the environment variable `BUN_INSTALL_CACHE_DIR`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
|
||||
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
|
||||
|
||||
{% details summary="Configuring cache behavior (bunfig.toml)" %}
|
||||
|
||||
@@ -15,8 +15,6 @@ disable = false
|
||||
disableManifest = false
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Minimizing re-downloads
|
||||
|
||||
Bun strives to avoid re-downloading packages multiple times. When installing a package, if the cache already contains a version in the range specified by `package.json`, Bun will use the cached package instead of downloading it again.
|
||||
@@ -35,14 +33,14 @@ Once a package is downloaded into the cache, Bun still needs to copy those files
|
||||
|
||||
## Saving disk space
|
||||
|
||||
Since Bun uses hardlinks to "copy" a module into a project's `node_modules` directory on Linux and Windows, the contents of the package only exist in a single location on disk, greatly reducing the amount of disk space dedicated to `node_modules`.
|
||||
Since Bun uses hardlinks to "copy" a module into a project's `node_modules` directory on Linux, the contents of the package only exist in a single location on disk, greatly reducing the amount of disk space dedicated to `node_modules`.
|
||||
|
||||
This benefit also applies to macOS, but there are exceptions. It uses `clonefile` which is copy-on-write, meaning it will not occupy disk space, but it will count towards drive's limit. This behavior is useful if something attempts to patch `node_modules/*`, so it's impossible to affect other installations.
|
||||
|
||||
{% details summary="Installation strategies" %}
|
||||
This behavior is configurable with the `--backend` flag, which is respected by all of Bun's package management commands.
|
||||
|
||||
- **`hardlink`**: Default on Linux and Windows.
|
||||
- **`hardlink`**: Default on Linux.
|
||||
- **`clonefile`** Default on macOS.
|
||||
- **`clonefile_each_dir`**: Similar to `clonefile`, except it clones each file individually per directory. It is only available on macOS and tends to perform slower than `clonefile`.
|
||||
- **`copyfile`**: The fallback used when any of the above fail. It is the slowest option. On macOS, it uses `fcopyfile()`; on Linux it uses `copy_file_range()`.
|
||||
|
||||
@@ -62,18 +62,12 @@ To exclude dependency types from installing, use `--omit` with `dev`, `optional`
|
||||
$ bun install --omit=dev --omit=optional
|
||||
```
|
||||
|
||||
To perform a dry run (i.e. don't actually install anything or update the lockfile):
|
||||
To perform a dry run (i.e. don't actually install anything):
|
||||
|
||||
```bash
|
||||
$ bun install --dry-run
|
||||
```
|
||||
|
||||
To generate a lockfile without installing packages:
|
||||
|
||||
```bash
|
||||
$ bun install --lockfile-only
|
||||
```
|
||||
|
||||
To modify logging verbosity:
|
||||
|
||||
```bash
|
||||
@@ -143,12 +137,6 @@ To add a package as an optional dependency (`"optionalDependencies"`):
|
||||
$ bun add --optional lodash
|
||||
```
|
||||
|
||||
To add a package as a peer dependency (`"peerDependencies"`):
|
||||
|
||||
```bash
|
||||
$ bun add --peer @types/bun
|
||||
```
|
||||
|
||||
To install a package globally:
|
||||
|
||||
```bash
|
||||
|
||||
@@ -49,18 +49,6 @@ Packages, metadata for those packages, the hoisted install order, dependencies f
|
||||
|
||||
It uses linear arrays for all data. [Packages](https://github.com/oven-sh/bun/blob/be03fc273a487ac402f19ad897778d74b6d72963/src/install/install.zig#L1825) are referenced by an auto-incrementing integer ID or a hash of the package name. Strings longer than 8 characters are de-duplicated. Prior to saving on disk, the lockfile is garbage-collected & made deterministic by walking the package tree and cloning the packages in dependency order.
|
||||
|
||||
#### Generate a lockfile without installing?
|
||||
|
||||
To generate a lockfile without installing to `node_modules` you can use the `--lockfile-only` flag. The lockfile will always be saved to disk, even if it is up-to-date with the `package.json`(s) for your project.
|
||||
|
||||
```bash
|
||||
$ bun install --lockfile-only
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Note** - using `--lockfile-only` will still populate the global install cache with registry metadata and git/tarball dependencies.
|
||||
{% /callout %}
|
||||
|
||||
#### Can I opt out?
|
||||
|
||||
To install without creating a lockfile:
|
||||
@@ -84,8 +72,6 @@ $ bun install --yarn
|
||||
print = "yarn"
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### Text-based lockfile
|
||||
|
||||
Bun v1.1.39 introduced `bun.lock`, a JSONC formatted lockfile. `bun.lock` is human-readable and git-diffable without configuration, at [no cost to performance](https://bun.sh/blog/bun-lock-text-lockfile#cached-bun-install-gets-30-faster).
|
||||
@@ -100,10 +86,12 @@ $ head -n3 bun.lock
|
||||
"workspaces": {
|
||||
```
|
||||
|
||||
Once `bun.lock` is generated, Bun will use it for all subsequent installs and updates through commands that read and modify the lockfile. If both lockfiles exist, `bun.lock` will be chosen over `bun.lockb`.
|
||||
Once `bun.lock` is generated, Bun will use it for all subsequent installs and updates through commands that read and modify the lockfile. If both lockfiles exist, `bun.lock` will be chosen over `bun.lockb`.
|
||||
|
||||
Bun v1.2.0 will switch the default lockfile format to `bun.lock`.
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
{% details summary="Configuring lockfile" %}
|
||||
|
||||
```toml
|
||||
|
||||
@@ -53,16 +53,6 @@ Each workspace has its own `package.json`. When referencing other packages in t
|
||||
}
|
||||
```
|
||||
|
||||
`bun install` will install dependencies for all workspaces in the monorepo, de-duplicating packages if possible. If you only want to install dependencies for specific workspaces, you can use the `--filter` flag.
|
||||
|
||||
```bash
|
||||
# Install dependencies for all workspaces starting with `pkg-` except for `pkg-c`
|
||||
$ bun install --filter "pkg-*" --filter "!pkg-c"
|
||||
|
||||
# Paths can also be used. This is equivalent to the command above.
|
||||
$ bun install --filter "./packages/pkg-*" --filter "!pkg-c" # or --filter "!./packages/pkg-c"
|
||||
```
|
||||
|
||||
Workspaces have a couple major benefits.
|
||||
|
||||
- **Code can be split into logical parts.** If one package relies on another, you can simply add it as a dependency in `package.json`. If package `b` depends on `a`, `bun install` will install your local `packages/a` directory into `node_modules` instead of downloading it from the npm registry.
|
||||
|
||||
@@ -311,9 +311,6 @@ export default {
|
||||
page("api/streams", "Streams", {
|
||||
description: `Reading, writing, and manipulating streams of data in Bun.`,
|
||||
}), // "`Bun.serve`"),
|
||||
page("api/s3", "S3 Object Storage", {
|
||||
description: `Bun provides fast, native bindings for interacting with S3-compatible object storage services.`,
|
||||
}),
|
||||
page("api/file-io", "File I/O", {
|
||||
description: `Read and write files fast with Bun's heavily optimized file system API.`,
|
||||
}), // "`Bun.write`"),
|
||||
|
||||
@@ -30,9 +30,9 @@ process.env.FOO = "hello";
|
||||
Bun supports `--env-file` to override which specific `.env` file to load. You can use `--env-file` when running scripts in bun's runtime, or when running package.json scripts.
|
||||
|
||||
```sh
|
||||
$ bun --env-file=.env.1 src/index.ts
|
||||
bun --env-file=.env.1 src/index.ts
|
||||
|
||||
$ bun --env-file=.env.abc --env-file=.env.def run build
|
||||
bun --env-file=.env.abc --env-file=.env.def run build
|
||||
```
|
||||
|
||||
### Quotation marks
|
||||
|
||||
@@ -53,7 +53,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:events`](https://nodejs.org/api/events.html)
|
||||
|
||||
🟢 Fully implemented. `EventEmitterAsyncResource` uses `AsyncResource` underneath.
|
||||
🟡 `events.addAbortListener` & `events.getMaxListeners` do not support (web api) `EventTarget`
|
||||
|
||||
### [`node:fs`](https://nodejs.org/api/fs.html)
|
||||
|
||||
@@ -157,11 +157,11 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:v8`](https://nodejs.org/api/v8.html)
|
||||
|
||||
🟡 `writeHeapSnapshot` and `getHeapSnapshot` are implemented. `serialize` and `deserialize` use JavaScriptCore's wire format instead of V8's. Other methods are not implemented. For profiling, use [`bun:jsc`](https://bun.sh/docs/project/benchmarking#bunjsc) instead.
|
||||
🔴 `serialize` and `deserialize` use JavaScriptCore's wire format instead of V8's. Otherwise, not implemented. For profiling, use [`bun:jsc`](https://bun.sh/docs/project/benchmarking#bunjsc) instead.
|
||||
|
||||
### [`node:vm`](https://nodejs.org/api/vm.html)
|
||||
|
||||
🟡 Core functionality works, but experimental VM ES modules are not implemented, including `vm.Module`, `vm.SourceTextModule`, `vm.SyntheticModule`,`importModuleDynamically`, and `vm.measureMemory`. Options like `timeout`, `breakOnSigint`, `cachedData` are not implemented yet.
|
||||
🟡 Core functionality works, but experimental VM ES modules are not implemented, including `vm.Module`, `vm.SourceTextModule`, `vm.SyntheticModule`,`importModuleDynamically`, and `vm.measureMemory`. Options like `timeout`, `breakOnSigint`, `cachedData` are not implemented yet. There is a bug with `this` value for contextified options not having the correct prototype.
|
||||
|
||||
### [`node:wasi`](https://nodejs.org/api/wasi.html)
|
||||
|
||||
@@ -341,7 +341,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
|
||||
|
||||
### [`process`](https://nodejs.org/api/process.html)
|
||||
|
||||
🟡 Missing `initgroups` `allowedNodeEnvironmentFlags` `getActiveResourcesInfo` `setActiveResourcesInfo` `moduleLoadList` `setSourceMapsEnabled`. `process.binding` is partially implemented.
|
||||
🟡 Missing `domain` `initgroups` `setegid` `seteuid` `setgid` `setgroups` `setuid` `allowedNodeEnvironmentFlags` `getActiveResourcesInfo` `setActiveResourcesInfo` `moduleLoadList` `setSourceMapsEnabled`. `process.binding` is partially implemented.
|
||||
|
||||
### [`queueMicrotask()`](https://developer.mozilla.org/en-US/docs/Web/API/queueMicrotask)
|
||||
|
||||
|
||||
@@ -102,7 +102,7 @@ The default handling of non-zero exit codes can be configured by calling `.nothr
|
||||
import { $ } from "bun";
|
||||
// shell promises will not throw, meaning you will have to
|
||||
// check for `exitCode` manually on every shell command.
|
||||
$.nothrow(); // equivalent to $.throws(false)
|
||||
$.nothrow(); // equivalent to $.throws(false)
|
||||
|
||||
// default behavior, non-zero exit codes will throw an error
|
||||
$.throws(true);
|
||||
|
||||
51
oxlint.json
51
oxlint.json
@@ -1,51 +0,0 @@
|
||||
{
|
||||
"$schema": "https://raw.githubusercontent.com/oxc-project/oxc/refs/heads/main/npm/oxlint/configuration_schema.json",
|
||||
"categories": {
|
||||
"correctness": "warn" // TODO: gradually fix bugs and turn this to error
|
||||
},
|
||||
"rules": {
|
||||
"const-comparisons": "off", // TODO: there's a bug when comparing private identifiers. Re-enable once it's fixed.
|
||||
"no-cond-assign": "error",
|
||||
"no-const-assign": "error",
|
||||
"no-debugger": "error",
|
||||
"no-dupe-class-members": "error",
|
||||
"no-dupe-keys": "error",
|
||||
"no-empty-pattern": "error",
|
||||
"import/no-duplicates": "error",
|
||||
|
||||
"no-useless-escape": "off" // there's a lot of these. Should be fixed eventually.
|
||||
},
|
||||
"ignorePatterns": [
|
||||
"vendor",
|
||||
"build",
|
||||
"test/snapshots/**",
|
||||
"bench/react-hello-world/*.js",
|
||||
|
||||
"test/js/node/**/parallel/**",
|
||||
"test/js/node/test/fixtures", // full of JS with intentional syntax errors
|
||||
"test/snippets/**",
|
||||
"test/regression/issue/14477/*.tsx",
|
||||
"test/js/**/*bad.js",
|
||||
"test/bundler/transpiler/decorators.test.ts", // uses `arguments` as decorator
|
||||
"test/bundler/native-plugin.test.ts", // parser doesn't handle import metadata
|
||||
"test/bundler/transpiler/with-statement-works.js" // parser doesn't allow `with` statement
|
||||
],
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["test/**", "examples/**", "packages/bun-internal/test/runners/**"],
|
||||
"rules": {
|
||||
"no-unused-vars": "off",
|
||||
"no-unused-private-class-members": "off",
|
||||
"no-unnecessary-await": "off"
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["test/**", "bench/**"],
|
||||
"rules": {
|
||||
"no-shadow-restricted-names": "off",
|
||||
"no-empty-file": "off",
|
||||
"no-unnecessary-await": "off"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.1.44",
|
||||
"version": "1.1.42",
|
||||
"workspaces": [
|
||||
"./packages/bun-types"
|
||||
],
|
||||
@@ -49,8 +49,8 @@
|
||||
"fmt": "bun run prettier",
|
||||
"fmt:cpp": "bun run clang-format",
|
||||
"fmt:zig": "bun run zig-format",
|
||||
"lint": "oxlint --config oxlint.json",
|
||||
"lint:fix": "oxlint --config oxlint.json --fix",
|
||||
"lint": "eslint './**/*.d.ts' --cache",
|
||||
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
|
||||
"test": "node scripts/runner.node.mjs --exec-path ./build/debug/bun-debug",
|
||||
"test:release": "node scripts/runner.node.mjs --exec-path ./build/release/bun",
|
||||
"banned": "bun packages/bun-internal-test/src/linter.ts",
|
||||
|
||||
768
packages/bun-types/bun.d.ts
vendored
768
packages/bun-types/bun.d.ts
vendored
@@ -17,7 +17,6 @@ declare module "bun" {
|
||||
import type { FFIFunctionCallableSymbol } from "bun:ffi";
|
||||
import type { Encoding as CryptoEncoding } from "crypto";
|
||||
import type { CipherNameAndProtocol, EphemeralKeyInfo, PeerCertificate } from "tls";
|
||||
import type { Stats } from "node:fs";
|
||||
interface Env {
|
||||
NODE_ENV?: string;
|
||||
/**
|
||||
@@ -1035,10 +1034,6 @@ declare module "bun" {
|
||||
errors: number;
|
||||
totalCount: number;
|
||||
};
|
||||
|
||||
ADDRCONFIG: number;
|
||||
ALL: number;
|
||||
V4MAPPED: number;
|
||||
};
|
||||
|
||||
interface DNSLookup {
|
||||
@@ -1230,231 +1225,46 @@ declare module "bun" {
|
||||
* Deletes the file.
|
||||
*/
|
||||
unlink(): Promise<void>;
|
||||
|
||||
/**
|
||||
* Deletes the file. ( same as unlink )
|
||||
*/
|
||||
delete(): Promise<void>
|
||||
|
||||
/**
|
||||
* Provides useful information about the file.
|
||||
*/
|
||||
stat(): Promise<Stats>
|
||||
}
|
||||
interface NetworkSink extends FileSink {
|
||||
/**
|
||||
* Write a chunk of data to the network.
|
||||
*
|
||||
* If the network is not writable yet, the data is buffered.
|
||||
*/
|
||||
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
|
||||
/**
|
||||
* Flush the internal buffer, committing the data to the network.
|
||||
*/
|
||||
flush(): number | Promise<number>;
|
||||
/**
|
||||
* Finish the upload. This also flushes the internal buffer.
|
||||
*/
|
||||
end(error?: Error): number | Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the stat of the file.
|
||||
*/
|
||||
stat(): Promise<Stats>;
|
||||
}
|
||||
|
||||
var S3Client: S3Client;
|
||||
|
||||
/**
|
||||
* Creates a new S3File instance for working with a single file.
|
||||
*
|
||||
* @param path The path or key of the file
|
||||
* @param options S3 configuration options
|
||||
* @returns `S3File` instance for the specified path
|
||||
*
|
||||
* @example
|
||||
* import { s3 } from "bun";
|
||||
* const file = s3("my-file.txt", {
|
||||
* bucket: "my-bucket",
|
||||
* accessKeyId: "your-access-key",
|
||||
* secretAccessKey: "your-secret-key"
|
||||
* });
|
||||
*
|
||||
* // Read the file
|
||||
* const content = await file.text();
|
||||
*
|
||||
* @example
|
||||
* // Using s3:// protocol
|
||||
* const file = s3("s3://my-bucket/my-file.txt", {
|
||||
* accessKeyId: "your-access-key",
|
||||
* secretAccessKey: "your-secret-key"
|
||||
* });
|
||||
*/
|
||||
function s3(path: string | URL, options?: S3Options): S3File;
|
||||
|
||||
/**
|
||||
* Configuration options for S3 operations
|
||||
*/
|
||||
interface S3Options extends BlobPropertyBag {
|
||||
interface S3FileOptions extends BlobPropertyBag {
|
||||
/**
|
||||
* The Access Control List (ACL) policy for the file.
|
||||
* Controls who can access the file and what permissions they have.
|
||||
*
|
||||
* @example
|
||||
* // Setting public read access
|
||||
* const file = s3("public-file.txt", {
|
||||
* acl: "public-read",
|
||||
* bucket: "my-bucket"
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // Using with presigned URLs
|
||||
* const url = file.presign({
|
||||
* acl: "public-read",
|
||||
* expiresIn: 3600
|
||||
* });
|
||||
*/
|
||||
acl?:
|
||||
| "private"
|
||||
| "public-read"
|
||||
| "public-read-write"
|
||||
| "aws-exec-read"
|
||||
| "authenticated-read"
|
||||
| "bucket-owner-read"
|
||||
| "bucket-owner-full-control"
|
||||
| "log-delivery-write";
|
||||
|
||||
/**
|
||||
* The S3 bucket name. Can be set via `S3_BUCKET` or `AWS_BUCKET` environment variables.
|
||||
*
|
||||
* @example
|
||||
* // Using explicit bucket
|
||||
* const file = s3("my-file.txt", { bucket: "my-bucket" });
|
||||
*
|
||||
* @example
|
||||
* // Using environment variables
|
||||
* // With S3_BUCKET=my-bucket in .env
|
||||
* const file = s3("my-file.txt");
|
||||
* The bucket to use for the S3 client. by default will use the `S3_BUCKET` and `AWS_BUCKET` environment variable, or deduce as first part of the path.
|
||||
*/
|
||||
bucket?: string;
|
||||
|
||||
/**
|
||||
* The AWS region. Can be set via `S3_REGION` or `AWS_REGION` environment variables.
|
||||
*
|
||||
* @example
|
||||
* const file = s3("my-file.txt", {
|
||||
* bucket: "my-bucket",
|
||||
* region: "us-west-2"
|
||||
* });
|
||||
* The region to use for the S3 client. By default, it will use the `S3_REGION` and `AWS_REGION` environment variable.
|
||||
*/
|
||||
region?: string;
|
||||
|
||||
/**
|
||||
* The access key ID for authentication.
|
||||
* Can be set via `S3_ACCESS_KEY_ID` or `AWS_ACCESS_KEY_ID` environment variables.
|
||||
* The access key ID to use for the S3 client. By default, it will use the `S3_ACCESS_KEY_ID` and `AWS_ACCESS_KEY_ID` environment variable.
|
||||
*/
|
||||
accessKeyId?: string;
|
||||
|
||||
/**
|
||||
* The secret access key for authentication.
|
||||
* Can be set via `S3_SECRET_ACCESS_KEY` or `AWS_SECRET_ACCESS_KEY` environment variables.
|
||||
* The secret access key to use for the S3 client. By default, it will use the `S3_SECRET_ACCESS_KEY` and `AWS_SECRET_ACCESS_KEY` environment variable.
|
||||
*/
|
||||
secretAccessKey?: string;
|
||||
|
||||
/**
|
||||
* Optional session token for temporary credentials.
|
||||
* Can be set via `S3_SESSION_TOKEN` or `AWS_SESSION_TOKEN` environment variables.
|
||||
*
|
||||
* @example
|
||||
* // Using temporary credentials
|
||||
* const file = s3("my-file.txt", {
|
||||
* accessKeyId: tempAccessKey,
|
||||
* secretAccessKey: tempSecretKey,
|
||||
* sessionToken: tempSessionToken
|
||||
* });
|
||||
*/
|
||||
sessionToken?: string;
|
||||
|
||||
/**
|
||||
* The S3-compatible service endpoint URL.
|
||||
* Can be set via `S3_ENDPOINT` or `AWS_ENDPOINT` environment variables.
|
||||
*
|
||||
* @example
|
||||
* // AWS S3
|
||||
* const file = s3("my-file.txt", {
|
||||
* endpoint: "https://s3.us-east-1.amazonaws.com"
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // Cloudflare R2
|
||||
* const file = s3("my-file.txt", {
|
||||
* endpoint: "https://<account-id>.r2.cloudflarestorage.com"
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // DigitalOcean Spaces
|
||||
* const file = s3("my-file.txt", {
|
||||
* endpoint: "https://<region>.digitaloceanspaces.com"
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // MinIO (local development)
|
||||
* const file = s3("my-file.txt", {
|
||||
* endpoint: "http://localhost:9000"
|
||||
* });
|
||||
* The endpoint to use for the S3 client. Defaults to `https://s3.{region}.amazonaws.com`, it will also use the `S3_ENDPOINT` and `AWS_ENDPOINT` environment variable.
|
||||
*/
|
||||
endpoint?: string;
|
||||
|
||||
/**
|
||||
* The size of each part in multipart uploads (in bytes).
|
||||
* - Minimum: 5 MiB
|
||||
* - Maximum: 5120 MiB
|
||||
* - Default: 5 MiB
|
||||
*
|
||||
* @example
|
||||
* // Configuring multipart uploads
|
||||
* const file = s3("large-file.dat", {
|
||||
* partSize: 10 * 1024 * 1024, // 10 MiB parts
|
||||
* queueSize: 4 // Upload 4 parts in parallel
|
||||
* });
|
||||
*
|
||||
* const writer = file.writer();
|
||||
* // ... write large file in chunks
|
||||
* The size of each part in MiB. Minimum and Default is 5 MiB and maximum is 5120 MiB.
|
||||
*/
|
||||
partSize?: number;
|
||||
|
||||
/**
|
||||
* Number of parts to upload in parallel for multipart uploads.
|
||||
* - Default: 5
|
||||
* - Maximum: 255
|
||||
*
|
||||
* Increasing this value can improve upload speeds for large files
|
||||
* but will use more memory.
|
||||
* The number of parts to upload in parallel. Default is 5 and maximum is 255. This can speed up the upload of large files but will also use more memory.
|
||||
*/
|
||||
queueSize?: number;
|
||||
|
||||
/**
|
||||
* Number of retry attempts for failed uploads.
|
||||
* - Default: 3
|
||||
* - Maximum: 255
|
||||
*
|
||||
* @example
|
||||
* // Setting retry attempts
|
||||
* const file = s3("my-file.txt", {
|
||||
* retry: 5 // Retry failed uploads up to 5 times
|
||||
* });
|
||||
* The number of times to retry the upload if it fails. Default is 3 and maximum is 255.
|
||||
*/
|
||||
retry?: number;
|
||||
|
||||
/**
|
||||
* The Content-Type of the file.
|
||||
* Automatically set based on file extension when possible.
|
||||
*
|
||||
* @example
|
||||
* // Setting explicit content type
|
||||
* const file = s3("data.bin", {
|
||||
* type: "application/octet-stream"
|
||||
* });
|
||||
* The Content-Type of the file. If not provided, it is automatically set based on the file extension when possible.
|
||||
*/
|
||||
type?: string;
|
||||
|
||||
@@ -1464,534 +1274,144 @@ declare module "bun" {
|
||||
highWaterMark?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for generating presigned URLs
|
||||
*/
|
||||
interface S3FilePresignOptions extends S3Options {
|
||||
interface S3FilePresignOptions extends S3FileOptions {
|
||||
/**
|
||||
* Number of seconds until the presigned URL expires.
|
||||
* - Default: 86400 (1 day)
|
||||
*
|
||||
* @example
|
||||
* // Short-lived URL
|
||||
* const url = file.presign({
|
||||
* expiresIn: 3600 // 1 hour
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // Long-lived public URL
|
||||
* const url = file.presign({
|
||||
* expiresIn: 7 * 24 * 60 * 60, // 7 days
|
||||
* acl: "public-read"
|
||||
* });
|
||||
* The number of seconds the presigned URL will be valid for. Defaults to 86400 (1 day).
|
||||
*/
|
||||
expiresIn?: number;
|
||||
|
||||
/**
|
||||
* The HTTP method allowed for the presigned URL.
|
||||
*
|
||||
* @example
|
||||
* // GET URL for downloads
|
||||
* const downloadUrl = file.presign({
|
||||
* method: "GET",
|
||||
* expiresIn: 3600
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // PUT URL for uploads
|
||||
* const uploadUrl = file.presign({
|
||||
* method: "PUT",
|
||||
* expiresIn: 3600,
|
||||
* type: "application/json"
|
||||
* });
|
||||
* The HTTP method to use for the presigned URL. Defaults to GET.
|
||||
*/
|
||||
method?: "GET" | "POST" | "PUT" | "DELETE" | "HEAD";
|
||||
method?: string;
|
||||
}
|
||||
|
||||
interface S3Stats {
|
||||
size: number;
|
||||
lastModified: Date;
|
||||
etag: string;
|
||||
type: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a file in an S3-compatible storage service.
|
||||
* Extends the Blob interface for compatibility with web APIs.
|
||||
*/
|
||||
interface S3File extends Blob {
|
||||
interface S3File extends BunFile {
|
||||
/**
|
||||
* @param path - The path to the file. If bucket options is not provided or set in the path, it will be deduced from the path.
|
||||
* @param options - The options to use for the S3 client.
|
||||
*/
|
||||
new (path: string | URL, options?: S3FileOptions): S3File;
|
||||
/**
|
||||
* The size of the file in bytes.
|
||||
* This is a Promise because it requires a network request to determine the size.
|
||||
*
|
||||
* @example
|
||||
* // Getting file size
|
||||
* const size = await file.size;
|
||||
* console.log(`File size: ${size} bytes`);
|
||||
*
|
||||
* @example
|
||||
* // Check if file is larger than 1MB
|
||||
* if (await file.size > 1024 * 1024) {
|
||||
* console.log("Large file detected");
|
||||
* }
|
||||
*/
|
||||
size: Promise<number>;
|
||||
/**
|
||||
* TODO: figure out how to get the typescript types to not error for this property.
|
||||
*/
|
||||
// size: Promise<number>;
|
||||
|
||||
/**
|
||||
* Creates a new S3File representing a slice of the original file.
|
||||
* Uses HTTP Range headers for efficient partial downloads.
|
||||
* Offset any operation on the file starting at `begin` and ending at `end`. `end` is relative to 0
|
||||
*
|
||||
* @param begin - Starting byte offset
|
||||
* @param end - Ending byte offset (exclusive)
|
||||
* @param contentType - Optional MIME type for the slice
|
||||
* @returns A new S3File representing the specified range
|
||||
* Similar to [`TypedArray.subarray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). Does not copy the file, open the file, or modify the file.
|
||||
*
|
||||
* @example
|
||||
* // Reading file header
|
||||
* const header = file.slice(0, 1024);
|
||||
* const headerText = await header.text();
|
||||
* It will use [`range`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Range) to download only the bytes you need.
|
||||
*
|
||||
* @example
|
||||
* // Reading with content type
|
||||
* const jsonSlice = file.slice(1024, 2048, "application/json");
|
||||
* const data = await jsonSlice.json();
|
||||
*
|
||||
* @example
|
||||
* // Reading from offset to end
|
||||
* const remainder = file.slice(1024);
|
||||
* const content = await remainder.text();
|
||||
* @param begin - start offset in bytes
|
||||
* @param end - absolute offset in bytes (relative to 0)
|
||||
* @param contentType - MIME type for the new S3File
|
||||
*/
|
||||
slice(begin?: number, end?: number, contentType?: string): S3File;
|
||||
|
||||
/** */
|
||||
/**
|
||||
* Offset any operation on the file starting at `begin`
|
||||
*
|
||||
* Similar to [`TypedArray.subarray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). Does not copy the file, open the file, or modify the file.
|
||||
*
|
||||
* It will use [`range`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Range) to download only the bytes you need.
|
||||
*
|
||||
* @param begin - start offset in bytes
|
||||
* @param contentType - MIME type for the new S3File
|
||||
*/
|
||||
slice(begin?: number, contentType?: string): S3File;
|
||||
|
||||
/**
|
||||
* @param contentType - MIME type for the new S3File
|
||||
*/
|
||||
slice(contentType?: string): S3File;
|
||||
|
||||
/**
|
||||
* Creates a writable stream for uploading data.
|
||||
* Suitable for large files as it uses multipart upload.
|
||||
*
|
||||
* @param options - Configuration for the upload
|
||||
* @returns A NetworkSink for writing data
|
||||
*
|
||||
* @example
|
||||
* // Basic streaming write
|
||||
* const writer = file.writer({
|
||||
* type: "application/json"
|
||||
* });
|
||||
* writer.write('{"hello": ');
|
||||
* writer.write('"world"}');
|
||||
* await writer.end();
|
||||
*
|
||||
* @example
|
||||
* // Optimized large file upload
|
||||
* const writer = file.writer({
|
||||
* partSize: 10 * 1024 * 1024, // 10MB parts
|
||||
* queueSize: 4, // Upload 4 parts in parallel
|
||||
* retry: 3 // Retry failed parts
|
||||
* });
|
||||
*
|
||||
* // Write large chunks of data efficiently
|
||||
* for (const chunk of largeDataChunks) {
|
||||
* await writer.write(chunk);
|
||||
* }
|
||||
* await writer.end();
|
||||
*
|
||||
* @example
|
||||
* // Error handling
|
||||
* const writer = file.writer();
|
||||
* try {
|
||||
* await writer.write(data);
|
||||
* await writer.end();
|
||||
* } catch (err) {
|
||||
* console.error('Upload failed:', err);
|
||||
* // Writer will automatically abort multipart upload on error
|
||||
* }
|
||||
* Incremental writer to stream writes to S3, this is equivalent of using MultipartUpload and is suitable for large files.
|
||||
*/
|
||||
writer(options?: S3Options): NetworkSink;
|
||||
writer(options?: S3FileOptions): FileSink;
|
||||
|
||||
/**
|
||||
* Gets a readable stream of the file's content.
|
||||
* Useful for processing large files without loading them entirely into memory.
|
||||
*
|
||||
* @returns A ReadableStream for the file content
|
||||
*
|
||||
* @example
|
||||
* // Basic streaming read
|
||||
* const stream = file.stream();
|
||||
* for await (const chunk of stream) {
|
||||
* console.log('Received chunk:', chunk);
|
||||
* }
|
||||
*
|
||||
* @example
|
||||
* // Piping to response
|
||||
* const stream = file.stream();
|
||||
* return new Response(stream, {
|
||||
* headers: { 'Content-Type': file.type }
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // Processing large files
|
||||
* const stream = file.stream();
|
||||
* const textDecoder = new TextDecoder();
|
||||
* for await (const chunk of stream) {
|
||||
* const text = textDecoder.decode(chunk);
|
||||
* // Process text chunk by chunk
|
||||
* }
|
||||
* The readable stream of the file.
|
||||
*/
|
||||
readonly readable: ReadableStream;
|
||||
|
||||
/**
|
||||
* Get a readable stream of the file.
|
||||
*/
|
||||
stream(): ReadableStream;
|
||||
|
||||
/**
|
||||
* The name or path of the file in the bucket.
|
||||
*
|
||||
* @example
|
||||
* const file = s3("folder/image.jpg");
|
||||
* console.log(file.name); // "folder/image.jpg"
|
||||
* The name or path of the file, as specified in the constructor.
|
||||
*/
|
||||
readonly name?: string;
|
||||
|
||||
/**
|
||||
* The bucket name containing the file.
|
||||
*
|
||||
* @example
|
||||
* const file = s3("s3://my-bucket/file.txt");
|
||||
* console.log(file.bucket); // "my-bucket"
|
||||
* The bucket name of the file.
|
||||
*/
|
||||
readonly bucket?: string;
|
||||
|
||||
/**
|
||||
* Checks if the file exists in S3.
|
||||
* Uses HTTP HEAD request to efficiently check existence without downloading.
|
||||
*
|
||||
* @returns Promise resolving to true if file exists, false otherwise
|
||||
*
|
||||
* @example
|
||||
* // Basic existence check
|
||||
* if (await file.exists()) {
|
||||
* console.log("File exists in S3");
|
||||
* }
|
||||
*
|
||||
* @example
|
||||
* // With error handling
|
||||
* try {
|
||||
* const exists = await file.exists();
|
||||
* if (!exists) {
|
||||
* console.log("File not found");
|
||||
* }
|
||||
* } catch (err) {
|
||||
* console.error("Error checking file:", err);
|
||||
* }
|
||||
* Does the file exist?
|
||||
* It will use [`head`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD) to check if the file exists.
|
||||
*/
|
||||
exists(): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Uploads data to S3.
|
||||
* Supports various input types and automatically handles large files.
|
||||
*
|
||||
* @param data - The data to upload
|
||||
* @param options - Upload configuration options
|
||||
* @returns Promise resolving to number of bytes written
|
||||
*
|
||||
* @example
|
||||
* // Writing string data
|
||||
* await file.write("Hello World", {
|
||||
* type: "text/plain"
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // Writing JSON
|
||||
* const data = { hello: "world" };
|
||||
* await file.write(JSON.stringify(data), {
|
||||
* type: "application/json"
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // Writing from Response
|
||||
* const response = await fetch("https://example.com/data");
|
||||
* await file.write(response);
|
||||
*
|
||||
* @example
|
||||
* // Writing with ACL
|
||||
* await file.write(data, {
|
||||
* acl: "public-read",
|
||||
* type: "application/octet-stream"
|
||||
* });
|
||||
* Uploads the data to S3. This is equivalent of using {@link S3File.upload} with a {@link S3File}.
|
||||
* @param data - The data to write.
|
||||
* @param options - The options to use for the S3 client.
|
||||
*/
|
||||
write(
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File | Blob,
|
||||
options?: S3Options,
|
||||
options?: S3FileOptions,
|
||||
): Promise<number>;
|
||||
|
||||
/**
|
||||
* Generates a presigned URL for the file.
|
||||
* Allows temporary access to the file without exposing credentials.
|
||||
*
|
||||
* @param options - Configuration for the presigned URL
|
||||
* @returns Presigned URL string
|
||||
*
|
||||
* @example
|
||||
* // Basic download URL
|
||||
* const url = file.presign({
|
||||
* expiresIn: 3600 // 1 hour
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // Upload URL with specific content type
|
||||
* const uploadUrl = file.presign({
|
||||
* method: "PUT",
|
||||
* expiresIn: 3600,
|
||||
* type: "image/jpeg",
|
||||
* acl: "public-read"
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // URL with custom permissions
|
||||
* const url = file.presign({
|
||||
* method: "GET",
|
||||
* expiresIn: 7 * 24 * 60 * 60, // 7 days
|
||||
* acl: "public-read"
|
||||
* });
|
||||
* Returns a presigned URL for the file.
|
||||
* @param options - The options to use for the presigned URL.
|
||||
*/
|
||||
presign(options?: S3FilePresignOptions): string;
|
||||
|
||||
/**
|
||||
* Deletes the file from S3.
|
||||
*
|
||||
* @returns Promise that resolves when deletion is complete
|
||||
*
|
||||
* @example
|
||||
* // Basic deletion
|
||||
* await file.delete();
|
||||
*
|
||||
* @example
|
||||
* // With error handling
|
||||
* try {
|
||||
* await file.delete();
|
||||
* console.log("File deleted successfully");
|
||||
* } catch (err) {
|
||||
* console.error("Failed to delete file:", err);
|
||||
* }
|
||||
*/
|
||||
delete(): Promise<void>;
|
||||
|
||||
/**
|
||||
* Alias for delete() method.
|
||||
* Provided for compatibility with Node.js fs API naming.
|
||||
*
|
||||
* @example
|
||||
* await file.unlink();
|
||||
*/
|
||||
unlink: S3File["delete"];
|
||||
|
||||
/**
|
||||
* Get the stat of a file in an S3-compatible storage service.
|
||||
*
|
||||
* @returns Promise resolving to S3Stat
|
||||
*/
|
||||
stat(): Promise<S3Stats>;
|
||||
unlink(): Promise<void>;
|
||||
}
|
||||
|
||||
/**
|
||||
* A configured S3 bucket instance for managing files.
|
||||
* The instance is callable to create S3File instances and provides methods
|
||||
* for common operations.
|
||||
*
|
||||
* @example
|
||||
* // Basic bucket setup
|
||||
* const bucket = new S3Client({
|
||||
* bucket: "my-bucket",
|
||||
* accessKeyId: "key",
|
||||
* secretAccessKey: "secret"
|
||||
* });
|
||||
*
|
||||
* // Get file instance
|
||||
* const file = bucket("image.jpg");
|
||||
*
|
||||
* // Common operations
|
||||
* await bucket.write("data.json", JSON.stringify({hello: "world"}));
|
||||
* const url = bucket.presign("file.pdf");
|
||||
* await bucket.unlink("old.txt");
|
||||
*/
|
||||
type S3Client = {
|
||||
namespace S3File {
|
||||
/**
|
||||
* Create a new instance of an S3 bucket so that credentials can be managed
|
||||
* from a single instance instead of being passed to every method.
|
||||
*
|
||||
* @param options The default options to use for the S3 client. Can be
|
||||
* overriden by passing options to the methods.
|
||||
*
|
||||
* ## Keep S3 credentials in a single instance
|
||||
*
|
||||
* @example
|
||||
* const bucket = new Bun.S3Client({
|
||||
* accessKeyId: "your-access-key",
|
||||
* secretAccessKey: "your-secret-key",
|
||||
* bucket: "my-bucket",
|
||||
* endpoint: "https://s3.us-east-1.amazonaws.com",
|
||||
* sessionToken: "your-session-token",
|
||||
* });
|
||||
*
|
||||
* // S3Client is callable, so you can do this:
|
||||
* const file = bucket.file("my-file.txt");
|
||||
*
|
||||
* // or this:
|
||||
* await file.write("Hello Bun!");
|
||||
* await file.text();
|
||||
*
|
||||
* // To delete the file:
|
||||
* await bucket.delete("my-file.txt");
|
||||
*
|
||||
* // To write a file without returning the instance:
|
||||
* await bucket.write("my-file.txt", "Hello Bun!");
|
||||
*
|
||||
* Uploads the data to S3.
|
||||
* @param data - The data to write.
|
||||
* @param options - The options to use for the S3 client.
|
||||
*/
|
||||
new (options?: S3Options): S3Client;
|
||||
|
||||
/**
|
||||
* Creates an S3File instance for the given path.
|
||||
*
|
||||
* @example
|
||||
* const file = bucket.file("image.jpg");
|
||||
* await file.write(imageData);
|
||||
* const configFile = bucket("config.json", {
|
||||
* type: "application/json",
|
||||
* acl: "private"
|
||||
* });
|
||||
*/
|
||||
file(path: string, options?: S3Options): S3File;
|
||||
|
||||
/**
|
||||
* Writes data directly to a path in the bucket.
|
||||
* Supports strings, buffers, streams, and web API types.
|
||||
*
|
||||
* @example
|
||||
* // Write string
|
||||
* await bucket.write("hello.txt", "Hello World");
|
||||
*
|
||||
* // Write JSON with type
|
||||
* await bucket.write(
|
||||
* "data.json",
|
||||
* JSON.stringify({hello: "world"}),
|
||||
* {type: "application/json"}
|
||||
* );
|
||||
*
|
||||
* // Write from fetch
|
||||
* const res = await fetch("https://example.com/data");
|
||||
* await bucket.write("data.bin", res);
|
||||
*
|
||||
* // Write with ACL
|
||||
* await bucket.write("public.html", html, {
|
||||
* acl: "public-read",
|
||||
* type: "text/html"
|
||||
* });
|
||||
*/
|
||||
write(
|
||||
path: string,
|
||||
data:
|
||||
| string
|
||||
| ArrayBufferView
|
||||
| ArrayBuffer
|
||||
| SharedArrayBuffer
|
||||
| Request
|
||||
| Response
|
||||
| BunFile
|
||||
| S3File
|
||||
| Blob
|
||||
| File,
|
||||
options?: S3Options,
|
||||
function upload(
|
||||
path: string | S3File,
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File,
|
||||
options?: S3FileOptions,
|
||||
): Promise<number>;
|
||||
|
||||
/**
|
||||
* Generate a presigned URL for temporary access to a file.
|
||||
* Useful for generating upload/download URLs without exposing credentials.
|
||||
*
|
||||
* @example
|
||||
* // Download URL
|
||||
* const downloadUrl = bucket.presign("file.pdf", {
|
||||
* expiresIn: 3600 // 1 hour
|
||||
* });
|
||||
*
|
||||
* // Upload URL
|
||||
* const uploadUrl = bucket.presign("uploads/image.jpg", {
|
||||
* method: "PUT",
|
||||
* expiresIn: 3600,
|
||||
* type: "image/jpeg",
|
||||
* acl: "public-read"
|
||||
* });
|
||||
*
|
||||
* // Long-lived public URL
|
||||
* const publicUrl = bucket.presign("public/doc.pdf", {
|
||||
* expiresIn: 7 * 24 * 60 * 60, // 7 days
|
||||
* acl: "public-read"
|
||||
* });
|
||||
* Returns a presigned URL for the file.
|
||||
* @param options - The options to use for the presigned URL.
|
||||
*/
|
||||
presign(path: string, options?: S3FilePresignOptions): string;
|
||||
function presign(path: string | S3File, options?: S3FilePresignOptions): string;
|
||||
|
||||
/**
|
||||
* Delete a file from the bucket.
|
||||
*
|
||||
* @example
|
||||
* // Simple delete
|
||||
* await bucket.unlink("old-file.txt");
|
||||
*
|
||||
* // With error handling
|
||||
* try {
|
||||
* await bucket.unlink("file.dat");
|
||||
* console.log("File deleted");
|
||||
* } catch (err) {
|
||||
* console.error("Delete failed:", err);
|
||||
* }
|
||||
* Deletes the file from S3.
|
||||
*/
|
||||
unlink(path: string, options?: S3Options): Promise<void>;
|
||||
delete: S3Client["unlink"];
|
||||
function unlink(path: string | S3File, options?: S3FileOptions): Promise<void>;
|
||||
|
||||
/**
|
||||
* Get the size of a file in bytes.
|
||||
* Uses HEAD request to efficiently get size.
|
||||
*
|
||||
* @example
|
||||
* // Get size
|
||||
* const bytes = await bucket.size("video.mp4");
|
||||
* console.log(`Size: ${bytes} bytes`);
|
||||
*
|
||||
* // Check if file is large
|
||||
* if (await bucket.size("data.zip") > 100 * 1024 * 1024) {
|
||||
* console.log("File is larger than 100MB");
|
||||
* }
|
||||
* The size of the file in bytes.
|
||||
*/
|
||||
size(path: string, options?: S3Options): Promise<number>;
|
||||
function size(path: string | S3File, options?: S3FileOptions): Promise<number>;
|
||||
|
||||
/**
|
||||
* Check if a file exists in the bucket.
|
||||
* Uses HEAD request to check existence.
|
||||
*
|
||||
* @example
|
||||
* // Check existence
|
||||
* if (await bucket.exists("config.json")) {
|
||||
* const file = bucket("config.json");
|
||||
* const config = await file.json();
|
||||
* }
|
||||
*
|
||||
* // With error handling
|
||||
* try {
|
||||
* if (!await bucket.exists("required.txt")) {
|
||||
* throw new Error("Required file missing");
|
||||
* }
|
||||
* } catch (err) {
|
||||
* console.error("Check failed:", err);
|
||||
* }
|
||||
* The size of the file in bytes.
|
||||
*/
|
||||
exists(path: string, options?: S3Options): Promise<boolean>;
|
||||
/**
|
||||
* Get the stat of a file in an S3-compatible storage service.
|
||||
*
|
||||
* @param path The path to the file.
|
||||
* @param options The options to use for the S3 client.
|
||||
*/
|
||||
stat(path: string, options?: S3Options): Promise<S3Stats>;
|
||||
};
|
||||
function exists(path: string | S3File, options?: S3FileOptions): Promise<boolean>;
|
||||
}
|
||||
|
||||
/**
|
||||
* This lets you use macros as regular imports
|
||||
@@ -3843,6 +3263,17 @@ declare module "bun" {
|
||||
// tslint:disable-next-line:unified-signatures
|
||||
function file(fileDescriptor: number, options?: BlobPropertyBag): BunFile;
|
||||
|
||||
/**
|
||||
* Lazily load/upload a file from S3.
|
||||
* @param path - The path to the file. If bucket options is not provided or set in the path, it will be deduced from the path.
|
||||
* @param options - The options to use for the S3 client.
|
||||
*/
|
||||
function s3(path: string | URL, options?: S3FileOptions): S3File;
|
||||
/**
|
||||
* The S3 file class.
|
||||
*/
|
||||
const S3: typeof S3File;
|
||||
|
||||
/**
|
||||
* Allocate a new [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) without zeroing the bytes.
|
||||
*
|
||||
@@ -4149,24 +3580,9 @@ declare module "bun" {
|
||||
function nanoseconds(): number;
|
||||
|
||||
/**
|
||||
* Show precise statistics about memory usage of your application
|
||||
*
|
||||
* Generate a heap snapshot in JavaScriptCore's format that can be viewed with `bun --inspect` or Safari's Web Inspector
|
||||
* Generate a heap snapshot for seeing where the heap is being used
|
||||
*/
|
||||
function generateHeapSnapshot(format?: "jsc"): HeapSnapshot;
|
||||
|
||||
/**
|
||||
* Show precise statistics about memory usage of your application
|
||||
*
|
||||
* Generate a V8 Heap Snapshot that can be used with Chrome DevTools & Visual Studio Code
|
||||
*
|
||||
* This is a JSON string that can be saved to a file.
|
||||
* ```ts
|
||||
* const snapshot = Bun.generateHeapSnapshot("v8");
|
||||
* await Bun.write("heap.heapsnapshot", snapshot);
|
||||
* ```
|
||||
*/
|
||||
function generateHeapSnapshot(format: "v8"): string;
|
||||
function generateHeapSnapshot(): HeapSnapshot;
|
||||
|
||||
/**
|
||||
* The next time JavaScriptCore is idle, clear unused memory and attempt to reduce the heap size.
|
||||
|
||||
2
packages/bun-types/html-rewriter.d.ts
vendored
2
packages/bun-types/html-rewriter.d.ts
vendored
@@ -50,7 +50,7 @@ declare namespace HTMLRewriterTypes {
|
||||
|
||||
interface Element {
|
||||
tagName: string;
|
||||
readonly attributes: IterableIterator<[string, string]>;
|
||||
readonly attributes: IterableIterator<string[]>;
|
||||
readonly removed: boolean;
|
||||
/** Whether the element is explicitly self-closing, e.g. `<foo />` */
|
||||
readonly selfClosing: boolean;
|
||||
|
||||
4
packages/bun-types/test.d.ts
vendored
4
packages/bun-types/test.d.ts
vendored
@@ -1050,7 +1050,7 @@ declare module "bun:test" {
|
||||
*
|
||||
* @example
|
||||
* const o = { a: 'foo', b: 'bar', c: 'baz' };
|
||||
* expect(o).toContainAnyValues(['qux', 'foo']);
|
||||
` * expect(o).toContainAnyValues(['qux', 'foo']);
|
||||
* expect(o).toContainAnyValues(['qux', 'bar']);
|
||||
* expect(o).toContainAnyValues(['qux', 'baz']);
|
||||
* expect(o).not.toContainAnyValues(['qux']);
|
||||
@@ -1060,8 +1060,6 @@ declare module "bun:test" {
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains all the provided keys.
|
||||
*
|
||||
* @example
|
||||
* expect({ a: 'foo', b: 'bar', c: 'baz' }).toContainKeys(['a', 'b']);
|
||||
* expect({ a: 'foo', b: 'bar', c: 'baz' }).toContainKeys(['a', 'b', 'c']);
|
||||
* expect({ a: 'foo', b: 'bar', c: 'baz' }).not.toContainKeys(['a', 'b', 'e']);
|
||||
|
||||
@@ -843,6 +843,13 @@ static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_listen_socket_unix(const char
|
||||
return LIBUS_SOCKET_ERROR;
|
||||
}
|
||||
|
||||
#ifndef _WIN32
|
||||
// 700 permission by default
|
||||
fchmod(listenFd, S_IRWXU);
|
||||
#else
|
||||
_chmod(path, S_IREAD | S_IWRITE | S_IEXEC);
|
||||
#endif
|
||||
|
||||
#ifdef _WIN32
|
||||
_unlink(path);
|
||||
#else
|
||||
|
||||
@@ -35,9 +35,6 @@ us_ssl_ctx_get_X509_without_callback_from(struct us_cert_string_t content) {
|
||||
OPENSSL_PUT_ERROR(SSL, ERR_R_PEM_LIB);
|
||||
goto end;
|
||||
}
|
||||
|
||||
// NOTE: PEM_read_bio_X509 allocates, so input BIO must be freed.
|
||||
BIO_free(in);
|
||||
return x;
|
||||
end:
|
||||
X509_free(x);
|
||||
@@ -143,4 +140,4 @@ extern "C" X509_STORE *us_get_default_ca_store() {
|
||||
}
|
||||
|
||||
return store;
|
||||
}
|
||||
}
|
||||
@@ -90,8 +90,8 @@ enum {
|
||||
#define POLL_TYPE_MASK (POLL_TYPE_KIND_MASK | POLL_TYPE_POLLING_MASK)
|
||||
|
||||
/* Bun APIs implemented in Zig */
|
||||
void Bun__lock(zig_mutex_t *lock);
|
||||
void Bun__unlock(zig_mutex_t *lock);
|
||||
void Bun__lock(uint32_t *lock);
|
||||
void Bun__unlock(uint32_t *lock);
|
||||
|
||||
struct addrinfo_request;
|
||||
struct addrinfo_result_entry {
|
||||
|
||||
@@ -20,18 +20,6 @@
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#if defined(__APPLE__)
|
||||
#include <os/lock.h>
|
||||
typedef os_unfair_lock zig_mutex_t;
|
||||
#elif defined(__linux__)
|
||||
typedef uint32_t zig_mutex_t;
|
||||
#elif defined(_WIN32)
|
||||
// SRWLOCK
|
||||
typedef void* zig_mutex_t;
|
||||
#else
|
||||
#error "Unsupported platform"
|
||||
#endif
|
||||
|
||||
// IMPORTANT: When changing this, don't forget to update the zig version in uws.zig as well!
|
||||
struct us_internal_loop_data_t {
|
||||
struct us_timer_t *sweep_timer;
|
||||
@@ -51,7 +39,7 @@ struct us_internal_loop_data_t {
|
||||
int low_prio_budget;
|
||||
struct us_connecting_socket_t *dns_ready_head;
|
||||
struct us_connecting_socket_t *closed_connecting_head;
|
||||
zig_mutex_t mutex;
|
||||
uint32_t mutex;
|
||||
void *parent_ptr;
|
||||
char parent_tag;
|
||||
/* We do not care if this flips or not, it doesn't matter */
|
||||
|
||||
@@ -21,30 +21,38 @@
|
||||
#ifndef WIN32
|
||||
#include <sys/ioctl.h>
|
||||
#endif
|
||||
#include "wtf/Platform.h"
|
||||
|
||||
#if ASSERT_ENABLED
|
||||
extern const size_t Bun__lock__size;
|
||||
extern void __attribute((__noreturn__)) Bun__panic(const char* message, size_t length);
|
||||
#define BUN_PANIC(message) Bun__panic(message, sizeof(message) - 1)
|
||||
#endif
|
||||
|
||||
/* The loop has 2 fallthrough polls */
|
||||
void us_internal_loop_data_init(struct us_loop_t *loop, void (*wakeup_cb)(struct us_loop_t *loop),
|
||||
void (*pre_cb)(struct us_loop_t *loop), void (*post_cb)(struct us_loop_t *loop)) {
|
||||
// We allocate with calloc, so we only need to initialize the specific fields in use.
|
||||
loop->data.sweep_timer = us_create_timer(loop, 1, 0);
|
||||
loop->data.recv_buf = malloc(LIBUS_RECV_BUFFER_LENGTH + LIBUS_RECV_BUFFER_PADDING * 2);
|
||||
loop->data.send_buf = malloc(LIBUS_SEND_BUFFER_LENGTH);
|
||||
loop->data.ssl_data = 0;
|
||||
loop->data.head = 0;
|
||||
loop->data.iterator = 0;
|
||||
loop->data.closed_udp_head = 0;
|
||||
loop->data.closed_head = 0;
|
||||
loop->data.low_prio_head = 0;
|
||||
loop->data.low_prio_budget = 0;
|
||||
|
||||
loop->data.pre_cb = pre_cb;
|
||||
loop->data.post_cb = post_cb;
|
||||
loop->data.iteration_nr = 0;
|
||||
|
||||
loop->data.closed_connecting_head = 0;
|
||||
loop->data.dns_ready_head = 0;
|
||||
loop->data.mutex = 0;
|
||||
|
||||
loop->data.parent_ptr = 0;
|
||||
loop->data.parent_tag = 0;
|
||||
|
||||
loop->data.closed_context_head = 0;
|
||||
loop->data.jsc_vm = 0;
|
||||
|
||||
loop->data.wakeup_async = us_internal_create_async(loop, 1, 0);
|
||||
us_internal_async_set(loop->data.wakeup_async, (void (*)(struct us_internal_async *)) wakeup_cb);
|
||||
#if ASSERT_ENABLED
|
||||
if (Bun__lock__size != sizeof(loop->data.mutex)) {
|
||||
BUN_PANIC("The size of the mutex must match the size of the lock");
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void us_internal_loop_data_free(struct us_loop_t *loop) {
|
||||
@@ -430,10 +438,6 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
|
||||
}
|
||||
|
||||
if(eof && s) {
|
||||
if (UNLIKELY(us_socket_is_closed(0, s))) {
|
||||
// Do not call on_end after the socket has been closed
|
||||
return;
|
||||
}
|
||||
if (us_socket_is_shut_down(0, s)) {
|
||||
/* We got FIN back after sending it */
|
||||
s = us_socket_close(0, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, NULL);
|
||||
|
||||
@@ -432,9 +432,8 @@ public:
|
||||
|
||||
/* Try and end the response. Returns [true, true] on success.
|
||||
* Starts a timeout in some cases. Returns [ok, hasResponded] */
|
||||
std::pair<bool, bool> tryEnd(std::string_view data, uintmax_t totalSize = 0, bool closeConnection = false) {
|
||||
bool ok = internalEnd(data, totalSize, true, true, closeConnection);
|
||||
return {ok, hasResponded()};
|
||||
std::pair<bool, bool> tryEnd(std::string_view data, uint64_t totalSize = 0, bool closeConnection = false) {
|
||||
return {internalEnd(data, totalSize, true, true, closeConnection), hasResponded()};
|
||||
}
|
||||
|
||||
/* Write the end of chunked encoded stream */
|
||||
|
||||
@@ -256,7 +256,7 @@ private:
|
||||
std::string segment = std::string(getUrlSegment(i).first);
|
||||
Node *next = nullptr;
|
||||
for (std::unique_ptr<Node> &child : n->children) {
|
||||
if (((segment.length() && child->name.length() && segment[0] == ':' && child->name[0] == ':') || child->name == segment) && child->isHighPriority == (priority == HIGH_PRIORITY)) {
|
||||
if (child->name == segment && child->isHighPriority == (priority == HIGH_PRIORITY)) {
|
||||
next = child.get();
|
||||
break;
|
||||
}
|
||||
@@ -304,19 +304,12 @@ public:
|
||||
for (auto &p : root.children) {
|
||||
if (p->name == method) {
|
||||
/* Then route the url */
|
||||
if (executeHandlers(p.get(), 0, userData)) {
|
||||
return true;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
return executeHandlers(p.get(), 0, userData);
|
||||
}
|
||||
}
|
||||
|
||||
/* Always test any route last (this check should not be necessary if we always have at least one handler) */
|
||||
if (root.children.empty()) [[unlikely]] {
|
||||
return false;
|
||||
}
|
||||
return executeHandlers(root.children.back().get(), 0, userData);
|
||||
/* We did not find any handler for this method and url */
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Adds the corresponding entires in matching tree and handler list */
|
||||
@@ -386,11 +379,11 @@ public:
|
||||
/* Removes ALL routes with the same handler as can be found with the given parameters.
|
||||
* Removing a wildcard is done by removing ONE OF the methods the wildcard would match with.
|
||||
* Example: If wildcard includes POST, GET, PUT, you can remove ALL THREE by removing GET. */
|
||||
bool remove(std::string method, std::string pattern, uint32_t priority) {
|
||||
void remove(std::string method, std::string pattern, uint32_t priority) {
|
||||
uint32_t handler = findHandler(method, pattern, priority);
|
||||
if (handler == UINT32_MAX) {
|
||||
/* Not found or already removed, do nothing */
|
||||
return false;
|
||||
return;
|
||||
}
|
||||
|
||||
/* Cull the entire tree */
|
||||
@@ -401,8 +394,6 @@ public:
|
||||
|
||||
/* Now remove the actual handler */
|
||||
handlers.erase(handlers.begin() + (handler & HANDLER_MASK));
|
||||
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -26,316 +26,183 @@
|
||||
"mime",
|
||||
],
|
||||
"packages": {
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.5", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="],
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
|
||||
|
||||
"@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="],
|
||||
|
||||
"@jridgewell/source-map": ["@jridgewell/source-map@0.3.6", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25" } }, "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ=="],
|
||||
|
||||
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="],
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="],
|
||||
|
||||
"@sinclair/typebox": ["@sinclair/typebox@0.30.4", "", {}, "sha512-wFuuDR+O1OAE2GL0q68h1Ty00RE6Ihcixr55A6TU5RCvOUHnwJw9LGuDVg9NxDiAp7m/YJpa+UaOuLAz0ziyOQ=="],
|
||||
|
||||
"@types/bun": ["@types/bun@1.1.13", "", { "dependencies": { "bun-types": "1.1.34" } }, "sha512-KmQxSBgVWCl6RSuerlLGZlIWfdxkKqat0nxN61+qu4y1KDn0Ll3j7v1Pl8GnaL3a/U6GGWVTJh75ap62kR1E8Q=="],
|
||||
|
||||
"@types/bun": ["@types/bun@1.1.14", "", { "dependencies": { "bun-types": "1.1.37" } }, "sha512-opVYiFGtO2af0dnWBdZWlioLBoxSdDO5qokaazLhq8XQtGZbY4pY3/JxY8Zdf/hEwGubbp7ErZXoN1+h2yesxA=="],
|
||||
"@types/eslint": ["@types/eslint@9.6.1", "", { "dependencies": { "@types/estree": "*", "@types/json-schema": "*" } }, "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag=="],
|
||||
|
||||
"@types/eslint-scope": ["@types/eslint-scope@3.7.7", "", { "dependencies": { "@types/eslint": "*", "@types/estree": "*" } }, "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg=="],
|
||||
|
||||
"@types/estree": ["@types/estree@1.0.6", "", {}, "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw=="],
|
||||
|
||||
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
|
||||
|
||||
"@types/node": ["@types/node@20.12.14", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg=="],
|
||||
|
||||
"@types/ws": ["@types/ws@8.5.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA=="],
|
||||
|
||||
"@webassemblyjs/ast": ["@webassemblyjs/ast@1.14.1", "", { "dependencies": { "@webassemblyjs/helper-numbers": "1.13.2", "@webassemblyjs/helper-wasm-bytecode": "1.13.2" } }, "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ=="],
|
||||
|
||||
"@webassemblyjs/floating-point-hex-parser": ["@webassemblyjs/floating-point-hex-parser@1.13.2", "", {}, "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA=="],
|
||||
|
||||
"@webassemblyjs/helper-api-error": ["@webassemblyjs/helper-api-error@1.13.2", "", {}, "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ=="],
|
||||
|
||||
"@webassemblyjs/helper-buffer": ["@webassemblyjs/helper-buffer@1.14.1", "", {}, "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA=="],
|
||||
|
||||
"@webassemblyjs/helper-numbers": ["@webassemblyjs/helper-numbers@1.13.2", "", { "dependencies": { "@webassemblyjs/floating-point-hex-parser": "1.13.2", "@webassemblyjs/helper-api-error": "1.13.2", "@xtuc/long": "4.2.2" } }, "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA=="],
|
||||
|
||||
"@webassemblyjs/helper-wasm-bytecode": ["@webassemblyjs/helper-wasm-bytecode@1.13.2", "", {}, "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA=="],
|
||||
|
||||
"@webassemblyjs/helper-wasm-section": ["@webassemblyjs/helper-wasm-section@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/wasm-gen": "1.14.1" } }, "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw=="],
|
||||
|
||||
"@webassemblyjs/ieee754": ["@webassemblyjs/ieee754@1.13.2", "", { "dependencies": { "@xtuc/ieee754": "^1.2.0" } }, "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw=="],
|
||||
|
||||
"@webassemblyjs/leb128": ["@webassemblyjs/leb128@1.13.2", "", { "dependencies": { "@xtuc/long": "4.2.2" } }, "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw=="],
|
||||
|
||||
"@webassemblyjs/utf8": ["@webassemblyjs/utf8@1.13.2", "", {}, "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ=="],
|
||||
|
||||
"@webassemblyjs/wasm-edit": ["@webassemblyjs/wasm-edit@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/helper-wasm-section": "1.14.1", "@webassemblyjs/wasm-gen": "1.14.1", "@webassemblyjs/wasm-opt": "1.14.1", "@webassemblyjs/wasm-parser": "1.14.1", "@webassemblyjs/wast-printer": "1.14.1" } }, "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ=="],
|
||||
|
||||
"@webassemblyjs/wasm-gen": ["@webassemblyjs/wasm-gen@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/ieee754": "1.13.2", "@webassemblyjs/leb128": "1.13.2", "@webassemblyjs/utf8": "1.13.2" } }, "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg=="],
|
||||
|
||||
"@webassemblyjs/wasm-opt": ["@webassemblyjs/wasm-opt@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/wasm-gen": "1.14.1", "@webassemblyjs/wasm-parser": "1.14.1" } }, "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw=="],
|
||||
|
||||
"@webassemblyjs/wasm-parser": ["@webassemblyjs/wasm-parser@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-api-error": "1.13.2", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/ieee754": "1.13.2", "@webassemblyjs/leb128": "1.13.2", "@webassemblyjs/utf8": "1.13.2" } }, "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ=="],
|
||||
|
||||
"@webassemblyjs/wast-printer": ["@webassemblyjs/wast-printer@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@xtuc/long": "4.2.2" } }, "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw=="],
|
||||
|
||||
"@xtuc/ieee754": ["@xtuc/ieee754@1.2.0", "", {}, "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA=="],
|
||||
|
||||
"@xtuc/long": ["@xtuc/long@4.2.2", "", {}, "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ=="],
|
||||
|
||||
"accepts": ["accepts@1.3.8", "", { "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw=="],
|
||||
|
||||
"acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="],
|
||||
|
||||
"acorn": ["acorn@8.14.0", "", {}, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="],
|
||||
"acorn-loose": ["acorn-loose@8.4.0", "", { "dependencies": { "acorn": "^8.11.0" } }, "sha512-M0EUka6rb+QC4l9Z3T0nJEzNOO7JcoJlYMrBlyBCiFSXRyxjLKayd4TbQs2FDRWQU1h9FR7QVNHt+PEaoNL5rQ=="],
|
||||
|
||||
"ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="],
|
||||
|
||||
"ajv-keywords": ["ajv-keywords@3.5.2", "", { "peerDependencies": { "ajv": "^6.9.1" } }, "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ=="],
|
||||
|
||||
"array-flatten": ["array-flatten@1.1.1", "", {}, "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="],
|
||||
|
||||
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
|
||||
|
||||
"axios": ["axios@1.7.7", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q=="],
|
||||
|
||||
"body-parser": ["body-parser@1.20.1", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.4", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.11.0", "raw-body": "2.5.1", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw=="],
|
||||
|
||||
"browserslist": ["browserslist@4.24.2", "", { "dependencies": { "caniuse-lite": "^1.0.30001669", "electron-to-chromium": "^1.5.41", "node-releases": "^2.0.18", "update-browserslist-db": "^1.1.1" }, "bin": { "browserslist": "cli.js" } }, "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg=="],
|
||||
|
||||
"axios": ["axios@1.7.9", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw=="],
|
||||
"body-parser": ["body-parser@1.20.3", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g=="],
|
||||
"browserslist": ["browserslist@4.24.2", "", { "dependencies": { "caniuse-lite": "^1.0.30001669", "electron-to-chromium": "^1.5.41", "node-releases": "^2.0.18", "update-browserslist-db": "^1.1.1" } }, "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg=="],
|
||||
"buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="],
|
||||
|
||||
"bun-types": ["bun-types@1.1.34", "", { "dependencies": { "@types/node": "~20.12.8", "@types/ws": "~8.5.10" } }, "sha512-br5QygTEL/TwB4uQOb96Ky22j4Gq2WxWH/8Oqv20fk5HagwKXo/akB+LiYgSfzexCt6kkcUaVm+bKiPl71xPvw=="],
|
||||
|
||||
"bun-types": ["bun-types@1.1.37", "", { "dependencies": { "@types/node": "~20.12.8", "@types/ws": "~8.5.10" } }, "sha512-C65lv6eBr3LPJWFZ2gswyrGZ82ljnH8flVE03xeXxKhi2ZGtFiO4isRKTKnitbSqtRAcaqYSR6djt1whI66AbA=="],
|
||||
"bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="],
|
||||
|
||||
"call-bind": ["call-bind@1.0.2", "", { "dependencies": { "function-bind": "^1.1.1", "get-intrinsic": "^1.0.2" } }, "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA=="],
|
||||
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001680", "", {}, "sha512-rPQy70G6AGUMnbwS1z6Xg+RkHYPAi18ihs47GH0jcxIG7wArmPgY3XbS2sRdBbxJljp3thdT8BIqv9ccCypiPA=="],
|
||||
|
||||
"call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="],
|
||||
"call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.1", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g=="],
|
||||
"call-bound": ["call-bound@1.0.2", "", { "dependencies": { "call-bind": "^1.0.8", "get-intrinsic": "^1.2.5" } }, "sha512-0lk0PHFe/uz0vl527fG9CgdE9WdafjDbCXvBbs+LUv000TVt2Jjhqbs4Jwm8gz070w8xXyEAxrPOMullsxXeGg=="],
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001688", "", {}, "sha512-Nmqpru91cuABu/DTCXbM2NSRHzM2uVHfPnhJ/1zEAJx/ILBRVmz3pzH4N7DZqbdG0gWClsCC05Oj0mJ/1AWMbA=="],
|
||||
"chrome-trace-event": ["chrome-trace-event@1.0.4", "", {}, "sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ=="],
|
||||
|
||||
"combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
|
||||
|
||||
"commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="],
|
||||
|
||||
"content-disposition": ["content-disposition@0.5.4", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ=="],
|
||||
|
||||
"content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="],
|
||||
|
||||
"cookie": ["cookie@0.5.0", "", {}, "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw=="],
|
||||
|
||||
"cookie": ["cookie@0.7.1", "", {}, "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="],
|
||||
"cookie-signature": ["cookie-signature@1.0.6", "", {}, "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="],
|
||||
|
||||
"debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="],
|
||||
|
||||
"define-data-property": ["define-data-property@1.1.4", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.0.1" } }, "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A=="],
|
||||
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
|
||||
|
||||
"depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="],
|
||||
|
||||
"destroy": ["destroy@1.2.0", "", {}, "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="],
|
||||
|
||||
"dunder-proto": ["dunder-proto@1.0.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-9+Sj30DIu+4KvHqMfLUGLFYL2PkURSYMVXJyXe92nFRvlYq5hBjLEhblKB+vkd/WVlUYMWigiY07T91Fkk0+4A=="],
|
||||
"ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="],
|
||||
|
||||
"electron-to-chromium": ["electron-to-chromium@1.5.58", "", {}, "sha512-al2l4r+24ZFL7WzyPTlyD0fC33LLzvxqLCwurtBibVPghRGO9hSTl+tis8t1kD7biPiH/en4U0I7o/nQbYeoVA=="],
|
||||
|
||||
"elysia": ["elysia@0.6.3", "", { "dependencies": { "@sinclair/typebox": "^0.30.4", "fast-querystring": "^1.1.2", "memoirist": "0.1.4", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeers": ["typescript"] }, "sha512-LhdH476fotAQuEUpnLdn8fAzwo3ZmwHVrYzQhujo+x+OpmMXGMJXT7L7/Ct+b5wwR2txP5xCxI1A0suxhRxgIQ=="],
|
||||
|
||||
"encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="],
|
||||
|
||||
"electron-to-chromium": ["electron-to-chromium@1.5.73", "", {}, "sha512-8wGNxG9tAG5KhGd3eeA0o6ixhiNdgr0DcHWm85XPCphwZgD1lIEoi6t3VERayWao7SF7AAZTw6oARGJeVjH8Kg=="],
|
||||
"elysia": ["elysia@0.6.24", "", { "dependencies": { "@sinclair/typebox": "^0.30.4", "fast-querystring": "^1.1.2", "memoirist": "0.1.4", "mergician": "^1.1.0", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeerDependencies": ["typescript"] }, "sha512-qaN8b816tSecNIsgNwFCMOMlayOaChme9i/VHxCRZyPTgtdAAnrYDZaUQfatyt1jcHUdkf3IT4ny5GuS7NB26w=="],
|
||||
"encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="],
|
||||
"enhanced-resolve": ["enhanced-resolve@5.17.1", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg=="],
|
||||
|
||||
"es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
|
||||
"es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="],
|
||||
"es-module-lexer": ["es-module-lexer@1.5.4", "", {}, "sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw=="],
|
||||
|
||||
"es-object-atoms": ["es-object-atoms@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw=="],
|
||||
"escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
|
||||
|
||||
"escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="],
|
||||
|
||||
"eslint-scope": ["eslint-scope@5.1.1", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" } }, "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw=="],
|
||||
|
||||
"esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="],
|
||||
|
||||
"estraverse": ["estraverse@4.3.0", "", {}, "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw=="],
|
||||
|
||||
"etag": ["etag@1.8.1", "", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="],
|
||||
|
||||
"events": ["events@3.3.0", "", {}, "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="],
|
||||
|
||||
"express": ["express@4.18.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.1", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.5.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.2.0", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.1", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", "proxy-addr": "~2.0.7", "qs": "6.11.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.18.0", "serve-static": "1.15.0", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ=="],
|
||||
|
||||
"express": ["express@4.21.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.19.0", "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA=="],
|
||||
"fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
|
||||
|
||||
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
|
||||
|
||||
"fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="],
|
||||
|
||||
"fast-querystring": ["fast-querystring@1.1.2", "", { "dependencies": { "fast-decode-uri-component": "^1.0.1" } }, "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg=="],
|
||||
|
||||
"finalhandler": ["finalhandler@1.2.0", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" } }, "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg=="],
|
||||
|
||||
"finalhandler": ["finalhandler@1.3.1", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" } }, "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ=="],
|
||||
"follow-redirects": ["follow-redirects@1.15.9", "", {}, "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ=="],
|
||||
|
||||
"form-data": ["form-data@4.0.1", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw=="],
|
||||
|
||||
"forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="],
|
||||
|
||||
"fresh": ["fresh@0.5.2", "", {}, "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="],
|
||||
|
||||
"function-bind": ["function-bind@1.1.1", "", {}, "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="],
|
||||
|
||||
"get-intrinsic": ["get-intrinsic@1.2.1", "", { "dependencies": { "function-bind": "^1.1.1", "has": "^1.0.3", "has-proto": "^1.0.1", "has-symbols": "^1.0.3" } }, "sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw=="],
|
||||
|
||||
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
|
||||
"get-intrinsic": ["get-intrinsic@1.2.6", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "dunder-proto": "^1.0.0", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.0.0", "function-bind": "^1.1.2", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.0.0" } }, "sha512-qxsEs+9A+u85HhllWJJFicJfPDhRmjzoYdl64aMWW9yRIJmSyxdn8IEkuIM530/7T+lv0TIHd8L6Q/ra0tEoeA=="],
|
||||
"glob-to-regexp": ["glob-to-regexp@0.4.1", "", {}, "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw=="],
|
||||
|
||||
"gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="],
|
||||
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
|
||||
|
||||
"has": ["has@1.0.3", "", { "dependencies": { "function-bind": "^1.1.1" } }, "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw=="],
|
||||
|
||||
"has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
|
||||
|
||||
"has-proto": ["has-proto@1.0.1", "", {}, "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg=="],
|
||||
|
||||
"has-symbols": ["has-symbols@1.0.3", "", {}, "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A=="],
|
||||
|
||||
"has-property-descriptors": ["has-property-descriptors@1.0.2", "", { "dependencies": { "es-define-property": "^1.0.0" } }, "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg=="],
|
||||
"has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
|
||||
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
|
||||
"http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="],
|
||||
|
||||
"iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="],
|
||||
|
||||
"inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
|
||||
|
||||
"ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="],
|
||||
|
||||
"jest-worker": ["jest-worker@27.5.1", "", { "dependencies": { "@types/node": "*", "merge-stream": "^2.0.0", "supports-color": "^8.0.0" } }, "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg=="],
|
||||
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
"json-parse-even-better-errors": ["json-parse-even-better-errors@2.3.1", "", {}, "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w=="],
|
||||
|
||||
"json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="],
|
||||
|
||||
"loader-runner": ["loader-runner@4.3.0", "", {}, "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
"math-intrinsics": ["math-intrinsics@1.0.0", "", {}, "sha512-4MqMiKP90ybymYvsut0CH2g4XWbfLtmlCkXmtmdcDCxNB+mQcu1w/1+L/VD7vi/PSv7X2JYV7SCcR+jiPXnQtA=="],
|
||||
"media-typer": ["media-typer@0.3.0", "", {}, "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="],
|
||||
|
||||
"memoirist": ["memoirist@0.1.4", "", {}, "sha512-D6GbPSqO2nUVOmm7VZjJc5tC60pkOVUPzLwkKl1vCiYP+2b1cG8N9q1O3P0JmNM68u8vsgefPbxRUCSGxSXD+g=="],
|
||||
|
||||
"merge-descriptors": ["merge-descriptors@1.0.1", "", {}, "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w=="],
|
||||
|
||||
"merge-descriptors": ["merge-descriptors@1.0.3", "", {}, "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ=="],
|
||||
"merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],
|
||||
|
||||
"mergician": ["mergician@1.1.0", "", {}, "sha512-FXbxzU6BBhGkV8XtUr8Sk015ZRaAALviit8Lle6OEgd1udX8wlu6tBeUMLGQGdz1MfHpAVNNQkXowyDnJuhXpA=="],
|
||||
"methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="],
|
||||
|
||||
"mime": ["mime@3.0.0", "", { "bin": { "mime": "cli.js" } }, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="],
|
||||
|
||||
"mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
|
||||
|
||||
"mime": ["mime@3.0.0", "", {}, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="],
|
||||
"mime-db": ["mime-db@1.53.0", "", {}, "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg=="],
|
||||
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
|
||||
|
||||
"ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
"negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="],
|
||||
|
||||
"neo-async": ["neo-async@2.6.2", "", {}, "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="],
|
||||
|
||||
"node-releases": ["node-releases@2.0.18", "", {}, "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g=="],
|
||||
|
||||
"object-inspect": ["object-inspect@1.12.3", "", {}, "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g=="],
|
||||
|
||||
"node-releases": ["node-releases@2.0.19", "", {}, "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw=="],
|
||||
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
|
||||
"object-inspect": ["object-inspect@1.13.3", "", {}, "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA=="],
|
||||
"on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="],
|
||||
|
||||
"openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="],
|
||||
|
||||
"parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="],
|
||||
|
||||
"path-to-regexp": ["path-to-regexp@0.1.7", "", {}, "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="],
|
||||
|
||||
"path-to-regexp": ["path-to-regexp@0.1.12", "", {}, "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ=="],
|
||||
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
|
||||
|
||||
"prop-types": ["prop-types@15.8.1", "", { "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", "react-is": "^16.13.1" } }, "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg=="],
|
||||
"proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="],
|
||||
|
||||
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
|
||||
|
||||
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
|
||||
|
||||
"qs": ["qs@6.11.0", "", { "dependencies": { "side-channel": "^1.0.4" } }, "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q=="],
|
||||
|
||||
"qs": ["qs@6.13.0", "", { "dependencies": { "side-channel": "^1.0.6" } }, "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg=="],
|
||||
"randombytes": ["randombytes@2.1.0", "", { "dependencies": { "safe-buffer": "^5.1.0" } }, "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ=="],
|
||||
|
||||
"range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="],
|
||||
|
||||
"raw-body": ["raw-body@2.5.1", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig=="],
|
||||
|
||||
"react": ["react@0.0.0-experimental-380f5d67-20241113", "", {}, "sha512-QquU1j1TmZR+KgGSFvWTlOuwLvGrA8ldUJean+gT0nYIhSJ1ZkdXJQFnFRWqxoc74C7SY1o4NMz0yJxpUBoQ2w=="],
|
||||
|
||||
"react-dom": ["react-dom@0.0.0-experimental-380f5d67-20241113", "", { "dependencies": { "scheduler": "0.0.0-experimental-380f5d67-20241113" }, "peerDependencies": { "react": "0.0.0-experimental-380f5d67-20241113" } }, "sha512-1ok9k5rAF7YuTveNefkPOvZHHuh5RLnCc5DU7sT7IL3i2K+LZmlsbSdlylMevjt9OzovxWQdsk04Fd4GKVCBWg=="],
|
||||
|
||||
"react-refresh": ["react-refresh@0.0.0-experimental-380f5d67-20241113", "", {}, "sha512-PwTxoYh02oTSdM2DLV8r3ZzHwObVDIsS05fxNcajIZe+/kIFTWThmXYJpGMljzjIs0wwScVkMONU6URTRPQvHA=="],
|
||||
|
||||
"raw-body": ["raw-body@2.5.2", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA=="],
|
||||
"react": ["react@0.0.0-fec00a869", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "prop-types": "^15.6.2", "scheduler": "0.0.0-fec00a869" } }, "sha512-FaS3ViFU4ag7cuhDHQgGK3DAdWaD8YFXzEbO/Qzz33Si7VEzRRdnyoegFwg7VkEKxR6CvCVP6revi9Tm3Gq+WQ=="],
|
||||
"react-dom": ["react-dom@0.0.0-fec00a869", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "prop-types": "^15.6.2", "scheduler": "0.0.0-fec00a869" }, "peerDependencies": { "react": "0.0.0-fec00a869" } }, "sha512-atB5i2HgCvbvhtGXq9oaX/BCL2AFZjnccougU8S9eulRFNQbNrfGNwIcj04PRo3XU1ZsBw5syL/5l596UaolKA=="],
|
||||
"react-is": ["react-is@16.13.1", "", {}, "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="],
|
||||
"react-refresh": ["react-refresh@0.0.0-f77c7b9d7", "", {}, "sha512-mErwv0xcQz2sYnCJPaQ93D23Irnrfo5c+wG2k2KAgWOvFfqXPQdIUZ1j9S+gKYQI2kqgd0fdTJchEJydqroyJw=="],
|
||||
"react-server-dom-bun": ["react-server-dom-bun@0.0.0-experimental-603e6108-20241029", "", { "dependencies": { "neo-async": "^2.6.1" } }, "sha512-FfteCHlOgJSnDJRatgIkIU74jQQ9M1+fH2e6kfY9Sibu8FAWEUjgApKQPDfiXgjrkY7w0ITQu0b2FezC0eGzCw=="],
|
||||
|
||||
"react-server-dom-webpack": ["react-server-dom-webpack@0.0.0-experimental-380f5d67-20241113", "", { "dependencies": { "acorn-loose": "^8.3.0", "neo-async": "^2.6.1", "webpack-sources": "^3.2.0" }, "peerDependencies": { "react": "0.0.0-experimental-380f5d67-20241113", "react-dom": "0.0.0-experimental-380f5d67-20241113", "webpack": "^5.59.0" } }, "sha512-hUluisy+9Srvrju5yS+qBOIAX82E+MRYOmoTNbV0kUsTi964ZZFLBzuruASAyUbbP1OhtFl0DwBxYN+UT0yUFQ=="],
|
||||
|
||||
"react-server-dom-webpack": ["react-server-dom-webpack@0.0.0-experimental-feed8f3f9-20240118", "", { "dependencies": { "acorn-loose": "^8.3.0", "loose-envify": "^1.1.0", "neo-async": "^2.6.1" }, "peerDependencies": { "react": "0.0.0-experimental-feed8f3f9-20240118", "react-dom": "0.0.0-experimental-feed8f3f9-20240118", "webpack": "^5.59.0" } }, "sha512-9+gS3ydJF5aYwKkvfzN+DtHfICzvQ+gYGv+2MVZo65gDSit1wC0vwOd0YebHqJNC2JruND+nEyd7wQAYmVdAZA=="],
|
||||
"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
|
||||
|
||||
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
|
||||
|
||||
"scheduler": ["scheduler@0.0.0-experimental-380f5d67-20241113", "", {}, "sha512-UtSmlBSHar7hQvCXiozfIryfUFCL58+mqjrZONnLD06xdTlfgLrTcI5gS3Xo/RnNhUziLPV0DsinpI3a+q7Yzg=="],
|
||||
|
||||
"scheduler": ["scheduler@0.0.0-fec00a869", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "sha512-0U25jnyBP6dRPYwaVW4WMYB0jJSYlrIHFmIuXv27X+KIHJr7vyE9gcFTqZ61NQTuxYLYepAHnUs4KgQEUDlI+g=="],
|
||||
"schema-utils": ["schema-utils@3.3.0", "", { "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", "ajv-keywords": "^3.5.2" } }, "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg=="],
|
||||
|
||||
"send": ["send@0.18.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg=="],
|
||||
|
||||
"send": ["send@0.19.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw=="],
|
||||
"serialize-javascript": ["serialize-javascript@6.0.2", "", { "dependencies": { "randombytes": "^2.1.0" } }, "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g=="],
|
||||
|
||||
"serve-static": ["serve-static@1.15.0", "", { "dependencies": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.18.0" } }, "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g=="],
|
||||
|
||||
"serve-static": ["serve-static@1.16.2", "", { "dependencies": { "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.19.0" } }, "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw=="],
|
||||
"set-function-length": ["set-function-length@1.2.2", "", { "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", "has-property-descriptors": "^1.0.2" } }, "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg=="],
|
||||
"setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="],
|
||||
|
||||
"side-channel": ["side-channel@1.0.4", "", { "dependencies": { "call-bind": "^1.0.0", "get-intrinsic": "^1.0.2", "object-inspect": "^1.9.0" } }, "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw=="],
|
||||
|
||||
"side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="],
|
||||
"side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="],
|
||||
"side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="],
|
||||
"side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="],
|
||||
"source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
|
||||
|
||||
"source-map-support": ["source-map-support@0.5.21", "", { "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w=="],
|
||||
|
||||
"statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="],
|
||||
|
||||
"supports-color": ["supports-color@8.1.1", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="],
|
||||
|
||||
"tapable": ["tapable@2.2.1", "", {}, "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ=="],
|
||||
|
||||
"terser": ["terser@5.36.0", "", { "dependencies": { "@jridgewell/source-map": "^0.3.3", "acorn": "^8.8.2", "commander": "^2.20.0", "source-map-support": "~0.5.20" }, "bin": { "terser": "bin/terser" } }, "sha512-IYV9eNMuFAV4THUspIRXkLakHnV6XO7FEdtKjf/mDyrnqUg9LnlOn6/RwRvM9SZjR4GUq8Nk8zj67FzVARr74w=="],
|
||||
|
||||
"terser": ["terser@5.37.0", "", { "dependencies": { "@jridgewell/source-map": "^0.3.3", "acorn": "^8.8.2", "commander": "^2.20.0", "source-map-support": "~0.5.20" } }, "sha512-B8wRRkmre4ERucLM/uXx4MOV5cbnOlVAqUst+1+iLKPI0dOgFO28f84ptoQt9HEI537PMzfYa/d+GEPKTRXmYA=="],
|
||||
"terser-webpack-plugin": ["terser-webpack-plugin@5.3.10", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.20", "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.1", "terser": "^5.26.0" }, "peerDependencies": { "webpack": "^5.1.0" } }, "sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w=="],
|
||||
|
||||
"toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="],
|
||||
|
||||
"type-is": ["type-is@1.6.18", "", { "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" } }, "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g=="],
|
||||
|
||||
"typescript": ["typescript@5.7.2", "", {}, "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg=="],
|
||||
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
|
||||
|
||||
"unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="],
|
||||
|
||||
"update-browserslist-db": ["update-browserslist-db@1.1.1", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A=="],
|
||||
|
||||
"update-browserslist-db": ["update-browserslist-db@1.1.1", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" } }, "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A=="],
|
||||
"uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="],
|
||||
|
||||
"utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="],
|
||||
|
||||
"vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="],
|
||||
|
||||
"watchpack": ["watchpack@2.4.2", "", { "dependencies": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" } }, "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw=="],
|
||||
|
||||
"webpack": ["webpack@5.96.1", "", { "dependencies": { "@types/eslint-scope": "^3.7.7", "@types/estree": "^1.0.6", "@webassemblyjs/ast": "^1.12.1", "@webassemblyjs/wasm-edit": "^1.12.1", "@webassemblyjs/wasm-parser": "^1.12.1", "acorn": "^8.14.0", "browserslist": "^4.24.0", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.11", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.2.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.3.10", "watchpack": "^2.4.1", "webpack-sources": "^3.2.3" }, "bin": { "webpack": "bin/webpack.js" } }, "sha512-l2LlBSvVZGhL4ZrPwyr8+37AunkcYj5qh8o6u2/2rzoPc8gxFJkLj1WxNgooi9pnoc06jh0BjuXnamM4qlujZA=="],
|
||||
|
||||
"webpack": ["webpack@5.97.1", "", { "dependencies": { "@types/eslint-scope": "^3.7.7", "@types/estree": "^1.0.6", "@webassemblyjs/ast": "^1.14.1", "@webassemblyjs/wasm-edit": "^1.14.1", "@webassemblyjs/wasm-parser": "^1.14.1", "acorn": "^8.14.0", "browserslist": "^4.24.0", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.11", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.2.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.3.10", "watchpack": "^2.4.1", "webpack-sources": "^3.2.3" } }, "sha512-EksG6gFY3L1eFMROS/7Wzgrii5mBAFe4rIr3r2BTfo7bcc+DWwFZ4OJ/miOuHJO/A85HwyI4eQ0F6IKXesO7Fg=="],
|
||||
"webpack-sources": ["webpack-sources@3.2.3", "", {}, "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w=="],
|
||||
|
||||
"send/encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="],
|
||||
"send/mime": ["mime@1.6.0", "", {}, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="],
|
||||
"debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
|
||||
"mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
|
||||
"esrecurse/estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="],
|
||||
|
||||
"send/mime": ["mime@1.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="],
|
||||
|
||||
"send/ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
}
|
||||
}
|
||||
|
||||
@@ -66,20 +66,25 @@ const Wasi = {
|
||||
return Date.now();
|
||||
},
|
||||
environ_sizes_get() {
|
||||
debugger;
|
||||
return 0;
|
||||
},
|
||||
environ_get(__environ: unknown, environ_buf: unknown) {
|
||||
debugger;
|
||||
return 0;
|
||||
},
|
||||
|
||||
fd_close(fd: number) {
|
||||
debugger;
|
||||
return 0;
|
||||
},
|
||||
proc_exit() {},
|
||||
|
||||
fd_seek(fd: number, offset_bigint: bigint, whence: unknown, newOffset: unknown) {
|
||||
debugger;
|
||||
},
|
||||
fd_write(fd: unknown, iov: unknown, iovcnt: unknown, pnum: unknown) {
|
||||
debugger;
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -132,28 +132,16 @@ function cmakePath(path) {
|
||||
return path.replace(/\\/g, "/");
|
||||
}
|
||||
|
||||
/** @param {string} str */
|
||||
const toAlphaNumeric = str => str.replace(/[^a-z0-9]/gi, "-");
|
||||
function getCachePath(branch) {
|
||||
const {
|
||||
BUILDKITE_BUILD_PATH: buildPath,
|
||||
BUILDKITE_REPO: repository,
|
||||
BUILDKITE_PULL_REQUEST_REPO: fork,
|
||||
BUILDKITE_BRANCH,
|
||||
BUILDKITE_STEP_KEY,
|
||||
} = process.env;
|
||||
|
||||
// NOTE: settings that could be long should be truncated to avoid hitting max
|
||||
// path length limit on windows (4096)
|
||||
const repositoryKey = toAlphaNumeric(
|
||||
// remove domain name, only leaving 'org/repo'
|
||||
(fork || repository).replace(/^https?:\/\/github\.com\/?/, ""),
|
||||
);
|
||||
const branchName = toAlphaNumeric(branch || BUILDKITE_BRANCH);
|
||||
const buildPath = process.env.BUILDKITE_BUILD_PATH;
|
||||
const repository = process.env.BUILDKITE_REPO;
|
||||
const fork = process.env.BUILDKITE_PULL_REQUEST_REPO;
|
||||
const repositoryKey = (fork || repository).replace(/[^a-z0-9]/gi, "-");
|
||||
const branchName = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
|
||||
const branchKey = branchName.startsWith("gh-readonly-queue-")
|
||||
? branchName.slice(18, branchName.indexOf("-pr-"))
|
||||
: branchName.slice(0, 32);
|
||||
const stepKey = toAlphaNumeric(BUILDKITE_STEP_KEY);
|
||||
: branchName;
|
||||
const stepKey = process.env.BUILDKITE_STEP_KEY.replace(/[^a-z0-9]/gi, "-");
|
||||
return resolve(buildPath, "..", "cache", repositoryKey, branchKey, stepKey);
|
||||
}
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ esac
|
||||
|
||||
export BUN_DEBUG_QUIET_LOGS=1
|
||||
|
||||
for x in $(find test/js/node/test/parallel -type f -name "test-$1*.js" | sort)
|
||||
for x in $(find test/js/node/test/parallel -type f -name "test-$1*.js")
|
||||
do
|
||||
i=$((i+1))
|
||||
echo ./$x
|
||||
|
||||
@@ -49,13 +49,6 @@ const spawnTimeout = 5_000;
|
||||
const testTimeout = 3 * 60_000;
|
||||
const integrationTimeout = 5 * 60_000;
|
||||
|
||||
function getNodeParallelTestTimeout(testPath) {
|
||||
if (testPath.includes("test-dns")) {
|
||||
return 90_000;
|
||||
}
|
||||
return 10_000;
|
||||
}
|
||||
|
||||
const { values: options, positionals: filters } = parseArgs({
|
||||
allowPositionals: true,
|
||||
options: {
|
||||
@@ -63,7 +56,6 @@ const { values: options, positionals: filters } = parseArgs({
|
||||
type: "boolean",
|
||||
default: false,
|
||||
},
|
||||
/** Path to bun binary */
|
||||
["exec-path"]: {
|
||||
type: "string",
|
||||
default: "bun",
|
||||
@@ -253,19 +245,15 @@ async function runTests() {
|
||||
|
||||
if (!failedResults.length) {
|
||||
for (const testPath of tests) {
|
||||
const absoluteTestPath = join(testsPath, testPath);
|
||||
const title = relative(cwd, absoluteTestPath).replaceAll(sep, "/");
|
||||
if (isNodeParallelTest(testPath)) {
|
||||
const subcommand = title.includes("needs-test") ? "test" : "run";
|
||||
const title = relative(cwd, join(testsPath, testPath)).replace(/\\/g, "/");
|
||||
if (title.startsWith("test/js/node/test/parallel/")) {
|
||||
await runTest(title, async () => {
|
||||
const { ok, error, stdout } = await spawnBun(execPath, {
|
||||
cwd: cwd,
|
||||
args: [subcommand, "--config=./bunfig.node-test.toml", absoluteTestPath],
|
||||
timeout: getNodeParallelTestTimeout(title),
|
||||
args: [title],
|
||||
timeout: 10_000,
|
||||
env: {
|
||||
FORCE_COLOR: "0",
|
||||
NO_COLOR: "1",
|
||||
BUN_DEBUG_QUIET_LOGS: "1",
|
||||
},
|
||||
stdout: chunk => pipeTestStdout(process.stdout, chunk),
|
||||
stderr: chunk => pipeTestStdout(process.stderr, chunk),
|
||||
@@ -283,9 +271,9 @@ async function runTests() {
|
||||
stdoutPreview: stdoutPreview,
|
||||
};
|
||||
});
|
||||
} else {
|
||||
await runTest(title, async () => spawnBunTest(execPath, join("test", testPath)));
|
||||
continue;
|
||||
}
|
||||
await runTest(title, async () => spawnBunTest(execPath, join("test", testPath)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -542,7 +530,7 @@ async function spawnSafe(options) {
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} execPath Path to bun binary
|
||||
* @param {string} execPath
|
||||
* @param {SpawnOptions} options
|
||||
* @returns {Promise<SpawnResult>}
|
||||
*/
|
||||
@@ -570,11 +558,9 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
|
||||
// Used in Node.js tests.
|
||||
TEST_TMPDIR: tmpdirPath,
|
||||
};
|
||||
|
||||
if (env) {
|
||||
Object.assign(bunEnv, env);
|
||||
}
|
||||
|
||||
if (isWindows) {
|
||||
delete bunEnv["PATH"];
|
||||
bunEnv["Path"] = path;
|
||||
@@ -864,20 +850,12 @@ function isJavaScriptTest(path) {
|
||||
return isJavaScript(path) && /\.test|spec\./.test(basename(path));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} testPath
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isNodeParallelTest(testPath) {
|
||||
return testPath.replaceAll(sep, "/").includes("js/node/test/parallel/");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} path
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isTest(path) {
|
||||
if (isNodeParallelTest(path) && targetDoesRunNodeTests()) return true;
|
||||
if (path.replaceAll(sep, "/").startsWith("js/node/test/parallel/") && targetDoesRunNodeTests()) return true;
|
||||
if (path.replaceAll(sep, "/").startsWith("js/node/cluster/test-") && path.endsWith(".ts")) return true;
|
||||
return isTestStrict(path);
|
||||
}
|
||||
@@ -899,9 +877,6 @@ function isHidden(path) {
|
||||
return /node_modules|node.js/.test(dirname(path)) || /^\./.test(basename(path));
|
||||
}
|
||||
|
||||
/** Files with these extensions are not treated as test cases */
|
||||
const IGNORED_EXTENSIONS = new Set([".md"]);
|
||||
|
||||
/**
|
||||
* @param {string} cwd
|
||||
* @returns {string[]}
|
||||
@@ -911,9 +886,8 @@ function getTests(cwd) {
|
||||
const dirname = join(cwd, path);
|
||||
for (const entry of readdirSync(dirname, { encoding: "utf-8", withFileTypes: true })) {
|
||||
const { name } = entry;
|
||||
const ext = name.slice(name.lastIndexOf("."));
|
||||
const filename = join(path, name);
|
||||
if (isHidden(filename) || IGNORED_EXTENSIONS.has(ext)) {
|
||||
if (isHidden(filename)) {
|
||||
continue;
|
||||
}
|
||||
if (entry.isFile() && isTest(filename)) {
|
||||
@@ -1061,7 +1035,7 @@ function getRelevantTests(cwd) {
|
||||
const filteredTests = [];
|
||||
|
||||
if (options["node-tests"]) {
|
||||
tests = tests.filter(isNodeParallelTest);
|
||||
tests = tests.filter(testPath => testPath.includes("js/node/test/parallel/"));
|
||||
}
|
||||
|
||||
const isMatch = (testPath, filter) => {
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# @file trace.sh
|
||||
# @summary build + run bun with Instruments. All args are forwarded to `bun-debug`.
|
||||
#
|
||||
# @description
|
||||
# This script builds bun, signs it with debug entitlements, and runs it with an
|
||||
# Allocations template. After running, a `.trace` folder will be created. Open
|
||||
# it with `open foo.trace` to view it in Instruments.
|
||||
#
|
||||
# This script requires xcode command line tools to be installed and only works
|
||||
# on MacOS.
|
||||
|
||||
set -e -o pipefail
|
||||
|
||||
BUN="bun-debug"
|
||||
DEBUG_BUN="build/debug/${BUN}"
|
||||
|
||||
file_to_run=$1
|
||||
if [[ -z $file_to_run ]]; then
|
||||
echo "Usage: $0 <file_to_run> [bun args]"
|
||||
echo " $0 test <file_to_run> [bun args]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
bun run build
|
||||
|
||||
echo "Signing bun binary..."
|
||||
codesign --entitlements $(realpath entitlements.debug.plist) --force --timestamp --sign - -vvvv --deep --strict ${DEBUG_BUN}
|
||||
|
||||
export BUN_JSC_logJITCodeForPerf=1
|
||||
export BUN_JSC_collectExtraSamplingProfilerData=1
|
||||
export BUN_JSC_sampleCCode=1
|
||||
export BUN_JSC_alwaysGeneratePCToCodeOriginMap=1
|
||||
|
||||
echo "Tracing ${file_to_run}..."
|
||||
xcrun xctrace record --template "Allocations" -output . --launch -- "./${DEBUG_BUN}" $file_to_run
|
||||
# perf record -k 1 --sample-cpu -e cycles:u -j any --call-graph dwarf,16384 -F 499 -p (pgrep -f "${BUN}")
|
||||
|
||||
# DEBUGINFOD_URLS="" perf inject --jit --input perf.data --output=perf.jit.data -v
|
||||
@@ -28,29 +28,12 @@ pub fn deinit(this: *HTMLScanner) void {
|
||||
this.import_records.deinitWithAllocator(this.allocator);
|
||||
}
|
||||
|
||||
fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKind) !void {
|
||||
// In HTML, sometimes people do /src/index.js
|
||||
// In that case, we don't want to use the absolute filesystem path, we want to use the path relative to the project root
|
||||
const path_to_use = if (input_path.len > 1 and input_path[0] == '/')
|
||||
bun.path.joinAbsString(bun.fs.FileSystem.instance.top_level_dir, &[_][]const u8{input_path[1..]}, .auto)
|
||||
|
||||
// Check if imports to (e.g) "App.tsx" are actually relative imoprts w/o the "./"
|
||||
else if (input_path.len > 2 and input_path[0] != '.' and input_path[1] != '/') blk: {
|
||||
const index_of_dot = std.mem.lastIndexOfScalar(u8, input_path, '.') orelse break :blk input_path;
|
||||
const ext = input_path[index_of_dot..];
|
||||
if (ext.len > 4) break :blk input_path;
|
||||
// /foo/bar/index.html -> /foo/bar
|
||||
const dirname: []const u8 = std.fs.path.dirname(this.source.path.text) orelse break :blk input_path;
|
||||
const resolved = bun.path.joinAbsString(dirname, &[_][]const u8{input_path}, .auto);
|
||||
break :blk if (bun.sys.exists(resolved)) resolved else input_path;
|
||||
} else input_path;
|
||||
|
||||
fn createImportRecord(this: *HTMLScanner, path: []const u8, kind: ImportKind) !void {
|
||||
const record = ImportRecord{
|
||||
.path = fs.Path.init(try this.allocator.dupeZ(u8, path_to_use)),
|
||||
.path = fs.Path.init(try this.allocator.dupe(u8, path)),
|
||||
.kind = kind,
|
||||
.range = logger.Range.None,
|
||||
};
|
||||
|
||||
try this.import_records.push(this.allocator, record);
|
||||
}
|
||||
|
||||
|
||||
222
src/Mutex.zig
222
src/Mutex.zig
@@ -1,222 +0,0 @@
|
||||
//! This is a copy-pasta of std.Thread.Mutex with some changes.
|
||||
//! - No assert with unreachable
|
||||
//! - uses bun.Futex instead of std.Thread.Futex
|
||||
//! Mutex is a synchronization primitive which enforces atomic access to a shared region of code known as the "critical section".
|
||||
//! It does this by blocking ensuring only one thread is in the critical section at any given point in time by blocking the others.
|
||||
//! Mutex can be statically initialized and is at most `@sizeOf(u64)` large.
|
||||
//! Use `lock()` or `tryLock()` to enter the critical section and `unlock()` to leave it.
|
||||
//!
|
||||
//! Example:
|
||||
//! ```
|
||||
//! var m = Mutex{};
|
||||
//!
|
||||
//! {
|
||||
//! m.lock();
|
||||
//! defer m.unlock();
|
||||
//! // ... critical section code
|
||||
//! }
|
||||
//!
|
||||
//! if (m.tryLock()) {
|
||||
//! defer m.unlock();
|
||||
//! // ... critical section code
|
||||
//! }
|
||||
//! ```
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const bun = @import("root").bun;
|
||||
const assert = bun.assert;
|
||||
const testing = std.testing;
|
||||
const Thread = std.Thread;
|
||||
const Futex = bun.Futex;
|
||||
|
||||
impl: Impl = .{},
|
||||
|
||||
/// Tries to acquire the mutex without blocking the caller's thread.
|
||||
/// Returns `false` if the calling thread would have to block to acquire it.
|
||||
/// Otherwise, returns `true` and the caller should `unlock()` the Mutex to release it.
|
||||
pub fn tryLock(self: *Mutex) bool {
|
||||
return self.impl.tryLock();
|
||||
}
|
||||
|
||||
/// Acquires the mutex, blocking the caller's thread until it can.
|
||||
/// It is undefined behavior if the mutex is already held by the caller's thread.
|
||||
/// Once acquired, call `unlock()` on the Mutex to release it.
|
||||
pub fn lock(self: *Mutex) void {
|
||||
self.impl.lock();
|
||||
}
|
||||
|
||||
/// Releases the mutex which was previously acquired with `lock()` or `tryLock()`.
|
||||
/// It is undefined behavior if the mutex is unlocked from a different thread that it was locked from.
|
||||
pub fn unlock(self: *Mutex) void {
|
||||
self.impl.unlock();
|
||||
}
|
||||
|
||||
const Impl = if (builtin.mode == .Debug and !builtin.single_threaded)
|
||||
DebugImpl
|
||||
else
|
||||
ReleaseImpl;
|
||||
|
||||
pub const ReleaseImpl =
|
||||
if (builtin.os.tag == .windows)
|
||||
WindowsImpl
|
||||
else if (builtin.os.tag.isDarwin())
|
||||
DarwinImpl
|
||||
else
|
||||
FutexImpl;
|
||||
|
||||
pub const ExternImpl = ReleaseImpl.Type;
|
||||
|
||||
const DebugImpl = struct {
|
||||
locking_thread: std.atomic.Value(Thread.Id) = std.atomic.Value(Thread.Id).init(0), // 0 means it's not locked.
|
||||
impl: ReleaseImpl = .{},
|
||||
|
||||
inline fn tryLock(self: *@This()) bool {
|
||||
const locking = self.impl.tryLock();
|
||||
if (locking) {
|
||||
self.locking_thread.store(Thread.getCurrentId(), .unordered);
|
||||
}
|
||||
return locking;
|
||||
}
|
||||
|
||||
inline fn lock(self: *@This()) void {
|
||||
const current_id = Thread.getCurrentId();
|
||||
if (self.locking_thread.load(.unordered) == current_id and current_id != 0) {
|
||||
@panic("Deadlock detected");
|
||||
}
|
||||
self.impl.lock();
|
||||
self.locking_thread.store(current_id, .unordered);
|
||||
}
|
||||
|
||||
inline fn unlock(self: *@This()) void {
|
||||
assert(self.locking_thread.load(.unordered) == Thread.getCurrentId());
|
||||
self.locking_thread.store(0, .unordered);
|
||||
self.impl.unlock();
|
||||
}
|
||||
};
|
||||
|
||||
// SRWLOCK on windows is almost always faster than Futex solution.
|
||||
// It also implements an efficient Condition with requeue support for us.
|
||||
const WindowsImpl = struct {
|
||||
srwlock: Type = .{},
|
||||
|
||||
fn tryLock(self: *@This()) bool {
|
||||
return windows.kernel32.TryAcquireSRWLockExclusive(&self.srwlock) != windows.FALSE;
|
||||
}
|
||||
|
||||
fn lock(self: *@This()) void {
|
||||
windows.kernel32.AcquireSRWLockExclusive(&self.srwlock);
|
||||
}
|
||||
|
||||
fn unlock(self: *@This()) void {
|
||||
windows.kernel32.ReleaseSRWLockExclusive(&self.srwlock);
|
||||
}
|
||||
|
||||
const windows = std.os.windows;
|
||||
|
||||
pub const Type = windows.SRWLOCK;
|
||||
};
|
||||
|
||||
// os_unfair_lock on darwin supports priority inheritance and is generally faster than Futex solutions.
|
||||
const DarwinImpl = struct {
|
||||
oul: Type = .{},
|
||||
|
||||
fn tryLock(self: *@This()) bool {
|
||||
return c.os_unfair_lock_trylock(&self.oul);
|
||||
}
|
||||
|
||||
fn lock(self: *@This()) void {
|
||||
c.os_unfair_lock_lock(&self.oul);
|
||||
}
|
||||
|
||||
fn unlock(self: *@This()) void {
|
||||
c.os_unfair_lock_unlock(&self.oul);
|
||||
}
|
||||
|
||||
const c = std.c;
|
||||
pub const Type = c.os_unfair_lock;
|
||||
};
|
||||
|
||||
const FutexImpl = struct {
|
||||
state: std.atomic.Value(u32) = std.atomic.Value(u32).init(unlocked),
|
||||
|
||||
const unlocked: u32 = 0b00;
|
||||
const locked: u32 = 0b01;
|
||||
const contended: u32 = 0b11; // must contain the `locked` bit for x86 optimization below
|
||||
|
||||
fn lock(self: *@This()) void {
|
||||
if (!self.tryLock())
|
||||
self.lockSlow();
|
||||
}
|
||||
|
||||
fn tryLock(self: *@This()) bool {
|
||||
// On x86, use `lock bts` instead of `lock cmpxchg` as:
|
||||
// - they both seem to mark the cache-line as modified regardless: https://stackoverflow.com/a/63350048
|
||||
// - `lock bts` is smaller instruction-wise which makes it better for inlining
|
||||
if (comptime builtin.target.cpu.arch.isX86()) {
|
||||
const locked_bit = @ctz(locked);
|
||||
return self.state.bitSet(locked_bit, .acquire) == 0;
|
||||
}
|
||||
|
||||
// Acquire barrier ensures grabbing the lock happens before the critical section
|
||||
// and that the previous lock holder's critical section happens before we grab the lock.
|
||||
return self.state.cmpxchgWeak(unlocked, locked, .acquire, .monotonic) == null;
|
||||
}
|
||||
|
||||
fn lockSlow(self: *@This()) void {
|
||||
@setCold(true);
|
||||
|
||||
// Avoid doing an atomic swap below if we already know the state is contended.
|
||||
// An atomic swap unconditionally stores which marks the cache-line as modified unnecessarily.
|
||||
if (self.state.load(.monotonic) == contended) {
|
||||
Futex.waitForever(&self.state, contended);
|
||||
}
|
||||
|
||||
// Try to acquire the lock while also telling the existing lock holder that there are threads waiting.
|
||||
//
|
||||
// Once we sleep on the Futex, we must acquire the mutex using `contended` rather than `locked`.
|
||||
// If not, threads sleeping on the Futex wouldn't see the state change in unlock and potentially deadlock.
|
||||
// The downside is that the last mutex unlocker will see `contended` and do an unnecessary Futex wake
|
||||
// but this is better than having to wake all waiting threads on mutex unlock.
|
||||
//
|
||||
// Acquire barrier ensures grabbing the lock happens before the critical section
|
||||
// and that the previous lock holder's critical section happens before we grab the lock.
|
||||
while (self.state.swap(contended, .acquire) != unlocked) {
|
||||
Futex.waitForever(&self.state, contended);
|
||||
}
|
||||
}
|
||||
|
||||
fn unlock(self: *@This()) void {
|
||||
// Unlock the mutex and wake up a waiting thread if any.
|
||||
//
|
||||
// A waiting thread will acquire with `contended` instead of `locked`
|
||||
// which ensures that it wakes up another thread on the next unlock().
|
||||
//
|
||||
// Release barrier ensures the critical section happens before we let go of the lock
|
||||
// and that our critical section happens before the next lock holder grabs the lock.
|
||||
const state = self.state.swap(unlocked, .release);
|
||||
assert(state != unlocked);
|
||||
|
||||
if (state == contended) {
|
||||
Futex.wake(&self.state, 1);
|
||||
}
|
||||
}
|
||||
|
||||
pub const Type = u32;
|
||||
};
|
||||
|
||||
const Mutex = @This();
|
||||
|
||||
pub fn spinCycle() void {}
|
||||
|
||||
// These have to be a size known to C.
|
||||
export fn Bun__lock(ptr: *ReleaseImpl) void {
|
||||
ptr.lock();
|
||||
}
|
||||
|
||||
// These have to be a size known to C.
|
||||
export fn Bun__unlock(ptr: *ReleaseImpl) void {
|
||||
ptr.unlock();
|
||||
}
|
||||
|
||||
export const Bun__lock__size: usize = @sizeOf(ReleaseImpl);
|
||||
@@ -1,8 +1,7 @@
|
||||
//! A nullable allocator the same size as `std.mem.Allocator`.
|
||||
const std = @import("std");
|
||||
const bun = @import("root").bun;
|
||||
|
||||
const NullableAllocator = @This();
|
||||
const bun = @import("root").bun;
|
||||
|
||||
ptr: *anyopaque = undefined,
|
||||
// Utilize the null pointer optimization on the vtable instead of
|
||||
@@ -18,9 +18,8 @@ const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const windows = std.os.windows;
|
||||
const testing = std.testing;
|
||||
const assert = bun.assert;
|
||||
const assert = (std.debug).assert;
|
||||
const Progress = @This();
|
||||
const bun = @import("root").bun;
|
||||
|
||||
/// `null` if the current node (and its children) should
|
||||
/// not print on update()
|
||||
@@ -65,7 +64,7 @@ done: bool = true,
|
||||
/// Protects the `refresh` function, as well as `node.recently_updated_child`.
|
||||
/// Without this, callsites would call `Node.end` and then free `Node` memory
|
||||
/// while it was still being accessed by the `refresh` function.
|
||||
update_mutex: bun.Mutex = .{},
|
||||
update_mutex: std.Thread.Mutex = .{},
|
||||
|
||||
/// Keeps track of how many columns in the terminal have been output, so that
|
||||
/// we can move the cursor back later.
|
||||
|
||||
@@ -168,7 +168,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
none,
|
||||
|
||||
/// It probably is not possible to run two decoding jobs on the same file
|
||||
var init_lock: bun.Mutex = .{};
|
||||
var init_lock: bun.Lock = .{};
|
||||
|
||||
pub fn load(this: *LazySourceMap) ?*SourceMap.ParsedSourceMap {
|
||||
init_lock.lock();
|
||||
|
||||
@@ -267,7 +267,7 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
|
||||
};
|
||||
}
|
||||
|
||||
const Mutex = bun.Mutex;
|
||||
const Mutex = @import("./lock.zig").Lock;
|
||||
|
||||
/// Append-only list.
|
||||
/// Stores an initial count in .bss section of the object file
|
||||
|
||||
@@ -1,477 +0,0 @@
|
||||
const std = @import("std");
|
||||
const bun = @import("bun.zig");
|
||||
const js_ast = bun.JSAst;
|
||||
const Ast = js_ast.Ast;
|
||||
|
||||
pub const RecordKind = enum(u8) {
|
||||
/// var_name
|
||||
declared_variable,
|
||||
/// let_name
|
||||
lexical_variable,
|
||||
/// module_name, import_name, local_name
|
||||
import_info_single,
|
||||
/// module_name, import_name = '*', local_name
|
||||
import_info_namespace,
|
||||
/// export_name, import_name, module_name
|
||||
export_info_indirect,
|
||||
/// export_name, local_name, padding (for local => indirect conversion)
|
||||
export_info_local,
|
||||
/// export_name, module_name
|
||||
export_info_namespace,
|
||||
/// module_name
|
||||
export_info_star,
|
||||
_,
|
||||
|
||||
pub fn len(record: RecordKind) !usize {
|
||||
return switch (record) {
|
||||
.declared_variable, .lexical_variable => 1,
|
||||
.import_info_single => 3,
|
||||
.import_info_namespace => 3,
|
||||
.export_info_indirect => 3,
|
||||
.export_info_local => 3,
|
||||
.export_info_namespace => 2,
|
||||
.export_info_star => 1,
|
||||
else => return error.InvalidRecordKind,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub const ModuleInfoDeserialized = struct {
|
||||
strings_buf: []const u8,
|
||||
strings_lens: []align(1) const u32,
|
||||
requested_modules_keys: []align(1) const StringID,
|
||||
requested_modules_values: []align(1) const ModuleInfo.FetchParameters,
|
||||
buffer: []align(1) const StringID,
|
||||
record_kinds: []align(1) const RecordKind,
|
||||
contains_import_meta: bool,
|
||||
owner: union(enum) {
|
||||
module_info,
|
||||
allocated_slice: struct {
|
||||
slice: []const u8,
|
||||
allocator: std.mem.Allocator,
|
||||
},
|
||||
},
|
||||
dead: bool = false,
|
||||
|
||||
pub fn deinit(self: *ModuleInfoDeserialized) void {
|
||||
switch (self.owner) {
|
||||
.module_info => {
|
||||
const mi: *ModuleInfo = @fieldParentPtr("_deserialized", self);
|
||||
mi.destroy();
|
||||
},
|
||||
.allocated_slice => |as| {
|
||||
as.allocator.free(as.slice);
|
||||
as.allocator.destroy(self);
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
inline fn eat(rem: *[]const u8, len: usize) ![]const u8 {
|
||||
if (rem.*.len < len) return error.BadModuleInfo;
|
||||
const res = rem.*[0..len];
|
||||
rem.* = rem.*[len..];
|
||||
return res;
|
||||
}
|
||||
inline fn eatC(rem: *[]const u8, comptime len: usize) !*const [len]u8 {
|
||||
if (rem.*.len < len) return error.BadModuleInfo;
|
||||
const res = rem.*[0..len];
|
||||
rem.* = rem.*[len..];
|
||||
return res;
|
||||
}
|
||||
pub fn create(source: []const u8, gpa: std.mem.Allocator) !*ModuleInfoDeserialized {
|
||||
std.log.info("ModuleInfoDeserialized.create", .{});
|
||||
var rem = try gpa.dupe(u8, source);
|
||||
errdefer gpa.free(rem);
|
||||
var res = try gpa.create(ModuleInfoDeserialized);
|
||||
errdefer res.deinit();
|
||||
|
||||
const record_kinds_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
|
||||
const record_kinds = std.mem.bytesAsSlice(RecordKind, try eat(&rem, record_kinds_len * @sizeOf(RecordKind)));
|
||||
const buffer_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
|
||||
const buffer = std.mem.bytesAsSlice(StringID, try eat(&rem, buffer_len * @sizeOf(StringID)));
|
||||
const requested_modules_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
|
||||
const requested_modules_keys = std.mem.bytesAsSlice(StringID, try eat(&rem, requested_modules_len * @sizeOf(StringID)));
|
||||
const requested_modules_values = std.mem.bytesAsSlice(ModuleInfo.FetchParameters, try eat(&rem, requested_modules_len * @sizeOf(ModuleInfo.FetchParameters)));
|
||||
const contains_import_meta = (try eatC(&rem, 1))[0] != 0;
|
||||
const strings_len = std.mem.readInt(u32, try eatC(&rem, 4), .little);
|
||||
const strings_lens = std.mem.bytesAsSlice(u32, try eat(&rem, strings_len * @sizeOf(u32)));
|
||||
const strings_buf = rem;
|
||||
|
||||
res.* = .{
|
||||
.strings_buf = strings_buf,
|
||||
.strings_lens = strings_lens,
|
||||
.requested_modules_keys = requested_modules_keys,
|
||||
.requested_modules_values = requested_modules_values,
|
||||
.buffer = buffer,
|
||||
.record_kinds = record_kinds,
|
||||
.contains_import_meta = contains_import_meta,
|
||||
.owner = .{ .allocated_slice = .{
|
||||
.slice = source,
|
||||
.allocator = gpa,
|
||||
} },
|
||||
};
|
||||
return res;
|
||||
}
|
||||
pub fn serialize(self: *const ModuleInfoDeserialized, writer: anytype) !void {
|
||||
try writer.writeInt(u32, @truncate(self.record_kinds.len), .little);
|
||||
try writer.writeAll(std.mem.sliceAsBytes(self.record_kinds));
|
||||
try writer.writeInt(u32, @truncate(self.buffer.len), .little);
|
||||
try writer.writeAll(std.mem.sliceAsBytes(self.buffer));
|
||||
|
||||
try writer.writeInt(u32, @truncate(self.requested_modules_keys.len), .little);
|
||||
try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_keys));
|
||||
try writer.writeAll(std.mem.sliceAsBytes(self.requested_modules_values));
|
||||
|
||||
try writer.writeInt(u8, @intFromBool(self.contains_import_meta), .little);
|
||||
|
||||
try writer.writeInt(u32, @truncate(self.strings_lens.len), .little);
|
||||
try writer.writeAll(std.mem.sliceAsBytes(self.strings_lens));
|
||||
try writer.writeAll(self.strings_buf);
|
||||
}
|
||||
};
|
||||
|
||||
const StringMapKey = enum(u32) {
|
||||
get_or_put = std.math.maxInt(u32),
|
||||
_,
|
||||
};
|
||||
pub const StringContext = struct {
|
||||
get_or_put_key: []const u8,
|
||||
strings_buf: []const u8,
|
||||
strings_lens: []const u32,
|
||||
|
||||
pub fn hash(self: @This(), s: StringMapKey) u32 {
|
||||
bun.assert(s == .get_or_put);
|
||||
return @as(u32, @truncate(std.hash.Wyhash.hash(0, self.get_or_put_key)));
|
||||
}
|
||||
pub fn eql(self: @This(), fetch_key: StringMapKey, item_key: StringMapKey, item_i: usize) bool {
|
||||
bun.assert(item_key != .get_or_put);
|
||||
bun.assert(fetch_key == .get_or_put);
|
||||
return bun.strings.eqlLong(self.get_or_put_key, self.strings_buf[@intFromEnum(item_key)..][0..self.strings_lens[item_i]], true);
|
||||
}
|
||||
};
|
||||
|
||||
pub const ModuleInfo = struct {
|
||||
/// all strings in wtf-8. index in hashmap = StringID
|
||||
gpa: std.mem.Allocator,
|
||||
strings_map: std.ArrayHashMapUnmanaged(StringMapKey, void, StringContext, true),
|
||||
strings_buf: std.ArrayListUnmanaged(u8),
|
||||
strings_lens: std.ArrayListUnmanaged(u32),
|
||||
requested_modules: std.AutoArrayHashMap(StringID, FetchParameters),
|
||||
buffer: std.ArrayList(StringID),
|
||||
record_kinds: std.ArrayList(RecordKind),
|
||||
exported_names: std.AutoArrayHashMapUnmanaged(StringID, void),
|
||||
contains_import_meta: bool,
|
||||
finalized: bool = false,
|
||||
|
||||
_deserialized: ModuleInfoDeserialized = undefined,
|
||||
|
||||
pub fn asDeserialized(self: *ModuleInfo) *ModuleInfoDeserialized {
|
||||
bun.assert(self.finalized);
|
||||
return &self._deserialized;
|
||||
}
|
||||
|
||||
pub const FetchParameters = enum(u32) {
|
||||
none = std.math.maxInt(u32),
|
||||
javascript = std.math.maxInt(u32) - 1,
|
||||
webassembly = std.math.maxInt(u32) - 2,
|
||||
json = std.math.maxInt(u32) - 3,
|
||||
_, // host_defined: cast to StringID
|
||||
pub fn hostDefined(value: StringID) FetchParameters {
|
||||
return @enumFromInt(@intFromEnum(value));
|
||||
}
|
||||
};
|
||||
|
||||
pub const VarKind = enum { declared, lexical };
|
||||
pub fn addVar(self: *ModuleInfo, name: []const u8, kind: VarKind) !void {
|
||||
switch (kind) {
|
||||
.declared => try self.addDeclaredVariable(name),
|
||||
.lexical => try self.addLexicalVariable(name),
|
||||
}
|
||||
}
|
||||
|
||||
fn _addRecord(self: *ModuleInfo, kind: RecordKind, data: []const StringID) !void {
|
||||
bun.assert(!self.finalized);
|
||||
bun.assert(data.len == kind.len() catch unreachable);
|
||||
try self.record_kinds.append(kind);
|
||||
try self.buffer.appendSlice(data);
|
||||
}
|
||||
pub fn addDeclaredVariable(self: *ModuleInfo, id: []const u8) !void {
|
||||
try self._addRecord(.declared_variable, &.{try self.str(id)});
|
||||
}
|
||||
pub fn addLexicalVariable(self: *ModuleInfo, id: []const u8) !void {
|
||||
try self._addRecord(.lexical_variable, &.{try self.str(id)});
|
||||
}
|
||||
pub fn addImportInfoSingle(self: *ModuleInfo, module_name: []const u8, import_name: []const u8, local_name: []const u8) !void {
|
||||
try self._addRecord(.import_info_single, &.{ try self.str(module_name), try self.str(import_name), try self.str(local_name) });
|
||||
}
|
||||
pub fn addImportInfoNamespace(self: *ModuleInfo, module_name: []const u8, local_name: []const u8) !void {
|
||||
try self._addRecord(.import_info_namespace, &.{ try self.str(module_name), try self.str("*"), try self.str(local_name) });
|
||||
}
|
||||
pub fn addExportInfoIndirect(self: *ModuleInfo, export_name: []const u8, import_name: []const u8, module_name: []const u8) !void {
|
||||
const export_name_id = try self.str(export_name);
|
||||
if (try self._hasOrAddExportedName(export_name_id)) return; // a syntax error will be emitted later in this case
|
||||
try self._addRecord(.export_info_indirect, &.{ export_name_id, try self.str(import_name), try self.str(module_name) });
|
||||
}
|
||||
pub fn addExportInfoLocal(self: *ModuleInfo, export_name: []const u8, local_name: []const u8) !void {
|
||||
const export_name_id = try self.str(export_name);
|
||||
if (try self._hasOrAddExportedName(export_name_id)) return; // a syntax error will be emitted later in this case
|
||||
try self._addRecord(.export_info_local, &.{ export_name_id, try self.str(local_name), @enumFromInt(std.math.maxInt(u32)) });
|
||||
}
|
||||
pub fn addExportInfoNamespace(self: *ModuleInfo, export_name: []const u8, module_name: []const u8) !void {
|
||||
const export_name_id = try self.str(export_name);
|
||||
if (try self._hasOrAddExportedName(export_name_id)) return; // a syntax error will be emitted later in this case
|
||||
try self._addRecord(.export_info_namespace, &.{ export_name_id, try self.str(module_name) });
|
||||
}
|
||||
pub fn addExportInfoStar(self: *ModuleInfo, module_name: []const u8) !void {
|
||||
try self._addRecord(.export_info_star, &.{try self.str(module_name)});
|
||||
}
|
||||
|
||||
pub fn _hasOrAddExportedName(self: *ModuleInfo, name: StringID) !bool {
|
||||
if (try self.exported_names.fetchPut(self.gpa, name, {}) != null) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
pub fn create(gpa: std.mem.Allocator) !*ModuleInfo {
|
||||
const res = try gpa.create(ModuleInfo);
|
||||
res.* = ModuleInfo.init(gpa);
|
||||
return res;
|
||||
}
|
||||
fn init(allocator: std.mem.Allocator) ModuleInfo {
|
||||
return .{
|
||||
.gpa = allocator,
|
||||
.strings_map = .{},
|
||||
.strings_buf = .{},
|
||||
.strings_lens = .{},
|
||||
.exported_names = .{},
|
||||
.requested_modules = std.AutoArrayHashMap(StringID, FetchParameters).init(allocator),
|
||||
.buffer = std.ArrayList(StringID).init(allocator),
|
||||
.record_kinds = std.ArrayList(RecordKind).init(allocator),
|
||||
.contains_import_meta = false,
|
||||
};
|
||||
}
|
||||
fn deinit(self: *ModuleInfo) void {
|
||||
self.strings_map.deinit(self.gpa);
|
||||
self.strings_buf.deinit(self.gpa);
|
||||
self.strings_lens.deinit(self.gpa);
|
||||
self.exported_names.deinit(self.gpa);
|
||||
self.requested_modules.deinit();
|
||||
self.buffer.deinit();
|
||||
self.record_kinds.deinit();
|
||||
}
|
||||
pub fn destroy(self: *ModuleInfo) void {
|
||||
const alloc = self.gpa;
|
||||
self.deinit();
|
||||
alloc.destroy(self);
|
||||
}
|
||||
pub fn str(self: *ModuleInfo, value: []const u8) !StringID {
|
||||
const gpres = try self.strings_map.getOrPutContext(self.gpa, .get_or_put, .{
|
||||
.get_or_put_key = value,
|
||||
.strings_buf = self.strings_buf.items,
|
||||
.strings_lens = self.strings_lens.items,
|
||||
});
|
||||
if (gpres.found_existing) return @enumFromInt(@as(u32, @intCast(gpres.index)));
|
||||
|
||||
gpres.key_ptr.* = @enumFromInt(@as(u32, @truncate(self.strings_buf.items.len)));
|
||||
gpres.value_ptr.* = {};
|
||||
try self.strings_buf.ensureUnusedCapacity(self.gpa, value.len);
|
||||
try self.strings_lens.ensureUnusedCapacity(self.gpa, 1);
|
||||
self.strings_buf.appendSliceAssumeCapacity(value);
|
||||
self.strings_lens.appendAssumeCapacity(@as(u32, @truncate(value.len)));
|
||||
return @enumFromInt(@as(u32, @intCast(gpres.index)));
|
||||
}
|
||||
pub const star_default = "*default*";
|
||||
pub fn requestModule(self: *ModuleInfo, import_record_path: []const u8, fetch_parameters: FetchParameters) !void {
|
||||
// jsc only records the attributes of the first import with the given import_record_path. so only put if not exists.
|
||||
const gpres = try self.requested_modules.getOrPut(try self.str(import_record_path));
|
||||
if (!gpres.found_existing) gpres.value_ptr.* = fetch_parameters;
|
||||
}
|
||||
|
||||
/// find any exports marked as 'local' that are actually 'indirect' and fix them
|
||||
pub fn finalize(self: *ModuleInfo) !void {
|
||||
bun.assert(!self.finalized);
|
||||
var local_name_to_module_name = std.AutoArrayHashMap(StringID, struct { module_name: StringID, import_name: StringID }).init(bun.default_allocator);
|
||||
defer local_name_to_module_name.deinit();
|
||||
{
|
||||
var i: usize = 0;
|
||||
for (self.record_kinds.items) |k| {
|
||||
if (k == .import_info_single) {
|
||||
try local_name_to_module_name.put(self.buffer.items[i + 2], .{ .module_name = self.buffer.items[i], .import_name = self.buffer.items[i + 1] });
|
||||
}
|
||||
i += k.len() catch unreachable;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
var i: usize = 0;
|
||||
for (self.record_kinds.items) |*k| {
|
||||
if (k.* == .export_info_local) {
|
||||
if (local_name_to_module_name.get(self.buffer.items[i + 1])) |ip| {
|
||||
k.* = .export_info_indirect;
|
||||
self.buffer.items[i + 1] = ip.import_name;
|
||||
self.buffer.items[i + 2] = ip.module_name;
|
||||
}
|
||||
}
|
||||
i += k.len() catch unreachable;
|
||||
}
|
||||
}
|
||||
|
||||
self._deserialized = .{
|
||||
.strings_buf = self.strings_buf.items,
|
||||
.strings_lens = self.strings_lens.items,
|
||||
.requested_modules_keys = self.requested_modules.keys(),
|
||||
.requested_modules_values = self.requested_modules.values(),
|
||||
.buffer = self.buffer.items,
|
||||
.record_kinds = self.record_kinds.items,
|
||||
.contains_import_meta = self.contains_import_meta,
|
||||
.owner = .module_info,
|
||||
};
|
||||
|
||||
self.finalized = true;
|
||||
}
|
||||
};
|
||||
pub const StringID = enum(u32) {
|
||||
_,
|
||||
};
|
||||
|
||||
export fn zig__renderDiff(expected_ptr: [*:0]const u8, expected_len: usize, received_ptr: [*:0]const u8, received_len: usize, globalThis: *bun.JSC.JSGlobalObject) void {
|
||||
const DiffFormatter = @import("bun.js/test/diff_format.zig").DiffFormatter;
|
||||
const formatter = DiffFormatter{
|
||||
.received_string = received_ptr[0..received_len],
|
||||
.expected_string = expected_ptr[0..expected_len],
|
||||
.globalThis = globalThis,
|
||||
};
|
||||
const stderr = std.io.getStdErr().writer();
|
||||
stderr.print("DIFF:\n{}\n", .{formatter}) catch {};
|
||||
}
|
||||
|
||||
export fn zig__ModuleInfoDeserialized__toJSModuleRecord(
|
||||
globalObject: *bun.JSC.JSGlobalObject,
|
||||
vm: *bun.JSC.VM,
|
||||
module_key: *const IdentifierArray,
|
||||
source_code: *const SourceCode,
|
||||
declared_variables: *VariableEnvironment,
|
||||
lexical_variables: *VariableEnvironment,
|
||||
res: *ModuleInfoDeserialized,
|
||||
) ?*JSModuleRecord {
|
||||
if (res.dead) @panic("ModuleInfoDeserialized already deinit()ed");
|
||||
defer res.deinit();
|
||||
|
||||
var identifiers = IdentifierArray.create(res.strings_lens.len);
|
||||
defer identifiers.destroy();
|
||||
var offset: usize = 0;
|
||||
for (0.., res.strings_lens) |index, len| {
|
||||
if (res.strings_buf.len < offset + len) return null; // error!
|
||||
const sub = res.strings_buf[offset..][0..len];
|
||||
if (bun.strings.eqlComptime(sub, ModuleInfo.star_default)) {
|
||||
identifiers.setFromStarDefault(index, vm);
|
||||
} else {
|
||||
identifiers.setFromUtf8(index, vm, sub);
|
||||
}
|
||||
offset += len;
|
||||
}
|
||||
|
||||
{
|
||||
var i: usize = 0;
|
||||
for (res.record_kinds) |k| {
|
||||
if (i + (k.len() catch 0) > res.buffer.len) return null;
|
||||
switch (k) {
|
||||
.declared_variable => declared_variables.add(identifiers, res.buffer[i]),
|
||||
.lexical_variable => lexical_variables.add(identifiers, res.buffer[i]),
|
||||
.import_info_single, .import_info_namespace, .export_info_indirect, .export_info_local, .export_info_namespace, .export_info_star => {},
|
||||
else => return null,
|
||||
}
|
||||
i += k.len() catch unreachable; // handled above
|
||||
}
|
||||
}
|
||||
|
||||
const module_record = JSModuleRecord.create(globalObject, vm, module_key, source_code, declared_variables, lexical_variables, res.contains_import_meta);
|
||||
|
||||
for (res.requested_modules_keys, res.requested_modules_values) |reqk, reqv| {
|
||||
switch (reqv) {
|
||||
.none => module_record.addRequestedModuleNullAttributesPtr(identifiers, reqk),
|
||||
.javascript => module_record.addRequestedModuleJavaScript(identifiers, reqk),
|
||||
.webassembly => module_record.addRequestedModuleWebAssembly(identifiers, reqk),
|
||||
.json => module_record.addRequestedModuleJSON(identifiers, reqk),
|
||||
else => |uv| module_record.addRequestedModuleHostDefined(identifiers, reqk, @enumFromInt(@intFromEnum(uv))),
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
var i: usize = 0;
|
||||
for (res.record_kinds) |k| {
|
||||
if (i + (k.len() catch unreachable) > res.buffer.len) unreachable; // handled above
|
||||
switch (k) {
|
||||
.declared_variable, .lexical_variable => {},
|
||||
.import_info_single => module_record.addImportEntrySingle(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
|
||||
.import_info_namespace => module_record.addImportEntryNamespace(identifiers, res.buffer[i + 1], res.buffer[i + 2], res.buffer[i]),
|
||||
.export_info_indirect => module_record.addIndirectExport(identifiers, res.buffer[i + 0], res.buffer[i + 1], res.buffer[i + 2]),
|
||||
.export_info_local => module_record.addLocalExport(identifiers, res.buffer[i], res.buffer[i + 1]),
|
||||
.export_info_namespace => module_record.addNamespaceExport(identifiers, res.buffer[i], res.buffer[i + 1]),
|
||||
.export_info_star => module_record.addStarExport(identifiers, res.buffer[i]),
|
||||
else => unreachable, // handled above
|
||||
}
|
||||
i += k.len() catch unreachable; // handled above
|
||||
}
|
||||
}
|
||||
|
||||
return module_record;
|
||||
}
|
||||
export fn zig__ModuleInfo__destroy(info: *ModuleInfo) void {
|
||||
info.deinit();
|
||||
bun.default_allocator.destroy(info);
|
||||
}
|
||||
|
||||
const VariableEnvironment = opaque {
|
||||
extern fn JSC__VariableEnvironment__add(environment: *VariableEnvironment, identifier_array: *IdentifierArray, identifier_index: StringID) void;
|
||||
pub const add = JSC__VariableEnvironment__add;
|
||||
};
|
||||
const IdentifierArray = opaque {
|
||||
extern fn JSC__IdentifierArray__create(len: usize) *IdentifierArray;
|
||||
pub const create = JSC__IdentifierArray__create;
|
||||
|
||||
extern fn JSC__IdentifierArray__destroy(identifier_array: *IdentifierArray) void;
|
||||
pub const destroy = JSC__IdentifierArray__destroy;
|
||||
|
||||
extern fn JSC__IdentifierArray__setFromUtf8(identifier_array: *IdentifierArray, n: usize, vm: *bun.JSC.VM, str: [*]const u8, len: usize) void;
|
||||
pub fn setFromUtf8(self: *IdentifierArray, n: usize, vm: *bun.JSC.VM, str: []const u8) void {
|
||||
JSC__IdentifierArray__setFromUtf8(self, n, vm, str.ptr, str.len);
|
||||
}
|
||||
|
||||
extern fn JSC__IdentifierArray__setFromStarDefault(identifier_array: *IdentifierArray, n: usize, vm: *bun.JSC.VM) void;
|
||||
pub const setFromStarDefault = JSC__IdentifierArray__setFromStarDefault;
|
||||
};
|
||||
const SourceCode = opaque {};
|
||||
const JSModuleRecord = opaque {
|
||||
extern fn JSC_JSModuleRecord__create(global_object: *bun.JSC.JSGlobalObject, vm: *bun.JSC.VM, module_key: *const IdentifierArray, source_code: *const SourceCode, declared_variables: *VariableEnvironment, lexical_variables: *VariableEnvironment, has_import_meta: bool) *JSModuleRecord;
|
||||
pub const create = JSC_JSModuleRecord__create;
|
||||
|
||||
extern fn JSC_JSModuleRecord__declaredVariables(module_record: *JSModuleRecord) *VariableEnvironment;
|
||||
pub const declaredVariables = JSC_JSModuleRecord__declaredVariables;
|
||||
extern fn JSC_JSModuleRecord__lexicalVariables(module_record: *JSModuleRecord) *VariableEnvironment;
|
||||
pub const lexicalVariables = JSC_JSModuleRecord__lexicalVariables;
|
||||
|
||||
extern fn JSC_JSModuleRecord__addIndirectExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, import_name: StringID, module_name: StringID) void;
|
||||
pub const addIndirectExport = JSC_JSModuleRecord__addIndirectExport;
|
||||
extern fn JSC_JSModuleRecord__addLocalExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, local_name: StringID) void;
|
||||
pub const addLocalExport = JSC_JSModuleRecord__addLocalExport;
|
||||
extern fn JSC_JSModuleRecord__addNamespaceExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, export_name: StringID, module_name: StringID) void;
|
||||
pub const addNamespaceExport = JSC_JSModuleRecord__addNamespaceExport;
|
||||
extern fn JSC_JSModuleRecord__addStarExport(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
|
||||
pub const addStarExport = JSC_JSModuleRecord__addStarExport;
|
||||
|
||||
extern fn JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
|
||||
pub const addRequestedModuleNullAttributesPtr = JSC_JSModuleRecord__addRequestedModuleNullAttributesPtr;
|
||||
extern fn JSC_JSModuleRecord__addRequestedModuleJavaScript(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
|
||||
pub const addRequestedModuleJavaScript = JSC_JSModuleRecord__addRequestedModuleJavaScript;
|
||||
extern fn JSC_JSModuleRecord__addRequestedModuleWebAssembly(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
|
||||
pub const addRequestedModuleWebAssembly = JSC_JSModuleRecord__addRequestedModuleWebAssembly;
|
||||
extern fn JSC_JSModuleRecord__addRequestedModuleJSON(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID) void;
|
||||
pub const addRequestedModuleJSON = JSC_JSModuleRecord__addRequestedModuleJSON;
|
||||
extern fn JSC_JSModuleRecord__addRequestedModuleHostDefined(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, module_name: StringID, host_defined_import_type: StringID) void;
|
||||
pub const addRequestedModuleHostDefined = JSC_JSModuleRecord__addRequestedModuleHostDefined;
|
||||
|
||||
extern fn JSC_JSModuleRecord__addImportEntrySingle(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
|
||||
pub const addImportEntrySingle = JSC_JSModuleRecord__addImportEntrySingle;
|
||||
extern fn JSC_JSModuleRecord__addImportEntryNamespace(module_record: *JSModuleRecord, identifier_array: *IdentifierArray, import_name: StringID, local_name: StringID, module_name: StringID) void;
|
||||
pub const addImportEntryNamespace = JSC_JSModuleRecord__addImportEntryNamespace;
|
||||
};
|
||||
@@ -1631,7 +1631,7 @@ pub const Api = struct {
|
||||
origin: ?[]const u8 = null,
|
||||
|
||||
/// absolute_working_dir
|
||||
absolute_working_dir: ?[:0]const u8 = null,
|
||||
absolute_working_dir: ?[]const u8 = null,
|
||||
|
||||
/// define
|
||||
define: ?StringMap = null,
|
||||
|
||||
@@ -11,7 +11,6 @@
|
||||
extern "C" BunString BakeProdResolve(JSC::JSGlobalObject*, BunString a, BunString b);
|
||||
|
||||
namespace Bake {
|
||||
using namespace JSC;
|
||||
|
||||
JSC::JSInternalPromise*
|
||||
bakeModuleLoaderImportModule(JSC::JSGlobalObject* global,
|
||||
@@ -152,14 +151,13 @@ const JSC::GlobalObjectMethodTable GlobalObject::s_globalObjectMethodTable = {
|
||||
INHERIT_HOOK_METHOD(deriveShadowRealmGlobalObject),
|
||||
INHERIT_HOOK_METHOD(codeForEval),
|
||||
INHERIT_HOOK_METHOD(canCompileStrings),
|
||||
INHERIT_HOOK_METHOD(trustedScriptStructure),
|
||||
};
|
||||
|
||||
GlobalObject* GlobalObject::create(JSC::VM& vm, JSC::Structure* structure,
|
||||
const JSC::GlobalObjectMethodTable* methodTable)
|
||||
{
|
||||
Bake::GlobalObject* ptr = new (NotNull, JSC::allocateCell<Bake::GlobalObject>(vm))
|
||||
Bake::GlobalObject(vm, structure, methodTable);
|
||||
GlobalObject* ptr = new (NotNull, JSC::allocateCell<GlobalObject>(vm))
|
||||
GlobalObject(vm, structure, methodTable);
|
||||
ptr->finishCreation(vm);
|
||||
return ptr;
|
||||
}
|
||||
@@ -170,13 +168,6 @@ void GlobalObject::finishCreation(JSC::VM& vm)
|
||||
ASSERT(inherits(info()));
|
||||
}
|
||||
|
||||
JSC::Structure* GlobalObject::createStructure(JSC::VM& vm)
|
||||
{
|
||||
auto* structure = JSC::Structure::create(vm, nullptr, jsNull(), JSC::TypeInfo(JSC::GlobalObjectType, StructureFlags & ~IsImmutablePrototypeExoticObject), info());
|
||||
structure->setTransitionWatchpointIsLikelyToBeFired(true);
|
||||
return structure;
|
||||
}
|
||||
|
||||
struct BunVirtualMachine;
|
||||
extern "C" BunVirtualMachine* Bun__getVM();
|
||||
|
||||
@@ -190,9 +181,9 @@ extern "C" GlobalObject* BakeCreateProdGlobal(void* console)
|
||||
BunVirtualMachine* bunVM = Bun__getVM();
|
||||
WebCore::JSVMClientData::create(&vm, bunVM);
|
||||
|
||||
JSC::Structure* structure = Bake::GlobalObject::createStructure(vm);
|
||||
Bake::GlobalObject* global = Bake::GlobalObject::create(
|
||||
vm, structure, &Bake::GlobalObject::s_globalObjectMethodTable);
|
||||
JSC::Structure* structure = GlobalObject::createStructure(vm);
|
||||
GlobalObject* global = GlobalObject::create(
|
||||
vm, structure, &GlobalObject::s_globalObjectMethodTable);
|
||||
if (!global)
|
||||
BUN_PANIC("Failed to create BakeGlobalObject");
|
||||
|
||||
@@ -202,7 +193,6 @@ extern "C" GlobalObject* BakeCreateProdGlobal(void* console)
|
||||
|
||||
global->setConsole(console);
|
||||
global->setStackTraceLimit(10); // Node.js defaults to 10
|
||||
global->isThreadLocalDefaultGlobalObject = true;
|
||||
|
||||
// TODO: it segfaults! process.nextTick is scoped out for now i guess!
|
||||
// vm.setOnComputeErrorInfo(computeErrorInfoWrapper);
|
||||
@@ -224,7 +214,4 @@ extern "C" void BakeGlobalObject__attachPerThreadData(GlobalObject* global, Prod
|
||||
global->m_perThreadData = perThreadData;
|
||||
}
|
||||
|
||||
const JSC::ClassInfo Bake::GlobalObject::s_info = { "GlobalObject"_s, &Base::s_info, nullptr, nullptr,
|
||||
CREATE_METHOD_TABLE(Bake::GlobalObject) };
|
||||
|
||||
}; // namespace Bake
|
||||
|
||||
@@ -10,8 +10,7 @@ class GlobalObject : public Zig::GlobalObject {
|
||||
public:
|
||||
using Base = Zig::GlobalObject;
|
||||
|
||||
ProductionPerThread* m_perThreadData = nullptr;
|
||||
DECLARE_INFO;
|
||||
ProductionPerThread* m_perThreadData;
|
||||
|
||||
template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
|
||||
{
|
||||
@@ -23,20 +22,16 @@ public:
|
||||
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceForBakeGlobalScope = std::forward<decltype(space)>(space); },
|
||||
[](auto& spaces) { return spaces.m_subspaceForBakeGlobalScope.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_subspaceForBakeGlobalScope = std::forward<decltype(space)>(space); },
|
||||
[](auto& server) -> JSC::HeapCellType& { return server.m_heapCellTypeForBakeGlobalObject; });
|
||||
[](auto& server) -> JSC::HeapCellType& { return server.m_heapCellTypeForJSWorkerGlobalScope; });
|
||||
}
|
||||
|
||||
static const JSC::GlobalObjectMethodTable s_globalObjectMethodTable;
|
||||
static GlobalObject* create(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable);
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm);
|
||||
|
||||
void finishCreation(JSC::VM& vm);
|
||||
|
||||
GlobalObject(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable)
|
||||
: Zig::GlobalObject(vm, structure, methodTable)
|
||||
{
|
||||
}
|
||||
GlobalObject(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable)
|
||||
: Zig::GlobalObject(vm, structure, methodTable) { }
|
||||
};
|
||||
|
||||
}; // namespace Kit
|
||||
|
||||
@@ -13,7 +13,7 @@ pub const igLog = bun.Output.scoped(.IncrementalGraph, false);
|
||||
pub const Options = struct {
|
||||
/// Arena must live until DevServer.deinit()
|
||||
arena: Allocator,
|
||||
root: [:0]const u8,
|
||||
root: []const u8,
|
||||
vm: *VirtualMachine,
|
||||
framework: bake.Framework,
|
||||
bundler_options: bake.SplitBundlerOptions,
|
||||
@@ -4446,7 +4446,7 @@ pub const EntryPointList = struct {
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Mutex = bun.Mutex;
|
||||
const Mutex = std.Thread.Mutex;
|
||||
const ArrayListUnmanaged = std.ArrayListUnmanaged;
|
||||
const AutoArrayHashMapUnmanaged = std.AutoArrayHashMapUnmanaged;
|
||||
|
||||
@@ -4485,5 +4485,5 @@ const JSModuleLoader = JSC.JSModuleLoader;
|
||||
const EventLoopHandle = JSC.EventLoopHandle;
|
||||
const JSInternalPromise = JSC.JSInternalPromise;
|
||||
|
||||
const ThreadlocalArena = @import("../allocators/mimalloc_arena.zig").Arena;
|
||||
const ThreadlocalArena = @import("../mimalloc_arena.zig").Arena;
|
||||
const Chunk = bun.bundle_v2.Chunk;
|
||||
|
||||
@@ -15,7 +15,7 @@ pub const UserOptions = struct {
|
||||
arena: std.heap.ArenaAllocator,
|
||||
allocations: StringRefList,
|
||||
|
||||
root: [:0]const u8,
|
||||
root: []const u8,
|
||||
framework: Framework,
|
||||
bundler_options: SplitBundlerOptions,
|
||||
|
||||
@@ -78,9 +78,9 @@ pub const UserOptions = struct {
|
||||
const StringRefList = struct {
|
||||
strings: std.ArrayListUnmanaged(ZigString.Slice),
|
||||
|
||||
pub fn track(al: *StringRefList, str: ZigString.Slice) [:0]const u8 {
|
||||
pub fn track(al: *StringRefList, str: ZigString.Slice) []const u8 {
|
||||
al.strings.append(bun.default_allocator, str) catch bun.outOfMemory();
|
||||
return str.sliceZ();
|
||||
return str.slice();
|
||||
}
|
||||
|
||||
pub fn free(al: *StringRefList) void {
|
||||
|
||||
@@ -21,12 +21,12 @@ const default_allocator = bun.default_allocator;
|
||||
const JestPrettyFormat = @import("./test/pretty_format.zig").JestPrettyFormat;
|
||||
const JSPromise = JSC.JSPromise;
|
||||
const EventType = JSC.EventType;
|
||||
|
||||
pub const shim = Shimmer("Bun", "ConsoleObject", @This());
|
||||
pub const Type = *anyopaque;
|
||||
pub const name = "Bun::ConsoleObject";
|
||||
pub const include = "\"ConsoleObject.h\"";
|
||||
pub const namespace = shim.namespace;
|
||||
|
||||
const Counter = std.AutoHashMapUnmanaged(u64, u32);
|
||||
|
||||
const BufferedWriter = std.io.BufferedWriter(4096, Output.WriterType);
|
||||
@@ -72,8 +72,8 @@ pub const MessageType = enum(u32) {
|
||||
_,
|
||||
};
|
||||
|
||||
var stderr_mutex: bun.Mutex = .{};
|
||||
var stdout_mutex: bun.Mutex = .{};
|
||||
var stderr_mutex: bun.Lock = .{};
|
||||
var stdout_mutex: bun.Lock = .{};
|
||||
|
||||
threadlocal var stderr_lock_count: u16 = 0;
|
||||
threadlocal var stdout_lock_count: u16 = 0;
|
||||
@@ -378,13 +378,13 @@ pub const TablePrinter = struct {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
var cols_iter = try JSC.JSPropertyIterator(.{
|
||||
var cols_iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = false,
|
||||
.include_value = true,
|
||||
}).init(this.globalObject, row_value);
|
||||
defer cols_iter.deinit();
|
||||
|
||||
while (try cols_iter.next()) |col_key| {
|
||||
while (cols_iter.next()) |col_key| {
|
||||
const value = cols_iter.value;
|
||||
|
||||
// find or create the column for the property
|
||||
@@ -561,13 +561,13 @@ pub const TablePrinter = struct {
|
||||
}.callback);
|
||||
if (ctx_.err) return error.JSError;
|
||||
} else {
|
||||
var rows_iter = try JSC.JSPropertyIterator(.{
|
||||
var rows_iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = false,
|
||||
.include_value = true,
|
||||
}).init(globalObject, this.tabular_data);
|
||||
defer rows_iter.deinit();
|
||||
|
||||
while (try rows_iter.next()) |row_key| {
|
||||
while (rows_iter.next()) |row_key| {
|
||||
try this.updateColumnsForRow(&columns, .{ .str = String.init(row_key) }, rows_iter.value);
|
||||
}
|
||||
}
|
||||
@@ -634,13 +634,13 @@ pub const TablePrinter = struct {
|
||||
}.callback);
|
||||
if (ctx_.err) return error.JSError;
|
||||
} else {
|
||||
var rows_iter = try JSC.JSPropertyIterator(.{
|
||||
var rows_iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = false,
|
||||
.include_value = true,
|
||||
}).init(globalObject, this.tabular_data);
|
||||
defer rows_iter.deinit();
|
||||
|
||||
while (try rows_iter.next()) |row_key| {
|
||||
while (rows_iter.next()) |row_key| {
|
||||
try this.printRow(Writer, writer, enable_ansi_colors, &columns, .{ .str = String.init(row_key) }, rows_iter.value);
|
||||
}
|
||||
}
|
||||
@@ -2486,9 +2486,6 @@ pub const Formatter = struct {
|
||||
} else if (value.as(JSC.WebCore.Blob)) |blob| {
|
||||
blob.writeFormat(ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {};
|
||||
return;
|
||||
} else if (value.as(JSC.WebCore.S3Client)) |s3client| {
|
||||
s3client.writeFormat(ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {};
|
||||
return;
|
||||
} else if (value.as(JSC.FetchHeaders) != null) {
|
||||
if (value.get_unsafe(this.globalThis, "toJSON")) |toJSONFunction| {
|
||||
this.addForNewLine("Headers ".len);
|
||||
@@ -2731,6 +2728,10 @@ pub const Formatter = struct {
|
||||
this.quote_strings = true;
|
||||
defer this.quote_strings = prev_quote_strings;
|
||||
|
||||
if (!this.single_line) {
|
||||
this.writeIndent(Writer, writer_) catch {};
|
||||
}
|
||||
|
||||
const set_name = if (value.jsType() == .WeakSet) "WeakSet" else "Set";
|
||||
|
||||
if (length == 0) {
|
||||
@@ -2761,7 +2762,7 @@ pub const Formatter = struct {
|
||||
},
|
||||
}
|
||||
}
|
||||
if (!this.single_line) {
|
||||
if (this.single_line) {
|
||||
this.writeIndent(Writer, writer_) catch {};
|
||||
}
|
||||
writer.writeAll("}");
|
||||
@@ -2998,7 +2999,7 @@ pub const Formatter = struct {
|
||||
this.quote_strings = true;
|
||||
defer this.quote_strings = prev_quote_strings;
|
||||
|
||||
var props_iter = try JSC.JSPropertyIterator(.{
|
||||
var props_iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = true,
|
||||
|
||||
.include_value = true,
|
||||
@@ -3012,7 +3013,7 @@ pub const Formatter = struct {
|
||||
defer this.indent -|= 1;
|
||||
const count_without_children = props_iter.len - @as(usize, @intFromBool(children_prop != null));
|
||||
|
||||
while (try props_iter.next()) |prop| {
|
||||
while (props_iter.next()) |prop| {
|
||||
if (prop.eqlComptime("children"))
|
||||
continue;
|
||||
|
||||
|
||||
@@ -9,9 +9,7 @@
|
||||
/// Version 10: Constant folding for ''.charCodeAt(n)
|
||||
/// Version 11: Fix \uFFFF printing regression
|
||||
/// Version 12: "use strict"; makes it CommonJS if we otherwise don't know which one to pick.
|
||||
/// Version 13: Hoist `import.meta.require` definition, see #15738
|
||||
/// Version 14: Include module info with an ES Module, see #15758
|
||||
const expected_version = 14;
|
||||
const expected_version = 12;
|
||||
|
||||
const bun = @import("root").bun;
|
||||
const std = @import("std");
|
||||
@@ -34,7 +32,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
|
||||
sourcemap_allocator: std.mem.Allocator,
|
||||
output_code_allocator: std.mem.Allocator,
|
||||
esm_record_allocator: std.mem.Allocator,
|
||||
|
||||
const seed = 42;
|
||||
pub const Metadata = struct {
|
||||
@@ -55,10 +52,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
sourcemap_byte_length: u64 = 0,
|
||||
sourcemap_hash: u64 = 0,
|
||||
|
||||
esm_record_byte_offset: u64 = 0,
|
||||
esm_record_byte_length: u64 = 0,
|
||||
esm_record_hash: u64 = 0,
|
||||
|
||||
pub const size = brk: {
|
||||
var count: usize = 0;
|
||||
const meta: Metadata = .{};
|
||||
@@ -85,10 +78,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
try writer.writeInt(u64, this.sourcemap_byte_offset, .little);
|
||||
try writer.writeInt(u64, this.sourcemap_byte_length, .little);
|
||||
try writer.writeInt(u64, this.sourcemap_hash, .little);
|
||||
|
||||
try writer.writeInt(u64, this.esm_record_byte_offset, .little);
|
||||
try writer.writeInt(u64, this.esm_record_byte_length, .little);
|
||||
try writer.writeInt(u64, this.esm_record_hash, .little);
|
||||
}
|
||||
|
||||
pub fn decode(this: *Metadata, reader: anytype) !void {
|
||||
@@ -113,10 +102,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
this.sourcemap_byte_length = try reader.readInt(u64, .little);
|
||||
this.sourcemap_hash = try reader.readInt(u64, .little);
|
||||
|
||||
this.esm_record_byte_offset = try reader.readInt(u64, .little);
|
||||
this.esm_record_byte_length = try reader.readInt(u64, .little);
|
||||
this.esm_record_hash = try reader.readInt(u64, .little);
|
||||
|
||||
switch (this.module_type) {
|
||||
.esm, .cjs => {},
|
||||
// Invalid module type
|
||||
@@ -135,7 +120,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
metadata: Metadata,
|
||||
output_code: OutputCode = .{ .utf8 = "" },
|
||||
sourcemap: []const u8 = "",
|
||||
esm_record: []const u8 = "",
|
||||
|
||||
pub const OutputCode = union(enum) {
|
||||
utf8: []const u8,
|
||||
string: bun.String,
|
||||
@@ -157,14 +142,11 @@ pub const RuntimeTranspilerCache = struct {
|
||||
}
|
||||
};
|
||||
|
||||
pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator, esm_record_allocator: std.mem.Allocator) void {
|
||||
pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator) void {
|
||||
this.output_code.deinit(output_code_allocator);
|
||||
if (this.sourcemap.len > 0) {
|
||||
sourcemap_allocator.free(this.sourcemap);
|
||||
}
|
||||
if (this.esm_record.len > 0) {
|
||||
esm_record_allocator.free(this.esm_record);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save(
|
||||
@@ -174,7 +156,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_hash: u64,
|
||||
features_hash: u64,
|
||||
sourcemap: []const u8,
|
||||
esm_record: []const u8,
|
||||
output_code: OutputCode,
|
||||
exports_kind: bun.JSAst.ExportsKind,
|
||||
) !void {
|
||||
@@ -220,8 +201,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
.output_byte_offset = Metadata.size,
|
||||
.output_byte_length = output_bytes.len,
|
||||
.sourcemap_byte_offset = Metadata.size + output_bytes.len,
|
||||
.esm_record_byte_offset = Metadata.size + output_bytes.len + sourcemap.len,
|
||||
.esm_record_byte_length = esm_record.len,
|
||||
};
|
||||
|
||||
metadata.output_hash = hash(output_bytes);
|
||||
@@ -240,26 +219,20 @@ pub const RuntimeTranspilerCache = struct {
|
||||
break :brk metadata_buf[0..metadata_stream.pos];
|
||||
};
|
||||
|
||||
var vecs_buf: [4]bun.PlatformIOVecConst = undefined;
|
||||
var vecs_i: usize = 0;
|
||||
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(metadata_bytes);
|
||||
vecs_i += 1;
|
||||
if (output_bytes.len > 0) {
|
||||
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(output_bytes);
|
||||
vecs_i += 1;
|
||||
}
|
||||
if (sourcemap.len > 0) {
|
||||
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(sourcemap);
|
||||
vecs_i += 1;
|
||||
}
|
||||
if (esm_record.len > 0) {
|
||||
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(esm_record);
|
||||
vecs_i += 1;
|
||||
}
|
||||
const vecs: []const bun.PlatformIOVecConst = vecs_buf[0..vecs_i];
|
||||
const vecs: []const bun.PlatformIOVecConst = if (output_bytes.len > 0)
|
||||
&.{
|
||||
bun.platformIOVecConstCreate(metadata_bytes),
|
||||
bun.platformIOVecConstCreate(output_bytes),
|
||||
bun.platformIOVecConstCreate(sourcemap),
|
||||
}
|
||||
else
|
||||
&.{
|
||||
bun.platformIOVecConstCreate(metadata_bytes),
|
||||
bun.platformIOVecConstCreate(sourcemap),
|
||||
};
|
||||
|
||||
var position: isize = 0;
|
||||
const end_position = Metadata.size + output_bytes.len + sourcemap.len + esm_record.len;
|
||||
const end_position = Metadata.size + output_bytes.len + sourcemap.len;
|
||||
|
||||
if (bun.Environment.allow_assert) {
|
||||
var total: usize = 0;
|
||||
@@ -269,7 +242,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
}
|
||||
bun.assert(end_position == total);
|
||||
}
|
||||
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size + esm_record.len)));
|
||||
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size)));
|
||||
|
||||
bun.C.preallocate_file(tmpfile.fd.cast(), 0, @intCast(end_position)) catch {};
|
||||
while (position < end_position) {
|
||||
@@ -290,7 +263,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
file: std.fs.File,
|
||||
sourcemap_allocator: std.mem.Allocator,
|
||||
output_code_allocator: std.mem.Allocator,
|
||||
esm_record_allocator: std.mem.Allocator,
|
||||
) !void {
|
||||
const stat_size = try file.getEndPos();
|
||||
if (stat_size < Metadata.size + this.metadata.output_byte_length + this.metadata.sourcemap_byte_length) {
|
||||
@@ -366,17 +338,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
|
||||
this.sourcemap = sourcemap;
|
||||
}
|
||||
|
||||
if (this.metadata.esm_record_byte_length > 0) {
|
||||
const esm_record = try esm_record_allocator.alloc(u8, this.metadata.esm_record_byte_length);
|
||||
errdefer esm_record_allocator.free(esm_record);
|
||||
const read_bytes = try file.preadAll(esm_record, this.metadata.esm_record_byte_offset);
|
||||
if (read_bytes != this.metadata.esm_record_byte_length) {
|
||||
return error.MissingData;
|
||||
}
|
||||
|
||||
this.esm_record = esm_record;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -494,7 +455,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_stat_size: u64,
|
||||
sourcemap_allocator: std.mem.Allocator,
|
||||
output_code_allocator: std.mem.Allocator,
|
||||
esm_record_allocator: std.mem.Allocator,
|
||||
) !Entry {
|
||||
var tracer = bun.tracy.traceNamed(@src(), "RuntimeTranspilerCache.fromFile");
|
||||
defer tracer.end();
|
||||
@@ -509,7 +469,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_stat_size,
|
||||
sourcemap_allocator,
|
||||
output_code_allocator,
|
||||
esm_record_allocator,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -520,7 +479,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_stat_size: u64,
|
||||
sourcemap_allocator: std.mem.Allocator,
|
||||
output_code_allocator: std.mem.Allocator,
|
||||
esm_record_allocator: std.mem.Allocator,
|
||||
) !Entry {
|
||||
var metadata_bytes_buf: [Metadata.size * 2]u8 = undefined;
|
||||
const cache_fd = try bun.sys.open(cache_file_path.sliceAssumeZ(), bun.O.RDONLY, 0).unwrap();
|
||||
@@ -552,7 +510,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
return error.MismatchedFeatureHash;
|
||||
}
|
||||
|
||||
try entry.load(file, sourcemap_allocator, output_code_allocator, esm_record_allocator);
|
||||
try entry.load(file, sourcemap_allocator, output_code_allocator);
|
||||
|
||||
return entry;
|
||||
}
|
||||
@@ -569,7 +527,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_hash: u64,
|
||||
features_hash: u64,
|
||||
sourcemap: []const u8,
|
||||
esm_record: []const u8,
|
||||
source_code: bun.String,
|
||||
exports_kind: bun.JSAst.ExportsKind,
|
||||
) !void {
|
||||
@@ -609,7 +566,6 @@ pub const RuntimeTranspilerCache = struct {
|
||||
input_hash,
|
||||
features_hash,
|
||||
sourcemap,
|
||||
esm_record,
|
||||
output_code,
|
||||
exports_kind,
|
||||
);
|
||||
@@ -643,7 +599,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
parser_options.hashForRuntimeTranspiler(&features_hasher, used_jsx);
|
||||
this.features_hash = features_hasher.final();
|
||||
|
||||
this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator) catch |err| {
|
||||
this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator) catch |err| {
|
||||
debug("get(\"{s}\") = {s}", .{ source.path.text, @errorName(err) });
|
||||
return false;
|
||||
};
|
||||
@@ -659,7 +615,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
if (comptime bun.Environment.isDebug) {
|
||||
if (!bun_debug_restore_from_cache) {
|
||||
if (this.entry) |*entry| {
|
||||
entry.deinit(this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator);
|
||||
entry.deinit(this.sourcemap_allocator, this.output_code_allocator);
|
||||
this.entry = null;
|
||||
}
|
||||
}
|
||||
@@ -668,7 +624,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
return this.entry != null;
|
||||
}
|
||||
|
||||
pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8, esm_record: []const u8) void {
|
||||
pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8) void {
|
||||
if (comptime !bun.FeatureFlags.runtime_transpiler_cache)
|
||||
@compileError("RuntimeTranspilerCache is disabled");
|
||||
|
||||
@@ -679,7 +635,7 @@ pub const RuntimeTranspilerCache = struct {
|
||||
const output_code = bun.String.createLatin1(output_code_bytes);
|
||||
this.output_code = output_code;
|
||||
|
||||
toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, esm_record, output_code, this.exports_kind) catch |err| {
|
||||
toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, output_code, this.exports_kind) catch |err| {
|
||||
debug("put() = {s}", .{@errorName(err)});
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const conv = std.builtin.CallingConvention.Unspecified;
|
||||
const S3File = @import("../webcore/S3File.zig");
|
||||
|
||||
/// How to add a new function or property to the Bun global
|
||||
///
|
||||
/// - Add a callback or property to the below struct
|
||||
@@ -18,6 +18,7 @@ pub const BunObject = struct {
|
||||
pub const createShellInterpreter = toJSCallback(bun.shell.Interpreter.createShellInterpreter);
|
||||
pub const deflateSync = toJSCallback(JSZlib.deflateSync);
|
||||
pub const file = toJSCallback(WebCore.Blob.constructBunFile);
|
||||
pub const generateHeapSnapshot = toJSCallback(Bun.generateHeapSnapshot);
|
||||
pub const gunzipSync = toJSCallback(JSZlib.gunzipSync);
|
||||
pub const gzipSync = toJSCallback(JSZlib.gzipSync);
|
||||
pub const indexOfLine = toJSCallback(Bun.indexOfLine);
|
||||
@@ -30,7 +31,7 @@ pub const BunObject = struct {
|
||||
pub const registerMacro = toJSCallback(Bun.registerMacro);
|
||||
pub const resolve = toJSCallback(Bun.resolve);
|
||||
pub const resolveSync = toJSCallback(Bun.resolveSync);
|
||||
pub const s3 = S3File.createJSS3File;
|
||||
pub const s3 = toJSCallback(WebCore.Blob.constructS3File);
|
||||
pub const serve = toJSCallback(Bun.serve);
|
||||
pub const sha = toJSCallback(JSC.wrapStaticMethod(Crypto.SHA512_256, "hash_", true));
|
||||
pub const shellEscape = toJSCallback(Bun.shellEscape);
|
||||
@@ -56,6 +57,7 @@ pub const BunObject = struct {
|
||||
pub const SHA384 = toJSGetter(Crypto.SHA384.getter);
|
||||
pub const SHA512 = toJSGetter(Crypto.SHA512.getter);
|
||||
pub const SHA512_256 = toJSGetter(Crypto.SHA512_256.getter);
|
||||
pub const S3 = toJSGetter(JSC.WebCore.Blob.getJSS3FileConstructor);
|
||||
pub const TOML = toJSGetter(Bun.getTOMLObject);
|
||||
pub const Transpiler = toJSGetter(Bun.getTranspilerConstructor);
|
||||
pub const argv = toJSGetter(Bun.getArgv);
|
||||
@@ -71,7 +73,6 @@ pub const BunObject = struct {
|
||||
pub const stdin = toJSGetter(Bun.getStdin);
|
||||
pub const stdout = toJSGetter(Bun.getStdout);
|
||||
pub const unsafe = toJSGetter(Bun.getUnsafe);
|
||||
pub const S3Client = toJSGetter(Bun.getS3ClientConstructor);
|
||||
// --- Getters ---
|
||||
|
||||
fn getterName(comptime baseName: anytype) [:0]const u8 {
|
||||
@@ -109,6 +110,7 @@ pub const BunObject = struct {
|
||||
@export(BunObject.FileSystemRouter, .{ .name = getterName("FileSystemRouter") });
|
||||
@export(BunObject.MD4, .{ .name = getterName("MD4") });
|
||||
@export(BunObject.MD5, .{ .name = getterName("MD5") });
|
||||
@export(BunObject.S3, .{ .name = getterName("S3") });
|
||||
@export(BunObject.SHA1, .{ .name = getterName("SHA1") });
|
||||
@export(BunObject.SHA224, .{ .name = getterName("SHA224") });
|
||||
@export(BunObject.SHA256, .{ .name = getterName("SHA256") });
|
||||
@@ -132,7 +134,6 @@ pub const BunObject = struct {
|
||||
@export(BunObject.unsafe, .{ .name = getterName("unsafe") });
|
||||
@export(BunObject.semver, .{ .name = getterName("semver") });
|
||||
@export(BunObject.embeddedFiles, .{ .name = getterName("embeddedFiles") });
|
||||
@export(BunObject.S3Client, .{ .name = getterName("S3Client") });
|
||||
// --- Getters --
|
||||
|
||||
// -- Callbacks --
|
||||
@@ -144,6 +145,7 @@ pub const BunObject = struct {
|
||||
@export(BunObject.createShellInterpreter, .{ .name = callbackName("createShellInterpreter") });
|
||||
@export(BunObject.deflateSync, .{ .name = callbackName("deflateSync") });
|
||||
@export(BunObject.file, .{ .name = callbackName("file") });
|
||||
@export(BunObject.generateHeapSnapshot, .{ .name = callbackName("generateHeapSnapshot") });
|
||||
@export(BunObject.gunzipSync, .{ .name = callbackName("gunzipSync") });
|
||||
@export(BunObject.gzipSync, .{ .name = callbackName("gzipSync") });
|
||||
@export(BunObject.indexOfLine, .{ .name = callbackName("indexOfLine") });
|
||||
@@ -234,6 +236,7 @@ const JSPromise = bun.JSC.JSPromise;
|
||||
const JSInternalPromise = bun.JSC.JSInternalPromise;
|
||||
const JSModuleLoader = bun.JSC.JSModuleLoader;
|
||||
const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation;
|
||||
const Exception = bun.JSC.Exception;
|
||||
const ErrorableZigString = bun.JSC.ErrorableZigString;
|
||||
const ZigGlobalObject = bun.JSC.ZigGlobalObject;
|
||||
const VM = bun.JSC.VM;
|
||||
@@ -827,12 +830,13 @@ pub fn sleepSync(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) b
|
||||
return .undefined;
|
||||
}
|
||||
|
||||
pub fn generateHeapSnapshot(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue {
|
||||
return globalObject.generateHeapSnapshot();
|
||||
}
|
||||
|
||||
pub fn gc(vm: *JSC.VirtualMachine, sync: bool) usize {
|
||||
return vm.garbageCollect(sync);
|
||||
}
|
||||
export fn Bun__gc(vm: *JSC.VirtualMachine, sync: bool) callconv(.C) usize {
|
||||
return @call(.always_inline, gc, .{ vm, sync });
|
||||
}
|
||||
|
||||
pub fn shrink(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue {
|
||||
globalObject.vm().shrinkFootprint();
|
||||
@@ -1150,11 +1154,11 @@ pub const Crypto = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub const names: std.EnumArray(Algorithm, bun.String) = brk: {
|
||||
var all = std.EnumArray(Algorithm, bun.String).initUndefined();
|
||||
pub const names: std.EnumArray(Algorithm, ZigString) = brk: {
|
||||
var all = std.EnumArray(Algorithm, ZigString).initUndefined();
|
||||
var iter = all.iterator();
|
||||
while (iter.next()) |entry| {
|
||||
entry.value.* = bun.String.init(@tagName(entry.key));
|
||||
entry.value.* = ZigString.init(@tagName(entry.key));
|
||||
}
|
||||
break :brk all;
|
||||
};
|
||||
@@ -2325,7 +2329,8 @@ pub const Crypto = struct {
|
||||
_: JSValue,
|
||||
_: JSValue,
|
||||
) JSC.JSValue {
|
||||
return bun.String.toJSArray(globalThis_, &EVP.Algorithm.names.values);
|
||||
var values = EVP.Algorithm.names.values;
|
||||
return JSC.JSValue.createStringArray(globalThis_, &values, values.len, true);
|
||||
}
|
||||
|
||||
fn hashToEncoding(globalThis: *JSGlobalObject, evp: *EVP, input: JSC.Node.BlobOrStringOrBuffer, encoding: JSC.Node.Encoding) bun.JSError!JSC.JSValue {
|
||||
@@ -3400,9 +3405,7 @@ pub fn getTOMLObject(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSVa
|
||||
pub fn getGlobConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
|
||||
return JSC.API.Glob.getConstructor(globalThis);
|
||||
}
|
||||
pub fn getS3ClientConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
|
||||
return JSC.WebCore.S3Client.getConstructor(globalThis);
|
||||
}
|
||||
|
||||
pub fn getEmbeddedFiles(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
|
||||
const vm = globalThis.bunVM();
|
||||
const graph = vm.standalone_module_graph orelse return JSC.JSValue.createEmptyArray(globalThis, 0);
|
||||
|
||||
@@ -38,7 +38,7 @@ const JSAst = bun.JSAst;
|
||||
const JSParser = bun.js_parser;
|
||||
const JSPrinter = bun.js_printer;
|
||||
const ScanPassResult = JSParser.ScanPassResult;
|
||||
const Mimalloc = @import("../../allocators/mimalloc_arena.zig");
|
||||
const Mimalloc = @import("../../mimalloc_arena.zig");
|
||||
const Runtime = @import("../../runtime.zig").Runtime;
|
||||
const JSLexer = bun.js_lexer;
|
||||
const Expr = JSAst.Expr;
|
||||
@@ -453,13 +453,13 @@ pub const JSBundler = struct {
|
||||
return globalThis.throwInvalidArguments("define must be an object", .{});
|
||||
}
|
||||
|
||||
var define_iter = try JSC.JSPropertyIterator(.{
|
||||
var define_iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = true,
|
||||
.include_value = true,
|
||||
}).init(globalThis, define);
|
||||
defer define_iter.deinit();
|
||||
|
||||
while (try define_iter.next()) |prop| {
|
||||
while (define_iter.next()) |prop| {
|
||||
const property_value = define_iter.value;
|
||||
const value_type = property_value.jsType();
|
||||
|
||||
@@ -485,7 +485,7 @@ pub const JSBundler = struct {
|
||||
}
|
||||
|
||||
if (try config.getOwnObject(globalThis, "loader")) |loaders| {
|
||||
var loader_iter = try JSC.JSPropertyIterator(.{
|
||||
var loader_iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = true,
|
||||
.include_value = true,
|
||||
}).init(globalThis, loaders);
|
||||
@@ -496,7 +496,7 @@ pub const JSBundler = struct {
|
||||
var loader_values = try allocator.alloc(Api.Loader, loader_iter.len);
|
||||
errdefer allocator.free(loader_values);
|
||||
|
||||
while (try loader_iter.next()) |prop| {
|
||||
while (loader_iter.next()) |prop| {
|
||||
if (!prop.hasPrefixComptime(".") or prop.length() < 2) {
|
||||
return globalThis.throwInvalidArguments("loader property names must be file extensions, such as '.txt'", .{});
|
||||
}
|
||||
|
||||
@@ -36,7 +36,7 @@ const JSAst = bun.JSAst;
|
||||
const JSParser = bun.js_parser;
|
||||
const JSPrinter = bun.js_printer;
|
||||
const ScanPassResult = JSParser.ScanPassResult;
|
||||
const Mimalloc = @import("../../allocators/mimalloc_arena.zig");
|
||||
const Mimalloc = @import("../../mimalloc_arena.zig");
|
||||
const Runtime = @import("../../runtime.zig").Runtime;
|
||||
const JSLexer = bun.js_lexer;
|
||||
const Expr = JSAst.Expr;
|
||||
@@ -338,7 +338,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
|
||||
return globalObject.throwInvalidArguments("define must be an object", .{});
|
||||
}
|
||||
|
||||
var define_iter = try JSC.JSPropertyIterator(.{
|
||||
var define_iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = true,
|
||||
|
||||
.include_value = true,
|
||||
@@ -351,7 +351,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
|
||||
|
||||
var values = map_entries[define_iter.len..];
|
||||
|
||||
while (try define_iter.next()) |prop| {
|
||||
while (define_iter.next()) |prop| {
|
||||
const property_value = define_iter.value;
|
||||
const value_type = property_value.jsType();
|
||||
|
||||
@@ -462,6 +462,10 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
|
||||
}
|
||||
|
||||
transpiler.runtime.allow_runtime = false;
|
||||
transpiler.runtime.use_import_meta_require = switch (transpiler.transform.target orelse .browser) {
|
||||
.bun, .bun_macro => true,
|
||||
else => false,
|
||||
};
|
||||
|
||||
if (try object.getTruthy(globalThis, "macro")) |macros| {
|
||||
macros: {
|
||||
@@ -620,25 +624,26 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
|
||||
return globalObject.throwInvalidArguments("replace must be an object", .{});
|
||||
}
|
||||
|
||||
var iter = try JSC.JSPropertyIterator(.{
|
||||
var iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = true,
|
||||
.include_value = true,
|
||||
}).init(globalThis, replace);
|
||||
defer iter.deinit();
|
||||
|
||||
if (iter.len > 0) {
|
||||
errdefer iter.deinit();
|
||||
try replacements.ensureUnusedCapacity(bun.default_allocator, iter.len);
|
||||
|
||||
// We cannot set the exception before `try` because it could be
|
||||
// a double free with the `errdefer`.
|
||||
defer if (globalThis.hasException()) {
|
||||
iter.deinit();
|
||||
for (replacements.keys()) |key| {
|
||||
bun.default_allocator.free(@constCast(key));
|
||||
}
|
||||
replacements.clearAndFree(bun.default_allocator);
|
||||
};
|
||||
|
||||
while (try iter.next()) |key_| {
|
||||
while (iter.next()) |key_| {
|
||||
const value = iter.value;
|
||||
if (value == .zero) continue;
|
||||
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
import { define } from "../../codegen/class-definitions";
|
||||
|
||||
export default [
|
||||
define({
|
||||
name: "S3Client",
|
||||
construct: true,
|
||||
finalize: true,
|
||||
configurable: false,
|
||||
klass: {
|
||||
file: {
|
||||
fn: "staticFile",
|
||||
length: 2,
|
||||
},
|
||||
unlink: {
|
||||
fn: "staticUnlink",
|
||||
length: 2,
|
||||
},
|
||||
delete: {
|
||||
/// just an alias for unlink
|
||||
fn: "staticUnlink",
|
||||
length: 2,
|
||||
},
|
||||
presign: {
|
||||
fn: "staticPresign",
|
||||
length: 2,
|
||||
},
|
||||
exists: {
|
||||
fn: "staticExists",
|
||||
length: 2,
|
||||
},
|
||||
size: {
|
||||
fn: "staticSize",
|
||||
length: 2,
|
||||
},
|
||||
write: {
|
||||
fn: "staticWrite",
|
||||
length: 2,
|
||||
},
|
||||
stat: {
|
||||
fn: "staticStat",
|
||||
length: 2,
|
||||
},
|
||||
},
|
||||
JSType: "0b11101110",
|
||||
proto: {
|
||||
file: {
|
||||
fn: "file",
|
||||
length: 2,
|
||||
},
|
||||
unlink: {
|
||||
fn: "unlink",
|
||||
length: 2,
|
||||
},
|
||||
delete: {
|
||||
/// just an alias for unlink
|
||||
fn: "unlink",
|
||||
length: 2,
|
||||
},
|
||||
presign: {
|
||||
fn: "presign",
|
||||
length: 2,
|
||||
},
|
||||
exists: {
|
||||
fn: "exists",
|
||||
length: 2,
|
||||
},
|
||||
size: {
|
||||
fn: "size",
|
||||
length: 2,
|
||||
},
|
||||
write: {
|
||||
fn: "write",
|
||||
length: 2,
|
||||
},
|
||||
stat: {
|
||||
fn: "stat",
|
||||
length: 2,
|
||||
},
|
||||
},
|
||||
}),
|
||||
];
|
||||
@@ -1,30 +0,0 @@
|
||||
import { define } from "../../codegen/class-definitions";
|
||||
|
||||
export default [
|
||||
define({
|
||||
name: "S3Stat",
|
||||
construct: true,
|
||||
finalize: true,
|
||||
configurable: false,
|
||||
klass: {},
|
||||
JSType: "0b11101110",
|
||||
proto: {
|
||||
size: {
|
||||
getter: "getSize",
|
||||
cache: true,
|
||||
},
|
||||
lastModified: {
|
||||
getter: "getLastModified",
|
||||
cache: true,
|
||||
},
|
||||
etag: {
|
||||
getter: "getEtag",
|
||||
cache: true,
|
||||
},
|
||||
type: {
|
||||
getter: "getContentType",
|
||||
cache: true,
|
||||
},
|
||||
},
|
||||
}),
|
||||
];
|
||||
@@ -10,7 +10,6 @@ const Async = @import("async");
|
||||
const uv = bun.windows.libuv;
|
||||
const StatWatcherScheduler = @import("../node/node_fs_stat_watcher.zig").StatWatcherScheduler;
|
||||
const Timer = @This();
|
||||
const DNSResolver = @import("./bun/dns_resolver.zig").DNSResolver;
|
||||
|
||||
/// TimeoutMap is map of i32 to nullable Timeout structs
|
||||
/// i32 is exposed to JavaScript and can be used with clearTimeout, clearInterval, etc.
|
||||
@@ -731,10 +730,7 @@ pub const EventLoopTimer = struct {
|
||||
TestRunner,
|
||||
StatWatcherScheduler,
|
||||
UpgradedDuplex,
|
||||
DNSResolver,
|
||||
WindowsNamedPipe,
|
||||
PostgresSQLConnectionTimeout,
|
||||
PostgresSQLConnectionMaxLifetime,
|
||||
|
||||
pub fn Type(comptime T: Tag) type {
|
||||
return switch (T) {
|
||||
@@ -743,10 +739,7 @@ pub const EventLoopTimer = struct {
|
||||
.TestRunner => JSC.Jest.TestRunner,
|
||||
.StatWatcherScheduler => StatWatcherScheduler,
|
||||
.UpgradedDuplex => uws.UpgradedDuplex,
|
||||
.DNSResolver => DNSResolver,
|
||||
.WindowsNamedPipe => uws.WindowsNamedPipe,
|
||||
.PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection,
|
||||
.PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection,
|
||||
};
|
||||
}
|
||||
} else enum {
|
||||
@@ -755,9 +748,6 @@ pub const EventLoopTimer = struct {
|
||||
TestRunner,
|
||||
StatWatcherScheduler,
|
||||
UpgradedDuplex,
|
||||
DNSResolver,
|
||||
PostgresSQLConnectionTimeout,
|
||||
PostgresSQLConnectionMaxLifetime,
|
||||
|
||||
pub fn Type(comptime T: Tag) type {
|
||||
return switch (T) {
|
||||
@@ -766,9 +756,6 @@ pub const EventLoopTimer = struct {
|
||||
.TestRunner => JSC.Jest.TestRunner,
|
||||
.StatWatcherScheduler => StatWatcherScheduler,
|
||||
.UpgradedDuplex => uws.UpgradedDuplex,
|
||||
.DNSResolver => DNSResolver,
|
||||
.PostgresSQLConnectionTimeout => JSC.Postgres.PostgresSQLConnection,
|
||||
.PostgresSQLConnectionMaxLifetime => JSC.Postgres.PostgresSQLConnection,
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -821,14 +808,11 @@ pub const EventLoopTimer = struct {
|
||||
|
||||
pub fn fire(this: *EventLoopTimer, now: *const timespec, vm: *VirtualMachine) Arm {
|
||||
switch (this.tag) {
|
||||
.PostgresSQLConnectionTimeout => return @as(*JSC.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(),
|
||||
.PostgresSQLConnectionMaxLifetime => return @as(*JSC.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("max_lifetime_timer", this))).onMaxLifetimeTimeout(),
|
||||
inline else => |t| {
|
||||
var container: *t.Type() = @alignCast(@fieldParentPtr("event_loop_timer", this));
|
||||
if (comptime t.Type() == TimerObject) {
|
||||
return container.fire(now, vm);
|
||||
}
|
||||
|
||||
if (comptime t.Type() == StatWatcherScheduler) {
|
||||
return container.timerCallback();
|
||||
}
|
||||
@@ -846,10 +830,6 @@ pub const EventLoopTimer = struct {
|
||||
return .disarm;
|
||||
}
|
||||
|
||||
if (comptime t.Type() == DNSResolver) {
|
||||
return container.checkTimeouts(now, vm);
|
||||
}
|
||||
|
||||
return container.callback(container);
|
||||
},
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -2899,14 +2899,14 @@ pub const H2FrameParser = struct {
|
||||
var buffer = shared_request_buffer[0 .. shared_request_buffer.len - FrameHeader.byteSize];
|
||||
var encoded_size: usize = 0;
|
||||
|
||||
var iter = try JSC.JSPropertyIterator(.{
|
||||
var iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = false,
|
||||
.include_value = true,
|
||||
}).init(globalObject, headers_arg);
|
||||
defer iter.deinit();
|
||||
|
||||
// TODO: support CONTINUE for more headers if headers are too big
|
||||
while (try iter.next()) |header_name| {
|
||||
while (iter.next()) |header_name| {
|
||||
if (header_name.length() == 0) continue;
|
||||
|
||||
const name_slice = header_name.toUTF8(bun.default_allocator);
|
||||
@@ -3231,7 +3231,7 @@ pub const H2FrameParser = struct {
|
||||
}
|
||||
|
||||
// we iterate twice, because pseudo headers must be sent first, but can appear anywhere in the headers object
|
||||
var iter = try JSC.JSPropertyIterator(.{
|
||||
var iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = false,
|
||||
.include_value = true,
|
||||
}).init(globalObject, headers_arg);
|
||||
@@ -3240,7 +3240,7 @@ pub const H2FrameParser = struct {
|
||||
for (0..2) |ignore_pseudo_headers| {
|
||||
iter.reset();
|
||||
|
||||
while (try iter.next()) |header_name| {
|
||||
while (iter.next()) |header_name| {
|
||||
if (header_name.length() == 0) continue;
|
||||
|
||||
const name_slice = header_name.toUTF8(bun.default_allocator);
|
||||
|
||||
@@ -1626,7 +1626,6 @@ fn NewSocket(comptime ssl: bool) type {
|
||||
|
||||
if (callback == .zero) {
|
||||
if (handlers.promise.trySwap()) |promise| {
|
||||
handlers.promise.deinit();
|
||||
if (this.this_value != .zero) {
|
||||
this.this_value = .zero;
|
||||
}
|
||||
@@ -1634,7 +1633,7 @@ fn NewSocket(comptime ssl: bool) type {
|
||||
|
||||
// reject the promise on connect() error
|
||||
const err_value = err.toErrorInstance(globalObject);
|
||||
promise.asPromise().?.reject(globalObject, err_value);
|
||||
promise.asPromise().?.rejectOnNextTick(globalObject, err_value);
|
||||
}
|
||||
|
||||
return;
|
||||
@@ -1658,7 +1657,7 @@ fn NewSocket(comptime ssl: bool) type {
|
||||
// The error is effectively handled, but we should still reject the promise.
|
||||
var promise = val.asPromise().?;
|
||||
const err_ = err.toErrorInstance(globalObject);
|
||||
promise.rejectAsHandled(globalObject, err_);
|
||||
promise.rejectOnNextTickAsHandled(globalObject, err_);
|
||||
}
|
||||
}
|
||||
pub fn onConnectError(this: *This, _: Socket, errno: c_int) void {
|
||||
|
||||
@@ -122,18 +122,17 @@ pub const ResourceUsage = struct {
|
||||
};
|
||||
|
||||
pub fn appendEnvpFromJS(globalThis: *JSC.JSGlobalObject, object: JSC.JSValue, envp: *std.ArrayList(?[*:0]const u8), PATH: *[]const u8) !void {
|
||||
var object_iter = try JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true }).init(globalThis, object);
|
||||
var object_iter = JSC.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true }).init(globalThis, object);
|
||||
defer object_iter.deinit();
|
||||
|
||||
try envp.ensureTotalCapacityPrecise(object_iter.len +
|
||||
// +1 incase there's IPC
|
||||
// +1 for null terminator
|
||||
2);
|
||||
while (try object_iter.next()) |key| {
|
||||
while (object_iter.next()) |key| {
|
||||
var value = object_iter.value;
|
||||
if (value == .undefined) continue;
|
||||
|
||||
const line = try std.fmt.allocPrintZ(envp.allocator, "{}={}", .{ key, value.getZigString(globalThis) });
|
||||
var line = try std.fmt.allocPrintZ(envp.allocator, "{}={}", .{ key, value.getZigString(globalThis) });
|
||||
|
||||
if (key.eqlComptime("PATH")) {
|
||||
PATH.* = bun.asByteSlice(line["PATH=".len..]);
|
||||
@@ -1704,8 +1703,6 @@ pub const Subprocess = struct {
|
||||
return spawnMaybeSync(globalThis, args, secondaryArgsValue, true);
|
||||
}
|
||||
|
||||
extern "C" const BUN_DEFAULT_PATH_FOR_SPAWN: [*:0]const u8;
|
||||
|
||||
// This is split into a separate function to conserve stack space.
|
||||
// On Windows, a single path buffer can take 64 KB.
|
||||
fn getArgv0(globalThis: *JSC.JSGlobalObject, PATH: []const u8, cwd: []const u8, argv0: ?[*:0]const u8, first_cmd: JSValue, allocator: std.mem.Allocator) bun.JSError!struct {
|
||||
@@ -1720,30 +1717,14 @@ pub const Subprocess = struct {
|
||||
|
||||
var actual_argv0: [:0]const u8 = "";
|
||||
|
||||
const argv0_to_use: []const u8 = if (argv0) |_argv0|
|
||||
bun.sliceTo(_argv0, 0)
|
||||
else
|
||||
arg0.slice();
|
||||
|
||||
// This mimicks libuv's behavior, which mimicks execvpe
|
||||
// Only resolve from $PATH when the command is not an absolute path
|
||||
const PATH_to_use: []const u8 = if (strings.containsChar(argv0_to_use, '/'))
|
||||
""
|
||||
// If no $PATH is provided, we fallback to the one from environ
|
||||
// This is already the behavior of the PATH passed in here.
|
||||
else if (PATH.len > 0)
|
||||
PATH
|
||||
else if (comptime Environment.isPosix)
|
||||
// If the user explicitly passed an empty $PATH, we fallback to the OS-specific default (which libuv also does)
|
||||
bun.sliceTo(BUN_DEFAULT_PATH_FOR_SPAWN, 0)
|
||||
else
|
||||
"";
|
||||
|
||||
if (PATH_to_use.len == 0) {
|
||||
actual_argv0 = try allocator.dupeZ(u8, argv0_to_use);
|
||||
if (argv0 == null) {
|
||||
const resolved = Which.which(path_buf, PATH, cwd, arg0.slice()) orelse {
|
||||
return throwCommandNotFound(globalThis, arg0.slice());
|
||||
};
|
||||
actual_argv0 = try allocator.dupeZ(u8, resolved);
|
||||
} else {
|
||||
const resolved = Which.which(path_buf, PATH_to_use, cwd, argv0_to_use) orelse {
|
||||
return throwCommandNotFound(globalThis, argv0_to_use);
|
||||
const resolved = Which.which(path_buf, PATH, cwd, bun.sliceTo(argv0.?, 0)) orelse {
|
||||
return throwCommandNotFound(globalThis, arg0.slice());
|
||||
};
|
||||
actual_argv0 = try allocator.dupeZ(u8, resolved);
|
||||
}
|
||||
@@ -1754,41 +1735,6 @@ pub const Subprocess = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn getArgv(globalThis: *JSC.JSGlobalObject, args: JSValue, PATH: []const u8, cwd: []const u8, argv0: *?[*:0]const u8, allocator: std.mem.Allocator, argv: *std.ArrayList(?[*:0]const u8)) bun.JSError!void {
|
||||
var cmds_array = args.arrayIterator(globalThis);
|
||||
// + 1 for argv0
|
||||
// + 1 for null terminator
|
||||
argv.* = try @TypeOf(argv.*).initCapacity(allocator, cmds_array.len + 2);
|
||||
|
||||
if (args.isEmptyOrUndefinedOrNull()) {
|
||||
return globalThis.throwInvalidArguments("cmd must be an array of strings", .{});
|
||||
}
|
||||
|
||||
if (cmds_array.len == 0) {
|
||||
return globalThis.throwInvalidArguments("cmd must not be empty", .{});
|
||||
}
|
||||
|
||||
const argv0_result = try getArgv0(globalThis, PATH, cwd, argv0.*, cmds_array.next().?, allocator);
|
||||
|
||||
argv0.* = argv0_result.argv0.ptr;
|
||||
argv.appendAssumeCapacity(argv0_result.arg0.ptr);
|
||||
|
||||
while (cmds_array.next()) |value| {
|
||||
const arg = try value.toBunString2(globalThis);
|
||||
defer arg.deref();
|
||||
|
||||
// if the string is empty, ignore it, don't add it to the argv
|
||||
if (arg.isEmpty()) {
|
||||
continue;
|
||||
}
|
||||
argv.appendAssumeCapacity(try arg.toOwnedSliceZ(allocator));
|
||||
}
|
||||
|
||||
if (argv.items.len == 0) {
|
||||
return globalThis.throwInvalidArguments("cmd must be an array of strings", .{});
|
||||
}
|
||||
}
|
||||
|
||||
pub fn spawnMaybeSync(
|
||||
globalThis: *JSC.JSGlobalObject,
|
||||
args_: JSValue,
|
||||
@@ -1884,6 +1830,40 @@ pub const Subprocess = struct {
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
var cmds_array = cmd_value.arrayIterator(globalThis);
|
||||
// + 1 for argv0
|
||||
// + 1 for null terminator
|
||||
argv = try @TypeOf(argv).initCapacity(allocator, cmds_array.len + 2);
|
||||
|
||||
if (cmd_value.isEmptyOrUndefinedOrNull()) {
|
||||
return globalThis.throwInvalidArguments("cmd must be an array of strings", .{});
|
||||
}
|
||||
|
||||
if (cmds_array.len == 0) {
|
||||
return globalThis.throwInvalidArguments("cmd must not be empty", .{});
|
||||
}
|
||||
|
||||
const argv0_result = try getArgv0(globalThis, PATH, cwd, argv0, cmds_array.next().?, allocator);
|
||||
argv0 = argv0_result.argv0.ptr;
|
||||
argv.appendAssumeCapacity(argv0_result.arg0.ptr);
|
||||
|
||||
while (cmds_array.next()) |value| {
|
||||
const arg = try value.toBunString2(globalThis);
|
||||
defer arg.deref();
|
||||
|
||||
// if the string is empty, ignore it, don't add it to the argv
|
||||
if (arg.isEmpty()) {
|
||||
continue;
|
||||
}
|
||||
argv.appendAssumeCapacity(try arg.toOwnedSliceZ(allocator));
|
||||
}
|
||||
|
||||
if (argv.items.len == 0) {
|
||||
return globalThis.throwInvalidArguments("cmd must be an array of strings", .{});
|
||||
}
|
||||
}
|
||||
|
||||
if (args != .zero and args.isObject()) {
|
||||
// This must run before the stdio parsing happens
|
||||
if (!is_sync) {
|
||||
@@ -1950,15 +1930,11 @@ pub const Subprocess = struct {
|
||||
|
||||
override_env = true;
|
||||
// If the env object does not include a $PATH, it must disable path lookup for argv[0]
|
||||
var NEW_PATH: []const u8 = "";
|
||||
PATH = "";
|
||||
var envp_managed = env_array.toManaged(allocator);
|
||||
try appendEnvpFromJS(globalThis, object, &envp_managed, &NEW_PATH);
|
||||
try appendEnvpFromJS(globalThis, object, &envp_managed, &PATH);
|
||||
env_array = envp_managed.moveToUnmanaged();
|
||||
PATH = NEW_PATH;
|
||||
}
|
||||
|
||||
try getArgv(globalThis, cmd_value, PATH, cwd, &argv0, allocator, &argv);
|
||||
|
||||
if (try args.get(globalThis, "stdio")) |stdio_val| {
|
||||
if (!stdio_val.isEmptyOrUndefinedOrNull()) {
|
||||
if (stdio_val.jsType().isArray()) {
|
||||
@@ -2031,8 +2007,6 @@ pub const Subprocess = struct {
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
try getArgv(globalThis, cmd_value, PATH, cwd, &argv0, allocator, &argv);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2150,20 +2124,6 @@ pub const Subprocess = struct {
|
||||
}) {
|
||||
.err => |err| {
|
||||
spawn_options.deinit();
|
||||
switch (err.getErrno()) {
|
||||
.ACCES, .NOENT, .PERM, .ISDIR, .NOTDIR => {
|
||||
const display_path: [:0]const u8 = if (argv0 != null)
|
||||
std.mem.sliceTo(argv0.?, 0)
|
||||
else if (argv.items.len > 0 and argv.items[0] != null)
|
||||
std.mem.sliceTo(argv.items[0].?, 0)
|
||||
else
|
||||
"";
|
||||
if (display_path.len > 0)
|
||||
return globalThis.throwValue(err.withPath(display_path).toJSC(globalThis));
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
return globalThis.throwValue(err.toJSC(globalThis));
|
||||
},
|
||||
.result => |result| result,
|
||||
|
||||
@@ -63,6 +63,7 @@ const JSPromise = bun.JSC.JSPromise;
|
||||
const JSInternalPromise = bun.JSC.JSInternalPromise;
|
||||
const JSModuleLoader = bun.JSC.JSModuleLoader;
|
||||
const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation;
|
||||
const Exception = bun.JSC.Exception;
|
||||
const ErrorableZigString = bun.JSC.ErrorableZigString;
|
||||
const ZigGlobalObject = bun.JSC.ZigGlobalObject;
|
||||
const VM = bun.JSC.VM;
|
||||
@@ -705,9 +706,9 @@ pub const FFI = struct {
|
||||
if (try object.getTruthy(globalThis, "define")) |define_value| {
|
||||
if (define_value.isObject()) {
|
||||
const Iter = JSC.JSPropertyIterator(.{ .include_value = true, .skip_empty_name = true });
|
||||
var iter = try Iter.init(globalThis, define_value);
|
||||
var iter = Iter.init(globalThis, define_value);
|
||||
defer iter.deinit();
|
||||
while (try iter.next()) |entry| {
|
||||
while (iter.next()) |entry| {
|
||||
const key = entry.toOwnedSliceZ(bun.default_allocator) catch bun.outOfMemory();
|
||||
var owned_value: [:0]const u8 = "";
|
||||
if (iter.value != .zero and iter.value != .undefined) {
|
||||
@@ -982,14 +983,8 @@ pub const FFI = struct {
|
||||
return val;
|
||||
}
|
||||
JSC.markBinding(@src());
|
||||
var strs = std.ArrayList(bun.String).initCapacity(allocator, symbols.count()) catch bun.outOfMemory();
|
||||
defer {
|
||||
for (strs.items) |str| {
|
||||
str.deref();
|
||||
}
|
||||
strs.deinit();
|
||||
}
|
||||
for (symbols.values()) |*function| {
|
||||
var zig_strings = allocator.alloc(ZigString, symbols.count()) catch unreachable;
|
||||
for (symbols.values(), 0..) |*function, i| {
|
||||
var arraylist = std.ArrayList(u8).init(allocator);
|
||||
var writer = arraylist.writer();
|
||||
function.printSourceCode(&writer) catch {
|
||||
@@ -997,6 +992,9 @@ pub const FFI = struct {
|
||||
for (symbols.keys()) |key| {
|
||||
allocator.free(@constCast(key));
|
||||
}
|
||||
for (zig_strings) |zig_string| {
|
||||
allocator.free(@constCast(zig_string.slice()));
|
||||
}
|
||||
for (symbols.values()) |*function_| {
|
||||
function_.arg_types.deinit(allocator);
|
||||
}
|
||||
@@ -1004,14 +1002,17 @@ pub const FFI = struct {
|
||||
symbols.clearAndFree(allocator);
|
||||
return ZigString.init("Error while printing code").toErrorInstance(global);
|
||||
};
|
||||
strs.appendAssumeCapacity(bun.String.createUTF8(arraylist.items));
|
||||
zig_strings[i] = ZigString.init(arraylist.items);
|
||||
}
|
||||
|
||||
const ret = bun.String.toJSArray(global, strs.items);
|
||||
const ret = JSC.JSValue.createStringArray(global, zig_strings.ptr, zig_strings.len, true);
|
||||
|
||||
for (symbols.keys()) |key| {
|
||||
allocator.free(@constCast(key));
|
||||
}
|
||||
for (zig_strings) |zig_string| {
|
||||
allocator.free(@constCast(zig_string.slice()));
|
||||
}
|
||||
for (symbols.values()) |*function_| {
|
||||
function_.arg_types.deinit(allocator);
|
||||
if (function_.step == .compiled) {
|
||||
@@ -1420,7 +1421,7 @@ pub const FFI = struct {
|
||||
JSC.markBinding(@src());
|
||||
const allocator = VirtualMachine.get().allocator;
|
||||
|
||||
var symbols_iter = try JSC.JSPropertyIterator(.{
|
||||
var symbols_iter = JSC.JSPropertyIterator(.{
|
||||
.skip_empty_name = true,
|
||||
|
||||
.include_value = true,
|
||||
@@ -1429,7 +1430,7 @@ pub const FFI = struct {
|
||||
|
||||
try symbols.ensureTotalCapacity(allocator, symbols_iter.len);
|
||||
|
||||
while (try symbols_iter.next()) |prop| {
|
||||
while (symbols_iter.next()) |prop| {
|
||||
const value = symbols_iter.value;
|
||||
|
||||
if (value.isEmptyOrUndefinedOrNull()) {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user